_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
9f19c6b755f600fb14f8189f86a6c0969599380d377dadf935364327948abfc4 | clojure-interop/aws-api | core.clj | (ns com.amazonaws.services.workdocs.core
(:refer-clojure :only [require comment defn ->])
(:import ))
(require '[com.amazonaws.services.workdocs.AbstractAmazonWorkDocs])
(require '[com.amazonaws.services.workdocs.AbstractAmazonWorkDocsAsync])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocs])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocsAsync])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocsAsyncClient])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocsAsyncClientBuilder])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocsClient])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocsClientBuilder])
(require '[com.amazonaws.services.workdocs.ContentManager])
(require '[com.amazonaws.services.workdocs.ContentManagerAsync])
(require '[com.amazonaws.services.workdocs.ContentManagerAsyncBuilder])
(require '[com.amazonaws.services.workdocs.ContentManagerBuilder])
(require '[com.amazonaws.services.workdocs.GetDocumentStreamRequest])
(require '[com.amazonaws.services.workdocs.GetDocumentStreamResult])
(require '[com.amazonaws.services.workdocs.UploadDocumentStreamRequest])
(require '[com.amazonaws.services.workdocs.UploadDocumentStreamResult])
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.workdocs/src/com/amazonaws/services/workdocs/core.clj | clojure | (ns com.amazonaws.services.workdocs.core
(:refer-clojure :only [require comment defn ->])
(:import ))
(require '[com.amazonaws.services.workdocs.AbstractAmazonWorkDocs])
(require '[com.amazonaws.services.workdocs.AbstractAmazonWorkDocsAsync])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocs])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocsAsync])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocsAsyncClient])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocsAsyncClientBuilder])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocsClient])
(require '[com.amazonaws.services.workdocs.AmazonWorkDocsClientBuilder])
(require '[com.amazonaws.services.workdocs.ContentManager])
(require '[com.amazonaws.services.workdocs.ContentManagerAsync])
(require '[com.amazonaws.services.workdocs.ContentManagerAsyncBuilder])
(require '[com.amazonaws.services.workdocs.ContentManagerBuilder])
(require '[com.amazonaws.services.workdocs.GetDocumentStreamRequest])
(require '[com.amazonaws.services.workdocs.GetDocumentStreamResult])
(require '[com.amazonaws.services.workdocs.UploadDocumentStreamRequest])
(require '[com.amazonaws.services.workdocs.UploadDocumentStreamResult])
| |
a2e65a9f0ea6eb63d37508270e714609c61be3adf6c9cddca6f06a3ab23450ad | ublubu/shapes | Linear.hs | # LANGUAGE TemplateHaskell #
# LANGUAGE MagicHash #
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE BangPatterns #-}
# LANGUAGE DeriveGeneric #
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE TypeFamilies #
# LANGUAGE MultiParamTypeClasses #
{- |
Arithmetic utility functions for vectors and matrices.
-}
module Physics.Linear where
import GHC.Generics (Generic)
import GHC.Prim
import GHC.Types (Double(D#))
import Control.DeepSeq
import Control.Lens
import Data.Vector.Unboxed.Deriving
import Shapes.Linear.Template (makeVectorType, defineJoinSplit)
import Shapes.Linear.MatrixTemplate
import Shapes.Linear.ValueInfos (doubleInfo)
import Utils.Utils
$(makeVectorType doubleInfo 2)
$(makeVectorType doubleInfo 3)
$(makeVectorType doubleInfo 6)
$(makeMatrixType doubleInfo (2, 2))
$(makeMatrixType doubleInfo (3, 3))
$(makeMatrixType doubleInfo (6, 6))
$(defineMatrixMul doubleInfo (2, 2, 2))
$(defineMatrixMul doubleInfo (3, 3, 3))
$(defineJoinSplit doubleInfo (3, 3))
newtype Diag6 = Diag6 V6 deriving Show
instance NFData V2 where
rnf (V2 _ _) = ()
# INLINE rnf #
newtype P2 = P2 V2 deriving (Generic, Show, NFData)
makeLenses ''P2
derivingUnbox "V2"
[t| V2 -> (Double, Double) |]
[| \(V2 a b) -> (D# a, D# b) |]
[| \(D# a, D# b) -> V2 a b |]
derivingUnbox "P2"
[t| P2 -> V2 |]
[| \(P2 v) -> v |]
[| P2 |]
derivingUnbox "V6"
[t| V6 -> (Double, Double, Double, Double, Double, Double) |]
[| \(V6 a b c d e f) -> (D# a, D# b, D# c, D# d, D# e, D# f) |]
[| \(D# a, D# b, D# c, D# d, D# e, D# f) -> V6 a b c d e f |]
append2 :: V2 -> Double -> V3
(V2 a b) `append2` (D# c) = V3 a b c
# INLINE append2 #
split3 :: V3 -> (V2, Double)
split3 (V3 a b c) = (V2 a b, D# c)
# INLINE split3 #
smulV2 :: Double -> V2 -> V2
smulV2 (D# s) = liftV2 (*## s)
# INLINE smulV2 #
smulV2' :: V2 -> Double -> V2
smulV2' = flip smulV2
# INLINE smulV2 ' #
sdivV2 :: Double -> V2 -> V2
sdivV2 (D# s) = liftV2 (/## s)
# INLINE sdivV2 #
smulV6 :: Double -> V6 -> V6
smulV6 (D# s) = liftV6 (*## s)
# INLINE smulV6 #
smulV6' :: V6 -> Double -> V6
smulV6' = flip smulV6
{-# INLINE smulV6' #-}
smulM2x2 :: Double -> M2x2 -> M2x2
smulM2x2 (D# s) = liftM2x2 (*## s)
# INLINE smulM2x2 #
smulM2x2' :: M2x2 -> Double -> M2x2
smulM2x2' = flip smulM2x2
{-# INLINE smulM2x2' #-}
plusV2 :: V2 -> V2 -> V2
plusV2 = lift2V2 (+##)
# INLINE plusV2 #
plusV6 :: V6 -> V6 -> V6
plusV6 = lift2V6 (+##)
# INLINE plusV6 #
zeroV2 :: V2
zeroV2 = V2 0.0## 0.0##
zeroP2 :: P2
zeroP2 = P2 zeroV2
minusV2 :: V2 -> V2 -> V2
minusV2 = lift2V2 (-##)
# INLINE minusV2 #
crossV2 :: V2 -> V2 -> Double
crossV2 (V2 ax ay) (V2 bx by) = D# ((ax *## by) -## (ay *## bx))
# INLINE crossV2 #
crosszV2 :: V2 -> Double -> V2
crosszV2 (V2 ax ay) (D# bz) = V2 x y
where x = ay *## bz
y = negateDouble# (ax *## bz)
zcrossV2 :: Double -> V2 -> V2
zcrossV2 (D# az) (V2 bx by) = V2 x y
where x = negateDouble# (az *## by)
y = az *## bx
unitV2 :: Double -> V2
unitV2 (D# theta) = V2 (cosDouble# theta) (sinDouble# theta)
crossV2V2 :: V2 -> V2 -> V2 -> V2
crossV2V2 (V2 ax ay) (V2 bx by) (V2 cx cy) = V2 abcx abcy
where abz = ax *## by -## ay *## bx
abcx = negateDouble# (abz *## cy)
abcy = abz *## cx
vmulDiag6 :: V6 -> Diag6 -> V6
vmulDiag6 v (Diag6 m) = lift2V6 (*##) v m
# INLINE vmulDiag6 #
vmulDiag6' :: Diag6 -> V6 -> V6
vmulDiag6' (Diag6 m) v = lift2V6 (*##) v m
{-# INLINE vmulDiag6' #-}
flip3v3 :: V6 -> V6
flip3v3 (V6 a b c d e f) = V6 d e f a b c
# INLINE flip3v3 #
afdot :: P2 -> V2 -> Double
afdot (P2 v0) v1 = D# (v0 `dotV2` v1)
# INLINE afdot #
afdot' :: V2 -> P2 -> Double
afdot' = flip afdot
# INLINE afdot ' #
clockwiseV2 :: V2 -> V2
clockwiseV2 (V2 x y) = V2 y (negateDouble# x)
# INLINE clockwiseV2 #
normalizeV2 :: V2 -> V2
normalizeV2 (V2 x y) = V2 (x /## n) (y /## n)
where n = sqrtDouble# ((x *## x) +## (y *## y))
{-# INLINE normalizeV2 #-}
-- | Length of a vector.
lengthV2 :: V2 -> Double
lengthV2 (V2 x y) = D# (sqrtDouble# ((x *## x) +## (y *## y)))
-- | Squared length of a vector.
sqLengthV2 :: V2 -> Double
sqLengthV2 (V2 x y) = D# ((x *## x) +## (y *## y))
diffP2 :: P2 -> P2 -> V2
diffP2 (P2 v0) (P2 v1) = v0 `minusV2` v1
# INLINE diffP2 #
midpointP2 :: P2 -> P2 -> P2
midpointP2 (P2 v0) (P2 v1) = P2 (2 `sdivV2` (v0 `plusV2` v1))
vplusP2 :: V2 -> P2 -> P2
vplusP2 v0 (P2 v1) = P2 (v0 `plusV2` v1)
pminusV2 :: P2 -> V2 -> P2
pminusV2 (P2 v0) v1 = P2 (v0 `minusV2` v1)
pplusV2 :: P2 -> V2 -> P2
pplusV2 (P2 v0) v1 = P2 (v0 `plusV2` v1)
invM2x2 :: M2x2 -> M2x2
invM2x2 (M2x2 a b c d) =
D# invDet `smulM2x2` M2x2 d (negateDouble# b) (negateDouble# c) a
where det = (a *## d) -## (b *## c)
invDet = 1.0## /## det
# INLINE invM2x2 #
negateV2 :: V2 -> V2
negateV2 = liftV2 negateDouble#
# INLINE negateV2 #
identity2x2 :: M2x2
identity2x2 = M2x2 1.0## 0.0## 0.0## 1.0##
# INLINE identity2x2 #
identity3x3 :: M3x3
identity3x3 =
M3x3
1.0## 0.0## 0.0##
0.0## 1.0## 0.0##
0.0## 0.0## 1.0##
# INLINE identity3x3 #
afmul :: M3x3 -> V2 -> V2
afmul t (V2 a b) = V2 x y
where !(V3 x y _) = t `mul3x3c` V3 a b 1.0##
# INLINE afmul #
afmul' :: M3x3 -> P2 -> P2
afmul' t (P2 v) = P2 $ t `afmul` v
# INLINE afmul ' #
{-
WORKING WITH LINES
-}
data Line2 = Line2 { linePoint :: !P2
, lineNormal :: !V2 }
toLine2 :: P2 -> P2 -> Line2
toLine2 a b = Line2 { linePoint = a
, lineNormal = clockwiseV2 (b `diffP2` a) }
# INLINE toLine2 #
perpLine2 :: P2 -> P2 -> Line2
perpLine2 a b = Line2 { linePoint = a
, lineNormal = b `diffP2` a }
# INLINE perpLine2 #
-- solving some `mx = b` up in here
intersect2 :: Line2 -> Line2 -> P2
intersect2 (Line2 p n@(V2 n0 n1)) (Line2 p' n'@(V2 n2 n3)) =
P2 (invM2x2 m `mul2x2c` b)
where b = V2 b0 b1
!(D# b0) = p `afdot` n
!(D# b1) = p' `afdot` n'
m = M2x2 n0 n1 n2 n3
# INLINE intersect2 #
{-
CLIPPING LINE SEGMENTS
-}
data ClipResult a
= ClipLeft !a -- ^ clip the left side to this new endpoint
| ClipRight !a -- ^ clip the right side to this new endpoint
| ClipBoth !a -- ^ the entire segment was out-of-bounds
| ClipNone -- ^ the entire segment was in-bounds
{- |
Apply a 'ClipResult' to a line segment. Replaces clipped endpoints.
If both endpoints (entire segment) clipped, use 'Left'ed clip point.
TODO: Delete this function?
-}
applyClip ::
ClipResult a
-> SP a a
-> Either a (SP a a)
applyClip res (SP a b) = case res of
ClipLeft c -> Right (SP c b)
ClipRight c -> Right (SP a c)
ClipBoth c -> Left c
ClipNone -> Right (SP a b)
# INLINE applyClip #
-- | Alternate form of 'applyClip'. 'Nothing' if entire segment clipped.
applyClip' :: ClipResult a -> SP a a -> Maybe (SP a a)
applyClip' (ClipBoth _) _ = Nothing -- redundant definition
applyClip' res seg = either (const Nothing) Just (applyClip res seg)
# INLINE applyClip ' #
-- | Alternate form of 'applyClip'. Removes clipped points.
applyClip'' :: ClipResult a -> SP s s -> Maybe (Either s (SP s s))
applyClip'' res (SP a b) = case res of
ClipLeft _ -> Just $ Left b
ClipRight _ -> Just $ Left a
ClipBoth _ -> Nothing
ClipNone -> Just $ Right (SP a b)
{-# INLINE applyClip'' #-}
{- |
Alternate form of 'applyClip'. Applies clipping using the given lens.
If 'ClipBoth', then use only the 'first' vertex of the line segment
and change it to use the clipping point. (TODO: Why?)
TODO: Delete this function?
-}
lApplyClip :: ASetter' s a
-- ^ lens to access the "point" data to apply the clipping
-> ClipResult a
-- ^ clipping
-> SP s s
-- ^ line segment with endpoints that contain "point" data
-> Either s (SP s s)
lApplyClip l res (SP a b) = case res of
ClipLeft c -> Right (SP (set l c a) b)
ClipRight c -> Right (SP a (set l c b))
ClipBoth c -> Left (set l c a) -- use the 'first' vertex by default
ClipNone -> Right (SP a b)
# INLINE lApplyClip #
-- | Alternate form of 'lApplyClip'. If the entire segment was behind the bound, use 'Nothing'.
lApplyClip' :: ASetter' s a -> ClipResult a -> SP s s -> Maybe (SP s s)
lApplyClip' _ (ClipBoth _) _ = Nothing -- redundant definition
lApplyClip' l res seg = either (const Nothing) Just (lApplyClip l res seg)
# INLINE lApplyClip ' #
{- |
Given a bounding plane (expressed as a point and a normal),
figure out how to clip a line segment so it is on the positive side of the plane.
-}
clipSegment :: Line2
-- ^ bounding plane
-> SP Line2 (SP P2 P2)
-- ^ (plane of the line segment, endpoints of the line segment)
-> ClipResult P2
-- ^ which endpoint(s) to clip, and what point to clip to
clipSegment boundary (SP incident (SP a b))
| a' < c' = if b' < c' then ClipBoth c
else ClipLeft c
| b' < c' = ClipRight c
| otherwise = ClipNone
where c = intersect2 boundary incident
n = lineNormal boundary
a' = a `afdot` n
b' = b `afdot` n
c' = c `afdot` n
# INLINE clipSegment #
{-
TRANSFORMS
-}
rotate22_ :: Double# -> Double# -> M2x2
rotate22_ cosv sinv = M2x2 cosv (negateDouble# sinv) sinv cosv
{-# INLINE rotate22_ #-}
rotate22 :: Double# -> M2x2
rotate22 ori = rotate22_ c s
where c = cosDouble# ori
s = sinDouble# ori
# INLINE rotate22 #
afmat33 :: M2x2 -> M3x3
afmat33 (M2x2 x0 x1 y0 y1) =
M3x3
x0 x1 zer
y0 y1 zer
zer zer one
where !one = 1.0##
!zer = 0.0##
# INLINE afmat33 #
aftranslate33 :: V2 -> M3x3
aftranslate33 (V2 x y) =
M3x3
one zer x
zer one y
zer zer one
where !one = 1.0##
!zer = 0.0##
# INLINE aftranslate33 #
afrotate33 :: Double# -> M3x3
afrotate33 ori = afmat33 (rotate22 ori)
# INLINE afrotate33 #
afscale33 :: V2 -> M3x3
afscale33 (V2 x y) =
M3x3
x zer zer
zer y zer
zer zer one
where !one = 1.0##
!zer = 0.0##
# INLINE afscale33 #
| null | https://raw.githubusercontent.com/ublubu/shapes/fa5d959c17224a851d517826deeae097f1583392/shapes/src/Physics/Linear.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE BangPatterns #
# LANGUAGE DeriveAnyClass #
|
Arithmetic utility functions for vectors and matrices.
# INLINE smulV6' #
# INLINE smulM2x2' #
# INLINE vmulDiag6' #
# INLINE normalizeV2 #
| Length of a vector.
| Squared length of a vector.
solving some `mx = b` up in here
CLIPPING LINE SEGMENTS
^ clip the left side to this new endpoint
^ clip the right side to this new endpoint
^ the entire segment was out-of-bounds
^ the entire segment was in-bounds
| Alternate form of 'applyClip'. 'Nothing' if entire segment clipped.
redundant definition
| Alternate form of 'applyClip'. Removes clipped points.
# INLINE applyClip'' #
^ lens to access the "point" data to apply the clipping
^ clipping
^ line segment with endpoints that contain "point" data
use the 'first' vertex by default
| Alternate form of 'lApplyClip'. If the entire segment was behind the bound, use 'Nothing'.
redundant definition
|
Given a bounding plane (expressed as a point and a normal),
figure out how to clip a line segment so it is on the positive side of the plane.
^ bounding plane
^ (plane of the line segment, endpoints of the line segment)
^ which endpoint(s) to clip, and what point to clip to
TRANSFORMS
# INLINE rotate22_ # | # LANGUAGE TemplateHaskell #
# LANGUAGE MagicHash #
# LANGUAGE DeriveGeneric #
# LANGUAGE TypeFamilies #
# LANGUAGE MultiParamTypeClasses #
module Physics.Linear where
import GHC.Generics (Generic)
import GHC.Prim
import GHC.Types (Double(D#))
import Control.DeepSeq
import Control.Lens
import Data.Vector.Unboxed.Deriving
import Shapes.Linear.Template (makeVectorType, defineJoinSplit)
import Shapes.Linear.MatrixTemplate
import Shapes.Linear.ValueInfos (doubleInfo)
import Utils.Utils
$(makeVectorType doubleInfo 2)
$(makeVectorType doubleInfo 3)
$(makeVectorType doubleInfo 6)
$(makeMatrixType doubleInfo (2, 2))
$(makeMatrixType doubleInfo (3, 3))
$(makeMatrixType doubleInfo (6, 6))
$(defineMatrixMul doubleInfo (2, 2, 2))
$(defineMatrixMul doubleInfo (3, 3, 3))
$(defineJoinSplit doubleInfo (3, 3))
newtype Diag6 = Diag6 V6 deriving Show
instance NFData V2 where
rnf (V2 _ _) = ()
# INLINE rnf #
newtype P2 = P2 V2 deriving (Generic, Show, NFData)
makeLenses ''P2
derivingUnbox "V2"
[t| V2 -> (Double, Double) |]
[| \(V2 a b) -> (D# a, D# b) |]
[| \(D# a, D# b) -> V2 a b |]
derivingUnbox "P2"
[t| P2 -> V2 |]
[| \(P2 v) -> v |]
[| P2 |]
derivingUnbox "V6"
[t| V6 -> (Double, Double, Double, Double, Double, Double) |]
[| \(V6 a b c d e f) -> (D# a, D# b, D# c, D# d, D# e, D# f) |]
[| \(D# a, D# b, D# c, D# d, D# e, D# f) -> V6 a b c d e f |]
append2 :: V2 -> Double -> V3
(V2 a b) `append2` (D# c) = V3 a b c
# INLINE append2 #
split3 :: V3 -> (V2, Double)
split3 (V3 a b c) = (V2 a b, D# c)
# INLINE split3 #
smulV2 :: Double -> V2 -> V2
smulV2 (D# s) = liftV2 (*## s)
# INLINE smulV2 #
smulV2' :: V2 -> Double -> V2
smulV2' = flip smulV2
# INLINE smulV2 ' #
sdivV2 :: Double -> V2 -> V2
sdivV2 (D# s) = liftV2 (/## s)
# INLINE sdivV2 #
smulV6 :: Double -> V6 -> V6
smulV6 (D# s) = liftV6 (*## s)
# INLINE smulV6 #
smulV6' :: V6 -> Double -> V6
smulV6' = flip smulV6
smulM2x2 :: Double -> M2x2 -> M2x2
smulM2x2 (D# s) = liftM2x2 (*## s)
# INLINE smulM2x2 #
smulM2x2' :: M2x2 -> Double -> M2x2
smulM2x2' = flip smulM2x2
plusV2 :: V2 -> V2 -> V2
plusV2 = lift2V2 (+##)
# INLINE plusV2 #
plusV6 :: V6 -> V6 -> V6
plusV6 = lift2V6 (+##)
# INLINE plusV6 #
zeroV2 :: V2
zeroV2 = V2 0.0## 0.0##
zeroP2 :: P2
zeroP2 = P2 zeroV2
minusV2 :: V2 -> V2 -> V2
minusV2 = lift2V2 (-##)
# INLINE minusV2 #
crossV2 :: V2 -> V2 -> Double
crossV2 (V2 ax ay) (V2 bx by) = D# ((ax *## by) -## (ay *## bx))
# INLINE crossV2 #
crosszV2 :: V2 -> Double -> V2
crosszV2 (V2 ax ay) (D# bz) = V2 x y
where x = ay *## bz
y = negateDouble# (ax *## bz)
zcrossV2 :: Double -> V2 -> V2
zcrossV2 (D# az) (V2 bx by) = V2 x y
where x = negateDouble# (az *## by)
y = az *## bx
unitV2 :: Double -> V2
unitV2 (D# theta) = V2 (cosDouble# theta) (sinDouble# theta)
crossV2V2 :: V2 -> V2 -> V2 -> V2
crossV2V2 (V2 ax ay) (V2 bx by) (V2 cx cy) = V2 abcx abcy
where abz = ax *## by -## ay *## bx
abcx = negateDouble# (abz *## cy)
abcy = abz *## cx
vmulDiag6 :: V6 -> Diag6 -> V6
vmulDiag6 v (Diag6 m) = lift2V6 (*##) v m
# INLINE vmulDiag6 #
vmulDiag6' :: Diag6 -> V6 -> V6
vmulDiag6' (Diag6 m) v = lift2V6 (*##) v m
flip3v3 :: V6 -> V6
flip3v3 (V6 a b c d e f) = V6 d e f a b c
# INLINE flip3v3 #
afdot :: P2 -> V2 -> Double
afdot (P2 v0) v1 = D# (v0 `dotV2` v1)
# INLINE afdot #
afdot' :: V2 -> P2 -> Double
afdot' = flip afdot
# INLINE afdot ' #
clockwiseV2 :: V2 -> V2
clockwiseV2 (V2 x y) = V2 y (negateDouble# x)
# INLINE clockwiseV2 #
normalizeV2 :: V2 -> V2
normalizeV2 (V2 x y) = V2 (x /## n) (y /## n)
where n = sqrtDouble# ((x *## x) +## (y *## y))
lengthV2 :: V2 -> Double
lengthV2 (V2 x y) = D# (sqrtDouble# ((x *## x) +## (y *## y)))
sqLengthV2 :: V2 -> Double
sqLengthV2 (V2 x y) = D# ((x *## x) +## (y *## y))
diffP2 :: P2 -> P2 -> V2
diffP2 (P2 v0) (P2 v1) = v0 `minusV2` v1
# INLINE diffP2 #
midpointP2 :: P2 -> P2 -> P2
midpointP2 (P2 v0) (P2 v1) = P2 (2 `sdivV2` (v0 `plusV2` v1))
vplusP2 :: V2 -> P2 -> P2
vplusP2 v0 (P2 v1) = P2 (v0 `plusV2` v1)
pminusV2 :: P2 -> V2 -> P2
pminusV2 (P2 v0) v1 = P2 (v0 `minusV2` v1)
pplusV2 :: P2 -> V2 -> P2
pplusV2 (P2 v0) v1 = P2 (v0 `plusV2` v1)
invM2x2 :: M2x2 -> M2x2
invM2x2 (M2x2 a b c d) =
D# invDet `smulM2x2` M2x2 d (negateDouble# b) (negateDouble# c) a
where det = (a *## d) -## (b *## c)
invDet = 1.0## /## det
# INLINE invM2x2 #
negateV2 :: V2 -> V2
negateV2 = liftV2 negateDouble#
# INLINE negateV2 #
identity2x2 :: M2x2
identity2x2 = M2x2 1.0## 0.0## 0.0## 1.0##
# INLINE identity2x2 #
identity3x3 :: M3x3
identity3x3 =
M3x3
1.0## 0.0## 0.0##
0.0## 1.0## 0.0##
0.0## 0.0## 1.0##
# INLINE identity3x3 #
afmul :: M3x3 -> V2 -> V2
afmul t (V2 a b) = V2 x y
where !(V3 x y _) = t `mul3x3c` V3 a b 1.0##
# INLINE afmul #
afmul' :: M3x3 -> P2 -> P2
afmul' t (P2 v) = P2 $ t `afmul` v
# INLINE afmul ' #
WORKING WITH LINES
WORKING WITH LINES
-}
data Line2 = Line2 { linePoint :: !P2
, lineNormal :: !V2 }
toLine2 :: P2 -> P2 -> Line2
toLine2 a b = Line2 { linePoint = a
, lineNormal = clockwiseV2 (b `diffP2` a) }
# INLINE toLine2 #
perpLine2 :: P2 -> P2 -> Line2
perpLine2 a b = Line2 { linePoint = a
, lineNormal = b `diffP2` a }
# INLINE perpLine2 #
intersect2 :: Line2 -> Line2 -> P2
intersect2 (Line2 p n@(V2 n0 n1)) (Line2 p' n'@(V2 n2 n3)) =
P2 (invM2x2 m `mul2x2c` b)
where b = V2 b0 b1
!(D# b0) = p `afdot` n
!(D# b1) = p' `afdot` n'
m = M2x2 n0 n1 n2 n3
# INLINE intersect2 #
data ClipResult a
|
Apply a ' ClipResult ' to a line segment . Replaces clipped endpoints .
If both endpoints ( entire segment ) clipped , use ' Left'ed clip point .
TODO : Delete this function ?
Apply a 'ClipResult' to a line segment. Replaces clipped endpoints.
If both endpoints (entire segment) clipped, use 'Left'ed clip point.
TODO: Delete this function?
-}
applyClip ::
ClipResult a
-> SP a a
-> Either a (SP a a)
applyClip res (SP a b) = case res of
ClipLeft c -> Right (SP c b)
ClipRight c -> Right (SP a c)
ClipBoth c -> Left c
ClipNone -> Right (SP a b)
# INLINE applyClip #
applyClip' :: ClipResult a -> SP a a -> Maybe (SP a a)
applyClip' res seg = either (const Nothing) Just (applyClip res seg)
# INLINE applyClip ' #
applyClip'' :: ClipResult a -> SP s s -> Maybe (Either s (SP s s))
applyClip'' res (SP a b) = case res of
ClipLeft _ -> Just $ Left b
ClipRight _ -> Just $ Left a
ClipBoth _ -> Nothing
ClipNone -> Just $ Right (SP a b)
|
Alternate form of ' applyClip ' . Applies clipping using the given lens .
If ' ' , then use only the ' first ' vertex of the line segment
and change it to use the clipping point . ( TODO : Why ? )
TODO : Delete this function ?
Alternate form of 'applyClip'. Applies clipping using the given lens.
If 'ClipBoth', then use only the 'first' vertex of the line segment
and change it to use the clipping point. (TODO: Why?)
TODO: Delete this function?
-}
lApplyClip :: ASetter' s a
-> ClipResult a
-> SP s s
-> Either s (SP s s)
lApplyClip l res (SP a b) = case res of
ClipLeft c -> Right (SP (set l c a) b)
ClipRight c -> Right (SP a (set l c b))
ClipNone -> Right (SP a b)
# INLINE lApplyClip #
lApplyClip' :: ASetter' s a -> ClipResult a -> SP s s -> Maybe (SP s s)
lApplyClip' l res seg = either (const Nothing) Just (lApplyClip l res seg)
# INLINE lApplyClip ' #
clipSegment :: Line2
-> SP Line2 (SP P2 P2)
-> ClipResult P2
clipSegment boundary (SP incident (SP a b))
| a' < c' = if b' < c' then ClipBoth c
else ClipLeft c
| b' < c' = ClipRight c
| otherwise = ClipNone
where c = intersect2 boundary incident
n = lineNormal boundary
a' = a `afdot` n
b' = b `afdot` n
c' = c `afdot` n
# INLINE clipSegment #
rotate22_ :: Double# -> Double# -> M2x2
rotate22_ cosv sinv = M2x2 cosv (negateDouble# sinv) sinv cosv
rotate22 :: Double# -> M2x2
rotate22 ori = rotate22_ c s
where c = cosDouble# ori
s = sinDouble# ori
# INLINE rotate22 #
afmat33 :: M2x2 -> M3x3
afmat33 (M2x2 x0 x1 y0 y1) =
M3x3
x0 x1 zer
y0 y1 zer
zer zer one
where !one = 1.0##
!zer = 0.0##
# INLINE afmat33 #
aftranslate33 :: V2 -> M3x3
aftranslate33 (V2 x y) =
M3x3
one zer x
zer one y
zer zer one
where !one = 1.0##
!zer = 0.0##
# INLINE aftranslate33 #
afrotate33 :: Double# -> M3x3
afrotate33 ori = afmat33 (rotate22 ori)
# INLINE afrotate33 #
afscale33 :: V2 -> M3x3
afscale33 (V2 x y) =
M3x3
x zer zer
zer y zer
zer zer one
where !one = 1.0##
!zer = 0.0##
# INLINE afscale33 #
|
ac780eb92bb0463f4c4050e859926d9835308616818d75ccaabdc401a7e5d29b | domino-clj/domino | runner.cljs | (ns domino.runner
(:require
[doo.runner :refer-macros [doo-tests]]
[domino.core-test]
[domino.effects-test]
[domino.events-test]
[domino.model-test]
[domino.util-test]
[domino.validation-test]))
(doo-tests 'domino.core-test
'domino.effects-test
'domino.events-test
'domino.model-test
'domino.util-test
'domino.validation-test)
| null | https://raw.githubusercontent.com/domino-clj/domino/a93dd3a4c6108d6d1cf53083652540c9297f8c45/test/domino/runner.cljs | clojure | (ns domino.runner
(:require
[doo.runner :refer-macros [doo-tests]]
[domino.core-test]
[domino.effects-test]
[domino.events-test]
[domino.model-test]
[domino.util-test]
[domino.validation-test]))
(doo-tests 'domino.core-test
'domino.effects-test
'domino.events-test
'domino.model-test
'domino.util-test
'domino.validation-test)
| |
7500bd94df194a48130a009875a4efdf21272743e6de9dc92dbfae6872c82795 | Sintrastes/xen-fret | AppData.hs |
module XenFret.AppData where
import Data.Aeson.TH
import XenFret.Data
import qualified Data.Text as T
import Data.Map hiding(fromList)
import Data.MultiMap
import Data.List.NonEmpty hiding(fromList)
import Data.Ratio
import XenFret.Sagittal
( sagittal5CommaUp, sagittalSharp, sagittalSharp5CDown )
import Control.Lens.TH
import XenFret.App.Widgets.ColorPicker (Color(..))
import Data.Tree.Lens (root)
import Control.Lens.Internal.Fold (NonEmptyDList(NonEmptyDList))
import qualified Data.Map as Map
data LineStyle =
Solid
| Dashed
$(deriveJSON defaultOptions ''LineStyle)
type TemperamentName = String
type InstrumentName = String
type TuningName = String
data PreferenceData = PreferenceData {
useDarkMode :: Bool,
noteNameSize :: Int,
dotSize :: Double,
rootNoteColor :: Color,
fretboardColor :: Color,
fretStyle :: LineStyle,
fretThickness :: Double,
defaultTemperament :: Maybe TemperamentName,
defaultInstrument :: Maybe InstrumentName,
defaultTuning :: Map (TemperamentName, InstrumentName) TuningName
}
defaultPreferences :: PreferenceData
defaultPreferences = PreferenceData {
useDarkMode = False,
noteNameSize = 12,
dotSize = 1.0,
rootNoteColor = Color 51 92 255,
fretboardColor = Color 255 255 255,
fretStyle = Solid,
fretThickness = 1.0,
defaultTemperament = Nothing,
defaultInstrument = Nothing,
defaultTuning = Map.fromList []
}
$(deriveJSON defaultOptions ''PreferenceData)
data AppData = AppData {
-- | Get the list of temperaments
_temperaments :: [Temperament],
-- | Get the current preferences for the app.
_preferences :: PreferenceData
}
$(makeLenses ''AppData)
$(deriveJSON defaultOptions ''AppData)
defaultAppData :: AppData
defaultAppData = AppData {
_temperaments =
[
Temperament {
_temperamentName = "11-TET"
, _divisions = 11
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "" [
"Q","Q#","R","R#","S",
"S#","T","T#","U","U#","P"
]
]
, _chords =
[
Chord "Major" (4 :| [3, 4])
, Chord "Minor" (3 :| [4, 4])
]
, _scales =
[
Scale "Orgone[7]"
(1 :| [2, 1, 2, 1, 2, 2])
, Scale "Machine[5]"
(2 :| [2, 2, 2, 3])
, Scale "Machine[6]"
(2 :| [2, 2, 2, 2, 1])
, Scale "Joan heptatonic"
(1 :| [1, 1, 3, 1, 1, 3])
, Scale "Joan pentatonic"
(1 :| [4, 1, 4, 1])
]
, _tunings =
[
Tuning "Wide Fourths Tuning" "Six-String Guitar"
(0 :| [5, 10, 15, 20, 25]) 0
, Tuning "Major Thirds Tuning" "Six-String Guitar"
(0 :| [4, 8, 12, 16, 20]) 0
, Tuning "Wide Fourths Tuning" "Four-String Bass Guitar"
(0 :| [5, 10, 15]) 0
, Tuning "Major Thirds Tuning" "Four-String Bass Guitar"
            (0 :| [4, 8, 12]) 0
]
},
Temperament {
_temperamentName = "12-TET"
, _divisions = 12
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "" [
"A","A#","B","C","C#","D",
"D#","E","F","F#","G","G#"
]
]
, _chords =
[
Chord "Major" (4 :| [3, 5])
, Chord "Minor" (3 :| [4, 5])
, Chord "Major 7th" (4 :| [3, 4, 1])
, Chord "Dominant 7th" (4 :| [3, 3, 2])
, Chord "Minor 7th" (3 :| [4, 3, 2])
, Chord "MinMaj 7th" (3 :| [4, 4, 1])
]
, _scales =
[
Scale "Ionian (Major)"
(2 :| [2, 1, 2, 2, 2, 1])
, Scale "Mixolydian"
(2 :| [2, 1, 2, 2, 1, 2])
, Scale "Minor"
(2 :| [1, 2, 2, 1, 2, 2])
, Scale "Dorian"
(2 :| [1, 2, 2, 2, 1, 2])
, Scale "diminished[8] (Octatonic)"
(2 :| [1, 2, 1, 2, 1, 2, 1])
, Scale "Whole tone"
(2 :| [2, 2, 2, 2, 2])
, Scale "augmented[6]"
(3 :| [1, 3, 1, 3, 1])
, Scale "Blues"
(3 :| [2, 1, 1, 3, 2])
, Scale "Mixolydian b6"
(2 :| [2, 1, 2, 1, 2, 2])
, Scale "Hirojoshi"
(2 :| [1, 4, 1, 4])
, Scale "Ryo"
(2 :| [2, 3, 2, 3])
, Scale "Insen"
(1 :| [4, 2, 3, 2])
, Scale "Engimatic Scale"
(1 :| [3, 2, 2, 2, 1, 1])
]
, _tunings =
[
Tuning "Standard Tuning" "Mandolin"
(fmap (+10) $ 0 :| [7, 14, 21]) 0
, Tuning "Standard Tuning" "Ukulele"
(fmap (+3) $ 7 :| [0, 4, 9]) 0
, Tuning "Standard Tuning" "Six-String Guitar"
(fmap (+7) $ 0 :| [5, 10, 15, 19, 24]) 0
, Tuning "Standard Tuning" "Four-String Bass Guitar"
(fmap (+7) $ 0 :| [5, 10, 15]) 0
, Tuning "Standard Tuning" "Seven-String Guitar"
            (fmap (+2) $ 0 :| [5, 10, 15, 20, 24, 29]) 0
, Tuning "Drop D" "Six-String Guitar"
(fmap (+5) $ 0 :| [7, 12, 17, 21, 26]) 0
, Tuning "DADGAD" "Six-String Guitar"
(fmap (+5) $ 0 :| [7, 12, 17, 19, 24]) 0
, Tuning "All Fourths" "Six-String Guitar"
(fmap (+7) $ 0 :| [5, 10, 15, 20, 25]) 0
, Tuning "All Fifths" "Six-String Guitar"
(0 :| [7, 14, 21, 28, 35]) 0
]
},
Temperament {
_temperamentName = "13-TET"
, _divisions = 13
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "" [
"J","J#","K","L","L#","M","M#",
"N","O","O#","P","Q","Q#"
]
]
, _chords =
[
]
, _scales =
[
Scale "Archeotonic (Ryonian Mode)"
(2 :| [2, 2, 2, 2, 2, 1])
, Scale "Oneirotonic (Dylathian Mode)"
(2 :| [2, 1, 2, 2, 1, 2, 1])
]
, _tunings =
[
Tuning "Oneirotonic Tuning" "Six-String Guitar"
(3 :| [8, 14, 19, 24, 29]) 0
]
},
Temperament {
_temperamentName = "14-TET"
, _divisions = 14
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "" [
"A","^A","B","^B","C","^C","D","^D","E","^E",
"F","^F","G","^G"
]
]
, _chords =
[
]
, _scales =
[
Scale "Titanium[9]"
(2 :| [1, 2, 1, 2, 1, 2, 1, 2])
, Scale "antipentic"
(4 :| [1, 4, 1, 4])
, Scale "Manual"
(3 :| [3, 2, 3, 3])
, Scale "Citric"
(3 :| [1, 3, 3, 1, 3])
, Scale "Ekic"
(2 :| [2, 1, 2, 2, 2, 1, 2])
, Scale "Semiquartal"
(2 :| [1, 2, 1, 2, 1, 2, 1, 2])
]
, _tunings =
[
Tuning "Wide Fourths Tuning" "Six-String Guitar"
(0 :| [5, 10, 15, 20, 25]) 0
]
},
Temperament {
_temperamentName = "15-TET"
, _divisions = 15
, _period = (2 % 1)
, _notationSystems =
[
NotationSystem "" [
"α","β\\","β","χ\\","χ","δ\\","δ",
"ε\\","ε","φ\\","φ","γ\\","γ","η\\",
"η"
]
]
, _chords =
[
]
, _scales =
[
Scale "Augmented[6]"
(4 :| [1, 4, 1, 4, 1])
, Scale "Triforce[6]"
(3 :| [2, 3, 2, 3, 2])
, Scale "Porcupine[7]"
(3 :| [2, 2, 2, 2, 2, 2])
, Scale "Orgone[7]"
(1 :| [3, 1, 3, 1, 3, 3])
, Scale "Porcupine[8]"
(2 :| [1, 2, 2, 2, 2, 2, 2])
, Scale "Augmented[9]"
(3 :| [1, 1, 3, 1, 1, 3, 1, 1])
, Scale "Triforce[9]"
(2 :| [1, 2, 2, 1, 2, 2, 1, 2])
, Scale "Blackwood[10]"
(2 :| [1, 2, 1, 2, 1, 2, 1, 2, 1])
, Scale "Marvel double harmonic major"
(1 :| [4,1,3,1,4,1])
, Scale "Ptolemy diatonic, \"just\" major"
(3 :| [2, 1, 3, 2, 3, 1])
, Scale "Ptolemy diatonic, natural minor"
(3 :| [1, 2, 3, 1, 3, 2])
, Scale "tetrachordal major, Sa grama"
(3 :| [2, 1, 3, 3, 2, 1])
, Scale "tetrachordal minor"
(3 :| [1, 2, 3, 1, 2, 3])
, Scale "Porcupine bright major #7"
(3 :| [2, 2, 2, 2, 3, 1])
, Scale "Porcupine bright major #6 #7"
(3 :| [2, 2, 2, 3, 2, 1])
, Scale "Porcupine bright minor #2"
(3 :| [1, 3, 2, 2, 2, 2])
, Scale "Porcupine dark minor #2"
(3 :| [1, 2, 3, 2, 2, 2])
, Scale "Porcupine bright harmonic 11th"
(3 :| [2, 2, 2, 2, 1, 3])
]
, _tunings =
[
Tuning "All Fourths Tuning" "Six-String Guitar"
(0 :| [5, 10, 15, 20, 25]) 0
]
},
Temperament {
_temperamentName = "16-TET"
, _divisions = 16
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "Standard" [
"A","B#","B","Bb","C#","C","D#","D",
"E#","E","Eb","F#","F","G#","G","A#"
]
]
, _chords =
[
]
, _scales =
[
Scale "Mavilla[5]"
(5 :| [2, 5, 2, 2])
, Scale "Mavila[7]"
(2 :| [2, 2, 3, 2, 2, 3])
, Scale "Mavilla[9]"
(1 :| [2, 2, 2, 1, 2, 2, 2, 2])
, Scale "Lemba[6]"
(3 :| [3, 2, 3, 3, 2])
, Scale "Lemba[10]"
(2 :| [1, 2, 1, 2, 2, 1, 2, 1, 2])
, Scale "Magic[7]"
(1 :| [4, 1, 4, 1, 4, 1])
, Scale "Magic[10]"
(1 :| [3, 1, 1, 3, 1, 1, 1, 3, 1])
, Scale "Gorgo[5]"
(3 :| [3, 4, 3, 3])
, Scale "Gorgo[6]"
(3 :| [3, 1, 3, 3, 3])
, Scale "Gorgo[11]"
(1 :| [2, 1, 2, 1, 2, 1, 2, 1, 2, 1])
, Scale "Diminished[8]"
(1 :| [3, 1, 3, 1, 3, 1, 3])
]
, _tunings =
[
Tuning "Wide Fourths Tuning" "Six-String Guitar"
(fmap (+9) $ 0 :| [7, 14, 21, 28, 35]) 0
, Tuning "Diminished Fourths Tuning" "Six-String Guitar"
(fmap (+9) $ 0 :| [6, 12, 18, 24, 30]) 0
, Tuning "Wide Fourths Tuning (7 String)" "Seven-String Guitar"
(fmap (+9) $ 0 :| [7, 14, 21, 28, 35, 40]) 0
, Tuning "Diminished Fourths Tuning (7 String)" "Seven-String Guitar"
(fmap (+2) $ 0 :| [6, 12, 18, 24, 30, 36]) 0
]
},
Temperament {
_temperamentName = "17-TET"
, _divisions = 17
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "Standard" [
"A","Bb","A#","B","C","Db","C#","D",
"Eb","D#","E","F","Gb","F#","G","Ab",
"G#"
]
]
, _chords =
[
]
, _scales =
[
Scale "Major"
(3 :| [3, 3, 1, 3, 3, 1])
, Scale "Maqamic[7] (bish mode)"
(2 :| [3, 2, 3, 2, 3, 2])
, Scale "Maqamic[7] (dril mode)"
(3 :| [2, 3, 2, 3, 2, 2])
, Scale "Maqamic[7] (fish mode)"
(2 :| [3, 2, 3, 2, 2, 3])
, Scale "Maqamic[7] (gil mode)"
(3 :| [2, 3, 2, 2, 3, 2])
, Scale "Maqamic[7] (jwl mode)"
(2 :| [3, 2, 2, 3, 2, 3])
, Scale "Maqamic[7] (kleeth mode)"
(3 :| [2, 2, 3, 2, 3, 2])
, Scale "Maqamic[7] (led mode)"
(2 :| [2, 3, 2, 3, 2, 3])
, Scale "Maqamic[10]"
(2 :| [2, 2, 1, 2, 2, 1, 2, 2, 1])
, Scale "Lovecraft[9]"
(3 :| [1, 3, 1, 3, 1, 3, 1, 1])
, Scale "Squares[5]"
(5 :| [5, 1, 5, 1])
, Scale "Squares[8]"
(1 :| [1, 4, 1, 4, 1, 4])
, Scale "Hydra"
(3 :| [3, 1, 1, 2, 3, 2, 1, 1])
, Scale "Springfieldian"
(3 :| [3, 2, 2, 3, 3, 1])
, Scale "Northhaverbrookian"
(2 :| [3, 3, 1, 3, 3, 2])
, Scale "Shelbyvillean"
(3 :| [3, 1, 3, 3, 2, 2])
, Scale "Otonal 17"
(3 :| [2, 3, 2, 2, 2, 3])
, Scale "Bleu[8]"
(3 :| [2, 2, 2, 2, 2, 2, 2])
, Scale "Bleu[9]"
(1 :| [2, 2, 2, 2, 2, 2, 2, 2])
, Scale "Machine[5]"
(5 :| [3, 3, 3, 3])
, Scale "Machine[6]"
(2 :| [3, 3, 3, 3, 3])
, Scale "Machine[11]"
(2 :| [2, 1, 2, 1, 2, 1, 2, 1, 2, 1])
, Scale "Huxley[5]"
(1 :| [4, 4, 4, 4])
, Scale "Huxley[9]"
(1 :| [1, 3, 1, 3, 1, 3, 1, 3])
]
, _tunings =
[
Tuning "Standard Tuning" "Six-String Guitar"
(fmap (+10) $ 0 :| [7, 14, 21, 27, 34]) 0
, Tuning "All Fourths" "Six-String Guitar"
(fmap (+10) $ 0 :| [7, 14, 21, 28, 35]) 0
]
},
Temperament {
_temperamentName = "18-TET"
, _divisions = 18
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "" [
"A","Bb","A#","B","C","Db","C#","D",
"Eb","D#","E","F","Gb","F#","G",
"Hb","G#","H"
]
]
, _chords =
[
]
, _scales =
[
Scale "Antipentic"
(4 :| [4, 3, 4, 3])
, Scale "Bicycle"
(4 :| [4, 1, 4, 4, 1])
, Scale "Mavila[5]"
(2 :| [6, 2, 6, 2])
, Scale "Malic[6]"
(2 :| [5, 2, 2, 5, 2])
, Scale "Mish Heptatonic"
(3 :| [2, 3, 2, 3, 3, 2])
, Scale "Smitonic"
(3 :| [2, 3, 2, 3, 3, 2])
, Scale "Oneirotonic"
(3 :| [1, 3, 3, 1, 3, 3, 1])
, Scale "Antiekic"
(2 :| [2, 3, 2, 2, 2, 3, 2])
, Scale "Tcherepnin"
(4 :| [1, 1, 4, 1, 1, 4, 1, 1])
, Scale "Taric"
(2 :| [2, 1, 2, 2, 2, 2, 1, 2, 2])
]
, _tunings =
[
Tuning "Wide Fourths" "Six-String Guitar"
(0 :| [8, 16, 24, 32, 40]) 0
]
},
Temperament {
_temperamentName = "19-TET"
, _divisions = 19
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "Standard" [
"A","A#","Bb","B","B#","C",
"C#","Db","D","D#","Eb","E",
"E#","F","F#","Gb","G","G#","Ab"
]
]
, _chords =
[
]
, _scales =
[
Scale "Ionian (Major)"
(3 :| [3, 2, 3, 3, 3, 2])
, Scale "Sensi[5]"
(5 :| [5, 2, 5, 2])
, Scale "Sensi[8]"
(2 :| [3, 2, 2, 3, 2, 2, 3])
, Scale "Negri[9]"
(2 :| [2, 2, 2, 3, 2, 2, 2, 2])
, Scale "Negri[10]"
(2 :| [2, 2, 2, 2, 2, 2, 2, 2, 1])
, Scale "Kleismic[7]"
(1 :| [4, 1, 4, 1, 4, 4])
, Scale "Semaphore[5]"
(4 :| [4, 4, 4, 3])
, Scale "Semaphore[9]"
(3 :| [3, 1, 3, 1, 3, 1, 3, 1])
, Scale "Magic[7]"
(5 :| [1, 5, 1, 5, 1, 1])
, Scale "Magic[10]"
(4 :| [1, 1, 4, 1, 1, 4, 1, 1, 1])
, Scale "Marvel hexatonic"
(4 :| [2, 5, 2, 4, 2])
, Scale "deutone[6]"
(4 :| [3, 3, 3, 3, 3])
, Scale "deutone[7]"
(3 :| [3, 3, 3, 3, 3, 1])
, Scale "kleismic[7]"
(4 :| [4, 1, 4, 1, 4, 1])
, Scale "liese[5]"
(8 :| [1, 8, 1, 1])
, Scale "liese[7]"
(7 :| [1, 1, 7, 1, 1, 1])
, Scale "liese[9]"
(6 :| [1, 1, 1, 6, 1, 1, 1, 1])
]
, _tunings =
[
Tuning "Standard Tuning" "Six-String Guitar"
(fmap (+11) $ 0 :| [8, 16, 24, 30, 38]) 0
]
},
Temperament {
_temperamentName = "20-TET"
, _divisions = 20
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales =
[
Scale "Blackwood Major Decatonic"
(3 :| [1, 3, 1, 3, 1, 3, 1, 3, 1])
, Scale "Blackwood Minor Decatonic"
(1 :| [3, 1, 3, 1, 3, 1, 3, 1, 3])
, Scale "Blackwood Major Pentadecatonic"
(2 :| [1, 1, 2, 1, 1, 2, 1, 1, 2, 1, 1])
, Scale "Blackwood Diminished Pentadecatonic"
(1 :| [1, 2, 1, 1, 2, 1, 1, 2, 1, 1, 2])
, Scale "Blackwood Minor Pentadecatonic"
(1 :| [2, 1, 1, 2, 1, 1, 2, 1, 1, 2, 1])
, Scale "Balzano Nine-tone"
(2 :| [3, 2, 2, 2, 3, 2, 2, 2])
, Scale "Balzano Eleven-tone"
(2 :| [2, 2, 2, 1, 2, 2, 2, 2, 2, 1])
, Scale "Balzano Nine-tone inverse"
(2 :| [2, 2, 3, 2, 2, 2, 3, 2])
, Scale "Balzano Eleven-tone inverse"
(1 :| [2, 2, 2, 2, 2, 1, 2, 2, 2, 2])
, Scale "Octatonic"
(2 :| [3, 2, 3, 2, 3, 2, 3])
, Scale "Diminished"
(3 :| [2, 3, 2, 3, 2, 3, 2])
, Scale "Dodecatonic"
(2 :| [2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1])
, Scale "Major"
(4 :| [3, 1, 4, 3, 4, 1])
, Scale "Minor"
(4 :| [1, 3, 4, 1, 4, 3])
, Scale "Major quasi-equal Heptatonic"
(3 :| [3, 3, 3, 3, 3, 2])
, Scale "Minor quasi-equal Heptatonic"
(3 :| [2, 3, 3, 3, 3, 3])
, Scale "Rothenberg Generalized Diatonic"
(3 :| [2, 2, 2, 2, 3, 2, 2, 2])
, Scale "Stearns Major"
(3 :| [4, 1, 4, 3, 3, 2])
, Scale "score5"
(7 :| [2, 7, 2, 2])
, Scale "Mavilla[7]"
(5 :| [2, 2, 5, 2, 2, 2])
]
, _tunings =
[
Tuning "Flat Forths" "Six-String Guitar"
(0 :| [8, 16, 24, 32, 40]) 0
]
},
Temperament {
_temperamentName = "21-TET"
, _divisions = 21
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales =
[
Scale "Antisinatonic (1L 9s)"
(3 :| [2, 2, 2, 2, 2, 2, 2, 2, 2])
, Scale "Machinoid (5L 1s)"
(4 :| [4, 4, 4, 4, 1])
, Scale "p-chro Machinoid (5L 6s)"
(3 :| [1, 1, 3, 1, 3, 1, 3, 1, 3, 1])
, Scale "Manual (4L 1s)"
(5 :| [5, 5, 5, 1])
, Scale "Gramitonic (4L 5s)"
(4 :| [1, 4, 1, 4, 1, 4, 1, 1])
, Scale "Antipentic (3L 2s)"
(5 :| [5, 3, 5, 3])
, Scale "Oneirotonic (5L 3s)"
(3 :| [3, 2, 3, 3, 2, 3, 2])
, Scale "LH Diasem Ionian"
(3 :| [1, 3, 2, 3, 3, 1, 3, 2])
, Scale "LH Diasem Mixo"
(3 :| [1, 3, 2, 3, 1, 3, 2, 3])
, Scale "LH Diasem Dorian"
(1 :| [3, 2, 3, 3, 1, 3, 2, 3])
, Scale "LH Diasem Aeolian"
(3 :| [2, 3, 1, 3, 2, 3, 3, 1])
, Scale "LH Diasem Phrygian"
(2 :| [3, 3, 1, 3, 2, 3, 1, 3])
, Scale "LH Diasem Lydian"
(3 :| [3, 1, 3, 2, 3, 1, 3, 2])
, Scale "LH Diasem Darkened Dorian"
(3 :| [2, 3, 3, 1, 3, 2, 3, 1])
, Scale "LH Diasem Brightened Aeolian"
(1 :| [3, 2, 3, 1, 3, 2, 3, 3])
, Scale "LH Diasem Locrian"
(2 :| [3, 1, 3, 2, 3, 3, 1, 3])
, Scale "RH Diasem Ionian"
(3 :| [1, 3, 2, 3, 1, 3, 3, 2])
, Scale "RH Diasem Mixo"
(1 :| [3, 3, 2, 3, 1, 3, 2, 3])
, Scale "RH Diasem Dorian"
(3 :| [2, 3, 1, 3, 3, 2, 3, 1])
, Scale "RH Diasem Aeolian"
(3 :| [2, 3, 1, 3, 2, 3, 1, 3])
, Scale "RH Diasem Phrygian"
(2 :| [3, 1, 3, 3, 2, 3, 1, 3])
, Scale "RH Diasem Lydian"
(3 :| [1, 3, 3, 2, 3, 1, 3, 2])
, Scale "RH Diasem Darkened Mixo"
(3 :| [3, 2, 3, 1, 3, 2, 3, 1])
, Scale "RH Diasem Brightened Dorian"
(1 :| [3, 2, 3, 1, 3, 3, 2, 3])
, Scale "RH Diasem Locrian"
(2 :| [3, 1, 3, 2, 3, 1, 3, 3])
]
, _tunings = [
Tuning "Standard Tuning" "Six String Guitar"
(0 :| [9, 18, 27, 33, 42]) 0
]
},
Temperament {
_temperamentName = "22-TET"
, _divisions = 22
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "Sagittal" [
"A",
"A" <> sagittal5CommaUp,
"A" <> sagittalSharp5CDown,
"A" <> sagittalSharp,
"B",
"C",
"C" <> sagittal5CommaUp,
"C" <> sagittalSharp5CDown,
"C" <> sagittalSharp,
"D",
"D" <> sagittal5CommaUp,
"D" <> sagittalSharp5CDown,
"D" <> sagittalSharp,
"E",
"F",
"F" <> sagittal5CommaUp,
"F" <> sagittalSharp5CDown,
"F" <> sagittalSharp,
"G",
"G" <> sagittal5CommaUp,
"G" <> sagittalSharp5CDown,
"G" <> sagittalSharp
]
, NotationSystem "Standard (Meantone)" [
"A","A#","Bb","B","B#",
"Cb","C","C#","Db","D",
"D#","Eb","E","E#","Fb",
"F","F#","Gb","G","G#",
"Gx","Ab"
]
]
, _chords =
[
Chord "Major"
(0 :| [7, 6, 9])
, Chord "Minor"
(0 :| [6, 7, 9])
, Chord "SuperMajor"
(0 :| [8, 5, 9])
, Chord "SubMinor"
(0 :| [5, 8, 9])
, Chord "Magical"
(0 :| [5, 7, 9])
, Chord "Tiny"
(0 :| [5, 5, 11])
, Chord "Giant"
(0 :| [8, 7, 6])
, Chord "Minor Seventh"
(0 :| [6, 7, 6, 3])
, Chord "Super Seventh"
(0 :| [8, 5, 8, 1])
, Chord "Sub Seventh"
(0 :| [5, 8, 5, 4])
, Chord "Magical Seventh"
(0 :| [5, 7, 6, 4])
, Chord "Major Super seventh"
(0 :| [8, 5, 8, 1])
, Chord "Minor Sub Seventh"
(0 :| [5, 8, 5, 4])
, Chord "Super Minor Seventh"
(0 :| [8, 5, 6, 3])
, Chord "Sub Major Seventh"
(0 :| [5, 8, 6, 3])
, Chord "Super Sub Seventh"
(0 :| [8, 5, 5, 4])
, Chord "Harmonic Seventh"
(0 :| [7, 6, 5, 4])
, Chord "Tiny seventh"
(0 :| [5, 5, 5, 7])
, Chord "Giant Sixth"
(0 :| [8, 7, 5, 1])
, Chord "Harmonic Minor Sixth"
(0 :| [6, 7, 5, 4])
]
, _scales =
[
Scale "Superpyth[7] (Major)"
(4 :| [4, 1, 4, 4, 4, 1])
, Scale "Superpyth[7] (Dorian)"
(4 :| [1, 4, 4, 4, 1, 4])
, Scale "Superpyth[7] (Phrygian)"
(1 :| [4, 4, 4, 1, 4, 4])
, Scale "Superpyth[7] (Lydian)"
(4 :| [4, 4, 1, 4, 4, 1])
, Scale "Superpyth[7] (Mixolydian)"
(4 :| [4, 1, 4, 4, 1, 4])
, Scale "Superpyth[7] (Minor)"
(4 :| [1, 4, 4, 1, 4, 4])
, Scale "Superpyth[7] (Locrian)"
(1 :| [4, 4, 1, 4, 4, 4])
, Scale "Maqam Bayati"
(3 :| [2, 4, 4, 1, 4, 4])
, Scale "Maqam Jiharkah"
(4 :| [4, 1, 4, 4, 2, 3])
, Scale "Maqam Husayni 'Ushayran"
(3 :| [2, 4, 3, 2, 4, 4])
, Scale "Maqam Saba"
(3 :| [2, 4, 4, 1, 4, 2, 2])
, Scale "Maqam Rast"
(4 :| [2, 3, 4, 4, 2, 3])
, Scale "Syntonic Major"
(4 :| [3,2,4,3,4,2])
, Scale "Syntonic Dorian"
(3 :| [2,4,3,4,2,4])
, Scale "Syntonic Phrygian"
(2 :| [4,3,4,2,4,3])
, Scale "Syntonic Lydian"
(4 :| [3,4,2,4,3,2])
, Scale "Syntonic Mixolydian"
(3 :| [4,2,4,3,2,4])
, Scale "Syntonic Minor"
(4 :| [2,4,3,2,4,3])
, Scale "Syntonic Locrian"
(2 :| [4,3,2,4,3,4])
, Scale "Superpyth Blues"
(5 :| [4, 1, 3, 5, 4])
, Scale "Bright Minor Blues"
(6 :| [3, 1, 3, 6, 3])
, Scale "Astrology[6]"
(4 :| [3, 4, 4, 3, 4])
, Scale "Porcupine[7]"
(3 :| [3, 3, 4, 3, 3, 3])
, Scale "Porcupine[8]"
(3 :| [3, 3, 3, 3, 3, 3, 1])
, Scale "Orwell[5]"
(5 :| [5, 2, 5, 5])
, Scale "Orwell[9]"
(2 :| [3, 2, 3, 2, 3, 2, 3, 2])
, Scale "Magic[7]"
(1 :| [6, 1, 6, 1, 6, 1])
, Scale "Magic[10]"
(5 :| [1, 1, 5, 1, 1, 5, 1, 1, 1])
, Scale "Pajara[10]"
(2 :| [2, 3, 2, 2, 2, 2, 3, 2, 2])
, Scale "Pentachordal Decatonic"
(2 :| [2, 3, 2, 2, 2, 3, 2, 2, 2])
, Scale "Hedgehog[6]"
(3 :| [5, 3, 3, 5, 3])
, Scale "Hedgehog[8]"
(3 :| [3, 3, 2, 3, 3, 3, 2])
, Scale "Astrology[6]"
(4 :| [3, 4, 4, 3, 4])
, Scale "Astrology[10]"
(3 :| [1, 3, 1, 3, 3, 1, 3, 1, 3])
, Scale "Doublewide[6]"
(5 :| [5, 1, 5, 5, 1])
, Scale "Doublewide[10]"
(4 :| [1, 4, 1, 1, 4, 1, 4, 1, 1])
, Scale "Porcupine bright major #7"
(4 :| [3, 3, 3, 3, 4, 2])
, Scale "Porcupine bright major #6 #7"
(4 :| [3, 3, 3, 4, 3, 2])
, Scale "Porcupine bright minor #2"
(4 :| [2, 4, 3, 3, 3, 3])
, Scale "Porcupine dark minor #2"
(4 :| [2, 3, 4, 3, 3, 3])
, Scale "Porcupine bright harmonic 11th mode"
(4 :| [3, 3, 3, 3, 2, 4])
, Scale "Superpyth harmonic minor"
(4 :| [1, 4, 4, 1, 7, 1])
, Scale "Superpyth harmonic major"
(4 :| [4, 1, 4, 1, 7, 1])
, Scale "Superpyth melodic minor"
(4 :| [1, 4, 4, 4, 4, 1])
, Scale "Superpyth double harmonic major"
(1 :| [7, 1, 4, 1, 7, 1])
, Scale "Syntonic Harmonic Minor"
(4 :| [2, 3, 4, 2, 5, 2])
, Scale "Syntonic Harmonic Major"
(4 :| [3, 2, 4, 2, 5, 2])
, Scale "Syntonic Melodic Minor"
(4 :| [2, 3, 4, 3, 4, 2])
, Scale "Marvel Double Harmonic Major"
(2 :| [5, 2, 4, 2, 5, 2])
, Scale "Blackdye"
(1 :| [3, 2, 3, 1, 3, 2, 3, 1, 3])
, Scale "Marvel Hexatonic"
(5 :| [2, 6, 2, 5, 2])
            -- 11-EDO inclusions
, Scale "Machine[6]"
(fmap (*2) $ 2 :| [2, 2, 2, 2, 1])
, Scale "Orgone[7] (Nerevarine)"
(fmap (*2) $ 2 :| [2, 1, 2, 1, 2, 1])
, Scale "Orgone[7] (Vivecan)"
(fmap (*2) $ 2 :| [1, 2, 2, 1, 2, 1])
, Scale "Orgone[7] (Lorkhanic)"
(fmap (*2) $ 2 :| [1, 2, 1, 2, 2, 1])
, Scale "Orgone[7] (Sothic)"
(fmap (*2) $ 2 :| [1, 2, 1, 2, 1, 2])
, Scale "Orgone[7] (Kagrenacan)"
(fmap (*2) $ 1 :| [2, 2, 1, 2, 1, 2])
, Scale "Orgone[7] (Almalexian)"
(fmap (*2) $ 1 :| [2, 1, 2, 2, 1, 2])
, Scale "Orgone[7] (Dagothic)"
(fmap (*2) $ 1 :| [2, 1, 2, 1, 2, 2])
, Scale "Joan Pentatonic"
(fmap (*2) $ 1 :| [4, 1, 4, 1])
, Scale "Joan Heptatonic"
(fmap (*2) $ 1 :| [1, 1, 3, 1, 1, 3])
, Scale "Joan Nonatonic"
(fmap (*2) $ 1 :| [1, 1, 2, 1, 1, 1, 2, 1])
]
, _tunings =
[
Tuning "Standard Tuning" "Six-String Guitar"
(fmap (+13) $ 0 :| [9, 18, 27, 35, 44]) 0
, Tuning "Drop D" "Six-String Guitar"
(fmap (+13) $ 0 :| [5, 18, 27, 35, 44]) 0
, Tuning "All Fourths Tuning" "Six-String Guitar"
(fmap (+13) $ 0 :| [9, 18, 27, 36, 45]) 0
, Tuning "Narrow Fourths Tuning" "Six-String Guitar"
(fmap (+13) $ 0 :| [8, 16, 24, 32, 40]) 0
, Tuning "Wide Fourths Tuning" "Six-String Guitar"
(fmap (+13) $ 0 :| [10, 20, 30, 40, 50]) 0
]
},
Temperament {
_temperamentName = "23-TET"
, _divisions = 23
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales =
[
Scale "3L 2s (oneiro-pentatonic)"
(5 :| [4, 5, 5, 4])
, Scale "4L 1s (bug pentatonic)"
(5 :| [5, 5, 5, 3])
, Scale "5L 1s (machinoid)"
(4 :| [4, 4, 4, 4, 3])
, Scale "4L 3s (smitonic)"
(5 :| [1, 5, 1, 5, 1, 5])
, Scale "1L 6s (antiarcheotonic)"
(3 :| [3, 3, 5, 3, 3, 3])
, Scale "2L 5s (mavila, anti-diatonic)"
(3 :| [3, 4, 3, 3, 3, 4])
, Scale "3L 4s (mosh)"
(2 :| [5, 2, 5, 2, 5, 2])
, Scale "5L 3s (oneirotonic)"
(4 :| [1, 4, 4, 1, 4, 4, 1])
, Scale "7L 1s (porcupoid)"
(3 :| [3, 3, 3, 3, 3, 3, 2])
, Scale "7L 2s (mavila superdiatonic)"
(3 :| [3, 3, 1, 3, 3, 3, 3, 1])
, Scale "5L 4s (bug semiquartal)"
(3 :| [2, 3, 2, 3, 2, 3, 2, 3])
, Scale "3L 7s (sephiroid)"
(3 :| [2, 2, 3, 2, 2, 3, 2, 2, 2])
]
, _tunings =
[
Tuning "Wide Fourths" "Six String Guitar"
(0 :| [10, 20, 30, 40, 50]) 0
]
},
Temperament {
_temperamentName = "24-TET"
, _divisions = 24
, _period = 2 % 1
, _notationSystems =
[
]
, _chords =
[
]
, _scales =
[
Scale "Ionian (Major)"
(4 :| [4, 2, 4, 4, 4, 2])
, Scale "Anchihoye: Ethiopia"
(2 :| [8, 3, 6, 5])
, Scale "Enharmonic Phrygian"
(8 :| [1, 1, 8, 4, 1, 1])
, Scale "Maqam Rast"
(4 :| [3, 3, 4, 4, 3, 3])
, Scale "Mohajira[7]"
(3 :| [4, 3, 4, 3, 4, 3])
]
, _tunings =
[
Tuning "Standard Tuning" "Six-String Guitar"
(fmap (+14) $ 0 :| [10, 20, 30, 38, 48]) 0
, Tuning "Drop D" "Six-String Guitar"
(fmap (+12) $ 0 :| [14, 24, 34, 42, 52]) 0
]
},
Temperament {
_temperamentName = "25-TET"
, _divisions = 25
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Bleu[8]"
(3 :| [3,3,3,3,3,3,4])
, Scale "Bleu[9]"
(3 :| [3,3,3,3,3,3,3,1])
, Scale "Luna[6]"
(4 :| [4,4,4,4,5])
, Scale "Luna[7]"
(4 :| [4,4,4,4,4,1])
, Scale "Gariberttet[5]"
(6 :| [6,6,6,1])
, Scale "Gariberttet[9]"
(5 :| [1,5,1,5,1,5,1,1])
, Scale "Sixix[7]"
(3 :| [4,3,4,3,4,4])
, Scale "Magic[7]"
(7 :| [1,7,1,7,1,1])
, Scale "Magic[10]"
(6 :| [1,1,6,1,1,6,1,1,1])
, Scale "Antipentic (3L 2s)"
(2 :| [7,2,7,7])
, Scale "Checkertonic (3L 5s)"
(2 :| [2,5,2,2,5,2,5])
, Scale "Pelogic[5]"
(8 :| [3,8,3,3])
, Scale "Pelogic[7]"
(5 :| [3,3,5,3,3,3])
, Scale "Pelogic[9]"
(2 :| [3,3,3,2,3,3,3,3])
, Scale "Triton[5]"
(11 :| [1,11,1,1])
, Scale "Triton[7]"
(10 :| [1,1,10,1,1,1])
, Scale "Triton[9]"
(9 :| [1,1,1,9,1,1,1,1])
]
, _tunings = []
},
Temperament {
_temperamentName = "26-TET"
, _divisions = 26
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales =
[
Scale "Flattone"
(4 :| [4, 4, 3, 4, 4, 3])
, Scale "Orgone"
(5 :| [5, 2, 5, 2, 5, 2])
, Scale "Lemba"
(5 :| [5, 3, 5, 5, 3])
]
, _tunings =
[
Tuning "All Fourths" "Six String Guitar"
(0 :| [11, 22, 33, 44, 55]) 0
]
},
Temperament {
_temperamentName = "27-TET"
, _divisions = 27
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Tetracot[6]"
(4 :| [4,4,4,4,7])
, Scale "Tetractot[7]"
(4 :| [4,4,4,4,4,3])
, Scale "Machine[5]"
(5 :| [5,5,5,7])
, Scale "Machine[6]"
(5 :| [5,5,5,5,2])
, Scale "Myna[7]"
(1 :| [6,1,6,1,6,6])
, Scale "Beatles[7]"
(5 :| [3,5,3,5,3,3])
, Scale "Beatles[10]"
(2 :| [3,3,2,3,3,2,3,3,3])
, Scale "Sensi[5]"
(3 :| [7,3,7,7])
, Scale "Sensi[8]"
(3 :| [3,4,3,3,4,3,4])
, Scale "Superpyth[7]"
(1 :| [5,5,1,5,5,5])
, Scale "Fervor[5]"
(12 :| [1,12,1,1])
, Scale "Fervor[7]"
(11 :| [1,1,11,1,1,1])
, Scale "Fervor[9]"
(10 :| [1,1,1,10,1,1,1,1])
]
, _tunings = []
},
Temperament {
_temperamentName = "28-TET"
, _divisions = 28
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales =
[
Scale "Negri [9]"
(3 :| [3, 3, 3, 4, 3, 3, 3, 3])
, Scale "Negri [10]"
(3 :| [3, 3, 3, 3, 3, 3, 3, 3, 1])
, Scale "Diatonic Major [7]"
(5 :| [4, 3, 4, 5, 5, 2])
, Scale "Diatonic Minor [7]"
(5 :| [2, 5, 4, 3, 4, 5])
, Scale "Diatonic Naive Major [7]"
(4 :| [5, 3, 4, 5, 4, 3])
, Scale "Diatonic Naive Minor [7]"
(4 :| [3, 5, 4, 3, 4, 5])
, Scale "Harmonic Minor [7]"
(5 :| [2, 5, 4, 3, 7, 2])
, Scale "Harmonic Major [7]"
(5 :| [4, 3, 4, 3, 7, 2])
, Scale "Diasem (Right-handed)"
(4 :| [1, 4, 4, 3, 4, 1, 4, 3])
, Scale "Diasem (Left-handed)"
(4 :| [4, 1, 4, 3, 4, 1, 4, 3])
, Scale "Oneirotonic [5]"
(6 :| [5, 6, 5, 6])
, Scale "Oneirotonic [8]"
(5 :| [5, 1, 5, 5, 1, 5, 1])
]
, _tunings =
[
Tuning "Wide Fourths" "Six String Guitar"
(0 :| [12, 24, 36, 48, 69]) 0
, Tuning "Narrow Fourths" "Six String Guitar"
(0 :| [11, 22, 33, 44, 55]) 0
]
},
Temperament {
_temperamentName = "29-TET"
, _divisions = 29
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Nicetone"
(5 :| [4, 3, 5, 4, 5, 3])
, Scale "Porcupine[7]"
(4 :| [4, 4, 4, 4, 4, 5])
, Scale "Porcupine[8]"
(4 :| [4, 4, 4, 4, 4, 4, 1])
, Scale "Negri[9]"
(3 :| [3, 3, 3, 3, 3, 3, 3, 5])
, Scale "Negri[10]"
(3 :| [3, 3, 3, 3, 3, 3, 3, 3, 2])
, Scale "Semaphore[5]"
(5 :| [6, 6, 6, 6])
, Scale "Semaphore[9]"
(5 :| [5, 1, 5, 1, 5, 1, 5, 1])
, Scale "Leapfrog[7]"
(5 :| [5, 5, 2, 5, 5, 2])
]
, _tunings = [
Tuning "Standard Tuning" "Six String Guitar"
(0 :| [12, 24, 36, 46, 58]) 0
]
},
Temperament {
_temperamentName = "30-TET"
, _divisions = 30
, _period = 2 % 1
, _notationSystems =
[
]
, _chords = []
, _scales =
[
Scale "Lovecraft[5]"
(7 :| [7, 7, 7, 2])
, Scale "Lovecraft[9]"
(5 :| [2, 5, 2, 5, 2, 5, 2, 2])
, Scale "Sensi[5]"
(8 :| [3, 8, 3, 8])
, Scale "Sensi[8]"
(5 :| [3, 3, 5, 3, 3, 5, 3])
, Scale "Mavila[5]"
(9 :| [4, 9, 4, 4])
, Scale "Mavila[7]"
(5 :| [4, 4, 5, 4, 4, 4])
, Scale "Mavila[9]"
(4 :| [4, 4, 4, 1, 4, 4, 4, 1])
]
, _tunings =
[
Tuning "Narrow Fourths" "Six String Guitar"
(0 :| [12, 24, 36, 48, 60]) 0
]
},
Temperament {
_temperamentName = "31-TET"
, _divisions = 31
, _period = 2 % 1
, _notationSystems =
[
]
, _chords =
[
]
, _scales =
[
Scale "Miracle[5]"
(3 :| [3, 3, 3, 19])
, Scale "Nusecond[5]"
(4 :| [4, 4, 4, 15])
, Scale "Hemithirds[5]"
(5 :| [5, 5, 5, 11])
, Scale "Mothra[5]"
(6 :| [6, 6, 6, 7])
, Scale "Orwell[5]"
(7 :| [7, 7, 7, 3])
, Scale "Squares[5]"
(2 :| [9, 2, 9, 9])
, Scale "Semisept[5]"
(5 :| [7, 5, 7, 7])
, Scale "Meantone[5]"
(8 :| [5, 8, 5, 5])
, Scale "Casablanca[5]"
(11 :| [3, 11, 3, 3])
, Scale "Tritonic[5]"
(14 :| [1, 14, 1, 1])
, Scale "Miracle[6]"
(3 :| [3, 3, 3, 3, 16])
, Scale "Nusecond[6]"
(4 :| [4, 4, 4, 4, 11])
, Scale "Hemithirds[6]"
(5 :| [5, 5, 5, 5, 6])
, Scale "Mothra[6]"
(6 :| [6, 6, 6, 6, 1])
, Scale "Miracle[7]"
(3 :| [3, 3, 3, 3, 3, 13])
, Scale "Nusecond[7]"
(4 :| [4, 4, 4, 4, 4, 7])
, Scale "Hemithirds[7]"
(5 :| [5, 5, 5, 5, 5, 1])
, Scale "Myna[7]"
(1 :| [7, 1, 7, 1, 7, 7])
, Scale "Mohajira[7]"
(5 :| [4, 5, 4, 5, 4, 4])
, Scale "Würschmidt[7]"
(9 :| [1, 9, 1, 9, 1, 1])
, Scale "Meantone[7]"
(3 :| [5, 5, 3, 5, 5, 5])
, Scale "Casablanca[7]"
(8 :| [3, 3, 8, 3, 3, 3])
, Scale "Tritonic[7]"
(13 :| [1, 1, 13, 1, 1, 1])
, Scale "Miracle[8]"
(3 :| [3, 3, 3, 3, 3, 3, 10])
, Scale "Nusecond[8]"
(4 :| [4, 4, 4, 4, 4, 4, 3])
, Scale "Squares[8]"
(2 :| [2, 7, 2, 2, 7, 2, 7])
, Scale "Semisept[8]"
(5 :| [5, 2, 5, 5, 2, 5, 2])
, Scale "Miracle[9]"
(3 :| [3, 3, 3, 3, 3, 3, 3, 7])
, Scale "Orwell[9]"
(4 :| [3, 4, 3, 4, 3, 4, 3, 3])
, Scale "Casablanca[9]"
(5 :| [3, 3, 3, 5, 3, 3, 3, 3])
]
, _tunings =
[
Tuning "Standard Tuning" "Six String Guitar"
(0 :| [13, 26, 39, 49, 62]) 0
]
},
Temperament {
_temperamentName = "32-TET"
, _divisions = 32
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "blackdye"
(1 :| [5, 2, 5, 1, 5, 2, 5, 1, 5])
, Scale "Sixix[7]"
(4 :| [5,4,5,4,5,5])
, Scale "Pajara[5]"
(6 :| [6,7,6,7])
, Scale "Pajara[7]"
(6 :| [6,6,1,6,6,1])
, Scale "Pentic"
(4 :| [4,10,4,10])
, Scale "Antidiatonic"
(4 :| [4,4,6,4,4,6])
]
, _tunings = [
Tuning "Wide Fourths" "Six String Guitar"
(0 :| [14, 28, 42, 56, 70]) 0
]
},
Temperament {
_temperamentName = "33-TET"
, _divisions = 33
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "1L 4s" (6 :| [6,6,6,9])
, Scale "4L 1s" (7 :| [7,7,7,5])
, Scale "3L 2s" (3 :| [9,3,9,9])
, Scale "4L 1s" (5 :| [7,7,7,7])
, Scale "1L 5s" (5 :| [5,5,5,5,8])
, Scale "5L 1s" (6 :| [6,6,6,6,3])
, Scale "5L 2s" (5 :| [5,5,4,5,5,4])
, Scale "4L 3s" (6 :| [6,3,6,3,6,3])
, Scale "3L 5s" (3 :| [3,6,3,3,6,3,6])
, Scale "5L 3s" (6 :| [6,1,6,6,1,6,1])
]
, _tunings = [
Tuning "All Fourths" "Six String Guitar"
(0 :| [14, 28, 42, 56, 70]) 0
]
},
Temperament {
_temperamentName = "34-TET"
, _divisions = 34
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Tetracot[5]"
(5 :| [5,5,5,14])
, Scale "Tetracot[6]"
(5 :| [5,5,5,5,9])
, Scale "Tetracot[7]"
(5 :| [5,5,5,5,5,4])
, Scale "Immunity[5]"
(7 :| [7,7,7,6])
, Scale "Immunity[9]"
(1 :| [6,1,6,1,6,1,6,6])
, Scale "Hanson[7]"
(2 :| [7,2,7,2,7,7])
, Scale "Petrtri[5]"
(5 :| [8,5,8,8])
, Scale "Petrtri[8]"
(5 :| [5,3,5,5,3,5,3])
, Scale "Mabila[5]"
(11 :| [4,11,4,4])
, Scale "Mabila[7]"
(7 :| [4,4,7,4,4,4])
]
, _tunings = []
},
Temperament {
_temperamentName = "35-TET"
, _divisions = 35
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Secund[9]"
(4 :| [4,4,4,4,4,4,4,3])
, Scale "Ripple[10]"
(3 :| [3,3,3,3,3,3,3,3,8])
, Scale "Baldy[5]"
(6 :| [6,6,6,11])
, Scale "Baldy[6]"
(6 :| [6,6,6,6,5])
, Scale "Baldy[11]"
(1 :| [5,1,5,1,5,1,5,1,5,5])
, Scale "Orwell[5]"
(8 :| [8,8,8,3])
, Scale "Orwell[9]"
(5 :| [3,5,3,5,3,5,3,3])
, Scale "Myna[7]"
(1 :| [8,1,8,1,8,8])
, Scale "Myna[11]"
(1 :| [1,7,1,1,7,1,1,7,1,7])
, Scale "Muggles[7]"
(9 :| [2,9,2,9,2,2])
, Scale "Muggles[10]"
(7 :| [2,2,7,2,2,7,2,2,2])
, Scale "Roman[5]"
(1 :| [11,1,11,11])
, Scale "Roman[8]"
(1 :| [1,10,1,1,10,1,10])
, Scale "Sensi[5]"
(4 :| [9,4,9,9])
, Scale "Sensi[8]"
(4 :| [4,5,4,4,5,4,5])
, Scale "Sensi[11]"
(4 :| [4,4,1,4,4,4,1,4,4,1])
]
, _tunings = [
Tuning "Wide Fourths" "Six String Guitar"
(0 :| [15, 30, 45, 60, 75]) 0
]
},
Temperament {
_temperamentName = "36-TET"
, _divisions = 36
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Pentatonic"
(6 :| [6,9,6,9])
, Scale "Lydian"
(6 :| [6,6,3,6,6,3])
]
, _tunings = [
Tuning "Standard Tuning" "Six String Guitar"
(0 :| [15, 30, 45, 57, 72]) 0
]
},
Temperament {
_temperamentName = "41-TET"
, _divisions = 41
, _period = 2 % 1
, _notationSystems =
[
]
, _chords =
[
]
, _scales =
[
Scale "Down Lydian"
(7 :| [6,7,4,7,6,4])
, Scale "Down Major"
(7 :| [6, 4, 7, 6, 7, 4])
, Scale "Down Mixolydian"
(6 :| [7, 4, 7, 6, 4, 7])
, Scale "Up Minor"
(7 :| [4, 6, 7, 4, 7, 6])
, Scale "Up Phrygian"
(4 :| [7, 6, 7, 4, 6, 7])
, Scale "Up Dorian"
(7 :| [4, 6, 7, 7, 4, 6])
, Scale "Up Locrian"
(4 :| [6, 7, 3, 8, 6, 7])
, Scale "Up Lydian"
(7 :| [8, 7, 2, 7, 8, 2])
, Scale "Up Major"
(7 :| [8, 2, 7, 8, 7, 2])
, Scale "Up Mixolydian"
(8 :| [7, 2, 7, 8, 2, 7])
, Scale "Down Minor"
(7 :| [2, 8, 7, 2, 7, 8])
, Scale "Down Phrygian"
(2 :| [7, 8, 7, 2, 8, 7])
, Scale "Down Dorian"
(7 :| [2, 8, 7, 7, 2, 8])
, Scale "Down Locrian"
(2 :| [8, 7, 3, 6, 8, 7])
]
, _tunings =
[
Tuning "Standard Tuning" "Kite Guitar"
(0 :| [13, 26, 39, 52, 65]) 1
]
},
Temperament {
_temperamentName = "Bohlen Pierce"
, _divisions = 13
, _period = 3 % 1
, _notationSystems =
[
NotationSystem "Standard" [
"A","A#","B","C","C#","D","E","F","F#","G","H","H#","J"
]
]
, _chords =
[
]
, _scales =
[
Scale "Lambda"
(2 :| [1, 1, 2, 1, 2, 1, 2, 1])
, Scale "Moll 1"
(1 :| [2,1,2,1,2,1,2,1])
, Scale "Harmonic"
(1 :| [2,1,2,1,2,1,1,2])
, Scale "Dur I"
(1 :| [2,1,2,1,1,2,1,2])
, Scale "Moll 2"
(2 :| [1,2,1,1,2,1,2,1])
, Scale "Dur II"
(2 :| [1,1,2,1,2,1,1,2])
, Scale "Gamma"
(1 :| [2,1,2,1,1,2,2,1])
, Scale "Walker A"
(1 :| [1,2,1,2,1,2,1,2])
, Scale "Walker B"
(1 :| [2,1,1,2,1,2,1,2])
, Scale "Walker I"
(2 :| [1,2,1,2,1,2,1,1])
, Scale "Walker II"
(2 :| [1,2,1,2,1,1,2,1])
, Scale "Sirius[6]"
(2 :| [2,2,2,2,3])
, Scale "Sirius[7]"
(2 :| [2,2,2,2,2,1])
, Scale "Canopus[7]"
(3 :| [1,3,1,3,1,1])
, Scale "Arcturus[5]"
(5 :| [1,5,1,1])
, Scale "Arcturus[7]"
(4 :| [1,1,4,1,1,1])
]
, _tunings =
[
Tuning "Bohlen's Tuning" "Six String Guitar"
(0 :| [3,6,9,13,16]) 0
]
}
]
, _preferences = defaultPreferences
} | null | https://raw.githubusercontent.com/Sintrastes/xen-fret/cb0b6a04b9e0286ddc58fbdd1f9d91a28b883661/xen-fret/src/XenFret/AppData.hs | haskell | | Get the list of temperaments
| Get the current preferences for the app. |
module XenFret.AppData where
import Data.Aeson.TH
import XenFret.Data
import qualified Data.Text as T
import Data.Map hiding(fromList)
import Data.MultiMap
import Data.List.NonEmpty hiding(fromList)
import Data.Ratio
import XenFret.Sagittal
( sagittal5CommaUp, sagittalSharp, sagittalSharp5CDown )
import Control.Lens.TH
import XenFret.App.Widgets.ColorPicker (Color(..))
import Data.Tree.Lens (root)
import Control.Lens.Internal.Fold (NonEmptyDList(NonEmptyDList))
import qualified Data.Map as Map
data LineStyle =
Solid
| Dashed
$(deriveJSON defaultOptions ''LineStyle)
type TemperamentName = String
type InstrumentName = String
type TuningName = String
data PreferenceData = PreferenceData {
useDarkMode :: Bool,
noteNameSize :: Int,
dotSize :: Double,
rootNoteColor :: Color,
fretboardColor :: Color,
fretStyle :: LineStyle,
fretThickness :: Double,
defaultTemperament :: Maybe TemperamentName,
defaultInstrument :: Maybe InstrumentName,
defaultTuning :: Map (TemperamentName, InstrumentName) TuningName
}
defaultPreferences :: PreferenceData
defaultPreferences = PreferenceData {
useDarkMode = False,
noteNameSize = 12,
dotSize = 1.0,
rootNoteColor = Color 51 92 255,
fretboardColor = Color 255 255 255,
fretStyle = Solid,
fretThickness = 1.0,
defaultTemperament = Nothing,
defaultInstrument = Nothing,
defaultTuning = Map.fromList []
}
$(deriveJSON defaultOptions ''PreferenceData)
data AppData = AppData {
_temperaments :: [Temperament],
_preferences :: PreferenceData
}
$(makeLenses ''AppData)
$(deriveJSON defaultOptions ''AppData)
defaultAppData :: AppData
defaultAppData = AppData {
_temperaments =
[
Temperament {
_temperamentName = "11-TET"
, _divisions = 11
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "" [
"Q","Q#","R","R#","S",
"S#","T","T#","U","U#","P"
]
]
, _chords =
[
Chord "Major" (4 :| [3, 4])
, Chord "Minor" (3 :| [4, 4])
]
, _scales =
[
Scale "Orgone[7]"
(1 :| [2, 1, 2, 1, 2, 2])
, Scale "Machine[5]"
(2 :| [2, 2, 2, 3])
, Scale "Machine[6]"
(2 :| [2, 2, 2, 2, 1])
, Scale "Joan heptatonic"
(1 :| [1, 1, 3, 1, 1, 3])
, Scale "Joan pentatonic"
(1 :| [4, 1, 4, 1])
]
, _tunings =
[
Tuning "Wide Fourths Tuning" "Six-String Guitar"
(0 :| [5, 10, 15, 20, 25]) 0
, Tuning "Major Thirds Tuning" "Six-String Guitar"
(0 :| [4, 8, 12, 16, 20]) 0
, Tuning "Wide Fourths Tuning" "Four-String Bass Guitar"
(0 :| [5, 10, 15]) 0
, Tuning "Major Thirds Tuning" "Four-String Bass Guitar"
(0 :| [4, 8, 120]) 0
]
},
Temperament {
_temperamentName = "12-TET"
, _divisions = 12
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "" [
"A","A#","B","C","C#","D",
"D#","E","F","F#","G","G#"
]
]
, _chords =
[
Chord "Major" (4 :| [3, 5])
, Chord "Minor" (3 :| [4, 5])
, Chord "Major 7th" (4 :| [3, 4, 1])
, Chord "Dominant 7th" (4 :| [3, 3, 2])
, Chord "Minor 7th" (3 :| [4, 3, 2])
, Chord "MinMaj 7th" (3 :| [4, 4, 1])
]
, _scales =
[
Scale "Ionian (Major)"
(2 :| [2, 1, 2, 2, 2, 1])
, Scale "Mixolydian"
(2 :| [2, 1, 2, 2, 1, 2])
, Scale "Minor"
(2 :| [1, 2, 2, 1, 2, 2])
, Scale "Dorian"
(2 :| [1, 2, 2, 2, 1, 2])
, Scale "diminished[8] (Octatonic)"
(2 :| [1, 2, 1, 2, 1, 2, 1])
, Scale "Whole tone"
(2 :| [2, 2, 2, 2, 2])
, Scale "augmented[6]"
(3 :| [1, 3, 1, 3, 1])
, Scale "Blues"
(3 :| [2, 1, 1, 3, 2])
, Scale "Mixolydian b6"
(2 :| [2, 1, 2, 1, 2, 2])
, Scale "Hirojoshi"
(2 :| [1, 4, 1, 4])
, Scale "Ryo"
(2 :| [2, 3, 2, 3])
, Scale "Insen"
(1 :| [4, 2, 3, 2])
, Scale "Engimatic Scale"
(1 :| [3, 2, 2, 2, 1, 1])
]
, _tunings =
[
Tuning "Standard Tuning" "Mandolin"
(fmap (+10) $ 0 :| [7, 14, 21]) 0
, Tuning "Standard Tuning" "Ukulele"
(fmap (+3) $ 7 :| [0, 4, 9]) 0
, Tuning "Standard Tuning" "Six-String Guitar"
(fmap (+7) $ 0 :| [5, 10, 15, 19, 24]) 0
, Tuning "Standard Tuning" "Four-String Bass Guitar"
(fmap (+7) $ 0 :| [5, 10, 15]) 0
, Tuning "Standard Tuning" "Seven-String Guitar"
(fmap (+2) $ 0 :| [5, 10, 15, 20, 14, 29]) 0
, Tuning "Drop D" "Six-String Guitar"
(fmap (+5) $ 0 :| [7, 12, 17, 21, 26]) 0
, Tuning "DADGAD" "Six-String Guitar"
(fmap (+5) $ 0 :| [7, 12, 17, 19, 24]) 0
, Tuning "All Fourths" "Six-String Guitar"
(fmap (+7) $ 0 :| [5, 10, 15, 20, 25]) 0
, Tuning "All Fifths" "Six-String Guitar"
(0 :| [7, 14, 21, 28, 35]) 0
]
},
Temperament {
_temperamentName = "13-TET"
, _divisions = 13
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "" [
"J","J#","K","L","L#","M","M#",
"N","O","O#","P","Q","Q#"
]
]
, _chords =
[
]
, _scales =
[
Scale "Archeotonic (Ryonian Mode)"
(2 :| [2, 2, 2, 2, 2, 1])
, Scale "Oneirotonic (Dylathian Mode)"
(2 :| [2, 1, 2, 2, 1, 2, 1])
]
, _tunings =
[
Tuning "Oneirotonic Tuning" "Six-String Guitar"
(3 :| [8, 14, 19, 24, 29]) 0
]
},
Temperament {
_temperamentName = "14-TET"
, _divisions = 14
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "" [
"A","^A","B","^B","C","^C","D","^D","E","^E",
"F","^F","G","^G"
]
]
, _chords =
[
]
, _scales =
[
Scale "Titanium[9]"
(2 :| [1, 2, 1, 2, 1, 2, 1, 2])
, Scale "antipentic"
(4 :| [1, 4, 1, 4])
, Scale "Manual"
(3 :| [3, 2, 3, 3])
, Scale "Citric"
(3 :| [1, 3, 3, 1, 3])
, Scale "Ekic"
(2 :| [2, 1, 2, 2, 2, 1, 2])
, Scale "Semiquartal"
(2 :| [1, 2, 1, 2, 1, 2, 1, 2])
]
, _tunings =
[
Tuning "Wide Fourths Tuning" "Six-String Guitar"
(0 :| [5, 10, 15, 20, 25]) 0
]
},
Temperament {
_temperamentName = "15-TET"
, _divisions = 15
, _period = (2 % 1)
, _notationSystems =
[
NotationSystem "" [
"α","β\\","β","χ\\","χ","δ\\","δ",
"ε\\","ε","φ\\","φ","γ\\","γ","η\\",
"η"
]
]
, _chords =
[
]
, _scales =
[
Scale "Augmented[6]"
(4 :| [1, 4, 1, 4, 1])
, Scale "Triforce[6]"
(3 :| [2, 3, 2, 3, 2])
, Scale "Porcupine[7]"
(3 :| [2, 2, 2, 2, 2, 2])
, Scale "Orgone[7]"
(1 :| [3, 1, 3, 1, 3, 3])
, Scale "Porcupine[8]"
(2 :| [1, 2, 2, 2, 2, 2, 2])
, Scale "Augmented[9]"
(3 :| [1, 1, 3, 1, 1, 3, 1, 1])
, Scale "Triforce[9]"
(2 :| [1, 2, 2, 1, 2, 2, 1, 2])
, Scale "Blackwood[10]"
(2 :| [1, 2, 1, 2, 1, 2, 1, 2, 1])
, Scale "Marvel double harmonic major"
(1 :| [4,1,3,1,4,1])
, Scale "Ptolemy diatonic, \"just\" major"
(3 :| [2, 1, 3, 2, 3, 1])
, Scale "Ptolemy diatonic, natural minor"
(3 :| [1, 2, 3, 1, 3, 2])
, Scale "tetrachordal major, Sa grama"
(3 :| [2, 1, 3, 3, 2, 1])
, Scale "tetrachordal minor"
(3 :| [1, 2, 3, 1, 2, 3])
, Scale "Porcupine bright major #7"
(3 :| [2, 2, 2, 2, 3, 1])
, Scale "Porcupine bright major #6 #7"
(3 :| [2, 2, 2, 3, 2, 1])
, Scale "Porcupine bright minor #2"
(3 :| [1, 3, 2, 2, 2, 2])
, Scale "Porcupine dark minor #2"
(3 :| [1, 2, 3, 2, 2, 2])
, Scale "Porcupine bright harmonic 11th"
(3 :| [2, 2, 2, 2, 1, 3])
]
, _tunings =
[
Tuning "All Fourths Tuning" "Six-String Guitar"
(0 :| [5, 10, 15, 20, 25]) 0
]
},
Temperament {
_temperamentName = "16-TET"
, _divisions = 16
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "Standard" [
"A","B#","B","Bb","C#","C","D#","D",
"E#","E","Eb","F#","F","G#","G","A#"
]
]
, _chords =
[
]
, _scales =
[
Scale "Mavilla[5]"
(5 :| [2, 5, 2, 2])
, Scale "Mavila[7]"
(2 :| [2, 2, 3, 2, 2, 3])
, Scale "Mavilla[9]"
(1 :| [2, 2, 2, 1, 2, 2, 2, 2])
, Scale "Lemba[6]"
(3 :| [3, 2, 3, 3, 2])
, Scale "Lemba[10]"
(2 :| [1, 2, 1, 2, 2, 1, 2, 1, 2])
, Scale "Magic[7]"
(1 :| [4, 1, 4, 1, 4, 1])
, Scale "Magic[10]"
(1 :| [3, 1, 1, 3, 1, 1, 1, 3, 1])
, Scale "Gorgo[5]"
(3 :| [3, 4, 3, 3])
, Scale "Gorgo[6]"
(3 :| [3, 1, 3, 3, 3])
, Scale "Gorgo[11]"
(1 :| [2, 1, 2, 1, 2, 1, 2, 1, 2, 1])
, Scale "Diminished[8]"
(1 :| [3, 1, 3, 1, 3, 1, 3])
]
, _tunings =
[
Tuning "Wide Fourths Tuning" "Six-String Guitar"
(fmap (+9) $ 0 :| [7, 14, 21, 28, 35]) 0
, Tuning "Diminished Fourths Tuning" "Six-String Guitar"
(fmap (+9) $ 0 :| [6, 12, 18, 24, 30]) 0
, Tuning "Wide Fourths Tuning (7 String)" "Seven-String Guitar"
(fmap (+9) $ 0 :| [7, 14, 21, 28, 35, 40]) 0
, Tuning "Diminished Fourths Tuning (7 String)" "Seven-String Guitar"
(fmap (+2) $ 0 :| [6, 12, 18, 24, 30, 36]) 0
]
},
Temperament {
_temperamentName = "17-TET"
, _divisions = 17
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "Standard" [
"A","Bb","A#","B","C","Db","C#","D",
"Eb","D#","E","F","Gb","F#","G","Ab",
"G#"
]
]
, _chords =
[
]
, _scales =
[
Scale "Major"
(3 :| [3, 3, 1, 3, 3, 1])
, Scale "Maqamic[7] (bish mode)"
(2 :| [3, 2, 3, 2, 3, 2])
, Scale "Maqamic[7] (dril mode)"
(3 :| [2, 3, 2, 3, 2, 2])
, Scale "Maqamic[7] (fish mode)"
(2 :| [3, 2, 3, 2, 2, 3])
, Scale "Maqamic[7] (gil mode)"
(3 :| [2, 3, 2, 2, 3, 2])
, Scale "Maqamic[7] (jwl mode)"
(2 :| [3, 2, 2, 3, 2, 3])
, Scale "Maqamic[7] (kleeth mode)"
(3 :| [2, 2, 3, 2, 3, 2])
, Scale "Maqamic[7] (led mode)"
(2 :| [2, 3, 2, 3, 2, 3])
, Scale "Maqamic[10]"
(2 :| [2, 2, 1, 2, 2, 1, 2, 2, 1])
, Scale "Lovecraft[9]"
(3 :| [1, 3, 1, 3, 1, 3, 1, 1])
, Scale "Squares[5]"
(5 :| [5, 1, 5, 1])
, Scale "Squares[8]"
(1 :| [1, 4, 1, 4, 1, 4])
, Scale "Hydra"
(3 :| [3, 1, 1, 2, 3, 2, 1, 1])
, Scale "Springfieldian"
(3 :| [3, 2, 2, 3, 3, 1])
, Scale "Northhaverbrookian"
(2 :| [3, 3, 1, 3, 3, 2])
, Scale "Shelbyvillean"
(3 :| [3, 1, 3, 3, 2, 2])
, Scale "Otonal 17"
(3 :| [2, 3, 2, 2, 2, 3])
, Scale "Bleu[8]"
(3 :| [2, 2, 2, 2, 2, 2, 2])
, Scale "Bleu[9]"
(1 :| [2, 2, 2, 2, 2, 2, 2, 2])
, Scale "Machine[5]"
(5 :| [3, 3, 3, 3])
, Scale "Machine[6]"
(2 :| [3, 3, 3, 3, 3])
, Scale "Machine[11]"
(2 :| [2, 1, 2, 1, 2, 1, 2, 1, 2, 1])
, Scale "Huxley[5]"
(1 :| [4, 4, 4, 4])
, Scale "Huxley[9]"
(1 :| [1, 3, 1, 3, 1, 3, 1, 3])
]
, _tunings =
[
Tuning "Standard Tuning" "Six-String Guitar"
(fmap (+10) $ 0 :| [7, 14, 21, 27, 34]) 0
, Tuning "All Fourths" "Six-String Guitar"
(fmap (+10) $ 0 :| [7, 14, 21, 28, 35]) 0
]
},
Temperament {
_temperamentName = "18-TET"
, _divisions = 18
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "" [
"A","Bb","A#","B","C","Db","C#","D",
"Eb","D#","E","F","Gb","F#","G",
"Hb","G#","H"
]
]
, _chords =
[
]
, _scales =
[
Scale "Antipentic"
(4 :| [4, 3, 4, 3])
, Scale "Bicycle"
(4 :| [4, 1, 4, 4, 1])
, Scale "Mavila[5]"
(2 :| [6, 2, 6, 2])
, Scale "Malic[6]"
(2 :| [5, 2, 2, 5, 2])
, Scale "Mish Heptatonic"
(3 :| [2, 3, 2, 3, 3, 2])
, Scale "Smitonic"
(3 :| [2, 3, 2, 3, 3, 2])
, Scale "Oneirotonic"
(3 :| [1, 3, 3, 1, 3, 3, 1])
, Scale "Antiekic"
(2 :| [2, 3, 2, 2, 2, 3, 2])
, Scale "Tcherepnin"
(4 :| [1, 1, 4, 1, 1, 4, 1, 1])
, Scale "Taric"
(2 :| [2, 1, 2, 2, 2, 2, 1, 2, 2])
]
, _tunings =
[
Tuning "Wide Fourths" "Six-String Guitar"
(0 :| [8, 16, 24, 32, 40]) 0
]
},
Temperament {
_temperamentName = "19-TET"
, _divisions = 19
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "Standard" [
"A","A#","Bb","B","B#","C",
"C#","Db","D","D#","Eb","E",
"E#","F","F#","Gb","G","G#","Ab"
]
]
, _chords =
[
]
, _scales =
[
Scale "Ionian (Major)"
(3 :| [3, 2, 3, 3, 3, 2])
, Scale "Sensi[5]"
(5 :| [5, 2, 5, 2])
, Scale "Sensi[8]"
(2 :| [3, 2, 2, 3, 2, 2, 3])
, Scale "Negri[9]"
(2 :| [2, 2, 2, 3, 2, 2, 2, 2])
, Scale "Negri[10]"
(2 :| [2, 2, 2, 2, 2, 2, 2, 2, 1])
, Scale "Kleismic[7]"
(1 :| [4, 1, 4, 1, 4, 4])
, Scale "Semaphore[5]"
(4 :| [4, 4, 4, 3])
, Scale "Semaphore[9]"
(3 :| [3, 1, 3, 1, 3, 1, 3, 1])
, Scale "Magic[7]"
(5 :| [1, 5, 1, 5, 1, 1])
, Scale "Magic[10]"
(4 :| [1, 1, 4, 1, 1, 4, 1, 1, 1])
, Scale "Marvel hexatonic"
(4 :| [2, 5, 2, 4, 2])
, Scale "deutone[6]"
(4 :| [3, 3, 3, 3, 3])
, Scale "deutone[7]"
(3 :| [3, 3, 3, 3, 3, 1])
, Scale "kleismic[7]"
(4 :| [4, 1, 4, 1, 4, 1])
, Scale "liese[5]"
(8 :| [1, 8, 1, 1])
, Scale "liese[7]"
(7 :| [1, 1, 7, 1, 1, 1])
, Scale "liese[9]"
(6 :| [1, 1, 1, 6, 1, 1, 1, 1])
]
, _tunings =
[
Tuning "Standard Tuning" "Six-String Guitar"
(fmap (+11) $ 0 :| [8, 16, 24, 30, 38]) 0
]
},
Temperament {
_temperamentName = "20-TET"
, _divisions = 20
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales =
[
Scale "Blackwood Major Decatonic"
(3 :| [1, 3, 1, 3, 1, 3, 1, 3, 1])
, Scale "Blackwood Minor Decatonic"
(1 :| [3, 1, 3, 1, 3, 1, 3, 1, 3])
, Scale "Blackwood Major Pentadecatonic"
(2 :| [1, 1, 2, 1, 1, 2, 1, 1, 2, 1, 1])
, Scale "Blackwood Diminished Pentadecatonic"
(1 :| [1, 2, 1, 1, 2, 1, 1, 2, 1, 1, 2])
, Scale "Blackwood Minor Pentadecatonic"
(1 :| [2, 1, 1, 2, 1, 1, 2, 1, 1, 2, 1])
, Scale "Balzano Nine-tone"
(2 :| [3, 2, 2, 2, 3, 2, 2, 2])
, Scale "Balzano Eleven-tone"
(2 :| [2, 2, 2, 1, 2, 2, 2, 2, 2, 1])
, Scale "Balzano Nine-tone inverse"
(2 :| [2, 2, 3, 2, 2, 2, 3, 2])
, Scale "Balzano Eleven-tone inverse"
(1 :| [2, 2, 2, 2, 2, 1, 2, 2, 2, 2])
, Scale "Octatonic"
(2 :| [3, 2, 3, 2, 3, 2, 3])
, Scale "Diminished"
(3 :| [2, 3, 2, 3, 2, 3, 2])
, Scale "Dodecatonic"
(2 :| [2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1])
, Scale "Major"
(4 :| [3, 1, 4, 3, 4, 1])
, Scale "Minor"
(4 :| [1, 3, 4, 1, 4, 3])
, Scale "Major quasi-equal Heptatonic"
(3 :| [3, 3, 3, 3, 3, 2])
, Scale "Minor quasi-equal Heptatonic"
(3 :| [2, 3, 3, 3, 3, 3])
, Scale "Rothenberg Generalized Diatonic"
(3 :| [2, 2, 2, 2, 3, 2, 2, 2])
, Scale "Stearns Major"
(3 :| [4, 1, 4, 3, 3, 2])
, Scale "score5"
(7 :| [2, 7, 2, 2])
, Scale "Mavilla[7]"
(5 :| [2, 2, 5, 2, 2, 2])
]
, _tunings =
[
Tuning "Flat Forths" "Six-String Guitar"
(0 :| [8, 16, 24, 32, 40]) 0
]
},
Temperament {
_temperamentName = "21-TET"
, _divisions = 21
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales =
[
Scale "Antisinatonic (1L 9s)"
(3 :| [2, 2, 2, 2, 2, 2, 2, 2, 2])
, Scale "Machinoid (5L 1s)"
(4 :| [4, 4, 4, 4, 1])
, Scale "p-chro Machinoid (5L 6s)"
(3 :| [1, 1, 3, 1, 3, 1, 3, 1, 3, 1])
, Scale "Manual (4L 1s)"
(5 :| [5, 5, 5, 1])
, Scale "Gramitonic (4L 5s)"
(4 :| [1, 4, 1, 4, 1, 4, 1, 1])
, Scale "Antipentic (3L 2s)"
(5 :| [5, 3, 5, 3])
, Scale "Oneirotonic (5L 3s)"
(3 :| [3, 2, 3, 3, 2, 3, 2])
, Scale "LH Diasem Ionian"
(3 :| [1, 3, 2, 3, 3, 1, 3, 2])
, Scale "LH Diasem Mixo"
(3 :| [1, 3, 2, 3, 1, 3, 2, 3])
, Scale "LH Diasem Dorian"
(1 :| [3, 2, 3, 3, 1, 3, 2, 3])
, Scale "LH Diasem Aeolian"
(3 :| [2, 3, 1, 3, 2, 3, 3, 1])
, Scale "LH Diasem Phrygian"
(2 :| [3, 3, 1, 3, 2, 3, 1, 3])
, Scale "LH Diasem Lydian"
(3 :| [3, 1, 3, 2, 3, 1, 3, 2])
, Scale "LH Diasem Darkened Dorian"
(3 :| [2, 3, 3, 1, 3, 2, 3, 1])
, Scale "LH Diasem Brightened Aeolian"
(1 :| [3, 2, 3, 1, 3, 2, 3, 3])
, Scale "LH Diasem Locrian"
(2 :| [3, 1, 3, 2, 3, 3, 1, 3])
, Scale "RH Diasem Ionian"
(3 :| [1, 3, 2, 3, 1, 3, 3, 2])
, Scale "RH Diasem Mixo"
(1 :| [3, 3, 2, 3, 1, 3, 2, 3])
, Scale "RH Diasem Dorian"
(3 :| [2, 3, 1, 3, 3, 2, 3, 1])
, Scale "RH Diasem Aeolian"
(3 :| [2, 3, 1, 3, 2, 3, 1, 3])
, Scale "RH Diasem Phrygian"
(2 :| [3, 1, 3, 3, 2, 3, 1, 3])
, Scale "RH Diasem Lydian"
(3 :| [1, 3, 3, 2, 3, 1, 3, 2])
, Scale "RH Diasem Darkened Mixo"
(3 :| [3, 2, 3, 1, 3, 2, 3, 1])
, Scale "RH Diasem Brightened Dorian"
(1 :| [3, 2, 3, 1, 3, 3, 2, 3])
, Scale "RH Diasem Locrian"
(2 :| [3, 1, 3, 2, 3, 1, 3, 3])
]
, _tunings = [
Tuning "Standard Tuning" "Six String Guitar"
(0 :| [9, 18, 27, 33, 42]) 0
]
},
Temperament {
_temperamentName = "22-TET"
, _divisions = 22
, _period = 2 % 1
, _notationSystems =
[
NotationSystem "Sagittal" [
"A",
"A" <> sagittal5CommaUp,
"A" <> sagittalSharp5CDown,
"A" <> sagittalSharp,
"B",
"C",
"C" <> sagittal5CommaUp,
"C" <> sagittalSharp5CDown,
"C" <> sagittalSharp,
"D",
"D" <> sagittal5CommaUp,
"D" <> sagittalSharp5CDown,
"D" <> sagittalSharp,
"E",
"F",
"F" <> sagittal5CommaUp,
"F" <> sagittalSharp5CDown,
"F" <> sagittalSharp,
"G",
"G" <> sagittal5CommaUp,
"G" <> sagittalSharp5CDown,
"G" <> sagittalSharp
]
, NotationSystem "Standard (Meantone)" [
"A","A#","Bb","B","B#",
"Cb","C","C#","Db","D",
"D#","Eb","E","E#","Fb",
"F","F#","Gb","G","G#",
"Gx","Ab"
]
]
, _chords =
[
Chord "Major"
(0 :| [7, 6, 9])
, Chord "Minor"
(0 :| [6, 7, 9])
, Chord "SuperMajor"
(0 :| [8, 5, 9])
, Chord "SubMinor"
(0 :| [5, 8, 9])
, Chord "Magical"
(0 :| [5, 7, 9])
, Chord "Tiny"
(0 :| [5, 5, 11])
, Chord "Giant"
(0 :| [8, 7, 6])
, Chord "Minor Seventh"
(0 :| [6, 7, 6, 3])
, Chord "Super Seventh"
(0 :| [8, 5, 8, 1])
, Chord "Sub Seventh"
(0 :| [5, 8, 5, 4])
, Chord "Magical Seventh"
(0 :| [5, 7, 6, 4])
, Chord "Major Super seventh"
(0 :| [8, 5, 8, 1])
, Chord "Minor Sub Seventh"
(0 :| [5, 8, 5, 4])
, Chord "Super Minor Seventh"
(0 :| [8, 5, 6, 3])
, Chord "Sub Major Seventh"
(0 :| [5, 8, 6, 3])
, Chord "Super Sub Seventh"
(0 :| [8, 5, 5, 4])
, Chord "Harmonic Seventh"
(0 :| [7, 6, 5, 4])
, Chord "Tiny seventh"
(0 :| [5, 5, 5, 7])
, Chord "Giant Sixth"
(0 :| [8, 7, 5, 1])
, Chord "Harmonic Minor Sixth"
(0 :| [6, 7, 5, 4])
]
, _scales =
[
Scale "Superpyth[7] (Major)"
(4 :| [4, 1, 4, 4, 4, 1])
, Scale "Superpyth[7] (Dorian)"
(4 :| [1, 4, 4, 4, 1, 4])
, Scale "Superpyth[7] (Phrygian)"
(1 :| [4, 4, 4, 1, 4, 4])
, Scale "Superpyth[7] (Lydian)"
(4 :| [4, 4, 1, 4, 4, 1])
, Scale "Superpyth[7] (Mixolydian)"
(4 :| [4, 1, 4, 4, 1, 4])
, Scale "Superpyth[7] (Minor)"
(4 :| [1, 4, 4, 1, 4, 4])
, Scale "Superpyth[7] (Locrian)"
(1 :| [4, 4, 1, 4, 4, 4])
, Scale "Maqam Bayati"
(3 :| [2, 4, 4, 1, 4, 4])
, Scale "Maqam Jiharkah"
(4 :| [4, 1, 4, 4, 2, 3])
, Scale "Maqam Husayni 'Ushayran"
(3 :| [2, 4, 3, 2, 4, 4])
, Scale "Maqam Saba"
(3 :| [2, 4, 4, 1, 4, 2, 2])
, Scale "Maqam Rast"
(4 :| [2, 3, 4, 4, 2, 3])
, Scale "Syntonic Major"
(4 :| [3,2,4,3,4,2])
, Scale "Syntonic Dorian"
(3 :| [2,4,3,4,2,4])
, Scale "Syntonic Phrygian"
(2 :| [4,3,4,2,4,3])
, Scale "Syntonic Lydian"
(4 :| [3,4,2,4,3,2])
, Scale "Syntonic Mixolydian"
(3 :| [4,2,4,3,2,4])
, Scale "Syntonic Minor"
(4 :| [2,4,3,2,4,3])
, Scale "Syntonic Locrian"
(2 :| [4,3,2,4,3,4])
, Scale "Superpyth Blues"
(5 :| [4, 1, 3, 5, 4])
, Scale "Bright Minor Blues"
(6 :| [3, 1, 3, 6, 3])
, Scale "Astrology[6]"
(4 :| [3, 4, 4, 3, 4])
, Scale "Porcupine[7]"
(3 :| [3, 3, 4, 3, 3, 3])
, Scale "Porcupine[8]"
(3 :| [3, 3, 3, 3, 3, 3, 1])
, Scale "Orwell[5]"
(5 :| [5, 2, 5, 5])
, Scale "Orwell[9]"
(2 :| [3, 2, 3, 2, 3, 2, 3, 2])
, Scale "Magic[7]"
(1 :| [6, 1, 6, 1, 6, 1])
, Scale "Magic[10]"
(5 :| [1, 1, 5, 1, 1, 5, 1, 1, 1])
, Scale "Pajara[10]"
(2 :| [2, 3, 2, 2, 2, 2, 3, 2, 2])
, Scale "Pentachordal Decatonic"
(2 :| [2, 3, 2, 2, 2, 3, 2, 2, 2])
, Scale "Hedgehog[6]"
(3 :| [5, 3, 3, 5, 3])
, Scale "Hedgehog[8]"
(3 :| [3, 3, 2, 3, 3, 3, 2])
, Scale "Astrology[6]"
(4 :| [3, 4, 4, 3, 4])
, Scale "Astrology[10]"
(3 :| [1, 3, 1, 3, 3, 1, 3, 1, 3])
, Scale "Doublewide[6]"
(5 :| [5, 1, 5, 5, 1])
, Scale "Doublewide[10]"
(4 :| [1, 4, 1, 1, 4, 1, 4, 1, 1])
, Scale "Porcupine bright major #7"
(4 :| [3, 3, 3, 3, 4, 2])
, Scale "Porcupine bright major #6 #7"
(4 :| [3, 3, 3, 4, 3, 2])
, Scale "Porcupine bright minor #2"
(4 :| [2, 4, 3, 3, 3, 3])
, Scale "Porcupine dark minor #2"
(4 :| [2, 3, 4, 3, 3, 3])
, Scale "Porcupine bright harmonic 11th mode"
(4 :| [3, 3, 3, 3, 2, 4])
, Scale "Superpyth harmonic minor"
(4 :| [1, 4, 4, 1, 7, 1])
, Scale "Superpyth harmonic major"
(4 :| [4, 1, 4, 1, 7, 1])
, Scale "Superpyth melodic minor"
(4 :| [1, 4, 4, 4, 4, 1])
, Scale "Superpyth double harmonic major"
(1 :| [7, 1, 4, 1, 7, 1])
, Scale "Syntonic Harmonic Minor"
(4 :| [2, 3, 4, 2, 5, 2])
, Scale "Syntonic Harmonic Major"
(4 :| [3, 2, 4, 2, 5, 2])
, Scale "Syntonic Melodic Minor"
(4 :| [2, 3, 4, 3, 4, 2])
, Scale "Marvel Double Harmonic Major"
(2 :| [5, 2, 4, 2, 5, 2])
, Scale "Blackdye"
(1 :| [3, 2, 3, 1, 3, 2, 3, 1, 3])
, Scale "Marvel Hexatonic"
(5 :| [2, 6, 2, 5, 2])
11 - EDO inclusions
, Scale "Machine[6]"
(fmap (*2) $ 2 :| [2, 2, 2, 2, 1])
, Scale "Orgone[7] (Nerevarine)"
(fmap (*2) $ 2 :| [2, 1, 2, 1, 2, 1])
, Scale "Orgone[7] (Vivecan)"
(fmap (*2) $ 2 :| [1, 2, 2, 1, 2, 1])
, Scale "Orgone[7] (Lorkhanic)"
(fmap (*2) $ 2 :| [1, 2, 1, 2, 2, 1])
, Scale "Orgone[7] (Sothic)"
(fmap (*2) $ 2 :| [1, 2, 1, 2, 1, 2])
, Scale "Orgone[7] (Kagrenacan)"
(fmap (*2) $ 1 :| [2, 2, 1, 2, 1, 2])
, Scale "Orgone[7] (Almalexian)"
(fmap (*2) $ 1 :| [2, 1, 2, 2, 1, 2])
, Scale "Orgone[7] (Dagothic)"
(fmap (*2) $ 1 :| [2, 1, 2, 1, 2, 2])
, Scale "Joan Pentatonic"
(fmap (*2) $ 1 :| [4, 1, 4, 1])
, Scale "Joan Heptatonic"
(fmap (*2) $ 1 :| [1, 1, 3, 1, 1, 3])
, Scale "Joan Nonatonic"
(fmap (*2) $ 1 :| [1, 1, 2, 1, 1, 1, 2, 1])
]
, _tunings =
[
Tuning "Standard Tuning" "Six-String Guitar"
(fmap (+13) $ 0 :| [9, 18, 27, 35, 44]) 0
, Tuning "Drop D" "Six-String Guitar"
(fmap (+13) $ 0 :| [5, 18, 27, 35, 44]) 0
, Tuning "All Fourths Tuning" "Six-String Guitar"
(fmap (+13) $ 0 :| [9, 18, 27, 36, 45]) 0
, Tuning "Narrow Fourths Tuning" "Six-String Guitar"
(fmap (+13) $ 0 :| [8, 16, 24, 32, 40]) 0
, Tuning "Wide Fourths Tuning" "Six-String Guitar"
(fmap (+13) $ 0 :| [10, 20, 30, 40, 50]) 0
]
},
Temperament {
_temperamentName = "23-TET"
, _divisions = 23
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales =
[
Scale "3L 2s (oneiro-pentatonic)"
(5 :| [4, 5, 5, 4])
, Scale "4L 1s (bug pentatonic)"
(5 :| [5, 5, 5, 3])
, Scale "5L 1s (machinoid)"
(4 :| [4, 4, 4, 4, 3])
, Scale "4L 3s (smitonic)"
(5 :| [1, 5, 1, 5, 1, 5])
, Scale "1L 6s (antiarcheotonic)"
(3 :| [3, 3, 5, 3, 3, 3])
, Scale "2L 5s (mavila, anti-diatonic)"
(3 :| [3, 4, 3, 3, 3, 4])
, Scale "3L 4s (mosh)"
(2 :| [5, 2, 5, 2, 5, 2])
, Scale "5L 3s (oneirotonic)"
(4 :| [1, 4, 4, 1, 4, 4, 1])
, Scale "7L 1s (porcupoid)"
(3 :| [3, 3, 3, 3, 3, 3, 2])
, Scale "7L 2s (mavila superdiatonic)"
(3 :| [3, 3, 1, 3, 3, 3, 3, 1])
, Scale "5L 4s (bug semiquartal)"
(3 :| [2, 3, 2, 3, 2, 3, 2, 3])
, Scale "3L 7s (sephiroid)"
(3 :| [2, 2, 3, 2, 2, 3, 2, 2, 2])
]
, _tunings =
[
Tuning "Wide Fourths" "Six String Guitar"
(0 :| [10, 20, 30, 40, 50]) 0
]
},
Temperament {
_temperamentName = "24-TET"
, _divisions = 24
, _period = 2 % 1
, _notationSystems =
[
]
, _chords =
[
]
, _scales =
[
Scale "Ionian (Major)"
(4 :| [4, 2, 4, 4, 4, 2])
, Scale "Anchihoye: Ethiopia"
(2 :| [8, 3, 6, 5])
, Scale "Enharmonic Phrygian"
(8 :| [1, 1, 8, 4, 1, 1])
, Scale "Maqam Rast"
(4 :| [3, 3, 4, 4, 3, 3])
, Scale "Mohajira[7]"
(3 :| [4, 3, 4, 3, 4, 3])
]
, _tunings =
[
Tuning "Standard Tuning" "Six-String Guitar"
(fmap (+14) $ 0 :| [10, 20, 30, 38, 48]) 0
, Tuning "Drop D" "Six-String Guitar"
(fmap (+12) $ 0 :| [14, 24, 34, 42, 52]) 0
]
},
Temperament {
_temperamentName = "25-TET"
, _divisions = 25
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Bleu[8]"
(3 :| [3,3,3,3,3,3,4])
, Scale "Bleu[9]"
(3 :| [3,3,3,3,3,3,3,1])
, Scale "Luna[6]"
(4 :| [4,4,4,4,5])
, Scale "Luna[7]"
(4 :| [4,4,4,4,4,1])
, Scale "Gariberttet[5]"
(6 :| [6,6,6,1])
, Scale "Gariberttet[9]"
(5 :| [1,5,1,5,1,5,1,1])
, Scale "Sixix[7]"
(3 :| [4,3,4,3,4,4])
, Scale "Magic[7]"
(7 :| [1,7,1,7,1,1])
, Scale "Magic[10]"
(6 :| [1,1,6,1,1,6,1,1,1])
, Scale "Antipentic (3L 2s)"
(2 :| [7,2,7,7])
, Scale "Checkertonic (3L 5s)"
(2 :| [2,5,2,2,5,2,5])
, Scale "Pelogic[5]"
(8 :| [3,8,3,3])
, Scale "Pelogic[7]"
(5 :| [3,3,5,3,3,3])
, Scale "Pelogic[9]"
(2 :| [3,3,3,2,3,3,3,3])
, Scale "Triton[5]"
(11 :| [1,11,1,1])
, Scale "Triton[7]"
(10 :| [1,1,10,1,1,1])
, Scale "Triton[9]"
(9 :| [1,1,1,9,1,1,1,1])
]
, _tunings = []
},
Temperament {
_temperamentName = "26-TET"
, _divisions = 26
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales =
[
Scale "Flattone"
(4 :| [4, 4, 3, 4, 4, 3])
, Scale "Orgone"
(5 :| [5, 2, 5, 2, 5, 2])
, Scale "Lemba"
(5 :| [5, 3, 5, 5, 3])
]
, _tunings =
[
Tuning "All Fourths" "Six String Guitar"
(0 :| [11, 22, 33, 44, 55]) 0
]
},
Temperament {
_temperamentName = "27-TET"
, _divisions = 27
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Tetracot[6]"
(4 :| [4,4,4,4,7])
, Scale "Tetractot[7]"
(4 :| [4,4,4,4,4,3])
, Scale "Machine[5]"
(5 :| [5,5,5,7])
, Scale "Machine[6]"
(5 :| [5,5,5,5,2])
, Scale "Myna[7]"
(1 :| [6,1,6,1,6,6])
, Scale "Beatles[7]"
(5 :| [3,5,3,5,3,3])
, Scale "Beatles[10]"
(2 :| [3,3,2,3,3,2,3,3,3])
, Scale "Sensi[5]"
(3 :| [7,3,7,7])
, Scale "Sensi[8]"
(3 :| [3,4,3,3,4,3,4])
, Scale "Superpyth[7]"
(1 :| [5,5,1,5,5,5])
, Scale "Fervor[5]"
(12 :| [1,12,1,1])
, Scale "Fervor[7]"
(11 :| [1,1,11,1,1,1])
, Scale "Fervor[9]"
(10 :| [1,1,1,10,1,1,1,1])
]
, _tunings = []
},
Temperament {
_temperamentName = "28-TET"
, _divisions = 28
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales =
[
Scale "Negri [9]"
(3 :| [3, 3, 3, 4, 3, 3, 3, 3])
, Scale "Negri [10]"
(3 :| [3, 3, 3, 3, 3, 3, 3, 3, 1])
, Scale "Diatonic Major [7]"
(5 :| [4, 3, 4, 5, 5, 2])
, Scale "Diatonic Minor [7]"
(5 :| [2, 5, 4, 3, 4, 5])
, Scale "Diatonic Naive Major [7]"
(4 :| [5, 3, 4, 5, 4, 3])
, Scale "Diatonic Naive Minor [7]"
(4 :| [3, 5, 4, 3, 4, 5])
, Scale "Harmonic Minor [7]"
(5 :| [2, 5, 4, 3, 7, 2])
, Scale "Harmonic Major [7]"
(5 :| [4, 3, 4, 3, 7, 2])
, Scale "Diasem (Right-handed)"
(4 :| [1, 4, 4, 3, 4, 1, 4, 3])
, Scale "Diasem (Left-handed)"
(4 :| [4, 1, 4, 3, 4, 1, 4, 3])
, Scale "Oneirotonic [5]"
(6 :| [5, 6, 5, 6])
, Scale "Oneirotonic [8]"
(5 :| [5, 1, 5, 5, 1, 5, 1])
]
, _tunings =
[
Tuning "Wide Fourths" "Six String Guitar"
(0 :| [12, 24, 36, 48, 69]) 0
, Tuning "Narrow Fourths" "Six String Guitar"
(0 :| [11, 22, 33, 44, 55]) 0
]
},
Temperament {
_temperamentName = "29-TET"
, _divisions = 29
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Nicetone"
(5 :| [4, 3, 5, 4, 5, 3])
, Scale "Porcupine[7]"
(4 :| [4, 4, 4, 4, 4, 5])
, Scale "Porcupine[8]"
(4 :| [4, 4, 4, 4, 4, 4, 1])
, Scale "Negri[9]"
(3 :| [3, 3, 3, 3, 3, 3, 3, 5])
, Scale "Negri[10]"
(3 :| [3, 3, 3, 3, 3, 3, 3, 3, 2])
, Scale "Semaphore[5]"
(5 :| [6, 6, 6, 6])
, Scale "Semaphore[9]"
(5 :| [5, 1, 5, 1, 5, 1, 5, 1])
, Scale "Leapfrog[7]"
(5 :| [5, 5, 2, 5, 5, 2])
]
, _tunings = [
Tuning "Standard Tuning" "Six String Guitar"
(0 :| [12, 24, 36, 46, 58]) 0
]
},
Temperament {
_temperamentName = "30-TET"
, _divisions = 30
, _period = 2 % 1
, _notationSystems =
[
]
, _chords = []
, _scales =
[
Scale "Lovecraft[5]"
(7 :| [7, 7, 7, 2])
, Scale "Lovecraft[9]"
(5 :| [2, 5, 2, 5, 2, 5, 2, 2])
, Scale "Sensi[5]"
(8 :| [3, 8, 3, 8])
, Scale "Sensi[8]"
(5 :| [3, 3, 5, 3, 3, 5, 3])
, Scale "Mavila[5]"
(9 :| [4, 9, 4, 4])
, Scale "Mavila[7]"
(5 :| [4, 4, 5, 4, 4, 4])
, Scale "Mavila[9]"
(4 :| [4, 4, 4, 1, 4, 4, 4, 1])
]
, _tunings =
[
Tuning "Narrow Fourths" "Six String Guitar"
(0 :| [12, 24, 36, 48, 60]) 0
]
},
Temperament {
_temperamentName = "31-TET"
, _divisions = 31
, _period = 2 % 1
, _notationSystems =
[
]
, _chords =
[
]
, _scales =
[
Scale "Miracle[5]"
(3 :| [3, 3, 3, 19])
, Scale "Nusecond[5]"
(4 :| [4, 4, 4, 15])
, Scale "Hemithirds[5]"
(5 :| [5, 5, 5, 11])
, Scale "Mothra[5]"
(6 :| [6, 6, 6, 7])
, Scale "Orwell[5]"
(7 :| [7, 7, 7, 3])
, Scale "Squares[5]"
(2 :| [9, 2, 9, 9])
, Scale "Semisept[5]"
(5 :| [7, 5, 7, 7])
, Scale "Meantone[5]"
(8 :| [5, 8, 5, 5])
, Scale "Casablanca[5]"
(11 :| [3, 11, 3, 3])
, Scale "Tritonic[5]"
(14 :| [1, 14, 1, 1])
, Scale "Miracle[6]"
(3 :| [3, 3, 3, 3, 16])
, Scale "Nusecond[6]"
(4 :| [4, 4, 4, 4, 11])
, Scale "Hemithirds[6]"
(5 :| [5, 5, 5, 5, 6])
, Scale "Mothra[6]"
(6 :| [6, 6, 6, 6, 1])
, Scale "Miracle[7]"
(3 :| [3, 3, 3, 3, 3, 13])
, Scale "Nusecond[7]"
(4 :| [4, 4, 4, 4, 4, 7])
, Scale "Hemithirds[7]"
(5 :| [5, 5, 5, 5, 5, 1])
, Scale "Myna[7]"
(1 :| [7, 1, 7, 1, 7, 7])
, Scale "Mohajira[7]"
(5 :| [4, 5, 4, 5, 4, 4])
, Scale "Würschmidt[7]"
(9 :| [1, 9, 1, 9, 1, 1])
, Scale "Meantone[7]"
(3 :| [5, 5, 3, 5, 5, 5])
, Scale "Casablanca[7]"
(8 :| [3, 3, 8, 3, 3, 3])
, Scale "Tritonic[7]"
(13 :| [1, 1, 13, 1, 1, 1])
, Scale "Miracle[8]"
(3 :| [3, 3, 3, 3, 3, 3, 10])
, Scale "Nusecond[8]"
(4 :| [4, 4, 4, 4, 4, 4, 3])
, Scale "Squares[8]"
(2 :| [2, 7, 2, 2, 7, 2, 7])
, Scale "Semisept[8]"
(5 :| [5, 2, 5, 5, 2, 5, 2])
, Scale "Miracle[9]"
(3 :| [3, 3, 3, 3, 3, 3, 3, 7])
, Scale "Orwell[9]"
(4 :| [3, 4, 3, 4, 3, 4, 3, 3])
, Scale "Casablanca[9]"
(5 :| [3, 3, 3, 5, 3, 3, 3, 3])
]
, _tunings =
[
Tuning "Standard Tuning" "Six String Guitar"
(0 :| [13, 26, 39, 49, 62]) 0
]
},
Temperament {
_temperamentName = "32-TET"
, _divisions = 32
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "blackdye"
(1 :| [5, 2, 5, 1, 5, 2, 5, 1, 5])
, Scale "Sixix[7]"
(4 :| [5,4,5,4,5,5])
, Scale "Pajara[5]"
(6 :| [6,7,6,7])
, Scale "Pajara[7]"
(6 :| [6,6,1,6,6,1])
, Scale "Pentic"
(4 :| [4,10,4,10])
, Scale "Antidiatonic"
(4 :| [4,4,6,4,4,6])
]
, _tunings = [
Tuning "Wide Fourths" "Six String Guitar"
(0 :| [14, 28, 42, 56, 70]) 0
]
},
Temperament {
_temperamentName = "33-TET"
, _divisions = 33
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "1L 4s" (6 :| [6,6,6,9])
, Scale "4L 1s" (7 :| [7,7,7,5])
, Scale "3L 2s" (3 :| [9,3,9,9])
, Scale "4L 1s" (5 :| [7,7,7,7])
, Scale "1L 5s" (5 :| [5,5,5,5,8])
, Scale "5L 1s" (6 :| [6,6,6,6,3])
, Scale "5L 2s" (5 :| [5,5,4,5,5,4])
, Scale "4L 3s" (6 :| [6,3,6,3,6,3])
, Scale "3L 5s" (3 :| [3,6,3,3,6,3,6])
, Scale "5L 3s" (6 :| [6,1,6,6,1,6,1])
]
, _tunings = [
Tuning "All Fourths" "Six String Guitar"
(0 :| [14, 28, 42, 56, 70]) 0
]
},
Temperament {
_temperamentName = "34-TET"
, _divisions = 34
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Tetracot[5]"
(5 :| [5,5,5,14])
, Scale "Tetracot[6]"
(5 :| [5,5,5,5,9])
, Scale "Tetracot[7]"
(5 :| [5,5,5,5,5,4])
, Scale "Immunity[5]"
(7 :| [7,7,7,6])
, Scale "Immunity[9]"
(1 :| [6,1,6,1,6,1,6,6])
, Scale "Hanson[7]"
(2 :| [7,2,7,2,7,7])
, Scale "Petrtri[5]"
(5 :| [8,5,8,8])
, Scale "Petrtri[8]"
(5 :| [5,3,5,5,3,5,3])
, Scale "Mabila[5]"
(11 :| [4,11,4,4])
, Scale "Mabila[7]"
(7 :| [4,4,7,4,4,4])
]
, _tunings = []
},
Temperament {
_temperamentName = "35-TET"
, _divisions = 35
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Secund[9]"
(4 :| [4,4,4,4,4,4,4,3])
, Scale "Ripple[10]"
(3 :| [3,3,3,3,3,3,3,3,8])
, Scale "Baldy[5]"
(6 :| [6,6,6,11])
, Scale "Baldy[6]"
(6 :| [6,6,6,6,5])
, Scale "Baldy[11]"
(1 :| [5,1,5,1,5,1,5,1,5,5])
, Scale "Orwell[5]"
(8 :| [8,8,8,3])
, Scale "Orwell[9]"
(5 :| [3,5,3,5,3,5,3,3])
, Scale "Myna[7]"
(1 :| [8,1,8,1,8,8])
, Scale "Myna[11]"
(1 :| [1,7,1,1,7,1,1,7,1,7])
, Scale "Muggles[7]"
(9 :| [2,9,2,9,2,2])
, Scale "Muggles[10]"
(7 :| [2,2,7,2,2,7,2,2,2])
, Scale "Roman[5]"
(1 :| [11,1,11,11])
, Scale "Roman[8]"
(1 :| [1,10,1,1,10,1,10])
, Scale "Sensi[5]"
(4 :| [9,4,9,9])
, Scale "Sensi[8]"
(4 :| [4,5,4,4,5,4,5])
, Scale "Sensi[11]"
(4 :| [4,4,1,4,4,4,1,4,4,1])
]
, _tunings = [
Tuning "Wide Fourths" "Six String Guitar"
(0 :| [15, 30, 45, 60, 75]) 0
]
},
Temperament {
_temperamentName = "36-TET"
, _divisions = 36
, _period = 2 % 1
, _notationSystems = []
, _chords = []
, _scales = [
Scale "Pentatonic"
(6 :| [6,9,6,9])
, Scale "Lydian"
(6 :| [6,6,3,6,6,3])
]
, _tunings = [
Tuning "Standard Tuning" "Six String Guitar"
(0 :| [15, 30, 45, 57, 72]) 0
]
},
Temperament {
_temperamentName = "41-TET"
, _divisions = 41
, _period = 2 % 1
, _notationSystems =
[
]
, _chords =
[
]
, _scales =
[
Scale "Down Lydian"
(7 :| [6,7,4,7,6,4])
, Scale "Down Major"
(7 :| [6, 4, 7, 6, 7, 4])
, Scale "Down Mixolydian"
(6 :| [7, 4, 7, 6, 4, 7])
, Scale "Up Minor"
(7 :| [4, 6, 7, 4, 7, 6])
, Scale "Up Phrygian"
(4 :| [7, 6, 7, 4, 6, 7])
, Scale "Up Dorian"
(7 :| [4, 6, 7, 7, 4, 6])
, Scale "Up Locrian"
(4 :| [6, 7, 3, 8, 6, 7])
, Scale "Up Lydian"
(7 :| [8, 7, 2, 7, 8, 2])
, Scale "Up Major"
(7 :| [8, 2, 7, 8, 7, 2])
, Scale "Up Mixolydian"
(8 :| [7, 2, 7, 8, 2, 7])
, Scale "Down Minor"
(7 :| [2, 8, 7, 2, 7, 8])
, Scale "Down Phrygian"
(2 :| [7, 8, 7, 2, 8, 7])
, Scale "Down Dorian"
(7 :| [2, 8, 7, 7, 2, 8])
, Scale "Down Locrian"
(2 :| [8, 7, 3, 6, 8, 7])
]
, _tunings =
[
Tuning "Standard Tuning" "Kite Guitar"
(0 :| [13, 26, 39, 52, 65]) 1
]
},
Temperament {
_temperamentName = "Bohlen Pierce"
, _divisions = 13
, _period = 3 % 1
, _notationSystems =
[
NotationSystem "Standard" [
"A","A#","B","C","C#","D","E","F","F#","G","H","H#","J"
]
]
, _chords =
[
]
, _scales =
[
Scale "Lambda"
(2 :| [1, 1, 2, 1, 2, 1, 2, 1])
, Scale "Moll 1"
(1 :| [2,1,2,1,2,1,2,1])
, Scale "Harmonic"
(1 :| [2,1,2,1,2,1,1,2])
, Scale "Dur I"
(1 :| [2,1,2,1,1,2,1,2])
, Scale "Moll 2"
(2 :| [1,2,1,1,2,1,2,1])
, Scale "Dur II"
(2 :| [1,1,2,1,2,1,1,2])
, Scale "Gamma"
(1 :| [2,1,2,1,1,2,2,1])
, Scale "Walker A"
(1 :| [1,2,1,2,1,2,1,2])
, Scale "Walker B"
(1 :| [2,1,1,2,1,2,1,2])
, Scale "Walker I"
(2 :| [1,2,1,2,1,2,1,1])
, Scale "Walker II"
(2 :| [1,2,1,2,1,1,2,1])
, Scale "Sirius[6]"
(2 :| [2,2,2,2,3])
, Scale "Sirius[7]"
(2 :| [2,2,2,2,2,1])
, Scale "Canopus[7]"
(3 :| [1,3,1,3,1,1])
, Scale "Arcturus[5]"
(5 :| [1,5,1,1])
, Scale "Arcturus[7]"
(4 :| [1,1,4,1,1,1])
]
, _tunings =
[
Tuning "Bohlen's Tuning" "Six String Guitar"
(0 :| [3,6,9,13,16]) 0
]
}
]
, _preferences = defaultPreferences
} |
948f24cf0f4b51cfa7d48f674a91944413d929072072f993589940aefc44736e | exoscale/clojure-kubernetes-client | v1_priority_class.clj | (ns clojure-kubernetes-client.specs.v1-priority-class
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-object-meta :refer :all]
)
(:import (java.io File)))
(declare v1-priority-class-data v1-priority-class)
(def v1-priority-class-data
{
(ds/opt :apiVersion) string?
(ds/opt :description) string?
(ds/opt :globalDefault) boolean?
(ds/opt :kind) string?
(ds/opt :metadata) v1-object-meta
(ds/req :value) int?
})
(def v1-priority-class
(ds/spec
{:name ::v1-priority-class
:spec v1-priority-class-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/v1_priority_class.clj | clojure | (ns clojure-kubernetes-client.specs.v1-priority-class
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-object-meta :refer :all]
)
(:import (java.io File)))
(declare v1-priority-class-data v1-priority-class)
(def v1-priority-class-data
{
(ds/opt :apiVersion) string?
(ds/opt :description) string?
(ds/opt :globalDefault) boolean?
(ds/opt :kind) string?
(ds/opt :metadata) v1-object-meta
(ds/req :value) int?
})
(def v1-priority-class
(ds/spec
{:name ::v1-priority-class
:spec v1-priority-class-data}))
| |
21d54a68b08b0397e19af1c7064a4722bf09d92f898026ed25a132aa90e0030a | zerokarmaleft/tapl-haskell | Typechecker.hs | module Simplebool.Typechecker where
import Simplebool.Context
import Simplebool.Syntax
data TypeError = IfArmsTypeMismatch
| IfGuardNotBool
| ArrowParamTypeMismatch
| AppArrowTypeExpected
| VarTypeErrorWat
deriving (Eq, Show)
typeOf :: Context -> Term -> Either TypeError Type
typeOf _ TermTrue = Right TypeBool
typeOf _ TermFalse = Right TypeBool
typeOf ctx (TermIf t1 t2 t3) =
if typeOf ctx t1 == Right TypeBool
then if typeOf ctx t2 == typeOf ctx t3
then typeOf ctx t2
else Left IfArmsTypeMismatch
else Left IfGuardNotBool
typeOf ctx (TermVar x _) =
case getType x ctx of
Just (VarBinding tyT) -> Right tyT
_ -> Left VarTypeErrorWat
typeOf ctx (TermAbs x tyT1 t2) =
let ctx' = addBinding (x,VarBinding tyT1) ctx
tyT2 = typeOf ctx' t2
in case tyT2 of
Right tyT2' -> Right $ TypeArrow tyT1 tyT2'
Left tyErrT2 -> Left tyErrT2
typeOf ctx (TermApp t1 t2) =
let tyT1 = typeOf ctx t1
tyT2 = typeOf ctx t2
in case tyT1 of
Right (TypeArrow tyT11 tyT12) ->
if tyT2 == Right tyT11
then Right tyT12
else Left ArrowParamTypeMismatch
_ -> Left AppArrowTypeExpected
| null | https://raw.githubusercontent.com/zerokarmaleft/tapl-haskell/7fced6ab1d3d4a7bbe905902ee0443b4c535c306/simplebool/src/Simplebool/Typechecker.hs | haskell | module Simplebool.Typechecker where
import Simplebool.Context
import Simplebool.Syntax
data TypeError = IfArmsTypeMismatch
| IfGuardNotBool
| ArrowParamTypeMismatch
| AppArrowTypeExpected
| VarTypeErrorWat
deriving (Eq, Show)
typeOf :: Context -> Term -> Either TypeError Type
typeOf _ TermTrue = Right TypeBool
typeOf _ TermFalse = Right TypeBool
typeOf ctx (TermIf t1 t2 t3) =
if typeOf ctx t1 == Right TypeBool
then if typeOf ctx t2 == typeOf ctx t3
then typeOf ctx t2
else Left IfArmsTypeMismatch
else Left IfGuardNotBool
typeOf ctx (TermVar x _) =
case getType x ctx of
Just (VarBinding tyT) -> Right tyT
_ -> Left VarTypeErrorWat
typeOf ctx (TermAbs x tyT1 t2) =
let ctx' = addBinding (x,VarBinding tyT1) ctx
tyT2 = typeOf ctx' t2
in case tyT2 of
Right tyT2' -> Right $ TypeArrow tyT1 tyT2'
Left tyErrT2 -> Left tyErrT2
typeOf ctx (TermApp t1 t2) =
let tyT1 = typeOf ctx t1
tyT2 = typeOf ctx t2
in case tyT1 of
Right (TypeArrow tyT11 tyT12) ->
if tyT2 == Right tyT11
then Right tyT12
else Left ArrowParamTypeMismatch
_ -> Left AppArrowTypeExpected
| |
926290cf31c5c7c9f7b3769947bb1697f1ba438b90ddc390d8a2d8e914f44e22 | PapenfussLab/bioshake | SomaticSniper.hs | {-# LANGUAGE FlexibleContexts #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Bioshake.Cluster.SomaticSniper where
import Bioshake
import Bioshake.Cluster.Torque
import Bioshake.Internal.SomaticSniper
import Bioshake.TH
import Data.List
import Development.Shake
import Development.Shake.FilePath
$(makeSingleCluster ''CallSomatic [''IsBam,''Referenced] 'buildSomaticSniper)
| null | https://raw.githubusercontent.com/PapenfussLab/bioshake/afeb7219b171e242b6e9bb9e99e2f80c0a099aff/Bioshake/Cluster/SomaticSniper.hs | haskell | # LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeOperators # | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
module Bioshake.Cluster.SomaticSniper where
import Bioshake
import Bioshake.Cluster.Torque
import Bioshake.Internal.SomaticSniper
import Bioshake.TH
import Data.List
import Development.Shake
import Development.Shake.FilePath
$(makeSingleCluster ''CallSomatic [''IsBam,''Referenced] 'buildSomaticSniper)
|
faadb6519f4f98bfa05f9764d97e7cab9c268dffd6d62ac7052eed84a2d29646 | mfikes/fifth-postulate | ns33.cljs | (ns fifth-postulate.ns33)
(defn solve-for01
  "Return, as a lazy seq of 4-element lists, the values at every
  strictly increasing index quadruple of vector xs whose four
  elements sum to v.  Brute force, O(n^4)."
  [xs v]
  ;; Hoist (count xs) out of the range clauses instead of recomputing
  ;; it for every binding of every loop level.
  (let [n (count xs)]
    (for [ndx0 (range 0 (- n 3))
          ndx1 (range (inc ndx0) (- n 2))
          ndx2 (range (inc ndx1) (- n 1))
          ndx3 (range (inc ndx2) n)
          :when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
      (list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))))
(defn solve-for02
  "All value quadruples (i<j<k<l by index) from vector xs summing to v."
  [xs v]
  (for [i (range 0 (- (count xs) 3))
        j (range (inc i) (- (count xs) 2))
        k (range (inc j) (- (count xs) 1))
        l (range (inc k) (count xs))
        :let [quad (list (xs i) (xs j) (xs k) (xs l))]
        :when (= v (apply + quad))]
    quad))
(defn solve-for03 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for04 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for05 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for06 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for07 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for08 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for09 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for10 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for11 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for12 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for13 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for14 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for15 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for16 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for17 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for18 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for19 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
| null | https://raw.githubusercontent.com/mfikes/fifth-postulate/22cfd5f8c2b4a2dead1c15a96295bfeb4dba235e/src/fifth_postulate/ns33.cljs | clojure | (ns fifth-postulate.ns33)
(defn solve-for01 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for02 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for03 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for04 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for05 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for06 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for07 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for08 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for09 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for10 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for11 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for12 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for13 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for14 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for15 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for16 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for17 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for18 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for19 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
| |
ab1145cc3b23c1e5da02c21d88cc95f611a6a3464d43488bcb8d7b87a5af3c64 | VictorNicollet/Ohm | config.ml | Ohm is © 2012
open BatPervasives
open Common
(* Print to stdout an Apache <VirtualHost *:80> block for this project.
   The server name is "<basename of Path.root>.local"; requests whose
   target file does not exist under Path.www are rewritten to the
   FastCGI /server binary.  [args] is accepted but currently unused. *)
let apache_vhost args =
  let name = Filename.basename Path.root in
  let domain = name ^ ".local" in
  (* %% escapes a literal '%' so Apache sees %{REQUEST_FILENAME};
     the five %s slots are filled by domain and four uses of Path.www. *)
  Printf.printf
    "<VirtualHost *:80>
  ServerName %s
  FastCgiServer %s/server
  RewriteEngine On
  RewriteCond %s%%{REQUEST_FILENAME} !-f
  RewriteRule .* /server [L,QSA]
  DocumentRoot %s
  <Directory %s>
    AllowOverride None
    Order allow,deny
    allow from all
  </Directory>
  ErrorDocument 500 /500.htm
</VirtualHost>\n"
    domain Path.www Path.www Path.www Path.www
(* Print to stdout an nginx server block for this project.
   Serves static files from Path.www and falls back to the FastCGI
   unix socket at Path.www/socket for everything else.
   [args] is accepted but currently unused. *)
let nginx_vhost args =
  let name = Filename.basename Path.root in
  let domain = name ^ ".local" in
  Printf.printf "server {
  listen 80;
  server_name %s;
  root %s;
  location @fastcgi {
    include /etc/nginx/fastcgi_params;
    fastcgi_pass unix:%s/socket;
  }
  location / {
    try_files $uri @fastcgi;
  }
}"
    domain Path.www Path.www
(* Dispatch the "vhost" sub-command: the first CLI token selects the
   web-server flavour ("apache" or "nginx"); remaining tokens are
   forwarded (and currently ignored by both generators).  Anything
   else prints a usage hint. *)
let vhost = function
  | "apache" :: args -> apache_vhost args
  | "nginx" :: args -> nginx_vhost args
  | _ -> print_endline "Unknown vhost target, specify 'apache' or 'nginx'"
| null | https://raw.githubusercontent.com/VictorNicollet/Ohm/ca90c162f6c49927c893114491f29d44aaf71feb/tool/config.ml | ocaml | Ohm is © 2012
open BatPervasives
open Common
let apache_vhost args =
let name = Filename.basename Path.root in
let domain = name ^ ".local" in
Printf.printf
"<VirtualHost *:80>
ServerName %s
FastCgiServer %s/server
RewriteEngine On
RewriteCond %s%%{REQUEST_FILENAME} !-f
RewriteRule .* /server [L,QSA]
DocumentRoot %s
<Directory %s>
AllowOverride None
Order allow,deny
allow from all
</Directory>
ErrorDocument 500 /500.htm
</VirtualHost>\n"
domain Path.www Path.www Path.www Path.www
let nginx_vhost args =
let name = Filename.basename Path.root in
let domain = name ^ ".local" in
Printf.printf "server {
listen 80;
server_name %s;
root %s;
location @fastcgi {
include /etc/nginx/fastcgi_params;
fastcgi_pass unix:%s/socket;
}
location / {
try_files $uri @fastcgi;
}
}"
domain Path.www Path.www
let vhost = function
| "apache" :: args -> apache_vhost args
| "nginx" :: args -> nginx_vhost args
| _ -> print_endline "Unknown vhost target, specify 'apache' or 'nginx'"
| |
818ce259ef98d50700bcacc5826b9306d1bdcfd8f3edc511e9d59b4f171105f8 | logseq/logseq | api.cljs | (ns ^:no-doc logseq.api
(:require [camel-snake-kebab.core :as csk]
[cljs-bean.core :as bean]
[cljs.reader]
[clojure.string :as string]
[clojure.walk :as walk]
[datascript.core :as d]
[electron.ipc :as ipc]
[frontend.commands :as commands]
[frontend.components.plugins :as plugins]
[frontend.config :as config]
[frontend.db :as db]
[frontend.db.model :as db-model]
[frontend.db.query-dsl :as query-dsl]
[frontend.db.utils :as db-utils]
[frontend.db.react :refer [sub-key-value]]
[frontend.db.query-react :as query-react]
[frontend.fs :as fs]
[frontend.handler.dnd :as editor-dnd-handler]
[frontend.handler.editor :as editor-handler]
[frontend.handler.export :as export-handler]
[frontend.handler.notification :as notification]
[frontend.handler.page :as page-handler]
[frontend.handler.plugin :as plugin-handler]
[frontend.handler.common.plugin :as plugin-common-handler]
[frontend.modules.outliner.core :as outliner]
[frontend.modules.outliner.tree :as outliner-tree]
[frontend.handler.command-palette :as palette-handler]
[frontend.modules.shortcut.core :as st]
[electron.listener :as el]
[frontend.state :as state]
[frontend.util :as util]
[frontend.util.cursor :as cursor]
[frontend.loader :as loader]
[goog.dom :as gdom]
[lambdaisland.glogi :as log]
[promesa.core :as p]
[reitit.frontend.easy :as rfe]
[sci.core :as sci]
[frontend.version :as fv]
[frontend.handler.shell :as shell]
[frontend.modules.layout.core]
[frontend.handler.code :as code-handler]
[frontend.handler.search :as search-handler]))
;; helpers
(defn- normalize-keyword-for-json
  "Walk input and make it JSON-friendly: keywords become their name
  (camelCased unless camel-case? is false) and UUIDs become strings.
  Returns nil for nil input."
  ([input] (normalize-keyword-for-json input true))
  ([input camel-case?]
   (when input
     (walk/postwalk
      (fn [x]
        (cond
          (keyword? x) (let [s (name x)]
                         (if camel-case? (csk/->camelCase s) s))
          (uuid? x)    (str x)
          :else        x))
      input))))
(defn- uuid-or-throw-error
  "Coerce s to a UUID: pass UUIDs through, parse UUID strings, and
  throw a js/Error for anything else."
  [s]
  (if (uuid? s)
    s
    (if (util/uuid-string? s)
      (uuid s)
      (throw (js/Error. (str s " is not a valid UUID string."))))))
(defn- parse-hiccup-ui
  "Evaluate a hiccup string with sci (termination-safe preset).
  Returns nil for non-string input; on eval failure logs the error
  and returns the raw input string unchanged."
  [input]
  (when (string? input)
    (try
      (sci/eval-string input {:preset :termination-safe})
      (catch :default e
        (js/console.error "[parse hiccup error]" e)
        input))))
(defn ^:export install-plugin-hook
[pid hook ^js opts]
(state/install-plugin-hook pid hook (bean/->clj opts)))
(defn ^:export uninstall-plugin-hook
[pid hook-or-all]
(state/uninstall-plugin-hook pid hook-or-all))
(defn ^:export should-exec-plugin-hook
[pid hook]
(plugin-handler/plugin-hook-installed? pid hook))
;; base
(defn ^:export get_state_from_store
[^js path]
(when-let [path (if (string? path) [path] (bean/->clj path))]
(some->> path
(map #(if (string/starts-with? % "@")
(subs % 1)
(keyword %)))
(get-in @state/state)
(normalize-keyword-for-json)
(bean/->js))))
(defn ^:export set_state_from_store
[^js path ^js value]
(when-let [path (if (string? path) [path] (bean/->clj path))]
(some->> path
(map #(if (string/starts-with? % "@")
(subs % 1)
(keyword %)))
(into [])
(#(state/set-state! % (bean/->clj value))))))
(defn ^:export get_app_info
;; get app base info
[]
(bean/->js
(normalize-keyword-for-json
{:version fv/version})))
(def ^:export get_user_configs
(fn []
(bean/->js
(normalize-keyword-for-json
{:preferred-language (:preferred-language @state/state)
:preferred-theme-mode (:ui/theme @state/state)
:preferred-format (state/get-preferred-format)
:preferred-workflow (state/get-preferred-workflow)
:preferred-todo (state/get-preferred-todo)
:preferred-date-format (state/get-date-formatter)
:preferred-start-of-week (state/get-start-of-week)
:current-graph (state/get-current-repo)
:show-brackets (state/show-brackets?)
:enabled-journals (state/enable-journals?)
:enabled-flashcards (state/enable-flashcards?)
:me (state/get-me)}))))
(def ^:export get_current_graph_configs
(fn []
(some-> (state/get-config)
(normalize-keyword-for-json)
(bean/->js))))
(def ^:export get_current_graph_favorites
  ;; Return the current graph's favorite page names as a JS array,
  ;; keeping only non-blank strings (nil when no config).
  ;; Filter for string? BEFORE string/blank? so a non-string entry in
  ;; :favorites cannot make string/blank? throw.
  (fn []
    (some->> (:favorites (state/get-config))
             (filter string?)
             (remove string/blank?)
             (bean/->js))))
(def ^:export get_current_graph_recent
  ;; Return the recently visited page names (:recent/pages) as a JS
  ;; array, keeping only non-blank strings.
  ;; Filter for string? BEFORE string/blank? so a non-string entry
  ;; cannot make string/blank? throw.
  (fn []
    (some->> (sub-key-value :recent/pages)
             (filter string?)
             (remove string/blank?)
             (bean/->js))))
(def ^:export get_current_graph
(fn []
(when-let [repo (state/get-current-repo)]
(when-not (= config/local-repo repo)
(bean/->js {:url repo
:name (util/node-path.basename repo)
:path (config/get-repo-dir repo)})))))
(def ^:export show_themes
(fn []
(plugins/open-select-theme!)))
(def ^:export set_theme_mode
(fn [mode]
(state/set-theme-mode! mode)))
(def ^:export load_plugin_config
(fn [path]
(fs/read-file "" (util/node-path.join path "package.json"))))
(def ^:export load_plugin_readme
(fn [path]
(fs/read-file "" (util/node-path.join path "readme.md"))))
(def ^:export save_plugin_config
(fn [path ^js data]
(let [repo ""
path (util/node-path.join path "package.json")]
(fs/write-file! repo "" path (js/JSON.stringify data nil 2) {:skip-compare? true}))))
(def ^:export save_focused_code_editor_content
(fn []
(code-handler/save-code-editor!)))
(defn ^:private write_rootdir_file
[file content sub-root root-dir]
(p/let [repo ""
path (util/node-path.join root-dir sub-root)
exist? (fs/file-exists? path "")
_ (when-not exist? (fs/mkdir-recur! path))
user-path (util/node-path.join path file)
sub-dir? (string/starts-with? user-path path)
_ (when-not sub-dir?
(log/info :debug user-path)
(throw "write file denied"))
user-path-root (util/node-path.dirname user-path)
exist? (fs/file-exists? user-path-root "")
_ (when-not exist? (fs/mkdir-recur! user-path-root))
_ (fs/write-file! repo "" user-path content {:skip-compare? true})]
user-path))
(defn ^:private write_dotdir_file
[file content sub-root]
(some-> (plugin-handler/get-ls-dotdir-root)
(p/then #(write_rootdir_file file content sub-root %))))
(defn ^:private write_assetsdir_file
[file content sub-root]
(if-let [assets-dir (config/get-current-repo-assets-root)]
(write_rootdir_file file content sub-root assets-dir)
false))
(defn ^:private read_rootdir_file
[file sub-root root-dir]
(p/let [path (util/node-path.join root-dir sub-root)
user-path (util/node-path.join path file)
sub-dir? (string/starts-with? user-path path)
_ (when-not sub-dir? (log/info :debug user-path) (throw "read file denied"))
exist? (fs/file-exists? "" user-path)
_ (when-not exist? (log/info :debug user-path) (throw "file not existed"))
content (fs/read-file "" user-path)]
content))
(defn ^:private read_dotdir_file
[file sub-root]
(some-> (plugin-handler/get-ls-dotdir-root)
(p/then #(read_rootdir_file file sub-root %))))
(defn ^:private read_assetsdir_file
[file sub-root]
(when-let [root-dir (config/get-current-repo-assets-root)]
(read_rootdir_file file sub-root root-dir)))
(defn ^:private unlink_rootdir_file!
[file sub-root root-dir]
(p/let [repo ""
path (util/node-path.join root-dir sub-root)
user-path (util/node-path.join path file)
sub-dir? (string/starts-with? user-path path)
_ (when-not sub-dir? (log/info :debug user-path) (throw "access file denied"))
exist? (fs/file-exists? "" user-path)
_ (when-not exist? (log/info :debug user-path) (throw "file not existed"))
_ (fs/unlink! repo user-path {})]))
(defn ^:private unlink_dotdir_file!
[file sub-root]
(some-> (plugin-handler/get-ls-dotdir-root)
(p/then #(unlink_rootdir_file! file sub-root %))))
(defn ^:private unlink_assetsdir_file!
[file sub-root]
(when-let [root-dir (config/get-current-repo-assets-root)]
(unlink_rootdir_file! file sub-root root-dir)))
(def ^:export write_user_tmp_file
(fn [file content]
(write_dotdir_file file content "tmp")))
(def ^:export write_plugin_storage_file
(fn [plugin-id file content assets?]
(let [plugin-id (util/node-path.basename plugin-id)
sub-root (util/node-path.join "storages" plugin-id)]
(if (true? assets?)
(write_assetsdir_file file content sub-root)
(write_dotdir_file file content sub-root)))))
(def ^:export read_plugin_storage_file
(fn [plugin-id file assets?]
(let [plugin-id (util/node-path.basename plugin-id)
sub-root (util/node-path.join "storages" plugin-id)]
(if (true? assets?)
(read_assetsdir_file file sub-root)
(read_dotdir_file file sub-root)))))
(def ^:export unlink_plugin_storage_file
(fn [plugin-id file assets?]
(let [plugin-id (util/node-path.basename plugin-id)
sub-root (util/node-path.join "storages" plugin-id)]
(if (true? assets?)
(unlink_assetsdir_file! file sub-root)
(unlink_dotdir_file! file sub-root)))))
(def ^:export exist_plugin_storage_file
(fn [plugin-id file assets?]
(p/let [root (if (true? assets?)
(config/get-current-repo-assets-root)
(plugin-handler/get-ls-dotdir-root))
plugin-id (util/node-path.basename plugin-id)
exist? (fs/file-exists?
(util/node-path.join root "storages" plugin-id)
file)]
exist?)))
(def ^:export clear_plugin_storage_files
(fn [plugin-id assets?]
(p/let [root (if (true? assets?)
(config/get-current-repo-assets-root)
(plugin-handler/get-ls-dotdir-root))
plugin-id (util/node-path.basename plugin-id)]
(fs/rmdir! (util/node-path.join root "storages" plugin-id)))))
(def ^:export list_plugin_storage_files
(fn [plugin-id assets?]
(p/let [root (if (true? assets?)
(config/get-current-repo-assets-root)
(plugin-handler/get-ls-dotdir-root))
plugin-id (util/node-path.basename plugin-id)
files-path (util/node-path.join root "storages" plugin-id)
^js files (ipc/ipc :listdir files-path)]
(when (js-iterable? files)
(bean/->js
(map #(some-> (string/replace-first % files-path "")
(string/replace #"^/+" "")) files))))))
(def ^:export load_user_preferences
(fn []
(p/let [repo ""
path (plugin-handler/get-ls-dotdir-root)
path (util/node-path.join path "preferences.json")
_ (fs/create-if-not-exists repo "" path)
json (fs/read-file "" path)
json (if (string/blank? json) "{}" json)]
(js/JSON.parse json))))
(def ^:export save_user_preferences
(fn [^js data]
(when data
(p/let [repo ""
path (plugin-handler/get-ls-dotdir-root)
path (util/node-path.join path "preferences.json")]
(fs/write-file! repo "" path (js/JSON.stringify data nil 2) {:skip-compare? true})))))
(def ^:export load_plugin_user_settings
;; results [path data]
(plugin-handler/make-fn-to-load-dotdir-json "settings" "{}"))
(def ^:export save_plugin_user_settings
(fn [key ^js data]
((plugin-handler/make-fn-to-save-dotdir-json "settings")
key (js/JSON.stringify data nil 2))))
(def ^:export unlink_plugin_user_settings
(plugin-handler/make-fn-to-unlink-dotdir-json "settings"))
(def ^:export register_plugin_slash_command
(fn [pid ^js cmd-actions]
(when-let [[cmd actions] (bean/->clj cmd-actions)]
(plugin-handler/register-plugin-slash-command
pid [cmd (mapv #(into [(keyword (first %))]
(rest %)) actions)]))))
(def ^:export register_plugin_simple_command
(fn [pid ^js cmd-action palette?]
(when-let [[cmd action] (bean/->clj cmd-action)]
(let [action (assoc action 0 (keyword (first action)))
cmd (assoc cmd :key (string/replace (:key cmd) ":" "-"))
key (:key cmd)
keybinding (:keybinding cmd)
palette-cmd (and palette? (plugin-handler/simple-cmd->palette-cmd pid cmd action))
action' #(state/pub-event! [:exec-plugin-cmd {:type type :key key :pid pid :cmd cmd :action action}])]
;; handle simple commands
(plugin-handler/register-plugin-simple-command pid cmd action)
;; handle palette commands
(when palette?
(palette-handler/register palette-cmd))
handle commands
(when-let [shortcut-args (and keybinding (plugin-handler/simple-cmd-keybinding->shortcut-args pid key keybinding))]
(let [dispatch-cmd (fn [_e]
(if palette?
(palette-handler/invoke-command palette-cmd)
(action')))
[handler-id id shortcut-map] (update shortcut-args 2 assoc :fn dispatch-cmd)]
(js/console.debug :shortcut/register-shortcut [handler-id id shortcut-map])
(st/register-shortcut! handler-id id shortcut-map)))))))
(defn ^:export unregister_plugin_simple_command
[pid]
;; remove simple commands
(plugin-handler/unregister-plugin-simple-command pid)
;; remove palette commands
(let [palette-matched (->> (palette-handler/get-commands)
(filter #(string/includes? (str (:id %)) (str "plugin." pid))))]
(when (seq palette-matched)
(doseq [cmd palette-matched]
(palette-handler/unregister (:id cmd))
remove commands
(when (seq (:shortcut cmd))
(js/console.debug :shortcut/unregister-shortcut cmd)
(st/unregister-shortcut! (:handler-id cmd) (:id cmd)))))))
(defn ^:export register_search_service
[pid name ^js opts]
(plugin-handler/register-plugin-search-service pid name (bean/->clj opts)))
(defn ^:export unregister_search_services
[pid]
(plugin-handler/unregister-plugin-search-services pid))
(def ^:export register_plugin_ui_item
(fn [pid type ^js opts]
(when-let [opts (bean/->clj opts)]
(plugin-handler/register-plugin-ui-item
pid (assoc opts :type type)))))
;; app
(def ^:export relaunch
(fn []
(ipc/ipc "relaunchApp")))
(def ^:export quit
(fn []
(ipc/ipc "quitApp")))
(def ^:export open_external_link
(fn [url]
(when (re-find #"https?://" url)
(js/apis.openExternal url))))
(def ^:export invoke_external_command
(fn [type & args]
(when-let [id (and (string/starts-with? type "logseq.")
(-> (string/replace type #"^logseq." "")
(util/safe-lower-case)
(keyword)))]
(when-let [action (get-in (palette-handler/get-commands-unique) [id :action])]
(apply action args)))))
;; flag - boolean | 'toggle'
(def ^:export set_left_sidebar_visible
(fn [flag]
(if (= flag "toggle")
(state/toggle-left-sidebar!)
(state/set-state! :ui/left-sidebar-open? (boolean flag)))
nil))
;; flag - boolean | 'toggle'
(def ^:export set_right_sidebar_visible
(fn [flag]
(if (= flag "toggle")
(state/toggle-sidebar-open?!)
(state/set-state! :ui/sidebar-open? (boolean flag)))
nil))
(def ^:export clear_right_sidebar_blocks
(fn [^js opts]
(state/clear-sidebar-blocks!)
(when-let [opts (and opts (bean/->clj opts))]
(and (:close opts) (state/hide-right-sidebar!)))
nil))
(def ^:export push_state
(fn [^js k ^js params ^js query]
(rfe/push-state
(keyword k)
(bean/->clj params)
(bean/->clj query))))
(def ^:export replace_state
(fn [^js k ^js params ^js query]
(rfe/replace-state
(keyword k)
(bean/->clj params)
(bean/->clj query))))
(defn ^:export get_external_plugin
[pid]
(when-let [^js pl (plugin-handler/get-plugin-inst pid)]
(.toJSON pl)))
(defn ^:export invoke_external_plugin_cmd
[pid cmd-group cmd-key cmd-args]
(case (keyword cmd-group)
:models
(plugin-handler/call-plugin-user-model! pid cmd-key cmd-args)
:commands
(plugin-handler/call-plugin-user-command! pid cmd-key cmd-args)))
;; editor
(def ^:export check_editing
(fn []
(if (state/get-edit-input-id)
(str (:block/uuid (state/get-edit-block))) false)))
(def ^:export exit_editing_mode
(fn [select?]
(editor-handler/escape-editing select?)
nil))
(def ^:export insert_at_editing_cursor
(fn [content]
(when-let [input-id (state/get-edit-input-id)]
(commands/simple-insert! input-id content {})
(when-let [input (gdom/getElement input-id)]
(.focus input)))))
(def ^:export restore_editing_cursor
(fn []
(when-let [input-id (state/get-edit-input-id)]
(when-let [input (gdom/getElement input-id)]
(.focus input)))))
(def ^:export get_editing_cursor_position
(fn []
(when-let [input-id (state/get-edit-input-id)]
(bean/->js (normalize-keyword-for-json (cursor/get-caret-pos (gdom/getElement input-id)))))))
(def ^:export get_editing_block_content
(fn []
(state/get-edit-content)))
(def ^:export get_selected_blocks
(fn []
(when-let [blocks (and (state/in-selection-mode?)
(seq (state/get-selection-blocks)))]
(let [blocks (->> blocks
(map (fn [^js el] (some-> (.getAttribute el "blockid")
(db-model/query-block-by-uuid)))))]
(bean/->js (normalize-keyword-for-json blocks))))))
(def ^:export get_current_page
(fn []
(when-let [page (state/get-current-page)]
(when-let [page (db-model/get-page page)]
(bean/->js (normalize-keyword-for-json (db-utils/pull (:db/id page))))))))
(def ^:export get_page
(fn [id-or-page-name]
(when-let [page (cond
(number? id-or-page-name) (db-utils/pull id-or-page-name)
(string? id-or-page-name) (db-model/get-page id-or-page-name))]
(when-not (contains? page :block/left)
(bean/->js (normalize-keyword-for-json (db-utils/pull (:db/id page))))))))
(def ^:export get_all_pages
(fn [repo]
(let [pages (page-handler/get-all-pages repo)]
(bean/->js (normalize-keyword-for-json pages)))))
(def ^:export create_page
(fn [name ^js properties ^js opts]
(some-> (if-let [page (db-model/get-page name)]
page
(let [properties (bean/->clj properties)
{:keys [redirect createFirstBlock format journal]} (bean/->clj opts)
name (page-handler/create!
name
{:redirect? (if (boolean? redirect) redirect true)
:journal? journal
:create-first-block? (if (boolean? createFirstBlock) createFirstBlock true)
:format format
:properties properties})]
(db-model/get-page name)))
(:db/id)
(db-utils/pull)
(normalize-keyword-for-json)
(bean/->js))))
(def ^:export delete_page
(fn [name]
(p/create (fn [ok] (page-handler/delete! name ok)))))
(def ^:export rename_page
page-handler/rename!)
(defn ^:export open_in_right_sidebar
[block-uuid]
(editor-handler/open-block-in-sidebar! (uuid-or-throw-error block-uuid)))
(defn ^:export new_block_uuid []
(str (db/new-block-id)))
(def ^:export select_block
(fn [block-uuid]
(when-let [block (db-model/get-block-by-uuid (uuid-or-throw-error block-uuid))]
(editor-handler/select-block! (:block/uuid block)) nil)))
(def ^:export edit_block
(fn [block-uuid ^js opts]
(when-let [block-uuid (and block-uuid (uuid-or-throw-error block-uuid))]
(when-let [block (db-model/query-block-by-uuid block-uuid)]
(let [{:keys [pos] :or {pos :max}} (bean/->clj opts)]
(editor-handler/edit-block! block pos block-uuid))))))
;; Inserts a new block with `content` relative to a target block (uuid string) or
;; onto a page (by name, created on demand). opts: before / sibling / focus /
;; customUUID / properties. Returns the new block as a JS object.
(def ^:export insert_block
  (fn [block-uuid-or-page-name content ^js opts]
    (when (string/blank? block-uuid-or-page-name)
      (throw (js/Error. "Page title or block UUID shouldn't be empty.")))
    (let [{:keys [before sibling focus customUUID properties]} (bean/->clj opts)
          [page-name block-uuid] (if (util/uuid-string? block-uuid-or-page-name)
                                   [nil (uuid block-uuid-or-page-name)]
                                   [block-uuid-or-page-name nil])
          page-name (when page-name (util/page-name-sanity-lc page-name))
          ;; auto-create the target page when it doesn't exist yet
          _ (when (and page-name (not (db/entity [:block/name page-name])))
              (page-handler/create! block-uuid-or-page-name {:create-first-block? false}))
          custom-uuid (or customUUID (:id properties))
          custom-uuid (when custom-uuid (uuid-or-throw-error custom-uuid))
          edit-block? (if (nil? focus) true focus)
          _ (when (and custom-uuid (db-model/query-block-by-uuid custom-uuid))
              (throw (js/Error.
                      (util/format "Custom block UUID already exists (%s)." custom-uuid))))
          ;; "insert before, non-sibling" is rewritten as inserting relative to the
          ;; target's first child (get-by-parent-&-left with parent == left == target)
          block-uuid' (if (and (not sibling) before block-uuid)
                        (let [block (db/entity [:block/uuid block-uuid])
                              first-child (db-model/get-by-parent-&-left (db/get-db)
                                                                         (:db/id block)
                                                                         (:db/id block))]
                          (if first-child
                            (:block/uuid first-child)
                            block-uuid))
                        block-uuid)
          insert-at-first-child? (not= block-uuid' block-uuid)
          [sibling? before?] (if insert-at-first-child?
                               [true true]
                               [sibling before])
          ;; before? is meaningless for a nested (non-sibling) insert
          before? (if (and (false? sibling?) before? (not insert-at-first-child?))
                    false
                    before?)
          new-block (editor-handler/api-insert-new-block!
                     content
                     {:block-uuid block-uuid'
                      :sibling? sibling?
                      :before? before?
                      :edit-block? edit-block?
                      :page page-name
                      :custom-uuid custom-uuid
                      :properties (merge properties
                                         (when custom-uuid {:id custom-uuid}))})]
      (bean/->js (normalize-keyword-for-json new-block)))))
;; Inserts a tree of blocks after the target block. batch-blocks may be a single
;; node or a vector; opts: sibling, keepUUID. Throws when keepUUID is set and a
;; supplied :id already exists in the db. Returns nil.
(def ^:export insert_batch_block
  (fn [block-uuid ^js batch-blocks ^js opts]
    (when-let [block (db-model/query-block-by-uuid (uuid-or-throw-error block-uuid))]
      (when-let [bb (bean/->clj batch-blocks)]
        (let [bb (if-not (vector? bb) (vector bb) bb)
              {:keys [sibling keepUUID]} (bean/->clj opts)
              keep-uuid? (or keepUUID false)
              ;; validate every custom :id up front before inserting anything
              _ (when keep-uuid? (doseq
                                  [block (outliner/tree-vec-flatten bb :children)]
                                   (let [uuid (:id (:properties block))]
                                     (when (and uuid (db-model/query-block-by-uuid (uuid-or-throw-error uuid)))
                                       (throw (js/Error.
                                               (util/format "Custom block UUID already exists (%s)." uuid)))))))
              _ (editor-handler/insert-block-tree-after-target
                 (:db/id block) sibling bb (:block/format block) keep-uuid?)]
          nil)))))
;; Deletes the block and (always) its children; _opts is currently ignored.
(def ^:export remove_block
  (fn [block-uuid ^js _opts]
    (let [includeChildren true
          repo (state/get-current-repo)]
      (editor-handler/delete-block-aux!
       {:block/uuid (uuid-or-throw-error block-uuid) :repo repo} includeChildren)
      nil)))
;; Replaces the block's content. If the block is currently being edited, updates
;; the live editor buffer instead of saving straight to the db.
(def ^:export update_block
  (fn [block-uuid content ^js _opts]
    (let [repo (state/get-current-repo)
          edit-input (state/get-edit-input-id)
          editing? (and edit-input (string/ends-with? edit-input (str block-uuid)))]
      (if editing?
        (state/set-edit-content! edit-input content)
        (editor-handler/save-block! repo (uuid-or-throw-error block-uuid) content))
      nil)))
;; Moves block `src-block-uuid` relative to `target-block-uuid`.
;; opts.before -> place at the target's top (:top); opts.children -> nest under
;; the target (:nested); otherwise the dnd handler's default placement. Returns nil.
(def ^:export move_block
  (fn [src-block-uuid target-block-uuid ^js opts]
    (let [{:keys [before children]} (bean/->clj opts)
          placement (if before
                      :top
                      (when children :nested))
          src (db-model/query-block-by-uuid (uuid-or-throw-error src-block-uuid))
          target (db-model/query-block-by-uuid (uuid-or-throw-error target-block-uuid))]
      (editor-dnd-handler/move-blocks nil [src] target placement)
      nil)))
;; Fetches a block by db id (number) or uuid (string). Pages (entities with
;; :block/name) return nil. opts.includeChildren controls child representation.
(def ^:export get_block
  (fn [id-or-uuid ^js opts]
    (when-let [block (cond
                       (number? id-or-uuid) (db-utils/pull id-or-uuid)
                       (string? id-or-uuid) (db-model/query-block-by-uuid (uuid-or-throw-error id-or-uuid)))]
      (when-not (contains? block :block/name)
        (when-let [uuid (:block/uuid block)]
          (let [{:keys [includeChildren]} (bean/->clj opts)
                repo (state/get-current-repo)
                block (if includeChildren
                        ;; nested children results
                        (first (outliner-tree/blocks->vec-tree
                                (db-model/get-block-and-children repo uuid) uuid))
                        ;; attached shallow children
                        (assoc block :block/children
                               (map #(list :uuid (get-in % [:data :block/uuid]))
                                    (db/get-block-immediate-children repo uuid))))]
            (bean/->js (normalize-keyword-for-json block))))))))
;; Returns the block currently being edited, or the first selected block, or the
;; block under the editing DOM node — resolved via its "blockid" attribute.
(def ^:export get_current_block
  (fn [^js opts]
    (let [block (state/get-edit-block)
          block (or block
                    (some-> (or (first (state/get-selection-blocks))
                                (gdom/getElement (state/get-editing-block-dom-id)))
                            (.getAttribute "blockid")
                            (db-model/get-block-by-uuid)))]
      (get_block (:db/id block) opts))))
;; Previous sibling, or nil when the block is its parent's first child
;; (parent == left means there is no previous sibling).
(def ^:export get_previous_sibling_block
  (fn [block-uuid]
    (when-let [block (db-model/query-block-by-uuid (uuid-or-throw-error block-uuid))]
      (let [{:block/keys [parent left]} block
            block (when-not (= parent left) (db-utils/pull (:db/id left)))]
        (and block (bean/->js (normalize-keyword-for-json block)))))))
;; Next sibling via the outliner's right-sibling walk, or nil.
(def ^:export get_next_sibling_block
  (fn [block-uuid]
    (when-let [block (db-model/query-block-by-uuid (uuid-or-throw-error block-uuid))]
      (when-let [right-siblings (outliner/get-right-siblings (outliner/->Block block))]
        (bean/->js (normalize-keyword-for-json (:data (first right-siblings))))))))
;; Collapses/expands a block. opts may be a bare boolean, the string "toggle",
;; or {:flag <boolean|"toggle">}. Returns nil.
(def ^:export set_block_collapsed
  (fn [block-uuid ^js opts]
    (let [block-uuid (uuid-or-throw-error block-uuid)]
      (when-let [block (db-model/get-block-by-uuid block-uuid)]
        (let [opts (bean/->clj opts)
              ;; accept a scalar and normalize it to the map form
              opts (if (or (string? opts) (boolean? opts)) {:flag opts} opts)
              {:keys [flag]} opts
              flag (if (= "toggle" flag)
                     (not (util/collapsed? block))
                     (boolean flag))]
          (if flag (editor-handler/collapse-block! block-uuid)
              (editor-handler/expand-block! block-uuid))
          nil)))))
;; Sets (or creates) a single property on a block.
(def ^:export upsert_block_property
  (fn [block-uuid key value]
    (editor-handler/set-block-property! (uuid-or-throw-error block-uuid) key value)))
;; Removes a single property from a block.
(def ^:export remove_block_property
  (fn [block-uuid key]
    (editor-handler/remove-block-property! (uuid-or-throw-error block-uuid) key)))
;; Reads one property value; `key` is coerced to a keyword.
(def ^:export get_block_property
  (fn [block-uuid key]
    (when-let [block (db-model/query-block-by-uuid (uuid-or-throw-error block-uuid))]
      (get (:block/properties block) (keyword key)))))
;; Returns the block's full property map as a JS object.
(def ^:export get_block_properties
  (fn [block-uuid]
    (when-let [block (db-model/query-block-by-uuid (uuid-or-throw-error block-uuid))]
      (bean/->js (normalize-keyword-for-json (:block/properties block))))))
;; Returns the current page's blocks as a nested vec-tree (JS objects).
(def ^:export get_current_page_blocks_tree
  (fn []
    (when-let [page (state/get-current-page)]
      (let [blocks (db-model/get-page-blocks-no-cache page)
            blocks (outliner-tree/blocks->vec-tree blocks page)
            ;; clean key
            blocks (normalize-keyword-for-json blocks)]
        (bean/->js blocks)))))
;; Same as above for an arbitrary page; nil when the page doesn't exist.
(def ^:export get_page_blocks_tree
  (fn [page-name]
    (when-let [_ (db-model/get-page page-name)]
      (let [blocks (db-model/get-page-blocks-no-cache page-name)
            blocks (outliner-tree/blocks->vec-tree blocks page-name)
            blocks (normalize-keyword-for-json blocks)]
        (bean/->js blocks)))))
;; Linked references for a page (by name) or a journal/uuid page (by uuid).
;; Returns a JS array, or a falsy value when there are none.
(defn ^:export get_page_linked_references
  [page-name-or-uuid]
  (when-let [page (and page-name-or-uuid (db-model/get-page page-name-or-uuid))]
    (let [page-name (:block/name page)
          ref-blocks (if page-name
                       (db-model/get-page-referenced-blocks-full page-name)
                       (db-model/get-block-referenced-blocks (:block/uuid page)))
          ref-blocks (and (seq ref-blocks) (into [] ref-blocks))]
      (bean/->js (normalize-keyword-for-json ref-blocks)))))
;; Flat list of pages under namespace `ns` in the current repo.
(defn ^:export get_pages_from_namespace
  [ns]
  (when-let [repo (and ns (state/get-current-repo))]
    (when-let [pages (db-model/get-namespace-pages repo ns)]
      (bean/->js (normalize-keyword-for-json pages)))))
;; Hierarchical (tree) view of the same namespace.
(defn ^:export get_pages_tree_from_namespace
  [ns]
  (when-let [repo (and ns (state/get-current-repo))]
    (when-let [pages (db-model/get-namespace-hierarchy repo ns)]
      (bean/->js (normalize-keyword-for-json pages)))))
;; First child of `block` in left-to-right order, or nil.
(defn first-child-of-block
  [block]
  (some-> (:block/_parent block)
          (db-model/sort-by-left block)
          (first)))
;; Second child of `block` in left-to-right order, or nil.
(defn second-child-of-block
  [block]
  (some-> (:block/_parent block)
          (db-model/sort-by-left block)
          (second)))
;; Last child of `block` in left-to-right order, or nil.
(defn last-child-of-block
  [block]
  (some-> (:block/_parent block)
          (db-model/sort-by-left block)
          (last)))
;; Inserts `content` as the first block of a page (or under a block's first
;; child), creating the page if needed. Delegates to insert_block.
(defn ^:export prepend_block_in_page
  [uuid-or-page-name content ^js opts]
  (let [page? (not (util/uuid-string? uuid-or-page-name))
        page-not-exist? (and page? (nil? (db-model/get-page uuid-or-page-name)))
        _ (and page-not-exist? (page-handler/create! uuid-or-page-name
                                                     {:redirect? false
                                                      :create-first-block? true
                                                      :format (state/get-preferred-format)}))]
    (when-let [block (db-model/get-page uuid-or-page-name)]
      ;; for pages, insert before the second child (the first is the page's
      ;; auto-created first block); for blocks, before the first child
      (let [block' (if page? (second-child-of-block block) (first-child-of-block block))
            sibling? (and page? (not (nil? block')))
            opts (bean/->clj opts)
            opts (merge opts {:sibling sibling? :before sibling?})
            src (if sibling? (str (:block/uuid block')) uuid-or-page-name)]
        (insert_block src content (bean/->js opts))))))
;; Appends `content` after the last child of a page/block, creating the page if
;; needed. Delegates to insert_block.
(defn ^:export append_block_in_page
  [uuid-or-page-name content ^js opts]
  (let [page? (not (util/uuid-string? uuid-or-page-name))
        page-not-exist? (and page? (nil? (db-model/get-page uuid-or-page-name)))
        _ (and page-not-exist? (page-handler/create! uuid-or-page-name
                                                     {:redirect? false
                                                      :create-first-block? true
                                                      :format (state/get-preferred-format)}))]
    (when-let [block (db-model/get-page uuid-or-page-name)]
      (let [block' (last-child-of-block block)
            sibling? (not (nil? block'))
            opts (bean/->clj opts)
            opts (merge opts {:sibling sibling?}
                        (when sibling? {:before false}))
            src (if sibling? (str (:block/uuid block')) uuid-or-page-name)]
        (insert_block src content (bean/->js opts))))))
;; plugins
;; Asks the Electron main process to validate external plugin URLs.
(defn ^:export validate_external_plugins [urls]
  (ipc/ipc :validateUserExternalPlugins urls))
;; Installs a marketplace plugin from a manifest; :repo and :id are required.
(def ^:export __install_plugin
  (fn [^js manifest]
    (when-let [{:keys [repo id] :as mft} (bean/->clj manifest)]
      (if-not (and repo id)
        (throw (js/Error. "[required] :repo :id"))
        (plugin-common-handler/install-marketplace-plugin mft)))))
;; db
;; Runs a Logseq DSL query string against the current repo; flattened results.
(defn ^:export q
  [query-string]
  (when-let [repo (state/get-current-repo)]
    (when-let [result (query-dsl/query repo query-string)]
      (bean/->js (normalize-keyword-for-json (flatten @result))))))
;; Raw datascript query. String inputs are read as edn and resolved as query
;; inputs; function inputs are wrapped so their args cross the JS boundary.
(defn ^:export datascript_query
  [query & inputs]
  (when-let [repo (state/get-current-repo)]
    (when-let [db (db/get-db repo)]
      (let [query (cljs.reader/read-string query)
            resolved-inputs (map #(cond
                                    (string? %)
                                    (some->> % (cljs.reader/read-string) (query-react/resolve-input db))
                                    (fn? %)
                                    (fn [& args]
                                      (.apply % nil (clj->js (mapv bean/->js args))))
                                    :else %)
                                 inputs)
            result (apply d/q query db resolved-inputs)]
        ;; false -> keep keyword names as-is (no camelCasing) in the JSON output
        (bean/->js (normalize-keyword-for-json result false))))))
;; Runs an edn custom query (as used by {{query ...}}); flattened results.
(defn ^:export custom_query
  [query-string]
  (let [result (let [query (cljs.reader/read-string query-string)]
                 (db/custom-query {:query query}))]
    (bean/->js (normalize-keyword-for-json (flatten @result)))))
;; Serializes the current graph's datascript db and triggers a browser download
;; via the hidden "download" anchor element.
;; Fix: the previous `(if db (db/db->string db) "")` had a dead else-branch —
;; `db` is guaranteed truthy by the enclosing `when-let`.
(defn ^:export download_graph_db
  []
  (when-let [repo (state/get-current-repo)]
    (when-let [db (db/get-db repo)]
      (let [db-str (db/db->string db)
            data-str (str "data:text/edn;charset=utf-8," (js/encodeURIComponent db-str))]
        (when-let [anchor (gdom/getElement "download")]
          (.setAttribute anchor "href" data-str)
          ;; "/" is not valid in a filename; repo path separators become spaces
          (.setAttribute anchor "download" (str (string/replace repo "/" " ") ".transit"))
          (.click anchor))))))
;; Exports the whole repo's pages as a zip download.
(defn ^:export download_graph_pages
  []
  (when-let [repo (state/get-current-repo)]
    (export-handler/export-repo-as-zip! repo)))
;; Runs a git command in the repo (legacy entry point).
(defn ^:export exec_git_command
  [^js args]
  (when-let [args (and args (seq (bean/->clj args)))]
    (shell/run-git-command! args)))
;; git
;; Runs a git command via the newer shell runner.
(defn ^:export git_exec_command
  [^js args]
  (when-let [args (and args (seq (bean/->clj args)))]
    (shell/run-git-command2! args)))
;; Reads the repo's .gitignore (creating an empty one first if missing);
;; resolves to its content.
(defn ^:export git_load_ignore_file
  []
  (when-let [repo (state/get-current-repo)]
    (p/let [file ".gitignore"
            dir (config/get-repo-dir repo)
            _ (fs/create-if-not-exists repo dir file)
            content (fs/read-file dir file)]
      content)))
;; Overwrites the repo's .gitignore with `content` (must be a string).
(defn ^:export git_save_ignore_file
  [content]
  (when-let [repo (and (string? content) (state/get-current-repo))]
    (p/let [file ".gitignore"
            dir (config/get-repo-dir repo)
            _ (fs/write-file! repo dir file content {:skip-compare? true})])))
;; ui
;; Shows a notification. Strings starting with "[:" are parsed as hiccup.
;; opts: :key (stable id, for replace/close), :timeout (0 = sticky).
;; Returns the notification key as a string.
(defn ^:export show_msg
  ([content] (show_msg content :success nil))
  ([content status] (show_msg content status nil))
  ([content status ^js opts]
   (let [{:keys [key timeout]} (bean/->clj opts)
         hiccup? (and (string? content) (string/starts-with? (string/triml content) "[:"))
         content (if hiccup? (parse-hiccup-ui content) content)
         uid (when (string? key) (keyword key))
         ;; timeout of exactly 0 means "don't auto-clear"
         clear? (not= timeout 0)
         key' (notification/show! content (keyword status) clear? uid timeout)]
     (name key'))))
;; Alias of show_msg under the ui_ prefix.
(defn ^:export ui_show_msg
  [& args]
  (apply show_msg args))
;; Closes a notification previously shown with the given key.
(defn ^:export ui_close_msg
  [key]
  (when (string? key)
    (notification/clear! (keyword key)) nil))
;; assets
;; Lists asset files of the current graph (optionally filtered by extensions),
;; resolved through the Electron main process.
(defn ^:export assets_list_files_of_current_graph
  [^js exts]
  (p/let [files (ipc/ipc :getAssetsFiles {:exts exts})]
    (bean/->js files)))
;; experiments
;; Loads external scripts for a plugin, tracking each one's status
;; (:pending -> :done / :error) in the plugin resource state. Scripts already
;; registered (init? falsy) are skipped.
(defn ^:export exper_load_scripts
  [pid & scripts]
  (when-let [^js _pl (plugin-handler/get-plugin-inst pid)]
    (doseq [s scripts
            :let [upt-status #(state/upt-plugin-resource pid :scripts s :status %)
                  init? (plugin-handler/register-plugin-resources pid :scripts {:key s :src s})]]
      (when init?
        (p/catch
         (p/then
          (do
            (upt-status :pending)
            (loader/load s nil {:attributes {:data-ref (name pid)}}))
          #(upt-status :done))
         #(upt-status :error))))))
;; Registers a fenced-code-block renderer; picks the :edit/:before/:subs/:render
;; callbacks off the JS opts object.
(defn ^:export exper_register_fenced_code_renderer
  [pid type ^js opts]
  (when-let [^js _pl (plugin-handler/get-plugin-inst pid)]
    (plugin-handler/register-fenced-code-renderer
     (keyword pid) type (reduce #(assoc %1 %2 (aget opts (name %2))) {}
                                [:edit :before :subs :render]))))
;; Registers an extensions enhancer function for the plugin.
(defn ^:export exper_register_extensions_enhancer
  [pid type enhancer]
  (when-let [^js _pl (and (fn? enhancer) (plugin-handler/get-plugin-inst pid))]
    (plugin-handler/register-extensions-enhancer
     (keyword pid) type {:enhancer enhancer})))
;; Monotonic counter handing out request ids for exper_request.
(defonce *request-k (volatile! 0))
;; Fires an HTTP request through the main process on behalf of plugin `pid`.
;; Both success and failure are delivered to the plugin via the same callback;
;; returns the request id so the caller can abort it.
;; Fix: replaced the hand-rolled `(vreset! v (inc @v))` read-then-write with the
;; idiomatic single-form `vswap!` (returns the new value, same behavior).
(defn ^:export exper_request
  [pid ^js options]
  (when-let [^js pl (plugin-handler/get-plugin-inst pid)]
    (let [req-id (vswap! *request-k inc)
          req-cb #(plugin-handler/request-callback pl req-id %)]
      (-> (ipc/ipc :httpRequest req-id options)
          (p/then #(req-cb %))
          (p/catch #(req-cb %)))
      req-id)))
;; Aborts an in-flight request started by exper_request.
(defn ^:export http_request_abort
  [req-id]
  (ipc/ipc :httpRequestAbort req-id))
;; search
;; Full-text search across the current graph.
(defn ^:export search
  [q]
  (search-handler/search q))
;; helpers
;; Returns "TAGNAME#id" when an element with `id` exists, nil otherwise.
;; Fix: the previous `(if el … false)` false-branch was unreachable — `when-let`
;; only runs the body when `el` is truthy. (Callers receive nil instead of false
;; in the not-found case either way, as before.)
(defn ^:export query_element_by_id
  [id]
  (when-let [^js el (gdom/getElement id)]
    (str (.-tagName el) "#" id)))
;; Bounding client rect (as a plain JS object) of the first element matching
;; the CSS selector, or nil when nothing matches.
(defn ^:export query_element_rect
  [selector]
  (when-let [^js el (js/document.querySelector selector)]
    (bean/->js (.toJSON (.getBoundingClientRect el)))))
;; Focuses the settings pane of the given plugin and opens the settings UI.
(defn ^:export set_focused_settings
  [pid]
  (when-let [plugin (state/get-plugin-by-id pid)]
    (state/set-state! :plugin/focused-settings pid)
    (state/pub-event! [:go/plugins-settings pid false (or (:name plugin) (:title plugin))])))
;; Forces persistence of the in-memory dbs; resolves to true.
(defn ^:export force_save_graph
  []
  (p/let [_ (el/persist-dbs!)]
    true))
;; Resolves an asset path to a servable URL — direct alias.
(def ^:export make_asset_url editor-handler/make-asset-url)
;; Assigns persistent ids to the given block uuids (strings coerced to UUIDs).
(def ^:export set_blocks_id #(editor-handler/set-blocks-id! (map uuid %)))
;; Debug helper: dumps the whole app state, or one top-level key of it.
(defn ^:export __debug_state
  [path]
  (-> (if (string? path)
        (get @state/state (keyword path))
        @state/state)
      (bean/->js)))
| null | https://raw.githubusercontent.com/logseq/logseq/17f51c7b77c979add2685e0b656ce15a29dc447e/src/main/logseq/api.cljs | clojure | helpers
;; base
;; get app base info
;; results [path data]
;; handle simple commands
;; handle palette commands
;; remove simple commands
;; remove palette commands
;; app
;; flag - boolean | 'toggle'
;; flag - boolean | 'toggle'
;; editor
;; nested children results
;; attached shallow children
;; clean key
;; plugins
;; db
;; git
;; ui
;; assets
;; experiments
;; search
helpers | (ns ^:no-doc logseq.api
(:require [camel-snake-kebab.core :as csk]
[cljs-bean.core :as bean]
[cljs.reader]
[clojure.string :as string]
[clojure.walk :as walk]
[datascript.core :as d]
[electron.ipc :as ipc]
[frontend.commands :as commands]
[frontend.components.plugins :as plugins]
[frontend.config :as config]
[frontend.db :as db]
[frontend.db.model :as db-model]
[frontend.db.query-dsl :as query-dsl]
[frontend.db.utils :as db-utils]
[frontend.db.react :refer [sub-key-value]]
[frontend.db.query-react :as query-react]
[frontend.fs :as fs]
[frontend.handler.dnd :as editor-dnd-handler]
[frontend.handler.editor :as editor-handler]
[frontend.handler.export :as export-handler]
[frontend.handler.notification :as notification]
[frontend.handler.page :as page-handler]
[frontend.handler.plugin :as plugin-handler]
[frontend.handler.common.plugin :as plugin-common-handler]
[frontend.modules.outliner.core :as outliner]
[frontend.modules.outliner.tree :as outliner-tree]
[frontend.handler.command-palette :as palette-handler]
[frontend.modules.shortcut.core :as st]
[electron.listener :as el]
[frontend.state :as state]
[frontend.util :as util]
[frontend.util.cursor :as cursor]
[frontend.loader :as loader]
[goog.dom :as gdom]
[lambdaisland.glogi :as log]
[promesa.core :as p]
[reitit.frontend.easy :as rfe]
[sci.core :as sci]
[frontend.version :as fv]
[frontend.handler.shell :as shell]
[frontend.modules.layout.core]
[frontend.handler.code :as code-handler]
[frontend.handler.search :as search-handler]))
;; Walks a Clojure value and makes it JSON-friendly: keywords -> (optionally
;; camelCased) strings, UUIDs -> strings. camel-case? defaults to true.
(defn- normalize-keyword-for-json
  ([input] (normalize-keyword-for-json input true))
  ([input camel-case?]
   (when input
     (walk/postwalk
      (fn [a]
        (cond
          (keyword? a)
          (cond-> (name a)
            camel-case?
            (csk/->camelCase))
          (uuid? a) (str a)
          :else a)) input))))
;; Coerces `s` to a UUID: accepts a UUID value as-is, or a UUID-formatted
;; string; anything else raises a js/Error.
(defn- uuid-or-throw-error
  [s]
  (if (uuid? s)
    s
    (if (util/uuid-string? s)
      (uuid s)
      (throw (js/Error. (str s " is not a valid UUID string."))))))
;; Evaluates a hiccup string with sci (termination-safe preset); on any error,
;; logs and returns the original string unparsed.
(defn- parse-hiccup-ui
  [input]
  (when (string? input)
    (try
      (sci/eval-string input {:preset :termination-safe})
      (catch :default e
        (js/console.error "[parse hiccup error]" e) input))))
;; Registers a lifecycle hook for a plugin.
(defn ^:export install-plugin-hook
  [pid hook ^js opts]
  (state/install-plugin-hook pid hook (bean/->clj opts)))
;; Removes one hook (or all of them) for a plugin.
(defn ^:export uninstall-plugin-hook
  [pid hook-or-all]
  (state/uninstall-plugin-hook pid hook-or-all))
;; True when the plugin has the given hook installed.
(defn ^:export should-exec-plugin-hook
  [pid hook]
  (plugin-handler/plugin-hook-installed? pid hook))
;; Reads a value from the app state atom. `path` is a key or key vector;
;; segments starting with "@" are kept as raw strings (minus the "@"),
;; everything else becomes a keyword.
(defn ^:export get_state_from_store
  [^js path]
  (when-let [path (if (string? path) [path] (bean/->clj path))]
    (some->> path
             (map #(if (string/starts-with? % "@")
                     (subs % 1)
                     (keyword %)))
             (get-in @state/state)
             (normalize-keyword-for-json)
             (bean/->js))))
;; Writes a value into the app state atom at `path` (same segment rules).
(defn ^:export set_state_from_store
  [^js path ^js value]
  (when-let [path (if (string? path) [path] (bean/->clj path))]
    (some->> path
             (map #(if (string/starts-with? % "@")
                     (subs % 1)
                     (keyword %)))
             (into [])
             (#(state/set-state! % (bean/->clj value))))))
;; App version info.
(defn ^:export get_app_info
  []
  (bean/->js
   (normalize-keyword-for-json
    {:version fv/version})))
;; Snapshot of the user's preferences and current graph.
(def ^:export get_user_configs
  (fn []
    (bean/->js
     (normalize-keyword-for-json
      {:preferred-language (:preferred-language @state/state)
       :preferred-theme-mode (:ui/theme @state/state)
       :preferred-format (state/get-preferred-format)
       :preferred-workflow (state/get-preferred-workflow)
       :preferred-todo (state/get-preferred-todo)
       :preferred-date-format (state/get-date-formatter)
       :preferred-start-of-week (state/get-start-of-week)
       :current-graph (state/get-current-repo)
       :show-brackets (state/show-brackets?)
       :enabled-journals (state/enable-journals?)
       :enabled-flashcards (state/enable-flashcards?)
       :me (state/get-me)}))))
;; The current graph's config.edn as a JS object.
(def ^:export get_current_graph_configs
  (fn []
    (some-> (state/get-config)
            (normalize-keyword-for-json)
            (bean/->js))))
;; Favorite page names of the current graph (non-blank strings only).
(def ^:export get_current_graph_favorites
  (fn []
    (some->> (:favorites (state/get-config))
             (remove string/blank?)
             (filter string?)
             (bean/->js))))
;; Recently visited page names (non-blank strings only).
(def ^:export get_current_graph_recent
  (fn []
    (some->> (sub-key-value :recent/pages)
             (remove string/blank?)
             (filter string?)
             (bean/->js))))
;; {url, name, path} of the current graph; nil for the built-in local demo repo.
(def ^:export get_current_graph
  (fn []
    (when-let [repo (state/get-current-repo)]
      (when-not (= config/local-repo repo)
        (bean/->js {:url repo
                    :name (util/node-path.basename repo)
                    :path (config/get-repo-dir repo)})))))
;; Opens the theme-selection modal.
(def ^:export show_themes
  (fn []
    (plugins/open-select-theme!)))
;; Switches light/dark theme mode.
(def ^:export set_theme_mode
  (fn [mode]
    (state/set-theme-mode! mode)))
;; Reads a plugin's package.json from its install path.
(def ^:export load_plugin_config
  (fn [path]
    (fs/read-file "" (util/node-path.join path "package.json"))))
;; Reads a plugin's readme.md from its install path.
(def ^:export load_plugin_readme
  (fn [path]
    (fs/read-file "" (util/node-path.join path "readme.md"))))
;; Writes a plugin's package.json (pretty-printed, no content comparison).
(def ^:export save_plugin_config
  (fn [path ^js data]
    (let [repo ""
          path (util/node-path.join path "package.json")]
      (fs/write-file! repo "" path (js/JSON.stringify data nil 2) {:skip-compare? true}))))
;; Persists the content of the currently focused code editor.
(def ^:export save_focused_code_editor_content
  (fn []
    (code-handler/save-code-editor!)))
;; Writes `content` to <root-dir>/<sub-root>/<file>, creating directories as
;; needed. Rejects paths that escape the sub-root (e.g. via "../"). Resolves to
;; the absolute path written.
(defn ^:private write_rootdir_file
  [file content sub-root root-dir]
  (p/let [repo ""
          path (util/node-path.join root-dir sub-root)
          exist? (fs/file-exists? path "")
          _ (when-not exist? (fs/mkdir-recur! path))
          user-path (util/node-path.join path file)
          ;; path-traversal guard: the final path must stay under sub-root
          sub-dir? (string/starts-with? user-path path)
          _ (when-not sub-dir?
              (log/info :debug user-path)
              (throw "write file denied"))
          user-path-root (util/node-path.dirname user-path)
          exist? (fs/file-exists? user-path-root "")
          _ (when-not exist? (fs/mkdir-recur! user-path-root))
          _ (fs/write-file! repo "" user-path content {:skip-compare? true})]
    user-path))
;; Same, rooted at the Logseq dot-dir.
(defn ^:private write_dotdir_file
  [file content sub-root]
  (some-> (plugin-handler/get-ls-dotdir-root)
          (p/then #(write_rootdir_file file content sub-root %))))
;; Same, rooted at the current repo's assets dir; false when there is none.
(defn ^:private write_assetsdir_file
  [file content sub-root]
  (if-let [assets-dir (config/get-current-repo-assets-root)]
    (write_rootdir_file file content sub-root assets-dir)
    false))
;; Reads <root-dir>/<sub-root>/<file>; throws on path escape or missing file.
(defn ^:private read_rootdir_file
  [file sub-root root-dir]
  (p/let [path (util/node-path.join root-dir sub-root)
          user-path (util/node-path.join path file)
          ;; path-traversal guard
          sub-dir? (string/starts-with? user-path path)
          _ (when-not sub-dir? (log/info :debug user-path) (throw "read file denied"))
          exist? (fs/file-exists? "" user-path)
          _ (when-not exist? (log/info :debug user-path) (throw "file not existed"))
          content (fs/read-file "" user-path)]
    content))
;; Read rooted at the Logseq dot-dir.
(defn ^:private read_dotdir_file
  [file sub-root]
  (some-> (plugin-handler/get-ls-dotdir-root)
          (p/then #(read_rootdir_file file sub-root %))))
;; Read rooted at the current repo's assets dir.
(defn ^:private read_assetsdir_file
  [file sub-root]
  (when-let [root-dir (config/get-current-repo-assets-root)]
    (read_rootdir_file file sub-root root-dir)))
;; Deletes <root-dir>/<sub-root>/<file>; throws on path escape or missing file.
(defn ^:private unlink_rootdir_file!
  [file sub-root root-dir]
  (p/let [repo ""
          path (util/node-path.join root-dir sub-root)
          user-path (util/node-path.join path file)
          sub-dir? (string/starts-with? user-path path)
          _ (when-not sub-dir? (log/info :debug user-path) (throw "access file denied"))
          exist? (fs/file-exists? "" user-path)
          _ (when-not exist? (log/info :debug user-path) (throw "file not existed"))
          _ (fs/unlink! repo user-path {})]))
;; Unlink rooted at the Logseq dot-dir.
(defn ^:private unlink_dotdir_file!
  [file sub-root]
  (some-> (plugin-handler/get-ls-dotdir-root)
          (p/then #(unlink_rootdir_file! file sub-root %))))
;; Unlink rooted at the current repo's assets dir.
(defn ^:private unlink_assetsdir_file!
  [file sub-root]
  (when-let [root-dir (config/get-current-repo-assets-root)]
    (unlink_rootdir_file! file sub-root root-dir)))
;; Writes a scratch file under <dotdir>/tmp.
(def ^:export write_user_tmp_file
  (fn [file content]
    (write_dotdir_file file content "tmp")))
;; Per-plugin storage: write under storages/<pid> of the dot-dir, or of the
;; graph's assets dir when assets? is true.
(def ^:export write_plugin_storage_file
  (fn [plugin-id file content assets?]
    (let [plugin-id (util/node-path.basename plugin-id)
          sub-root (util/node-path.join "storages" plugin-id)]
      (if (true? assets?)
        (write_assetsdir_file file content sub-root)
        (write_dotdir_file file content sub-root)))))
;; Per-plugin storage: read.
(def ^:export read_plugin_storage_file
  (fn [plugin-id file assets?]
    (let [plugin-id (util/node-path.basename plugin-id)
          sub-root (util/node-path.join "storages" plugin-id)]
      (if (true? assets?)
        (read_assetsdir_file file sub-root)
        (read_dotdir_file file sub-root)))))
;; Per-plugin storage: delete one file.
(def ^:export unlink_plugin_storage_file
  (fn [plugin-id file assets?]
    (let [plugin-id (util/node-path.basename plugin-id)
          sub-root (util/node-path.join "storages" plugin-id)]
      (if (true? assets?)
        (unlink_assetsdir_file! file sub-root)
        (unlink_dotdir_file! file sub-root)))))
;; Per-plugin storage: file-existence check.
(def ^:export exist_plugin_storage_file
  (fn [plugin-id file assets?]
    (p/let [root (if (true? assets?)
                   (config/get-current-repo-assets-root)
                   (plugin-handler/get-ls-dotdir-root))
            plugin-id (util/node-path.basename plugin-id)
            exist? (fs/file-exists?
                    (util/node-path.join root "storages" plugin-id)
                    file)]
      exist?)))
;; Per-plugin storage: remove the whole storages/<pid> directory.
(def ^:export clear_plugin_storage_files
  (fn [plugin-id assets?]
    (p/let [root (if (true? assets?)
                   (config/get-current-repo-assets-root)
                   (plugin-handler/get-ls-dotdir-root))
            plugin-id (util/node-path.basename plugin-id)]
      (fs/rmdir! (util/node-path.join root "storages" plugin-id)))))
;; Per-plugin storage: list files relative to storages/<pid>.
(def ^:export list_plugin_storage_files
  (fn [plugin-id assets?]
    (p/let [root (if (true? assets?)
                   (config/get-current-repo-assets-root)
                   (plugin-handler/get-ls-dotdir-root))
            plugin-id (util/node-path.basename plugin-id)
            files-path (util/node-path.join root "storages" plugin-id)
            ^js files (ipc/ipc :listdir files-path)]
      (when (js-iterable? files)
        (bean/->js
         (map #(some-> (string/replace-first % files-path "")
                       (string/replace #"^/+" "")) files))))))
;; Loads <dotdir>/preferences.json (created empty on first use); resolves to a
;; parsed JS object.
(def ^:export load_user_preferences
  (fn []
    (p/let [repo ""
            path (plugin-handler/get-ls-dotdir-root)
            path (util/node-path.join path "preferences.json")
            _ (fs/create-if-not-exists repo "" path)
            json (fs/read-file "" path)
            json (if (string/blank? json) "{}" json)]
      (js/JSON.parse json))))
;; Overwrites <dotdir>/preferences.json with the given object (pretty-printed).
(def ^:export save_user_preferences
  (fn [^js data]
    (when data
      (p/let [repo ""
              path (plugin-handler/get-ls-dotdir-root)
              path (util/node-path.join path "preferences.json")]
        (fs/write-file! repo "" path (js/JSON.stringify data nil 2) {:skip-compare? true})))))
;; Per-plugin settings JSON helpers built from the dot-dir factory fns.
(def ^:export load_plugin_user_settings
  (plugin-handler/make-fn-to-load-dotdir-json "settings" "{}"))
(def ^:export save_plugin_user_settings
  (fn [key ^js data]
    ((plugin-handler/make-fn-to-save-dotdir-json "settings")
     key (js/JSON.stringify data nil 2))))
(def ^:export unlink_plugin_user_settings
  (plugin-handler/make-fn-to-unlink-dotdir-json "settings"))
;; Registers a slash command for a plugin. cmd-actions is [cmd actions]; each
;; action's first element is coerced to a keyword (action type).
(def ^:export register_plugin_slash_command
  (fn [pid ^js cmd-actions]
    (when-let [[cmd actions] (bean/->clj cmd-actions)]
      (plugin-handler/register-plugin-slash-command
       pid [cmd (mapv #(into [(keyword (first %))]
                             (rest %)) actions)]))))
;; Registers a plugin "simple command"; when palette? is set, also exposes it in
;; the command palette and wires an optional keybinding to a shortcut.
;; Fix: the stray uncommented text "handle commands" (a comment stripped of its
;; ";;" prefix) broke this form — restored as a comment. No logic changed.
(def ^:export register_plugin_simple_command
  (fn [pid ^js cmd-action palette?]
    (when-let [[cmd action] (bean/->clj cmd-action)]
      (let [action (assoc action 0 (keyword (first action)))
            cmd (assoc cmd :key (string/replace (:key cmd) ":" "-"))
            key (:key cmd)
            keybinding (:keybinding cmd)
            palette-cmd (and palette? (plugin-handler/simple-cmd->palette-cmd pid cmd action))
            ;; NOTE(review): `type` here is not locally bound, so it resolves to
            ;; cljs.core/type — looks like a missing binding; confirm intended payload.
            action' #(state/pub-event! [:exec-plugin-cmd {:type type :key key :pid pid :cmd cmd :action action}])]
        (plugin-handler/register-plugin-simple-command pid cmd action)
        (when palette?
          (palette-handler/register palette-cmd))
        ;; handle commands
        (when-let [shortcut-args (and keybinding (plugin-handler/simple-cmd-keybinding->shortcut-args pid key keybinding))]
          (let [dispatch-cmd (fn [_e]
                               (if palette?
                                 (palette-handler/invoke-command palette-cmd)
                                 (action')))
                [handler-id id shortcut-map] (update shortcut-args 2 assoc :fn dispatch-cmd)]
            (js/console.debug :shortcut/register-shortcut [handler-id id shortcut-map])
            (st/register-shortcut! handler-id id shortcut-map)))))))
;; Unregisters all of a plugin's simple commands, plus any palette entries and
;; shortcuts that were registered under "plugin.<pid>".
;; Fix: the stray uncommented text "remove commands" (a comment stripped of its
;; ";;" prefix) broke this form — restored as a comment. No logic changed.
(defn ^:export unregister_plugin_simple_command
  [pid]
  (plugin-handler/unregister-plugin-simple-command pid)
  (let [palette-matched (->> (palette-handler/get-commands)
                             (filter #(string/includes? (str (:id %)) (str "plugin." pid))))]
    (when (seq palette-matched)
      (doseq [cmd palette-matched]
        (palette-handler/unregister (:id cmd))
        ;; remove commands
        (when (seq (:shortcut cmd))
          (js/console.debug :shortcut/unregister-shortcut cmd)
          (st/unregister-shortcut! (:handler-id cmd) (:id cmd)))))))
;; Registers a plugin-provided search service.
(defn ^:export register_search_service
  [pid name ^js opts]
  (plugin-handler/register-plugin-search-service pid name (bean/->clj opts)))
;; Removes all search services registered by the plugin.
(defn ^:export unregister_search_services
  [pid]
  (plugin-handler/unregister-plugin-search-services pid))
;; Registers a UI item (toolbar/pagebar/etc.) of the given type for the plugin.
(def ^:export register_plugin_ui_item
  (fn [pid type ^js opts]
    (when-let [opts (bean/->clj opts)]
      (plugin-handler/register-plugin-ui-item
       pid (assoc opts :type type)))))
;; Restarts the Electron app.
(def ^:export relaunch
  (fn []
    (ipc/ipc "relaunchApp")))
;; Quits the Electron app.
(def ^:export quit
  (fn []
    (ipc/ipc "quitApp")))
;; Opens an http(s) URL in the OS browser; other schemes are ignored.
(def ^:export open_external_link
  (fn [url]
    (when (re-find #"https?://" url)
      (js/apis.openExternal url))))
;; Invokes a built-in palette command by its "logseq.*" id.
(def ^:export invoke_external_command
  (fn [type & args]
    (when-let [id (and (string/starts-with? type "logseq.")
                       (-> (string/replace type #"^logseq." "")
                           (util/safe-lower-case)
                           (keyword)))]
      (when-let [action (get-in (palette-handler/get-commands-unique) [id :action])]
        (apply action args)))))
;; Shows/hides the left sidebar; flag is a boolean or the string "toggle".
(def ^:export set_left_sidebar_visible
  (fn [flag]
    (if (= flag "toggle")
      (state/toggle-left-sidebar!)
      (state/set-state! :ui/left-sidebar-open? (boolean flag)))
    nil))
;; Shows/hides the right sidebar; flag is a boolean or the string "toggle".
(def ^:export set_right_sidebar_visible
  (fn [flag]
    (if (= flag "toggle")
      (state/toggle-sidebar-open?!)
      (state/set-state! :ui/sidebar-open? (boolean flag)))
    nil))
;; Clears blocks shown in the right sidebar; opts.close also hides it.
(def ^:export clear_right_sidebar_blocks
  (fn [^js opts]
    (state/clear-sidebar-blocks!)
    (when-let [opts (and opts (bean/->clj opts))]
      (and (:close opts) (state/hide-right-sidebar!)))
    nil))
;; Router navigation: push a new history entry.
(def ^:export push_state
  (fn [^js k ^js params ^js query]
    (rfe/push-state
     (keyword k)
     (bean/->clj params)
     (bean/->clj query))))
;; Router navigation: replace the current history entry.
(def ^:export replace_state
  (fn [^js k ^js params ^js query]
    (rfe/replace-state
     (keyword k)
     (bean/->clj params)
     (bean/->clj query))))
;; Returns a plugin instance's JSON representation, or nil if not loaded.
(defn ^:export get_external_plugin
  [pid]
  (when-let [^js pl (plugin-handler/get-plugin-inst pid)]
    (.toJSON pl)))
;; Invokes another plugin's registered model or command by key.
(defn ^:export invoke_external_plugin_cmd
  [pid cmd-group cmd-key cmd-args]
  (case (keyword cmd-group)
    :models
    (plugin-handler/call-plugin-user-model! pid cmd-key cmd-args)
    :commands
    (plugin-handler/call-plugin-user-command! pid cmd-key cmd-args)))
;; Returns the uuid (string) of the block being edited, or false.
(def ^:export check_editing
  (fn []
    (if (state/get-edit-input-id)
      (str (:block/uuid (state/get-edit-block))) false)))
;; Leaves edit mode; select? keeps the block selected.
(def ^:export exit_editing_mode
  (fn [select?]
    (editor-handler/escape-editing select?)
    nil))
;; Inserts text at the caret of the active editor input, then refocuses it.
(def ^:export insert_at_editing_cursor
  (fn [content]
    (when-let [input-id (state/get-edit-input-id)]
      (commands/simple-insert! input-id content {})
      (when-let [input (gdom/getElement input-id)]
        (.focus input)))))
;; Refocuses the active editor input without modifying it.
(def ^:export restore_editing_cursor
  (fn []
    (when-let [input-id (state/get-edit-input-id)]
      (when-let [input (gdom/getElement input-id)]
        (.focus input)))))
;; Caret position info of the active editor input as a JS object.
(def ^:export get_editing_cursor_position
  (fn []
    (when-let [input-id (state/get-edit-input-id)]
      (bean/->js (normalize-keyword-for-json (cursor/get-caret-pos (gdom/getElement input-id)))))))
;; Raw content of the editor buffer being edited.
(def ^:export get_editing_block_content
  (fn []
    (state/get-edit-content)))
;; Currently selected blocks (selection mode only), resolved from each DOM
;; node's "blockid" attribute.
(def ^:export get_selected_blocks
  (fn []
    (when-let [blocks (and (state/in-selection-mode?)
                           (seq (state/get-selection-blocks)))]
      (let [blocks (->> blocks
                        (map (fn [^js el] (some-> (.getAttribute el "blockid")
                                                  (db-model/query-block-by-uuid)))))]
        (bean/->js (normalize-keyword-for-json blocks))))))
;; Entity of the page currently displayed, or nil.
(def ^:export get_current_page
  (fn []
    (when-let [page (state/get-current-page)]
      (when-let [page (db-model/get-page page)]
        (bean/->js (normalize-keyword-for-json (db-utils/pull (:db/id page))))))))
;; Fetches a page by db id (number) or name (string). Entities that carry
;; :block/left are blocks, not pages, and yield nil.
(def ^:export get_page
  (fn [id-or-page-name]
    (when-let [page (cond
                      (number? id-or-page-name) (db-utils/pull id-or-page-name)
                      (string? id-or-page-name) (db-model/get-page id-or-page-name))]
      (when-not (contains? page :block/left)
        (bean/->js (normalize-keyword-for-json (db-utils/pull (:db/id page))))))))
;; All pages of the given repo.
(def ^:export get_all_pages
  (fn [repo]
    (let [pages (page-handler/get-all-pages repo)]
      (bean/->js (normalize-keyword-for-json pages)))))
;; Creates a page (no-op if it exists) and returns its entity.
;; opts: redirect (default true), createFirstBlock (default true), format, journal.
(def ^:export create_page
  (fn [name ^js properties ^js opts]
    (some-> (if-let [page (db-model/get-page name)]
              page
              (let [properties (bean/->clj properties)
                    {:keys [redirect createFirstBlock format journal]} (bean/->clj opts)
                    name (page-handler/create!
                          name
                          {:redirect? (if (boolean? redirect) redirect true)
                           :journal? journal
                           :create-first-block? (if (boolean? createFirstBlock) createFirstBlock true)
                           :format format
                           :properties properties})]
                (db-model/get-page name)))
            (:db/id)
            (db-utils/pull)
            (normalize-keyword-for-json)
            (bean/->js))))
;; Deletes page `name`; the returned promise resolves when the handler calls back.
(def ^:export delete_page
  (fn [name]
    (p/create (fn [ok] (page-handler/delete! name ok)))))
;; Renames a page — direct alias of the page handler.
(def ^:export rename_page
  page-handler/rename!)
;; Opens the block (uuid string or UUID) in the right sidebar.
(defn ^:export open_in_right_sidebar
  [block-uuid]
  (editor-handler/open-block-in-sidebar! (uuid-or-throw-error block-uuid)))
;; Returns a freshly generated block UUID as a string.
(defn ^:export new_block_uuid []
  (str (db/new-block-id)))
;; Selects the block with the given uuid, if it exists. Returns nil.
(def ^:export select_block
  (fn [block-uuid]
    (when-let [block (db-model/get-block-by-uuid (uuid-or-throw-error block-uuid))]
      (editor-handler/select-block! (:block/uuid block)) nil)))
;; Puts the block into edit mode; opts.pos positions the caret (defaults to :max = end).
(def ^:export edit_block
  (fn [block-uuid ^js opts]
    (when-let [block-uuid (and block-uuid (uuid-or-throw-error block-uuid))]
      (when-let [block (db-model/query-block-by-uuid block-uuid)]
        (let [{:keys [pos] :or {pos :max}} (bean/->clj opts)]
          (editor-handler/edit-block! block pos block-uuid))))))
;; Inserts a new block with `content` relative to a target block (uuid string) or
;; onto a page (by name, created on demand). opts: before / sibling / focus /
;; customUUID / properties. Returns the new block as a JS object.
(def ^:export insert_block
  (fn [block-uuid-or-page-name content ^js opts]
    (when (string/blank? block-uuid-or-page-name)
      (throw (js/Error. "Page title or block UUID shouldn't be empty.")))
    (let [{:keys [before sibling focus customUUID properties]} (bean/->clj opts)
          [page-name block-uuid] (if (util/uuid-string? block-uuid-or-page-name)
                                   [nil (uuid block-uuid-or-page-name)]
                                   [block-uuid-or-page-name nil])
          page-name (when page-name (util/page-name-sanity-lc page-name))
          ;; auto-create the target page when it doesn't exist yet
          _ (when (and page-name (not (db/entity [:block/name page-name])))
              (page-handler/create! block-uuid-or-page-name {:create-first-block? false}))
          custom-uuid (or customUUID (:id properties))
          custom-uuid (when custom-uuid (uuid-or-throw-error custom-uuid))
          edit-block? (if (nil? focus) true focus)
          _ (when (and custom-uuid (db-model/query-block-by-uuid custom-uuid))
              (throw (js/Error.
                      (util/format "Custom block UUID already exists (%s)." custom-uuid))))
          ;; "insert before, non-sibling" is rewritten as inserting relative to the
          ;; target's first child (get-by-parent-&-left with parent == left == target)
          block-uuid' (if (and (not sibling) before block-uuid)
                        (let [block (db/entity [:block/uuid block-uuid])
                              first-child (db-model/get-by-parent-&-left (db/get-db)
                                                                         (:db/id block)
                                                                         (:db/id block))]
                          (if first-child
                            (:block/uuid first-child)
                            block-uuid))
                        block-uuid)
          insert-at-first-child? (not= block-uuid' block-uuid)
          [sibling? before?] (if insert-at-first-child?
                               [true true]
                               [sibling before])
          ;; before? is meaningless for a nested (non-sibling) insert
          before? (if (and (false? sibling?) before? (not insert-at-first-child?))
                    false
                    before?)
          new-block (editor-handler/api-insert-new-block!
                     content
                     {:block-uuid block-uuid'
                      :sibling? sibling?
                      :before? before?
                      :edit-block? edit-block?
                      :page page-name
                      :custom-uuid custom-uuid
                      :properties (merge properties
                                         (when custom-uuid {:id custom-uuid}))})]
      (bean/->js (normalize-keyword-for-json new-block)))))
;; Insert a tree of blocks after the target block.  `batch-blocks` may
;; be a single block map or a vector of them; :keepUUID preserves any
;; properties.id UUIDs, first validating that none already exist.
(def ^:export insert_batch_block
  (fn [block-uuid ^js batch-blocks ^js opts]
    (when-let [block (db-model/query-block-by-uuid (uuid-or-throw-error block-uuid))]
      (when-let [bb (bean/->clj batch-blocks)]
        (let [bb (if-not (vector? bb) (vector bb) bb)
              {:keys [sibling keepUUID]} (bean/->clj opts)
              keep-uuid? (or keepUUID false)
              ;; Reject the whole batch up front if any kept UUID collides.
              _ (when keep-uuid? (doseq
                                  [block (outliner/tree-vec-flatten bb :children)]
                                   (let [uuid (:id (:properties block))]
                                     (when (and uuid (db-model/query-block-by-uuid (uuid-or-throw-error uuid)))
                                       (throw (js/Error.
                                               (util/format "Custom block UUID already exists (%s)." uuid)))))))
              _ (editor-handler/insert-block-tree-after-target
                 (:db/id block) sibling bb (:block/format block) keep-uuid?)]
          nil)))))
;; Delete a block (always including its children) by UUID.  `_opts` is
;; accepted for API compatibility but currently ignored.  Returns nil.
(def ^:export remove_block
  (fn [block-uuid ^js _opts]
    (let [includeChildren true
          repo (state/get-current-repo)]
      (editor-handler/delete-block-aux!
       {:block/uuid (uuid-or-throw-error block-uuid) :repo repo} includeChildren)
      nil)))

;; Replace a block's content.  When the block is currently being edited
;; (its edit-input id ends with the UUID) the live editor content is
;; updated instead of saving through the handler.  Returns nil.
(def ^:export update_block
  (fn [block-uuid content ^js _opts]
    (let [repo (state/get-current-repo)
          edit-input (state/get-edit-input-id)
          editing? (and edit-input (string/ends-with? edit-input (str block-uuid)))]
      (if editing?
        (state/set-edit-content! edit-input content)
        (editor-handler/save-block! repo (uuid-or-throw-error block-uuid) content))
      nil)))
;; Move `src` block relative to `target`: opts.before -> :top,
;; opts.children -> :nested, otherwise nil (default placement as
;; decided by the dnd handler).  Returns nil.
(def ^:export move_block
  (fn [src-block-uuid target-block-uuid ^js opts]
    (let [{:keys [before children]} (bean/->clj opts)
          move-to (cond
                    (boolean before)
                    :top
                    (boolean children)
                    :nested
                    :else
                    nil)
          src-block (db-model/query-block-by-uuid (uuid-or-throw-error src-block-uuid))
          target-block (db-model/query-block-by-uuid (uuid-or-throw-error target-block-uuid))]
      (editor-dnd-handler/move-blocks nil [src-block] target-block move-to) nil)))
;; Fetch a block by db id (number) or UUID (string).  Returns nil for
;; pages (entities carrying :block/name).  With opts.includeChildren the
;; full child tree is returned; otherwise :block/children holds
;; (:uuid <child-uuid>) pairs for the immediate children only.
(def ^:export get_block
  (fn [id-or-uuid ^js opts]
    (when-let [block (cond
                       (number? id-or-uuid) (db-utils/pull id-or-uuid)
                       (string? id-or-uuid) (db-model/query-block-by-uuid (uuid-or-throw-error id-or-uuid)))]
      (when-not (contains? block :block/name)
        (when-let [uuid (:block/uuid block)]
          (let [{:keys [includeChildren]} (bean/->clj opts)
                repo (state/get-current-repo)
                block (if includeChildren
                        (first (outliner-tree/blocks->vec-tree
                                (db-model/get-block-and-children repo uuid) uuid))
                        (assoc block :block/children
                               (map #(list :uuid (get-in % [:data :block/uuid]))
                                    (db/get-block-immediate-children repo uuid))))]
            (bean/->js (normalize-keyword-for-json block))))))))
;; Return the block currently being edited, falling back to the first
;; selected block or the editing block's DOM node (via its "blockid"
;; attribute).  Delegates to get_block with the same opts.
(def ^:export get_current_block
  (fn [^js opts]
    (let [block (state/get-edit-block)
          block (or block
                    (some-> (or (first (state/get-selection-blocks))
                                (gdom/getElement (state/get-editing-block-dom-id)))
                            (.getAttribute "blockid")
                            (db-model/get-block-by-uuid)))]
      (get_block (:db/id block) opts))))

;; Previous sibling of a block, or nil when the block is its parent's
;; first child (i.e. its :block/left IS its parent).
(def ^:export get_previous_sibling_block
  (fn [block-uuid]
    (when-let [block (db-model/query-block-by-uuid (uuid-or-throw-error block-uuid))]
      (let [{:block/keys [parent left]} block
            block (when-not (= parent left) (db-utils/pull (:db/id left)))]
        (and block (bean/->js (normalize-keyword-for-json block)))))))

;; Next sibling of a block, or nil when it is the last sibling.
(def ^:export get_next_sibling_block
  (fn [block-uuid]
    (when-let [block (db-model/query-block-by-uuid (uuid-or-throw-error block-uuid))]
      (when-let [right-siblings (outliner/get-right-siblings (outliner/->Block block))]
        (bean/->js (normalize-keyword-for-json (:data (first right-siblings))))))))
;; Collapse or expand a block.  `opts` may be a plain boolean/string or
;; a map {:flag <bool|"toggle">}; "toggle" flips the current collapsed
;; state.  Returns nil.
(def ^:export set_block_collapsed
  (fn [block-uuid ^js opts]
    (let [block-uuid (uuid-or-throw-error block-uuid)]
      (when-let [block (db-model/get-block-by-uuid block-uuid)]
        (let [opts (bean/->clj opts)
              opts (if (or (string? opts) (boolean? opts)) {:flag opts} opts)
              {:keys [flag]} opts
              flag (if (= "toggle" flag)
                     (not (util/collapsed? block))
                     (boolean flag))]
          (if flag (editor-handler/collapse-block! block-uuid)
              (editor-handler/expand-block! block-uuid))
          nil)))))
;; Create or update a single property on a block.
(def ^:export upsert_block_property
  (fn [block-uuid key value]
    (editor-handler/set-block-property! (uuid-or-throw-error block-uuid) key value)))

;; Remove a single property from a block.
(def ^:export remove_block_property
  (fn [block-uuid key]
    (editor-handler/remove-block-property! (uuid-or-throw-error block-uuid) key)))

;; Read one property value from a block (key looked up as a keyword).
(def ^:export get_block_property
  (fn [block-uuid key]
    (when-let [block (db-model/query-block-by-uuid (uuid-or-throw-error block-uuid))]
      (get (:block/properties block) (keyword key)))))

;; Read all properties of a block as a JS object.
(def ^:export get_block_properties
  (fn [block-uuid]
    (when-let [block (db-model/query-block-by-uuid (uuid-or-throw-error block-uuid))]
      (bean/->js (normalize-keyword-for-json (:block/properties block))))))
;; Block tree of the page currently shown in the UI, as JS data.
(def ^:export get_current_page_blocks_tree
  (fn []
    (when-let [page (state/get-current-page)]
      (let [blocks (db-model/get-page-blocks-no-cache page)
            blocks (outliner-tree/blocks->vec-tree blocks page)
            blocks (normalize-keyword-for-json blocks)]
        (bean/->js blocks)))))

;; Block tree of a named page, as JS data; nil if the page is unknown.
(def ^:export get_page_blocks_tree
  (fn [page-name]
    (when-let [_ (db-model/get-page page-name)]
      (let [blocks (db-model/get-page-blocks-no-cache page-name)
            blocks (outliner-tree/blocks->vec-tree blocks page-name)
            blocks (normalize-keyword-for-json blocks)]
        (bean/->js blocks)))))
;; Linked references of a page.  Accepts either a page name or a UUID;
;; for entities without :block/name the block-reference query is used.
(defn ^:export get_page_linked_references
  [page-name-or-uuid]
  (when-let [page (and page-name-or-uuid (db-model/get-page page-name-or-uuid))]
    (let [page-name (:block/name page)
          ref-blocks (if page-name
                       (db-model/get-page-referenced-blocks-full page-name)
                       (db-model/get-block-referenced-blocks (:block/uuid page)))
          ref-blocks (and (seq ref-blocks) (into [] ref-blocks))]
      (bean/->js (normalize-keyword-for-json ref-blocks)))))

;; Flat list of pages under a namespace.
(defn ^:export get_pages_from_namespace
  [ns]
  (when-let [repo (and ns (state/get-current-repo))]
    (when-let [pages (db-model/get-namespace-pages repo ns)]
      (bean/->js (normalize-keyword-for-json pages)))))

;; Hierarchical tree of pages under a namespace.
(defn ^:export get_pages_tree_from_namespace
  [ns]
  (when-let [repo (and ns (state/get-current-repo))]
    (when-let [pages (db-model/get-namespace-hierarchy repo ns)]
      (bean/->js (normalize-keyword-for-json pages)))))
(defn- sorted-children-of-block
  "Children of `block` (via the :block/_parent reverse ref), ordered
  left-to-right by db-model/sort-by-left; nil when there are none."
  [block]
  (some-> (:block/_parent block)
          (db-model/sort-by-left block)))

(defn first-child-of-block
  "First child of `block` in document order, or nil."
  [block]
  (first (sorted-children-of-block block)))

(defn second-child-of-block
  "Second child of `block` in document order, or nil."
  [block]
  (second (sorted-children-of-block block)))

(defn last-child-of-block
  "Last child of `block` in document order, or nil."
  [block]
  (last (sorted-children-of-block block)))
;; Insert `content` as the FIRST block of a page (or first child of a
;; block).  Creates the page when a non-existent title is given.  For a
;; page target the insertion point is before its second child because
;; the first child may be the auto-created first block.
(defn ^:export prepend_block_in_page
  [uuid-or-page-name content ^js opts]
  (let [page? (not (util/uuid-string? uuid-or-page-name))
        page-not-exist? (and page? (nil? (db-model/get-page uuid-or-page-name)))
        _ (and page-not-exist? (page-handler/create! uuid-or-page-name
                                                     {:redirect? false
                                                      :create-first-block? true
                                                      :format (state/get-preferred-format)}))]
    (when-let [block (db-model/get-page uuid-or-page-name)]
      (let [block' (if page? (second-child-of-block block) (first-child-of-block block))
            sibling? (and page? (not (nil? block')))
            opts (bean/->clj opts)
            opts (merge opts {:sibling sibling? :before sibling?})
            src (if sibling? (str (:block/uuid block')) uuid-or-page-name)]
        (insert_block src content (bean/->js opts))))))

;; Insert `content` as the LAST block of a page (or last child of a
;; block).  Creates the page when a non-existent title is given.
(defn ^:export append_block_in_page
  [uuid-or-page-name content ^js opts]
  (let [page? (not (util/uuid-string? uuid-or-page-name))
        page-not-exist? (and page? (nil? (db-model/get-page uuid-or-page-name)))
        _ (and page-not-exist? (page-handler/create! uuid-or-page-name
                                                     {:redirect? false
                                                      :create-first-block? true
                                                      :format (state/get-preferred-format)}))]
    (when-let [block (db-model/get-page uuid-or-page-name)]
      (let [block' (last-child-of-block block)
            sibling? (not (nil? block'))
            opts (bean/->clj opts)
            opts (merge opts {:sibling sibling?}
                        (when sibling? {:before false}))
            src (if sibling? (str (:block/uuid block')) uuid-or-page-name)]
        (insert_block src content (bean/->js opts))))))
;; Ask the Electron main process to validate external plugin URLs.
(defn ^:export validate_external_plugins [urls]
  (ipc/ipc :validateUserExternalPlugins urls))

;; Install a marketplace plugin from its manifest; :repo and :id are
;; mandatory fields.
(def ^:export __install_plugin
  (fn [^js manifest]
    (when-let [{:keys [repo id] :as mft} (bean/->clj manifest)]
      (if-not (and repo id)
        (throw (js/Error. "[required] :repo :id"))
        (plugin-common-handler/install-marketplace-plugin mft)))))
;; Run a Logseq DSL query string against the current repo; returns the
;; flattened result as JS data.
(defn ^:export q
  [query-string]
  (when-let [repo (state/get-current-repo)]
    (when-let [result (query-dsl/query repo query-string)]
      (bean/->js (normalize-keyword-for-json (flatten @result))))))

;; Run a raw DataScript query.  String inputs are read as edn and
;; resolved (e.g. built-in input symbols); function inputs are wrapped
;; so they receive JS-converted arguments; other inputs pass through.
(defn ^:export datascript_query
  [query & inputs]
  (when-let [repo (state/get-current-repo)]
    (when-let [db (db/get-db repo)]
      (let [query (cljs.reader/read-string query)
            resolved-inputs (map #(cond
                                    (string? %)
                                    (some->> % (cljs.reader/read-string) (query-react/resolve-input db))
                                    (fn? %)
                                    (fn [& args]
                                      (.apply % nil (clj->js (mapv bean/->js args))))
                                    :else %)
                                 inputs)
            result (apply d/q query db resolved-inputs)]
        (bean/->js (normalize-keyword-for-json result false))))))

;; Run a custom query map (edn string) through db/custom-query.
(defn ^:export custom_query
  [query-string]
  (let [result (let [query (cljs.reader/read-string query-string)]
                 (db/custom-query {:query query}))]
    (bean/->js (normalize-keyword-for-json (flatten @result)))))
;; Serialize the current graph's DataScript db and trigger a browser
;; download via the hidden "download" anchor element (.transit file).
(defn ^:export download_graph_db
  []
  (when-let [repo (state/get-current-repo)]
    (when-let [db (db/get-db repo)]
      (let [db-str (if db (db/db->string db) "")
            data-str (str "data:text/edn;charset=utf-8," (js/encodeURIComponent db-str))]
        (when-let [anchor (gdom/getElement "download")]
          (.setAttribute anchor "href" data-str)
          (.setAttribute anchor "download" (str (string/replace repo "/" " ") ".transit"))
          (.click anchor))))))

;; Export the current repo's pages as a zip archive.
(defn ^:export download_graph_pages
  []
  (when-let [repo (state/get-current-repo)]
    (export-handler/export-repo-as-zip! repo)))
;; Run a git command (args as a JS array) via shell/run-git-command!.
(defn ^:export exec_git_command
  [^js args]
  (when-let [args (and args (seq (bean/->clj args)))]
    (shell/run-git-command! args)))

;; Variant of exec_git_command backed by shell/run-git-command2!.
;; NOTE(review): the behavioral difference between the two shell fns is
;; not visible from here -- see the shell namespace.
(defn ^:export git_exec_command
  [^js args]
  (when-let [args (and args (seq (bean/->clj args)))]
    (shell/run-git-command2! args)))

;; Read the repo's .gitignore, creating an empty one if missing.
(defn ^:export git_load_ignore_file
  []
  (when-let [repo (state/get-current-repo)]
    (p/let [file ".gitignore"
            dir (config/get-repo-dir repo)
            _ (fs/create-if-not-exists repo dir file)
            content (fs/read-file dir file)]
      content)))

;; Overwrite the repo's .gitignore with `content` (string only).
(defn ^:export git_save_ignore_file
  [content]
  (when-let [repo (and (string? content) (state/get-current-repo))]
    (p/let [file ".gitignore"
            dir (config/get-repo-dir repo)
            _ (fs/write-file! repo dir file content {:skip-compare? true})])))
(defn ^:export show_msg
  "Show a notification.  `content` starting with \"[:\" is parsed as
  hiccup.  `opts` may carry :key (stable notification id) and :timeout
  (0 means do not auto-clear).  Returns the notification key string."
  ([content] (show_msg content :success nil))
  ([content status] (show_msg content status nil))
  ([content status ^js opts]
   (let [{:keys [key timeout]} (bean/->clj opts)
         hiccup? (and (string? content) (string/starts-with? (string/triml content) "[:"))
         content (if hiccup? (parse-hiccup-ui content) content)
         uid (when (string? key) (keyword key))
         ;; timeout of exactly 0 disables auto-clearing
         clear? (not= timeout 0)
         key' (notification/show! content (keyword status) clear? uid timeout)]
     (name key'))))

;; Alias of show_msg under the ui_ prefix.
(defn ^:export ui_show_msg
  [& args]
  (apply show_msg args))

;; Dismiss a notification previously shown with a string key.
(defn ^:export ui_close_msg
  [key]
  (when (string? key)
    (notification/clear! (keyword key)) nil))
;; List asset files of the current graph via IPC, optionally filtered
;; by the given extensions.  Returns a promise of a JS array.
(defn ^:export assets_list_files_of_current_graph
  [^js exts]
  (p/let [files (ipc/ipc :getAssetsFiles {:exts exts})]
    (bean/->js files)))
;; Experimental: dynamically load scripts for a plugin.  Each script is
;; registered as a plugin resource once (init? is truthy only on first
;; registration) and its load status is tracked through
;; :pending -> :done / :error.
(defn ^:export exper_load_scripts
  [pid & scripts]
  (when-let [^js _pl (plugin-handler/get-plugin-inst pid)]
    (doseq [s scripts
            :let [upt-status #(state/upt-plugin-resource pid :scripts s :status %)
                  init? (plugin-handler/register-plugin-resources pid :scripts {:key s :src s})]]
      (when init?
        (p/catch
         (p/then
          (do
            (upt-status :pending)
            (loader/load s nil {:attributes {:data-ref (name pid)}}))
          #(upt-status :done))
         #(upt-status :error))))))

;; Experimental: register a fenced-code-block renderer for a language
;; `type`, copying the :edit/:before/:subs/:render callbacks from opts.
(defn ^:export exper_register_fenced_code_renderer
  [pid type ^js opts]
  (when-let [^js _pl (plugin-handler/get-plugin-inst pid)]
    (plugin-handler/register-fenced-code-renderer
     (keyword pid) type (reduce #(assoc %1 %2 (aget opts (name %2))) {}
                                [:edit :before :subs :render]))))

;; Experimental: register an extensions enhancer function for `type`.
(defn ^:export exper_register_extensions_enhancer
  [pid type enhancer]
  (when-let [^js _pl (and (fn? enhancer) (plugin-handler/get-plugin-inst pid))]
    (plugin-handler/register-extensions-enhancer
     (keyword pid) type {:enhancer enhancer})))
;; Monotonically increasing request id counter for exper_request.
(defonce *request-k (volatile! 0))

;; Experimental: issue an HTTP request on behalf of a plugin through the
;; main process.  Both success and failure are delivered to the plugin
;; via request-callback; returns the request id (use with
;; http_request_abort).
(defn ^:export exper_request
  [pid ^js options]
  (when-let [^js pl (plugin-handler/get-plugin-inst pid)]
    ;; vswap! increments and returns the new value in one step, instead
    ;; of the read-then-vreset! two-step of the previous version.
    (let [req-id (vswap! *request-k inc)
          req-cb #(plugin-handler/request-callback pl req-id %)]
      (-> (ipc/ipc :httpRequest req-id options)
          (p/then #(req-cb %))
          (p/catch #(req-cb %)))
      req-id)))
;; Abort an in-flight request started by exper_request.
(defn ^:export http_request_abort
  [req-id]
  (ipc/ipc :httpRequestAbort req-id))

;; Full-text search over the current graph.
(defn ^:export search
  [q]
  (search-handler/search q))
(defn ^:export query_element_by_id
  "Look up a DOM element by id.  Returns \"TAGNAME#id\" when the element
  exists, otherwise nil.  (The previous `(if el ... false)` false branch
  was unreachable: `when-let` only enters the body for a non-nil `el`.)"
  [id]
  (when-let [^js el (gdom/getElement id)]
    (str (.-tagName el) "#" id)))
;; Bounding client rect of the first element matching a CSS selector,
;; as a plain JS object (via DOMRect.toJSON); nil when nothing matches.
(defn ^:export query_element_rect
  [selector]
  (when-let [^js el (js/document.querySelector selector)]
    (bean/->js (.toJSON (.getBoundingClientRect el)))))
;; Focus the settings pane of the given plugin and open its settings UI.
(defn ^:export set_focused_settings
  [pid]
  (when-let [plugin (state/get-plugin-by-id pid)]
    (state/set-state! :plugin/focused-settings pid)
    (state/pub-event! [:go/plugins-settings pid false (or (:name plugin) (:title plugin))])))

;; Force-persist all in-memory databases; resolves to true when done.
(defn ^:export force_save_graph
  []
  (p/let [_ (el/persist-dbs!)]
    true))

;; Resolve an asset path to a servable URL.
(def ^:export make_asset_url editor-handler/make-asset-url)

;; Assign UUIDs (given as strings) to blocks.
(def ^:export set_blocks_id #(editor-handler/set-blocks-id! (map uuid %)))

;; Debug helper: dump the app state atom (or one top-level key of it)
;; as JS data.
(defn ^:export __debug_state
  [path]
  (-> (if (string? path)
        (get @state/state (keyword path))
        @state/state)
      (bean/->js)))
|
2bf20a8e34971cfa215dd1646010c5fef584ec54858a1ffd9b6a5be84f6f25d9 | hasktorch/hasktorch | Native2.hs |
-- generated by using spec/Declarations.yaml
# LANGUAGE DataKinds #
# LANGUAGE PolyKinds #
# LANGUAGE TemplateHaskell #
# LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE OverloadedStrings #-}
module Torch.Internal.Unmanaged.Native.Native2 where
import Foreign.C.String
import Foreign.C.Types
import Foreign
import Torch.Internal.Type
import qualified Language.C.Inline.Cpp as C
import qualified Language.C.Inline.Cpp.Unsafe as C
import qualified Language.C.Inline.Context as C
import qualified Language.C.Types as C
C.context $ C.cppCtx <> mempty { C.ctxTypesTable = typeTable }
C.include "<vector>"
C.include "<ATen/Tensor.h>"
C.include "<ATen/Functions.h>"
-- Bindings below are machine-generated from spec/Declarations.yaml (see
-- module header); do not edit by hand.  Each function name carries a
-- suffix encoding its argument types (t = Tensor, l = Int64/IntArray,
-- d = CDouble, b = CBool, s = Scalar/ScalarType, n = Dimname); shorter
-- suffixes are overloads relying on the C++ default arguments.

-- | at::cosine_embedding_loss(input1, input2, target, margin, reduction)
cosine_embedding_loss_tttdl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> Int64
  -> IO (Ptr Tensor)
cosine_embedding_loss_tttdl _input1 _input2 _target _margin _reduction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cosine_embedding_loss(
    *$(at::Tensor* _input1)
  , *$(at::Tensor* _input2)
  , *$(at::Tensor* _target)
  , $(double _margin)
  , $(int64_t _reduction)));
  }|]

cosine_embedding_loss_tttd
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> IO (Ptr Tensor)
cosine_embedding_loss_tttd _input1 _input2 _target _margin =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cosine_embedding_loss(
    *$(at::Tensor* _input1)
  , *$(at::Tensor* _input2)
  , *$(at::Tensor* _target)
  , $(double _margin)));
  }|]

cosine_embedding_loss_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cosine_embedding_loss_ttt _input1 _input2 _target =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cosine_embedding_loss(
    *$(at::Tensor* _input1)
  , *$(at::Tensor* _input2)
  , *$(at::Tensor* _target)));
  }|]

-- | at::count_nonzero, optionally restricted to the given dims.
count_nonzero_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
count_nonzero_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::count_nonzero(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)));
  }|]

count_nonzero_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
count_nonzero_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::count_nonzero(
    *$(at::Tensor* _self)));
  }|]

-- | at::cov(self, correction, fweights, aweights) and its
-- defaulted-argument overloads.
cov_tltt
  :: Ptr Tensor
  -> Int64
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cov_tltt _self _correction _fweights _aweights =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cov(
    *$(at::Tensor* _self)
  , $(int64_t _correction)
  , *$(at::Tensor* _fweights)
  , *$(at::Tensor* _aweights)));
  }|]

cov_tlt
  :: Ptr Tensor
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cov_tlt _self _correction _fweights =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cov(
    *$(at::Tensor* _self)
  , $(int64_t _correction)
  , *$(at::Tensor* _fweights)));
  }|]

cov_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cov_tl _self _correction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cov(
    *$(at::Tensor* _self)
  , $(int64_t _correction)));
  }|]

cov_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
cov_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cov(
    *$(at::Tensor* _self)));
  }|]

-- | at::corrcoef(self)
corrcoef_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
corrcoef_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::corrcoef(
    *$(at::Tensor* _self)));
  }|]
-- cuDNN-backed operators.  Machine-generated from
-- spec/Declarations.yaml; do not edit by hand.

-- | at::cudnn_affine_grid_generator(theta, N, C, H, W)
cudnn_affine_grid_generator_tllll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
cudnn_affine_grid_generator_tllll _theta _N _C _H _W =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_affine_grid_generator(
    *$(at::Tensor* _theta)
  , $(int64_t _N)
  , $(int64_t _C)
  , $(int64_t _H)
  , $(int64_t _W)));
  }|]

cudnn_affine_grid_generator_backward_tllll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
cudnn_affine_grid_generator_backward_tllll _grad _N _C _H _W =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_affine_grid_generator_backward(
    *$(at::Tensor* _grad)
  , $(int64_t _N)
  , $(int64_t _C)
  , $(int64_t _H)
  , $(int64_t _W)));
  }|]

-- | at::cudnn_batch_norm; returns a 4-tuple of Tensors.
cudnn_batch_norm_tttttbdd
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> CDouble
  -> CDouble
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
cudnn_batch_norm_tttttbdd _input _weight _bias _running_mean _running_var _training _exponential_average_factor _epsilon =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::cudnn_batch_norm(
    *$(at::Tensor* _input)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _bias)
  , *$(at::Tensor* _running_mean)
  , *$(at::Tensor* _running_var)
  , $(bool _training)
  , $(double _exponential_average_factor)
  , $(double _epsilon)));
  }|]

cudnn_batch_norm_backward_tttttttdt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor)))
cudnn_batch_norm_backward_tttttttdt _input _grad_output _weight _running_mean _running_var _save_mean _save_var _epsilon _reserveSpace =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor>(at::cudnn_batch_norm_backward(
    *$(at::Tensor* _input)
  , *$(at::Tensor* _grad_output)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _running_mean)
  , *$(at::Tensor* _running_var)
  , *$(at::Tensor* _save_mean)
  , *$(at::Tensor* _save_var)
  , $(double _epsilon)
  , *$(at::Tensor* _reserveSpace)));
  }|]

-- | at::cudnn_convolution / at::cudnn_convolution_transpose
cudnn_convolution_ttllllbbb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> CBool
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
cudnn_convolution_ttllllbbb _self _weight _padding _stride _dilation _groups _benchmark _deterministic _allow_tf32 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_convolution(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _weight)
  , *$(std::vector<int64_t>* _padding)
  , *$(std::vector<int64_t>* _stride)
  , *$(std::vector<int64_t>* _dilation)
  , $(int64_t _groups)
  , $(bool _benchmark)
  , $(bool _deterministic)
  , $(bool _allow_tf32)));
  }|]

cudnn_convolution_transpose_ttlllllbbb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> CBool
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
cudnn_convolution_transpose_ttlllllbbb _self _weight _padding _output_padding _stride _dilation _groups _benchmark _deterministic _allow_tf32 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_convolution_transpose(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _weight)
  , *$(std::vector<int64_t>* _padding)
  , *$(std::vector<int64_t>* _output_padding)
  , *$(std::vector<int64_t>* _stride)
  , *$(std::vector<int64_t>* _dilation)
  , $(int64_t _groups)
  , $(bool _benchmark)
  , $(bool _deterministic)
  , $(bool _allow_tf32)));
  }|]

-- | Fused convolution + (add) + relu variants.
cudnn_convolution_relu_tttllll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr Tensor)
cudnn_convolution_relu_tttllll _self _weight _bias _stride _padding _dilation _groups =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_convolution_relu(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _bias)
  , *$(std::vector<int64_t>* _stride)
  , *$(std::vector<int64_t>* _padding)
  , *$(std::vector<int64_t>* _dilation)
  , $(int64_t _groups)));
  }|]

cudnn_convolution_add_relu_tttstllll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr Tensor)
cudnn_convolution_add_relu_tttstllll _self _weight _z _alpha _bias _stride _padding _dilation _groups =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_convolution_add_relu(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _z)
  , *$(at::Scalar* _alpha)
  , *$(at::Tensor* _bias)
  , *$(std::vector<int64_t>* _stride)
  , *$(std::vector<int64_t>* _padding)
  , *$(std::vector<int64_t>* _dilation)
  , $(int64_t _groups)));
  }|]

-- | at::cudnn_grid_sampler and its backward.
cudnn_grid_sampler_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cudnn_grid_sampler_tt _self _grid =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_grid_sampler(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _grid)));
  }|]

cudnn_grid_sampler_backward_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cudnn_grid_sampler_backward_ttt _self _grid _grad_output =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cudnn_grid_sampler_backward(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _grid)
  , *$(at::Tensor* _grad_output)));
  }|]
-- Cumulative max/min operators (values + indices pairs).  Machine-
-- generated from spec/Declarations.yaml; do not edit by hand.
-- The _out variants write into caller-supplied values/indices tensors;
-- the _helper variants are the in-place ATen helpers returning void.

cummax_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummax_tl _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummax(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]

cummax_out_tttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummax_out_tttl _values _indices _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummax_out(
    *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]

cummax_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummax_tn _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummax(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]

cummax_out_tttn
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummax_out_tttn _values _indices _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummax_out(
    *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]

_cummax_helper_tttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (())
_cummax_helper_tttl _self _values _indices _dim =
  [C.throwBlock| void { (at::_cummax_helper(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , $(int64_t _dim)));
  }|]

cummin_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummin_tl _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummin(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]

cummin_out_tttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummin_out_tttl _values _indices _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummin_out(
    *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]

cummin_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummin_tn _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummin(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]

cummin_out_tttn
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummin_out_tttn _values _indices _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummin_out(
    *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]

_cummin_helper_tttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (())
_cummin_helper_tttl _self _values _indices _dim =
  [C.throwBlock| void { (at::_cummin_helper(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , $(int64_t _dim)));
  }|]

-- | Shared backward for cummax/cummin.
cummaxmin_backward_tttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cummaxmin_backward_tttl _grad _input _indices _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cummaxmin_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _input)
  , *$(at::Tensor* _indices)
  , $(int64_t _dim)));
  }|]
-- at::cumprod overloads (dim as Int64 or Dimname, optional dtype,
-- optional out tensor) plus its backward.  Machine-generated from
-- spec/Declarations.yaml; do not edit by hand.

cumprod_tls
  :: Ptr Tensor
  -> Int64
  -> ScalarType
  -> IO (Ptr Tensor)
cumprod_tls _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(at::ScalarType _dtype)));
  }|]

cumprod_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cumprod_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]

cumprod_out_ttls
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> ScalarType
  -> IO (Ptr Tensor)
cumprod_out_ttls _out _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(at::ScalarType _dtype)));
  }|]

cumprod_out_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cumprod_out_ttl _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]

cumprod_tns
  :: Ptr Tensor
  -> Ptr Dimname
  -> ScalarType
  -> IO (Ptr Tensor)
cumprod_tns _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(at::ScalarType _dtype)));
  }|]

cumprod_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
cumprod_tn _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]

cumprod_out_ttns
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> ScalarType
  -> IO (Ptr Tensor)
cumprod_out_ttns _out _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(at::ScalarType _dtype)));
  }|]

cumprod_out_ttn
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
cumprod_out_ttn _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]

-- | at::cumprod_backward(grad, input, dim, output)
cumprod_backward_ttlt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cumprod_backward_ttlt _grad _input _dim _output =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _input)
  , $(int64_t _dim)
  , *$(at::Tensor* _output)));
  }|]
-- at::cumsum overloads (dim as Int64 or Dimname, optional dtype,
-- optional out tensor).  Machine-generated from spec/Declarations.yaml;
-- do not edit by hand.

cumsum_tls
  :: Ptr Tensor
  -> Int64
  -> ScalarType
  -> IO (Ptr Tensor)
cumsum_tls _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(at::ScalarType _dtype)));
  }|]

cumsum_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cumsum_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]

cumsum_out_ttls
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> ScalarType
  -> IO (Ptr Tensor)
cumsum_out_ttls _out _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(at::ScalarType _dtype)));
  }|]

cumsum_out_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cumsum_out_ttl _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]

cumsum_tns
  :: Ptr Tensor
  -> Ptr Dimname
  -> ScalarType
  -> IO (Ptr Tensor)
cumsum_tns _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(at::ScalarType _dtype)));
  }|]

cumsum_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
cumsum_tn _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]

cumsum_out_ttns
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> ScalarType
  -> IO (Ptr Tensor)
cumsum_out_ttns _out _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(at::ScalarType _dtype)));
  }|]

cumsum_out_ttn
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
cumsum_out_ttn _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- at::cumulative_trapezoid overloads: x given as a Tensor of sample
-- points or as a Scalar spacing dx, with optional dim.  Machine-
-- generated from spec/Declarations.yaml; do not edit by hand.

cumulative_trapezoid_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cumulative_trapezoid_ttl _y _x _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumulative_trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Tensor* _x)
  , $(int64_t _dim)));
  }|]

cumulative_trapezoid_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cumulative_trapezoid_tt _y _x =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumulative_trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Tensor* _x)));
  }|]

cumulative_trapezoid_tsl
  :: Ptr Tensor
  -> Ptr Scalar
  -> Int64
  -> IO (Ptr Tensor)
cumulative_trapezoid_tsl _y _dx _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumulative_trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Scalar* _dx)
  , $(int64_t _dim)));
  }|]

cumulative_trapezoid_ts
  :: Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
cumulative_trapezoid_ts _y _dx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumulative_trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Scalar* _dx)));
  }|]

cumulative_trapezoid_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
cumulative_trapezoid_t _y =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumulative_trapezoid(
    *$(at::Tensor* _y)));
  }|]
-- at::ctc_loss overloads: input/target lengths given either as
-- IntArrays or as Tensors, with optional blank, reduction and
-- zero_infinity arguments.  Machine-generated from
-- spec/Declarations.yaml; do not edit by hand.

ctc_loss_ttllllb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
ctc_loss_ttllllb _log_probs _targets _input_lengths _target_lengths _blank _reduction _zero_infinity =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , $(int64_t _blank)
  , $(int64_t _reduction)
  , $(bool _zero_infinity)));
  }|]

ctc_loss_ttllll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
ctc_loss_ttllll _log_probs _targets _input_lengths _target_lengths _blank _reduction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , $(int64_t _blank)
  , $(int64_t _reduction)));
  }|]

ctc_loss_ttlll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr Tensor)
ctc_loss_ttlll _log_probs _targets _input_lengths _target_lengths _blank =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , $(int64_t _blank)));
  }|]

ctc_loss_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr Tensor)
ctc_loss_ttll _log_probs _targets _input_lengths _target_lengths =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)));
  }|]

ctc_loss_ttttllb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
ctc_loss_ttttllb _log_probs _targets _input_lengths _target_lengths _blank _reduction _zero_infinity =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(at::Tensor* _input_lengths)
  , *$(at::Tensor* _target_lengths)
  , $(int64_t _blank)
  , $(int64_t _reduction)
  , $(bool _zero_infinity)));
  }|]

ctc_loss_ttttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
ctc_loss_ttttll _log_probs _targets _input_lengths _target_lengths _blank _reduction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(at::Tensor* _input_lengths)
  , *$(at::Tensor* _target_lengths)
  , $(int64_t _blank)
  , $(int64_t _reduction)));
  }|]
ctc_loss_ttttl
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> Int64
-> IO (Ptr Tensor)
ctc_loss_ttttl _log_probs _targets _input_lengths _target_lengths _blank =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
*$(at::Tensor* _log_probs)
, *$(at::Tensor* _targets)
, *$(at::Tensor* _input_lengths)
, *$(at::Tensor* _target_lengths)
, $(int64_t _blank)));
}|]
ctc_loss_tttt
:: Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> Ptr Tensor
-> IO (Ptr Tensor)
ctc_loss_tttt _log_probs _targets _input_lengths _target_lengths =
[C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
*$(at::Tensor* _log_probs)
, *$(at::Tensor* _targets)
, *$(at::Tensor* _input_lengths)
, *$(at::Tensor* _target_lengths)));
}|]
-- | Wrappers for the internal @at::_ctc_loss@ primitive.  Returns a
-- @new@-allocated pair of tensors (owned by the caller).  Variants with
-- fewer arguments omit trailing C++ parameters, so the C++ defaults apply.
-- | blank and zero_infinity supplied.
_ctc_loss_ttlllb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_ctc_loss_ttlllb _log_probs _targets _input_lengths _target_lengths _blank _zero_infinity =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , $(int64_t _blank)
  , $(bool _zero_infinity)));
  }|]
-- | Omits zero_infinity.
_ctc_loss_ttlll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_ctc_loss_ttlll _log_probs _targets _input_lengths _target_lengths _blank =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , $(int64_t _blank)));
  }|]
-- | Omits blank and zero_infinity.
_ctc_loss_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_ctc_loss_ttll _log_probs _targets _input_lengths _target_lengths =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)));
  }|]
-- | Wrappers for @at::_ctc_loss_backward@, the backward pass of the CTC
-- loss primitive.  @_neg_log_likelihood@ and @_log_alpha@ are the
-- intermediates produced by the forward pass.  Result is a
-- @new@-allocated tensor owned by the caller.
-- | zero_infinity supplied.
_ctc_loss_backward_tttllttlb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
_ctc_loss_backward_tttllttlb _grad _log_probs _targets _input_lengths _target_lengths _neg_log_likelihood _log_alpha _blank _zero_infinity =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_ctc_loss_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , *$(at::Tensor* _neg_log_likelihood)
  , *$(at::Tensor* _log_alpha)
  , $(int64_t _blank)
  , $(bool _zero_infinity)));
  }|]
-- | Omits zero_infinity (C++ default applies).
_ctc_loss_backward_tttllttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_ctc_loss_backward_tttllttl _grad _log_probs _targets _input_lengths _target_lengths _neg_log_likelihood _log_alpha _blank =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_ctc_loss_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , *$(at::Tensor* _neg_log_likelihood)
  , *$(at::Tensor* _log_alpha)
  , $(int64_t _blank)));
  }|]
-- | Wrappers for @at::diag_embed@ and @at::diagflat@.  Variants with
-- fewer arguments omit trailing C++ parameters (offset/dim1/dim2), so
-- the C++ defaults apply.  Each returns a @new@-allocated tensor owned
-- by the caller.
-- | @at::diag_embed(self, offset, dim1, dim2)@
diag_embed_tlll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diag_embed_tlll _self _offset _dim1 _dim2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diag_embed(
    *$(at::Tensor* _self)
  , $(int64_t _offset)
  , $(int64_t _dim1)
  , $(int64_t _dim2)));
  }|]
-- | @at::diag_embed(self, offset, dim1)@
diag_embed_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diag_embed_tll _self _offset _dim1 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diag_embed(
    *$(at::Tensor* _self)
  , $(int64_t _offset)
  , $(int64_t _dim1)));
  }|]
-- | @at::diag_embed(self, offset)@
diag_embed_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
diag_embed_tl _self _offset =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diag_embed(
    *$(at::Tensor* _self)
  , $(int64_t _offset)));
  }|]
-- | @at::diag_embed(self)@
diag_embed_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
diag_embed_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diag_embed(
    *$(at::Tensor* _self)));
  }|]
-- | @at::diagflat(self, offset)@
diagflat_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
diagflat_tl _self _offset =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagflat(
    *$(at::Tensor* _self)
  , $(int64_t _offset)));
  }|]
-- | @at::diagflat(self)@
diagflat_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
diagflat_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagflat(
    *$(at::Tensor* _self)));
  }|]
-- | Wrappers for @at::diagonal@ with integer dimension arguments.
-- Variants with fewer arguments omit trailing C++ parameters, so the
-- C++ defaults apply.  Each returns a @new@-allocated tensor owned by
-- the caller.
-- | @at::diagonal(self, offset, dim1, dim2)@
diagonal_tlll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diagonal_tlll _self _offset _dim1 _dim2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)
  , $(int64_t _offset)
  , $(int64_t _dim1)
  , $(int64_t _dim2)));
  }|]
-- | @at::diagonal(self, offset, dim1)@
diagonal_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diagonal_tll _self _offset _dim1 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)
  , $(int64_t _offset)
  , $(int64_t _dim1)));
  }|]
-- | @at::diagonal(self, offset)@
diagonal_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
diagonal_tl _self _offset =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)
  , $(int64_t _offset)));
  }|]
-- | @at::diagonal(self)@
diagonal_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
diagonal_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)));
  }|]
-- | Wrappers for @at::linalg_diagonal@ (the linalg namespace variant of
-- diagonal, operating on matrix argument @A@).  Variants with fewer
-- arguments omit trailing C++ parameters, so the C++ defaults apply.
-- | @at::linalg_diagonal(A, offset, dim1, dim2)@
linalg_diagonal_tlll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
linalg_diagonal_tlll _A _offset _dim1 _dim2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::linalg_diagonal(
    *$(at::Tensor* _A)
  , $(int64_t _offset)
  , $(int64_t _dim1)
  , $(int64_t _dim2)));
  }|]
-- | @at::linalg_diagonal(A, offset, dim1)@
linalg_diagonal_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
linalg_diagonal_tll _A _offset _dim1 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::linalg_diagonal(
    *$(at::Tensor* _A)
  , $(int64_t _offset)
  , $(int64_t _dim1)));
  }|]
-- | @at::linalg_diagonal(A, offset)@
linalg_diagonal_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
linalg_diagonal_tl _A _offset =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::linalg_diagonal(
    *$(at::Tensor* _A)
  , $(int64_t _offset)));
  }|]
-- | @at::linalg_diagonal(A)@
linalg_diagonal_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
linalg_diagonal_t _A =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::linalg_diagonal(
    *$(at::Tensor* _A)));
  }|]
-- | Wrappers for the named-dimension overload of @at::diagonal@
-- (suffix n = Dimname) and for @at::diagonal_backward@.
-- | @at::diagonal(self, outdim, dim1, dim2, offset)@ with named dims.
diagonal_tnnnl
  :: Ptr Tensor
  -> Ptr Dimname
  -> Ptr Dimname
  -> Ptr Dimname
  -> Int64
  -> IO (Ptr Tensor)
diagonal_tnnnl _self _outdim _dim1 _dim2 _offset =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _outdim)
  , *$(at::Dimname* _dim1)
  , *$(at::Dimname* _dim2)
  , $(int64_t _offset)));
  }|]
-- | Named-dim overload; omits offset (C++ default applies).
diagonal_tnnn
  :: Ptr Tensor
  -> Ptr Dimname
  -> Ptr Dimname
  -> Ptr Dimname
  -> IO (Ptr Tensor)
diagonal_tnnn _self _outdim _dim1 _dim2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _outdim)
  , *$(at::Dimname* _dim1)
  , *$(at::Dimname* _dim2)));
  }|]
-- | @at::diagonal_backward(grad_output, input_sizes, offset, dim1, dim2)@
diagonal_backward_tllll
  :: Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diagonal_backward_tllll _grad_output _input_sizes _offset _dim1 _dim2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal_backward(
    *$(at::Tensor* _grad_output)
  , *$(std::vector<int64_t>* _input_sizes)
  , $(int64_t _offset)
  , $(int64_t _dim1)
  , $(int64_t _dim2)));
  }|]
-- | Wrappers for @at::diff@ (finite differences).  Variants with fewer
-- arguments omit trailing C++ parameters (n/dim/prepend/append), so the
-- C++ defaults apply.  Result is a @new@-allocated tensor owned by the
-- caller.
-- | @at::diff(self, n, dim, prepend, append)@
diff_tlltt
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
diff_tlltt _self _n _dim _prepend _append =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff(
    *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)
  , *$(at::Tensor* _prepend)
  , *$(at::Tensor* _append)));
  }|]
-- | @at::diff(self, n, dim, prepend)@
diff_tllt
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
diff_tllt _self _n _dim _prepend =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff(
    *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)
  , *$(at::Tensor* _prepend)));
  }|]
-- | @at::diff(self, n, dim)@
diff_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diff_tll _self _n _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff(
    *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)));
  }|]
-- | @at::diff(self, n)@
diff_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
diff_tl _self _n =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff(
    *$(at::Tensor* _self)
  , $(int64_t _n)));
  }|]
-- | @at::diff(self)@
diff_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
diff_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff(
    *$(at::Tensor* _self)));
  }|]
-- | Wrappers for @at::diff_out@, the out-parameter form of @at::diff@:
-- the result is written into @_out@.  The returned pointer is still a
-- @new@-allocated tensor owned by the caller.  Variants with fewer
-- arguments omit trailing C++ parameters, so the C++ defaults apply.
-- | @at::diff_out(out, self, n, dim, prepend, append)@
diff_out_ttlltt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
diff_out_ttlltt _out _self _n _dim _prepend _append =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)
  , *$(at::Tensor* _prepend)
  , *$(at::Tensor* _append)));
  }|]
-- | @at::diff_out(out, self, n, dim, prepend)@
diff_out_ttllt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
diff_out_ttllt _out _self _n _dim _prepend =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)
  , *$(at::Tensor* _prepend)));
  }|]
-- | @at::diff_out(out, self, n, dim)@
diff_out_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diff_out_ttll _out _self _n _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)));
  }|]
-- | @at::diff_out(out, self, n)@
diff_out_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
diff_out_ttl _out _self _n =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _n)));
  }|]
-- | @at::diff_out(out, self)@
diff_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
diff_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | Wrappers for the overloads of @at::gradient@.  Spacing may be a
-- Scalar, an IntArray of dims, or a std::vector of Scalars (suffix A).
-- Each returns a @new@-allocated @std::vector<at::Tensor>@ owned by
-- the caller.  Variants with fewer arguments omit trailing C++
-- parameters, so the C++ defaults apply.
-- | @at::gradient(self, spacing, dim, edge_order)@
gradient_tsll
  :: Ptr Tensor
  -> Ptr Scalar
  -> Int64
  -> Int64
  -> IO (Ptr TensorList)
gradient_tsll _self _spacing _dim _edge_order =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _spacing)
  , $(int64_t _dim)
  , $(int64_t _edge_order)));
  }|]
-- | @at::gradient(self, spacing, dim)@
gradient_tsl
  :: Ptr Tensor
  -> Ptr Scalar
  -> Int64
  -> IO (Ptr TensorList)
gradient_tsl _self _spacing _dim =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _spacing)
  , $(int64_t _dim)));
  }|]
-- | @at::gradient(self)@
gradient_t
  :: Ptr Tensor
  -> IO (Ptr TensorList)
gradient_t _self =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)));
  }|]
-- | @at::gradient(self, dim, edge_order)@ with IntArray dims.
gradient_tll
  :: Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr TensorList)
gradient_tll _self _dim _edge_order =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(int64_t _edge_order)));
  }|]
-- | @at::gradient(self, dim)@ with IntArray dims.
gradient_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr TensorList)
gradient_tl _self _dim =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)));
  }|]
-- | @at::gradient(self, spacing)@ with per-dimension scalar spacing.
gradient_tA
  :: Ptr Tensor
  -> Ptr (StdVector Scalar)
  -> IO (Ptr TensorList)
gradient_tA _self _spacing =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Scalar>* _spacing)));
  }|]
-- | Wrappers for the overloads of @at::div@ and @at::div_out@.
-- @_rounding_mode@ (a std::string) selects the C++ rounding-mode
-- overload.  Each returns a @new@-allocated tensor owned by the caller;
-- the @_out@ forms additionally write the result into @_out@.
-- | @at::div(self, other)@, tensor / tensor.
div_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
div_tt _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]
-- | @at::div_out(out, self, other)@
div_out_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
div_out_ttt _out _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]
-- | @at::div(self, other, rounding_mode)@, tensor / tensor.
div_tts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr StdString
  -> IO (Ptr Tensor)
div_tts _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::string* _rounding_mode)));
  }|]
-- | @at::div_out(out, self, other, rounding_mode)@
div_out_ttts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr StdString
  -> IO (Ptr Tensor)
div_out_ttts _out _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::string* _rounding_mode)));
  }|]
-- | @at::div(self, other)@, tensor / scalar.
div_ts
  :: Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
div_ts _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _other)));
  }|]
-- | @at::div(self, other, rounding_mode)@, tensor / scalar.
div_tss
  :: Ptr Tensor
  -> Ptr Scalar
  -> Ptr StdString
  -> IO (Ptr Tensor)
div_tss _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _other)
  , *$(std::string* _rounding_mode)));
  }|]
-- | Wrappers for the overloads of @at::divide@ and @at::divide_out@.
-- Same marshalling pattern as the div_* wrappers in this module: each
-- call returns a @new@-allocated tensor owned by the caller.
-- | @at::divide(self, other)@, tensor / tensor.
divide_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
divide_tt _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]
-- | @at::divide_out(out, self, other)@
divide_out_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
divide_out_ttt _out _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]
-- | @at::divide(self, other)@, tensor / scalar.
divide_ts
  :: Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
divide_ts _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _other)));
  }|]
-- | @at::divide(self, other, rounding_mode)@, tensor / tensor.
divide_tts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr StdString
  -> IO (Ptr Tensor)
divide_tts _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::string* _rounding_mode)));
  }|]
-- | @at::divide_out(out, self, other, rounding_mode)@
divide_out_ttts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr StdString
  -> IO (Ptr Tensor)
divide_out_ttts _out _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::string* _rounding_mode)));
  }|]
-- | @at::divide(self, other, rounding_mode)@, tensor / scalar.
divide_tss
  :: Ptr Tensor
  -> Ptr Scalar
  -> Ptr StdString
  -> IO (Ptr Tensor)
divide_tss _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _other)
  , *$(std::string* _rounding_mode)));
  }|]
-- | Wrappers for @at::true_divide@ and @at::true_divide_out@.  Each
-- returns a @new@-allocated tensor owned by the caller.
-- | @at::true_divide(self, other)@, tensor / tensor.
true_divide_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
true_divide_tt _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::true_divide(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]
-- | @at::true_divide_out(out, self, other)@
true_divide_out_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
true_divide_out_ttt _out _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::true_divide_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]
-- | @at::true_divide(self, other)@, tensor / scalar.
true_divide_ts
  :: Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
true_divide_ts _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::true_divide(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _other)));
  }|]
-- | Wrappers for @at::dot@ / @at::dot_out@ and @at::vdot@ /
-- @at::vdot_out@.  Each returns a @new@-allocated tensor owned by the
-- caller; the @_out@ forms additionally write the result into @_out@.
-- | @at::dot(self, tensor)@
dot_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
dot_tt _self _tensor =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::dot(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _tensor)));
  }|]
-- | @at::dot_out(out, self, tensor)@
dot_out_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
dot_out_ttt _out _self _tensor =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::dot_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _tensor)));
  }|]
-- | @at::vdot(self, other)@
vdot_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
vdot_tt _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::vdot(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]
-- | @at::vdot_out(out, self, other)@
vdot_out_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
vdot_out_ttt _out _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::vdot_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]
-- | @at::einsum(equation, tensors)@: Einstein-summation over a list of
-- tensors according to the given equation string.  Returns a
-- @new@-allocated tensor owned by the caller.
einsum_sl
  :: Ptr StdString
  -> Ptr TensorList
  -> IO (Ptr Tensor)
einsum_sl _equation _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::einsum(
    *$(std::string* _equation)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Wrappers for @at::embedding@.  Variants with fewer arguments omit
-- trailing C++ parameters (padding_idx / scale_grad_by_freq / sparse),
-- so the C++ defaults apply.  Each returns a @new@-allocated tensor
-- owned by the caller.
-- | @at::embedding(weight, indices, padding_idx, scale_grad_by_freq, sparse)@
embedding_ttlbb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
embedding_ttlbb _weight _indices _padding_idx _scale_grad_by_freq _sparse =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , $(int64_t _padding_idx)
  , $(bool _scale_grad_by_freq)
  , $(bool _sparse)));
  }|]
-- | Omits sparse.
embedding_ttlb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
embedding_ttlb _weight _indices _padding_idx _scale_grad_by_freq =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , $(int64_t _padding_idx)
  , $(bool _scale_grad_by_freq)));
  }|]
-- | Omits scale_grad_by_freq and sparse.
embedding_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
embedding_ttl _weight _indices _padding_idx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , $(int64_t _padding_idx)));
  }|]
-- | Omits padding_idx, scale_grad_by_freq and sparse.
embedding_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
embedding_tt _weight _indices =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)));
  }|]
-- | Wrappers for the embedding gradient / maintenance functions:
-- @at::embedding_backward@, @at::embedding_dense_backward@,
-- @at::embedding_renorm_@ (in-place, note the trailing underscore in
-- the C++ name) and @at::embedding_sparse_backward@.  Each returns a
-- @new@-allocated tensor owned by the caller.
-- | @at::embedding_backward(grad, indices, num_weights, padding_idx, scale_grad_by_freq, sparse)@
embedding_backward_ttllbb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
embedding_backward_ttllbb _grad _indices _num_weights _padding_idx _scale_grad_by_freq _sparse =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , $(int64_t _num_weights)
  , $(int64_t _padding_idx)
  , $(bool _scale_grad_by_freq)
  , $(bool _sparse)));
  }|]
-- | @at::embedding_dense_backward(grad_output, indices, num_weights, padding_idx, scale_grad_by_freq)@
embedding_dense_backward_ttllb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
embedding_dense_backward_ttllb _grad_output _indices _num_weights _padding_idx _scale_grad_by_freq =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding_dense_backward(
    *$(at::Tensor* _grad_output)
  , *$(at::Tensor* _indices)
  , $(int64_t _num_weights)
  , $(int64_t _padding_idx)
  , $(bool _scale_grad_by_freq)));
  }|]
-- | @at::embedding_renorm_(self, indices, max_norm, norm_type)@ —
-- in-place on @_self@ (C++ name ends in underscore).
embedding_renorm__ttdd
  :: Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> CDouble
  -> IO (Ptr Tensor)
embedding_renorm__ttdd _self _indices _max_norm _norm_type =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding_renorm_(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _indices)
  , $(double _max_norm)
  , $(double _norm_type)));
  }|]
-- | @at::embedding_sparse_backward(grad, indices, num_weights, padding_idx, scale_grad_by_freq)@
embedding_sparse_backward_ttllb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
embedding_sparse_backward_ttllb _grad _indices _num_weights _padding_idx _scale_grad_by_freq =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding_sparse_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , $(int64_t _num_weights)
  , $(int64_t _padding_idx)
  , $(bool _scale_grad_by_freq)));
  }|]
-- | Wrappers for @at::_embedding_bag_forward_only@.  All variants take
-- (weight, indices, offsets) plus a progressively shorter tail of
-- (scale_grad_by_freq, mode, sparse, per_sample_weights,
-- include_last_offset, padding_idx); omitted trailing parameters use
-- the C++ defaults.  Each returns a @new@-allocated 4-tuple of tensors
-- owned by the caller.
-- | Full argument list, through padding_idx.
_embedding_bag_forward_only_tttblbtbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttblbtbl _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset _padding_idx =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)
  , $(int64_t _padding_idx)));
  }|]
-- | Through include_last_offset.
_embedding_bag_forward_only_tttblbtb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttblbtb _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)));
  }|]
-- | Through per_sample_weights.
_embedding_bag_forward_only_tttblbt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttblbt _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)));
  }|]
-- | Through sparse.
_embedding_bag_forward_only_tttblb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttblb _weight _indices _offsets _scale_grad_by_freq _mode _sparse =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)));
  }|]
-- | Through mode.
_embedding_bag_forward_only_tttbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttbl _weight _indices _offsets _scale_grad_by_freq _mode =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)));
  }|]
-- | Through scale_grad_by_freq.
_embedding_bag_forward_only_tttb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttb _weight _indices _offsets _scale_grad_by_freq =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)));
  }|]
-- | Required arguments only.
_embedding_bag_forward_only_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_ttt _weight _indices _offsets =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)));
  }|]
-- | @at::_rowwise_prune(weight, mask, compressed_indices_dtype)@:
-- returns a @new@-allocated pair of tensors owned by the caller.
_rowwise_prune_tts
  :: Ptr Tensor
  -> Ptr Tensor
  -> ScalarType
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_rowwise_prune_tts _weight _mask _compressed_indices_dtype =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_rowwise_prune(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _mask)
  , $(at::ScalarType _compressed_indices_dtype)));
  }|]
-- | @at::row_stack(tensors)@
row_stack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
row_stack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::row_stack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | @at::row_stack_out(out, tensors)@ — result written into @_out@.
row_stack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
row_stack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::row_stack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- | Wrappers for @at::embedding_bag@.  All variants take
-- (weight, indices, offsets) plus a progressively shorter tail of
-- (scale_grad_by_freq, mode, sparse, per_sample_weights,
-- include_last_offset, padding_idx); omitted trailing parameters use
-- the C++ defaults.  Each returns a @new@-allocated 4-tuple of tensors
-- owned by the caller.
-- | Through include_last_offset.
embedding_bag_tttblbtb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttblbtb _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)));
  }|]
-- | Through per_sample_weights.
embedding_bag_tttblbt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttblbt _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)));
  }|]
-- | Through sparse.
embedding_bag_tttblb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttblb _weight _indices _offsets _scale_grad_by_freq _mode _sparse =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)));
  }|]
-- | Through mode.
embedding_bag_tttbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttbl _weight _indices _offsets _scale_grad_by_freq _mode =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)));
  }|]
-- | Through scale_grad_by_freq.
embedding_bag_tttb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttb _weight _indices _offsets _scale_grad_by_freq =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)));
  }|]
-- | Required arguments only.
embedding_bag_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_ttt _weight _indices _offsets =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)));
  }|]
-- | Full argument list, through padding_idx.
embedding_bag_tttblbtbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttblbtbl _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset _padding_idx =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)
  , $(int64_t _padding_idx)));
  }|]
-- | Wrappers for the internal @at::_embedding_bag@ primitive.  Same
-- argument layout as the embedding_bag_* wrappers in this module:
-- (weight, indices, offsets) plus a progressively shorter tail of
-- optional parameters, with omitted trailing parameters using the C++
-- defaults.  Each returns a @new@-allocated 4-tuple of tensors owned by
-- the caller.
-- | Full argument list, through padding_idx.
_embedding_bag_tttblbtbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttblbtbl _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset _padding_idx =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)
  , $(int64_t _padding_idx)));
  }|]
-- | Through include_last_offset.
_embedding_bag_tttblbtb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttblbtb _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)));
  }|]
-- | Through per_sample_weights.
_embedding_bag_tttblbt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttblbt _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)));
  }|]
-- | Through sparse.
_embedding_bag_tttblb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttblb _weight _indices _offsets _scale_grad_by_freq _mode _sparse =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)));
  }|]
-- | Through mode.
_embedding_bag_tttbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttbl _weight _indices _offsets _scale_grad_by_freq _mode =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)));
  }|]
-- | Through scale_grad_by_freq.
_embedding_bag_tttb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttb _weight _indices _offsets _scale_grad_by_freq =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)));
  }|]
-- | Required arguments only.
_embedding_bag_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_ttt _weight _indices _offsets =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)));
  }|]
-- | Binding to @at::_embedding_bag_backward@ with all twelve
-- arguments, including the trailing @padding_idx@.  Returns an owning
-- pointer to a @new@-allocated result tensor.
_embedding_bag_backward_ttttttlblbtl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_embedding_bag_backward_ttttttlblbtl _grad _indices _offsets _offset2bag _bag_size _maximum_indices _num_weights _scale_grad_by_freq _mode _sparse _per_sample_weights _padding_idx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , *$(at::Tensor* _maximum_indices)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(int64_t _padding_idx)));
  }|]
-- | @at::_embedding_bag_backward@ overload without the trailing
-- @padding_idx@ argument.
_embedding_bag_backward_ttttttlblbt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> IO (Ptr Tensor)
_embedding_bag_backward_ttttttlblbt _grad _indices _offsets _offset2bag _bag_size _maximum_indices _num_weights _scale_grad_by_freq _mode _sparse _per_sample_weights =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , *$(at::Tensor* _maximum_indices)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)));
  }|]
-- | Binding to @at::_embedding_bag_sparse_backward@ with the trailing
-- @padding_idx@ argument.  Returns an owning pointer to a
-- @new@-allocated result tensor.
_embedding_bag_sparse_backward_tttttlbltl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_embedding_bag_sparse_backward_tttttlbltl _grad _indices _offsets _offset2bag _bag_size _num_weights _scale_grad_by_freq _mode _per_sample_weights _padding_idx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_sparse_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , *$(at::Tensor* _per_sample_weights)
  , $(int64_t _padding_idx)));
  }|]
-- | @at::_embedding_bag_sparse_backward@ overload without
-- @padding_idx@.
_embedding_bag_sparse_backward_tttttlblt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
_embedding_bag_sparse_backward_tttttlblt _grad _indices _offsets _offset2bag _bag_size _num_weights _scale_grad_by_freq _mode _per_sample_weights =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_sparse_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , *$(at::Tensor* _per_sample_weights)));
  }|]
-- | Binding to @at::_embedding_bag_dense_backward@ with the trailing
-- @padding_idx@ argument.  Note: unlike the sparse variant this one
-- takes @maximum_indices@ instead of @offsets@ (matching the ATen
-- signature).  Returns an owning pointer to a @new@-allocated tensor.
_embedding_bag_dense_backward_tttttlbltl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_embedding_bag_dense_backward_tttttlbltl _grad _indices _offset2bag _bag_size _maximum_indices _num_weights _scale_grad_by_freq _mode _per_sample_weights _padding_idx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_dense_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , *$(at::Tensor* _maximum_indices)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , *$(at::Tensor* _per_sample_weights)
  , $(int64_t _padding_idx)));
  }|]
-- | @at::_embedding_bag_dense_backward@ overload without
-- @padding_idx@.
_embedding_bag_dense_backward_tttttlblt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
_embedding_bag_dense_backward_tttttlblt _grad _indices _offset2bag _bag_size _maximum_indices _num_weights _scale_grad_by_freq _mode _per_sample_weights =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_dense_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , *$(at::Tensor* _maximum_indices)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , *$(at::Tensor* _per_sample_weights)));
  }|]
-- | Binding to @at::_embedding_bag_per_sample_weights_backward@ with
-- the trailing @padding_idx@ argument.  Returns an owning pointer to
-- a @new@-allocated result tensor.
_embedding_bag_per_sample_weights_backward_tttttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
_embedding_bag_per_sample_weights_backward_tttttll _grad _weight _indices _offsets _offset2bag _mode _padding_idx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_per_sample_weights_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , $(int64_t _mode)
  , $(int64_t _padding_idx)));
  }|]
-- | @at::_embedding_bag_per_sample_weights_backward@ overload without
-- @padding_idx@.
_embedding_bag_per_sample_weights_backward_tttttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_embedding_bag_per_sample_weights_backward_tttttl _grad _weight _indices _offsets _offset2bag _mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_per_sample_weights_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , $(int64_t _mode)));
  }|]
-- | Binding to @at::empty@ with named dimensions: size, names,
-- options, memory format (suffix: l = IntArray, N = DimnameList,
-- o = TensorOptions, M = MemoryFormat).  Returns an owning pointer to
-- a @new@-allocated uninitialized tensor.
empty_lNoM
  :: Ptr IntArray
  -> Ptr DimnameList
  -> Ptr TensorOptions
  -> MemoryFormat
  -> IO (Ptr Tensor)
empty_lNoM _size _names _options _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)
  , *$(std::vector<at::Dimname>* _names)
  , *$(at::TensorOptions* _options)
  , $(at::MemoryFormat _memory_format)));
  }|]
-- | @at::empty@ (named) without the memory-format argument.
empty_lNo
  :: Ptr IntArray
  -> Ptr DimnameList
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
empty_lNo _size _names _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)
  , *$(std::vector<at::Dimname>* _names)
  , *$(at::TensorOptions* _options)));
  }|]
-- | @at::empty@ (named) with default tensor options.
empty_lN
  :: Ptr IntArray
  -> Ptr DimnameList
  -> IO (Ptr Tensor)
empty_lN _size _names =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)
  , *$(std::vector<at::Dimname>* _names)));
  }|]
-- | @at::empty@ (unnamed) with size, options and memory format.
empty_loM
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> MemoryFormat
  -> IO (Ptr Tensor)
empty_loM _size _options _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)
  , $(at::MemoryFormat _memory_format)));
  }|]
-- | @at::empty@ with size and options.
empty_lo
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
empty_lo _size _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)));
  }|]
-- | @at::empty@ with size only (default options).
empty_l
  :: Ptr IntArray
  -> IO (Ptr Tensor)
empty_l _size =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)));
  }|]
-- | Binding to @at::_empty_affine_quantized@ with size, options,
-- scale, zero point and memory format.  Returns an owning pointer to
-- a @new@-allocated tensor.
_empty_affine_quantized_lodlM
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> CDouble
  -> Int64
  -> MemoryFormat
  -> IO (Ptr Tensor)
_empty_affine_quantized_lodlM _size _options _scale _zero_point _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)
  , $(double _scale)
  , $(int64_t _zero_point)
  , $(at::MemoryFormat _memory_format)));
  }|]
-- | @at::_empty_affine_quantized@ without the memory-format argument.
_empty_affine_quantized_lodl
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> CDouble
  -> Int64
  -> IO (Ptr Tensor)
_empty_affine_quantized_lodl _size _options _scale _zero_point =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)
  , $(double _scale)
  , $(int64_t _zero_point)));
  }|]
-- | @at::_empty_affine_quantized@ up to @scale@.
_empty_affine_quantized_lod
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> CDouble
  -> IO (Ptr Tensor)
_empty_affine_quantized_lod _size _options _scale =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)
  , $(double _scale)));
  }|]
-- | @at::_empty_affine_quantized@ with size and options only.
_empty_affine_quantized_lo
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
_empty_affine_quantized_lo _size _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)));
  }|]
-- | @at::_empty_affine_quantized@ with size only.
_empty_affine_quantized_l
  :: Ptr IntArray
  -> IO (Ptr Tensor)
_empty_affine_quantized_l _size =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_affine_quantized(
    *$(std::vector<int64_t>* _size)));
  }|]
-- | Binding to @at::_empty_per_channel_affine_quantized@ with size,
-- per-channel scales/zero-points tensors, quantization axis, options
-- and memory format.  Returns an owning pointer to a @new@-allocated
-- tensor.
_empty_per_channel_affine_quantized_lttloM
  :: Ptr IntArray
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Ptr TensorOptions
  -> MemoryFormat
  -> IO (Ptr Tensor)
_empty_per_channel_affine_quantized_lttloM _size _scales _zero_points _axis _options _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_per_channel_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _scales)
  , *$(at::Tensor* _zero_points)
  , $(int64_t _axis)
  , *$(at::TensorOptions* _options)
  , $(at::MemoryFormat _memory_format)));
  }|]
-- | @at::_empty_per_channel_affine_quantized@ without memory format.
_empty_per_channel_affine_quantized_lttlo
  :: Ptr IntArray
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
_empty_per_channel_affine_quantized_lttlo _size _scales _zero_points _axis _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_per_channel_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _scales)
  , *$(at::Tensor* _zero_points)
  , $(int64_t _axis)
  , *$(at::TensorOptions* _options)));
  }|]
-- | @at::_empty_per_channel_affine_quantized@ with default options.
_empty_per_channel_affine_quantized_lttl
  :: Ptr IntArray
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_empty_per_channel_affine_quantized_lttl _size _scales _zero_points _axis =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_per_channel_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _scales)
  , *$(at::Tensor* _zero_points)
  , $(int64_t _axis)));
  }|]
-- | Binding to @at::empty_quantized@ with size, an existing quantized
-- tensor (whose quantization parameters are reused — per ATen docs,
-- confirm), options and memory format.  Returns an owning pointer to
-- a @new@-allocated tensor.
empty_quantized_ltoM
  :: Ptr IntArray
  -> Ptr Tensor
  -> Ptr TensorOptions
  -> MemoryFormat
  -> IO (Ptr Tensor)
empty_quantized_ltoM _size _qtensor _options _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _qtensor)
  , *$(at::TensorOptions* _options)
  , $(at::MemoryFormat _memory_format)));
  }|]
-- | @at::empty_quantized@ without the memory-format argument.
empty_quantized_lto
  :: Ptr IntArray
  -> Ptr Tensor
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
empty_quantized_lto _size _qtensor _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _qtensor)
  , *$(at::TensorOptions* _options)));
  }|]
-- | @at::empty_quantized@ with default options.
empty_quantized_lt
  :: Ptr IntArray
  -> Ptr Tensor
  -> IO (Ptr Tensor)
empty_quantized_lt _size _qtensor =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _qtensor)));
  }|]
-- | Binding to the out-variant @at::empty_out(out, size,
-- memory_format)@.  The first argument is the destination tensor; a
-- @new@-allocated copy of the returned reference is handed back.
empty_out_tlM
  :: Ptr Tensor
  -> Ptr IntArray
  -> MemoryFormat
  -> IO (Ptr Tensor)
empty_out_tlM _out _size _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_out(
    *$(at::Tensor* _out)
  , *$(std::vector<int64_t>* _size)
  , $(at::MemoryFormat _memory_format)));
  }|]
-- | @at::empty_out@ without the memory-format argument.
empty_out_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
empty_out_tl _out _size =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_out(
    *$(at::Tensor* _out)
  , *$(std::vector<int64_t>* _size)));
  }|]
-- | Binding to @at::empty_like(self, options, memory_format)@.
-- Returns an owning pointer to a @new@-allocated tensor.
empty_like_toM
  :: Ptr Tensor
  -> Ptr TensorOptions
  -> MemoryFormat
  -> IO (Ptr Tensor)
empty_like_toM _self _options _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_like(
    *$(at::Tensor* _self)
  , *$(at::TensorOptions* _options)
  , $(at::MemoryFormat _memory_format)));
  }|]
-- | @at::empty_like@ without the memory-format argument.
empty_like_to
  :: Ptr Tensor
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
empty_like_to _self _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_like(
    *$(at::Tensor* _self)
  , *$(at::TensorOptions* _options)));
  }|]
-- | @at::empty_like@ with default options.
empty_like_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
empty_like_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_like(
    *$(at::Tensor* _self)));
  }|]
-- | Binding to @at::empty_strided(size, stride, options)@.  Returns
-- an owning pointer to a @new@-allocated tensor.
empty_strided_llo
  :: Ptr IntArray
  -> Ptr IntArray
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
empty_strided_llo _size _stride _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_strided(
    *$(std::vector<int64_t>* _size)
  , *$(std::vector<int64_t>* _stride)
  , *$(at::TensorOptions* _options)));
  }|]
-- | @at::empty_strided@ with default options.
empty_strided_ll
  :: Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr Tensor)
empty_strided_ll _size _stride =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_strided(
    *$(std::vector<int64_t>* _size)
  , *$(std::vector<int64_t>* _stride)));
  }|]
-- | Unary element-wise bindings.  Each @x_t@ calls @at::x@ and
-- returns a @new@-allocated result tensor; each @x__t@ calls the
-- in-place ATen variant @at::x_@ (note the trailing underscore in the
-- C++ name); each @x_out_tt@ calls @at::x_out(out, self)@ writing
-- into @out@.  In all cases the Haskell side hands back an owning
-- pointer to a @new@ tensor wrapping the C++ return value.

-- | @at::erf@ (error function).
erf_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
erf_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erf(
    *$(at::Tensor* _self)));
  }|]
-- | In-place @at::erf_@.
erf__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
erf__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erf_(
    *$(at::Tensor* _self)));
  }|]
-- | Out-variant @at::erf_out(out, self)@.
erf_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
erf_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erf_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | @at::erfc@ (complementary error function).
erfc_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
erfc_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erfc(
    *$(at::Tensor* _self)));
  }|]
-- | In-place @at::erfc_@.
erfc__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
erfc__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erfc_(
    *$(at::Tensor* _self)));
  }|]
-- | Out-variant @at::erfc_out(out, self)@.
erfc_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
erfc_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erfc_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | @at::exp@.
exp_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
exp_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp(
    *$(at::Tensor* _self)));
  }|]
-- | In-place @at::exp_@.
exp__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
exp__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp_(
    *$(at::Tensor* _self)));
  }|]
-- | Out-variant @at::exp_out(out, self)@.
exp_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
exp_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | @at::exp2@ (base-2 exponential).
exp2_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
exp2_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp2(
    *$(at::Tensor* _self)));
  }|]
-- | In-place @at::exp2_@.
exp2__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
exp2__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp2_(
    *$(at::Tensor* _self)));
  }|]
-- | Out-variant @at::exp2_out(out, self)@.
exp2_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
exp2_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp2_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | @at::expm1@ (exp(x) - 1).
expm1_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
expm1_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::expm1(
    *$(at::Tensor* _self)));
  }|]
-- | In-place @at::expm1_@.
expm1__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
expm1__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::expm1_(
    *$(at::Tensor* _self)));
  }|]
-- | Out-variant @at::expm1_out(out, self)@.
expm1_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
expm1_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::expm1_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- | Binding to the square identity-matrix constructor
-- @at::eye(n, options)@.  Returns an owning pointer to a
-- @new@-allocated tensor.
eye_lo
  :: Int64
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
eye_lo _n _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::eye(
    $(int64_t _n)
  , *$(at::TensorOptions* _options)));
  }|]
-- | @at::eye(n)@ with default options.
eye_l
  :: Int64
  -> IO (Ptr Tensor)
eye_l _n =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::eye(
    $(int64_t _n)));
  }|]
-- | Rectangular @at::eye(n, m, options)@.
eye_llo
  :: Int64
  -> Int64
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
eye_llo _n _m _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::eye(
    $(int64_t _n)
  , $(int64_t _m)
  , *$(at::TensorOptions* _options)));
  }|]
| null | https://raw.githubusercontent.com/hasktorch/hasktorch/6233c173e1dd9fd7218fd13b104da15fc457f67e/libtorch-ffi/src/Torch/Internal/Unmanaged/Native/Native2.hs | haskell | generated by using spec/Declarations.yaml
# LANGUAGE OverloadedStrings # |
# LANGUAGE DataKinds #
# LANGUAGE PolyKinds #
# LANGUAGE TemplateHaskell #
# LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
module Torch.Internal.Unmanaged.Native.Native2 where
import Foreign.C.String
import Foreign.C.Types
import Foreign
import Torch.Internal.Type
import qualified Language.C.Inline.Cpp as C
import qualified Language.C.Inline.Cpp.Unsafe as C
import qualified Language.C.Inline.Context as C
import qualified Language.C.Types as C
C.context $ C.cppCtx <> mempty { C.ctxTypesTable = typeTable }
C.include "<vector>"
C.include "<ATen/Tensor.h>"
C.include "<ATen/Functions.h>"
-- | Binding to @at::cosine_embedding_loss(input1, input2, target,
-- margin, reduction)@.  Returns an owning pointer to a
-- @new@-allocated result tensor.
cosine_embedding_loss_tttdl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> Int64
  -> IO (Ptr Tensor)
cosine_embedding_loss_tttdl _input1 _input2 _target _margin _reduction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cosine_embedding_loss(
    *$(at::Tensor* _input1)
  , *$(at::Tensor* _input2)
  , *$(at::Tensor* _target)
  , $(double _margin)
  , $(int64_t _reduction)));
  }|]
-- | @at::cosine_embedding_loss@ without the reduction argument.
cosine_embedding_loss_tttd
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> IO (Ptr Tensor)
cosine_embedding_loss_tttd _input1 _input2 _target _margin =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cosine_embedding_loss(
    *$(at::Tensor* _input1)
  , *$(at::Tensor* _input2)
  , *$(at::Tensor* _target)
  , $(double _margin)));
  }|]
-- | @at::cosine_embedding_loss@ with default margin and reduction.
cosine_embedding_loss_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cosine_embedding_loss_ttt _input1 _input2 _target =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cosine_embedding_loss(
    *$(at::Tensor* _input1)
  , *$(at::Tensor* _input2)
  , *$(at::Tensor* _target)));
  }|]
-- | Binding to @at::count_nonzero(self, dim)@ restricted to the given
-- dimensions.  Returns an owning pointer to a @new@-allocated tensor.
count_nonzero_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
count_nonzero_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::count_nonzero(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)));
  }|]
-- | @at::count_nonzero@ over the whole tensor.
count_nonzero_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
count_nonzero_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::count_nonzero(
    *$(at::Tensor* _self)));
  }|]
-- | Binding to @at::cov(self, correction, fweights, aweights)@.
-- Returns an owning pointer to a @new@-allocated covariance tensor.
cov_tltt
  :: Ptr Tensor
  -> Int64
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cov_tltt _self _correction _fweights _aweights =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cov(
    *$(at::Tensor* _self)
  , $(int64_t _correction)
  , *$(at::Tensor* _fweights)
  , *$(at::Tensor* _aweights)));
  }|]
-- | @at::cov@ without @aweights@.
cov_tlt
  :: Ptr Tensor
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cov_tlt _self _correction _fweights =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cov(
    *$(at::Tensor* _self)
  , $(int64_t _correction)
  , *$(at::Tensor* _fweights)));
  }|]
-- | @at::cov@ with correction only.
cov_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cov_tl _self _correction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cov(
    *$(at::Tensor* _self)
  , $(int64_t _correction)));
  }|]
-- | @at::cov@ with all defaults.
cov_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
cov_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cov(
    *$(at::Tensor* _self)));
  }|]
-- | Binding to @at::corrcoef@ (Pearson correlation-coefficient
-- matrix).
corrcoef_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
corrcoef_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::corrcoef(
    *$(at::Tensor* _self)));
  }|]
-- | Binding to @at::cudnn_affine_grid_generator(theta, N, C, H, W)@
-- (cuDNN backend; requires a CUDA-enabled libtorch at runtime —
-- not enforced here).  Returns an owning pointer to a
-- @new@-allocated grid tensor.
cudnn_affine_grid_generator_tllll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
cudnn_affine_grid_generator_tllll _theta _N _C _H _W =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_affine_grid_generator(
    *$(at::Tensor* _theta)
  , $(int64_t _N)
  , $(int64_t _C)
  , $(int64_t _H)
  , $(int64_t _W)));
  }|]
-- | Backward pass of the affine grid generator:
-- @at::cudnn_affine_grid_generator_backward(grad, N, C, H, W)@.
cudnn_affine_grid_generator_backward_tllll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
cudnn_affine_grid_generator_backward_tllll _grad _N _C _H _W =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_affine_grid_generator_backward(
    *$(at::Tensor* _grad)
  , $(int64_t _N)
  , $(int64_t _C)
  , $(int64_t _H)
  , $(int64_t _W)));
  }|]
-- | Binding to @at::cudnn_batch_norm(input, weight, bias,
-- running_mean, running_var, training, exponential_average_factor,
-- epsilon)@.  Returns an owning pointer to a @new@-allocated 4-tuple
-- (output plus saved statistics / reserve space — per cuDNN docs,
-- confirm).
cudnn_batch_norm_tttttbdd
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> CDouble
  -> CDouble
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
cudnn_batch_norm_tttttbdd _input _weight _bias _running_mean _running_var _training _exponential_average_factor _epsilon =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::cudnn_batch_norm(
    *$(at::Tensor* _input)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _bias)
  , *$(at::Tensor* _running_mean)
  , *$(at::Tensor* _running_var)
  , $(bool _training)
  , $(double _exponential_average_factor)
  , $(double _epsilon)));
  }|]
-- | Backward pass: @at::cudnn_batch_norm_backward@ taking the saved
-- mean/variance and the reserve-space tensor produced by the forward
-- call; returns gradients for input, weight and bias as a 3-tuple.
cudnn_batch_norm_backward_tttttttdt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor)))
cudnn_batch_norm_backward_tttttttdt _input _grad_output _weight _running_mean _running_var _save_mean _save_var _epsilon _reserveSpace =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor>(at::cudnn_batch_norm_backward(
    *$(at::Tensor* _input)
  , *$(at::Tensor* _grad_output)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _running_mean)
  , *$(at::Tensor* _running_var)
  , *$(at::Tensor* _save_mean)
  , *$(at::Tensor* _save_var)
  , $(double _epsilon)
  , *$(at::Tensor* _reserveSpace)));
  }|]
-- | Binding to @at::cudnn_convolution(self, weight, padding, stride,
-- dilation, groups, benchmark, deterministic, allow_tf32)@.  Returns
-- an owning pointer to a @new@-allocated result tensor.
cudnn_convolution_ttllllbbb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> CBool
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
cudnn_convolution_ttllllbbb _self _weight _padding _stride _dilation _groups _benchmark _deterministic _allow_tf32 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_convolution(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _weight)
  , *$(std::vector<int64_t>* _padding)
  , *$(std::vector<int64_t>* _stride)
  , *$(std::vector<int64_t>* _dilation)
  , $(int64_t _groups)
  , $(bool _benchmark)
  , $(bool _deterministic)
  , $(bool _allow_tf32)));
  }|]
-- | Transposed convolution: @at::cudnn_convolution_transpose@, which
-- additionally takes @output_padding@ between @padding@ and @stride@.
cudnn_convolution_transpose_ttlllllbbb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> CBool
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
cudnn_convolution_transpose_ttlllllbbb _self _weight _padding _output_padding _stride _dilation _groups _benchmark _deterministic _allow_tf32 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_convolution_transpose(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _weight)
  , *$(std::vector<int64_t>* _padding)
  , *$(std::vector<int64_t>* _output_padding)
  , *$(std::vector<int64_t>* _stride)
  , *$(std::vector<int64_t>* _dilation)
  , $(int64_t _groups)
  , $(bool _benchmark)
  , $(bool _deterministic)
  , $(bool _allow_tf32)));
  }|]
-- | Fused convolution + ReLU:
-- @at::cudnn_convolution_relu(self, weight, bias, stride, padding,
-- dilation, groups)@.  Note the stride/padding order differs from
-- plain @cudnn_convolution@ (stride first) — this mirrors the ATen
-- signature.
cudnn_convolution_relu_tttllll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr Tensor)
cudnn_convolution_relu_tttllll _self _weight _bias _stride _padding _dilation _groups =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_convolution_relu(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _bias)
  , *$(std::vector<int64_t>* _stride)
  , *$(std::vector<int64_t>* _padding)
  , *$(std::vector<int64_t>* _dilation)
  , $(int64_t _groups)));
  }|]
-- | Fused convolution + scaled residual add + ReLU:
-- @at::cudnn_convolution_add_relu(self, weight, z, alpha, bias,
-- stride, padding, dilation, groups)@ where @z@ is the residual input
-- and @alpha@ its scalar multiplier.
cudnn_convolution_add_relu_tttstllll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Scalar
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr Tensor)
cudnn_convolution_add_relu_tttstllll _self _weight _z _alpha _bias _stride _padding _dilation _groups =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_convolution_add_relu(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _z)
  , *$(at::Scalar* _alpha)
  , *$(at::Tensor* _bias)
  , *$(std::vector<int64_t>* _stride)
  , *$(std::vector<int64_t>* _padding)
  , *$(std::vector<int64_t>* _dilation)
  , $(int64_t _groups)));
  }|]
-- | Binding to @at::cudnn_grid_sampler(self, grid)@.  Returns an
-- owning pointer to a @new@-allocated output tensor.
cudnn_grid_sampler_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cudnn_grid_sampler_tt _self _grid =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cudnn_grid_sampler(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _grid)));
  }|]
-- | Backward pass: @at::cudnn_grid_sampler_backward(self, grid,
-- grad_output)@, yielding gradients w.r.t. input and grid as a
-- 2-tuple.
cudnn_grid_sampler_backward_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cudnn_grid_sampler_backward_ttt _self _grid _grad_output =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cudnn_grid_sampler_backward(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _grid)
  , *$(at::Tensor* _grad_output)));
  }|]
-- | Binding to @at::cummax(self, dim)@ with an integer dimension.
-- Returns an owning pointer to a @new@-allocated (values, indices)
-- 2-tuple.
cummax_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummax_tl _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummax(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | Out-variant @at::cummax_out(values, indices, self, dim)@ writing
-- into the two destination tensors.
cummax_out_tttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummax_out_tttl _values _indices _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummax_out(
    *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | @at::cummax@ addressed by named dimension (@at::Dimname@).
cummax_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummax_tn _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummax(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- | Out-variant of @at::cummax@ by named dimension.
cummax_out_tttn
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummax_out_tttn _values _indices _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummax_out(
    *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- | Binding to the void helper @at::_cummax_helper(self, values,
-- indices, dim)@; presumably writes its results into @_values@ /
-- @_indices@ in place (the C++ call returns void) — confirm against
-- the ATen source.
_cummax_helper_tttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (())
_cummax_helper_tttl _self _values _indices _dim =
  [C.throwBlock| void { (at::_cummax_helper(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , $(int64_t _dim)));
  }|]
-- | Binding to @at::cummin(self, dim)@ with an integer dimension.
-- Returns an owning pointer to a @new@-allocated (values, indices)
-- 2-tuple.
cummin_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummin_tl _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummin(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | Out-variant @at::cummin_out(values, indices, self, dim)@ writing
-- into the two destination tensors.
cummin_out_tttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummin_out_tttl _values _indices _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummin_out(
    *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | @at::cummin@ addressed by named dimension (@at::Dimname@).
cummin_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummin_tn _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummin(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- | Out-variant of @at::cummin@ by named dimension.
cummin_out_tttn
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
cummin_out_tttn _values _indices _self _dim =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::cummin_out(
    *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- | Binding to the void helper @at::_cummin_helper(self, values,
-- indices, dim)@; presumably writes its results into @_values@ /
-- @_indices@ in place (the C++ call returns void) — confirm against
-- the ATen source.
_cummin_helper_tttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (())
_cummin_helper_tttl _self _values _indices _dim =
  [C.throwBlock| void { (at::_cummin_helper(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _values)
  , *$(at::Tensor* _indices)
  , $(int64_t _dim)));
  }|]
-- | Binding to @at::cummaxmin_backward(grad, input, indices, dim)@,
-- the shared backward of cummax/cummin.  Returns an owning pointer to
-- a @new@-allocated gradient tensor.
cummaxmin_backward_tttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cummaxmin_backward_tttl _grad _input _indices _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cummaxmin_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _input)
  , *$(at::Tensor* _indices)
  , $(int64_t _dim)));
  }|]
-- | Binding to @at::cumprod(self, dim, dtype)@ (cumulative product
-- along @dim@, cast to @dtype@; s = ScalarType in the suffix).
-- Returns an owning pointer to a @new@-allocated tensor.
cumprod_tls
  :: Ptr Tensor
  -> Int64
  -> ScalarType
  -> IO (Ptr Tensor)
cumprod_tls _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(at::ScalarType _dtype)));
  }|]
-- | @at::cumprod@ without an explicit dtype.
cumprod_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cumprod_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | Out-variant @at::cumprod_out(out, self, dim, dtype)@.
cumprod_out_ttls
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> ScalarType
  -> IO (Ptr Tensor)
cumprod_out_ttls _out _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Out-variant @at::cumprod_out@ without dtype.
cumprod_out_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cumprod_out_ttl _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | @at::cumprod@ by named dimension with explicit dtype.
cumprod_tns
  :: Ptr Tensor
  -> Ptr Dimname
  -> ScalarType
  -> IO (Ptr Tensor)
cumprod_tns _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(at::ScalarType _dtype)));
  }|]
-- | @at::cumprod@ by named dimension.
cumprod_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
cumprod_tn _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- | Out-variant @at::cumprod_out@ by named dimension with dtype.
cumprod_out_ttns
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> ScalarType
  -> IO (Ptr Tensor)
cumprod_out_ttns _out _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Out-variant @at::cumprod_out@ by named dimension.
cumprod_out_ttn
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
cumprod_out_ttn _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- | Backward pass: @at::cumprod_backward(grad, input, dim, output)@,
-- which additionally takes the forward output.
cumprod_backward_ttlt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cumprod_backward_ttlt _grad _input _dim _output =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumprod_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _input)
  , $(int64_t _dim)
  , *$(at::Tensor* _output)));
  }|]
-- | Binding to @at::cumsum(self, dim, dtype)@ (cumulative sum along
-- @dim@, cast to @dtype@).  Returns an owning pointer to a
-- @new@-allocated tensor.
cumsum_tls
  :: Ptr Tensor
  -> Int64
  -> ScalarType
  -> IO (Ptr Tensor)
cumsum_tls _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum(
    *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(at::ScalarType _dtype)));
  }|]
-- | @at::cumsum@ without an explicit dtype.
cumsum_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cumsum_tl _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum(
    *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | Out-variant @at::cumsum_out(out, self, dim, dtype)@.
cumsum_out_ttls
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> ScalarType
  -> IO (Ptr Tensor)
cumsum_out_ttls _out _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Out-variant @at::cumsum_out@ without dtype.
cumsum_out_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cumsum_out_ttl _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _dim)));
  }|]
-- | @at::cumsum@ by named dimension with explicit dtype.
cumsum_tns
  :: Ptr Tensor
  -> Ptr Dimname
  -> ScalarType
  -> IO (Ptr Tensor)
cumsum_tns _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(at::ScalarType _dtype)));
  }|]
-- | @at::cumsum@ by named dimension.
cumsum_tn
  :: Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
cumsum_tn _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- | Out-variant @at::cumsum_out@ by named dimension with dtype.
cumsum_out_ttns
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> ScalarType
  -> IO (Ptr Tensor)
cumsum_out_ttns _out _self _dim _dtype =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)
  , $(at::ScalarType _dtype)));
  }|]
-- | Out-variant @at::cumsum_out@ by named dimension.
cumsum_out_ttn
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Dimname
  -> IO (Ptr Tensor)
cumsum_out_ttn _out _self _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumsum_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Dimname* _dim)));
  }|]
-- Generated bindings for @at::cumulative_trapezoid@. Overloads take either a
-- sample-point tensor @x@ (suffix t) or a scalar spacing @dx@ (suffix s),
-- each with or without an explicit @dim@. Results are @new@-allocated; the
-- caller owns the returned pointer.

-- | @at::cumulative_trapezoid(y, x, dim)@.
cumulative_trapezoid_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
cumulative_trapezoid_ttl _y _x _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumulative_trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Tensor* _x)
  , $(int64_t _dim)));
  }|]

-- | @at::cumulative_trapezoid(y, x)@.
cumulative_trapezoid_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
cumulative_trapezoid_tt _y _x =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumulative_trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Tensor* _x)));
  }|]

-- | @at::cumulative_trapezoid(y, dx, dim)@ with scalar spacing.
cumulative_trapezoid_tsl
  :: Ptr Tensor
  -> Ptr Scalar
  -> Int64
  -> IO (Ptr Tensor)
cumulative_trapezoid_tsl _y _dx _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumulative_trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Scalar* _dx)
  , $(int64_t _dim)));
  }|]

-- | @at::cumulative_trapezoid(y, dx)@ with scalar spacing.
cumulative_trapezoid_ts
  :: Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
cumulative_trapezoid_ts _y _dx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumulative_trapezoid(
    *$(at::Tensor* _y)
  , *$(at::Scalar* _dx)));
  }|]

-- | @at::cumulative_trapezoid(y)@ with all defaults.
cumulative_trapezoid_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
cumulative_trapezoid_t _y =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::cumulative_trapezoid(
    *$(at::Tensor* _y)));
  }|]
-- Generated bindings for @at::ctc_loss@. Two overload families: input/target
-- lengths given as IntArray (suffix ll) or as Tensors (suffix tt), each with
-- progressively fewer trailing optional arguments
-- (blank, reduction, zero_infinity). Results are @new@-allocated; the caller
-- owns the returned pointer.

-- | @at::ctc_loss(log_probs, targets, input_lengths, target_lengths, blank, reduction, zero_infinity)@.
ctc_loss_ttllllb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
ctc_loss_ttllllb _log_probs _targets _input_lengths _target_lengths _blank _reduction _zero_infinity =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , $(int64_t _blank)
  , $(int64_t _reduction)
  , $(bool _zero_infinity)));
  }|]

-- | As above, defaulting @zero_infinity@.
ctc_loss_ttllll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
ctc_loss_ttllll _log_probs _targets _input_lengths _target_lengths _blank _reduction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , $(int64_t _blank)
  , $(int64_t _reduction)));
  }|]

-- | As above, defaulting @reduction@ and @zero_infinity@.
ctc_loss_ttlll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr Tensor)
ctc_loss_ttlll _log_probs _targets _input_lengths _target_lengths _blank =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , $(int64_t _blank)));
  }|]

-- | As above, defaulting @blank@, @reduction@ and @zero_infinity@.
ctc_loss_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr Tensor)
ctc_loss_ttll _log_probs _targets _input_lengths _target_lengths =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)));
  }|]

-- | Tensor-lengths overload:
-- @at::ctc_loss(log_probs, targets, input_lengths, target_lengths, blank, reduction, zero_infinity)@.
ctc_loss_ttttllb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
ctc_loss_ttttllb _log_probs _targets _input_lengths _target_lengths _blank _reduction _zero_infinity =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(at::Tensor* _input_lengths)
  , *$(at::Tensor* _target_lengths)
  , $(int64_t _blank)
  , $(int64_t _reduction)
  , $(bool _zero_infinity)));
  }|]

-- | Tensor-lengths overload, defaulting @zero_infinity@.
ctc_loss_ttttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
ctc_loss_ttttll _log_probs _targets _input_lengths _target_lengths _blank _reduction =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(at::Tensor* _input_lengths)
  , *$(at::Tensor* _target_lengths)
  , $(int64_t _blank)
  , $(int64_t _reduction)));
  }|]

-- | Tensor-lengths overload, defaulting @reduction@ and @zero_infinity@.
ctc_loss_ttttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
ctc_loss_ttttl _log_probs _targets _input_lengths _target_lengths _blank =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(at::Tensor* _input_lengths)
  , *$(at::Tensor* _target_lengths)
  , $(int64_t _blank)));
  }|]

-- | Tensor-lengths overload with all optional arguments defaulted.
ctc_loss_tttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
ctc_loss_tttt _log_probs _targets _input_lengths _target_lengths =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(at::Tensor* _input_lengths)
  , *$(at::Tensor* _target_lengths)));
  }|]
-- Generated bindings for the internal @at::_ctc_loss@, which returns a pair
-- of tensors wrapped in a @new@-allocated @std::tuple@; the caller owns the
-- returned pointer.

-- | @at::_ctc_loss(log_probs, targets, input_lengths, target_lengths, blank, zero_infinity)@.
_ctc_loss_ttlllb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_ctc_loss_ttlllb _log_probs _targets _input_lengths _target_lengths _blank _zero_infinity =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , $(int64_t _blank)
  , $(bool _zero_infinity)));
  }|]

-- | As above, defaulting @zero_infinity@.
_ctc_loss_ttlll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_ctc_loss_ttlll _log_probs _targets _input_lengths _target_lengths _blank =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , $(int64_t _blank)));
  }|]

-- | As above, defaulting @blank@ and @zero_infinity@.
_ctc_loss_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_ctc_loss_ttll _log_probs _targets _input_lengths _target_lengths =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_ctc_loss(
    *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)));
  }|]
-- Generated bindings for the internal @at::_ctc_loss_backward@. Results are
-- @new@-allocated; the caller owns the returned pointer.

-- | @at::_ctc_loss_backward(grad, log_probs, targets, input_lengths,
-- target_lengths, neg_log_likelihood, log_alpha, blank, zero_infinity)@.
_ctc_loss_backward_tttllttlb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
_ctc_loss_backward_tttllttlb _grad _log_probs _targets _input_lengths _target_lengths _neg_log_likelihood _log_alpha _blank _zero_infinity =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_ctc_loss_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , *$(at::Tensor* _neg_log_likelihood)
  , *$(at::Tensor* _log_alpha)
  , $(int64_t _blank)
  , $(bool _zero_infinity)));
  }|]

-- | As above, defaulting @zero_infinity@.
_ctc_loss_backward_tttllttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr IntArray
  -> Ptr IntArray
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_ctc_loss_backward_tttllttl _grad _log_probs _targets _input_lengths _target_lengths _neg_log_likelihood _log_alpha _blank =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_ctc_loss_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _log_probs)
  , *$(at::Tensor* _targets)
  , *$(std::vector<int64_t>* _input_lengths)
  , *$(std::vector<int64_t>* _target_lengths)
  , *$(at::Tensor* _neg_log_likelihood)
  , *$(at::Tensor* _log_alpha)
  , $(int64_t _blank)));
  }|]
-- Generated bindings for @at::diag_embed@ with progressively fewer of the
-- optional arguments (offset, dim1, dim2). Results are @new@-allocated; the
-- caller owns the returned pointer.

-- | @at::diag_embed(self, offset, dim1, dim2)@.
diag_embed_tlll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diag_embed_tlll _self _offset _dim1 _dim2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diag_embed(
    *$(at::Tensor* _self)
  , $(int64_t _offset)
  , $(int64_t _dim1)
  , $(int64_t _dim2)));
  }|]

-- | @at::diag_embed(self, offset, dim1)@.
diag_embed_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diag_embed_tll _self _offset _dim1 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diag_embed(
    *$(at::Tensor* _self)
  , $(int64_t _offset)
  , $(int64_t _dim1)));
  }|]

-- | @at::diag_embed(self, offset)@.
diag_embed_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
diag_embed_tl _self _offset =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diag_embed(
    *$(at::Tensor* _self)
  , $(int64_t _offset)));
  }|]

-- | @at::diag_embed(self)@ with all defaults.
diag_embed_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
diag_embed_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diag_embed(
    *$(at::Tensor* _self)));
  }|]
-- Generated bindings for @at::diagflat@. Results are @new@-allocated; the
-- caller owns the returned pointer.

-- | @at::diagflat(self, offset)@.
diagflat_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
diagflat_tl _self _offset =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagflat(
    *$(at::Tensor* _self)
  , $(int64_t _offset)));
  }|]

-- | @at::diagflat(self)@ with the default offset.
diagflat_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
diagflat_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagflat(
    *$(at::Tensor* _self)));
  }|]
-- Generated bindings for @at::diagonal@ with integer dims, dropping trailing
-- optional arguments (offset, dim1, dim2). Results are @new@-allocated; the
-- caller owns the returned pointer.

-- | @at::diagonal(self, offset, dim1, dim2)@.
diagonal_tlll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diagonal_tlll _self _offset _dim1 _dim2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)
  , $(int64_t _offset)
  , $(int64_t _dim1)
  , $(int64_t _dim2)));
  }|]

-- | @at::diagonal(self, offset, dim1)@.
diagonal_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diagonal_tll _self _offset _dim1 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)
  , $(int64_t _offset)
  , $(int64_t _dim1)));
  }|]

-- | @at::diagonal(self, offset)@.
diagonal_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
diagonal_tl _self _offset =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)
  , $(int64_t _offset)));
  }|]

-- | @at::diagonal(self)@ with all defaults.
diagonal_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
diagonal_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)));
  }|]
-- Generated bindings for @at::linalg_diagonal@, dropping trailing optional
-- arguments (offset, dim1, dim2). Results are @new@-allocated; the caller
-- owns the returned pointer.

-- | @at::linalg_diagonal(A, offset, dim1, dim2)@.
linalg_diagonal_tlll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
linalg_diagonal_tlll _A _offset _dim1 _dim2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::linalg_diagonal(
    *$(at::Tensor* _A)
  , $(int64_t _offset)
  , $(int64_t _dim1)
  , $(int64_t _dim2)));
  }|]

-- | @at::linalg_diagonal(A, offset, dim1)@.
linalg_diagonal_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
linalg_diagonal_tll _A _offset _dim1 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::linalg_diagonal(
    *$(at::Tensor* _A)
  , $(int64_t _offset)
  , $(int64_t _dim1)));
  }|]

-- | @at::linalg_diagonal(A, offset)@.
linalg_diagonal_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
linalg_diagonal_tl _A _offset =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::linalg_diagonal(
    *$(at::Tensor* _A)
  , $(int64_t _offset)));
  }|]

-- | @at::linalg_diagonal(A)@ with all defaults.
linalg_diagonal_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
linalg_diagonal_t _A =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::linalg_diagonal(
    *$(at::Tensor* _A)));
  }|]
-- Generated bindings for the named-dimension overload of @at::diagonal@.
-- Results are @new@-allocated; the caller owns the returned pointer.

-- | @at::diagonal(self, outdim, dim1, dim2, offset)@ with named dimensions.
diagonal_tnnnl
  :: Ptr Tensor
  -> Ptr Dimname
  -> Ptr Dimname
  -> Ptr Dimname
  -> Int64
  -> IO (Ptr Tensor)
diagonal_tnnnl _self _outdim _dim1 _dim2 _offset =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _outdim)
  , *$(at::Dimname* _dim1)
  , *$(at::Dimname* _dim2)
  , $(int64_t _offset)));
  }|]

-- | As above, defaulting @offset@.
diagonal_tnnn
  :: Ptr Tensor
  -> Ptr Dimname
  -> Ptr Dimname
  -> Ptr Dimname
  -> IO (Ptr Tensor)
diagonal_tnnn _self _outdim _dim1 _dim2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal(
    *$(at::Tensor* _self)
  , *$(at::Dimname* _outdim)
  , *$(at::Dimname* _dim1)
  , *$(at::Dimname* _dim2)));
  }|]
-- | Binding for @at::diagonal_backward(grad_output, input_sizes, offset,
-- dim1, dim2)@. Result is @new@-allocated; the caller owns the pointer.
diagonal_backward_tllll
  :: Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diagonal_backward_tllll _grad_output _input_sizes _offset _dim1 _dim2 =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diagonal_backward(
    *$(at::Tensor* _grad_output)
  , *$(std::vector<int64_t>* _input_sizes)
  , $(int64_t _offset)
  , $(int64_t _dim1)
  , $(int64_t _dim2)));
  }|]
-- Generated bindings for @at::diff@, dropping trailing optional arguments
-- (n, dim, prepend, append). Results are @new@-allocated; the caller owns
-- the returned pointer.

-- | @at::diff(self, n, dim, prepend, append)@.
diff_tlltt
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
diff_tlltt _self _n _dim _prepend _append =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff(
    *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)
  , *$(at::Tensor* _prepend)
  , *$(at::Tensor* _append)));
  }|]

-- | @at::diff(self, n, dim, prepend)@.
diff_tllt
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
diff_tllt _self _n _dim _prepend =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff(
    *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)
  , *$(at::Tensor* _prepend)));
  }|]

-- | @at::diff(self, n, dim)@.
diff_tll
  :: Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diff_tll _self _n _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff(
    *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)));
  }|]

-- | @at::diff(self, n)@.
diff_tl
  :: Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
diff_tl _self _n =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff(
    *$(at::Tensor* _self)
  , $(int64_t _n)));
  }|]

-- | @at::diff(self)@ with all defaults.
diff_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
diff_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff(
    *$(at::Tensor* _self)));
  }|]
-- Generated bindings for @at::diff_out@ (out-variant of @at::diff@), dropping
-- trailing optional arguments. Results are @new@-allocated; the caller owns
-- the returned pointer.

-- | @at::diff_out(out, self, n, dim, prepend, append)@.
diff_out_ttlltt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
diff_out_ttlltt _out _self _n _dim _prepend _append =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)
  , *$(at::Tensor* _prepend)
  , *$(at::Tensor* _append)));
  }|]

-- | @at::diff_out(out, self, n, dim, prepend)@.
diff_out_ttllt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
diff_out_ttllt _out _self _n _dim _prepend =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)
  , *$(at::Tensor* _prepend)));
  }|]

-- | @at::diff_out(out, self, n, dim)@.
diff_out_ttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
diff_out_ttll _out _self _n _dim =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _n)
  , $(int64_t _dim)));
  }|]

-- | @at::diff_out(out, self, n)@.
diff_out_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
diff_out_ttl _out _self _n =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , $(int64_t _n)));
  }|]

-- | @at::diff_out(out, self)@ with all defaults.
diff_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
diff_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::diff_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- Generated bindings for @at::gradient@, which returns a list of tensors.
-- Overloads take scalar spacing (s), dim lists (l), or a vector of scalars
-- (A). Results are @new@-allocated @std::vector<at::Tensor>@; the caller
-- owns the returned pointer.

-- | @at::gradient(self, spacing, dim, edge_order)@ with scalar spacing.
gradient_tsll
  :: Ptr Tensor
  -> Ptr Scalar
  -> Int64
  -> Int64
  -> IO (Ptr TensorList)
gradient_tsll _self _spacing _dim _edge_order =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _spacing)
  , $(int64_t _dim)
  , $(int64_t _edge_order)));
  }|]

-- | @at::gradient(self, spacing, dim)@ with scalar spacing.
gradient_tsl
  :: Ptr Tensor
  -> Ptr Scalar
  -> Int64
  -> IO (Ptr TensorList)
gradient_tsl _self _spacing _dim =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _spacing)
  , $(int64_t _dim)));
  }|]

-- | @at::gradient(self)@ with all defaults.
gradient_t
  :: Ptr Tensor
  -> IO (Ptr TensorList)
gradient_t _self =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)));
  }|]

-- | @at::gradient(self, dim, edge_order)@ with a list of dims.
gradient_tll
  :: Ptr Tensor
  -> Ptr IntArray
  -> Int64
  -> IO (Ptr TensorList)
gradient_tll _self _dim _edge_order =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)
  , $(int64_t _edge_order)));
  }|]

-- | @at::gradient(self, dim)@ with a list of dims.
gradient_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr TensorList)
gradient_tl _self _dim =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)
  , *$(std::vector<int64_t>* _dim)));
  }|]

-- | @at::gradient(self, spacing)@ with per-dimension scalar spacings.
gradient_tA
  :: Ptr Tensor
  -> Ptr (StdVector Scalar)
  -> IO (Ptr TensorList)
gradient_tA _self _spacing =
  [C.throwBlock| std::vector<at::Tensor>* { return new std::vector<at::Tensor>(at::gradient(
    *$(at::Tensor* _self)
  , *$(std::vector<at::Scalar>* _spacing)));
  }|]
-- Generated bindings for @at::div@ / @at::div_out@. Overloads cover
-- tensor/tensor and tensor/scalar division, with an optional string
-- @rounding_mode@. Results are @new@-allocated; the caller owns the pointer.

-- | @at::div(self, other)@ — elementwise tensor division.
div_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
div_tt _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]

-- | @at::div_out(out, self, other)@.
div_out_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
div_out_ttt _out _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]

-- | @at::div(self, other, rounding_mode)@.
div_tts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr StdString
  -> IO (Ptr Tensor)
div_tts _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::string* _rounding_mode)));
  }|]

-- | @at::div_out(out, self, other, rounding_mode)@.
div_out_ttts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr StdString
  -> IO (Ptr Tensor)
div_out_ttts _out _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::string* _rounding_mode)));
  }|]

-- | @at::div(self, other)@ with a scalar divisor.
div_ts
  :: Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
div_ts _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _other)));
  }|]

-- | @at::div(self, other, rounding_mode)@ with a scalar divisor.
div_tss
  :: Ptr Tensor
  -> Ptr Scalar
  -> Ptr StdString
  -> IO (Ptr Tensor)
div_tss _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::div(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _other)
  , *$(std::string* _rounding_mode)));
  }|]
-- Generated bindings for @at::divide@ / @at::divide_out@ (the alias family
-- of @at::div@). Results are @new@-allocated; the caller owns the pointer.

-- | @at::divide(self, other)@.
divide_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
divide_tt _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]

-- | @at::divide_out(out, self, other)@.
divide_out_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
divide_out_ttt _out _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]

-- | @at::divide(self, other)@ with a scalar divisor.
divide_ts
  :: Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
divide_ts _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _other)));
  }|]

-- | @at::divide(self, other, rounding_mode)@.
divide_tts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr StdString
  -> IO (Ptr Tensor)
divide_tts _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::string* _rounding_mode)));
  }|]

-- | @at::divide_out(out, self, other, rounding_mode)@.
divide_out_ttts
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr StdString
  -> IO (Ptr Tensor)
divide_out_ttts _out _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)
  , *$(std::string* _rounding_mode)));
  }|]

-- | @at::divide(self, other, rounding_mode)@ with a scalar divisor.
divide_tss
  :: Ptr Tensor
  -> Ptr Scalar
  -> Ptr StdString
  -> IO (Ptr Tensor)
divide_tss _self _other _rounding_mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::divide(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _other)
  , *$(std::string* _rounding_mode)));
  }|]
-- Generated bindings for @at::true_divide@ / @at::true_divide_out@.
-- Results are @new@-allocated; the caller owns the pointer.

-- | @at::true_divide(self, other)@.
true_divide_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
true_divide_tt _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::true_divide(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]

-- | @at::true_divide_out(out, self, other)@.
true_divide_out_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
true_divide_out_ttt _out _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::true_divide_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]

-- | @at::true_divide(self, other)@ with a scalar divisor.
true_divide_ts
  :: Ptr Tensor
  -> Ptr Scalar
  -> IO (Ptr Tensor)
true_divide_ts _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::true_divide(
    *$(at::Tensor* _self)
  , *$(at::Scalar* _other)));
  }|]
-- Generated bindings for @at::dot@ / @at::dot_out@. Results are
-- @new@-allocated; the caller owns the pointer.

-- | @at::dot(self, tensor)@.
dot_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
dot_tt _self _tensor =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::dot(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _tensor)));
  }|]

-- | @at::dot_out(out, self, tensor)@.
dot_out_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
dot_out_ttt _out _self _tensor =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::dot_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _tensor)));
  }|]
-- Generated bindings for @at::vdot@ / @at::vdot_out@. Results are
-- @new@-allocated; the caller owns the pointer.

-- | @at::vdot(self, other)@.
vdot_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
vdot_tt _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::vdot(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]

-- | @at::vdot_out(out, self, other)@.
vdot_out_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
vdot_out_ttt _out _self _other =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::vdot_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)
  , *$(at::Tensor* _other)));
  }|]
-- | Binding for @at::einsum(equation, tensors)@. The equation is passed as a
-- @std::string@ and the operands as a @std::vector<at::Tensor>@. Result is
-- @new@-allocated; the caller owns the pointer.
einsum_sl
  :: Ptr StdString
  -> Ptr TensorList
  -> IO (Ptr Tensor)
einsum_sl _equation _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::einsum(
    *$(std::string* _equation)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- Generated bindings for @at::embedding@, dropping trailing optional
-- arguments (padding_idx, scale_grad_by_freq, sparse). Results are
-- @new@-allocated; the caller owns the pointer.

-- | @at::embedding(weight, indices, padding_idx, scale_grad_by_freq, sparse)@.
embedding_ttlbb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
embedding_ttlbb _weight _indices _padding_idx _scale_grad_by_freq _sparse =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , $(int64_t _padding_idx)
  , $(bool _scale_grad_by_freq)
  , $(bool _sparse)));
  }|]

-- | @at::embedding(weight, indices, padding_idx, scale_grad_by_freq)@.
embedding_ttlb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
embedding_ttlb _weight _indices _padding_idx _scale_grad_by_freq =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , $(int64_t _padding_idx)
  , $(bool _scale_grad_by_freq)));
  }|]

-- | @at::embedding(weight, indices, padding_idx)@.
embedding_ttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
embedding_ttl _weight _indices _padding_idx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , $(int64_t _padding_idx)));
  }|]

-- | @at::embedding(weight, indices)@ with all defaults.
embedding_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
embedding_tt _weight _indices =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)));
  }|]
-- Generated bindings for the embedding backward/renorm helpers. Results are
-- @new@-allocated; the caller owns the returned pointer.

-- | @at::embedding_backward(grad, indices, num_weights, padding_idx,
-- scale_grad_by_freq, sparse)@.
embedding_backward_ttllbb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> CBool
  -> CBool
  -> IO (Ptr Tensor)
embedding_backward_ttllbb _grad _indices _num_weights _padding_idx _scale_grad_by_freq _sparse =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , $(int64_t _num_weights)
  , $(int64_t _padding_idx)
  , $(bool _scale_grad_by_freq)
  , $(bool _sparse)));
  }|]

-- | @at::embedding_dense_backward(grad_output, indices, num_weights,
-- padding_idx, scale_grad_by_freq)@.
embedding_dense_backward_ttllb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
embedding_dense_backward_ttllb _grad_output _indices _num_weights _padding_idx _scale_grad_by_freq =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding_dense_backward(
    *$(at::Tensor* _grad_output)
  , *$(at::Tensor* _indices)
  , $(int64_t _num_weights)
  , $(int64_t _padding_idx)
  , $(bool _scale_grad_by_freq)));
  }|]

-- | @at::embedding_renorm_(self, indices, max_norm, norm_type)@ — the
-- trailing underscore marks the in-place ATen variant, which mutates @self@.
embedding_renorm__ttdd
  :: Ptr Tensor
  -> Ptr Tensor
  -> CDouble
  -> CDouble
  -> IO (Ptr Tensor)
embedding_renorm__ttdd _self _indices _max_norm _norm_type =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding_renorm_(
    *$(at::Tensor* _self)
  , *$(at::Tensor* _indices)
  , $(double _max_norm)
  , $(double _norm_type)));
  }|]

-- | @at::embedding_sparse_backward(grad, indices, num_weights, padding_idx,
-- scale_grad_by_freq)@.
embedding_sparse_backward_ttllb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> CBool
  -> IO (Ptr Tensor)
embedding_sparse_backward_ttllb _grad _indices _num_weights _padding_idx _scale_grad_by_freq =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::embedding_sparse_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , $(int64_t _num_weights)
  , $(int64_t _padding_idx)
  , $(bool _scale_grad_by_freq)));
  }|]
-- Generated bindings for the internal @at::_embedding_bag_forward_only@,
-- dropping trailing optional arguments one by one (scale_grad_by_freq, mode,
-- sparse, per_sample_weights, include_last_offset, padding_idx). Each
-- wrapper returns a @new@-allocated 4-tensor @std::tuple@; the caller owns
-- the returned pointer.

-- | Full-arity variant.
_embedding_bag_forward_only_tttblbtbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttblbtbl _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset _padding_idx =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)
  , $(int64_t _padding_idx)));
  }|]

-- | As above, defaulting @padding_idx@.
_embedding_bag_forward_only_tttblbtb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttblbtb _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)));
  }|]

-- | As above, also defaulting @include_last_offset@.
_embedding_bag_forward_only_tttblbt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttblbt _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)));
  }|]

-- | As above, also defaulting @per_sample_weights@.
_embedding_bag_forward_only_tttblb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttblb _weight _indices _offsets _scale_grad_by_freq _mode _sparse =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)));
  }|]

-- | As above, also defaulting @sparse@.
_embedding_bag_forward_only_tttbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttbl _weight _indices _offsets _scale_grad_by_freq _mode =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)));
  }|]

-- | As above, also defaulting @mode@.
_embedding_bag_forward_only_tttb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_tttb _weight _indices _offsets _scale_grad_by_freq =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)));
  }|]

-- | Minimal variant with all optional arguments defaulted.
_embedding_bag_forward_only_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_forward_only_ttt _weight _indices _offsets =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag_forward_only(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)));
  }|]
-- | Binding for @at::_rowwise_prune(weight, mask, compressed_indices_dtype)@,
-- returning a pair of tensors in a @new@-allocated @std::tuple@; the caller
-- owns the returned pointer.
_rowwise_prune_tts
  :: Ptr Tensor
  -> Ptr Tensor
  -> ScalarType
  -> IO (Ptr (StdTuple '(Tensor,Tensor)))
_rowwise_prune_tts _weight _mask _compressed_indices_dtype =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor>(at::_rowwise_prune(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _mask)
  , $(at::ScalarType _compressed_indices_dtype)));
  }|]
-- Generated bindings for @at::row_stack@ / @at::row_stack_out@. Results are
-- @new@-allocated; the caller owns the pointer.

-- | @at::row_stack(tensors)@.
row_stack_l
  :: Ptr TensorList
  -> IO (Ptr Tensor)
row_stack_l _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::row_stack(
    *$(std::vector<at::Tensor>* _tensors)));
  }|]

-- | @at::row_stack_out(out, tensors)@.
row_stack_out_tl
  :: Ptr Tensor
  -> Ptr TensorList
  -> IO (Ptr Tensor)
row_stack_out_tl _out _tensors =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::row_stack_out(
    *$(at::Tensor* _out)
  , *$(std::vector<at::Tensor>* _tensors)));
  }|]
-- Generated bindings for @at::embedding_bag@, dropping trailing optional
-- arguments (scale_grad_by_freq, mode, sparse, per_sample_weights,
-- include_last_offset; the _tttblbtbl variant also passes padding_idx).
-- Each wrapper returns a @new@-allocated 4-tensor @std::tuple@; the caller
-- owns the returned pointer.

-- | @at::embedding_bag(weight, indices, offsets, scale_grad_by_freq, mode,
-- sparse, per_sample_weights, include_last_offset)@.
embedding_bag_tttblbtb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttblbtb _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)));
  }|]

-- | As above, defaulting @include_last_offset@.
embedding_bag_tttblbt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttblbt _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)));
  }|]

-- | As above, also defaulting @per_sample_weights@.
embedding_bag_tttblb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttblb _weight _indices _offsets _scale_grad_by_freq _mode _sparse =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)));
  }|]

-- | As above, also defaulting @sparse@.
embedding_bag_tttbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttbl _weight _indices _offsets _scale_grad_by_freq _mode =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)));
  }|]

-- | As above, also defaulting @mode@.
embedding_bag_tttb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttb _weight _indices _offsets _scale_grad_by_freq =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)));
  }|]

-- | Minimal variant with all optional arguments defaulted.
embedding_bag_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_ttt _weight _indices _offsets =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)));
  }|]

-- | Full-arity overload that also passes @padding_idx@.
embedding_bag_tttblbtbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
embedding_bag_tttblbtbl _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset _padding_idx =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)
  , $(int64_t _padding_idx)));
  }|]
-- Wrappers for the internal @at::_embedding_bag@ overloads (same
-- suffix scheme: @t@ = Ptr Tensor, @b@ = CBool, @l@ = Int64).  Each
-- returns a caller-owned, heap-allocated 4-tuple of tensors.

-- | Full 9-argument form: weight, indices, offsets,
-- scale_grad_by_freq, mode, sparse, per_sample_weights,
-- include_last_offset, padding_idx.
_embedding_bag_tttblbtbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttblbtbl _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset _padding_idx =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)
  , $(int64_t _padding_idx)));
  }|]

-- | Same, without padding_idx.
_embedding_bag_tttblbtb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttblbtb _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights _include_last_offset =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(bool _include_last_offset)));
  }|]

-- | Same, without include_last_offset.
_embedding_bag_tttblbt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttblbt _weight _indices _offsets _scale_grad_by_freq _mode _sparse _per_sample_weights =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)));
  }|]

-- | Same, without per_sample_weights.
_embedding_bag_tttblb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttblb _weight _indices _offsets _scale_grad_by_freq _mode _sparse =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)));
  }|]

-- | Same, without sparse.
_embedding_bag_tttbl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> Int64
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttbl _weight _indices _offsets _scale_grad_by_freq _mode =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)));
  }|]

-- | Same, without mode.
_embedding_bag_tttb
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> CBool
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_tttb _weight _indices _offsets _scale_grad_by_freq =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , $(bool _scale_grad_by_freq)));
  }|]

-- | Only weight, indices, offsets.
_embedding_bag_ttt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr (StdTuple '(Tensor,Tensor,Tensor,Tensor)))
_embedding_bag_ttt _weight _indices _offsets =
  [C.throwBlock| std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>* { return new std::tuple<at::Tensor,at::Tensor,at::Tensor,at::Tensor>(at::_embedding_bag(
    *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)));
  }|]
-- Wrappers for the @at::_embedding_bag*_backward@ family.  Each
-- returns a single caller-owned, heap-allocated Tensor.

-- | at::_embedding_bag_backward, full 12-argument form.
_embedding_bag_backward_ttttttlblbtl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_embedding_bag_backward_ttttttlblbtl _grad _indices _offsets _offset2bag _bag_size _maximum_indices _num_weights _scale_grad_by_freq _mode _sparse _per_sample_weights _padding_idx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , *$(at::Tensor* _maximum_indices)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)
  , $(int64_t _padding_idx)));
  }|]

-- | Same, without padding_idx.
_embedding_bag_backward_ttttttlblbt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> CBool
  -> Ptr Tensor
  -> IO (Ptr Tensor)
_embedding_bag_backward_ttttttlblbt _grad _indices _offsets _offset2bag _bag_size _maximum_indices _num_weights _scale_grad_by_freq _mode _sparse _per_sample_weights =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , *$(at::Tensor* _maximum_indices)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , $(bool _sparse)
  , *$(at::Tensor* _per_sample_weights)));
  }|]

-- | at::_embedding_bag_sparse_backward (takes offsets, no
-- maximum_indices).
_embedding_bag_sparse_backward_tttttlbltl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_embedding_bag_sparse_backward_tttttlbltl _grad _indices _offsets _offset2bag _bag_size _num_weights _scale_grad_by_freq _mode _per_sample_weights _padding_idx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_sparse_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , *$(at::Tensor* _per_sample_weights)
  , $(int64_t _padding_idx)));
  }|]

-- | Same, without padding_idx.
_embedding_bag_sparse_backward_tttttlblt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
_embedding_bag_sparse_backward_tttttlblt _grad _indices _offsets _offset2bag _bag_size _num_weights _scale_grad_by_freq _mode _per_sample_weights =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_sparse_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , *$(at::Tensor* _per_sample_weights)));
  }|]

-- | at::_embedding_bag_dense_backward.  Note: unlike the sparse
-- variant it takes maximum_indices instead of offsets.
_embedding_bag_dense_backward_tttttlbltl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_embedding_bag_dense_backward_tttttlbltl _grad _indices _offset2bag _bag_size _maximum_indices _num_weights _scale_grad_by_freq _mode _per_sample_weights _padding_idx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_dense_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , *$(at::Tensor* _maximum_indices)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , *$(at::Tensor* _per_sample_weights)
  , $(int64_t _padding_idx)));
  }|]

-- | Same, without padding_idx.
_embedding_bag_dense_backward_tttttlblt
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> CBool
  -> Int64
  -> Ptr Tensor
  -> IO (Ptr Tensor)
_embedding_bag_dense_backward_tttttlblt _grad _indices _offset2bag _bag_size _maximum_indices _num_weights _scale_grad_by_freq _mode _per_sample_weights =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_dense_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offset2bag)
  , *$(at::Tensor* _bag_size)
  , *$(at::Tensor* _maximum_indices)
  , $(int64_t _num_weights)
  , $(bool _scale_grad_by_freq)
  , $(int64_t _mode)
  , *$(at::Tensor* _per_sample_weights)));
  }|]

-- | at::_embedding_bag_per_sample_weights_backward.
_embedding_bag_per_sample_weights_backward_tttttll
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Int64
  -> IO (Ptr Tensor)
_embedding_bag_per_sample_weights_backward_tttttll _grad _weight _indices _offsets _offset2bag _mode _padding_idx =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_per_sample_weights_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , $(int64_t _mode)
  , $(int64_t _padding_idx)));
  }|]

-- | Same, without padding_idx.
_embedding_bag_per_sample_weights_backward_tttttl
  :: Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_embedding_bag_per_sample_weights_backward_tttttl _grad _weight _indices _offsets _offset2bag _mode =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_embedding_bag_per_sample_weights_backward(
    *$(at::Tensor* _grad)
  , *$(at::Tensor* _weight)
  , *$(at::Tensor* _indices)
  , *$(at::Tensor* _offsets)
  , *$(at::Tensor* _offset2bag)
  , $(int64_t _mode)));
  }|]
-- Wrappers for the @at::empty@ / @at::_empty_affine_quantized@
-- factories.  Extra suffix letters here: @l@ = Ptr IntArray
-- (std::vector<int64_t>), @N@ = Ptr DimnameList, @o@ = Ptr
-- TensorOptions, @M@ = MemoryFormat, @d@ = CDouble.  Every wrapper
-- returns a caller-owned, heap-allocated Tensor.

-- | at::empty with sizes, dimension names, options and memory format.
empty_lNoM
  :: Ptr IntArray
  -> Ptr DimnameList
  -> Ptr TensorOptions
  -> MemoryFormat
  -> IO (Ptr Tensor)
empty_lNoM _size _names _options _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)
  , *$(std::vector<at::Dimname>* _names)
  , *$(at::TensorOptions* _options)
  , $(at::MemoryFormat _memory_format)));
  }|]

-- | Same, without memory format.
empty_lNo
  :: Ptr IntArray
  -> Ptr DimnameList
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
empty_lNo _size _names _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)
  , *$(std::vector<at::Dimname>* _names)
  , *$(at::TensorOptions* _options)));
  }|]

-- | Same, without options.
empty_lN
  :: Ptr IntArray
  -> Ptr DimnameList
  -> IO (Ptr Tensor)
empty_lN _size _names =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)
  , *$(std::vector<at::Dimname>* _names)));
  }|]

-- | at::empty without dimension names, with memory format.
empty_loM
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> MemoryFormat
  -> IO (Ptr Tensor)
empty_loM _size _options _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)
  , $(at::MemoryFormat _memory_format)));
  }|]

-- | Same, without memory format.
empty_lo
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
empty_lo _size _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)));
  }|]

-- | at::empty from sizes only.
empty_l
  :: Ptr IntArray
  -> IO (Ptr Tensor)
empty_l _size =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty(
    *$(std::vector<int64_t>* _size)));
  }|]

-- | at::_empty_affine_quantized with scale, zero point and memory
-- format.
_empty_affine_quantized_lodlM
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> CDouble
  -> Int64
  -> MemoryFormat
  -> IO (Ptr Tensor)
_empty_affine_quantized_lodlM _size _options _scale _zero_point _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)
  , $(double _scale)
  , $(int64_t _zero_point)
  , $(at::MemoryFormat _memory_format)));
  }|]

-- | Same, without memory format.
_empty_affine_quantized_lodl
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> CDouble
  -> Int64
  -> IO (Ptr Tensor)
_empty_affine_quantized_lodl _size _options _scale _zero_point =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)
  , $(double _scale)
  , $(int64_t _zero_point)));
  }|]

-- | Same, without zero point.
_empty_affine_quantized_lod
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> CDouble
  -> IO (Ptr Tensor)
_empty_affine_quantized_lod _size _options _scale =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)
  , $(double _scale)));
  }|]

-- | Same, without scale.
_empty_affine_quantized_lo
  :: Ptr IntArray
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
_empty_affine_quantized_lo _size _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::TensorOptions* _options)));
  }|]

-- | at::_empty_affine_quantized from sizes only.
_empty_affine_quantized_l
  :: Ptr IntArray
  -> IO (Ptr Tensor)
_empty_affine_quantized_l _size =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_affine_quantized(
    *$(std::vector<int64_t>* _size)));
  }|]
-- More @at::empty*@ variants: per-channel quantized, quantized-like,
-- out-parameter, like-another-tensor and strided factories.  Same
-- suffix scheme as above; results are caller-owned heap allocations.

-- | at::_empty_per_channel_affine_quantized: per-channel scales and
-- zero_points tensors along the given axis.
_empty_per_channel_affine_quantized_lttloM
  :: Ptr IntArray
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Ptr TensorOptions
  -> MemoryFormat
  -> IO (Ptr Tensor)
_empty_per_channel_affine_quantized_lttloM _size _scales _zero_points _axis _options _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_per_channel_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _scales)
  , *$(at::Tensor* _zero_points)
  , $(int64_t _axis)
  , *$(at::TensorOptions* _options)
  , $(at::MemoryFormat _memory_format)));
  }|]

-- | Same, without memory format.
_empty_per_channel_affine_quantized_lttlo
  :: Ptr IntArray
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
_empty_per_channel_affine_quantized_lttlo _size _scales _zero_points _axis _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_per_channel_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _scales)
  , *$(at::Tensor* _zero_points)
  , $(int64_t _axis)
  , *$(at::TensorOptions* _options)));
  }|]

-- | Same, without options.
_empty_per_channel_affine_quantized_lttl
  :: Ptr IntArray
  -> Ptr Tensor
  -> Ptr Tensor
  -> Int64
  -> IO (Ptr Tensor)
_empty_per_channel_affine_quantized_lttl _size _scales _zero_points _axis =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::_empty_per_channel_affine_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _scales)
  , *$(at::Tensor* _zero_points)
  , $(int64_t _axis)));
  }|]

-- | at::empty_quantized: empty tensor shaped @size@ with the
-- quantizer taken from @qtensor@.
empty_quantized_ltoM
  :: Ptr IntArray
  -> Ptr Tensor
  -> Ptr TensorOptions
  -> MemoryFormat
  -> IO (Ptr Tensor)
empty_quantized_ltoM _size _qtensor _options _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _qtensor)
  , *$(at::TensorOptions* _options)
  , $(at::MemoryFormat _memory_format)));
  }|]

-- | Same, without memory format.
empty_quantized_lto
  :: Ptr IntArray
  -> Ptr Tensor
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
empty_quantized_lto _size _qtensor _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _qtensor)
  , *$(at::TensorOptions* _options)));
  }|]

-- | Same, without options.
empty_quantized_lt
  :: Ptr IntArray
  -> Ptr Tensor
  -> IO (Ptr Tensor)
empty_quantized_lt _size _qtensor =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_quantized(
    *$(std::vector<int64_t>* _size)
  , *$(at::Tensor* _qtensor)));
  }|]

-- | at::empty_out: writes into @_out@ (the returned handle wraps the
-- out tensor).
empty_out_tlM
  :: Ptr Tensor
  -> Ptr IntArray
  -> MemoryFormat
  -> IO (Ptr Tensor)
empty_out_tlM _out _size _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_out(
    *$(at::Tensor* _out)
  , *$(std::vector<int64_t>* _size)
  , $(at::MemoryFormat _memory_format)));
  }|]

-- | Same, without memory format.
empty_out_tl
  :: Ptr Tensor
  -> Ptr IntArray
  -> IO (Ptr Tensor)
empty_out_tl _out _size =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_out(
    *$(at::Tensor* _out)
  , *$(std::vector<int64_t>* _size)));
  }|]

-- | at::empty_like with options and memory format.
empty_like_toM
  :: Ptr Tensor
  -> Ptr TensorOptions
  -> MemoryFormat
  -> IO (Ptr Tensor)
empty_like_toM _self _options _memory_format =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_like(
    *$(at::Tensor* _self)
  , *$(at::TensorOptions* _options)
  , $(at::MemoryFormat _memory_format)));
  }|]

-- | Same, without memory format.
empty_like_to
  :: Ptr Tensor
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
empty_like_to _self _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_like(
    *$(at::Tensor* _self)
  , *$(at::TensorOptions* _options)));
  }|]

-- | at::empty_like from the reference tensor only.
empty_like_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
empty_like_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_like(
    *$(at::Tensor* _self)));
  }|]

-- | at::empty_strided with explicit sizes, strides and options.
empty_strided_llo
  :: Ptr IntArray
  -> Ptr IntArray
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
empty_strided_llo _size _stride _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_strided(
    *$(std::vector<int64_t>* _size)
  , *$(std::vector<int64_t>* _stride)
  , *$(at::TensorOptions* _options)));
  }|]

-- | Same, without options.
empty_strided_ll
  :: Ptr IntArray
  -> Ptr IntArray
  -> IO (Ptr Tensor)
empty_strided_ll _size _stride =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::empty_strided(
    *$(std::vector<int64_t>* _size)
  , *$(std::vector<int64_t>* _stride)));
  }|]
-- Element-wise unary wrappers.  Each ATen op comes in three flavours:
-- plain (new result tensor), trailing-underscore (ATen in-place
-- variant; the wrapper still allocates a fresh Tensor handle around
-- the result), and @_out@ (writes into the first argument).  All
-- returned pointers are caller-owned.

-- | at::erf.
erf_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
erf_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erf(
    *$(at::Tensor* _self)));
  }|]

-- | at::erf_ (in-place).
erf__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
erf__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erf_(
    *$(at::Tensor* _self)));
  }|]

-- | at::erf_out.
erf_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
erf_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erf_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]

-- | at::erfc.
erfc_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
erfc_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erfc(
    *$(at::Tensor* _self)));
  }|]

-- | at::erfc_ (in-place).
erfc__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
erfc__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erfc_(
    *$(at::Tensor* _self)));
  }|]

-- | at::erfc_out.
erfc_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
erfc_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::erfc_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]

-- | at::exp.
exp_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
exp_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp(
    *$(at::Tensor* _self)));
  }|]

-- | at::exp_ (in-place).
exp__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
exp__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp_(
    *$(at::Tensor* _self)));
  }|]

-- | at::exp_out.
exp_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
exp_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]

-- | at::exp2.
exp2_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
exp2_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp2(
    *$(at::Tensor* _self)));
  }|]

-- | at::exp2_ (in-place).
exp2__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
exp2__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp2_(
    *$(at::Tensor* _self)));
  }|]

-- | at::exp2_out.
exp2_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
exp2_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::exp2_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]

-- | at::expm1.
expm1_t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
expm1_t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::expm1(
    *$(at::Tensor* _self)));
  }|]

-- | at::expm1_ (in-place).
expm1__t
  :: Ptr Tensor
  -> IO (Ptr Tensor)
expm1__t _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::expm1_(
    *$(at::Tensor* _self)));
  }|]

-- | at::expm1_out.
expm1_out_tt
  :: Ptr Tensor
  -> Ptr Tensor
  -> IO (Ptr Tensor)
expm1_out_tt _out _self =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::expm1_out(
    *$(at::Tensor* _out)
  , *$(at::Tensor* _self)));
  }|]
-- Wrappers for the @at::eye@ identity-matrix factory.

-- | n x n identity with options.
eye_lo
  :: Int64
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
eye_lo _n _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::eye(
    $(int64_t _n)
  , *$(at::TensorOptions* _options)));
  }|]

-- | n x n identity with default options.
eye_l
  :: Int64
  -> IO (Ptr Tensor)
eye_l _n =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::eye(
    $(int64_t _n)));
  }|]

-- | n x m rectangular identity with options.
eye_llo
  :: Int64
  -> Int64
  -> Ptr TensorOptions
  -> IO (Ptr Tensor)
eye_llo _n _m _options =
  [C.throwBlock| at::Tensor* { return new at::Tensor(at::eye(
    $(int64_t _n)
  , $(int64_t _m)
  , *$(at::TensorOptions* _options)));
  }|]
|
532ee7a1c8677ddf499f86d4d6b1a2b029d84a65f8ba0ba3a9b360c1ac4b6960 | simplegeo/erlang | sys_SUITE.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
-module(sys_SUITE).
-export([all/1,log/1,log_to_file/1,stats/1,trace/1,suspend/1,install/1]).
-export([handle_call/3,terminate/2,init/1]).
-include("test_server.hrl").
-define(server,sys_SUITE_server).
%% Doesn't look into change_code at all
%% Doesn't address writing your own process that understands
%% system messages at all.
all(suite) -> [log,log_to_file,stats,trace,suspend,install].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
log(suite) -> [];
log(Config) when is_list(Config) ->
    %% Turn the sys event log on and off again around a call; every
    %% sys request here must return ok and the call must be answered.
    ?line {ok,_Server} = start(),
    ?line ok = sys:log(?server,true),
    ?line {ok,-44} = public_call(44),
    ?line ok = sys:log(?server,false),
    %% print asks the process to dump the collected log.
    ?line ok = sys:log(?server,print),
    ?line stop(),
    ok.
log_to_file(suite) -> [];
log_to_file(Config) when is_list(Config) ->
    %% Log all sys events to a temporary file and verify that the two
    %% expected *DBG* lines (incoming call, outgoing reply) were
    %% written.
    TempName = test_server:temp_name(?config(priv_dir,Config) ++ "sys."),
    ?line {ok,_Server} = start(),
    ?line ok = sys:log_to_file(?server,TempName),
    ?line {ok,-44} = public_call(44),
    ?line ok = sys:log_to_file(?server,false),
    ?line {ok,Fd} = file:open(TempName,read),
    ?line Msg1 = io:get_line(Fd,''),
    ?line Msg2 = io:get_line(Fd,''),
    ?line file:close(Fd),
    %% Bug fix: the boolean results of lists:prefix/2 used to be
    %% discarded, so these checks could never fail.  Match on 'true'
    %% to make them real assertions.
    ?line true = lists:prefix("*DBG* sys_SUITE_server got call {req,44} from ",Msg1),
    ?line true = lists:prefix("*DBG* sys_SUITE_server sent {ok,-44} to ",Msg2),
    ?line stop(),
    ok.
stats(suite) -> [];
stats(Config) when is_list(Config) ->
    %% One call while statistics are enabled must be counted as one
    %% message in and one message out; disabling statistics must make
    %% 'get' return no_statistics.
    ?line Self = self(),
    ?line {ok,_Server} = start(),
    ?line ok = sys:statistics(?server,true),
    ?line {ok,-44} = public_call(44),
    ?line {ok,Stats} = sys:statistics(?server,get),
    %% Bug fix: the boolean results of lists:member/2 used to be
    %% discarded, so these checks could never fail.  Match on 'true'
    %% to make them real assertions.
    ?line true = lists:member({messages_in,1},Stats),
    ?line true = lists:member({messages_out,1},Stats),
    ?line ok = sys:statistics(?server,false),
    %% The parent element of the status tuple is this test process.
    ?line {status,_Pid,{module,_Mod},[_PDict,running,Self,_,_]} =
        sys:get_status(?server),
    ?line {ok,no_statistics} = sys:statistics(?server,get),
    ?line stop(),
    ok.
trace(suite) -> [];
trace(Config) when is_list(Config) ->
    %% Enable tracing to stdout while capturing io, then verify that
    %% the two expected *DBG* lines were printed.
    ?line {ok,_Server} = start(),
    %% Let any pending io settle before capturing starts.
    case os:type() of
        vxworks ->
            ?line test_server:sleep(20000);
        _ ->
            ?line test_server:sleep(2000)
    end,
    ?line test_server:capture_start(),
    ?line sys:trace(?server,true),
    ?line {ok,-44} = public_call(44),
    %% ho, hum, allow for the io to reach us..
    case os:type() of
        vxworks ->
            ?line test_server:sleep(10000);
        _ ->
            ?line test_server:sleep(1000)
    end,
    ?line test_server:capture_stop(),
    ?line [Msg1,Msg2] = test_server:capture_get(),
    %% Bug fix: the boolean results of lists:prefix/2 used to be
    %% discarded, so these checks could never fail.  Match on 'true'
    %% to make them real assertions.
    ?line true = lists:prefix("*DBG* sys_SUITE_server got call {req,44} from ",Msg1),
    ?line true = lists:prefix("*DBG* sys_SUITE_server sent {ok,-44} to ",Msg2),
    ?line stop(),
    ok.
suspend(suite) -> [];
suspend(Config) when is_list(Config) ->
    %% A suspended server must not answer calls (public_call's 1 s
    %% timeout makes gen_server:call exit, caught here as {'EXIT',_});
    %% suspending or resuming twice must be harmless.
    ?line {ok,_Server} = start(),
    ?line sys:suspend(?server,1000),
    ?line {'EXIT',_} = (catch public_call(48)),
    ?line {status,_,_,[_,suspended,_,_,_]} = sys:get_status(?server),
    ?line sys:suspend(?server,1000), %% doing it twice is no error
    ?line {'EXIT',_} = (catch public_call(48)),
    ?line sys:resume(?server),
    ?line {status,_,_,[_,running,_,_,_]} = sys:get_status(?server),
    ?line {ok,-48} = (catch public_call(48)),
    ?line sys:resume(?server), %% doing it twice is no error
    ?line {ok,-48} = (catch public_call(48)),
    ?line stop(),
    ok.
install(suite) -> [];
install(Config) when is_list(Config) ->
    %% Install a debug ("spy") fun and check it sees exactly the calls
    %% made while installed: req 1 (installed), req 2 (after
    %% no_debug), req 3 (re-installed; installing the identical fun
    %% twice must not produce duplicate events), req 4 (after remove).
    ?line {ok,_Server} = start(),
    ?line Master = self(),
    ?line SpyFun =
        fun(func_state,Event,ProcState) ->
                %% Forward incoming {req,Arg} calls to the test process.
                case Event of
                    {in,{'$gen_call',_From,{req,Arg}}} ->
                        io:format("Trigged\n"),
                        Master ! {spy_got,{request,Arg},ProcState};
                    Other ->
                        io:format("Trigged other=~p\n",[Other])
                end
        end,
    ?line sys:install(?server,{SpyFun,func_state}),
    ?line {ok,-1} = (catch public_call(1)),
    ?line sys:no_debug(?server),
    ?line {ok,-2} = (catch public_call(2)),
    ?line sys:install(?server,{SpyFun,func_state}),
    ?line sys:install(?server,{SpyFun,func_state}),
    ?line {ok,-3} = (catch public_call(3)),
    ?line sys:remove(?server,SpyFun),
    ?line {ok,-4} = (catch public_call(4)),
    ?line Msgs = test_server:messages_get(),
    %% Only requests 1 and 3 may have been spied upon.
    ?line [{spy_got,{request,1},sys_SUITE_server},
           {spy_got,{request,3},sys_SUITE_server}] = Msgs,
    ?line stop(),
    ok.
%%%%%%%%%%%%%%%%%%%%
%% Dummy server
%% Issue a {req,Arg} call to the test server (1 s timeout).
public_call(Arg) ->
    gen_server:call(?server,{req,Arg},1000).

%% Start the dummy gen_server, locally registered as ?server.
start() ->
    gen_server:start_link({local,?server},?MODULE,[],[]).

%% Ask the server to stop (handled synchronously by handle_call/3).
stop() ->
    gen_server:call(?server,stop,1000).

%% gen_server callback: the state is a request counter, starting at 0.
init([]) ->
    {ok,0}.
%% gen_server callback.  A {req,Arg} request is answered with the
%% negated argument and bumps the request counter held in the state;
%% stop terminates the server normally, replying ok.
handle_call(stop, _From, Count) ->
    {stop, normal, ok, Count};
handle_call({req,Arg}, _From, Count) ->
    {reply, {ok,-Arg}, Count + 1}.
%% gen_server callback: nothing to clean up.
terminate(_Reason, _State) ->
    ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
| null | https://raw.githubusercontent.com/simplegeo/erlang/15eda8de27ba73d176c7eeb3a70a64167f50e2c4/lib/stdlib/test/sys_SUITE.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
Doesn't look into change_code at all
Doesn't address writing your own process that understands
system messages at all.
ho, hum, allow for the io to reach us..
doing it twice is no error
doing it twice is no error
Dummy server
| Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(sys_SUITE).
-export([all/1,log/1,log_to_file/1,stats/1,trace/1,suspend/1,install/1]).
-export([handle_call/3,terminate/2,init/1]).
-include("test_server.hrl").
-define(server,sys_SUITE_server).
%% NOTE(review): this is a second, comment-stripped copy of the
%% sys_SUITE module above, re-emitted by the dataset join whose hash
%% separator precedes it.  The stripper also dropped two whole lines
%% of suspend/1 (the repeated suspend/resume calls that carried
%% trailing comments), so this copy is not byte-identical to the
%% first.  Kept verbatim.
all(suite) -> [log,log_to_file,stats,trace,suspend,install].
log(suite) -> [];
log(Config) when is_list(Config) ->
    ?line {ok,_Server} = start(),
    ?line ok = sys:log(?server,true),
    ?line {ok,-44} = public_call(44),
    ?line ok = sys:log(?server,false),
    ?line ok = sys:log(?server,print),
    ?line stop(),
    ok.
log_to_file(suite) -> [];
log_to_file(Config) when is_list(Config) ->
    TempName = test_server:temp_name(?config(priv_dir,Config) ++ "sys."),
    ?line {ok,_Server} = start(),
    ?line ok = sys:log_to_file(?server,TempName),
    ?line {ok,-44} = public_call(44),
    ?line ok = sys:log_to_file(?server,false),
    ?line {ok,Fd} = file:open(TempName,read),
    ?line Msg1 = io:get_line(Fd,''),
    ?line Msg2 = io:get_line(Fd,''),
    ?line file:close(Fd),
    ?line lists:prefix("*DBG* sys_SUITE_server got call {req,44} from ",Msg1),
    ?line lists:prefix("*DBG* sys_SUITE_server sent {ok,-44} to ",Msg2),
    ?line stop(),
    ok.
stats(suite) -> [];
stats(Config) when is_list(Config) ->
    ?line Self = self(),
    ?line {ok,_Server} = start(),
    ?line ok = sys:statistics(?server,true),
    ?line {ok,-44} = public_call(44),
    ?line {ok,Stats} = sys:statistics(?server,get),
    ?line lists:member({messages_in,1},Stats),
    ?line lists:member({messages_out,1},Stats),
    ?line ok = sys:statistics(?server,false),
    ?line {status,_Pid,{module,_Mod},[_PDict,running,Self,_,_]} =
        sys:get_status(?server),
    ?line {ok,no_statistics} = sys:statistics(?server,get),
    ?line stop(),
    ok.
trace(suite) -> [];
trace(Config) when is_list(Config) ->
    ?line {ok,_Server} = start(),
    case os:type() of
        vxworks ->
            ?line test_server:sleep(20000);
        _ ->
            ?line test_server:sleep(2000)
    end,
    ?line test_server:capture_start(),
    ?line sys:trace(?server,true),
    ?line {ok,-44} = public_call(44),
    case os:type() of
        vxworks ->
            ?line test_server:sleep(10000);
        _ ->
            ?line test_server:sleep(1000)
    end,
    ?line test_server:capture_stop(),
    ?line [Msg1,Msg2] = test_server:capture_get(),
    ?line lists:prefix("*DBG* sys_SUITE_server got call {req,44} from ",Msg1),
    ?line lists:prefix("*DBG* sys_SUITE_server sent {ok,-44} to ",Msg2),
    ?line stop(),
    ok.
suspend(suite) -> [];
suspend(Config) when is_list(Config) ->
    ?line {ok,_Server} = start(),
    ?line sys:suspend(?server,1000),
    ?line {'EXIT',_} = (catch public_call(48)),
    ?line {status,_,_,[_,suspended,_,_,_]} = sys:get_status(?server),
    ?line {'EXIT',_} = (catch public_call(48)),
    ?line sys:resume(?server),
    ?line {status,_,_,[_,running,_,_,_]} = sys:get_status(?server),
    ?line {ok,-48} = (catch public_call(48)),
    ?line {ok,-48} = (catch public_call(48)),
    ?line stop(),
    ok.
install(suite) -> [];
install(Config) when is_list(Config) ->
    ?line {ok,_Server} = start(),
    ?line Master = self(),
    ?line SpyFun =
        fun(func_state,Event,ProcState) ->
                case Event of
                    {in,{'$gen_call',_From,{req,Arg}}} ->
                        io:format("Trigged\n"),
                        Master ! {spy_got,{request,Arg},ProcState};
                    Other ->
                        io:format("Trigged other=~p\n",[Other])
                end
        end,
    ?line sys:install(?server,{SpyFun,func_state}),
    ?line {ok,-1} = (catch public_call(1)),
    ?line sys:no_debug(?server),
    ?line {ok,-2} = (catch public_call(2)),
    ?line sys:install(?server,{SpyFun,func_state}),
    ?line sys:install(?server,{SpyFun,func_state}),
    ?line {ok,-3} = (catch public_call(3)),
    ?line sys:remove(?server,SpyFun),
    ?line {ok,-4} = (catch public_call(4)),
    ?line Msgs = test_server:messages_get(),
    ?line [{spy_got,{request,1},sys_SUITE_server},
           {spy_got,{request,3},sys_SUITE_server}] = Msgs,
    ?line stop(),
    ok.
public_call(Arg) ->
    gen_server:call(?server,{req,Arg},1000).
start() ->
    gen_server:start_link({local,?server},?MODULE,[],[]).
stop() ->
    gen_server:call(?server,stop,1000).
init([]) ->
    {ok,0}.
handle_call({req,Arg},_From,State) ->
    NewState = State+1,
    {reply,{ok,-Arg},NewState};
handle_call(stop,_From,State) ->
    {stop,normal,ok,State}.
terminate(_Reason, _State) ->
    ok.
|
42150e62916beadf5fd2d197fd5ebbea56d0f7de71a1c4456a217d33d4db2045 | SamirTalwar/smoke | DiffUtility.hs | module Test.Smoke.App.Diff.DiffUtility
( engine,
)
where
import Data.List.NonEmpty qualified as NonEmpty
import Test.Smoke.App.Diff.ExternalDiffCommand
import Test.Smoke.App.Diff.Types
-- | Diff engine backed by the external @diff@ executable.  'enabled'
-- and 'render' come from "Test.Smoke.App.Diff.ExternalDiffCommand";
-- presumably they probe for and invoke the executable -- confirm
-- there.  'engineRender' ignores its first argument.
engine :: DiffEngine
engine =
  DiffEngine
    { engineName = name,
      engineEnabled = enabled executable,
      engineRender = \_ -> render command
    }

-- | Name this engine reports.
name :: String
name = "diff"

-- | The external command line: just the bare executable, no
-- arguments.
command :: Command
command = NonEmpty.fromList [executable]

-- | The executable to run.
executable :: String
executable = "diff"
| null | https://raw.githubusercontent.com/SamirTalwar/smoke/b1c24419d76eac3df36aed02bf7f726d04f80d5a/src/app/Test/Smoke/App/Diff/DiffUtility.hs | haskell | module Test.Smoke.App.Diff.DiffUtility
( engine,
)
where
import Data.List.NonEmpty qualified as NonEmpty
import Test.Smoke.App.Diff.ExternalDiffCommand
import Test.Smoke.App.Diff.Types
engine :: DiffEngine
engine =
DiffEngine
{ engineName = name,
engineEnabled = enabled executable,
engineRender = \_ -> render command
}
name :: String
name = "diff"
command :: Command
command = NonEmpty.fromList [executable]
executable :: String
executable = "diff"
| |
40d43d8c8183afd0f1f78ebeff9a7e0c7e156518a77787fc853e274c01178285 | erlangonrails/devdb | bin_util.erl | -module(bin_util).
-export([member64/2, to_list64/1, encode_kvsegment/1, decode_kvsegment/1, bits/1, pad/1]).
-export([find_kv/2]).
member64(Bin, Val) when is_binary(Bin), is_integer(Val) ->
member64(Bin, <<Val:64>>);
member64(Bin, Val) when is_binary(Bin), is_binary(Val) ->
member64_1(Bin, Val).
member64_1(<<>>, _) -> false;
member64_1(<<X:8/binary, _/binary>>, Val) when X == Val -> true;
member64_1(<<_:8/binary, R/binary>>, Val) -> member64_1(R, Val).
to_list64(B) -> to_list64(B, []).
to_list64(<<X:8/binary, R/binary>>, Res) -> to_list64(R, [X|Res]);
to_list64(<<>>, Res) -> Res.
encode_kvsegment([]) -> <<>>;
encode_kvsegment(L) ->
B = bits(lists:max([V || {_, V} <- L])),
D = << <<K:32, V:B>> || {K, V} <- L >>,
pad(<<(length(L)):32, B:5, D/bits>>).
decode_kvsegment(<<>>) -> [];
decode_kvsegment(Seg) ->
<<N:32, B:5, X/bits>> = Seg,
S = N * (32 + B),
<<D:S/bits, _/bits>> = X,
[{K, V} || <<K:32, V:B>> <= D].
%%%
%%% Binary search for a kvsegment
%%%
find_kv(Key, <<>>) -> {Key, none};
find_kv(Key, Seg) ->
<<N:32, B:5, X/bits>> = Seg,
S = N * (32 + B),
<<D:S/bits, _/bits>> = X,
choose(D, Key, middle(D, 32 + B), 32 + B).
middle(<<>>, _) -> none;
middle(Seg, ItemSize) ->
N = bit_size(Seg) div ItemSize,
P = (N div 2) * ItemSize,
<<_:P/bits, Middle:32, _/bits>> = Seg,
{P, Middle}.
choose(Seg, Key, {P, Middle}, ItemSize) when Key > Middle ->
PP = P + ItemSize,
<<_:PP, NSeg/bits>> = Seg,
choose(NSeg, Key, middle(NSeg, ItemSize), ItemSize);
choose(Seg, Key, {P, Middle}, ItemSize) when Key < Middle ->
<<NSeg:P/bits, _/bits>> = Seg,
choose(NSeg, Key, middle(NSeg, ItemSize), ItemSize);
choose(Seg, Key, {P, _}, ItemSize) ->
<<_:P/bits, Item:ItemSize/bits, _/bits>> = Seg,
S = ItemSize - 32,
<<Key:32, Value:S>> = Item,
{Key, Value};
choose(<<>>, Key, _, _) -> {Key, none}.
%%%
%%%
%%%
pad(X) when is_binary(X) -> X;
pad(X) ->
P = 8 - (bit_size(X) rem 8),
<<X/bits, 0:P>>.
bits(X) -> bits(X, 0).
bits(0, N) -> N;
bits(X, N) -> bits(X bsr 1, N + 1).
| null | https://raw.githubusercontent.com/erlangonrails/devdb/0e7eaa6bd810ec3892bfc3d933439560620d0941/dev/ringo/ring/src/bin_util.erl | erlang |
Binary search for a kvsegment
| -module(bin_util).
-export([member64/2, to_list64/1, encode_kvsegment/1, decode_kvsegment/1, bits/1, pad/1]).
-export([find_kv/2]).
member64(Bin, Val) when is_binary(Bin), is_integer(Val) ->
member64(Bin, <<Val:64>>);
member64(Bin, Val) when is_binary(Bin), is_binary(Val) ->
member64_1(Bin, Val).
member64_1(<<>>, _) -> false;
member64_1(<<X:8/binary, _/binary>>, Val) when X == Val -> true;
member64_1(<<_:8/binary, R/binary>>, Val) -> member64_1(R, Val).
to_list64(B) -> to_list64(B, []).
to_list64(<<X:8/binary, R/binary>>, Res) -> to_list64(R, [X|Res]);
to_list64(<<>>, Res) -> Res.
encode_kvsegment([]) -> <<>>;
encode_kvsegment(L) ->
B = bits(lists:max([V || {_, V} <- L])),
D = << <<K:32, V:B>> || {K, V} <- L >>,
pad(<<(length(L)):32, B:5, D/bits>>).
decode_kvsegment(<<>>) -> [];
decode_kvsegment(Seg) ->
<<N:32, B:5, X/bits>> = Seg,
S = N * (32 + B),
<<D:S/bits, _/bits>> = X,
[{K, V} || <<K:32, V:B>> <= D].
find_kv(Key, <<>>) -> {Key, none};
find_kv(Key, Seg) ->
<<N:32, B:5, X/bits>> = Seg,
S = N * (32 + B),
<<D:S/bits, _/bits>> = X,
choose(D, Key, middle(D, 32 + B), 32 + B).
middle(<<>>, _) -> none;
middle(Seg, ItemSize) ->
N = bit_size(Seg) div ItemSize,
P = (N div 2) * ItemSize,
<<_:P/bits, Middle:32, _/bits>> = Seg,
{P, Middle}.
choose(Seg, Key, {P, Middle}, ItemSize) when Key > Middle ->
PP = P + ItemSize,
<<_:PP, NSeg/bits>> = Seg,
choose(NSeg, Key, middle(NSeg, ItemSize), ItemSize);
choose(Seg, Key, {P, Middle}, ItemSize) when Key < Middle ->
<<NSeg:P/bits, _/bits>> = Seg,
choose(NSeg, Key, middle(NSeg, ItemSize), ItemSize);
choose(Seg, Key, {P, _}, ItemSize) ->
<<_:P/bits, Item:ItemSize/bits, _/bits>> = Seg,
S = ItemSize - 32,
<<Key:32, Value:S>> = Item,
{Key, Value};
choose(<<>>, Key, _, _) -> {Key, none}.
pad(X) when is_binary(X) -> X;
pad(X) ->
P = 8 - (bit_size(X) rem 8),
<<X/bits, 0:P>>.
bits(X) -> bits(X, 0).
bits(0, N) -> N;
bits(X, N) -> bits(X bsr 1, N + 1).
|
d7d4bf521e0a1b9ef380f5996821a1c2f4da80f56b0252e817382b7b891802b5 | juspay/atlas | Handler.hs | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : API.Parking . Booking . Handler
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : API.Parking.Booking.Handler
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module API.Parking.Booking.Handler where
import API.Parking.Booking.BookingId.Handler as BookingStatus
import API.Parking.Booking.BookingId.TriggerStatus.Handler as BookingTriggerStatus
import API.Parking.Booking.BookingList.Handler as BookingList
import API.Parking.Booking.Types
import App.Types
import Servant
handler :: FlowServer API
handler =
BookingList.handler
:<|> BookingStatus.handler
:<|> BookingTriggerStatus.handler
| null | https://raw.githubusercontent.com/juspay/atlas/e64b227dc17887fb01c2554db21c08284d18a806/app/parking-bap/src/API/Parking/Booking/Handler.hs | haskell | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : API.Parking . Booking . Handler
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : API.Parking.Booking.Handler
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module API.Parking.Booking.Handler where
import API.Parking.Booking.BookingId.Handler as BookingStatus
import API.Parking.Booking.BookingId.TriggerStatus.Handler as BookingTriggerStatus
import API.Parking.Booking.BookingList.Handler as BookingList
import API.Parking.Booking.Types
import App.Types
import Servant
handler :: FlowServer API
handler =
BookingList.handler
:<|> BookingStatus.handler
:<|> BookingTriggerStatus.handler
| |
5a3d9853bd075faa1ecaa280b8b6c4bac8d64e147cec4b6df1fdde2cfb1ec992 | cryogen-project/cryogen-core | klipse_test.clj | (ns cryogen-core.klipse-test
(:require [cryogen-core.klipse :refer :all]
[clojure.test :refer [deftest testing is are]]
[net.cgrand.enlive-html :as enlive]))
(deftest map-keys-test
(is (= {"a" 1 "b" 2} (map-keys name {:a 1 :b 2}))))
(deftest update-existing-test
(is (= {:a 1 :b 2} (update-existing {:a 1 :b 1} :b inc)))
(is (= {:a 1} (update-existing {:a 1} :b (constantly 2)))))
(deftest deep-merge-test
(is (= {:a {:b 1 :c 2}} (deep-merge {:a {:b 1}} {:a {:c 2}})))
(is (= {:a {:b 1}} (deep-merge {:a {:b 1}} {:a nil})))
(is (= {:a {:b 1 :c 3}} (deep-merge {:a {:b 1 :c 2}} {:a {:c 3}}))))
(deftest code-block-classes-test
(is (= ["clojure" "ruby"]
(code-block-classes
(enlive/html-snippet "<h1>stuff</h1>
<div class=\"not-code\"><pre><code class=\"clojure\">(def x 42)</code></pre></div>
<pre><code class=\"ruby\">123</code><pre>")))))
(deftest clojure-eval-classes-test
(is (= #{"eval-cljs" "eval-reagent"}
(clojure-eval-classes {"selector" ".eval-cljs"
"selector_reagent" ".eval-reagent"
"selector_eval_ruby" ".eval-ruby"}))))
(deftest clojure-eval?-test
(is (clojure-eval? {"selector" ".eval-cljs"}
(enlive/html-snippet "<h1>stuff</h1>
<div class=\"not-code\"><pre><code class=\"eval-cljs\">(def x 42)</code></pre></div>
<pre><code class=\"ruby\">123</code><pre>")))
(is (not (clojure-eval? {"selector" ".eval-cljs"
"selector_eval_ruby" ".eval-ruby"}
(enlive/html-snippet "<h1>stuff</h1>
<pre><code class=\"eval-ruby\">123</code><pre>")))))
(deftest normalize-settings-test
(is (= {"selector_reagent" ".reagent"
"codemirror_options_in" {"lineNumbers" true}}
(normalize-settings
{:selector-reagent ".reagent"
:codemirror-options-in {:line-numbers true}}))))
(deftest merge-configs-test
(testing "Things are merged correctly"
(is (= (merge defaults
{:settings {"selector" ".clojure-eval"
"codemirror_options_in" {"lineNumbers" true}}})
(merge-configs {:settings {:codemirror-options-in {:line-numbers true}}}
{:settings {:selector ".clojure-eval"}}))))
(testing "If it's all set up in config.edn, in the post it can be just :klipse true"
(is (= (merge defaults {:settings {"selector_js" ".javascript"}})
(merge-configs {:settings {:selector-js ".javascript"}} true)))))
(def valid-cfg
"A minimal valid config."
{:settings {:selector ".cljs"}})
(deftest klipsify?-test
(is (false? (klipsify? {} true)))
(is (false? (klipsify? {} {})))
(is (false? (klipsify? nil {})))
(is (false? (klipsify? valid-cfg nil)))
(is (true? (klipsify? valid-cfg {})))
(is (true? (klipsify? valid-cfg true)))
(is (true? (klipsify? {} valid-cfg)))
(is (true? (klipsify? nil valid-cfg))))
| null | https://raw.githubusercontent.com/cryogen-project/cryogen-core/8318bde771d39515b5f5a7f5af53ba5c55aeec35/test/cryogen_core/klipse_test.clj | clojure | (ns cryogen-core.klipse-test
(:require [cryogen-core.klipse :refer :all]
[clojure.test :refer [deftest testing is are]]
[net.cgrand.enlive-html :as enlive]))
(deftest map-keys-test
(is (= {"a" 1 "b" 2} (map-keys name {:a 1 :b 2}))))
(deftest update-existing-test
(is (= {:a 1 :b 2} (update-existing {:a 1 :b 1} :b inc)))
(is (= {:a 1} (update-existing {:a 1} :b (constantly 2)))))
(deftest deep-merge-test
(is (= {:a {:b 1 :c 2}} (deep-merge {:a {:b 1}} {:a {:c 2}})))
(is (= {:a {:b 1}} (deep-merge {:a {:b 1}} {:a nil})))
(is (= {:a {:b 1 :c 3}} (deep-merge {:a {:b 1 :c 2}} {:a {:c 3}}))))
(deftest code-block-classes-test
(is (= ["clojure" "ruby"]
(code-block-classes
(enlive/html-snippet "<h1>stuff</h1>
<div class=\"not-code\"><pre><code class=\"clojure\">(def x 42)</code></pre></div>
<pre><code class=\"ruby\">123</code><pre>")))))
(deftest clojure-eval-classes-test
(is (= #{"eval-cljs" "eval-reagent"}
(clojure-eval-classes {"selector" ".eval-cljs"
"selector_reagent" ".eval-reagent"
"selector_eval_ruby" ".eval-ruby"}))))
(deftest clojure-eval?-test
(is (clojure-eval? {"selector" ".eval-cljs"}
(enlive/html-snippet "<h1>stuff</h1>
<div class=\"not-code\"><pre><code class=\"eval-cljs\">(def x 42)</code></pre></div>
<pre><code class=\"ruby\">123</code><pre>")))
(is (not (clojure-eval? {"selector" ".eval-cljs"
"selector_eval_ruby" ".eval-ruby"}
(enlive/html-snippet "<h1>stuff</h1>
<pre><code class=\"eval-ruby\">123</code><pre>")))))
(deftest normalize-settings-test
(is (= {"selector_reagent" ".reagent"
"codemirror_options_in" {"lineNumbers" true}}
(normalize-settings
{:selector-reagent ".reagent"
:codemirror-options-in {:line-numbers true}}))))
(deftest merge-configs-test
(testing "Things are merged correctly"
(is (= (merge defaults
{:settings {"selector" ".clojure-eval"
"codemirror_options_in" {"lineNumbers" true}}})
(merge-configs {:settings {:codemirror-options-in {:line-numbers true}}}
{:settings {:selector ".clojure-eval"}}))))
(testing "If it's all set up in config.edn, in the post it can be just :klipse true"
(is (= (merge defaults {:settings {"selector_js" ".javascript"}})
(merge-configs {:settings {:selector-js ".javascript"}} true)))))
(def valid-cfg
"A minimal valid config."
{:settings {:selector ".cljs"}})
(deftest klipsify?-test
(is (false? (klipsify? {} true)))
(is (false? (klipsify? {} {})))
(is (false? (klipsify? nil {})))
(is (false? (klipsify? valid-cfg nil)))
(is (true? (klipsify? valid-cfg {})))
(is (true? (klipsify? valid-cfg true)))
(is (true? (klipsify? {} valid-cfg)))
(is (true? (klipsify? nil valid-cfg))))
| |
28fa9a696a83a85ee7cfc05fb32e95a5ef8917e2b08c9edff9877495110f5d36 | kolmodin/binary | Put.hs | # LANGUAGE CPP , ExistentialQuantification #
# LANGUAGE DeriveGeneric #
module Main (main) where
import Control.DeepSeq
import Control.Exception (evaluate)
import Criterion.Main
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString.Lazy as L
import Data.Monoid
import GHC.Generics
import Data.Binary
import Data.Binary.Put
import Data.ByteString.Builder as BB
import Prelude -- Silence Monoid import warning.
main :: IO ()
main = do
evaluate $ rnf
[ rnf bigIntegers
, rnf smallIntegers
, rnf smallByteStrings
, rnf smallStrings
, rnf doubles
, rnf word8s
, rnf word16s
, rnf word32s
, rnf word64s
]
defaultMain
[
bench "small Integers" $ whnf (run . fromIntegers) smallIntegers,
bench "big Integers" $ whnf (run . fromIntegers) bigIntegers,
bench "[small Integer]" $ whnf (run . put) smallIntegers,
bench "[big Integer]" $ whnf (run . put) bigIntegers,
bench "small ByteStrings" $ whnf (run . fromByteStrings) smallByteStrings,
bench "[small ByteString]" $ whnf (run . put) smallByteStrings,
bench "small Strings" $ whnf (run . fromStrings) smallStrings,
bench "[small String]" $ whnf (run . put) smallStrings,
bench "Double" $ whnf (run . put) doubles,
bench "Word8s monoid put" $ whnf (run . fromWord8s) word8s,
bench "Word8s builder" $ whnf (L.length . toLazyByteString . fromWord8sBuilder) word8s,
bench "[Word8]" $ whnf (run . put) word8s,
bench "Word16s monoid put" $ whnf (run . fromWord16s) word16s,
bench "Word16s builder" $ whnf (L.length . toLazyByteString . fromWord16sBuilder) word16s,
bench "[Word16]" $ whnf (run . put) word16s,
bench "Word32s monoid put" $ whnf (run . fromWord32s) word32s,
bench "Word32s builder" $ whnf (L.length . toLazyByteString . fromWord32sBuilder) word32s,
bench "[Word32]" $ whnf (run . put) word32s,
bench "Word64s monoid put" $ whnf (run . fromWord64s) word64s,
bench "Word64s builder" $ whnf (L.length . toLazyByteString . fromWord64sBuilder) word64s,
bench "[Word64]" $ whnf (run . put) word64s
, bgroup "Generics" [
bench "Struct monoid put" $ whnf (run . fromStructs) structs,
bench "Struct put as list" $ whnf (run . put) structs,
bench "StructList monoid put" $ whnf (run . fromStructLists) structLists,
bench "StructList put as list" $ whnf (run . put) structLists
]
]
where
run = L.length . runPut
data Struct = Struct Word8 Word16 Word32 Word64 deriving Generic
instance Binary Struct
data StructList = StructList [Struct] deriving Generic
instance Binary StructList
structs :: [Struct]
structs = take 10000 $ [ Struct a b 0 0 | a <- [0 .. maxBound], b <- [0 .. maxBound] ]
structLists :: [StructList]
structLists = replicate 1000 (StructList (take 10 structs))
-- Input data
smallIntegers :: [Integer]
smallIntegers = [0..10000]
# NOINLINE smallIntegers #
bigIntegers :: [Integer]
bigIntegers = [m .. m + 10000]
where
m :: Integer
m = fromIntegral (maxBound :: Word64)
# NOINLINE bigIntegers #
smallByteStrings :: [S.ByteString]
smallByteStrings = replicate 10000 $ C.pack "abcdefghi"
# NOINLINE smallByteStrings #
smallStrings :: [String]
smallStrings = replicate 10000 "abcdefghi"
# NOINLINE smallStrings #
doubles :: [Double]
doubles = take 10000 $ [ sign * 2 ** n | sign <- [-1, 1], n <- [ 0, 0.2 .. 1023 ]]
word8s :: [Word8]
word8s = take 10000 $ cycle [minBound .. maxBound]
# NOINLINE word8s #
word16s :: [Word16]
word16s = take 10000 $ cycle [minBound .. maxBound]
{-# NOINLINE word16s #-}
word32s :: [Word32]
word32s = take 10000 $ cycle [minBound .. maxBound]
# NOINLINE word32s #
word64s :: [Word64]
word64s = take 10000 $ cycle [minBound .. maxBound]
# NOINLINE word64s #
------------------------------------------------------------------------
-- Benchmarks
fromIntegers :: [Integer] -> Put
fromIntegers [] = mempty
fromIntegers (x:xs) = put x `mappend` fromIntegers xs
fromByteStrings :: [S.ByteString] -> Put
fromByteStrings [] = mempty
fromByteStrings (x:xs) = put x `mappend` fromByteStrings xs
fromStrings :: [String] -> Put
fromStrings [] = mempty
fromStrings (x:xs) = put x `mappend` fromStrings xs
fromWord8s :: [Word8] -> Put
fromWord8s [] = mempty
fromWord8s (x:xs) = put x `mappend` fromWord8s xs
fromWord8sBuilder :: [Word8] -> BB.Builder
fromWord8sBuilder [] = mempty
fromWord8sBuilder (x:xs) = BB.word8 x `mappend` fromWord8sBuilder xs
fromWord16s :: [Word16] -> Put
fromWord16s [] = mempty
fromWord16s (x:xs) = put x `mappend` fromWord16s xs
fromWord16sBuilder :: [Word16] -> BB.Builder
fromWord16sBuilder [] = mempty
fromWord16sBuilder (x:xs) = BB.word16BE x `mappend` fromWord16sBuilder xs
fromWord32s :: [Word32] -> Put
fromWord32s [] = mempty
fromWord32s (x:xs) = put x `mappend` fromWord32s xs
fromWord32sBuilder :: [Word32] -> BB.Builder
fromWord32sBuilder [] = mempty
fromWord32sBuilder (x:xs) = BB.word32BE x `mappend` fromWord32sBuilder xs
fromWord64s :: [Word64] -> Put
fromWord64s [] = mempty
fromWord64s (x:xs) = put x `mappend` fromWord64s xs
fromWord64sBuilder :: [Word64] -> BB.Builder
fromWord64sBuilder [] = mempty
fromWord64sBuilder (x:xs) = BB.word64BE x `mappend` fromWord64sBuilder xs
fromStructs :: [Struct] -> Put
fromStructs [] = mempty
fromStructs (x:xs) = put x `mappend` fromStructs xs
fromStructLists :: [StructList] -> Put
fromStructLists [] = mempty
fromStructLists (x:xs) = put x `mappend` fromStructLists xs
| null | https://raw.githubusercontent.com/kolmodin/binary/bccbece2e254813f86e0a04f71d9ca3cea68b3bf/benchmarks/Put.hs | haskell | Silence Monoid import warning.
Input data
# NOINLINE word16s #
----------------------------------------------------------------------
Benchmarks | # LANGUAGE CPP , ExistentialQuantification #
# LANGUAGE DeriveGeneric #
module Main (main) where
import Control.DeepSeq
import Control.Exception (evaluate)
import Criterion.Main
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString.Lazy as L
import Data.Monoid
import GHC.Generics
import Data.Binary
import Data.Binary.Put
import Data.ByteString.Builder as BB
main :: IO ()
main = do
evaluate $ rnf
[ rnf bigIntegers
, rnf smallIntegers
, rnf smallByteStrings
, rnf smallStrings
, rnf doubles
, rnf word8s
, rnf word16s
, rnf word32s
, rnf word64s
]
defaultMain
[
bench "small Integers" $ whnf (run . fromIntegers) smallIntegers,
bench "big Integers" $ whnf (run . fromIntegers) bigIntegers,
bench "[small Integer]" $ whnf (run . put) smallIntegers,
bench "[big Integer]" $ whnf (run . put) bigIntegers,
bench "small ByteStrings" $ whnf (run . fromByteStrings) smallByteStrings,
bench "[small ByteString]" $ whnf (run . put) smallByteStrings,
bench "small Strings" $ whnf (run . fromStrings) smallStrings,
bench "[small String]" $ whnf (run . put) smallStrings,
bench "Double" $ whnf (run . put) doubles,
bench "Word8s monoid put" $ whnf (run . fromWord8s) word8s,
bench "Word8s builder" $ whnf (L.length . toLazyByteString . fromWord8sBuilder) word8s,
bench "[Word8]" $ whnf (run . put) word8s,
bench "Word16s monoid put" $ whnf (run . fromWord16s) word16s,
bench "Word16s builder" $ whnf (L.length . toLazyByteString . fromWord16sBuilder) word16s,
bench "[Word16]" $ whnf (run . put) word16s,
bench "Word32s monoid put" $ whnf (run . fromWord32s) word32s,
bench "Word32s builder" $ whnf (L.length . toLazyByteString . fromWord32sBuilder) word32s,
bench "[Word32]" $ whnf (run . put) word32s,
bench "Word64s monoid put" $ whnf (run . fromWord64s) word64s,
bench "Word64s builder" $ whnf (L.length . toLazyByteString . fromWord64sBuilder) word64s,
bench "[Word64]" $ whnf (run . put) word64s
, bgroup "Generics" [
bench "Struct monoid put" $ whnf (run . fromStructs) structs,
bench "Struct put as list" $ whnf (run . put) structs,
bench "StructList monoid put" $ whnf (run . fromStructLists) structLists,
bench "StructList put as list" $ whnf (run . put) structLists
]
]
where
run = L.length . runPut
data Struct = Struct Word8 Word16 Word32 Word64 deriving Generic
instance Binary Struct
data StructList = StructList [Struct] deriving Generic
instance Binary StructList
structs :: [Struct]
structs = take 10000 $ [ Struct a b 0 0 | a <- [0 .. maxBound], b <- [0 .. maxBound] ]
structLists :: [StructList]
structLists = replicate 1000 (StructList (take 10 structs))
smallIntegers :: [Integer]
smallIntegers = [0..10000]
# NOINLINE smallIntegers #
bigIntegers :: [Integer]
bigIntegers = [m .. m + 10000]
where
m :: Integer
m = fromIntegral (maxBound :: Word64)
# NOINLINE bigIntegers #
smallByteStrings :: [S.ByteString]
smallByteStrings = replicate 10000 $ C.pack "abcdefghi"
# NOINLINE smallByteStrings #
smallStrings :: [String]
smallStrings = replicate 10000 "abcdefghi"
# NOINLINE smallStrings #
doubles :: [Double]
doubles = take 10000 $ [ sign * 2 ** n | sign <- [-1, 1], n <- [ 0, 0.2 .. 1023 ]]
word8s :: [Word8]
word8s = take 10000 $ cycle [minBound .. maxBound]
# NOINLINE word8s #
word16s :: [Word16]
word16s = take 10000 $ cycle [minBound .. maxBound]
word32s :: [Word32]
word32s = take 10000 $ cycle [minBound .. maxBound]
# NOINLINE word32s #
word64s :: [Word64]
word64s = take 10000 $ cycle [minBound .. maxBound]
# NOINLINE word64s #
fromIntegers :: [Integer] -> Put
fromIntegers [] = mempty
fromIntegers (x:xs) = put x `mappend` fromIntegers xs
fromByteStrings :: [S.ByteString] -> Put
fromByteStrings [] = mempty
fromByteStrings (x:xs) = put x `mappend` fromByteStrings xs
fromStrings :: [String] -> Put
fromStrings [] = mempty
fromStrings (x:xs) = put x `mappend` fromStrings xs
fromWord8s :: [Word8] -> Put
fromWord8s [] = mempty
fromWord8s (x:xs) = put x `mappend` fromWord8s xs
fromWord8sBuilder :: [Word8] -> BB.Builder
fromWord8sBuilder [] = mempty
fromWord8sBuilder (x:xs) = BB.word8 x `mappend` fromWord8sBuilder xs
fromWord16s :: [Word16] -> Put
fromWord16s [] = mempty
fromWord16s (x:xs) = put x `mappend` fromWord16s xs
fromWord16sBuilder :: [Word16] -> BB.Builder
fromWord16sBuilder [] = mempty
fromWord16sBuilder (x:xs) = BB.word16BE x `mappend` fromWord16sBuilder xs
fromWord32s :: [Word32] -> Put
fromWord32s [] = mempty
fromWord32s (x:xs) = put x `mappend` fromWord32s xs
fromWord32sBuilder :: [Word32] -> BB.Builder
fromWord32sBuilder [] = mempty
fromWord32sBuilder (x:xs) = BB.word32BE x `mappend` fromWord32sBuilder xs
fromWord64s :: [Word64] -> Put
fromWord64s [] = mempty
fromWord64s (x:xs) = put x `mappend` fromWord64s xs
fromWord64sBuilder :: [Word64] -> BB.Builder
fromWord64sBuilder [] = mempty
fromWord64sBuilder (x:xs) = BB.word64BE x `mappend` fromWord64sBuilder xs
fromStructs :: [Struct] -> Put
fromStructs [] = mempty
fromStructs (x:xs) = put x `mappend` fromStructs xs
fromStructLists :: [StructList] -> Put
fromStructLists [] = mempty
fromStructLists (x:xs) = put x `mappend` fromStructLists xs
|
609161cdc1abfc75e24ccdcb82af38b57156ac954348a2ec2b0251ccd1765581 | blitzcode/ray-marching-distance-fields | CornellBox.hs |
# LANGUAGE OverloadedLists #
module CornellBox ( mkCornellBoxVerticesTex
, cornellBox
) where
import Control.Exception
import qualified Data.Vector as V
import qualified Graphics.Rendering.OpenGL as GL
import Foreign.Marshal.Array
import Linear
import GLHelpers
-- Build a 1D OpenGL floating point texture containing the vertices of
the triangulated Cornell Box geometry , scaled and centered to [ -1 , 1 ]
--
--
mkCornellBoxVerticesTex :: IO GL.TextureObject
mkCornellBoxVerticesTex =
bracketOnError GL.genObjectName GL.deleteObjectName $ \tex -> do
GL.textureBinding GL.Texture1D GL.$= Just tex
setTextureFiltering GL.Texture1D TFNone
let numQuad = V.length cornellBox `div` 4
numTri = numQuad * 2
numVtx = numTri * 3
toUnit = 559.2 / 2
scale = 1 / (sqrt (2 * 2 + 2 * 2 + 2 * 2) / 2) * 0.99
vtx = flip concatMap ([0..numQuad - 1] :: [Int]) $ \quadIdx ->
[ ((cornellBox V.! (quadIdx * 4 + 0)) / toUnit - 1) ^* scale
, ((cornellBox V.! (quadIdx * 4 + 1)) / toUnit - 1) ^* scale
, ((cornellBox V.! (quadIdx * 4 + 3)) / toUnit - 1) ^* scale
, ((cornellBox V.! (quadIdx * 4 + 3)) / toUnit - 1) ^* scale
, ((cornellBox V.! (quadIdx * 4 + 1)) / toUnit - 1) ^* scale
, ((cornellBox V.! (quadIdx * 4 + 2)) / toUnit - 1) ^* scale
]
in withArray vtx $ GL.texImage1D GL.Texture1D
GL.NoProxy
0
GL.RGB32F
(GL.TextureSize1D $ fromIntegral numVtx)
0
. GL.PixelData GL.RGB GL.Float
return tex
cornellBox :: V.Vector (V3 Float)
cornellBox =
[ -- Floor (White)
V3 552.8 0.0 0.0
, V3 0.0 0.0 0.0
, V3 0.0 0.0 559.2
, V3 549.6 0.0 559.2
-- Ceiling (White)
, V3 556.0 548.8 0.0
, V3 556.0 548.8 559.2
, V3 0.0 548.8 559.2
, V3 0.0 548.8 0.0
-- Back Wall (White)
, V3 549.6 0.0 559.2
, V3 0.0 0.0 559.2
, V3 0.0 548.8 559.2
, V3 556.0 548.8 559.2
-- Right Wall (Green)
, V3 0.0 0.0 559.2
, V3 0.0 0.0 0.0
, V3 0.0 548.8 0.0
, V3 0.0 548.8 559.2
-- Left Wall (Red)
, V3 552.8 0.0 0.0
, V3 549.6 0.0 559.2
, V3 556.0 548.8 559.2
, V3 556.0 548.8 0.0
-- Light (Small offset to avoid surface acne)
, V3 343.0 (548.8 - 0.1) 227.0
, V3 343.0 (548.8 - 0.1) 332.0
, V3 213.0 (548.8 - 0.1) 332.0
, V3 213.0 (548.8 - 0.1) 227.0
-- Short Block (White)
, V3 130.0 165.0 65.0
, V3 82.0 165.0 225.0
, V3 240.0 165.0 272.0
, V3 290.0 165.0 114.0
, V3 290.0 0.0 114.0
, V3 290.0 165.0 114.0
, V3 240.0 165.0 272.0
, V3 240.0 0.0 272.0
, V3 130.0 0.0 65.0
, V3 130.0 165.0 65.0
, V3 290.0 165.0 114.0
, V3 290.0 0.0 114.0
, V3 82.0 0.0 225.0
, V3 82.0 165.0 225.0
, V3 130.0 165.0 65.0
, V3 130.0 0.0 65.0
, V3 240.0 0.0 272.0
, V3 240.0 165.0 272.0
, V3 82.0 165.0 225.0
, V3 82.0 0.0 225.0
-- Tall Block (White)
, V3 423.0 330.0 247.0
, V3 265.0 330.0 296.0
, V3 314.0 330.0 456.0
, V3 472.0 330.0 406.0
, V3 423.0 0.0 247.0
, V3 423.0 330.0 247.0
, V3 472.0 330.0 406.0
, V3 472.0 0.0 406.0
, V3 472.0 0.0 406.0
, V3 472.0 330.0 406.0
, V3 314.0 330.0 456.0
, V3 314.0 0.0 456.0
, V3 314.0 0.0 456.0
, V3 314.0 330.0 456.0
, V3 265.0 330.0 296.0
, V3 265.0 0.0 296.0
, V3 265.0 0.0 296.0
, V3 265.0 330.0 296.0
, V3 423.0 330.0 247.0
, V3 423.0 0.0 247.0
]
| null | https://raw.githubusercontent.com/blitzcode/ray-marching-distance-fields/0578d01e75f819b1242fa1378e3963bd48842acc/CornellBox.hs | haskell | Build a 1D OpenGL floating point texture containing the vertices of
Floor (White)
Ceiling (White)
Back Wall (White)
Right Wall (Green)
Left Wall (Red)
Light (Small offset to avoid surface acne)
Short Block (White)
Tall Block (White) |
# LANGUAGE OverloadedLists #
module CornellBox ( mkCornellBoxVerticesTex
, cornellBox
) where
import Control.Exception
import qualified Data.Vector as V
import qualified Graphics.Rendering.OpenGL as GL
import Foreign.Marshal.Array
import Linear
import GLHelpers
the triangulated Cornell Box geometry , scaled and centered to [ -1 , 1 ]
mkCornellBoxVerticesTex :: IO GL.TextureObject
mkCornellBoxVerticesTex =
bracketOnError GL.genObjectName GL.deleteObjectName $ \tex -> do
GL.textureBinding GL.Texture1D GL.$= Just tex
setTextureFiltering GL.Texture1D TFNone
let numQuad = V.length cornellBox `div` 4
numTri = numQuad * 2
numVtx = numTri * 3
toUnit = 559.2 / 2
scale = 1 / (sqrt (2 * 2 + 2 * 2 + 2 * 2) / 2) * 0.99
vtx = flip concatMap ([0..numQuad - 1] :: [Int]) $ \quadIdx ->
[ ((cornellBox V.! (quadIdx * 4 + 0)) / toUnit - 1) ^* scale
, ((cornellBox V.! (quadIdx * 4 + 1)) / toUnit - 1) ^* scale
, ((cornellBox V.! (quadIdx * 4 + 3)) / toUnit - 1) ^* scale
, ((cornellBox V.! (quadIdx * 4 + 3)) / toUnit - 1) ^* scale
, ((cornellBox V.! (quadIdx * 4 + 1)) / toUnit - 1) ^* scale
, ((cornellBox V.! (quadIdx * 4 + 2)) / toUnit - 1) ^* scale
]
in withArray vtx $ GL.texImage1D GL.Texture1D
GL.NoProxy
0
GL.RGB32F
(GL.TextureSize1D $ fromIntegral numVtx)
0
. GL.PixelData GL.RGB GL.Float
return tex
cornellBox :: V.Vector (V3 Float)
cornellBox =
V3 552.8 0.0 0.0
, V3 0.0 0.0 0.0
, V3 0.0 0.0 559.2
, V3 549.6 0.0 559.2
, V3 556.0 548.8 0.0
, V3 556.0 548.8 559.2
, V3 0.0 548.8 559.2
, V3 0.0 548.8 0.0
, V3 549.6 0.0 559.2
, V3 0.0 0.0 559.2
, V3 0.0 548.8 559.2
, V3 556.0 548.8 559.2
, V3 0.0 0.0 559.2
, V3 0.0 0.0 0.0
, V3 0.0 548.8 0.0
, V3 0.0 548.8 559.2
, V3 552.8 0.0 0.0
, V3 549.6 0.0 559.2
, V3 556.0 548.8 559.2
, V3 556.0 548.8 0.0
, V3 343.0 (548.8 - 0.1) 227.0
, V3 343.0 (548.8 - 0.1) 332.0
, V3 213.0 (548.8 - 0.1) 332.0
, V3 213.0 (548.8 - 0.1) 227.0
, V3 130.0 165.0 65.0
, V3 82.0 165.0 225.0
, V3 240.0 165.0 272.0
, V3 290.0 165.0 114.0
, V3 290.0 0.0 114.0
, V3 290.0 165.0 114.0
, V3 240.0 165.0 272.0
, V3 240.0 0.0 272.0
, V3 130.0 0.0 65.0
, V3 130.0 165.0 65.0
, V3 290.0 165.0 114.0
, V3 290.0 0.0 114.0
, V3 82.0 0.0 225.0
, V3 82.0 165.0 225.0
, V3 130.0 165.0 65.0
, V3 130.0 0.0 65.0
, V3 240.0 0.0 272.0
, V3 240.0 165.0 272.0
, V3 82.0 165.0 225.0
, V3 82.0 0.0 225.0
, V3 423.0 330.0 247.0
, V3 265.0 330.0 296.0
, V3 314.0 330.0 456.0
, V3 472.0 330.0 406.0
, V3 423.0 0.0 247.0
, V3 423.0 330.0 247.0
, V3 472.0 330.0 406.0
, V3 472.0 0.0 406.0
, V3 472.0 0.0 406.0
, V3 472.0 330.0 406.0
, V3 314.0 330.0 456.0
, V3 314.0 0.0 456.0
, V3 314.0 0.0 456.0
, V3 314.0 330.0 456.0
, V3 265.0 330.0 296.0
, V3 265.0 0.0 296.0
, V3 265.0 0.0 296.0
, V3 265.0 330.0 296.0
, V3 423.0 330.0 247.0
, V3 423.0 0.0 247.0
]
|
7cc2c4dc03d48c84f67707e9d2a81b9d1771ac15a3dcc812157e3570cea0c8d0 | project-oak/hafnium-verification | Epilogues.ml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module F = Format
let early_callback = ref (fun () -> ())
let late_callback = ref (fun () -> ())
let register callback_ref ~f ~description =
let f_no_exn () =
try f ()
with exn ->
F.eprintf "%a: Error while running epilogue \"%s\":@ %a.@ Powering through...@." Pid.pp
(Unix.getpid ()) description Exn.pp exn
in
let g = !callback_ref in
callback_ref := fun () -> f_no_exn () ; g ()
let register_early ~f ~description = register early_callback ~f ~description
let register_late ~f ~description = register late_callback ~f ~description
let early () = !early_callback ()
let late () = !late_callback ()
let run () = early () ; late ()
(* Run the epilogues when we get SIGINT (Control-C). *)
let () =
let run_epilogues_on_signal s =
F.eprintf "*** %s: Caught %s, time to die@."
(Filename.basename Sys.executable_name)
(Signal.to_string s) ;
run ()
in
Signal.Expert.handle Signal.int run_epilogues_on_signal
let reset () =
(early_callback := fun () -> ()) ;
late_callback := fun () -> ()
let register = register_early
| null | https://raw.githubusercontent.com/project-oak/hafnium-verification/6071eff162148e4d25a0fedaea003addac242ace/experiments/ownership-inference/infer/infer/src/base/Epilogues.ml | ocaml | Run the epilogues when we get SIGINT (Control-C). |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module F = Format
let early_callback = ref (fun () -> ())
let late_callback = ref (fun () -> ())
let register callback_ref ~f ~description =
let f_no_exn () =
try f ()
with exn ->
F.eprintf "%a: Error while running epilogue \"%s\":@ %a.@ Powering through...@." Pid.pp
(Unix.getpid ()) description Exn.pp exn
in
let g = !callback_ref in
callback_ref := fun () -> f_no_exn () ; g ()
let register_early ~f ~description = register early_callback ~f ~description
let register_late ~f ~description = register late_callback ~f ~description
let early () = !early_callback ()
let late () = !late_callback ()
let run () = early () ; late ()
let () =
let run_epilogues_on_signal s =
F.eprintf "*** %s: Caught %s, time to die@."
(Filename.basename Sys.executable_name)
(Signal.to_string s) ;
run ()
in
Signal.Expert.handle Signal.int run_epilogues_on_signal
let reset () =
(early_callback := fun () -> ()) ;
late_callback := fun () -> ()
let register = register_early
|
fcaf530dfed98c7aba80f5c54a3e04837ff2cd4e22463a3cfa71a08a127add3f | tommaisey/aeon | running-max.help.scm | ;; (running-max in trig)
;; Track maximum level. outputs the maximum value received at the
;; input. When triggered, the maximum output value is reset to the
;; current value.
;; in - input signal
;; trig - reset the output value to the current input value
(let* ((t (impulse ar 0.4 0))
(f (mul-add (running-max (dust ar 20) t) 500 200)))
(audition (out 0 (mul (sin-osc ar f 0) 0.2))))
(let* ((t (impulse kr (mouse-x kr 0.01 2 1 0.1) 0))
(f (mul-add (running-max (sin-osc kr 2 0) t) 500 200)))
(audition (out 0 (mul (sin-osc ar f 0) 0.2))))
| null | https://raw.githubusercontent.com/tommaisey/aeon/80744a7235425c47a061ec8324d923c53ebedf15/libs/third-party/sc3/rsc3/help/ugen/triggers/running-max.help.scm | scheme | (running-max in trig)
Track maximum level. outputs the maximum value received at the
input. When triggered, the maximum output value is reset to the
current value.
in - input signal
trig - reset the output value to the current input value |
(let* ((t (impulse ar 0.4 0))
(f (mul-add (running-max (dust ar 20) t) 500 200)))
(audition (out 0 (mul (sin-osc ar f 0) 0.2))))
(let* ((t (impulse kr (mouse-x kr 0.01 2 1 0.1) 0))
(f (mul-add (running-max (sin-osc kr 2 0) t) 500 200)))
(audition (out 0 (mul (sin-osc ar f 0) 0.2))))
|
bc8a274ac2d7cf3c68290f31feb80569fe50029e2c896335a9d030b2f866f891 | W-Net-AI/LISP-CV | package.lisp | ;;;; -*- mode: lisp; indent-tabs: nil -*-
(defpackage :lisp-cv
(:nicknames #:lisp-cv #:lcv #:cv)
(:use #:cffi #:common-lisp #:swank #:trivial-garbage #:lisp-executable #:bordeaux-threads)
(:shadow #:abs #:exp #:fill #:length #:load #:log #:min #:max #:open #:read #:set #:sqrt #:write)
(:export
;; Default parameters.
#:*camera-index*
#:*default-width*
#:*default-height*
#:*frames-per-second*
#:*millis-per-frame*
utils - Utilities
#:->
#:cat
#:do-while
#:dup
#:rename-package-nicknames
#:full-pathname
#:*lisp-cv-data-dir*
#:*lisp-cv-src-dir*
#:make-pathname-list
#:mklist
#:partition
#:println
#:run-program
;; Change default parameters
#:def-params
;; Live code editing
#:continuable
#:update-swank
Macros
#:$
#:@
#:alloc
#:free
#:gced-foreign-alloc
#:size-of
C - Interop
#:%string
#:c-string
#:c-string-to-string
#:std-string-to-c-string
;; Extra OpenCV constants
#:+max-dim+
#:+pi+
;; C Constants
;; C Integer Limits
#:+char-bit+
#:+schar-min+
#:+schar-max+
#:+uchar-max+
#:+char-min+
#:+char-min-j+
#:+char-max+
#:+char-max-j+
#:+mb-len-max+
#:+shrt-min+
#:+shrt-max+
#:+ushrt-max+
#:+int-min+
#:+int-max+
#:+uint-max+
#:+long-min+
#:+long-max+
#:+ulong-max+
#:+dbl-max+
#:+flt-max+
;; DELETE
#:del
#:del-ann-mlp
#:del-ann-mlp-train-params
#:del-cascade-classifier
#:del-dmatch
#:del-d-tree
#:del-d-tree-params
#:del-file-node
#:del-file-storage
#:del-hog-descriptor
#:del-k-nearest
#:del-key-point
#:del-mat
#:del-mat-expr
#:del-normal-bayes-classifier
#:del-pca
#:del-point
#:del-point-2d
#:del-point-2f
#:del-point-3d
#:del-point-3f
#:del-point-3i
#:del-range
#:del-rect
#:del-rng
#:del-rot-rect
#:del-scalar
#:del-size
#:del-std-string
#:del-svm
#:del-svm-params
#:del-term-crit
#:del-vec-2b
#:del-vec-3b
#:del-vec-4b
#:del-vec-2d
#:del-vec-3d
#:del-vec-4d
#:del-vec-6d
#:del-vec-2f
#:del-vec-3f
#:del-vec-4f
#:del-vec-6f
#:del-vec-2i
#:del-vec-3i
#:del-vec-4i
#:del-vec-6i
#:del-vec-8i
#:del-vec-2s
#:del-vec-3s
#:del-vec-4s
#:del-vec-2w
#:del-vec-3w
#:del-vec-4w
#:del-vector-char
#:del-vector-double
#:del-vector-dmatch
#:del-vector-float
#:del-vector-int
#:del-vector-key-point
#:del-vector-mat
#:del-vector-point
#:del-vector-point-2f
#:del-vector-rect
#:del-vector-uchar
#:del-vector-vec-2b
#:del-vector-vec-3b
#:del-vector-vec-4b
#:del-vector-vec-2d
#:del-vector-vec-3d
#:del-vector-vec-4d
#:del-vector-vec-6d
#:del-vector-vec-2f
#:del-vector-vec-3f
#:del-vector-vec-4f
#:del-vector-vec-6f
#:del-vector-vec-2i
#:del-vector-vec-3i
#:del-vector-vec-4i
#:del-vector-vec-6i
#:del-vector-vec-8i
#:del-vector-vec-2s
#:del-vector-vec-3s
#:del-vector-vec-4s
#:del-vector-vec-2w
#:del-vector-vec-3w
#:del-vector-vec-4w
#:del-video-capture
#:del-vid-writer
;; WITH-MACROS
#:with-captured-camera
#:with-captured-file
#:with-cascade-classifier
#:with-dmatch
#:with-d-tree
#:with-d-tree-params
#:with-file-node
#:with-file-storage
#:with-hog-descriptor
#:with-key-point
#:with-k-nearest
#:with-mat
#:with-mat-expr
#:with-named-window
#:with-normal-bayes-classifier
#:with-object
#:with-pca
#:with-point
#:with-point-2d
#:with-point-2f
#:with-point-3d
#:with-point-3f
#:with-point-3i
#:with-rect
#:with-range
#:with-rng
#:with-rotated-rect
#:with-scalar
#:with-size
#:with-surf
#:with-svm
#:with-svm-params
#:with-term-criteria
#:with-vec-2b
#:with-vec-2d
#:with-vec-2f
#:with-vec-2i
#:with-vec-2s
#:with-vec-2w
#:with-vec-3b
#:with-vec-3d
#:with-vec-3f
#:with-vec-3i
#:with-vec-3s
#:with-vec-3w
#:with-vec-4b
#:with-vec-4d
#:with-vec-4f
#:with-vec-4i
#:with-vec-4s
#:with-vec-4w
#:with-vec-6d
#:with-vec-6f
#:with-vec-6i
#:with-vec-8i
#:with-vector-char
#:with-vector-double
#:with-vector-dmatch
#:with-vector-float
#:with-vector-int
#:with-vector-key-point
#:with-vector-mat
#:with-vector-point
#:with-vector-point-2f
#:with-vector-rect
#:with-vector-uchar
#:with-vector-vec-2b
#:with-vector-vec-3b
#:with-vector-vec-4b
#:with-vector-vec-2d
#:with-vector-vec-3d
#:with-vector-vec-4d
#:with-vector-vec-6d
#:with-vector-vec-2f
#:with-vector-vec-3f
#:with-vector-vec-4f
#:with-vector-vec-6f
#:with-vector-vec-2i
#:with-vector-vec-3i
#:with-vector-vec-4i
#:with-vector-vec-6i
#:with-vector-vec-8i
#:with-vector-vec-2s
#:with-vector-vec-3s
#:with-vector-vec-4s
#:with-vector-vec-2w
#:with-vector-vec-3w
#:with-vector-vec-4w
#:with-video-capture
#:with-video-writer
;; DEFINE-FOREIGN-TYPE
#:string*
#:ann-mlp
#:ann-mlp-train-params
#:bf-matcher
#:brisk
#:c-pointer
#:cascade-classifier
#:dmatch
#:d-tree
#:d-tree-node
#:d-tree-params
#:file-node
#:file-storage
#:flann-based-matcher
#:hog-descriptor
#:key-point
#:k-nearest
#:mat
#:mat-expr
#:mat-struct
#:mouse-callback
#:normal-bayes-classifier
#:pca
#:point
#:point-2d
#:point-2f
#:point-3d
#:point-3f
#:point-3i
#:range
#:rect
#:rng
#:rotated-rect
#:scalar
#:size
#:svm
#:svm-params
#:surf
#:term-criteria
#:term-criteria-struct
#:trackbar-callback
#:vec-2b
#:vec-3b
#:vec-4b
#:vec-2d
#:vec-3d
#:vec-4d
#:vec-6d
#:vec-2f
#:vec-3f
#:vec-4f
#:vec-6f
#:vec-2i
#:vec-3i
#:vec-4i
#:vec-6i
#:vec-8i
#:vec-2s
#:vec-3s
#:vec-4s
#:vec-2w
#:vec-3w
#:vec-4w
#:vector-char
#:vector-dmatch
#:vector-double
#:vector-float
#:vector-int
#:vector-key-point
#:vector-mat
#:vector-point
#:vector-point-2f
#:vector-rect
#:vector-uchar
#:vector-vec-2b
#:vector-vec-3b
#:vector-vec-4b
#:vector-vec-2d
#:vector-vec-3d
#:vector-vec-4d
#:vector-vec-6d
#:vector-vec-2f
#:vector-vec-3f
#:vector-vec-4f
#:vector-vec-6f
#:vector-vec-2i
#:vector-vec-3i
#:vector-vec-4i
#:vector-vec-6i
#:vector-vec-8i
#:video-capture
#:video-writer
#:cv-ann-mlp
#:cv-ann-mlp-train-params
#:cv-bf-matcher
#:cv-brisk
#:cv-cascade-classifier
#:cv-dmatch
#:cv-d-tree
#:cv-d-tree-node
#:cv-d-tree-params
#:cv-file-node
#:cv-file-storage
#:cv-hog-descriptor
#:cv-key-point
#:cv-k-nearest
#:cv-mat
#:cv-mat-expr
#:cv-mat-struct
#:cv-mouse-callback
#:cv-normal-bayes-classifier
#:cv-pca
#:cv-point
#:cv-point-2d
#:cv-point-2f
#:cv-point-3d
#:cv-point-3f
#:cv-point-3i
#:cv-range
#:cv-rect
#:cv-rng
#:cv-rotated-rect
#:cv-scalar
#:cv-size
#:std-string
#:cv-svm
#:cv-svm-params
#:cv-surf
#:cv-term-criteria
#:cv-trackbar-callback
#:cv-vec-2b
#:cv-vec-3b
#:cv-vec-4b
#:cv-vec-2d
#:cv-vec-3d
#:cv-vec-4d
#:cv-vec-6d
#:cv-vec-2f
#:cv-vec-3f
#:cv-vec-4f
#:cv-vec-6f
#:cv-vec-2i
#:cv-vec-3i
#:cv-vec-4i
#:cv-vec-6i
#:cv-vec-8i
#:cv-vec-2s
#:cv-vec-3s
#:cv-vec-4s
#:cv-vec-2w
#:cv-vec-3w
#:cv-vec-4w
#:std-vector-char
#:std-vector-dmatch
#:std-vector-double
#:std-vector-float
#:std-vector-int
#:std-vector-key-point
#:std-vector-mat
#:std-vector-point
#:std-vector-point-2f
#:std-vector-rect
#:std-vector-uchar
#:std-vector-vec-2b
#:std-vector-vec-3b
#:std-vector-vec-4b
#:std-vector-vec-2d
#:std-vector-vec-3d
#:std-vector-vec-4d
#:std-vector-vec-6d
#:std-vector-vec-2f
#:std-vector-vec-3f
#:std-vector-vec-4f
#:std-vector-vec-6f
#:std-vector-vec-2i
#:std-vector-vec-3i
#:std-vector-vec-4i
#:std-vector-vec-6i
#:std-vector-vec-8i
#:cv-video-capture
#:cv-video-writer
;;; VECTORS
#:make-vector-char
#:vector-char
#:c-arr-to-vec-char
#:seq-to-vec-char
#:vec-char-to-c-arr
#:vec-char-length
#:vec-char-to-lisp-list
#:vec-char-to-lisp-vec
#:vec-char-at
#:vec-char-push-back
#:make-vector-dmatch
#:vector-dmatch
#:c-arr-to-vec-dmatch
#:list-to-vec-dmatch
#:vec-dmatch-to-c-arr
#:vec-dmatch-length
#:vec-dmatch-to-lisp-list
#:vec-dmatch-to-lisp-vec
#:vec-dmatch-at
#:vec-dmatch-push-back
#:make-vector-double
#:vector-double
#:c-arr-to-vec-double
#:seq-to-vec-double
#:vec-double-to-c-arr
#:vec-double-length
#:vec-double-to-lisp-list
#:vec-double-to-lisp-vec
#:vec-double-at
#:vec-double-push-back
#:make-vector-float
#:vector-float
#:c-arr-to-vec-float
#:seq-to-vec-float
#:vec-float-to-c-arr
#:vec-float-length
#:vec-float-to-lisp-list
#:vec-float-to-lisp-vec
#:vec-float-at
#:vec-float-push-back
#:make-vector-int
#:vector-int
#:c-arr-to-vec-int
#:seq-to-vec-int
#:vec-int-to-c-arr
#:vec-int-length
#:vec-int-to-lisp-list
#:vec-int-to-lisp-vec
#:vec-int-at
#:vec-int-push-back
#:make-vector-key-point
#:vector-key-point
#:c-arr-to-vec-key-point
#:list-to-vec-key-point
#:vec-key-point-to-c-arr
#:vec-key-point-length
#:vec-key-point-to-lisp-list
#:vec-key-point-to-lisp-vec
#:vec-key-point-at
#:vec-key-point-push-back
#:make-vector-mat
#:vector-mat
#:c-arr-to-vec-mat
#:seq-to-vec-mat
#:vec-mat-to-c-arr
#:vec-mat-length
#:vec-mat-to-lisp-list
#:vec-mat-to-lisp-vec
#:vec-mat-at
#:vec-mat-push-back
#:make-vector-point
#:vector-point
#:c-arr-to-vec-point
#:seq-to-vec-point
#:vec-point-to-c-arr
#:vec-point-length
#:vec-point-to-lisp-list
#:vec-point-to-lisp-vec
#:vec-point-at
#:vec-point-push-back
#:make-vector-point-2f
#:vector-point-2f
#:c-arr-to-vec-point-2f
#:seq-to-vec-point-2f
#:vec-point-2f-to-c-arr
#:vec-point-2f-length
#:vec-point-2f-to-lisp-list
#:vec-point-2f-to-lisp-vec
#:vec-point-2f-at
#:vec-point-2f-push-back
#:make-vector-rect
#:vector-rect
#:c-arr-to-vec-rect
#:seq-to-vec-rect
#:vec-rect-to-c-arr
#:vec-rect-length
#:vec-rect-to-lisp-list
#:vec-rect-to-lisp-vec
#:vec-rect-at
#:vec-rect-push-back
#:make-vector-uchar
#:vector-uchar
#:c-arr-to-vec-uchar
#:seq-to-vec-uchar
#:vec-uchar-to-c-arr
#:vec-uchar-length
#:vec-uchar-to-lisp-list
#:vec-uchar-to-lisp-vec
#:vec-uchar-at
#:vec-uchar-push-back
#:make-vector-vec-2d
#:vector-vec-2d
#:c-arr-to-vec-vec-2d
#:seq-to-vec-vec-2d
#:vec-vec-2d-to-c-arr
#:vec-vec-2d-length
#:vec-vec-2d-to-lisp-list
#:vec-vec-2d-to-lisp-vec
#:vec-vec-2d-at
#:vec-vec-2d-push-back
#:make-vector-vec-3d
#:vector-vec-3d
#:c-arr-to-vec-vec-3d
#:seq-to-vec-vec-3d
#:vec-vec-3d-to-c-arr
#:vec-vec-3d-length
#:vec-vec-3d-to-lisp-list
#:vec-vec-3d-to-lisp-vec
#:vec-vec-3d-at
#:vec-vec-3d-push-back
#:make-vector-vec-4d
#:vector-vec-4d
#:c-arr-to-vec-vec-4d
#:seq-to-vec-vec-4d
#:vec-vec-4d-to-c-arr
#:vec-vec-4d-length
#:vec-vec-4d-to-lisp-list
#:vec-vec-4d-to-lisp-vec
#:vec-vec-4d-at
#:vec-vec-4d-push-back
#:make-vector-vec-6d
#:vector-vec-6d
#:c-arr-to-vec-vec-6d
#:seq-to-vec-vec-6d
#:vec-vec-6d-to-c-arr
#:vec-vec-6d-length
#:vec-vec-6d-to-lisp-list
#:vec-vec-6d-to-lisp-vec
#:vec-vec-6d-at
#:vec-vec-6d-push-back
#:make-vector-vec-2f
#:vector-vec-2f
#:c-arr-to-vec-vec-2f
#:seq-to-vec-vec-2f
#:vec-vec-2f-to-c-arr
#:vec-vec-2f-length
#:vec-vec-2f-to-lisp-list
#:vec-vec-2f-to-lisp-vec
#:vec-vec-2f-at
#:vec-vec-2f-push-back
#:make-vector-vec-3f
#:vector-vec-3f
#:c-arr-to-vec-vec-3f
#:seq-to-vec-vec-3f
#:vec-vec-3f-to-c-arr
#:vec-vec-3f-length
#:vec-vec-3f-to-lisp-list
#:vec-vec-3f-to-lisp-vec
#:vec-vec-3f-at
#:vec-vec-3f-push-back
#:make-vector-vec-4f
#:vector-vec-4f
#:c-arr-to-vec-vec-4f
#:seq-to-vec-vec-4f
#:vec-vec-4f-to-c-arr
#:vec-vec-4f-length
#:vec-vec-4f-to-lisp-list
#:vec-vec-4f-to-lisp-vec
#:vec-vec-4f-at
#:vec-vec-4f-push-back
#:make-vector-vec-6f
#:vector-vec-6f
#:c-arr-to-vec-vec-6f
#:seq-to-vec-vec-6f
#:vec-vec-6f-to-c-arr
#:vec-vec-6f-length
#:vec-vec-6f-to-lisp-list
#:vec-vec-6f-to-lisp-vec
#:vec-vec-6f-at
#:vec-vec-6f-push-back
#:make-vector-vec-2i
#:vector-vec-2i
#:c-arr-to-vec-vec-2i
#:seq-to-vec-vec-2i
#:vec-vec-2i-to-c-arr
#:vec-vec-2i-length
#:vec-vec-2i-to-lisp-list
#:vec-vec-2i-to-lisp-vec
#:vec-vec-2i-at
#:vec-vec-2i-push-back
#:make-vector-vec-3i
#:vector-vec-3i
#:c-arr-to-vec-vec-3i
#:seq-to-vec-vec-3i
#:vec-vec-3i-to-c-arr
#:vec-vec-3i-length
#:vec-vec-3i-to-lisp-list
#:vec-vec-3i-to-lisp-vec
#:vec-vec-3i-at
#:vec-vec-3i-push-back
#:make-vector-vec-4i
#:vector-vec-4i
#:c-arr-to-vec-vec-4i
#:seq-to-vec-vec-4i
#:vec-vec-4i-to-c-arr
#:vec-vec-4i-length
#:vec-vec-4i-to-lisp-list
#:vec-vec-4i-to-lisp-vec
#:vec-vec-4i-at
#:vec-vec-4i-push-back
#:make-vector-vec-6i
#:vector-vec-6i
#:c-arr-to-vec-vec-6i
#:seq-to-vec-vec-6i
#:vec-vec-6i-to-c-arr
#:vec-vec-6i-length
#:vec-vec-6i-to-lisp-list
#:vec-vec-6i-to-lisp-vec
#:vec-vec-6i-at
#:vec-vec-6i-push-back
#:make-vector-vec-8i
#:vector-vec-8i
#:c-arr-to-vec-vec-8i
#:seq-to-vec-vec-8i
#:vec-vec-8i-to-c-arr
#:vec-vec-8i-length
#:vec-vec-8i-to-lisp-list
#:vec-vec-8i-to-lisp-vec
#:vec-vec-8i-at
#:vec-vec-8i-push-back
;; DEFGENERIC
#:get*
#:angle
#:assign
#:bounding-rect
#:center
#:clone
#:compute
#:create
#:data
#:detect
#:dot
#:height
#:is-opened
#:match
#:mean
#:predict
#:push-back
#:release
#:save
#:size
#:train
#:type*
#:width
#:x
#:y
#:z
;;; Other generic functions and defuns
;;; used to re-import shadowed symbols.
#:abs
#:exp
#:load
#:length
#:log
#:max
#:min
#:open
#:read
#:set
#:sqrt
#:write
;; core - Basic Structures
#:+8uc1+
#:+8uc2+
#:+8uc3+
#:+8uc4+
#:+8sc1+
#:+8sc2+
#:+8sc3+
#:+8sc4+
#:+16uc1+
#:+16uc2+
#:+16uc3+
#:+16uc4+
#:+16sc1+
#:+16sc2+
#:+16sc3+
#:+16sc4+
#:+32sc1+
#:+32sc2+
#:+32sc3+
#:+32sc4+
#:+32f+
#:+32fc1+
#:+32fc2+
#:+32fc3+
#:+32fc4+
#:+64fc1+
#:+64fc2+
#:+64fc3+
#:+64fc4+
#:+termcrit-iter+
#:+termcrit-number+
#:+termcrit-eps+
#:step*
#:trace*
#:<<
#:>>
#:add
#:adjust-roiadd all pathnames in a directory to a list lisp
#:area
#:arr-to-mat
#:assign-val
#:at
#:at-char
#:at-double
#:at-float
#:at-int
#:at-short
#:at-uchar
#:at-uint
#:at-ushort
#:at-point-2d
#:at-point-2f
#:at-point-3d
#:at-point-3f
#:at-point-3i
#:at-vec-2b
#:at-vec-2d
#:at-vec-2f
#:at-vec-2i
#:at-vec-2s
#:at-vec-2w
#:at-vec-3b
#:at-vec-3d
#:at-vec-3f
#:at-vec-3i
#:at-vec-3s
#:at-vec-3w
#:at-vec-4b
#:at-vec-4d
#:at-vec-4f
#:at-vec-4i
#:at-vec-4s
#:at-vec-4w
#:br
#:channels
#:class-id
#:col-range
#:cols
#:%convert-to
#:convert-to
#:cross
#:copy-to
#:create-mat-typed
#:create-mat-with-data
#:create-mat-with-element
#:create-mat-with-range
#:create-mat-with-value
#:data
#:depth
#:diag
#:distance
#:div
#:dmatch
#:%dmatch-distance
#:%dmatch-img-idx
#:%dmatch-query-idx
#:%dmatch-train-idx
#:dmatch-distance
#:dmatch-img-idx
#:dmatch-query-idx
#:dmatch-train-idx
#:dot-2d
#:dot-2f
#:dot-2i
#:dot-3d
#:dot-3f
#:dot-3i
#:elem-size
#:elem-size1
#:empty
#:epsilon
#:force
#:img-idx
#:inv
#:is-continuous
#:key-point
#:%key-point-angle
#:%key-point-class-id
#:%key-point-octave
#:%key-point-pt
#:%key-point-response
#:%key-point-size
#:%key-point-x
#:%key-point-y
#:key-point-angle
#:key-point-class-id
#:key-point-octave
#:key-point-pt
#:key-point-response
#:key-point-size
#:key-point-x
#:key-point-y
#:locate-roi
#:make-dmatch
#:make-key-point
#:make-mat
#:make-point
#:make-point-2d
#:make-point-2f
#:make-point-3d
#:make-point-3f
#:make-point-3i
#:make-range
#:make-range-all
#:make-rect
#:make-rotated-rect
#:make-scalar
#:make-scalar-all
#:make-size
#:make-term-criteria
#:make-vec-2b
#:make-vec-3b
#:make-vec-4b
#:make-vec-2d
#:make-vec-3d
#:make-vec-4d
#:make-vec-6d
#:make-vec-2f
#:make-vec-3f
#:make-vec-4f
#:make-vec-6f
#:make-vec-2i
#:make-vec-3i
#:make-vec-4i
#:make-vec-6i
#:make-vec-8i
#:make-vec-2s
#:make-vec-3s
#:make-vec-4s
#:make-vec-2w
#:make-vec-3w
#:make-vec-4w
#:%mat
#:mat
#:mat-assign
#:mat-assign-val
#:mat-and-cffi-type
#:mat-at-char
#:mat-at-double
#:mat-at-float
#:mat-at-int
#:mat-at-short
#:mat-at-uchar
#:mat-at-uint
#:mat-at-ushort
#:mat-at-point-2d
#:mat-at-point-2f
#:mat-at-point-3d
#:mat-at-point-3f
#:mat-at-point-3i
#:mat-at-vec-2b
#:mat-at-vec-2d
#:mat-at-vec-2f
#:mat-at-vec-2i
#:mat-at-vec-2s
#:mat-at-vec-2w
#:mat-at-vec-3b
#:mat-at-vec-3d
#:mat-at-vec-3f
#:mat-at-vec-3i
#:mat-at-vec-3s
#:mat-at-vec-3w
#:mat-at-vec-4b
#:mat-at-vec-4d
#:mat-at-vec-4f
#:mat-at-vec-4i
#:mat-at-vec-4s
#:mat-at-vec-4w
#:mat-cross
#:mat-depth
#:mat-expr-t
#:mat-eye
#:mat-ones
#:mat-push-back
#:mat-to-arr
#:mat-type
#:mat-zeros
#:max-count
#:mul
#:octave
#:pdm
#:pkp
#:pm
#:point
#:point-x
#:point-y
#:point-2d
#:point-2d-x
#:point-2d-y
#:point-2f
#:point-2f-x
#:point-2f-y
#:point-3d
#:point-3d-x
#:point-3d-y
#:point-3d-z
#:point-3f
#:point-3f-x
#:point-3f-y
#:point-3f-z
#:point-3i
#:point-3i-x
#:point-3i-y
#:point-3i-z
#:pp
#:pr
#:print-mat
#:print-point
#:print-point-2i
#:print-point-2d
#:print-point-2f
#:print-point-3d
#:print-point-3f
#:print-point-3i
#:print-scalar
#:print-vec
#:print-vec-2b
#:print-vec-2d
#:print-vec-2f
#:print-vec-2i
#:print-vec-2s
#:print-vec-2w
#:print-vec-3b
#:print-vec-3d
#:print-vec-3f
#:print-vec-3i
#:print-vec-3s
#:print-vec-3w
#:print-vec-4b
#:print-vec-4d
#:print-vec-4f
#:print-vec-4i
#:print-vec-4s
#:print-vec-4w
#:print-vec-6d
#:print-vec-6f
#:print-vec-6i
#:print-vec-8i
#:promote
#:ps
#:psc
#:ptc
#:pv
#:%ptr
#:ptr
#:query-idx
#:range
#:range-end
#:range-all
#:range-start
#:rect
#:rect-br
#:rect-tl
#:reshape
#:reshape-rows
#:response
#:roi
#:rotated-rect
#:rotated-rect-bounding-rect
#:rotated-rect-center
#:rotated-rect-size
#:row
#:row-range
#:rows
#:scalar
#:scalar-all
#:scale
#:size-assign-to
#:size-from-point
#:size-height
#:size-width
#:step1
#:sub
#:tl
#:term-criteria
#:term-criteria-epsilon
#:term-criteria-max-count
#:term-criteria-type
#:total
#:train-idx
#:vec-2b
#:vec-3b
#:vec-4b
#:vec-2d
#:vec-3d
#:vec-4d
#:vec-6d
#:vec-2f
#:vec-3f
#:vec-4f
#:vec-6f
#:vec-2i
#:vec-3i
#:vec-4i
#:vec-6i
#:vec-8i
#:vec-2s
#:vec-3s
#:vec-4s
#:vec-2w
#:vec-3w
#:vec-4w
;; core - Dynamic Structures
;; core - Operations on Arrays
#:+C+
#:+cmp-eq+
#:+cmp-gt+
#:+cmp-ge+
#:+cmp-lt+
#:+cmp-le+
#:+cmp-ne+
#:+dft-inverse+
#:+dft-scale+
#:+dft-rows+
#:+dft-complex-output+
#:+dft-real-output+
#:+dxt-forward+
#:+dxt-inverse+
#:+dxt-inv-scale+
#:+dxt-inverse-scale+
#:+dxt-rows+
#:+dxt-mul-conj+
#:+decomp-lu+
#:+decomp-svd+
#:+decomp-eig+
#:+decomp-cholesky+
#:+decomp-qr+
#:+decomp-normal+
#:+l1+
#:+l2+
#:+norm-inf+
#:+norm-l1+
#:+norm-l2+
#:+norm-type-mask+
#:+norm-relative+
#:+norm-minmax+
#:+minmax+
#:+relative-c+
#:+relative-l1+
#:+relative-l2+
#:+covar-scrambled+
#:+covar-normal+
#:+covar-use-avg+
#:+covar-scale+
#:+covar-rows+
#:+covar-cols+
#:+rng-uniform+
#:+rng-normal+
#+pca-data-as-row+
#+pca-data-as-col+
#+pca-use-avg+
#:abs-diff
#:add-weighted
#:back-project
#:bitwise-and
#:bitwise-not
#:bitwise-or
#:bitwise-xor
#:calc-covar-matrix
#:check-range
#:complete-symm
#:convert-scale-abs
#:determinant
#:divide
#:eigenvalues
#:eigenvectors
#:flip
#:in-range-s
#:inv
#:invert
#:magnitude
#:mahalanobis
#:make-pca
#:make-rng
#:min-max-loc
#:mul-transposed
#:multiply
#:norm
#:normalize
#:pca
#:pca-eigenvalues
#:pca-eigenvectors
#:pca-mean
#:pca-back-project-2
#:pca-back-project
#:pca-project-2
#:pca-project
#:perspective-transform
#:project
#:phase*
#:pow
#:randu
#:repeat
#:rng
#:scale-add
#:subtract
#:sum
#:uniform
;; core - Drawing Functions
#:+line-4+
#:+line-8+
#:+line-aa+
#:+aa+
#:+filled+
#:+font-hershey-simplex+
#:+font-hershey-plain+
#:+font-hershey-duplex+
#:+font-hershey-complex+
#:+font-hershey-triplex+
#:+font-hershey-complex-small+
#:+font-hershey-script-simplex+
#:+font-hershey-script-complex+
#:+font-italic+
#:bgr
#:circle
#:clip-line
#:ellipse
#:get-text
#:line
#:make-bgr
#:make-rgb
#:put-text
#:rgb
;; core - XML/YAML Persistence
#:+file-storage-read+
#:+file-storage-write+
#:+file-storage-append+
#:+file-storage-memory+
#:file-storage
#:make-file-storage
#:file-storage-open
#:file-storage-release
#:file-storage-write
;; core - Clustering
#:+kmeans-random-centers+
#:+kmeans-pp-centers+
#:+kmeans-use-initial-labels+
#:kmeans
core - Utility and System Functions and Macros
#:+cpu-none+
#:+cpu-mm+
#:+cpu-sse+
#:+cpu-sse2+
#:+cpu-sse3+
#:+cpu-ssse3+
#:+cpu-sse4-1+
#:+cpu-sse4-2+
#:+cpu-popcnt+
#:+cpu-avx+
#:+hardware-max-feature+
#:+max-feature+
#:+stsok+
#:+stsbacktrace+
#:+stserror+
#:+stsinternal+
#:+stsnomem+
#:+stsbadarg+
#:+stsbadfunc+
#:+stsnoconv+
#:+stsautotrace+
#:+headerisnull+
#:+badimagesize+
#:+badoffset+
#:+baddataptr+
#:+badstep+
#:+badmodelorchseq+
#:+badnumchannels+
#:+badnumchannel1u+
#:+baddepth+
#:+badalphachannel+
#:+badorder+
#:+badorigin+
#:+badalign+
#:+badcallback+
#:+badtilesize+
#:+badcoi+
#:+badroisize+
#:+maskistiled+
#:+stsnullptr+
#:+stsveclengtherr+
#:+stsfilterstructcontenterr+
#:+stskernelstructcontenterr+
#:+stsfilteroffseterr+
#:+stsbadsize+
#:+stsdivbyzero+
#:+stsinplacenotsupported+
#:+stsobjectnotfound+
#:+stsunmatchedformats+
#:+stsbadflag+
#:+stsbadpoint+
#:+stsbadmask+
#:+stsunmatchedsizes+
#:+stsunsupportedformat+
#:+stsoutofrange+
#:+stsparseerror+
#:+stsnotimplemented+
#:+stsbadmembreshapelock+
#:+stsassert+
#:+gpunotsupported+
#:+gpuapicallerror+
#:+openglnotsupported+
#:+openglapicallerror+
#:check-hardware-support
#:cube-root
#:fast-atan2
#:get-build-information
#:get-number-of-cpus
#:get-tick-count
#:get-tick-frequency
;; imgproc - Image Filtering
#:+ipl-border-constant+
#:+ipl-border-replicate+
#:+border-constant+
#:+border-replicate+
#:+border-reflect+
#:+border-wrap+
#:+border-reflect-101+
#:+border-reflect101+
#:+border-default+
#:+border-transparent+
#:+border-isolated+
#:+shape-rect+
#:+shape-cross+
#:+shape-ellipse+
#:+morph-rect+
#:+morph-cross+
#:+morph-ellipse+
#:+shape-custom+
#:+gaussian-5x5+
#:+blur-no-scale+
#:+blur+
#:+gaussian+
#:+median+
#:+bilateral+
#:+scharr+
#:bilateral-filter
#:blur
#:copy-make-border
#:erode
#:dilate
#:filter-2d
#:gaussian-blur
#:laplacian
#:make-morphology-default-border-value
#:median-blur
#:morphology-default-border-value
#:morphology-ex
#:pyr-down
#:pyr-up
#:scharr
#:sobel
;; imgproc - Geometric Image Transformations
#:+warp-fill-outliers+
#:+warp-inverse-map+
#:+inter-nearest+
#:+inter-linear+
#:+inter-cubic+
#:+inter-area+
#:+inter-lanczos4+
#:get-affine-transform
#:get-perspective-transform
#:get-rotation-matrix-2d
#:invert-affine-transform
#:remap
#:%resize
#:resize
#:warp-affine
#:warp-perspective
;; imgproc - Miscellaneous Image Transformations
#:+bgr2bgra+
#:+rgb2rgba+
#:+bgra2bgr+
#:+rgba2rgb+
#:+bgr2rgba+
#:+rgb2bgra+
#:+rgba2bgr+
#:+bgra2rgb+
#:+bgr2rgb+
#:+rgb2bgr+
#:+bgra2rgba+
#:+rgba2bgra+
#:+bgr2gray+
#:+rgb2gray+
#:+gray2bgr+
#:+gray2rgb+
#:+gray2bgra+
#:+gray2rgba+
#:+bgra2gray+
#:+rgba2gray+
#:+bgr2bgr565+
#:+rgb2bgr565+
#:+bgr5652bgr+
#:+bgr5652rgb+
#:+bgra2bgr565+
#:+rgba2bgr565+
#:+bgr5652bgra+
#:+bgr5652rgba+
#:+gray2bgr565+
#:+bgr5652gray+
#:+bgr2bgr555+
#:+rgb2bgr555+
#:+bgr5552bgr+
#:+bgr5552rgb+
#:+bgra2bgr555+
#:+rgba2bgr555+
#:+bgr5552bgra+
#:+bgr5552rgba+
#:+gray2bgr555+
#:+bgr5552gray+
#:+bgr2xyz+
#:+rgb2xyz+
#:+xyz2bgr+
#:+xyz2rgb+
#:+bgr2ycrcb+
#:+rgb2ycrcb+
#:+ycrcb2bgr+
#:+ycrcb2rgb+
#:+bgr2hsv+
#:+rgb2hsv+
#:+bgr2lab+
#:+rgb2lab+
#:+bayerbg2bgr+
#:+bayergb2bgr+
#:+bayerrg2bgr+
#:+bayergr2bgr+
#:+bayerbg2rgb+
#:+bayergb2rgb+
#:+bayerrg2rgb+
#:+bayergr2rgb+
#:+bgr2luv+
#:+rgb2luv+
#:+bgr2hls+
#:+rgb2hls+
#:+hsv2bgr+
#:+hsv2rgb+
#:+lab2bgr+
#:+lab2rgb+
#:+luv2bgr+
#:+luv2rgb+
#:+hls2bgr+
#:+hls2rgb+
#:+bayerbg2bgr-vng+
#:+bayergb2bgr-vng+
#:+bayerrg2bgr-vng+
#:+bayergr2bgr-vng+
#:+bayerbg2rgb-vng+
#:+bayergb2rgb-vng+
#:+bayerrg2rgb-vng+
#:+bayergr2rgb-vng+
#:+bgr2hsv-full+
#:+rgb2hsv-full+
#:+bgr2hls-full+
#:+rgb2hls-full+
#:+hsv2bgr-full+
#:+hsv2rgb-full+
#:+hls2bgr-full+
#:+hls2rgb-full+
#:+lbgr2lab+
#:+lrgb2lab+
#:+lbgr2luv+
#:+lrgb2luv+
#:+lab2lbgr+
#:+lab2lrgb+
#:+luv2lbgr+
#:+luv2lrgb+
#:+bgr2yuv+
#:+rgb2yuv+
#:+yuv2bgr+
#:+yuv2rgb+
#:+bayerbg2gray+
#:+bayergb2gray+
#:+bayerrg2gray+
#:+bayergr2gray+
#:+yuv2rgb-nv12+
#:+yuv2bgr-nv12+
#:+yuv2rgb-nv21+
#:+yuv2bgr-nv21+
#:+yuv420sp2rgb+
#:+yuv420sp2bgr+
#:+yuv2rgba-nv12+
#:+yuv2bgra-nv12+
#:+yuv2rgba-nv21+
#:+yuv2bgra-nv21+
#:+yuv420sp2rgba+
#:+yuv420sp2bgra+
#:+yuv2rgb-yv12+
#:+yuv2bgr-yv12+
#:+yuv2rgb-iyuv+
#:+yuv2bgr-iyuv+
#:+yuv2rgb-i420+
#:+yuv2bgr-i420+
#:+yuv420p2rgb+
#:+yuv420p2bgr+
#:+yuv2rgba-yv12+
#:+yuv2bgra-yv12+
#:+yuv2rgba-iyuv+
#:+yuv2bgra-iyuv+
#:+yuv2rgba-i420+
#:+yuv2bgra-i420+
#:+yuv420p2rgba+
#:+yuv420p2bgra+
#:+yuv2gray-420+
#:+yuv2gray-nv21+
#:+yuv2gray-nv12+
#:+yuv2gray-yv12+
#:+yuv2gray-iyuv+
#:+yuv2gray-i420+
#:+yuv420sp2gray+
#:+yuv420p2gray+
#:+yuv2rgb-uyvy+
#:+yuv2bgr-uyvy+
+ yuv2rgb - vyuy+
;;+yuv2bgr-vyuy+
#:+yuv2rgb-y422+
#:+yuv2bgr-y422+
#:+yuv2rgb-uynv+
#:+yuv2bgr-uynv+
#:+yuv2rgba-uyvy+
#:+yuv2bgra-uyvy+
;;+yuv2rgba-vyuy+
;;+yuv2bgra-vyuy+
#:+yuv2rgba-y422+
#:+yuv2bgra-y422+
#:+yuv2rgba-uynv+
#:+yuv2bgra-uynv+
#:+yuv2rgb-yuy2+
#:+yuv2bgr-yuy2+
#:+yuv2rgb-yvyu+
#:+yuv2bgr-yvyu+
#:+yuv2rgb-yuyv+
#:+yuv2bgr-yuyv+
#:+yuv2rgb-yunv+
#:+yuv2bgr-yunv+
#:+yuv2rgba-yuy2+
#:+yuv2bgra-yuy2+
#:+yuv2rgba-yvyu+
#:+yuv2bgra-yvyu+
#:+yuv2rgba-yuyv+
#:+yuv2bgra-yuyv+
#:+yuv2rgba-yunv+
#:+yuv2bgra-yunv+
#:+yuv2gray-uyvy+
#:+yuv2gray-yuy2+
;;+yuv2gray-vyuy+
#:+yuv2gray-y422+
#:+yuv2gray-uynv+
#:+yuv2gray-yvyu+
#:+yuv2gray-yuyv+
#:+yuv2gray-yunv+
#:+rgba2mrgba+
#:+mrgba2rgba+
#:+rgb2yuv-i420+
#:+bgr2yuv-i420+
#:+rgb2yuv-iyuv+
#:+bgr2yuv-iyuv+
#:+rgba2yuv-i420+
#:+bgra2yuv-i420+
#:+rgba2yuv-iyuv+
#:+bgra2yuv-iyuv+
#:+rgb2yuv-yv12+
#:+bgr2yuv-yv12+
#:+rgba2yuv-yv12+
#:+bgra2yuv-yv12+
#:+colorcvt-max+
#:+thresh-binary+
#:+thresh-binary-inv+
#:+thresh-trunc+
#:+thresh-tozero+
#:+thresh-tozero-inv+
#:+adaptitor-doubve-thresh-mean-c+
#:+adaptive-thresh-gaussian-c+
#:+dist-l1+
#:+dist-l2+
#:+dist-c+
#:+dist-label-ccomp+
#:+dist-label-pixel+
#:+dist-mask-3+
#:+dist-mask-5+
#:+dist-mask-precise+
#:+floodfill-fixed-range+
#:+floodfill-mask-only+
#:adaptive-threshold
#:cvt-color
#:distance-transform
#:flood-fill
#:threshold
imgproc - Histograms
#:equalize-hist
;; imgproc - Structural Analysis and Shape Descriptors
imgproc - Motion Analysis and Object Tracking
#:phase-correlate
imgproc - Feature Detection
#:+lsd-refine-none+
#:+lsd-refine-std+
#:+lsd-refine-adv+
#:canny
#:corner-sub-pix
#:good-features-to-track
#:hough-lines
#:hough-lines-p
;; imgproc - Object Detection
#:+tm-sqdiff+
#:+tm-sqdiff-normed+
#:+tm-ccorr+
#:+tm-ccorr-normed+
#:+tm-ccoeff+
#:+tm-ccoeff-normed+
;; highgui - User Interface
#:+cvtimg-flip+
#:+cvtimage-swap-rb+
#:+event-mousemove+
#:+event-lbuttondown+
#:+event-rbuttondown+
#:+event-mbuttondown+
#:+event-lbuttonup+
#:+event-rbuttonup+
#:+event-mbuttonup+
#:+event-lbuttondblclk+
#:+event-rbuttondblclk+
#:+event-mbuttondblclk+
#:+event-flag-lbutton+
#:+event-flag-rbutton+
#:+event-flag-mbutton+
#:+event-flag-ctrlkey+
#:+event-flag-shiftkey+
#:+event-flag-altkey+
#:+window-normal+
#:+window-autosize+
#:convert-image
#:create-trackbar
#:destroy-all-windows
#:%destroy-window
#:destroy-window
#:get-trackbar-pos
#:imshow
#:move-window
#:set-mouse-callback
#:set-trackbar-pos
#:named-window
#:start-window-thread
#:wait-key
;; highgui - Reading and Writing Images and Video
#:+cap-any+
#:+cap-prop-pos-msec+
#:+cap-prop-pos-frames+
#:+cap-prop-pos-avi-ratio+
#:+cap-prop-frame-width+
#:+cap-prop-frame-height+
#:+cap-prop-fps+
#:+cap-prop-fourcc+
#:+cap-prop-frame-count+
#:+cap-prop-format+
#:+cap-prop-mode+
#:+cap-prop-brightness+
#:+cap-prop-contrast+
#:+cap-prop-saturation+
#:+cap-prop-hue+
#:+cap-prop-gain+
#:+cap-prop-exposure+
#:+cap-prop-convert-rgb+
#:+cap-prop-white-balance+
#:+cap-prop-rectification+
#:+imread_unchanged+
#:+imread_grayscale+
#:+imread_color+
#:+imread_anydepth+
#:+imread_anycolor+
#:+load-image-unchanged+
#:+load-image-grayscale+
#:+load-image-color+
#:+load-image-anydepth+
#:+load-image-anycolor+
#:cap-is-open
#:four-cc
#:grab
#:%imread
#:imread
#:imwrite
#:make-video-capture
#:make-video-writer
#:retrieve
#:video-capture
#:video-capture-get
#:video-capture-grab
#:video-capture-is-opened
#:video-capture-read
#:video-capture-release
#:video-capture-retrieve
#:video-capture-set
#:video-writer
#:video-writer-four-cc
#:video-writer-is-opened
#:video-writer-write
highgui - Qt New Functions
#:+window-fullscreen+
#:+window-freeratio+
#:+window-keepratio+
#:+wnd-prop-fullscreen+
#:+wnd-prop-autosize+
#:+wnd-prop-aspectratio+
#:display-overlay
#:get-window-property
#:set-window-property
;; video - Video Analysis
#:+optflow-use-initial-flow+
#:+optflow-lk-get-min-eigenvals+
#:+optflow-farneback-gaussian+
#:+motion-translation+
#:+motion-euclidean+
#:+motion-affine+
#:+motion-homography+
calib3d - Camera Calibration and 3D Reconstruction
#:+calib-cb-adaptive-thresh+
#:+calib-cb-normalize-image+
#:+calib-cb-filter-quads+
#:+calib-cb-fast-check+
#:+lmeds+
#:+ransac+
#:find-homography
;; features2d - Feature Detection and Description
#:brisk
#:make-brisk
#:feature-2d-compute
#:feature-2d-create
;; features2d - Common Interfaces of Feature Detectors
#:feature-detector-create
#:feature-detector-detect
;; features2d - Common Interfaces of Descriptor Extractors
#:descriptor-extractor-compute
#:descriptor-extractor-create
;; features2d - Common Interfaces of Descriptor Matchers
#:bf-matcher
#:descriptor-matcher-create
#:descriptor-matcher-match
#:flann-based-matcher
#:make-bf-matcher
#:make-flann-based-matcher
;;; features2d - Drawing Function of Keypoints and Matches
#:+draw-matches-flags-default+
#:+draw-matches-flags-draw-over-outimg+
#:+draw-matches-flags-not-draw-single-points+
#:+draw-matches-flags-draw-rich-keypoints+
#:draw-matches
;;; objdetect
#:+hog-descriptor-l-2-hys+
#:+hog-descriptor-default-nlevels+
;;; objdetect - Cascade Classification
#:+cascade-do-canny-pruning+
#:+cascade-scale-image+
#:+cascade-find-biggest-object+
#:+cascade-do-rough-search+
#:cascade-classifier
#:cascade-classifier-load
#:detect-multi-scale
#:make-cascade-classifier
;;; ml
#:+var-numerical+
#:+var-ordered+
#:+var-categorical+
#:+col-sample+
#:+row-sample+
;;; ml - LISP-CV specific
;;; make-training-matrix
;;; ml - Statistical Models
#:stat-model-load
#:stat-model-save
;;; ml - Normal Bayes Classifier
#:make-normal-bayes-classifier
#:normal-bayes-classifier
#:normal-bayes-classifier-predict
;;; ml - K-Nearest Neighbors
#:k-nearest
#:k-nearest-find-nearest
#:make-k-nearest
ml - Support Vector Machines
#:+svm-params-c-svc+
#:+svm-params-nu-svc+
#:+svm-params-one-class+
#:+svm-params-eps-svr+
#:+svm-params-nu-svr+
#:+svm-params-linear+
#:+svm-params-poly+
#:+svm-params-rbf+
#:+svm-params-sigmoid+
#:+svm-params-chi2+
#:+svm-params-inter+
#:+svm-params-c+
#:+svm-params-gamma+
#:+svm-params-p+
#:+svm-params-nu+
#:+svm-params-coef+
#:+svm-params-degree+
#:c
#:class-weights
#:coef-0
#:degree
#:gamma
#:get-support-vector
#:get-support-vector-count
#:kernel-type
#:make-svm
#:make-svm-params
#:nu
#:p
#:svm
#:svm-params-0
#:svm-params
#:svm-predict
#:svm-train
#:svm-type
#:term-crit
;;; ml - Decision Trees
#:d-tree
#:d-tree-params
#:d-tree-predict
#:d-tree-train
#:make-d-tree
#:make-d-tree-params
ml - Neural Networks
#:+ann-mlp-identity+
#:+ann-mlp-sigmoid-sym+
#:+ann-mlp-gaussian+
#:+ann-mlp-train-params-backprop+
#:+ann-mlp-train-params-rprop+
#:+update-weights+
#:+no-input-scale+
#:+no-output-scale+
#:ann-mlp
#:ann-mlp-create
#:ann-mlp-predict
#:ann-mlp-train
#:ann-mlp-train-params
#:make-ann-mlp
#:make-ann-mlp-train-params
;;; photo - Inpainting
#:+inpaint-ns+
#:+inpaint-telea+
#:in-paint
;;; photo - Decolorization
#:decolor
;;; photo - Seamless Cloning
#:+normal-clone+
#:+mixed-clone+
#:+feature-exchange+
#:color-change
#:illumination-change
#:seamless-clone
#:texture-flattening
;;; photo - Non-Photorealistic Rendering
#:+recurs-filter+
#:+normconv-filter+
#:+monochrome-transfer+
#:detail-enhance
#:edge-preserving-filter
#:pencil-sketch
#:stylization
;;; nonfree - Feature Detection and Description
#:make-surf
#:surf
;;; contrib - ColorMaps in OpenCV
#:+colormap-autumn+
#:+colormap-bone+
#:+colormap-jet+
#:+colormap-winter+
#:+colormap-rainbow+
#:+colormap-ocean+
#:+colormap-summer+
#:+colormap-spring+
#:+colormap-cool+
#:+colormap-hsv+
#:+colormap-pink+
#:+colormap-hot+
#:apply-color-map
))
| null | https://raw.githubusercontent.com/W-Net-AI/LISP-CV/10d5c7c1a6fa026de488ca89a28e8a5c519ff8f2/package.lisp | lisp | -*- mode: lisp; indent-tabs: nil -*-
Default parameters.
Change default parameters
Live code editing
Extra OpenCV constants
C Constants
C Integer Limits
DELETE
WITH-MACROS
DEFINE-FOREIGN-TYPE
VECTORS
DEFGENERIC
Other generic functions and defuns
used to re-import shadowed symbols.
core - Basic Structures
core - Dynamic Structures
core - Operations on Arrays
core - Drawing Functions
core - XML/YAML Persistence
core - Clustering
imgproc - Image Filtering
imgproc - Geometric Image Transformations
imgproc - Miscellaneous Image Transformations
+yuv2bgr-vyuy+
+yuv2rgba-vyuy+
+yuv2bgra-vyuy+
+yuv2gray-vyuy+
imgproc - Structural Analysis and Shape Descriptors
imgproc - Object Detection
highgui - User Interface
highgui - Reading and Writing Images and Video
video - Video Analysis
features2d - Feature Detection and Description
features2d - Common Interfaces of Feature Detectors
features2d - Common Interfaces of Descriptor Extractors
features2d - Common Interfaces of Descriptor Matchers
features2d - Drawing Function of Keypoints and Matches
objdetect
objdetect - Cascade Classification
ml
ml - LISP-CV specific
make-training-matrix
ml - Statistical Models
ml - Normal Bayes Classifier
ml - K-Nearest Neighbors
ml - Decision Trees
photo - Inpainting
photo - Decolorization
photo - Seamless Cloning
photo - Non-Photorealistic Rendering
nonfree - Feature Detection and Description
contrib - ColorMaps in OpenCV
|
(defpackage :lisp-cv
(:nicknames #:lisp-cv #:lcv #:cv)
(:use #:cffi #:common-lisp #:swank #:trivial-garbage #:lisp-executable #:bordeaux-threads)
(:shadow #:abs #:exp #:fill #:length #:load #:log #:min #:max #:open #:read #:set #:sqrt #:write)
(:export
#:*camera-index*
#:*default-width*
#:*default-height*
#:*frames-per-second*
#:*millis-per-frame*
utils - Utilities
#:->
#:cat
#:do-while
#:dup
#:rename-package-nicknames
#:full-pathname
#:*lisp-cv-data-dir*
#:*lisp-cv-src-dir*
#:make-pathname-list
#:mklist
#:partition
#:println
#:run-program
#:def-params
#:continuable
#:update-swank
Macros
#:$
#:@
#:alloc
#:free
#:gced-foreign-alloc
#:size-of
C - Interop
#:%string
#:c-string
#:c-string-to-string
#:std-string-to-c-string
#:+max-dim+
#:+pi+
#:+char-bit+
#:+schar-min+
#:+schar-max+
#:+uchar-max+
#:+char-min+
#:+char-min-j+
#:+char-max+
#:+char-max-j+
#:+mb-len-max+
#:+shrt-min+
#:+shrt-max+
#:+ushrt-max+
#:+int-min+
#:+int-max+
#:+uint-max+
#:+long-min+
#:+long-max+
#:+ulong-max+
#:+dbl-max+
#:+flt-max+
#:del
#:del-ann-mlp
#:del-ann-mlp-train-params
#:del-cascade-classifier
#:del-dmatch
#:del-d-tree
#:del-d-tree-params
#:del-file-node
#:del-file-storage
#:del-hog-descriptor
#:del-k-nearest
#:del-key-point
#:del-mat
#:del-mat-expr
#:del-normal-bayes-classifier
#:del-pca
#:del-point
#:del-point-2d
#:del-point-2f
#:del-point-3d
#:del-point-3f
#:del-point-3i
#:del-range
#:del-rect
#:del-rng
#:del-rot-rect
#:del-scalar
#:del-size
#:del-std-string
#:del-svm
#:del-svm-params
#:del-term-crit
#:del-vec-2b
#:del-vec-3b
#:del-vec-4b
#:del-vec-2d
#:del-vec-3d
#:del-vec-4d
#:del-vec-6d
#:del-vec-2f
#:del-vec-3f
#:del-vec-4f
#:del-vec-6f
#:del-vec-2i
#:del-vec-3i
#:del-vec-4i
#:del-vec-6i
#:del-vec-8i
#:del-vec-2s
#:del-vec-3s
#:del-vec-4s
#:del-vec-2w
#:del-vec-3w
#:del-vec-4w
#:del-vector-char
#:del-vector-double
#:del-vector-dmatch
#:del-vector-float
#:del-vector-int
#:del-vector-key-point
#:del-vector-mat
#:del-vector-point
#:del-vector-point-2f
#:del-vector-rect
#:del-vector-uchar
#:del-vector-vec-2b
#:del-vector-vec-3b
#:del-vector-vec-4b
#:del-vector-vec-2d
#:del-vector-vec-3d
#:del-vector-vec-4d
#:del-vector-vec-6d
#:del-vector-vec-2f
#:del-vector-vec-3f
#:del-vector-vec-4f
#:del-vector-vec-6f
#:del-vector-vec-2i
#:del-vector-vec-3i
#:del-vector-vec-4i
#:del-vector-vec-6i
#:del-vector-vec-8i
#:del-vector-vec-2s
#:del-vector-vec-3s
#:del-vector-vec-4s
#:del-vector-vec-2w
#:del-vector-vec-3w
#:del-vector-vec-4w
#:del-video-capture
#:del-vid-writer
#:with-captured-camera
#:with-captured-file
#:with-cascade-classifier
#:with-dmatch
#:with-d-tree
#:with-d-tree-params
#:with-file-node
#:with-file-storage
#:with-hog-descriptor
#:with-key-point
#:with-k-nearest
#:with-mat
#:with-mat-expr
#:with-named-window
#:with-normal-bayes-classifier
#:with-object
#:with-pca
#:with-point
#:with-point-2d
#:with-point-2f
#:with-point-3d
#:with-point-3f
#:with-point-3i
#:with-rect
#:with-range
#:with-rng
#:with-rotated-rect
#:with-scalar
#:with-size
#:with-surf
#:with-svm
#:with-svm-params
#:with-term-criteria
#:with-vec-2b
#:with-vec-2d
#:with-vec-2f
#:with-vec-2i
#:with-vec-2s
#:with-vec-2w
#:with-vec-3b
#:with-vec-3d
#:with-vec-3f
#:with-vec-3i
#:with-vec-3s
#:with-vec-3w
#:with-vec-4b
#:with-vec-4d
#:with-vec-4f
#:with-vec-4i
#:with-vec-4s
#:with-vec-4w
#:with-vec-6d
#:with-vec-6f
#:with-vec-6i
#:with-vec-8i
#:with-vector-char
#:with-vector-double
#:with-vector-dmatch
#:with-vector-float
#:with-vector-int
#:with-vector-key-point
#:with-vector-mat
#:with-vector-point
#:with-vector-point-2f
#:with-vector-rect
#:with-vector-uchar
#:with-vector-vec-2b
#:with-vector-vec-3b
#:with-vector-vec-4b
#:with-vector-vec-2d
#:with-vector-vec-3d
#:with-vector-vec-4d
#:with-vector-vec-6d
#:with-vector-vec-2f
#:with-vector-vec-3f
#:with-vector-vec-4f
#:with-vector-vec-6f
#:with-vector-vec-2i
#:with-vector-vec-3i
#:with-vector-vec-4i
#:with-vector-vec-6i
#:with-vector-vec-8i
#:with-vector-vec-2s
#:with-vector-vec-3s
#:with-vector-vec-4s
#:with-vector-vec-2w
#:with-vector-vec-3w
#:with-vector-vec-4w
#:with-video-capture
#:with-video-writer
#:string*
#:ann-mlp
#:ann-mlp-train-params
#:bf-matcher
#:brisk
#:c-pointer
#:cascade-classifier
#:dmatch
#:d-tree
#:d-tree-node
#:d-tree-params
#:file-node
#:file-storage
#:flann-based-matcher
#:hog-descriptor
#:key-point
#:k-nearest
#:mat
#:mat-expr
#:mat-struct
#:mouse-callback
#:normal-bayes-classifier
#:pca
#:point
#:point-2d
#:point-2f
#:point-3d
#:point-3f
#:point-3i
#:range
#:rect
#:rng
#:rotated-rect
#:scalar
#:size
#:svm
#:svm-params
#:surf
#:term-criteria
#:term-criteria-struct
#:trackbar-callback
#:vec-2b
#:vec-3b
#:vec-4b
#:vec-2d
#:vec-3d
#:vec-4d
#:vec-6d
#:vec-2f
#:vec-3f
#:vec-4f
#:vec-6f
#:vec-2i
#:vec-3i
#:vec-4i
#:vec-6i
#:vec-8i
#:vec-2s
#:vec-3s
#:vec-4s
#:vec-2w
#:vec-3w
#:vec-4w
#:vector-char
#:vector-dmatch
#:vector-double
#:vector-float
#:vector-int
#:vector-key-point
#:vector-mat
#:vector-point
#:vector-point-2f
#:vector-rect
#:vector-uchar
#:vector-vec-2b
#:vector-vec-3b
#:vector-vec-4b
#:vector-vec-2d
#:vector-vec-3d
#:vector-vec-4d
#:vector-vec-6d
#:vector-vec-2f
#:vector-vec-3f
#:vector-vec-4f
#:vector-vec-6f
#:vector-vec-2i
#:vector-vec-3i
#:vector-vec-4i
#:vector-vec-6i
#:vector-vec-8i
#:video-capture
#:video-writer
#:cv-ann-mlp
#:cv-ann-mlp-train-params
#:cv-bf-matcher
#:cv-brisk
#:cv-cascade-classifier
#:cv-dmatch
#:cv-d-tree
#:cv-d-tree-node
#:cv-d-tree-params
#:cv-file-node
#:cv-file-storage
#:cv-hog-descriptor
#:cv-key-point
#:cv-k-nearest
#:cv-mat
#:cv-mat-expr
#:cv-mat-struct
#:cv-mouse-callback
#:cv-normal-bayes-classifier
#:cv-pca
#:cv-point
#:cv-point-2d
#:cv-point-2f
#:cv-point-3d
#:cv-point-3f
#:cv-point-3i
#:cv-range
#:cv-rect
#:cv-rng
#:cv-rotated-rect
#:cv-scalar
#:cv-size
#:std-string
#:cv-svm
#:cv-svm-params
#:cv-surf
#:cv-term-criteria
#:cv-trackbar-callback
#:cv-vec-2b
#:cv-vec-3b
#:cv-vec-4b
#:cv-vec-2d
#:cv-vec-3d
#:cv-vec-4d
#:cv-vec-6d
#:cv-vec-2f
#:cv-vec-3f
#:cv-vec-4f
#:cv-vec-6f
#:cv-vec-2i
#:cv-vec-3i
#:cv-vec-4i
#:cv-vec-6i
#:cv-vec-8i
#:cv-vec-2s
#:cv-vec-3s
#:cv-vec-4s
#:cv-vec-2w
#:cv-vec-3w
#:cv-vec-4w
#:std-vector-char
#:std-vector-dmatch
#:std-vector-double
#:std-vector-float
#:std-vector-int
#:std-vector-key-point
#:std-vector-mat
#:std-vector-point
#:std-vector-point-2f
#:std-vector-rect
#:std-vector-uchar
#:std-vector-vec-2b
#:std-vector-vec-3b
#:std-vector-vec-4b
#:std-vector-vec-2d
#:std-vector-vec-3d
#:std-vector-vec-4d
#:std-vector-vec-6d
#:std-vector-vec-2f
#:std-vector-vec-3f
#:std-vector-vec-4f
#:std-vector-vec-6f
#:std-vector-vec-2i
#:std-vector-vec-3i
#:std-vector-vec-4i
#:std-vector-vec-6i
#:std-vector-vec-8i
#:cv-video-capture
#:cv-video-writer
#:make-vector-char
#:vector-char
#:c-arr-to-vec-char
#:seq-to-vec-char
#:vec-char-to-c-arr
#:vec-char-length
#:vec-char-to-lisp-list
#:vec-char-to-lisp-vec
#:vec-char-at
#:vec-char-push-back
#:make-vector-dmatch
#:vector-dmatch
#:c-arr-to-vec-dmatch
#:list-to-vec-dmatch
#:vec-dmatch-to-c-arr
#:vec-dmatch-length
#:vec-dmatch-to-lisp-list
#:vec-dmatch-to-lisp-vec
#:vec-dmatch-at
#:vec-dmatch-push-back
#:make-vector-double
#:vector-double
#:c-arr-to-vec-double
#:seq-to-vec-double
#:vec-double-to-c-arr
#:vec-double-length
#:vec-double-to-lisp-list
#:vec-double-to-lisp-vec
#:vec-double-at
#:vec-double-push-back
#:make-vector-float
#:vector-float
#:c-arr-to-vec-float
#:seq-to-vec-float
#:vec-float-to-c-arr
#:vec-float-length
#:vec-float-to-lisp-list
#:vec-float-to-lisp-vec
#:vec-float-at
#:vec-float-push-back
#:make-vector-int
#:vector-int
#:c-arr-to-vec-int
#:seq-to-vec-int
#:vec-int-to-c-arr
#:vec-int-length
#:vec-int-to-lisp-list
#:vec-int-to-lisp-vec
#:vec-int-at
#:vec-int-push-back
#:make-vector-key-point
#:vector-key-point
#:c-arr-to-vec-key-point
#:list-to-vec-key-point
#:vec-key-point-to-c-arr
#:vec-key-point-length
#:vec-key-point-to-lisp-list
#:vec-key-point-to-lisp-vec
#:vec-key-point-at
#:vec-key-point-push-back
#:make-vector-mat
#:vector-mat
#:c-arr-to-vec-mat
#:seq-to-vec-mat
#:vec-mat-to-c-arr
#:vec-mat-length
#:vec-mat-to-lisp-list
#:vec-mat-to-lisp-vec
#:vec-mat-at
#:vec-mat-push-back
#:make-vector-point
#:vector-point
#:c-arr-to-vec-point
#:seq-to-vec-point
#:vec-point-to-c-arr
#:vec-point-length
#:vec-point-to-lisp-list
#:vec-point-to-lisp-vec
#:vec-point-at
#:vec-point-push-back
#:make-vector-point-2f
#:vector-point-2f
#:c-arr-to-vec-point-2f
#:seq-to-vec-point-2f
#:vec-point-2f-to-c-arr
#:vec-point-2f-length
#:vec-point-2f-to-lisp-list
#:vec-point-2f-to-lisp-vec
#:vec-point-2f-at
#:vec-point-2f-push-back
#:make-vector-rect
#:vector-rect
#:c-arr-to-vec-rect
#:seq-to-vec-rect
#:vec-rect-to-c-arr
#:vec-rect-length
#:vec-rect-to-lisp-list
#:vec-rect-to-lisp-vec
#:vec-rect-at
#:vec-rect-push-back
#:make-vector-uchar
#:vector-uchar
#:c-arr-to-vec-uchar
#:seq-to-vec-uchar
#:vec-uchar-to-c-arr
#:vec-uchar-length
#:vec-uchar-to-lisp-list
#:vec-uchar-to-lisp-vec
#:vec-uchar-at
#:vec-uchar-push-back
#:make-vector-vec-2d
#:vector-vec-2d
#:c-arr-to-vec-vec-2d
#:seq-to-vec-vec-2d
#:vec-vec-2d-to-c-arr
#:vec-vec-2d-length
#:vec-vec-2d-to-lisp-list
#:vec-vec-2d-to-lisp-vec
#:vec-vec-2d-at
#:vec-vec-2d-push-back
#:make-vector-vec-3d
#:vector-vec-3d
#:c-arr-to-vec-vec-3d
#:seq-to-vec-vec-3d
#:vec-vec-3d-to-c-arr
#:vec-vec-3d-length
#:vec-vec-3d-to-lisp-list
#:vec-vec-3d-to-lisp-vec
#:vec-vec-3d-at
#:vec-vec-3d-push-back
#:make-vector-vec-4d
#:vector-vec-4d
#:c-arr-to-vec-vec-4d
#:seq-to-vec-vec-4d
#:vec-vec-4d-to-c-arr
#:vec-vec-4d-length
#:vec-vec-4d-to-lisp-list
#:vec-vec-4d-to-lisp-vec
#:vec-vec-4d-at
#:vec-vec-4d-push-back
#:make-vector-vec-6d
#:vector-vec-6d
#:c-arr-to-vec-vec-6d
#:seq-to-vec-vec-6d
#:vec-vec-6d-to-c-arr
#:vec-vec-6d-length
#:vec-vec-6d-to-lisp-list
#:vec-vec-6d-to-lisp-vec
#:vec-vec-6d-at
#:vec-vec-6d-push-back
#:make-vector-vec-2f
#:vector-vec-2f
#:c-arr-to-vec-vec-2f
#:seq-to-vec-vec-2f
#:vec-vec-2f-to-c-arr
#:vec-vec-2f-length
#:vec-vec-2f-to-lisp-list
#:vec-vec-2f-to-lisp-vec
#:vec-vec-2f-at
#:vec-vec-2f-push-back
#:make-vector-vec-3f
#:vector-vec-3f
#:c-arr-to-vec-vec-3f
#:seq-to-vec-vec-3f
#:vec-vec-3f-to-c-arr
#:vec-vec-3f-length
#:vec-vec-3f-to-lisp-list
#:vec-vec-3f-to-lisp-vec
#:vec-vec-3f-at
#:vec-vec-3f-push-back
#:make-vector-vec-4f
#:vector-vec-4f
#:c-arr-to-vec-vec-4f
#:seq-to-vec-vec-4f
#:vec-vec-4f-to-c-arr
#:vec-vec-4f-length
#:vec-vec-4f-to-lisp-list
#:vec-vec-4f-to-lisp-vec
#:vec-vec-4f-at
#:vec-vec-4f-push-back
#:make-vector-vec-6f
#:vector-vec-6f
#:c-arr-to-vec-vec-6f
#:seq-to-vec-vec-6f
#:vec-vec-6f-to-c-arr
#:vec-vec-6f-length
#:vec-vec-6f-to-lisp-list
#:vec-vec-6f-to-lisp-vec
#:vec-vec-6f-at
#:vec-vec-6f-push-back
#:make-vector-vec-2i
#:vector-vec-2i
#:c-arr-to-vec-vec-2i
#:seq-to-vec-vec-2i
#:vec-vec-2i-to-c-arr
#:vec-vec-2i-length
#:vec-vec-2i-to-lisp-list
#:vec-vec-2i-to-lisp-vec
#:vec-vec-2i-at
#:vec-vec-2i-push-back
#:make-vector-vec-3i
#:vector-vec-3i
#:c-arr-to-vec-vec-3i
#:seq-to-vec-vec-3i
#:vec-vec-3i-to-c-arr
#:vec-vec-3i-length
#:vec-vec-3i-to-lisp-list
#:vec-vec-3i-to-lisp-vec
#:vec-vec-3i-at
#:vec-vec-3i-push-back
#:make-vector-vec-4i
#:vector-vec-4i
#:c-arr-to-vec-vec-4i
#:seq-to-vec-vec-4i
#:vec-vec-4i-to-c-arr
#:vec-vec-4i-length
#:vec-vec-4i-to-lisp-list
#:vec-vec-4i-to-lisp-vec
#:vec-vec-4i-at
#:vec-vec-4i-push-back
#:make-vector-vec-6i
#:vector-vec-6i
#:c-arr-to-vec-vec-6i
#:seq-to-vec-vec-6i
#:vec-vec-6i-to-c-arr
#:vec-vec-6i-length
#:vec-vec-6i-to-lisp-list
#:vec-vec-6i-to-lisp-vec
#:vec-vec-6i-at
#:vec-vec-6i-push-back
#:make-vector-vec-8i
#:vector-vec-8i
#:c-arr-to-vec-vec-8i
#:seq-to-vec-vec-8i
#:vec-vec-8i-to-c-arr
#:vec-vec-8i-length
#:vec-vec-8i-to-lisp-list
#:vec-vec-8i-to-lisp-vec
#:vec-vec-8i-at
#:vec-vec-8i-push-back
#:get*
#:angle
#:assign
#:bounding-rect
#:center
#:clone
#:compute
#:create
#:data
#:detect
#:dot
#:height
#:is-opened
#:match
#:mean
#:predict
#:push-back
#:release
#:save
#:size
#:train
#:type*
#:width
#:x
#:y
#:z
#:abs
#:exp
#:load
#:length
#:log
#:max
#:min
#:open
#:read
#:set
#:sqrt
#:write
#:+8uc1+
#:+8uc2+
#:+8uc3+
#:+8uc4+
#:+8sc1+
#:+8sc2+
#:+8sc3+
#:+8sc4+
#:+16uc1+
#:+16uc2+
#:+16uc3+
#:+16uc4+
#:+16sc1+
#:+16sc2+
#:+16sc3+
#:+16sc4+
#:+32sc1+
#:+32sc2+
#:+32sc3+
#:+32sc4+
#:+32f+
#:+32fc1+
#:+32fc2+
#:+32fc3+
#:+32fc4+
#:+64fc1+
#:+64fc2+
#:+64fc3+
#:+64fc4+
#:+termcrit-iter+
#:+termcrit-number+
#:+termcrit-eps+
#:step*
#:trace*
#:<<
#:>>
#:add
#:adjust-roiadd all pathnames in a directory to a list lisp
#:area
#:arr-to-mat
#:assign-val
#:at
#:at-char
#:at-double
#:at-float
#:at-int
#:at-short
#:at-uchar
#:at-uint
#:at-ushort
#:at-point-2d
#:at-point-2f
#:at-point-3d
#:at-point-3f
#:at-point-3i
#:at-vec-2b
#:at-vec-2d
#:at-vec-2f
#:at-vec-2i
#:at-vec-2s
#:at-vec-2w
#:at-vec-3b
#:at-vec-3d
#:at-vec-3f
#:at-vec-3i
#:at-vec-3s
#:at-vec-3w
#:at-vec-4b
#:at-vec-4d
#:at-vec-4f
#:at-vec-4i
#:at-vec-4s
#:at-vec-4w
#:br
#:channels
#:class-id
#:col-range
#:cols
#:%convert-to
#:convert-to
#:cross
#:copy-to
#:create-mat-typed
#:create-mat-with-data
#:create-mat-with-element
#:create-mat-with-range
#:create-mat-with-value
#:data
#:depth
#:diag
#:distance
#:div
#:dmatch
#:%dmatch-distance
#:%dmatch-img-idx
#:%dmatch-query-idx
#:%dmatch-train-idx
#:dmatch-distance
#:dmatch-img-idx
#:dmatch-query-idx
#:dmatch-train-idx
#:dot-2d
#:dot-2f
#:dot-2i
#:dot-3d
#:dot-3f
#:dot-3i
#:elem-size
#:elem-size1
#:empty
#:epsilon
#:force
#:img-idx
#:inv
#:is-continuous
#:key-point
#:%key-point-angle
#:%key-point-class-id
#:%key-point-octave
#:%key-point-pt
#:%key-point-response
#:%key-point-size
#:%key-point-x
#:%key-point-y
#:key-point-angle
#:key-point-class-id
#:key-point-octave
#:key-point-pt
#:key-point-response
#:key-point-size
#:key-point-x
#:key-point-y
#:locate-roi
#:make-dmatch
#:make-key-point
#:make-mat
#:make-point
#:make-point-2d
#:make-point-2f
#:make-point-3d
#:make-point-3f
#:make-point-3i
#:make-range
#:make-range-all
#:make-rect
#:make-rotated-rect
#:make-scalar
#:make-scalar-all
#:make-size
#:make-term-criteria
#:make-vec-2b
#:make-vec-3b
#:make-vec-4b
#:make-vec-2d
#:make-vec-3d
#:make-vec-4d
#:make-vec-6d
#:make-vec-2f
#:make-vec-3f
#:make-vec-4f
#:make-vec-6f
#:make-vec-2i
#:make-vec-3i
#:make-vec-4i
#:make-vec-6i
#:make-vec-8i
#:make-vec-2s
#:make-vec-3s
#:make-vec-4s
#:make-vec-2w
#:make-vec-3w
#:make-vec-4w
#:%mat
#:mat
#:mat-assign
#:mat-assign-val
#:mat-and-cffi-type
#:mat-at-char
#:mat-at-double
#:mat-at-float
#:mat-at-int
#:mat-at-short
#:mat-at-uchar
#:mat-at-uint
#:mat-at-ushort
#:mat-at-point-2d
#:mat-at-point-2f
#:mat-at-point-3d
#:mat-at-point-3f
#:mat-at-point-3i
#:mat-at-vec-2b
#:mat-at-vec-2d
#:mat-at-vec-2f
#:mat-at-vec-2i
#:mat-at-vec-2s
#:mat-at-vec-2w
#:mat-at-vec-3b
#:mat-at-vec-3d
#:mat-at-vec-3f
#:mat-at-vec-3i
#:mat-at-vec-3s
#:mat-at-vec-3w
#:mat-at-vec-4b
#:mat-at-vec-4d
#:mat-at-vec-4f
#:mat-at-vec-4i
#:mat-at-vec-4s
#:mat-at-vec-4w
#:mat-cross
#:mat-depth
#:mat-expr-t
#:mat-eye
#:mat-ones
#:mat-push-back
#:mat-to-arr
#:mat-type
#:mat-zeros
#:max-count
#:mul
#:octave
#:pdm
#:pkp
#:pm
#:point
#:point-x
#:point-y
#:point-2d
#:point-2d-x
#:point-2d-y
#:point-2f
#:point-2f-x
#:point-2f-y
#:point-3d
#:point-3d-x
#:point-3d-y
#:point-3d-z
#:point-3f
#:point-3f-x
#:point-3f-y
#:point-3f-z
#:point-3i
#:point-3i-x
#:point-3i-y
#:point-3i-z
#:pp
#:pr
#:print-mat
#:print-point
#:print-point-2i
#:print-point-2d
#:print-point-2f
#:print-point-3d
#:print-point-3f
#:print-point-3i
#:print-scalar
#:print-vec
#:print-vec-2b
#:print-vec-2d
#:print-vec-2f
#:print-vec-2i
#:print-vec-2s
#:print-vec-2w
#:print-vec-3b
#:print-vec-3d
#:print-vec-3f
#:print-vec-3i
#:print-vec-3s
#:print-vec-3w
#:print-vec-4b
#:print-vec-4d
#:print-vec-4f
#:print-vec-4i
#:print-vec-4s
#:print-vec-4w
#:print-vec-6d
#:print-vec-6f
#:print-vec-6i
#:print-vec-8i
#:promote
#:ps
#:psc
#:ptc
#:pv
#:%ptr
#:ptr
#:query-idx
#:range
#:range-end
#:range-all
#:range-start
#:rect
#:rect-br
#:rect-tl
#:reshape
#:reshape-rows
#:response
#:roi
#:rotated-rect
#:rotated-rect-bounding-rect
#:rotated-rect-center
#:rotated-rect-size
#:row
#:row-range
#:rows
#:scalar
#:scalar-all
#:scale
#:size-assign-to
#:size-from-point
#:size-height
#:size-width
#:step1
#:sub
#:tl
#:term-criteria
#:term-criteria-epsilon
#:term-criteria-max-count
#:term-criteria-type
#:total
#:train-idx
#:vec-2b
#:vec-3b
#:vec-4b
#:vec-2d
#:vec-3d
#:vec-4d
#:vec-6d
#:vec-2f
#:vec-3f
#:vec-4f
#:vec-6f
#:vec-2i
#:vec-3i
#:vec-4i
#:vec-6i
#:vec-8i
#:vec-2s
#:vec-3s
#:vec-4s
#:vec-2w
#:vec-3w
#:vec-4w
#:+C+
#:+cmp-eq+
#:+cmp-gt+
#:+cmp-ge+
#:+cmp-lt+
#:+cmp-le+
#:+cmp-ne+
#:+dft-inverse+
#:+dft-scale+
#:+dft-rows+
#:+dft-complex-output+
#:+dft-real-output+
#:+dxt-forward+
#:+dxt-inverse+
#:+dxt-inv-scale+
#:+dxt-inverse-scale+
#:+dxt-rows+
#:+dxt-mul-conj+
#:+decomp-lu+
#:+decomp-svd+
#:+decomp-eig+
#:+decomp-cholesky+
#:+decomp-qr+
#:+decomp-normal+
#:+l1+
#:+l2+
#:+norm-inf+
#:+norm-l1+
#:+norm-l2+
#:+norm-type-mask+
#:+norm-relative+
#:+norm-minmax+
#:+minmax+
#:+relative-c+
#:+relative-l1+
#:+relative-l2+
#:+covar-scrambled+
#:+covar-normal+
#:+covar-use-avg+
#:+covar-scale+
#:+covar-rows+
#:+covar-cols+
#:+rng-uniform+
#:+rng-normal+
#+pca-data-as-row+
#+pca-data-as-col+
#+pca-use-avg+
#:abs-diff
#:add-weighted
#:back-project
#:bitwise-and
#:bitwise-not
#:bitwise-or
#:bitwise-xor
#:calc-covar-matrix
#:check-range
#:complete-symm
#:convert-scale-abs
#:determinant
#:divide
#:eigenvalues
#:eigenvectors
#:flip
#:in-range-s
#:inv
#:invert
#:magnitude
#:mahalanobis
#:make-pca
#:make-rng
#:min-max-loc
#:mul-transposed
#:multiply
#:norm
#:normalize
#:pca
#:pca-eigenvalues
#:pca-eigenvectors
#:pca-mean
#:pca-back-project-2
#:pca-back-project
#:pca-project-2
#:pca-project
#:perspective-transform
#:project
#:phase*
#:pow
#:randu
#:repeat
#:rng
#:scale-add
#:subtract
#:sum
#:uniform
#:+line-4+
#:+line-8+
#:+line-aa+
#:+aa+
#:+filled+
#:+font-hershey-simplex+
#:+font-hershey-plain+
#:+font-hershey-duplex+
#:+font-hershey-complex+
#:+font-hershey-triplex+
#:+font-hershey-complex-small+
#:+font-hershey-script-simplex+
#:+font-hershey-script-complex+
#:+font-italic+
#:bgr
#:circle
#:clip-line
#:ellipse
#:get-text
#:line
#:make-bgr
#:make-rgb
#:put-text
#:rgb
#:+file-storage-read+
#:+file-storage-write+
#:+file-storage-append+
#:+file-storage-memory+
#:file-storage
#:make-file-storage
#:file-storage-open
#:file-storage-release
#:file-storage-write
#:+kmeans-random-centers+
#:+kmeans-pp-centers+
#:+kmeans-use-initial-labels+
#:kmeans
core - Utility and System Functions and Macros
#:+cpu-none+
#:+cpu-mm+
#:+cpu-sse+
#:+cpu-sse2+
#:+cpu-sse3+
#:+cpu-ssse3+
#:+cpu-sse4-1+
#:+cpu-sse4-2+
#:+cpu-popcnt+
#:+cpu-avx+
#:+hardware-max-feature+
#:+max-feature+
#:+stsok+
#:+stsbacktrace+
#:+stserror+
#:+stsinternal+
#:+stsnomem+
#:+stsbadarg+
#:+stsbadfunc+
#:+stsnoconv+
#:+stsautotrace+
#:+headerisnull+
#:+badimagesize+
#:+badoffset+
#:+baddataptr+
#:+badstep+
#:+badmodelorchseq+
#:+badnumchannels+
#:+badnumchannel1u+
#:+baddepth+
#:+badalphachannel+
#:+badorder+
#:+badorigin+
#:+badalign+
#:+badcallback+
#:+badtilesize+
#:+badcoi+
#:+badroisize+
#:+maskistiled+
#:+stsnullptr+
#:+stsveclengtherr+
#:+stsfilterstructcontenterr+
#:+stskernelstructcontenterr+
#:+stsfilteroffseterr+
#:+stsbadsize+
#:+stsdivbyzero+
#:+stsinplacenotsupported+
#:+stsobjectnotfound+
#:+stsunmatchedformats+
#:+stsbadflag+
#:+stsbadpoint+
#:+stsbadmask+
#:+stsunmatchedsizes+
#:+stsunsupportedformat+
#:+stsoutofrange+
#:+stsparseerror+
#:+stsnotimplemented+
#:+stsbadmembreshapelock+
#:+stsassert+
#:+gpunotsupported+
#:+gpuapicallerror+
#:+openglnotsupported+
#:+openglapicallerror+
#:check-hardware-support
#:cube-root
#:fast-atan2
#:get-build-information
#:get-number-of-cpus
#:get-tick-count
#:get-tick-frequency
#:+ipl-border-constant+
#:+ipl-border-replicate+
#:+border-constant+
#:+border-replicate+
#:+border-reflect+
#:+border-wrap+
#:+border-reflect-101+
#:+border-reflect101+
#:+border-default+
#:+border-transparent+
#:+border-isolated+
#:+shape-rect+
#:+shape-cross+
#:+shape-ellipse+
#:+morph-rect+
#:+morph-cross+
#:+morph-ellipse+
#:+shape-custom+
#:+gaussian-5x5+
#:+blur-no-scale+
#:+blur+
#:+gaussian+
#:+median+
#:+bilateral+
#:+scharr+
#:bilateral-filter
#:blur
#:copy-make-border
#:erode
#:dilate
#:filter-2d
#:gaussian-blur
#:laplacian
#:make-morphology-default-border-value
#:median-blur
#:morphology-default-border-value
#:morphology-ex
#:pyr-down
#:pyr-up
#:scharr
#:sobel
#:+warp-fill-outliers+
#:+warp-inverse-map+
#:+inter-nearest+
#:+inter-linear+
#:+inter-cubic+
#:+inter-area+
#:+inter-lanczos4+
#:get-affine-transform
#:get-perspective-transform
#:get-rotation-matrix-2d
#:invert-affine-transform
#:remap
#:%resize
#:resize
#:warp-affine
#:warp-perspective
#:+bgr2bgra+
#:+rgb2rgba+
#:+bgra2bgr+
#:+rgba2rgb+
#:+bgr2rgba+
#:+rgb2bgra+
#:+rgba2bgr+
#:+bgra2rgb+
#:+bgr2rgb+
#:+rgb2bgr+
#:+bgra2rgba+
#:+rgba2bgra+
#:+bgr2gray+
#:+rgb2gray+
#:+gray2bgr+
#:+gray2rgb+
#:+gray2bgra+
#:+gray2rgba+
#:+bgra2gray+
#:+rgba2gray+
#:+bgr2bgr565+
#:+rgb2bgr565+
#:+bgr5652bgr+
#:+bgr5652rgb+
#:+bgra2bgr565+
#:+rgba2bgr565+
#:+bgr5652bgra+
#:+bgr5652rgba+
#:+gray2bgr565+
#:+bgr5652gray+
#:+bgr2bgr555+
#:+rgb2bgr555+
#:+bgr5552bgr+
#:+bgr5552rgb+
#:+bgra2bgr555+
#:+rgba2bgr555+
#:+bgr5552bgra+
#:+bgr5552rgba+
#:+gray2bgr555+
#:+bgr5552gray+
#:+bgr2xyz+
#:+rgb2xyz+
#:+xyz2bgr+
#:+xyz2rgb+
#:+bgr2ycrcb+
#:+rgb2ycrcb+
#:+ycrcb2bgr+
#:+ycrcb2rgb+
#:+bgr2hsv+
#:+rgb2hsv+
#:+bgr2lab+
#:+rgb2lab+
#:+bayerbg2bgr+
#:+bayergb2bgr+
#:+bayerrg2bgr+
#:+bayergr2bgr+
#:+bayerbg2rgb+
#:+bayergb2rgb+
#:+bayerrg2rgb+
#:+bayergr2rgb+
#:+bgr2luv+
#:+rgb2luv+
#:+bgr2hls+
#:+rgb2hls+
#:+hsv2bgr+
#:+hsv2rgb+
#:+lab2bgr+
#:+lab2rgb+
#:+luv2bgr+
#:+luv2rgb+
#:+hls2bgr+
#:+hls2rgb+
#:+bayerbg2bgr-vng+
#:+bayergb2bgr-vng+
#:+bayerrg2bgr-vng+
#:+bayergr2bgr-vng+
#:+bayerbg2rgb-vng+
#:+bayergb2rgb-vng+
#:+bayerrg2rgb-vng+
#:+bayergr2rgb-vng+
#:+bgr2hsv-full+
#:+rgb2hsv-full+
#:+bgr2hls-full+
#:+rgb2hls-full+
#:+hsv2bgr-full+
#:+hsv2rgb-full+
#:+hls2bgr-full+
#:+hls2rgb-full+
#:+lbgr2lab+
#:+lrgb2lab+
#:+lbgr2luv+
#:+lrgb2luv+
#:+lab2lbgr+
#:+lab2lrgb+
#:+luv2lbgr+
#:+luv2lrgb+
#:+bgr2yuv+
#:+rgb2yuv+
#:+yuv2bgr+
#:+yuv2rgb+
#:+bayerbg2gray+
#:+bayergb2gray+
#:+bayerrg2gray+
#:+bayergr2gray+
#:+yuv2rgb-nv12+
#:+yuv2bgr-nv12+
#:+yuv2rgb-nv21+
#:+yuv2bgr-nv21+
#:+yuv420sp2rgb+
#:+yuv420sp2bgr+
#:+yuv2rgba-nv12+
#:+yuv2bgra-nv12+
#:+yuv2rgba-nv21+
#:+yuv2bgra-nv21+
#:+yuv420sp2rgba+
#:+yuv420sp2bgra+
#:+yuv2rgb-yv12+
#:+yuv2bgr-yv12+
#:+yuv2rgb-iyuv+
#:+yuv2bgr-iyuv+
#:+yuv2rgb-i420+
#:+yuv2bgr-i420+
#:+yuv420p2rgb+
#:+yuv420p2bgr+
#:+yuv2rgba-yv12+
#:+yuv2bgra-yv12+
#:+yuv2rgba-iyuv+
#:+yuv2bgra-iyuv+
#:+yuv2rgba-i420+
#:+yuv2bgra-i420+
#:+yuv420p2rgba+
#:+yuv420p2bgra+
#:+yuv2gray-420+
#:+yuv2gray-nv21+
#:+yuv2gray-nv12+
#:+yuv2gray-yv12+
#:+yuv2gray-iyuv+
#:+yuv2gray-i420+
#:+yuv420sp2gray+
#:+yuv420p2gray+
#:+yuv2rgb-uyvy+
#:+yuv2bgr-uyvy+
+ yuv2rgb - vyuy+
#:+yuv2rgb-y422+
#:+yuv2bgr-y422+
#:+yuv2rgb-uynv+
#:+yuv2bgr-uynv+
#:+yuv2rgba-uyvy+
#:+yuv2bgra-uyvy+
#:+yuv2rgba-y422+
#:+yuv2bgra-y422+
#:+yuv2rgba-uynv+
#:+yuv2bgra-uynv+
#:+yuv2rgb-yuy2+
#:+yuv2bgr-yuy2+
#:+yuv2rgb-yvyu+
#:+yuv2bgr-yvyu+
#:+yuv2rgb-yuyv+
#:+yuv2bgr-yuyv+
#:+yuv2rgb-yunv+
#:+yuv2bgr-yunv+
#:+yuv2rgba-yuy2+
#:+yuv2bgra-yuy2+
#:+yuv2rgba-yvyu+
#:+yuv2bgra-yvyu+
#:+yuv2rgba-yuyv+
#:+yuv2bgra-yuyv+
#:+yuv2rgba-yunv+
#:+yuv2bgra-yunv+
#:+yuv2gray-uyvy+
#:+yuv2gray-yuy2+
#:+yuv2gray-y422+
#:+yuv2gray-uynv+
#:+yuv2gray-yvyu+
#:+yuv2gray-yuyv+
#:+yuv2gray-yunv+
#:+rgba2mrgba+
#:+mrgba2rgba+
#:+rgb2yuv-i420+
#:+bgr2yuv-i420+
#:+rgb2yuv-iyuv+
#:+bgr2yuv-iyuv+
#:+rgba2yuv-i420+
#:+bgra2yuv-i420+
#:+rgba2yuv-iyuv+
#:+bgra2yuv-iyuv+
#:+rgb2yuv-yv12+
#:+bgr2yuv-yv12+
#:+rgba2yuv-yv12+
#:+bgra2yuv-yv12+
#:+colorcvt-max+
#:+thresh-binary+
#:+thresh-binary-inv+
#:+thresh-trunc+
#:+thresh-tozero+
#:+thresh-tozero-inv+
#:+adaptitor-doubve-thresh-mean-c+
#:+adaptive-thresh-gaussian-c+
#:+dist-l1+
#:+dist-l2+
#:+dist-c+
#:+dist-label-ccomp+
#:+dist-label-pixel+
#:+dist-mask-3+
#:+dist-mask-5+
#:+dist-mask-precise+
#:+floodfill-fixed-range+
#:+floodfill-mask-only+
#:adaptive-threshold
#:cvt-color
#:distance-transform
#:flood-fill
#:threshold
imgproc - Histograms
#:equalize-hist
imgproc - Motion Analysis and Object Tracking
#:phase-correlate
imgproc - Feature Detection
#:+lsd-refine-none+
#:+lsd-refine-std+
#:+lsd-refine-adv+
#:canny
#:corner-sub-pix
#:good-features-to-track
#:hough-lines
#:hough-lines-p
#:+tm-sqdiff+
#:+tm-sqdiff-normed+
#:+tm-ccorr+
#:+tm-ccorr-normed+
#:+tm-ccoeff+
#:+tm-ccoeff-normed+
#:+cvtimg-flip+
#:+cvtimage-swap-rb+
#:+event-mousemove+
#:+event-lbuttondown+
#:+event-rbuttondown+
#:+event-mbuttondown+
#:+event-lbuttonup+
#:+event-rbuttonup+
#:+event-mbuttonup+
#:+event-lbuttondblclk+
#:+event-rbuttondblclk+
#:+event-mbuttondblclk+
#:+event-flag-lbutton+
#:+event-flag-rbutton+
#:+event-flag-mbutton+
#:+event-flag-ctrlkey+
#:+event-flag-shiftkey+
#:+event-flag-altkey+
#:+window-normal+
#:+window-autosize+
#:convert-image
#:create-trackbar
#:destroy-all-windows
#:%destroy-window
#:destroy-window
#:get-trackbar-pos
#:imshow
#:move-window
#:set-mouse-callback
#:set-trackbar-pos
#:named-window
#:start-window-thread
#:wait-key
#:+cap-any+
#:+cap-prop-pos-msec+
#:+cap-prop-pos-frames+
#:+cap-prop-pos-avi-ratio+
#:+cap-prop-frame-width+
#:+cap-prop-frame-height+
#:+cap-prop-fps+
#:+cap-prop-fourcc+
#:+cap-prop-frame-count+
#:+cap-prop-format+
#:+cap-prop-mode+
#:+cap-prop-brightness+
#:+cap-prop-contrast+
#:+cap-prop-saturation+
#:+cap-prop-hue+
#:+cap-prop-gain+
#:+cap-prop-exposure+
#:+cap-prop-convert-rgb+
#:+cap-prop-white-balance+
#:+cap-prop-rectification+
#:+imread_unchanged+
#:+imread_grayscale+
#:+imread_color+
#:+imread_anydepth+
#:+imread_anycolor+
#:+load-image-unchanged+
#:+load-image-grayscale+
#:+load-image-color+
#:+load-image-anydepth+
#:+load-image-anycolor+
#:cap-is-open
#:four-cc
#:grab
#:%imread
#:imread
#:imwrite
#:make-video-capture
#:make-video-writer
#:retrieve
#:video-capture
#:video-capture-get
#:video-capture-grab
#:video-capture-is-opened
#:video-capture-read
#:video-capture-release
#:video-capture-retrieve
#:video-capture-set
#:video-writer
#:video-writer-four-cc
#:video-writer-is-opened
#:video-writer-write
highgui - Qt New Functions
#:+window-fullscreen+
#:+window-freeratio+
#:+window-keepratio+
#:+wnd-prop-fullscreen+
#:+wnd-prop-autosize+
#:+wnd-prop-aspectratio+
#:display-overlay
#:get-window-property
#:set-window-property
#:+optflow-use-initial-flow+
#:+optflow-lk-get-min-eigenvals+
#:+optflow-farneback-gaussian+
#:+motion-translation+
#:+motion-euclidean+
#:+motion-affine+
#:+motion-homography+
calib3d - Camera Calibration and 3D Reconstruction
#:+calib-cb-adaptive-thresh+
#:+calib-cb-normalize-image+
#:+calib-cb-filter-quads+
#:+calib-cb-fast-check+
#:+lmeds+
#:+ransac+
#:find-homography
#:brisk
#:make-brisk
#:feature-2d-compute
#:feature-2d-create
#:feature-detector-create
#:feature-detector-detect
#:descriptor-extractor-compute
#:descriptor-extractor-create
#:bf-matcher
#:descriptor-matcher-create
#:descriptor-matcher-match
#:flann-based-matcher
#:make-bf-matcher
#:make-flann-based-matcher
#:+draw-matches-flags-default+
#:+draw-matches-flags-draw-over-outimg+
#:+draw-matches-flags-not-draw-single-points+
#:+draw-matches-flags-draw-rich-keypoints+
#:draw-matches
#:+hog-descriptor-l-2-hys+
#:+hog-descriptor-default-nlevels+
#:+cascade-do-canny-pruning+
#:+cascade-scale-image+
#:+cascade-find-biggest-object+
#:+cascade-do-rough-search+
#:cascade-classifier
#:cascade-classifier-load
#:detect-multi-scale
#:make-cascade-classifier
#:+var-numerical+
#:+var-ordered+
#:+var-categorical+
#:+col-sample+
#:+row-sample+
#:stat-model-load
#:stat-model-save
#:make-normal-bayes-classifier
#:normal-bayes-classifier
#:normal-bayes-classifier-predict
#:k-nearest
#:k-nearest-find-nearest
#:make-k-nearest
ml - Support Vector Machines
#:+svm-params-c-svc+
#:+svm-params-nu-svc+
#:+svm-params-one-class+
#:+svm-params-eps-svr+
#:+svm-params-nu-svr+
#:+svm-params-linear+
#:+svm-params-poly+
#:+svm-params-rbf+
#:+svm-params-sigmoid+
#:+svm-params-chi2+
#:+svm-params-inter+
#:+svm-params-c+
#:+svm-params-gamma+
#:+svm-params-p+
#:+svm-params-nu+
#:+svm-params-coef+
#:+svm-params-degree+
#:c
#:class-weights
#:coef-0
#:degree
#:gamma
#:get-support-vector
#:get-support-vector-count
#:kernel-type
#:make-svm
#:make-svm-params
#:nu
#:p
#:svm
#:svm-params-0
#:svm-params
#:svm-predict
#:svm-train
#:svm-type
#:term-crit
#:d-tree
#:d-tree-params
#:d-tree-predict
#:d-tree-train
#:make-d-tree
#:make-d-tree-params
ml - Neural Networks
#:+ann-mlp-identity+
#:+ann-mlp-sigmoid-sym+
#:+ann-mlp-gaussian+
#:+ann-mlp-train-params-backprop+
#:+ann-mlp-train-params-rprop+
#:+update-weights+
#:+no-input-scale+
#:+no-output-scale+
#:ann-mlp
#:ann-mlp-create
#:ann-mlp-predict
#:ann-mlp-train
#:ann-mlp-train-params
#:make-ann-mlp
#:make-ann-mlp-train-params
#:+inpaint-ns+
#:+inpaint-telea+
#:in-paint
#:decolor
#:+normal-clone+
#:+mixed-clone+
#:+feature-exchange+
#:color-change
#:illumination-change
#:seamless-clone
#:texture-flattening
#:+recurs-filter+
#:+normconv-filter+
#:+monochrome-transfer+
#:detail-enhance
#:edge-preserving-filter
#:pencil-sketch
#:stylization
#:make-surf
#:surf
#:+colormap-autumn+
#:+colormap-bone+
#:+colormap-jet+
#:+colormap-winter+
#:+colormap-rainbow+
#:+colormap-ocean+
#:+colormap-summer+
#:+colormap-spring+
#:+colormap-cool+
#:+colormap-hsv+
#:+colormap-pink+
#:+colormap-hot+
#:apply-color-map
))
|
551a7f77bbded8884176ad510eeac52561d81c2df65d449ef3edd5d6bd136f82 | Idorobots/spartan | ast.rkt | #lang racket
AST
(require "ast/utils.rkt")
(require "ast/nodes.rkt")
(require "ast/eqv.rkt")
(require "ast/match.rkt")
(provide (all-from-out "ast/utils.rkt"))
(provide (all-from-out "ast/nodes.rkt"))
(provide (all-from-out "ast/eqv.rkt"))
(provide (all-from-out "ast/match.rkt"))
| null | https://raw.githubusercontent.com/Idorobots/spartan/ef3b032906655585d284f1c9a33a58f1e35cb180/src/compiler/ast.rkt | racket | #lang racket
AST
(require "ast/utils.rkt")
(require "ast/nodes.rkt")
(require "ast/eqv.rkt")
(require "ast/match.rkt")
(provide (all-from-out "ast/utils.rkt"))
(provide (all-from-out "ast/nodes.rkt"))
(provide (all-from-out "ast/eqv.rkt"))
(provide (all-from-out "ast/match.rkt"))
| |
76e2bcff3a2826d1a9d589453b9738692954f0c07cbb59a990e65f2aebbf604c | zkincaid/duet | transitionFormula.ml | open Syntax
type 'a t =
{ formula : 'a formula;
symbols : (symbol * symbol) list;
exists : (symbol -> bool) }
include Log.Make(struct let name = "srk.transitionFormula" end)
let identity srk symbols =
let formula =
List.map (fun (sym, sym') ->
mk_eq srk (mk_const srk sym) (mk_const srk sym'))
symbols
|> mk_and srk
in
let exists _ = true in
{ formula; symbols; exists }
let zero srk symbols = let exists _ = true in { formula = mk_false srk; symbols; exists}
let pre_symbols tr_symbols =
List.fold_left (fun set (s,_) ->
Symbol.Set.add s set)
Symbol.Set.empty
tr_symbols
let post_symbols tr_symbols =
List.fold_left (fun set (_,s') ->
Symbol.Set.add s' set)
Symbol.Set.empty
tr_symbols
(* Map from pre-state vars to their post-state counterparts *)
let post_map srk tr_symbols =
List.fold_left
(fun map (sym, sym') -> Symbol.Map.add sym (mk_const srk sym') map)
Symbol.Map.empty
tr_symbols
let pre_map srk tr_symbols =
List.fold_left
(fun map (sym, sym') -> Symbol.Map.add sym' (mk_const srk sym) map)
Symbol.Map.empty
tr_symbols
let formula tf = tf.formula
let symbols tf = tf.symbols
let exists tf = tf.exists
let make ?(exists=fun _ -> true) formula symbols =
{ exists; formula; symbols }
let wedge_hull srk tf =
let post_symbols = post_symbols tf.symbols in
let subterm x = not (Symbol.Set.mem x post_symbols) in
Wedge.abstract ~exists:tf.exists ~subterm srk tf.formula
let is_symbolic_constant tf =
let pre_symbols = pre_symbols tf.symbols in
let post_symbols = post_symbols tf.symbols in
fun x -> tf.exists x && (not (Symbol.Set.mem x pre_symbols || Symbol.Set.mem x post_symbols))
let symbolic_constants tf =
Symbol.Set.filter (is_symbolic_constant tf) (Syntax.symbols tf.formula)
let mul srk tf1 tf2 =
if (tf1.symbols != tf2.symbols) then
invalid_arg "TransitionFormula.mul: incompatible transition formulas";
let fresh_symbols = ref Symbol.Set.empty in
let (map1, map2) =
List.fold_left (fun (phi_map, psi_map) (sym, sym') ->
let mid_name = "mid_" ^ (show_symbol srk sym) in
let mid_symbol =
mk_symbol srk ~name:mid_name (typ_symbol srk sym)
in
fresh_symbols := Symbol.Set.add mid_symbol (!fresh_symbols);
let mid = mk_const srk mid_symbol in
(Symbol.Map.add sym' mid phi_map,
Symbol.Map.add sym mid psi_map))
(Symbol.Map.empty, Symbol.Map.empty)
tf1.symbols
in
let subst1 = substitute_map srk map1 in
let rename =
Memo.memo (fun x ->
let fresh =
mk_symbol srk ~name:(show_symbol srk x) (typ_symbol srk x)
in
fresh_symbols := Symbol.Set.add fresh (!fresh_symbols);
mk_const srk fresh)
in
rename constants
substitute_const srk
(fun x ->
if Symbol.Map.mem x map2 then
Symbol.Map.find x map2
else if tf2.exists x then
mk_const srk x
else rename x)
in
{ symbols = tf1.symbols;
exists = (fun x -> tf1.exists x && not (Symbol.Set.mem x !fresh_symbols));
formula = mk_and srk [subst1 tf1.formula; subst2 tf2.formula] }
let add srk tf1 tf2 =
if (tf1.symbols != tf2.symbols) then
invalid_arg "TransitionFormula.add: incompatible transition formulas";
{ tf1 with formula = mk_or srk [tf1.formula; tf2.formula] }
let linearize srk tf =
{ tf with formula = Nonlinear.linearize srk tf.formula }
let map_formula f tf = { tf with formula = f tf.formula }
let preimage srk tf state =
logf "preimage of transition formula: %a" (Formula.pp srk) tf.formula;
logf "and state formula: %a" (Formula.pp srk) state;
let open Syntax in
let tf = linearize srk tf in
let fresh_skolem =
Memo.memo (fun sym ->
let name = show_symbol srk sym in
let typ = typ_symbol srk sym in
mk_const srk (mk_symbol srk ~name typ))
in
let post_map =
List.fold_left
(fun map (sym, sym') -> Symbol.Map.add sym sym' map)
Symbol.Map.empty
tf.symbols
in
let pre_map =
List.fold_left
(fun map (sym, sym') -> Symbol.Map.add sym' sym map)
Symbol.Map.empty
tf.symbols
in
(* let post_map = post_map srk tf.symbols in *)
(* let pre_map = pre_map srk tf.symbols in *)
let pre_to_fresh_skolems_map = Symbol.Map.fold
(fun sym _ m ->
Symbol.Map.add sym (fresh_skolem sym) m)
post_map
Symbol.Map.empty in
let subst_tf sym =
match Symbol.Map.find_opt sym pre_map with
| Some pre_symbol -> Symbol.Map.find pre_symbol pre_to_fresh_skolems_map
| None -> mk_const srk sym
in
let subst_state sym =
match ((exists tf) sym) with
| true ->
begin
match (Symbol.Map.find_opt sym post_map) with
| Some _ -> Symbol.Map.find sym pre_to_fresh_skolems_map
| _ -> mk_const srk sym
end
| false -> fresh_skolem sym
in
let result = mk_and srk [substitute_const srk subst_tf (formula tf); substitute_const srk subst_state state]
in
logf "result state formula: %a" (Formula.pp srk) result;
result
| null | https://raw.githubusercontent.com/zkincaid/duet/eb3dbfe6c51d5e1a11cb39ab8f70584aaaa309f9/srk/src/transitionFormula.ml | ocaml | Map from pre-state vars to their post-state counterparts
let post_map = post_map srk tf.symbols in
let pre_map = pre_map srk tf.symbols in | open Syntax
type 'a t =
{ formula : 'a formula;
symbols : (symbol * symbol) list;
exists : (symbol -> bool) }
include Log.Make(struct let name = "srk.transitionFormula" end)
let identity srk symbols =
let formula =
List.map (fun (sym, sym') ->
mk_eq srk (mk_const srk sym) (mk_const srk sym'))
symbols
|> mk_and srk
in
let exists _ = true in
{ formula; symbols; exists }
let zero srk symbols = let exists _ = true in { formula = mk_false srk; symbols; exists}
let pre_symbols tr_symbols =
List.fold_left (fun set (s,_) ->
Symbol.Set.add s set)
Symbol.Set.empty
tr_symbols
let post_symbols tr_symbols =
List.fold_left (fun set (_,s') ->
Symbol.Set.add s' set)
Symbol.Set.empty
tr_symbols
let post_map srk tr_symbols =
List.fold_left
(fun map (sym, sym') -> Symbol.Map.add sym (mk_const srk sym') map)
Symbol.Map.empty
tr_symbols
let pre_map srk tr_symbols =
List.fold_left
(fun map (sym, sym') -> Symbol.Map.add sym' (mk_const srk sym) map)
Symbol.Map.empty
tr_symbols
let formula tf = tf.formula
let symbols tf = tf.symbols
let exists tf = tf.exists
let make ?(exists=fun _ -> true) formula symbols =
{ exists; formula; symbols }
let wedge_hull srk tf =
let post_symbols = post_symbols tf.symbols in
let subterm x = not (Symbol.Set.mem x post_symbols) in
Wedge.abstract ~exists:tf.exists ~subterm srk tf.formula
let is_symbolic_constant tf =
let pre_symbols = pre_symbols tf.symbols in
let post_symbols = post_symbols tf.symbols in
fun x -> tf.exists x && (not (Symbol.Set.mem x pre_symbols || Symbol.Set.mem x post_symbols))
let symbolic_constants tf =
Symbol.Set.filter (is_symbolic_constant tf) (Syntax.symbols tf.formula)
let mul srk tf1 tf2 =
if (tf1.symbols != tf2.symbols) then
invalid_arg "TransitionFormula.mul: incompatible transition formulas";
let fresh_symbols = ref Symbol.Set.empty in
let (map1, map2) =
List.fold_left (fun (phi_map, psi_map) (sym, sym') ->
let mid_name = "mid_" ^ (show_symbol srk sym) in
let mid_symbol =
mk_symbol srk ~name:mid_name (typ_symbol srk sym)
in
fresh_symbols := Symbol.Set.add mid_symbol (!fresh_symbols);
let mid = mk_const srk mid_symbol in
(Symbol.Map.add sym' mid phi_map,
Symbol.Map.add sym mid psi_map))
(Symbol.Map.empty, Symbol.Map.empty)
tf1.symbols
in
let subst1 = substitute_map srk map1 in
let rename =
Memo.memo (fun x ->
let fresh =
mk_symbol srk ~name:(show_symbol srk x) (typ_symbol srk x)
in
fresh_symbols := Symbol.Set.add fresh (!fresh_symbols);
mk_const srk fresh)
in
rename constants
substitute_const srk
(fun x ->
if Symbol.Map.mem x map2 then
Symbol.Map.find x map2
else if tf2.exists x then
mk_const srk x
else rename x)
in
{ symbols = tf1.symbols;
exists = (fun x -> tf1.exists x && not (Symbol.Set.mem x !fresh_symbols));
formula = mk_and srk [subst1 tf1.formula; subst2 tf2.formula] }
let add srk tf1 tf2 =
if (tf1.symbols != tf2.symbols) then
invalid_arg "TransitionFormula.add: incompatible transition formulas";
{ tf1 with formula = mk_or srk [tf1.formula; tf2.formula] }
let linearize srk tf =
{ tf with formula = Nonlinear.linearize srk tf.formula }
let map_formula f tf = { tf with formula = f tf.formula }
let preimage srk tf state =
logf "preimage of transition formula: %a" (Formula.pp srk) tf.formula;
logf "and state formula: %a" (Formula.pp srk) state;
let open Syntax in
let tf = linearize srk tf in
let fresh_skolem =
Memo.memo (fun sym ->
let name = show_symbol srk sym in
let typ = typ_symbol srk sym in
mk_const srk (mk_symbol srk ~name typ))
in
let post_map =
List.fold_left
(fun map (sym, sym') -> Symbol.Map.add sym sym' map)
Symbol.Map.empty
tf.symbols
in
let pre_map =
List.fold_left
(fun map (sym, sym') -> Symbol.Map.add sym' sym map)
Symbol.Map.empty
tf.symbols
in
let pre_to_fresh_skolems_map = Symbol.Map.fold
(fun sym _ m ->
Symbol.Map.add sym (fresh_skolem sym) m)
post_map
Symbol.Map.empty in
let subst_tf sym =
match Symbol.Map.find_opt sym pre_map with
| Some pre_symbol -> Symbol.Map.find pre_symbol pre_to_fresh_skolems_map
| None -> mk_const srk sym
in
let subst_state sym =
match ((exists tf) sym) with
| true ->
begin
match (Symbol.Map.find_opt sym post_map) with
| Some _ -> Symbol.Map.find sym pre_to_fresh_skolems_map
| _ -> mk_const srk sym
end
| false -> fresh_skolem sym
in
let result = mk_and srk [substitute_const srk subst_tf (formula tf); substitute_const srk subst_state state]
in
logf "result state formula: %a" (Formula.pp srk) result;
result
|
52048011e82e6b0c56ea00126464b30e2c7007713e4a3459f8408da8fa0cf2f5 | lambdaisland/uri | normalize_test.cljc | (ns lambdaisland.uri.normalize-test
(:require [lambdaisland.uri :as uri]
[lambdaisland.uri.normalize :as n]
[clojure.test :refer [deftest testing is are]]))
(deftest normalize-test
(are [x y] (= (-> x uri/parse n/normalize str) y)
" b c" ""
"" ""
"/𝍖" "/%F0%9D%8D%96"
"" ""
" are welcome 🙂" "" )
(are [x y] (= (-> x n/normalize str) y)
(uri/map->URI {:query "x=y"}) "?x=y"
(uri/map->URI {:query "x=?y#"}) "?x=?y%23"
(uri/map->URI {:query "foo=bar"}) "?foo=bar"
(uri/map->URI {:query "foo=b%61r"}) "?foo=bar"
(uri/map->URI {:query "foo=bar%3Dbaz"}) "?foo=bar%3Dbaz"
(uri/map->URI {:query "foo=%20%2B%26xxx%3D123"}) "?foo=%20%2B%26xxx%3D123"
(uri/map->URI {:query "text=You are welcome 🙂"}) "?text=You%20are%20welcome%20%F0%9F%99%82"
))
(deftest char-seq-test
(let [long-string (->> "s"
Long enough to trigger StackOverflow in non - tail recursive cases .
(repeat 5000)
(apply str))
long-string-len (count long-string)
cs (n/char-seq long-string)]
(is (= long-string-len (count cs)))
(is (every? #{"s"} cs))))
(deftest normalize-path-test
(are [x y] (= (n/normalize-path x) y)
"/abc" "/abc"
"𝍖" "%F0%9D%8D%96"))
(deftest percent-encode-test
(are [class comp result] (= (n/percent-encode comp class) result)
:alpha "abcAbc" "abcAbc"
:alpha "abc123" "abc%31%32%33"
:path "abc/123" "abc/123"
:path "abc/123:/#" "abc/123:/%23"
:path "𝍖" "%F0%9D%8D%96"))
(deftest percent-decode-test
(are [in out] (= (n/percent-decode in) out)
"%61%62%63" "abc"
"%F0%9F%99%88%F0%9F%99%89" "🙈🙉"))
| null | https://raw.githubusercontent.com/lambdaisland/uri/082ecc790139a6f397ecbd7a16432e97a31fe5bd/test/lambdaisland/uri/normalize_test.cljc | clojure | (ns lambdaisland.uri.normalize-test
(:require [lambdaisland.uri :as uri]
[lambdaisland.uri.normalize :as n]
[clojure.test :refer [deftest testing is are]]))
(deftest normalize-test
(are [x y] (= (-> x uri/parse n/normalize str) y)
" b c" ""
"" ""
"/𝍖" "/%F0%9D%8D%96"
"" ""
" are welcome 🙂" "" )
(are [x y] (= (-> x n/normalize str) y)
(uri/map->URI {:query "x=y"}) "?x=y"
(uri/map->URI {:query "x=?y#"}) "?x=?y%23"
(uri/map->URI {:query "foo=bar"}) "?foo=bar"
(uri/map->URI {:query "foo=b%61r"}) "?foo=bar"
(uri/map->URI {:query "foo=bar%3Dbaz"}) "?foo=bar%3Dbaz"
(uri/map->URI {:query "foo=%20%2B%26xxx%3D123"}) "?foo=%20%2B%26xxx%3D123"
(uri/map->URI {:query "text=You are welcome 🙂"}) "?text=You%20are%20welcome%20%F0%9F%99%82"
))
(deftest char-seq-test
(let [long-string (->> "s"
Long enough to trigger StackOverflow in non - tail recursive cases .
(repeat 5000)
(apply str))
long-string-len (count long-string)
cs (n/char-seq long-string)]
(is (= long-string-len (count cs)))
(is (every? #{"s"} cs))))
(deftest normalize-path-test
(are [x y] (= (n/normalize-path x) y)
"/abc" "/abc"
"𝍖" "%F0%9D%8D%96"))
(deftest percent-encode-test
(are [class comp result] (= (n/percent-encode comp class) result)
:alpha "abcAbc" "abcAbc"
:alpha "abc123" "abc%31%32%33"
:path "abc/123" "abc/123"
:path "abc/123:/#" "abc/123:/%23"
:path "𝍖" "%F0%9D%8D%96"))
(deftest percent-decode-test
(are [in out] (= (n/percent-decode in) out)
"%61%62%63" "abc"
"%F0%9F%99%88%F0%9F%99%89" "🙈🙉"))
| |
1a878f71cd474710161b3477611c9ef8b4517e8a679eeb054475a0939c10511c | MinaProtocol/mina | verification_key.mli | module Data : sig
module Stable : sig
module V1 : sig
type t = { constraints : int } [@@deriving yojson]
include Pickles_types.Sigs.VERSIONED
end
end
type t = Stable.V1.t = { constraints : int } [@@deriving yojson]
end
module Stable : sig
module V2 : sig
type t =
{ commitments :
Backend.Tock.Curve.Affine.t
Pickles_types.Plonk_verification_key_evals.t
; index : Impls.Wrap.Verification_key.t
; data : Data.t
}
[@@deriving fields, to_yojson, bin_shape, bin_io]
include Pickles_types.Sigs.VERSIONED
end
module Latest = V2
end
type t = Stable.Latest.t =
{ commitments :
Backend.Tock.Curve.Affine.t Pickles_types.Plonk_verification_key_evals.t
; index : Impls.Wrap.Verification_key.t
; data : Data.t
}
[@@deriving fields, to_yojson]
val dummy_commitments : 'a -> 'a Pickles_types.Plonk_verification_key_evals.t
val dummy : Stable.Latest.t lazy_t
| null | https://raw.githubusercontent.com/MinaProtocol/mina/b19a220d87caa129ed5dcffc94f89204ae874661/src/lib/pickles/verification_key.mli | ocaml | module Data : sig
module Stable : sig
module V1 : sig
type t = { constraints : int } [@@deriving yojson]
include Pickles_types.Sigs.VERSIONED
end
end
type t = Stable.V1.t = { constraints : int } [@@deriving yojson]
end
module Stable : sig
module V2 : sig
type t =
{ commitments :
Backend.Tock.Curve.Affine.t
Pickles_types.Plonk_verification_key_evals.t
; index : Impls.Wrap.Verification_key.t
; data : Data.t
}
[@@deriving fields, to_yojson, bin_shape, bin_io]
include Pickles_types.Sigs.VERSIONED
end
module Latest = V2
end
type t = Stable.Latest.t =
{ commitments :
Backend.Tock.Curve.Affine.t Pickles_types.Plonk_verification_key_evals.t
; index : Impls.Wrap.Verification_key.t
; data : Data.t
}
[@@deriving fields, to_yojson]
val dummy_commitments : 'a -> 'a Pickles_types.Plonk_verification_key_evals.t
val dummy : Stable.Latest.t lazy_t
| |
00d0784bd9f817c671e03f5dce16e6a780ba12f86f42a3b4c5647363314d41db | hipsleek/hipsleek | partial.ml | (* See copyright notice at the end of the file *)
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Partial Evaluation & Constant Folding
*
* Soundness Assumptions :
* ( 1 ) Whole program analysis . You may call functions that are not defined
* ( e.g. , library functions ) but they may not call back .
* ( 2 ) An undefined function may not return the address of a function whose
* address is not already taken in the code I can see .
* ( 3 ) A function pointer call may only call a function that has its
* address visibly taken in the code I can see .
*
* ( More assumptions in the comments below )
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Partial Evaluation & Constant Folding
*
* Soundness Assumptions:
* (1) Whole program analysis. You may call functions that are not defined
* (e.g., library functions) but they may not call back.
* (2) An undefined function may not return the address of a function whose
* address is not already taken in the code I can see.
* (3) A function pointer call may only call a function that has its
* address visibly taken in the code I can see.
*
* (More assumptions in the comments below)
*****************************************************************************)
open Cil
open Cilint
open Pretty
(*****************************************************************************
* A generic signature for Alias Analysis information. Used to compute the
* call graph and do symbolic execution.
****************************************************************************)
(* An alias analysis answers two questions: may these two expressions
 * denote the same value, and which functions might a given function
 * pointer expression call? *)
module type AliasInfo =
  sig
    (* One-time initialization over the whole file; must be called before
     * the query functions below give valid answers. *)
    val setup : Cil.file -> unit
    (* May the two expressions evaluate to the same value? A sound but
     * imprecise implementation may always answer [true]. *)
    val can_have_the_same_value : Cil.exp -> Cil.exp -> bool
    (* Every function definition the given function-pointer expression
     * could possibly call. *)
    val resolve_function_pointer : Cil.exp -> Cil.fundec list
  end
(*****************************************************************************
 * A generic signature for Symbolic Execution algorithms. Such
 * algorithms are used below to perform constant folding and dead-code
 * elimination. You write a "basic-block" symex algorithm, we'll make it
 * a whole-program CFG-pruner.
 ****************************************************************************)
(* A symbolic-execution state tracks what is known about program values at
 * a program point; the operations below thread that state through
 * assignments, calls and joins.
 *
 * BUG FIX: the declarations of [t] and [assembly] had been lost (only
 * their trailing comments remained), leaving the signature referring to
 * an undeclared type [t]. Both are restored here. *)
module type Symex =
sig
  type t (* the type of a symex algorithm state object *)
  val empty : t (* all values unknown *)
  val equal : t -> t -> bool (* are these the same? *)
  val assign : t -> Cil.lval -> Cil.exp -> (Cil.exp * t)
  (* incorporate an assignment, return the RHS *)
  val unassign : t -> Cil.lval -> t
  (* lose all information about the given lvalue: assume an
   * unknown external value has been assigned to it *)
  val assembly : t -> Cil.instr -> t (* handle ASM *)
  val assume : t -> Cil.exp -> t (* incorporate an assumption *)
  val evaluate : t -> Cil.exp -> Cil.exp (* symbolic evaluation *)
  val join : (t list) -> t (* join a bunch of states *)
  val call : t -> Cil.fundec -> (Cil.exp list) -> (Cil.exp list * t)
  (* we are calling the given function with the given actuals *)
  val return : t -> Cil.fundec -> t
  (* we are returning from the given function *)
  val call_to_unknown_function : t -> t
  (* throw away information that may have been changed *)
  val debug : t -> unit
end
(*****************************************************************************
 * A generic signature for whole-program call graphs.
 ****************************************************************************)
(* One node per function defined in the file: the function itself plus its
 * direct callers and callees. The edge lists are filled in imperatively
 * while the file is visited. *)
type callGraphNode = {
  fd : Cil.fundec;                    (* the function this node describes *)
  mutable calledBy : Cil.fundec list; (* functions that may call [fd] *)
  mutable calls : Cil.fundec list     (* functions that [fd] may call *)
}

(* A whole-program call graph, keyed by each function's [varinfo]. *)
type callNodeHash = (Cil.varinfo, callGraphNode) Hashtbl.t
(* Whole-program call graph over the concrete [callNodeHash]
 * representation declared above. *)
module type CallGraph =
  sig
    (* Build the call graph for an entire file. *)
    val compute : Cil.file -> callNodeHash
    (* Which functions may this function call? *)
    val can_call : callNodeHash -> Cil.fundec -> Cil.fundec list
    (* Which functions may call this function? *)
    val can_be_called_by : callNodeHash -> Cil.fundec -> Cil.fundec list
    (* Look up the definition behind a function's [varinfo]. *)
    val fundec_of_varinfo : callNodeHash -> Cil.varinfo -> Cil.fundec
  end
(* Variant of [CallGraph] with the graph representation kept abstract.
 * NOTE(review): no use of this signature is visible in this chunk —
 * confirm whether it is still needed. *)
module type CallGraph' =
  sig
    type t (* the type of a call graph *)
    val compute : Cil.file -> t (* file for which we compute the graph *)
    val can_call : t -> Cil.fundec -> Cil.fundec list
    val can_be_called_by : t -> Cil.fundec -> Cil.fundec list
    val fundec_of_varinfo : t -> Cil.varinfo -> Cil.fundec
  end
(*****************************************************************************
 * My cheap-o Alias Analysis. Assume all expressions can have the same
 * value and any function with its address taken can be the target of
 * any function pointer.
 *
 * Soundness Assumptions:
 * (1) Someone must call "find_all_functions_with_address_taken" before the
 *     results are valid. This is already done in the code below.
 ****************************************************************************)
(* Maximally conservative alias analysis: any two expressions may alias,
 * and any address-taken function is a possible indirect-call target. *)
module EasyAlias : AliasInfo =
struct
  (* Every fundec whose address is syntactically taken somewhere in the
   * file. Filled in once by [setup]. *)
  let all_functions_with_address_taken = ref []

  let find_all_functions_with_address_taken (file : Cil.file) =
    let record_if_address_taken global =
      match global with
        GFun (fd, _) when fd.svar.vaddrof ->
          all_functions_with_address_taken :=
            fd :: !all_functions_with_address_taken
      | _ -> ()
    in
    iterGlobals file record_if_address_taken

  let setup file = find_all_functions_with_address_taken file

  (* Conservative: everything may alias everything. *)
  let can_have_the_same_value _lhs _rhs = true

  (* Conservative: any address-taken function may be the target. *)
  let resolve_function_pointer _fptr = !all_functions_with_address_taken
end
(*****************************************************************************
 * Alias analysis using CIL's Ptranal feature.
 ****************************************************************************)
(* Alias information backed by CIL's Ptranal points-to analysis. When
 * Ptranal has no answer for a query we fall back on the maximally
 * conservative [EasyAlias] results. *)
module PtranalAlias : AliasInfo =
struct
  (* [EasyAlias.setup] also records address-taken functions, which the
   * fallback in [resolve_function_pointer] relies on. *)
  let setup file = EasyAlias.setup file

  let can_have_the_same_value lhs rhs =
    try Ptranal.may_alias lhs rhs
    with Not_found -> true (* no points-to info: be conservative *)

  let resolve_function_pointer fptr =
    try Ptranal.resolve_funptr fptr
    with Not_found -> EasyAlias.resolve_function_pointer fptr
end
(*****************************************************************************
 * My particular method for computing the Call Graph.
 ****************************************************************************)
(* Syntactic call-graph construction: one node per defined function,
 * with indirect calls resolved through the alias analysis [A]. *)
module EasyCallGraph = functor (A : AliasInfo) ->
struct
  (* Allocate an edgeless node for [fundec] and register it in [cg]. *)
  let cgCreateNode cg fundec =
    let newnode = {
      fd = fundec;
      calledBy = [];
      calls = []
    } in
    Hashtbl.add cg fundec.svar newnode

  let cgFindNode cg svar = Hashtbl.find cg svar

  (* Record a call edge caller -> callee (both given as varinfos).
   * Edges touching a function with no node (i.e., not defined in this
   * file) are silently dropped. *)
  let cgAddEdge cg caller callee =
    try
      let n1 = cgFindNode cg caller in
      let n2 = cgFindNode cg callee in
      n1.calls <- n2.fd :: n1.calls;
      (* BUG FIX: the reverse edge belongs on the *callee*'s node. The
       * old code did [n1.calledBy <- n1.fd :: n1.calledBy], recording
       * each function as calling itself and leaving the callee's
       * calledBy list empty. *)
      n2.calledBy <- n1.fd :: n2.calledBy
    with _ -> ()

  (* Visits every instruction, adding one edge per direct call and one
   * edge per possible target of each indirect call. *)
  class callGraphVisitor cg =
    object
      inherit nopCilVisitor
      val the_fun = ref None (* varinfo of the function being visited *)
      method vinst i =
        begin
          match i with
            Call (_, Lval (Var callee, NoOffset), _, _) ->
              begin
                (* direct call to a known function *)
                match !the_fun with
                  None -> failwith "callGraphVisitor: call outside of any function"
                | Some enclosing -> cgAddEdge cg enclosing callee
              end
          | Call (_, e, _, _) ->
              begin
                (* indirect call: one edge per possible target *)
                match !the_fun with
                  None -> failwith "callGraphVisitor: call outside of any function"
                | Some enclosing ->
                    List.iter
                      (fun possible_target_fd ->
                        cgAddEdge cg enclosing possible_target_fd.svar)
                      (A.resolve_function_pointer e)
              end
          | _ -> ()
        end;
        SkipChildren
      method vfunc f =
        the_fun := Some f.svar;
        DoChildren
    end

  (* Build the call graph for a whole file: create all nodes first, then
   * add the edges in a single visiting pass. *)
  let compute (f : Cil.file) =
    let cg = Hashtbl.create 511 in
    iterGlobals
      f
      (function GFun (fd, _) -> cgCreateNode cg fd
        | _ -> ());
    visitCilFileSameGlobals (new callGraphVisitor cg) f;
    cg

  let can_call cg fd =
    let n = cgFindNode cg fd.svar in n.calls

  let can_be_called_by cg fd =
    let n = cgFindNode cg fd.svar in n.calledBy

  let fundec_of_varinfo cg vi =
    let n = cgFindNode cg vi in n.fd
end (* END OF: module EasyCallGraph *)
(*****************************************************************************
 * Necula's Constant Folding Strategem (re-written to be applicative)
 *
 * Soundness Assumptions:
 * (1) Inline assembly does not affect constant folding.
 ****************************************************************************)
(* An applicative "register file" symex: maps local-variable ids to the
 * expressions last assigned to them, so later uses can be constant
 * folded. Must satisfy the [Symex] signature above.
 *
 * BUG FIXES in this module:
 *  - the [assembly] and [assume] members required by [Symex] had been
 *    lost (only their comments remained); restored below.
 *  - [return] called [IntMap.fold] with the map and initial accumulator
 *    swapped, folding over the empty map and so never dropping the
 *    callee's formals/locals; fixed.
 *  - the module's closing [end] had been lost; restored. *)
module NeculaFolding = functor (A : AliasInfo) ->
struct
  module IntMap = Map.Make (struct
    type t = int
    let compare x y = x - y
  end)

  (* Register file. Maps identifiers of local variables to expressions.
   * We also remember if the expression depends on memory or depends on
   * variables that depend on memory. *)
  type reg = {
    rvi : varinfo; (* the variable this register shadows *)
    rval : exp;    (* its current symbolic value *)
    rmem : bool    (* does [rval] depend on the contents of memory? *)
  }

  type t = reg IntMap.t

  let empty = IntMap.empty

  let equal t1 t2 = (compare t1 t2 = 0) (* OCaml structural compare *)

  (* Set by [rewriteExpClass] when the expression just visited reads
   * memory, directly or through a register that does. *)
  let dependsOnMem = ref false

  (* Rewrite an expression based on the current register file. *)
  class rewriteExpClass (regFile : t) =
    object
      inherit nopCilVisitor
      method vexpr = function
          Lval (Var v, NoOffset) ->
            begin
              try
                let defined = IntMap.find v.vid regFile in
                if defined.rmem then dependsOnMem := true;
                match defined.rval with
                  (* only substitute registers that hold constants *)
                  Const x -> ChangeTo defined.rval
                | _ -> DoChildren
              with Not_found -> DoChildren
            end
        | Lval (Mem _, _) ->
            dependsOnMem := true;
            DoChildren
        | _ -> DoChildren
    end

  (* Rewrite an expression and return the new expression along with an
   * indication of whether it depends on memory. *)
  let rewriteExp r (e : exp) : exp * bool =
    dependsOnMem := false;
    let e' = constFold true (visitCilExpr (new rewriteExpClass r) e) in
    e', !dependsOnMem

  (* Like [rewriteExp] but discards the memory-dependence flag. *)
  let eval r e =
    let new_e, _depends = rewriteExp r e in
    new_e

  (* Memory may have changed: forget every register whose cached value
   * was derived from memory. *)
  let setMemory regFile =
    (* Get a list of all mappings that depend on memory *)
    let depids = ref [] in
    IntMap.iter (fun id v -> if v.rmem then depids := id :: !depids) regFile;
    (* And remove them from the register file *)
    List.fold_left (fun acc id -> IntMap.remove id acc) regFile !depids

  let setRegister regFile (v : varinfo) ((e, b) : exp * bool) =
    IntMap.add v.vid {rvi = v; rval = e; rmem = b} regFile

  let resetRegister regFile (id : int) =
    IntMap.remove id regFile

  (* Sets [contains] when the visited expression mentions lvalue [lv]. *)
  class findLval lv contains =
    object
      inherit nopCilVisitor
      method vlval l =
        if Util.equals l lv then
          begin
            contains := true;
            SkipChildren
          end
        else
          DoChildren
    end

  (* [l] was just overwritten: drop every register whose cached value
   * mentions [l]. *)
  let removeMappingsThatDependOn regFile l =
    (* Get a list of all mappings that depend on l *)
    let depids = ref [] in
    IntMap.iter
      (fun id reg ->
        let found = ref false in
        ignore (visitCilExpr (new findLval l found) reg.rval);
        if !found then depids := id :: !depids)
      regFile;
    (* And remove them from the register file *)
    List.fold_left (fun acc id -> IntMap.remove id acc) regFile !depids

  (* Incorporate the assignment [l := e]; return the rewritten RHS and
   * the updated register file. *)
  let assign r l e =
    let newe, b = rewriteExp r e in
    let r' =
      match l with
        Var v, NoOffset ->
          let r'' = setRegister r v (newe, b) in
          removeMappingsThatDependOn r'' l
      | Mem _, _ -> setMemory r
      | _ -> r
    in newe, r'

  (* An unknown external value has been assigned to [l]. *)
  let unassign r l =
    let r' =
      match l with
        Var v, NoOffset ->
          let r'' = resetRegister r v.vid in
          removeMappingsThatDependOn r'' l
      | Mem _, _ -> setMemory r
      | _ -> r
    in r'

  (* BUG FIX: these two [Symex] members had been lost from the file. *)
  let assembly r i = r (* no-op in Necula-world *)
  let assume r e = r (* no-op in Necula-world *)

  let evaluate r e =
    let newe, _ = rewriteExp r e in
    newe

  (* Join two symex states: keep only the registers on which both sides
   * agree (same cached value, same memory-dependence flag). *)
  let join2 (r1 : t) (r2 : t) =
    let keep = ref [] in
    IntMap.iter
      (fun id reg ->
        try
          let reg' = IntMap.find id r2 in
          if Util.equals reg'.rval reg.rval && reg'.rmem = reg.rmem then
            keep := (id, reg) :: !keep
        with _ -> ())
      r1;
    List.fold_left
      (fun acc (id, v) -> IntMap.add id v acc)
      IntMap.empty
      !keep

  let join (lst : t list) =
    match lst with
      [] -> failwith "empty list"
    | r :: tl ->
        List.fold_left (fun (acc : t) (elt : t) -> join2 acc elt) r tl

  (* Bind the rewritten actuals to [fd]'s formals; return the rewritten
   * actual list and the state on entry to the callee. *)
  let call r fd el =
    let new_arg_list = ref [] in
    let final_r =
      List.fold_left2
        (fun r vi e ->
          let newe, r' = assign r (Var vi, NoOffset) e in
          new_arg_list := newe :: !new_arg_list;
          r')
        r
        fd.sformals el
    in
    (List.rev !new_arg_list), final_r

  (* Returning from [fd]: its formals and locals go out of scope, so
   * drop their registers. *)
  let return r fd =
    let filter_out a_predicate a_map =
      (* BUG FIX: [IntMap.fold f map init] takes the map before the
       * initial accumulator; the arguments were swapped, which made
       * this whole function return [a_map] unchanged. *)
      IntMap.fold
        (fun k v a -> if a_predicate k v then a else IntMap.add k v a)
        a_map
        IntMap.empty
    and formals_and_locals = fd.sformals @ fd.slocals
    in
    filter_out
      (fun k v -> List.mem v.rvi formals_and_locals)
      r

  (* The callee may have changed anything reachable through memory. *)
  let call_to_unknown_function r =
    setMemory r

  let debug r =
    IntMap.iter
      (fun key reg ->
        ignore (Pretty.printf "%s <- %a (%b)@!"
                  reg.rvi.vname d_exp reg.rval reg.rmem))
      r
end (* END OF: module NeculaFolding *)
(*****************************************************************************
 * A transformation to make every function call end its statement. So
 *   { x=1; Foo(); y=1; }
 * becomes at least:
 *   { { x=1; Foo(); }
 *     { y=1; } }
 * But probably more like:
 *   { { x=1; } { Foo(); } { y=1; } }
 ****************************************************************************)
(* Does this instruction list contain at least one function call? *)
let contains_call il =
  List.exists (function Call _ -> true | _ -> false) il
(* Rewrites every [Instr] statement that contains a call into a [Block]
 * of single-instruction statements, so each call ends its own statement
 * (a prerequisite of the dataflow engine below). *)
class callBBVisitor =
  object
    inherit nopCilVisitor
    method vstmt s =
      match s.skind with
        Instr il when contains_call il ->
          begin
            (* one fresh statement per instruction *)
            let list_of_stmts =
              Util.list_map (fun one_inst -> mkStmtOneInstr one_inst) il in
            let block = mkBlock list_of_stmts in
            (* mutate [s] in place after the children are visited, so
             * labels and references to [s] stay valid *)
            ChangeDoChildrenPost
              (s, (fun _ -> s.skind <- Block block; s))
          end
      | _ -> DoChildren

    (* Statements are all we care about; skip everything below them. *)
    method vvdec _ = SkipChildren
    method vexpr _ = SkipChildren
    method vlval _ = SkipChildren
    method vtype _ = SkipChildren
  end
(* Split call-bearing [Instr] statements so every call ends its own
 * statement. See [callBBVisitor]. *)
let calls_end_basic_blocks f =
  visitCilFileSameGlobals (new callBBVisitor) f
(*****************************************************************************
* A transformation that gives each variable a unique identifier.
****************************************************************************)
(* Gives every visited variable declaration a fresh, globally unique
 * [vid], numbering from zero in visiting order. *)
class vidVisitor = object
  inherit nopCilVisitor
  val mutable next_vid = 0
  method vvdec vi =
    vi.vid <- next_vid;
    next_vid <- next_vid + 1;
    SkipChildren
end
(* Renumber every variable in [f] so all [vid]s are globally unique.
 * See [vidVisitor]. *)
let globally_unique_vids f =
  visitCilFileSameGlobals (new vidVisitor) f
(*****************************************************************************
 * The Weimeric Partial Evaluation Data-Flow Engine
 *
 * This functor performs flow-sensitive, context-insensitive whole-program
 * data-flow analysis with an eye toward partial evaluation and constant
 * folding.
 *
 * Toposort the whole-program inter-procedural CFG to compute
 * (1) the number of actual predecessors for each statement
 * (2) the global toposort ordering
 *
 * Perform standard data-flow analysis (joins, etc) on the ICFG until you
 * hit a fixed point. If this changed the structure of the ICFG (by
 * removing an IF-branch or an empty function call), redo the whole thing.
 *
 * Soundness Assumptions:
 * (1) A "call instruction" is the last thing in its statement.
 *     Use "calls_end_basic_blocks" to get this. cil/src/main.ml does
 *     this when you pass --makeCFG.
 * (2) All variables have globally unique identifiers.
 *     Use "globally_unique_vids" to get this. cil/src/main.ml does
 *     this when you pass --makeCFG.
 * (3) This may not be a strict soundness requirement, but I wrote this
 *     assuming that the input file has all switch/break/continue
 *     statements removed.
 ****************************************************************************)
(* The partial-evaluation data-flow functor.  [S] supplies the symbolic
 * execution state, [C] the whole-program call graph, and [A] the alias
 * information.  See the header comment above for the algorithm and its
 * soundness assumptions. *)
module MakePartial =
  functor (S : Symex) ->
    functor (C : CallGraph) ->
      functor (A : AliasInfo) ->
struct
  let debug = false

  (* Sets of {c goto}-targets, compared by label name only. *)
  module LabelSet =
    Set.Make (struct
      type t = label
      let compare x y =
        match x, y with
          Label (name1, _, _), Label (name2, _, _) ->
            String.compare name1 name2
        | _, _ -> 0
    end)

  (* We keep this information about every statement.  Ideally this
   * should be put in the stmt itself, but CIL doesn't give us space. *)
  type sinfo = { (* statement info *)
    incoming_state : (int, S.t) Hashtbl.t;
    (* mapping from predecessor stmt.sid to the Symex.state it sends us *)
    reachable_preds : (int, bool) Hashtbl.t;
    (* basically a set of all of the stmt.sids that can really
     * reach this statement *)
    mutable last_used_state : S.t option;
    (* When we last did the Post() of this statement, what incoming
     * state did we use?  If our new incoming state is the same, we
     * don't have to do it again. *)
    mutable priority : int;
    (* Whole-program toposort priority.  High means "do me first".
     * The first stmt in "main()" will have the highest priority. *)
  }

  (* stmt.sid -> sinfo; cleared and rebuilt on every data-flow pass *)
  let sinfo_ht = Hashtbl.create 511
  let clear_sinfo () = Hashtbl.clear sinfo_ht

  (* We construct sinfo nodes lazily: if you ask for one that isn't
   * there, we build it. *)
  let get_sinfo stmt =
    try
      Hashtbl.find sinfo_ht stmt.sid
    with _ ->
      let new_sinfo = {incoming_state = Hashtbl.create 3;
                       reachable_preds = Hashtbl.create 3;
                       last_used_state = None;
                       priority = (-1)} in (* -1 = not yet visited *)
      Hashtbl.add sinfo_ht stmt.sid new_sinfo;
      new_sinfo

  (* Topological Sort is a DFS in which you assign a priority right as
   * you finish visiting the children.  While we're there we compute the
   * actual set of unique predecessors for each statement; the CIL
   * information may be out of date because we keep changing the CFG by
   * removing IFs and whatnot. *)
  let toposort_counter = ref 1

  (* record that [s1] is a really-reachable predecessor of [s2] *)
  let add_edge s1 s2 =
    let si2 = get_sinfo s2 in
    Hashtbl.replace si2.reachable_preds s1.sid true

  let rec toposort c stmt =
    let si = get_sinfo stmt in
    if si.priority >= 0 then () (* already visited! *)
    else
      begin
        si.priority <- 0; (* currently visiting *)
        (* handle function calls in this basic block: a direct call
         * points at its fundec, an indirect call at every
         * alias-possible target *)
        begin
          match stmt.skind with
            Instr il ->
              List.iter
                (fun i ->
                   let fd_list =
                     match i with
                       Call (_, Lval (Var vi, NoOffset), _, _) ->
                         begin
                           try
                             let fd = C.fundec_of_varinfo c vi in
                             [fd]
                           with e -> [] (* calling external function *)
                         end
                     | Call (_, e, _, _) ->
                         A.resolve_function_pointer e
                     | _ -> []
                   in
                   List.iter
                     (fun fd ->
                        if List.length fd.sbody.bstmts > 0 then
                          let fun_stmt = List.hd fd.sbody.bstmts in
                          add_edge stmt fun_stmt;
                          toposort c fun_stmt)
                     fd_list)
                il
          | _ -> ()
        end;
        List.iter
          (fun succ -> add_edge stmt succ; toposort c succ)
          stmt.succs;
        si.priority <- !toposort_counter;
        incr toposort_counter
      end

  (* we set this to true whenever we eliminate an IF or otherwise
   * change the CFG *)
  let changed_cfg = ref false

  (* Partially evaluate / constant fold a statement.  Basically this
   * just asks the Symex algorithm to evaluate the RHS in the current
   * state and then compute a new state that incorporates the
   * assignment.
   *
   * However, we have special handling for ifs and calls.  If we can
   * evaluate an if predicate to a constant, we remove the if.
   *
   * If we are going to make a call to a function with an empty body,
   * we remove the function call. *)
  let partial_stmt c state stmt handle_funcall =
    let result =
      match stmt.skind with
        Instr il ->
          let state = ref state in
          let new_il =
            Util.list_map
              (fun i ->
                 if debug then
                   ignore (Pretty.printf "Instr %a@!" d_instr i);
                 match i with
                   Set (l, e, loc) ->
                     (* plain assignment: fold the RHS, update the state *)
                     let e', state' = S.assign !state l e in
                     state := state';
                     [Set (l, e', loc)]
                 | Call (lo, Lval (Var vi, NoOffset), al, loc) ->
                     (* direct call: [result] is the replacement instr
                      * list, [know_retval] is true iff we folded the
                      * whole call into a constant assignment *)
                     let result, know_retval =
                       try
                         let fd = C.fundec_of_varinfo c vi in
                         match fd.sbody.bstmts with
                           [] -> [], false (* no point in making this call *)
                         | hd :: _tl ->
                             if match hd.skind with
                                  Return (None, _loc) -> true
                                | _ -> false then
                               [], false (* no point in making this call *)
                             else if match hd.skind with
                                       Return (Some ret_exp, _loc) ->
                                         isConstant (S.evaluate !state ret_exp)
                                     | _ -> false then
                               match lo, hd.skind with
                                 Some lv, Return (Some ret_exp, _loc) ->
                                   let ret_exp', state' = S.assign !state lv ret_exp in
                                   state := state';
                                   [Set (lv, ret_exp', loc)], true (* replace call with constant *)
                               | None, Return (Some _ret_exp, _loc) ->
                                   failwith "partial_stmt: internal error"
                               | _, _ -> [], false (* never reached *)
                             else
                               (* a real call: symbolically execute the
                                * callee and return from it *)
                               let al', state' = S.call !state fd al in
                               handle_funcall stmt hd state';
                               let state'' = S.return state' fd in
                               state := state'';
                               [Call (lo, Lval (Var vi, NoOffset), al', loc)], false
                       with e ->
                         (* unknown callee: conservatively clobber
                          * whatever it might touch *)
                         let state'' = S.call_to_unknown_function !state in
                         let al' = Util.list_map (S.evaluate !state) al in
                         state := state'';
                         [Call (lo, Lval (Var vi, NoOffset), al', loc)], false
                     in
                     (* handle return value *)
                     begin
                       match lo, know_retval with
                         Some lv, false -> state := S.unassign !state lv
                       | Some lv, true -> ()
                       | None, _ -> ()
                     end;
                     result
                 | Call (lo, f, al, loc) ->
                     (* unresolved indirect call: assume the worst *)
                     let al' = Util.list_map (S.evaluate !state) al in
                     state := S.call_to_unknown_function !state;
                     begin
                       match lo with
                         Some lv -> state := S.unassign !state lv
                       | None -> ()
                     end;
                     [Call (lo, f, al', loc)]
                 | Asm _ ->
                     (* assumed not to affect constant folding *)
                     state := S.assembly !state i;
                     [i])
              il in
          stmt.skind <- Instr (List.flatten new_il);
          if debug then
            ignore (Pretty.printf "New Stmt is %a@!" d_stmt stmt);
          !state
      | If (e, b1, b2, loc) ->
          (* Answer whether block [b] contains labels that are alive.
           * "Live" labels are actually targets of [goto]-instructions
           * {b outside} of [b]. *)
          let has_live_labels b =
            let gather_labels acc stmt =
              List.fold_left (fun a x -> LabelSet.add x a) acc stmt.labels in
            let rec visit_block stmt_fun acc blk =
              List.fold_left
                (fun a x ->
                   let y = stmt_fun a x in
                   match x.skind with
                     Instr _
                   | Return _ | Goto _ | Break _ | Continue _ -> y
                   | If (_expr, then_block, else_block, _loc) ->
                       visit_block
                         stmt_fun
                         (visit_block stmt_fun y then_block)
                         else_block
                   | Switch (_expr, block, _stmt_list, _loc) ->
                       visit_block stmt_fun y block
                   | Loop (block, _, _loc, _opt_stmt1, _opt_stmt2) ->
                       visit_block stmt_fun y block
                   | Block block ->
                       visit_block stmt_fun y block
                   | TryFinally (block1, block2, _loc)
                   | TryExcept (block1, _, block2, _loc) ->
                       visit_block
                         stmt_fun
                         (visit_block stmt_fun y block1)
                         block2
                   | HipStmt _ -> y)
                acc
                blk.bstmts
            and gather_gotos acc stmt =
              match stmt.skind with
                Goto (stmt_ref, _loc) -> gather_labels acc !stmt_ref
              | _ -> acc
            and transitive_closure ini_stmt =
              (* every statement connected to [ini_stmt] via pred/succ
               * edges, in either direction *)
              let rec iter trace acc stmt =
                List.fold_left
                  (fun (a_trace, a_stmt) s ->
                     if List.mem s.sid a_trace then (a_trace, a_stmt)
                     else iter (s.sid :: a_trace) (s :: a_stmt) s)
                  (trace, acc) (stmt.preds @ stmt.succs) in
              List.sort (* sorting is unnecessary, but nice *)
                (fun a b -> a.sid - b.sid)
                (snd (iter [] [] ini_stmt)) in
            let block_labels = visit_block gather_labels LabelSet.empty b
            and block_gotos = visit_block gather_gotos LabelSet.empty b
            and all_gotos =
              List.fold_left
                (fun a x ->
                   match x.skind with
                     Goto (stmt_ref, _loc) -> gather_labels a !stmt_ref
                   | Block block -> visit_block gather_gotos a block
                   | _ -> a)
                LabelSet.empty
                (if b.bstmts = [] then []
                 else transitive_closure (List.hd b.bstmts))
            in
            not (LabelSet.is_empty
                   (LabelSet.inter
                      (LabelSet.diff all_gotos block_gotos)
                      block_labels)) in
          (* helper function to remove "if"-branch [b] *)
          let remove stmt b =
            changed_cfg := true;
            match b.bstmts with
              [] -> ()
            | hd :: _tl ->
                stmt.succs <- List.filter
                    (fun succ -> succ.sid <> hd.sid)
                    stmt.succs
          (* helper function to make a simplified "if"-statement block *)
          and mk_if_block b =
            let stmt = mkStmt (Block b) in
            stmt.sid <- new_sid ();
            Block {bstmts = [stmt]; battrs = []}
          (* logical falseness in C expressed in cilly's terms *)
          and is_false e = isZero e
          (* logical truth in C expressed in cilly's terms *)
          and is_true e =
            match getInteger e with
              Some x -> not (is_zero_cilint x)
            | None -> false in
          (* evaluate expression and eliminate branches *)
          let e' = S.evaluate state e in
          if debug then
            ignore (Pretty.printf "%a evals to %a\n" d_exp e d_exp e');
          if is_true e' then
            begin
              if has_live_labels b2 then
                begin
                  () (* leave block alone *)
                end
              else
                begin
                  if b2.bstmts = [] && b2.battrs = [] then
                    begin
                      stmt.skind <- Block b1;
                      match b1.bstmts with
                        [] -> ()
                      | hd :: _tl -> stmt.succs <- [hd]
                    end
                  else stmt.skind <- mk_if_block b1;
                  remove stmt b2
                end
            end
          else if is_false e' then
            begin
              if has_live_labels b1 then
                begin
                  () (* leave block alone *)
                end
              else
                begin
                  if b1.bstmts = [] && b1.battrs = [] then
                    begin
                      stmt.skind <- Block b2;
                      match b2.bstmts with
                        [] -> ()
                      | hd :: _tl -> stmt.succs <- [hd]
                    end
                  else stmt.skind <- mk_if_block b2;
                  remove stmt b1
                end
            end
          else stmt.skind <- If (e', b1, b2, loc);
          state
      | Return (Some e, loc) ->
          (* fold the returned expression *)
          let e' = S.evaluate state e in
          stmt.skind <- Return (Some e', loc);
          state
      | Block b ->
          if debug && List.length stmt.succs > 1 then
            ignore (Pretty.printf "(%a) has successors [%a]@!"
                      d_stmt stmt
                      (docList ~sep:(chr '@') (d_stmt ()))
                      stmt.succs);
          state
      | _ -> state
    in result

  (* This is the main conceptual entry-point for the partial
   * evaluation data-flow functor: iterate to a fixed point,
   * restarting whenever the ICFG changes shape. *)
  let dataflow (file : Cil.file) (* whole program *)
      (c : callNodeHash) (* control-flow graph *)
      (initial_state : S.t) (* any assumptions? *)
      (initial_stmt : Cil.stmt) = (* entry point *)
    begin
      (* count the total number of statements in the program *)
      let num_stmts = ref 1 in
      iterGlobals
        file
        (function
           GFun (fd, _) ->
             begin
               match fd.smaxstmtid with
                 Some i -> if i > !num_stmts then num_stmts := i
               | None -> ()
             end
         | _ -> ());
      if debug then
        Printf.printf "Dataflow: at most %d statements in program\n" !num_stmts;
      (* create a priority queue in which to store statements *)
      let worklist = Heap.create !num_stmts in
      let finished = ref false in
      let passes = ref 0 in
      (* add something to the work queue, recording the state that
       * [caller] sends to [callee] *)
      let enqueue caller callee state =
        let si = get_sinfo callee in
        Hashtbl.replace si.incoming_state caller.sid state;
        Heap.insert worklist si.priority callee
      in
      (* we will be finished when we complete a round of data-flow
       * that does not change the ICFG *)
      while not !finished do
        clear_sinfo ();
        incr passes;
        (* we must recompute the ordering and the predecessor
         * information because we may have changed it by removing
         * IFs *)
        if debug then
          Printf.printf "Dataflow: Topological Sorting & Reachability\n";
        toposort c initial_stmt;
        let initial_si = get_sinfo initial_stmt in
        Heap.insert worklist initial_si.priority initial_stmt;
        while not (Heap.is_empty worklist) do
          let p, s = Heap.extract_max worklist in
          if debug then
            begin
              ignore (Pretty.printf "Working on stmt %d (%a) %a@!"
                        s.sid
                        (docList ~sep:(chr ',' ++ break) (fun s -> dprintf "%d" s.sid))
                        s.succs
                        d_stmt s);
              flush stdout;
            end;
          let si = get_sinfo s in
          (* Even though this stmt is on the worklist, we may not have
           * to do anything with it if the join of all of the incoming
           * states is the same as the last state we used here. *)
          let must_recompute, incoming_state =
            begin
              let list_of_incoming_states = ref [] in
              Hashtbl.iter
                (fun true_pred_sid b ->
                   let this_pred_state =
                     try
                       Hashtbl.find si.incoming_state true_pred_sid
                     with _ ->
                       (* this occurs when we're evaluating a statement
                        * and we have not yet evaluated all of its
                        * predecessors (the first time we look at a loop
                        * head, say).  We must be conservative.  We'll
                        * come back later with better information (as we
                        * work toward the fix-point). *)
                       S.empty
                   in
                   if debug then
                     begin
                       Printf.printf " Incoming State from %d\n" true_pred_sid;
                       S.debug this_pred_state;
                       flush stdout
                     end;
                   list_of_incoming_states :=
                     this_pred_state :: !list_of_incoming_states)
                si.reachable_preds;
              let merged_incoming_state =
                if !list_of_incoming_states = [] then
                  (* this occurs when we're looking at the first
                   * statement in "main" -- it has no preds *)
                  initial_state
                else S.join !list_of_incoming_states
              in
              if debug then
                begin
                  Printf.printf " Merged State:\n";
                  S.debug merged_incoming_state;
                  flush stdout
                end;
              let must_recompute =
                match si.last_used_state with
                  None -> true
                | Some last -> not (S.equal merged_incoming_state last)
              in must_recompute, merged_incoming_state
            end
          in
          if must_recompute then
            begin
              si.last_used_state <- Some incoming_state;
              let outgoing_state =
                (* partially evaluate and optimize the statement *)
                partial_stmt c incoming_state s enqueue in
              let fresh_succs = s.succs in
              (* touch every successor so that we will reconsider it *)
              List.iter
                (fun succ ->
                   enqueue s succ outgoing_state)
                fresh_succs;
            end
          else
            begin
              if debug then Printf.printf "No need to recompute.\n"
            end
        done;
        if debug then
          Printf.printf "Dataflow: Pass %d Complete\n" !passes;
        if !changed_cfg then
          begin
            if debug then
              Printf.printf "Dataflow: Restarting (CFG Changed)\n";
            changed_cfg := false
          end
        else
          finished := true
      done;
      if debug then
        Printf.printf "Dataflow: Completed (%d passes)\n" !passes
    end

  (* Simplify [fd] (which must contain at least one statement) under
   * [assumptions], a list of (lvalue, expression) pairs treated as
   * assignments that occur before the program starts. *)
  let simplify file c fd (assumptions : (Cil.lval * Cil.exp) list) =
    let starting_state =
      List.fold_left
        (fun s (l, e) -> let _e', s' = S.assign s l e in s')
        S.empty
        assumptions
    in
    dataflow file c starting_state (List.hd fd.sbody.bstmts)
end
(* Facade tying the pieces together.  Currently our partial-eval
 * optimizer is built out of basically nothing: the (easy-)alias
 * analysis is fake, the call graph is cheap, and we're using Necula's
 * old basic-block symex.  Still, it works. *)
module PartialAlgorithm :
  sig
    (* false = EasyAlias, true = CIL's Ptranal alias analysis *)
    val use_ptranal_alias : bool ref
    val setup_alias_analysis : Cil.file -> unit
    val compute_callgraph : Cil.file -> callNodeHash
    val simplify :
      Cil.file -> callNodeHash -> Cil.fundec -> (Cil.lval * Cil.exp) list -> unit
  end
=
  struct
    (* Don't you love Functor application? *)
    module BasicCallGraph : CallGraph = EasyCallGraph (EasyAlias)
    module BasicSymex = NeculaFolding (EasyAlias)
    module BasicPartial =
      MakePartial (BasicSymex) (BasicCallGraph) (EasyAlias)
    module PtranalBasicCallGraph : CallGraph = EasyCallGraph (PtranalAlias)
    module PtranalBasicSymex = NeculaFolding (PtranalAlias)
    (* NOTE(review): this instantiation uses BasicSymex, so even in
     * ptranal mode the constant folder sees EasyAlias information and
     * PtranalBasicSymex above is never used -- confirm whether
     * MakePartial (PtranalBasicSymex) ... was intended. *)
    module PtranalBasicPartial =
      MakePartial (BasicSymex) (PtranalBasicCallGraph) (PtranalAlias)
    (* Select easy alias analysis or the fully-fledged one in module
     * Ptranal. *)
    let use_ptranal_alias = ref false
    let setup_alias_analysis f =
      if !use_ptranal_alias then PtranalAlias.setup f
      else EasyAlias.setup f
    let compute_callgraph f =
      if !use_ptranal_alias then PtranalBasicCallGraph.compute f
      else BasicCallGraph.compute f
    let simplify f c fd a =
      if !use_ptranal_alias then PtranalBasicPartial.simplify f c fd a
      else BasicPartial.simplify f c fd a
  end
(* A very easy entry point to partial evaluation / symbolic execution.
 *
 * [partial f root assumptions] sets up the alias analysis, builds the
 * call graph, and then simplifies [f] in place starting from the
 * function named [root] (usually "main").  [assumptions] is a list of
 * (lvalue, expression) pairs treated as assignments that occur before
 * the program starts.  Prints a warning when [root] cannot be found;
 * any exception raised during the analysis is reported and re-raised. *)
let partial (f : Cil.file) (root : string) (assumptions : (Cil.lval * Cil.exp) list) =
  try
    PartialAlgorithm.setup_alias_analysis f;
    let c = PartialAlgorithm.compute_callgraph f in
    try
      let found_root =
        foldGlobals f
          (fun found g ->
             if found then found
             else
               match g with
                 GFun (fd, _loc) when fd.svar.vname = root ->
                   PartialAlgorithm.simplify f c fd assumptions;
                   true
               | _ -> false)
          false
      in
      if not found_root then
        Printf.printf "Warning: root function \"%s\" not found\n" root
    with e ->
      Printf.printf "Error in DataFlow: %s\n" (Printexc.to_string e);
      raise e
  with e ->
    Printf.printf "Error in Partial: %s\n" (Printexc.to_string e);
    raise e
(* Visitor over globals that handles initialized "const" variables:
 * each single-expression initializer is constant-folded in place, and
 * the (lvalue, original initializer) pair is recorded so it can later
 * be fed to the partial evaluator as an assumption.  Compound
 * initializers and uninitialized constants are left untouched. *)
class globalConstVisitor =
  object
    inherit nopCilVisitor

    (* accumulated (lval, initializer) pairs, most recently seen first *)
    val mutable init_const : (lval * exp) list = []

    method vglob g =
      match g with
        GVar (vi, ii, loc) when hasAttribute "const" (typeAttrs vi.vtype) ->
          begin
            match ii.init with
              Some (SingleInit exp) ->
                (* record the assumption, then fold the initializer *)
                init_const <- (var vi, exp) :: init_const;
                ChangeTo [GVar (vi,
                                {init = Some (SingleInit (constFold true exp))},
                                loc)]
            | Some (CompoundInit (_typ, _ini_list)) -> SkipChildren
            | None -> SkipChildren (* uninitialized constant *)
          end
      | _ -> SkipChildren

    method get_initialized_constants = init_const
  end
(* Assume global constants are initialized and feed this information
 * into the partial evaluator, or treat constants as labels with
 * unknown values.  I am aware that we ought to distinguish between
 * plain constants and "volatile" constants. - cls *)
let initialized_constants = ref false
(* Name of the function where we start to simplify (the root of the
 * inter-procedural analysis). *)
let root_fun = ref "main"
(* Driver for the "partial" feature: validate the required companion
 * options, optionally gather initialized-global-constant assumptions,
 * and run the partial evaluator on [f] starting at [root_fun]. *)
let do_feature_partial f =
  if not !Cilutil.makeCFG then
    Errormsg.s (Errormsg.error
                  "--dopartial: you must also specify --domakeCFG\n");
  if !PartialAlgorithm.use_ptranal_alias
     && not !(Ptranal.feature.fd_enabled) then
    Errormsg.s (Errormsg.error
                  "--dopartial: you must also specify --doptranal\n");
  let assumptions =
    if !initialized_constants then begin
      (* collect (lval, exp) pairs for every initialized const global *)
      let gcv = new globalConstVisitor in
      visitCilFile (gcv :> Cil.cilVisitor) f;
      gcv#get_initialized_constants
    end
    else []
  in
  partial f !root_fun assumptions
(* CIL feature descriptor: registers the "partial" pass and its
 * command-line options with the driver. *)
let feature : featureDescr = {
  fd_name = "partial";
  fd_enabled = Cilutil.doPartial;
  fd_description = "interprocedural partial evaluation and constant folding";
  fd_extraopt = [
    (* toggle [initialized_constants] *)
    ("--partial_global_const",
     Arg.Set initialized_constants,
     " treat global constants as initialized");
    ("--partial_no_global_const",
     Arg.Clear initialized_constants,
     " treat global constants as unknown values");
    (* set [root_fun] *)
    ("--partial_root_function",
     Arg.String (fun name -> root_fun := name),
     (" where to start simplification"));
    (* toggle [PartialAlgorithm.use_ptranal_alias] *)
    ("--partial_use_easy_alias",
     Arg.Clear PartialAlgorithm.use_ptranal_alias,
     " to analyze pointers");
    ("--partial_use_ptranal_alias",
     Arg.Set PartialAlgorithm.use_ptranal_alias,
     " to analyze pointers (also see options of ptranal feature)")
  ];
  fd_doit = do_feature_partial;
  fd_post_check = false
}
(*
 * Copyright (c) 2001-2002,
 *  George C. Necula    <>
 *  Scott McPeak        <>
 *  Wes Weimer          <>
 *  Christoph L. Spiel  <>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * 1. Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 *
 * 3. The names of the contributors may not be used to endorse or promote
 * products derived from this software without specific prior written
 * permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
 * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
 * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
 * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 *)
| null | https://raw.githubusercontent.com/hipsleek/hipsleek/596f7fa7f67444c8309da2ca86ba4c47d376618c/cil/src/ext/partial.ml | ocaml | See copyright notice at the end of the file
****************************************************************************
* A generic signature for Alias Analysis information. Used to compute the
* call graph and do symbolic execution.
***************************************************************************
all values unknown
are these the same?
incorporate an assignment, return the RHS
lose all information about the given lvalue: assume an
* unknown external value has been assigned to it
incorporate an assumption
symbolic evaluation
join a bunch of states
we are calling the given function with the given actuals
we are returning from the given function
throw away information that may have been changed
****************************************************************************
* A generic signature for whole-progam call graphs.
***************************************************************************
the type of a call graph
file for which we compute the graph
known function call
unknown function call
END OF: module EasyCallGraph
use OCAML here
Rewrite an expression based on the current register file
Rewrite an expression and return the new expression along with an
* indication of whether it depends on memory
Get a list of all mappings that depend on memory
And remove them from the register file
Get a list of all mappings that depend on l
And remove them from the register file
****************************************************************************
* A transformation that gives each variable a unique identifier.
***************************************************************************
Sets of {c goto}-targets
statement info
basically a set of all of the stmt.sids that can really
* reach this statement
We construct sinfo nodes lazily: if you ask for one that isn't
* there, we build it.
already visited!
currently visiting
handle function calls in this basic block
calling external function
no point in making this call
no point in making this call
replace call with constant
never reached
handle return value
sorting is unnecessary, but nice
helper function to remove "if"-branch [b]
helper function to make a simplified "if"-statement block
evaluate expression and eliminate branches
leave block alone
leave block alone
This is the main conceptual entry-point for the partial
* evaluation data-flow functor.
whole program
control-flow graph
any assumptions?
entry point
count the total number of statements in the program
create a priority queue in which to store statements
add something to the work queue
we must recompute the ordering and the predecessor
* information because we may have changed it by removing
* IFs
partially evaluate and optimize the
* statement
touch every successor so that we will
* reconsider it
Don't you love Functor application?
Select easy alias analysis or the fully-fledged one in module
* Ptranal.
uninitialized constant
Assume global constants are initialized and feed this information
* into the partial evaluator or treat constants as labels with unknown
* values. I am aware that we ought to distinguish between plain
* constants and "volatile" constants. - cls
Name of function where we start to simplify | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Partial Evaluation & Constant Folding
*
* Soundness Assumptions :
* ( 1 ) Whole program analysis . You may call functions that are not defined
* ( e.g. , library functions ) but they may not call back .
* ( 2 ) An undefined function may not return the address of a function whose
* address is not already taken in the code I can see .
* ( 3 ) A function pointer call may only call a function that has its
* address visibly taken in the code I can see .
*
* ( More assumptions in the comments below )
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Partial Evaluation & Constant Folding
*
* Soundness Assumptions:
* (1) Whole program analysis. You may call functions that are not defined
* (e.g., library functions) but they may not call back.
* (2) An undefined function may not return the address of a function whose
* address is not already taken in the code I can see.
* (3) A function pointer call may only call a function that has its
* address visibly taken in the code I can see.
*
* (More assumptions in the comments below)
*****************************************************************************)
open Cil
open Cilint
open Pretty
(* A generic signature for alias-analysis information.  Used to compute
 * the call graph and during symbolic execution. *)
module type AliasInfo =
  sig
    (* whole-file preprocessing; must run before the queries below *)
    val setup : Cil.file -> unit
    (* conservative: may these two expressions evaluate to the same value? *)
    val can_have_the_same_value : Cil.exp -> Cil.exp -> bool
    (* every function a call through this function-pointer expression
     * might reach *)
    val resolve_function_pointer : Cil.exp -> Cil.fundec list
  end
(****************************************************************************
 * A generic signature for Symbolic Execution algorithms.  Such
 * algorithms are used below to perform constant folding and dead-code
 * elimination.  You write a "basic-block" symex algorithm, we'll make
 * it a whole-program CFG-pruner.
 *
 * NOTE(review): this signature appears to have been garbled in
 * transcription -- the abstract state type [t] and several operations
 * that MakePartial uses (e.g. [empty], [equal], [evaluate], [join],
 * [assembly], [assume]) are missing here; restore them from upstream
 * CIL's partial.ml.
 ****************************************************************************)
module type Symex =
  sig
    (* the type of a symex algorithm state object *)
    (* incorporate an assignment, returning the folded RHS *)
    val assign : t -> Cil.lval -> Cil.exp -> (Cil.exp * t)
    (* forget everything known about the given lvalue *)
    val unassign : t -> Cil.lval -> t
    (* handle ASM *)
    (* we are calling the given function with the given actuals *)
    val call : t -> Cil.fundec -> (Cil.exp list) -> (Cil.exp list * t)
    (* we are returning from the given function *)
    val return : t -> Cil.fundec -> t
    (* throw away information that may have been changed *)
    val call_to_unknown_function : t -> t
    val debug : t -> unit
  end
(* One node per defined function.  Intended semantics: [calls] lists
 * functions this one may call, [calledBy] lists functions that may
 * call this one. *)
type callGraphNode = {
  fd : Cil.fundec;
  mutable calledBy : Cil.fundec list;
  mutable calls : Cil.fundec list
}
(* the whole-program call graph, keyed by each function's varinfo *)
type callNodeHash = (Cil.varinfo, callGraphNode) Hashtbl.t

(* A generic signature for whole-program call graphs. *)
module type CallGraph =
  sig
    (* build the call graph for a whole file *)
    val compute : Cil.file -> callNodeHash
    val can_call : callNodeHash -> Cil.fundec -> Cil.fundec list
    val can_be_called_by : callNodeHash -> Cil.fundec -> Cil.fundec list
    val fundec_of_varinfo : callNodeHash -> Cil.varinfo -> Cil.fundec
  end
(* Abstract-state variant of the call-graph signature.
 * NOTE(review): garbled in transcription -- the abstract type [t], a
 * [compute] operation, and the closing [end] of this signature are
 * missing; restore them from upstream CIL's partial.ml. *)
module type CallGraph' =
  sig
    val can_call : t -> Cil.fundec -> Cil.fundec list
    val can_be_called_by : t -> Cil.fundec -> Cil.fundec list
    val fundec_of_varinfo : t -> Cil.varinfo -> Cil.fundec
(****************************************************************************
 * My cheap-o Alias Analysis.  Assume all expressions can have the same
 * value and any function with its address taken can be the target of
 * any function pointer.
 *
 * Soundness Assumptions:
 * (1) Someone must call "find_all_functions_with_address_taken" before
 *     the results are valid.  This is already done in the code below.
 ****************************************************************************)
module EasyAlias : AliasInfo =
  struct
    (* every function whose address is visibly taken in the file *)
    let all_functions_with_address_taken = ref []

    let find_all_functions_with_address_taken (f : Cil.file) =
      let note_global = function
          GFun (fd, _) when fd.svar.vaddrof ->
            all_functions_with_address_taken :=
              fd :: !all_functions_with_address_taken
        | _ -> ()
      in
      iterGlobals f note_global

    let setup f = find_all_functions_with_address_taken f

    (* maximally conservative: anything may alias anything *)
    let can_have_the_same_value _e1 _e2 = true

    (* any address-taken function is a possible target *)
    let resolve_function_pointer _e1 = !all_functions_with_address_taken
  end
(****************************************************************************
 * Alias analysis using CIL's Ptranal feature, falling back on the
 * cheap EasyAlias answers whenever Ptranal has no information.
 ****************************************************************************)
module PtranalAlias : AliasInfo =
  struct
    let setup file = EasyAlias.setup file

    (* conservative on Not_found: assume the expressions may alias *)
    let can_have_the_same_value lhs rhs =
      try Ptranal.may_alias lhs rhs with Not_found -> true

    (* fall back on "every address-taken function" when unresolved *)
    let resolve_function_pointer fptr =
      try Ptranal.resolve_funptr fptr
      with Not_found -> EasyAlias.resolve_function_pointer fptr
  end
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* My particular method for computing the Call Graph .
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* My particular method for computing the Call Graph.
****************************************************************************)
module EasyCallGraph = functor (A : AliasInfo) ->
struct
let cgCreateNode cg fundec =
let newnode = {
fd = fundec;
calledBy = [];
calls = []
} in
Hashtbl.add cg fundec.svar newnode
let cgFindNode cg svar = Hashtbl.find cg svar
(* Record a call edge from [caller] to [callee] (both varinfos): the
 * callee is appended to the caller's [calls] list and the caller to the
 * callee's [calledBy] list.  Calls to functions that have no node
 * (e.g. externals) are silently ignored. *)
let cgAddEdge cg caller callee =
  try
    let n1 = cgFindNode cg caller in
    let n2 = cgFindNode cg callee in
    n1.calls <- n2.fd :: n1.calls;
    (* BUG FIX: the reverse edge belongs on the *callee's* node.  The
     * previous code did [n1.calledBy <- n1.fd :: n1.calledBy], marking
     * the caller as called by itself and leaving every callee's
     * [calledBy] list empty, so [can_be_called_by] was always wrong. *)
    n2.calledBy <- n1.fd :: n2.calledBy
  with _ -> ()
(* Visitor that adds one call-graph edge per call instruction.  Direct
 * calls use the callee's varinfo; indirect calls (through a function
 * pointer) are resolved with the alias analysis [A]. *)
class callGraphVisitor cg =
object
inherit nopCilVisitor
(* varinfo of the function currently being visited, if any *)
val the_fun = ref None
method vinst i =
begin
match i with
Call (_, Lval (Var callee, NoOffset), _, _) ->
(* direct call *)
begin
match !the_fun with
None -> failwith "callGraphVisitor: call outside of any function"
| Some enclosing -> cgAddEdge cg enclosing callee
end
| Call (_, e, _, _) ->
(* indirect call: add an edge to every possible target of [e] *)
begin
match !the_fun with
None -> failwith "callGraphVisitor: call outside of any function"
| Some enclosing ->
List.iter
(fun possible_target_fd ->
cgAddEdge cg enclosing possible_target_fd.svar)
(A.resolve_function_pointer e)
end
| _ -> ()
end;
SkipChildren
method vfunc f =
(* remember the enclosing function for the instructions below it *)
the_fun := Some f.svar;
DoChildren
end
(* Build the call graph of [f]: one node per function definition, then
 * one edge per (alias-resolved) call site. *)
let compute (f : Cil.file) =
let cg = Hashtbl.create 511 in
iterGlobals
f
(function GFun (fd, _) -> cgCreateNode cg fd
| _ -> ());
visitCilFileSameGlobals (new callGraphVisitor cg) f;
cg
(* Functions that [fd] may call.  Raises Not_found if [fd] has no node. *)
let can_call cg fd =
let n = cgFindNode cg fd.svar in n.calls
(* Functions that may call [fd].  Raises Not_found if [fd] has no node. *)
let can_be_called_by cg fd =
let n = cgFindNode cg fd.svar in n.calledBy
(* Map a function's varinfo back to its fundec via the call graph. *)
let fundec_of_varinfo cg vi =
let n = cgFindNode cg vi in n.fd
(****************************************************************************
 * Necula's Constant Folding Strategem (re-written to be applicative)
 *
 * Soundness Assumptions:
 * (1) Inline assembly does not affect constant folding.
 ****************************************************************************)
module NeculaFolding = functor (A : AliasInfo) ->
struct
module IntMap = Map.Make (struct
type t = int
let compare x y = x - y
end)
(* Register file.  Maps identifiers of local variables to expressions.
 * We also remember if the expression depends on memory or depends on
 * variables that depend on memory *)
type reg = {
rvi : varinfo;
rval : exp;
rmem : bool
}
type t = reg IntMap.t
let empty = IntMap.empty
let dependsOnMem = ref false
(* Visitor that substitutes known-constant register values for local
 * variables, and records via the global [dependsOnMem] whether the
 * rewritten expression reads memory (directly or through a binding that
 * itself depends on memory). *)
class rewriteExpClass (regFile : t) =
object
inherit nopCilVisitor
method vexpr = function
Lval (Var v, NoOffset) ->
begin
try
let defined = IntMap.find v.vid regFile in
if defined.rmem then dependsOnMem := true;
match defined.rval with
Const x -> ChangeTo defined.rval
| _ -> DoChildren
with Not_found -> DoChildren
end
| Lval (Mem _, _) ->
(* any read through memory taints the whole expression *)
dependsOnMem := true;
DoChildren
| _ -> DoChildren
end
let rewriteExp r (e : exp) : exp * bool =
dependsOnMem := false;
let e' = constFold true (visitCilExpr (new rewriteExpClass r) e) in
e', !dependsOnMem
let eval r e =
let new_e, _depends = rewriteExp r e in
new_e
let setMemory regFile =
let depids = ref [] in
IntMap.iter (fun id v -> if v.rmem then depids := id :: !depids) regFile;
List.fold_left (fun acc id -> IntMap.remove id acc) regFile !depids
let setRegister regFile (v : varinfo) ((e, b) : exp * bool) =
IntMap.add v.vid {rvi = v; rval = e; rmem = b} regFile
let resetRegister regFile (id : int) =
IntMap.remove id regFile
class findLval lv contains =
object
inherit nopCilVisitor
method vlval l =
if Util.equals l lv then
begin
contains := true;
SkipChildren
end
else
DoChildren
end
let removeMappingsThatDependOn regFile l =
let depids = ref [] in
IntMap.iter
(fun id reg ->
let found = ref false in
ignore (visitCilExpr (new findLval l found) reg.rval);
if !found then depids := id :: !depids)
regFile;
List.fold_left (fun acc id -> IntMap.remove id acc) regFile !depids
(* Model the assignment [l := e] in register file [r]: rewrite [e] under
 * [r]; when [l] is a simple variable, bind the result to it and drop
 * every mapping whose value mentioned [l]; a store through memory
 * instead invalidates all memory-dependent mappings.  Returns the
 * rewritten RHS and the updated register file. *)
let assign r l e =
let newe, b = rewriteExp r e in
let r' =
match l with
Var v, NoOffset ->
let r'' = setRegister r v (newe, b) in
removeMappingsThatDependOn r'' l
| Mem _, _ -> setMemory r
| _ -> r
in newe, r'
let unassign r l =
let r' =
match l with
Var v, NoOffset ->
let r'' = resetRegister r v.vid in
removeMappingsThatDependOn r'' l
| Mem _, _ -> setMemory r
| _ -> r
in r'
(* no-op in Necula-world *)
let evaluate r e =
let newe, _ = rewriteExp r e in
newe
(* Join two symex states *)
(* Join two register files: keep only the bindings on which both sides
 * agree (same value and same memory-dependence flag); everything else
 * becomes unknown. *)
let join2 (r1 : t) (r2 : t) =
let keep = ref [] in
IntMap.iter
(fun id reg ->
try
let reg' = IntMap.find id r2 in
if Util.equals reg'.rval reg.rval && reg'.rmem = reg.rmem then
keep := (id, reg) :: !keep
with _ -> ())
r1;
List.fold_left
(fun acc (id, v) -> IntMap.add id v acc)
IntMap.empty
!keep
let join (lst : t list) =
match lst with
[] -> failwith "empty list"
| r :: tl ->
List.fold_left (fun (acc : t) (elt : t) -> join2 acc elt) r tl
let call r fd el =
let new_arg_list = ref [] in
let final_r =
List.fold_left2
(fun r vi e ->
let newe, r' = assign r (Var vi, NoOffset) e in
new_arg_list := newe :: !new_arg_list;
r')
r
fd.sformals el
in
(List.rev !new_arg_list), final_r
let return r fd =
let filter_out a_predicate a_map =
IntMap.fold
(fun k v a -> if a_predicate k v then a else IntMap.add k v a)
IntMap.empty
a_map
and formals_and_locals = fd.sformals @ fd.slocals
in
filter_out
(fun k v -> List.mem v.rvi formals_and_locals)
r
let call_to_unknown_function r =
setMemory r
let debug r =
IntMap.iter
(fun key reg ->
ignore (Pretty.printf "%s <- %a (%b)@!"
reg.rvi.vname d_exp reg.rval reg.rmem))
r
(* END OF: NeculaFolding *)
(****************************************************************************
 * A transformation to make every function call end its statement. So
 * { x=1; Foo(); y=1; }
 * becomes at least:
 * { { x=1; Foo(); }
 *   { y=1; } }
 * But probably more like:
 * { { x=1; } { Foo(); } { y=1; } }
 ****************************************************************************)
(* [contains_call il] holds iff the instruction list [il] contains at
 * least one function call. *)
let contains_call il =
  List.exists (function Call _ -> true | _ -> false) il
(* Visitor that rewrites any [Instr] statement containing a call into a
 * block of single-instruction statements, so that every call ends its
 * basic block (a soundness prerequisite of the dataflow engine). *)
class callBBVisitor =
object
inherit nopCilVisitor
method vstmt s =
match s.skind with
Instr il when contains_call il ->
begin
let list_of_stmts =
Util.list_map (fun one_inst -> mkStmtOneInstr one_inst) il in
let block = mkBlock list_of_stmts in
ChangeDoChildrenPost
(s, (fun _ -> s.skind <- Block block; s))
end
| _ -> DoChildren
(* nothing of interest below the statement level *)
method vvdec _ = SkipChildren
method vexpr _ = SkipChildren
method vlval _ = SkipChildren
method vtype _ = SkipChildren
end
(* Ensure every call instruction terminates its basic block by running
 * [callBBVisitor] over the whole file in place. *)
let calls_end_basic_blocks f =
  visitCilFileSameGlobals (new callBBVisitor) f
(* Visitor that renumbers every variable declaration with a globally
 * unique, monotonically increasing id (another soundness prerequisite
 * of the dataflow engine). *)
class vidVisitor = object
inherit nopCilVisitor
(* next id to hand out *)
val count = ref 0
method vvdec vi =
vi.vid <- !count;
incr count;
SkipChildren
end
(* Give every variable in the file a globally unique id, in place. *)
let globally_unique_vids f =
  visitCilFileSameGlobals (new vidVisitor) f
(****************************************************************************
 * The Weimeric Partial Evaluation Data-Flow Engine
 *
 * This functor performs flow-sensitive, context-insensitive whole-program
 * data-flow analysis with an eye toward partial evaluation and constant
 * folding.
 *
 * Toposort the whole-program inter-procedural CFG to compute
 * (1) the number of actual predecessors for each statement
 * (2) the global toposort ordering
 *
 * Perform standard data-flow analysis (joins, etc) on the ICFG until you
 * hit a fixed point. If this changed the structure of the ICFG (by
 * removing an IF-branch or an empty function call), redo the whole thing.
 *
 * Soundness Assumptions:
 * (1) A "call instruction" is the last thing in its statement.
 *     Use "calls_end_basic_blocks" to get this. cil/src/main.ml does
 *     this when you pass --makeCFG.
 * (2) All variables have globally unique identifiers.
 *     Use "globally_unique_vids" to get this. cil/src/main.ml does
 *     this when you pass --makeCFG.
 * (3) This may not be a strict soundness requirement, but I wrote this
 *     assuming that the input file has all switch/break/continue
 *     statements removed.
 ****************************************************************************)
module MakePartial =
functor (S : Symex) ->
functor (C : CallGraph) ->
functor (A : AliasInfo) ->
struct
let debug = false
module LabelSet =
Set.Make (struct
type t = label
let compare x y =
match x, y with
Label (name1, _, _), Label (name2, _, _) ->
String.compare name1 name2
| _, _ -> 0
end)
We keep this information about every statement . Ideally this should
* be put in the stmt itself , but CIL does n't give us space .
* be put in the stmt itself, but CIL doesn't give us space. *)
incoming_state : (int, S.t) Hashtbl.t;
mapping from stmt.sid to Symex.state
reachable_preds : (int, bool) Hashtbl.t;
mutable last_used_state : S.t option;
When we last did the Post ( ) of this statement , what
* incoming state did we use ? If our new incoming state is
* the same , we do n't have to do it again .
* incoming state did we use? If our new incoming state is
* the same, we don't have to do it again. *)
mutable priority : int;
Whole - program toposort priority . High means " do me first " .
* The first stmt in " main ( ) " will have the highest priority .
* The first stmt in "main()" will have the highest priority.
*)
}
let sinfo_ht = Hashtbl.create 511
let clear_sinfo () = Hashtbl.clear sinfo_ht
(* Fetch — or lazily create with empty tables and priority -1 — the
 * per-statement dataflow record for [stmt], keyed by its sid. *)
let get_sinfo stmt =
try
Hashtbl.find sinfo_ht stmt.sid
with _ ->
let new_sinfo = {incoming_state = Hashtbl.create 3;
reachable_preds = Hashtbl.create 3;
last_used_state = None;
priority = (-1)} in
Hashtbl.add sinfo_ht stmt.sid new_sinfo;
new_sinfo
(* Topological Sort is a DFS in which you assign a priority right as
 * you finished visiting the children. While we're there we compute
 * the actual number of unique predecessors for each statement. The CIL
 * information may be out of date because we keep changing the CFG by
 * removing IFs and whatnot. *)
let toposort_counter = ref 1
let add_edge s1 s2 =
let si2 = get_sinfo s2 in
Hashtbl.replace si2.reachable_preds s1.sid true
let rec toposort c stmt =
let si = get_sinfo stmt in
else
begin
begin
match stmt.skind with
Instr il ->
List.iter
(fun i ->
let fd_list =
match i with
Call (_, Lval (Var vi, NoOffset), _, _) ->
begin
try
let fd = C.fundec_of_varinfo c vi in
[fd]
end
| Call (_, e, _, _) ->
A.resolve_function_pointer e
| _ -> []
in
List.iter
(fun fd ->
if List.length fd.sbody.bstmts > 0 then
let fun_stmt = List.hd fd.sbody.bstmts in
add_edge stmt fun_stmt;
toposort c fun_stmt)
fd_list)
il
| _ -> ()
end;
List.iter
(fun succ -> add_edge stmt succ; toposort c succ)
stmt.succs;
si.priority <- !toposort_counter;
incr toposort_counter
end
we set this to true whenever we eliminate an IF or otherwise
* change the CFG
* change the CFG *)
let changed_cfg = ref false
Partially evaluate / constant fold a statement . Basically this
* just asks the Symex algorithm to evaluate the RHS in the current
* state and then compute a new state that incorporates the
* assignment .
*
* However , we have special handling for ifs and calls . If we can
* evaluate an if predicate to a constant , we remove the if .
*
* If we are going to make a call to a function with an empty body ,
* we remove the function call .
* just asks the Symex algorithm to evaluate the RHS in the current
* state and then compute a new state that incorporates the
* assignment.
*
* However, we have special handling for ifs and calls. If we can
* evaluate an if predicate to a constant, we remove the if.
*
* If we are going to make a call to a function with an empty body,
* we remove the function call. *)
let partial_stmt c state stmt handle_funcall =
let result =
match stmt.skind with
Instr il ->
let state = ref state in
let new_il =
Util.list_map
(fun i ->
if debug then
ignore (Pretty.printf "Instr %a@!" d_instr i);
match i with
Set (l, e, loc) ->
let e', state' = S.assign !state l e in
state := state';
[Set (l, e', loc)]
| Call (lo, Lval (Var vi, NoOffset), al, loc) ->
let result, know_retval =
try
let fd = C.fundec_of_varinfo c vi in
match fd.sbody.bstmts with
| hd :: _tl ->
if match hd.skind with
Return (None, _loc) -> true
| _ -> false then
else if match hd.skind with
Return (Some ret_exp, _loc) ->
isConstant (S.evaluate !state ret_exp)
| _ -> false then
match lo, hd.skind with
Some lv, Return (Some ret_exp, _loc) ->
let ret_exp', state' = S.assign !state lv ret_exp in
state := state';
| None, Return (Some _ret_exp, _loc) ->
failwith "partial_stmt: internal error"
else
let al', state' = S.call !state fd al in
handle_funcall stmt hd state';
let state'' = S.return state' fd in
state := state'';
[Call (lo, Lval (Var vi, NoOffset), al', loc)], false
with e ->
let state'' = S.call_to_unknown_function !state in
let al' = Util.list_map (S.evaluate !state) al in
state := state'';
[Call (lo, Lval (Var vi, NoOffset), al', loc)], false
in
begin
match lo, know_retval with
Some lv, false -> state := S.unassign !state lv
| Some lv, true -> ()
| None, _ -> ()
end;
result
| Call (lo, f, al, loc) ->
let al' = Util.list_map (S.evaluate !state) al in
state := S.call_to_unknown_function !state;
begin
match lo with
Some lv -> state := S.unassign !state lv
| None -> ()
end;
[Call (lo, f, al', loc)]
| Asm _ ->
state := S.assembly !state i;
[i])
il in
stmt.skind <- Instr (List.flatten new_il);
if debug then
ignore (Pretty.printf "New Stmt is %a@!" d_stmt stmt);
!state
| If (e, b1, b2, loc) ->
Answer whether block [ b ] contains labels that are
alive . " Live " labels are actually targets of
[ goto]-instructions { b outside } of [ b ] .
alive. "Live" labels are actually targets of
[goto]-instructions {b outside} of [b]. *)
let has_live_labels b =
let gather_labels acc stmt =
List.fold_left (fun a x -> LabelSet.add x a) acc stmt.labels in
let rec visit_block stmt_fun acc blk =
List.fold_left
(fun a x ->
let y = stmt_fun a x in
match x.skind with
Instr _
| Return _ | Goto _ | Break _ | Continue _ -> y
| If (_expr, then_block, else_block, _loc) ->
visit_block
stmt_fun
(visit_block stmt_fun y then_block)
else_block
| Switch (_expr, block, _stmt_list, _loc) ->
visit_block stmt_fun y block
| Loop (block, _, _loc, _opt_stmt1, _opt_stmt2) ->
visit_block stmt_fun y block
| Block block ->
visit_block stmt_fun y block
| TryFinally (block1, block2, _loc)
| TryExcept (block1, _, block2, _loc) ->
visit_block
stmt_fun
(visit_block stmt_fun y block1)
block2
| HipStmt _ -> y)
acc
blk.bstmts
and gather_gotos acc stmt =
match stmt.skind with
Goto (stmt_ref, _loc) -> gather_labels acc !stmt_ref
| _ -> acc
and transitive_closure ini_stmt =
let rec iter trace acc stmt =
List.fold_left
(fun (a_trace, a_stmt) s ->
if List.mem s.sid a_trace then (a_trace, a_stmt)
else iter (s.sid :: a_trace) (s :: a_stmt) s)
(trace, acc) (stmt.preds @ stmt.succs) in
(fun a b -> a.sid - b.sid)
(snd (iter [] [] ini_stmt)) in
let block_labels = visit_block gather_labels LabelSet.empty b
and block_gotos = visit_block gather_gotos LabelSet.empty b
and all_gotos =
List.fold_left
(fun a x ->
match x.skind with
Goto (stmt_ref, _loc) -> gather_labels a !stmt_ref
| Block block -> visit_block gather_gotos a block
| _ -> a)
LabelSet.empty
(if b.bstmts = [] then []
else transitive_closure (List.hd b.bstmts))
in
not (LabelSet.is_empty
(LabelSet.inter
(LabelSet.diff all_gotos block_gotos)
block_labels)) in
let remove stmt b =
changed_cfg := true;
match b.bstmts with
[] -> ()
| hd :: _tl ->
stmt.succs <- List.filter
(fun succ -> succ.sid <> hd.sid)
stmt.succs
and mk_if_block b =
let stmt = mkStmt (Block b) in
stmt.sid <- new_sid ();
Block {bstmts = [stmt]; battrs = []}
logical falseness in C expressed in cilly 's terms
and is_false e = isZero e
logical truth in C expressed in cilly 's terms
and is_true e =
match getInteger e with
Some x -> not (is_zero_cilint x)
| None -> false in
let e' = S.evaluate state e in
if debug then
ignore (Pretty.printf "%a evals to %a\n" d_exp e d_exp e');
if is_true e' then
begin
if has_live_labels b2 then
begin
end
else
begin
if b2.bstmts = [] && b2.battrs = [] then
begin
stmt.skind <- Block b1;
match b1.bstmts with
[] -> ()
| hd :: _tl -> stmt.succs <- [hd]
end
else stmt.skind <- mk_if_block b1;
remove stmt b2
end
end
else if is_false e' then
begin
if has_live_labels b1 then
begin
end
else
begin
if b1.bstmts = [] && b1.battrs = [] then
begin
stmt.skind <- Block b2;
match b2.bstmts with
[] -> ()
| hd :: _tl -> stmt.succs <- [hd]
end
else stmt.skind <- mk_if_block b2;
remove stmt b1
end
end
else stmt.skind <- If (e', b1, b2, loc);
state
| Return (Some e, loc) ->
let e' = S.evaluate state e in
stmt.skind <- Return (Some e', loc);
state
| Block b ->
if debug && List.length stmt.succs > 1 then
ignore (Pretty.printf "(%a) has successors [%a]@!"
d_stmt stmt
(docList ~sep:(chr '@') (d_stmt ()))
stmt.succs);
state
| _ -> state
in result
begin
let num_stmts = ref 1 in
iterGlobals
file
(function
GFun (fd, _) ->
begin
match fd.smaxstmtid with
Some i -> if i > !num_stmts then num_stmts := i
| None -> ()
end
| _ -> ());
if debug then
Printf.printf "Dataflow: at most %d statements in program\n" !num_stmts;
let worklist = Heap.create !num_stmts in
let finished = ref false in
let passes = ref 0 in
let enqueue caller callee state =
let si = get_sinfo callee in
Hashtbl.replace si.incoming_state caller.sid state;
Heap.insert worklist si.priority callee
in
we will be finished when we complete a round of
* data - flow that does not change the ICFG
* data-flow that does not change the ICFG *)
while not !finished do
clear_sinfo ();
incr passes;
if debug then
Printf.printf "Dataflow: Topological Sorting & Reachability\n";
toposort c initial_stmt;
let initial_si = get_sinfo initial_stmt in
Heap.insert worklist initial_si.priority initial_stmt;
while not (Heap.is_empty worklist) do
let p, s = Heap.extract_max worklist in
if debug then
begin
ignore (Pretty.printf "Working on stmt %d (%a) %a@!"
s.sid
(docList ~sep:(chr ',' ++ break) (fun s -> dprintf "%d" s.sid))
s.succs
d_stmt s);
flush stdout;
end;
let si = get_sinfo s in
Even though this stmt is on the worklist , we
* may not have to do anything with it if the join
* of all of the incoming states is the same as the
* last state we used here .
* may not have to do anything with it if the join
* of all of the incoming states is the same as the
* last state we used here. *)
let must_recompute, incoming_state =
begin
let list_of_incoming_states = ref [] in
Hashtbl.iter
(fun true_pred_sid b ->
let this_pred_state =
try
Hashtbl.find si.incoming_state true_pred_sid
with _ ->
this occurs when we 're evaluating a statement and we
* have not yet evaluated all of its predecessors ( the
* first time we look at a loop head , say ) . We must be
* conservative . We 'll come back later with better
* information ( as we work toward the fix - point ) .
* have not yet evaluated all of its predecessors (the
* first time we look at a loop head, say). We must be
* conservative. We'll come back later with better
* information (as we work toward the fix-point). *)
S.empty
in
if debug then
begin
Printf.printf " Incoming State from %d\n" true_pred_sid;
S.debug this_pred_state;
flush stdout
end;
list_of_incoming_states :=
this_pred_state :: !list_of_incoming_states)
si.reachable_preds;
let merged_incoming_state =
if !list_of_incoming_states = [] then
this occurs when we 're looking at the
* first statement in " main " -- it has no
*
* first statement in "main" -- it has no
* preds *)
initial_state
else S.join !list_of_incoming_states
in
if debug then
begin
Printf.printf " Merged State:\n";
S.debug merged_incoming_state;
flush stdout
end;
let must_recompute =
match si.last_used_state with
None -> true
| Some last -> not (S.equal merged_incoming_state last)
in must_recompute, merged_incoming_state
end
in
if must_recompute then
begin
si.last_used_state <- Some incoming_state;
let outgoing_state =
partial_stmt c incoming_state s enqueue in
let fresh_succs = s.succs in
List.iter
(fun succ ->
enqueue s succ outgoing_state)
fresh_succs;
end
else
begin
if debug then Printf.printf "No need to recompute.\n"
end
done;
if debug then
Printf.printf "Dataflow: Pass %d Complete\n" !passes;
if !changed_cfg then
begin
if debug then
Printf.printf "Dataflow: Restarting (CFG Changed)\n";
changed_cfg := false
end
else
finished := true
done;
if debug then
Printf.printf "Dataflow: Completed (%d passes)\n" !passes
end
let simplify file c fd (assumptions : (Cil.lval * Cil.exp) list) =
let starting_state =
List.fold_left
(fun s (l, e) -> let _e', s' = S.assign s l e in s')
S.empty
assumptions
in
dataflow file c starting_state (List.hd fd.sbody.bstmts)
end
module PartialAlgorithm :
sig
val use_ptranal_alias : bool ref
val setup_alias_analysis : Cil.file -> unit
val compute_callgraph : Cil.file -> callNodeHash
val simplify :
Cil.file -> callNodeHash -> Cil.fundec -> (Cil.lval * Cil.exp) list -> unit
end
=
struct
(* Currently our partial-eval optimizer is built out of basically
 * nothing. The (easy-)alias analysis is fake, the call graph is
 * cheap, and we're using George's old basic-block symex. Still, it
 * works. *)
module BasicCallGraph : CallGraph = EasyCallGraph (EasyAlias)
module BasicSymex = NeculaFolding (EasyAlias)
module BasicPartial =
MakePartial (BasicSymex) (BasicCallGraph) (EasyAlias)
module PtranalBasicCallGraph : CallGraph = EasyCallGraph (PtranalAlias)
module PtranalBasicSymex = NeculaFolding (PtranalAlias)
module PtranalBasicPartial =
MakePartial (BasicSymex) (PtranalBasicCallGraph) (PtranalAlias)
let use_ptranal_alias = ref false
let setup_alias_analysis f =
if !use_ptranal_alias then PtranalAlias.setup f
else EasyAlias.setup f
let compute_callgraph f =
if !use_ptranal_alias then PtranalBasicCallGraph.compute f
else BasicCallGraph.compute f
let simplify f c fd a =
if !use_ptranal_alias then PtranalBasicPartial.simplify f c fd a
else BasicPartial.simplify f c fd a
end
(* A very easy entry-point to partial evaluation/symbolic execution.
 * You pass the Cil file and a list of assumptions (lvalue, exp pairs
 * that should be treated as assignments that occur before the program
 * starts).
 *
 * We partially evaluate and optimize starting from root (usually
 * "main"). The Cil.file is modified in place. *)
(* Run the partial evaluator over [f], starting from the function named
 * [root] and seeding the symbolic state with [assumptions] (treated as
 * assignments performed before the program starts).  The file is
 * modified in place.  Warns if [root] is absent; errors from the
 * dataflow engine or the setup phase are reported and re-raised. *)
let partial (f : Cil.file) (root : string) (assumptions : (Cil.lval * Cil.exp) list) =
try
PartialAlgorithm.setup_alias_analysis f;
let c = PartialAlgorithm.compute_callgraph f in
try
(* [foldGlobals] returns true iff the root function was found and
 * simplified. *)
if not (foldGlobals f (fun a x ->
a ||
match x with
GFun (fd, _loc) ->
if fd.svar.vname = root then
begin
PartialAlgorithm.simplify
f c fd assumptions;
true
end
else false
| _ -> false)
false) then
Printf.printf "Warning: root function \"%s\" not found\n" root
with e ->
begin
Printf.printf "Error in DataFlow: %s\n" (Printexc.to_string e);
raise e
end
with e ->
begin
Printf.printf "Error in Partial: %s\n" (Printexc.to_string e);
raise e
end
(* Visitor that collects [const]-qualified globals with simple (scalar)
 * initializers.  Each such global contributes an (lval, exp) pair —
 * retrievable via [get_initialized_constants] — that the partial
 * evaluator treats as a pre-program assignment; the initializer itself
 * is constant-folded in place.  Compound initializers and constants
 * without an initializer are left untouched. *)
class globalConstVisitor =
object
  inherit nopCilVisitor
  (* accumulated (lvalue, initializer) assumptions *)
  val mutable init_const : (lval * exp) list = []
  method vglob g =
    let is_const vi = hasAttribute "const" (typeAttrs vi.vtype) in
    match g with
      GVar (vi, ii, loc) ->
        if is_const vi then
          match ii.init with
            Some init ->
              begin
                match init with
                  SingleInit exp ->
                    begin
                      init_const <- (var vi, exp) :: init_const;
                      ChangeTo [GVar (vi,
                                      {init = Some (SingleInit (constFold true exp))},
                                      loc)]
                    end
                | CompoundInit (_typ, _ini_list) -> SkipChildren
              end
          (* BUG FIX: a const global may carry no initializer at all;
           * the original match omitted this case and would raise
           * [Match_failure] at runtime on such a declaration. *)
          | None -> SkipChildren
        else SkipChildren
    | _ -> SkipChildren
  method get_initialized_constants = init_const
end
let initialized_constants = ref false
let root_fun = ref "main"
(* Driver for the "partial" feature: verify option prerequisites
 * (--domakeCFG always; --doptranal when Ptranal aliasing is selected),
 * optionally harvest initialized const globals as assumptions, then run
 * [partial] starting from [root_fun]. *)
let do_feature_partial f =
if not !Cilutil.makeCFG then
Errormsg.s (Errormsg.error
"--dopartial: you must also specify --domakeCFG\n");
if not !(Ptranal.feature.fd_enabled) &&
!PartialAlgorithm.use_ptranal_alias then
Errormsg.s (Errormsg.error
"--dopartial: you must also specify --doptranal\n");
partial
f
!root_fun
(if !initialized_constants then
begin
(* collect (lval, exp) assumptions from const globals *)
let gcv = new globalConstVisitor in
visitCilFile (gcv :> Cil.cilVisitor) f;
gcv#get_initialized_constants
end
else [])
(* CIL feature descriptor wiring the partial evaluator into the driver,
 * together with its command-line options. *)
let feature : featureDescr = {
fd_name = "partial";
fd_enabled = Cilutil.doPartial;
fd_description = "interprocedural partial evaluation and constant folding";
fd_extraopt = [
("--partial_global_const",
Arg.Set initialized_constants,
" treat global constants as initialized");
("--partial_no_global_const",
Arg.Clear initialized_constants,
" treat global constants as unknown values");
("--partial_root_function",
Arg.String (fun name -> root_fun := name),
(" where to start simplification"));
("--partial_use_easy_alias",
Arg.Clear PartialAlgorithm.use_ptranal_alias,
" to analyze pointers");
("--partial_use_ptranal_alias",
Arg.Set PartialAlgorithm.use_ptranal_alias,
" to analyze pointers (also see options of ptranal feature)")
];
fd_doit = do_feature_partial;
fd_post_check = false
}
(*
 * Copyright (c) 2001-2002,
 *  George C. Necula <>
 *  Scott McPeak <>
 *  Wes Weimer <>
 *  Christoph L. Spiel <>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * 3. The names of the contributors may not be used to endorse or promote
 *    products derived from this software without specific prior written
 *    permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
 * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
 * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
 * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 *)
|
ef60716a945282e975052fad2487f34b11e14115d5f52f5bd3911c989772e8f1 | iskandr/parakeet-retired | toplevel.ml | (*===----------------------------------------------------------------------===
* Top-Level parsing and JIT Driver
*===----------------------------------------------------------------------===*)
open Llvm
open Llvm_executionengine
(* top ::= definition | external | expression | ';' *)
(* Kaleidoscope REPL driver: repeatedly parse one top-level item from
 * [stream], codegen it (running [the_fpm] optimization passes), and —
 * for bare expressions — JIT-compile with [the_execution_engine],
 * evaluate, and print the resulting double.  Parse/codegen errors are
 * reported and recovery proceeds by skipping one token. *)
let rec main_loop the_fpm the_execution_engine stream =
match Stream.peek stream with
| None -> ()
(* ignore top-level semicolons. *)
| Some (Token.Kwd ';') ->
Stream.junk stream;
main_loop the_fpm the_execution_engine stream
| Some token ->
begin
try match token with
| Token.Def ->
let e = Parser.parse_definition stream in
print_endline "parsed a function definition.";
dump_value (Codegen.codegen_func the_fpm e);
| Token.Extern ->
let e = Parser.parse_extern stream in
print_endline "parsed an extern.";
dump_value (Codegen.codegen_proto e);
| _ ->
(* Evaluate a top-level expression into an anonymous function. *)
let e = Parser.parse_toplevel stream in
print_endline "parsed a top-level expr";
let the_function = Codegen.codegen_func the_fpm e in
dump_value the_function;
(* JIT the function, returning a function pointer. *)
let result = ExecutionEngine.run_function the_function [||]
the_execution_engine in
print_string "Evaluated to ";
print_float (GenericValue.as_float Codegen.double_type result);
print_newline ();
with Stream.Error s | Codegen.Error s ->
(* Skip token for error recovery. *)
Stream.junk stream;
print_endline s;
end;
print_string "ready> "; flush stdout;
main_loop the_fpm the_execution_engine stream
* Top-Level parsing and JIT Driver
*===----------------------------------------------------------------------===
top ::= definition | external | expression | ';'
ignore top-level semicolons.
Evaluate a top-level expression into an anonymous function.
JIT the function, returning a function pointer.
Skip token for error recovery. |
open Llvm
open Llvm_executionengine
let rec main_loop the_fpm the_execution_engine stream =
match Stream.peek stream with
| None -> ()
| Some (Token.Kwd ';') ->
Stream.junk stream;
main_loop the_fpm the_execution_engine stream
| Some token ->
begin
try match token with
| Token.Def ->
let e = Parser.parse_definition stream in
print_endline "parsed a function definition.";
dump_value (Codegen.codegen_func the_fpm e);
| Token.Extern ->
let e = Parser.parse_extern stream in
print_endline "parsed an extern.";
dump_value (Codegen.codegen_proto e);
| _ ->
let e = Parser.parse_toplevel stream in
print_endline "parsed a top-level expr";
let the_function = Codegen.codegen_func the_fpm e in
dump_value the_function;
let result = ExecutionEngine.run_function the_function [||]
the_execution_engine in
print_string "Evaluated to ";
print_float (GenericValue.as_float Codegen.double_type result);
print_newline ();
with Stream.Error s | Codegen.Error s ->
Stream.junk stream;
print_endline s;
end;
print_string "ready> "; flush stdout;
main_loop the_fpm the_execution_engine stream |
07d4b116ebe718523b285b4836a420bd74acd1032a6a4ca62a6db0b4690e84cc | ucsd-progsys/dsolve | quicksort.ml | fun('a)
qs cmp xs =
case xs of
[] => []
| x :: xs' => par cmp (x, [], [], xs')
withtype ('a * 'a -> bool) -> {n:nat} <n,0> => 'a list(n) -> 'a list(n)
and('a)
par cmp (x, l, r, xs) =
case xs of
[] => qs cmp l @ (x :: qs cmp r)
| x' :: xs' => if cmp(x', x) then par cmp (x, x' :: l, r, xs')
else par cmp (x, l, x' :: r, xs')
withtype ('a * 'a -> bool) ->
{p:nat,q:nat,r:nat} <p+q+r,r+1> =>
'a * 'a list(p) * 'a list(q) * 'a list(r) -> 'a list(p+q+r+1)
| null | https://raw.githubusercontent.com/ucsd-progsys/dsolve/bfbbb8ed9bbf352d74561e9f9127ab07b7882c0c/tests/POPL2008/xiog/DMLex/quicksort.ml | ocaml | fun('a)
qs cmp xs =
case xs of
[] => []
| x :: xs' => par cmp (x, [], [], xs')
withtype ('a * 'a -> bool) -> {n:nat} <n,0> => 'a list(n) -> 'a list(n)
and('a)
par cmp (x, l, r, xs) =
case xs of
[] => qs cmp l @ (x :: qs cmp r)
| x' :: xs' => if cmp(x', x) then par cmp (x, x' :: l, r, xs')
else par cmp (x, l, x' :: r, xs')
withtype ('a * 'a -> bool) ->
{p:nat,q:nat,r:nat} <p+q+r,r+1> =>
'a * 'a list(p) * 'a list(q) * 'a list(r) -> 'a list(p+q+r+1)
| |
53f320b7a0a6124a36d4cb6eec46882b98e142da28b35994e6e09fe424f29ad0 | RJMetrics/sweet-liberty-example | dogs_test.clj | (ns com.rjmetrics.dogs-service.dogs-test
(:require [midje.sweet :refer :all]
[com.rjmetrics.dogs-service.core :as sc]
[ring.mock.request :as mock-req]
[clojure.data.json :as json]
[com.rjmetrics.dogs-service.db :as db]))
(defn to-json-input-stream
"Take a vector or map, write it using json/write-str, and put it into a
ByteArrayInputStream. Necessary because a compojure handler expects the
request body in this form."
[items]
(java.io.ByteArrayInputStream. (.getBytes (json/write-str items))))
(defn from-json-key
"Converts a JSON key from a string to a keyword, replacing any underscores
with a dash. Does not affect capitalization of the string.
(from-json-key \"My_Key\")
=> :My-Key"
[k] (-> k (clojure.string/replace \_ \-) keyword))
(defn from-json
"Parses a JSON string while converting any keys using the from-json-key
function."
[s] (json/read-str s :key-fn from-json-key))
(defn mk-request
[method url]
(sc/handler (mock-req/request method url)))
(with-state-changes [(before :facts (db/initialize-db sc/db-spec
sc/resource-config
sc/init-data))]
(fact-group "about GET /dogs"
(fact "collection route return status 200"
(mk-request :get "/dogs")
=> (contains {:status 200}))
(fact "collection route returns all values"
(-> (mk-request :get "/dogs") :body from-json)
=> [{:breed "poodle", :id 1, :name "Fido"}
{:breed "corgi", :id 2, :name "Lacy"}
{:breed "chihuahua", :id 3, :name "Rex"}
{:breed "dalmation", :id 4, :name "Spot"}
{:breed "chihuahua", :id 5, :name "Taco"}
{:breed "corgi", :id 6, :name "Brody"}]
)
(fact "collection route returns filtered results"
(-> (mk-request :get "/dogs?breed=chihuahua") :body from-json)
=> [{:breed "chihuahua", :id 3, :name "Rex"}
{:breed "chihuahua", :id 5, :name "Taco"}]))
(fact-group "about GET /dogs/:id"
(fact "successful GET returns status 200"
(mk-request :get "/dogs/1")
=> (contains {:status 200}))
(fact "resource returned successfully"
(-> (mk-request :get "/dogs/1") :body from-json)
=> {:breed "poodle", :id 1, :name "Fido"})
(fact "requesting non-existant resource returns status 404"
(:status (mk-request :get "/dogs/100"))
=> 404))
(fact-group "about PUT /dogs/:id"
(fact "resource returned successfully"
(let [result (-> (mock-req/request :put "/dogs/2")
(assoc :body (to-json-input-stream
{:name "Rocco"}))
(mock-req/content-type "application/json")
sc/handler
:body
from-json)]
result
=> {:id 2 :breed "corgi" :name "Rocco"}))
(fact "a PUT request to a non-existant resource returns status 501 Not Implemented"
(let [result (-> (mock-req/request :put "/dogs/1000")
(assoc :body (to-json-input-stream
{:name "Rocco"}))
(mock-req/content-type "application/json")
sc/handler
:status)]
result
=> 501)))
(fact-group "about POST /dogs"
(fact "create a new resource with POST"
(:status (mk-request :get "/dogs/7"))
=> 404 ;; resource does not exist
(-> (mock-req/request :post "/dogs")
(assoc :body (to-json-input-stream
{:name "Jojo" :breed "poodle"}))
(mock-req/content-type "application/json")
sc/handler
:body
from-json)
=> {:id 7 :breed "poodle" :name "Jojo"} ;; correct response
(-> (mk-request :get "/dogs/7")
:body
from-json)
=> {:breed "poodle", :id 7, :name "Jojo"})
(fact "a POST request to an existing resource returns status 405 Method Not Allowed"
(-> (mock-req/request :post "/dogs/2")
(assoc :body (to-json-input-stream
{:name "Rocco"}))
(mock-req/content-type "application/json")
sc/handler
:status)
returning status 405 " Method Not Allowed " ,
instead of 404 , would be more correct here , but I did n't define the route to
;; handle that, for the sake of brevity.
| null | https://raw.githubusercontent.com/RJMetrics/sweet-liberty-example/60214db9eb49b6119ec4a16d877b7b297458408c/test/com/rjmetrics/dogs_service/dogs_test.clj | clojure | resource does not exist
correct response
handle that, for the sake of brevity. | (ns com.rjmetrics.dogs-service.dogs-test
(:require [midje.sweet :refer :all]
[com.rjmetrics.dogs-service.core :as sc]
[ring.mock.request :as mock-req]
[clojure.data.json :as json]
[com.rjmetrics.dogs-service.db :as db]))
(defn to-json-input-stream
"Take a vector or map, write it using json/write-str, and put it into a
ByteArrayInputStream. Necessary because a compojure handler expects the
request body in this form."
[items]
(java.io.ByteArrayInputStream. (.getBytes (json/write-str items))))
(defn from-json-key
"Converts a JSON key from a string to a keyword, replacing any underscores
with a dash. Does not affect capitalization of the string.
(from-json-key \"My_Key\")
=> :My-Key"
[k] (-> k (clojure.string/replace \_ \-) keyword))
(defn from-json
"Parses a JSON string while converting any keys using the from-json-key
function."
[s] (json/read-str s :key-fn from-json-key))
(defn mk-request
[method url]
(sc/handler (mock-req/request method url)))
(with-state-changes [(before :facts (db/initialize-db sc/db-spec
sc/resource-config
sc/init-data))]
(fact-group "about GET /dogs"
(fact "collection route return status 200"
(mk-request :get "/dogs")
=> (contains {:status 200}))
(fact "collection route returns all values"
(-> (mk-request :get "/dogs") :body from-json)
=> [{:breed "poodle", :id 1, :name "Fido"}
{:breed "corgi", :id 2, :name "Lacy"}
{:breed "chihuahua", :id 3, :name "Rex"}
{:breed "dalmation", :id 4, :name "Spot"}
{:breed "chihuahua", :id 5, :name "Taco"}
{:breed "corgi", :id 6, :name "Brody"}]
)
(fact "collection route returns filtered results"
(-> (mk-request :get "/dogs?breed=chihuahua") :body from-json)
=> [{:breed "chihuahua", :id 3, :name "Rex"}
{:breed "chihuahua", :id 5, :name "Taco"}]))
(fact-group "about GET /dogs/:id"
(fact "successful GET returns status 200"
(mk-request :get "/dogs/1")
=> (contains {:status 200}))
(fact "resource returned successfully"
(-> (mk-request :get "/dogs/1") :body from-json)
=> {:breed "poodle", :id 1, :name "Fido"})
(fact "requesting non-existant resource returns status 404"
(:status (mk-request :get "/dogs/100"))
=> 404))
(fact-group "about PUT /dogs/:id"
(fact "resource returned successfully"
(let [result (-> (mock-req/request :put "/dogs/2")
(assoc :body (to-json-input-stream
{:name "Rocco"}))
(mock-req/content-type "application/json")
sc/handler
:body
from-json)]
result
=> {:id 2 :breed "corgi" :name "Rocco"}))
(fact "a PUT request to a non-existant resource returns status 501 Not Implemented"
(let [result (-> (mock-req/request :put "/dogs/1000")
(assoc :body (to-json-input-stream
{:name "Rocco"}))
(mock-req/content-type "application/json")
sc/handler
:status)]
result
=> 501)))
(fact-group "about POST /dogs"
(fact "create a new resource with POST"
(:status (mk-request :get "/dogs/7"))
(-> (mock-req/request :post "/dogs")
(assoc :body (to-json-input-stream
{:name "Jojo" :breed "poodle"}))
(mock-req/content-type "application/json")
sc/handler
:body
from-json)
(-> (mk-request :get "/dogs/7")
:body
from-json)
=> {:breed "poodle", :id 7, :name "Jojo"})
(fact "a POST request to an existing resource returns status 405 Method Not Allowed"
(-> (mock-req/request :post "/dogs/2")
(assoc :body (to-json-input-stream
{:name "Rocco"}))
(mock-req/content-type "application/json")
sc/handler
:status)
returning status 405 " Method Not Allowed " ,
instead of 404 , would be more correct here , but I did n't define the route to
|
1d66efc427fb9b8ae50d0a335c75b40a0a86b6d6917a67241d50d660991aa808 | vmchale/kempe | Backend.hs | module Backend ( backendTests
) where
import Control.DeepSeq (deepseq)
import qualified Kempe.Asm.Arm.ControlFlow as Arm
import Kempe.Asm.Liveness
import qualified Kempe.Asm.X86.ControlFlow as X86
import Kempe.Inline
import Kempe.Module
import Kempe.Monomorphize
import Kempe.Pipeline
import Kempe.Shuttle
import Prettyprinter (pretty)
import Test.Tasty
import Test.Tasty.HUnit
import Type
backendTests :: TestTree
backendTests =
testGroup "Backend-ish"
[ monoTest "test/data/ty.kmp"
, inlineTest "lib/numbertheory.kmp"
, inlineTest "examples/factorial.kmp"
, pipelineWorks "test/data/ty.kmp"
, pipelineWorks "examples/splitmix.kmp"
, pipelineWorks "examples/factorial.kmp"
, pipelineWorks "test/data/mutual.kmp"
, pipelineWorks "test/data/multiConstruct.kmp"
, pipelineWorks "test/data/mod.kmp"
, irNoYeet "test/data/export.kmp"
, irNoYeet "examples/splitmix.kmp"
, irNoYeet "examples/factorial.kmp"
, irNoYeet "test/data/maybeC.kmp"
, irNoYeet "examples/os.kmp"
, x86NoYeet "examples/factorial.kmp"
, x86NoYeet "examples/splitmix.kmp"
, armNoYeet "examples/factorial.kmp"
, controlFlowGraph "examples/factorial.kmp"
, controlFlowGraph "examples/splitmix.kmp"
, controlFlowGraphArm "lib/gaussian.kmp"
, liveness "examples/factorial.kmp"
, liveness "examples/splitmix.kmp"
, livenessArm "lib/gaussian.kmp"
, codegen "examples/factorial.kmp"
, codegen "examples/splitmix.kmp"
, codegen "lib/numbertheory.kmp"
, codegen "test/examples/bool.kmp"
, codegen "lib/gaussian.kmp"
, codegen "test/data/ccall.kmp"
, codegen "test/data/mutual.kmp"
, codegen "lib/rational.kmp"
, codegen "test/data/regAlloc.kmp"
, armCodegen "examples/factorial.kmp"
, armCodegen "lib/numbertheory.kmp"
, armCodegen "lib/gaussian.kmp"
, armCodegen "lib/rational.kmp"
, armCodegen "examples/splitmix.kmp"
, armCodegen "test/data/regAlloc.kmp"
]
codegen :: FilePath -> TestTree
codegen fp = testCase ("Generates code without throwing an exception (" ++ fp ++ ")") $ do
parsed <- parseProcess fp
let code = uncurry x86Alloc parsed
assertBool "Doesn't fail" (code `deepseq` True)
armCodegen :: FilePath -> TestTree
armCodegen fp = testCase ("Generates arm assembly without throwing exception (" ++ fp ++ ")") $ do
parsed <- parseProcess fp
let code = uncurry armAlloc parsed
assertBool "Doesn't fail" (code `deepseq` True)
livenessArm :: FilePath -> TestTree
livenessArm fp = testCase ("Aarch64 liveness analysis terminates (" ++ fp ++ ")") $ do
parsed <- parseProcess fp
let arm = uncurry armParsed parsed
cf = Arm.mkControlFlow arm
assertBool "Doesn't bottom" (reconstruct cf `deepseq` True)
liveness :: FilePath -> TestTree
liveness fp = testCase ("Liveness analysis terminates (" ++ fp ++ ")") $ do
parsed <- parseProcess fp
let x86 = uncurry x86Parsed parsed
cf = X86.mkControlFlow x86
assertBool "Doesn't bottom" (reconstruct cf `deepseq` True)
controlFlowGraph :: FilePath -> TestTree
controlFlowGraph fp = testCase ("Doesn't crash while creating control flow graph for " ++ fp) $ do
parsed <- parseProcess fp
let x86 = uncurry x86Parsed parsed
assertBool "Worked without exception" (X86.mkControlFlow x86 `deepseq` True)
controlFlowGraphArm :: FilePath -> TestTree
controlFlowGraphArm fp = testCase ("Doesn't crash while creating control flow graph for aarch64 assembly " ++ fp) $ do
parsed <- parseProcess fp
let arm = uncurry armParsed parsed
assertBool "Worked without exception" (Arm.mkControlFlow arm `deepseq` True)
armNoYeet :: FilePath -> TestTree
armNoYeet fp = testCase ("Selects instructions for " ++ fp) $ do
parsed <- parseProcess fp
let arm = uncurry armParsed parsed
assertBool "Worked without exception" (arm `deepseq` True)
x86NoYeet :: FilePath -> TestTree
x86NoYeet fp = testCase ("Selects instructions for " ++ fp) $ do
parsed <- parseProcess fp
let x86 = uncurry x86Parsed parsed
assertBool "Worked without exception" (x86 `deepseq` True)
irNoYeet :: FilePath -> TestTree
irNoYeet fp = testCase ("Generates IR without throwing an exception (" ++ fp ++ ")") $ do
(i, m) <- parseProcess fp
let (res, _, _) = irGen i m
assertBool "Worked without failure" (res `deepseq` True)
inlineTest :: FilePath -> TestTree
inlineTest fp = testCase ("Inlines " ++ fp ++ " without error") $ inlineFile fp
inlineFile :: FilePath -> Assertion
inlineFile fp = do
(_, m) <- parseProcess fp
let res = inline m
assertBool "Doesn't bottom when inlining" (res `deepseq` True)
monoTest :: FilePath -> TestTree
monoTest fp = testCase ("Monomorphizes " ++ fp ++ " without error") $ monoFile fp
monoFile :: FilePath -> Assertion
monoFile fp = do
(tyM, i) <- assignTypes fp
let res = runMonoM i (flattenModule tyM)
assertBool "Doesn't throw any exceptions" (res `deepseq` True)
pipelineWorks :: FilePath -> TestTree
pipelineWorks fp = testCase ("Functions in " ++ fp ++ " can be specialized") $ do
(maxU, m) <- parseProcess fp
let res = monomorphize maxU m
case res of
Left err -> assertFailure (show $ pretty err)
Right{} -> assertBool "Doesn't fail type-checking" True
| null | https://raw.githubusercontent.com/vmchale/kempe/aac73a386390747c0a54819d63c7438b54cdb168/test/Backend.hs | haskell | module Backend ( backendTests
) where
import Control.DeepSeq (deepseq)
import qualified Kempe.Asm.Arm.ControlFlow as Arm
import Kempe.Asm.Liveness
import qualified Kempe.Asm.X86.ControlFlow as X86
import Kempe.Inline
import Kempe.Module
import Kempe.Monomorphize
import Kempe.Pipeline
import Kempe.Shuttle
import Prettyprinter (pretty)
import Test.Tasty
import Test.Tasty.HUnit
import Type
backendTests :: TestTree
backendTests =
testGroup "Backend-ish"
[ monoTest "test/data/ty.kmp"
, inlineTest "lib/numbertheory.kmp"
, inlineTest "examples/factorial.kmp"
, pipelineWorks "test/data/ty.kmp"
, pipelineWorks "examples/splitmix.kmp"
, pipelineWorks "examples/factorial.kmp"
, pipelineWorks "test/data/mutual.kmp"
, pipelineWorks "test/data/multiConstruct.kmp"
, pipelineWorks "test/data/mod.kmp"
, irNoYeet "test/data/export.kmp"
, irNoYeet "examples/splitmix.kmp"
, irNoYeet "examples/factorial.kmp"
, irNoYeet "test/data/maybeC.kmp"
, irNoYeet "examples/os.kmp"
, x86NoYeet "examples/factorial.kmp"
, x86NoYeet "examples/splitmix.kmp"
, armNoYeet "examples/factorial.kmp"
, controlFlowGraph "examples/factorial.kmp"
, controlFlowGraph "examples/splitmix.kmp"
, controlFlowGraphArm "lib/gaussian.kmp"
, liveness "examples/factorial.kmp"
, liveness "examples/splitmix.kmp"
, livenessArm "lib/gaussian.kmp"
, codegen "examples/factorial.kmp"
, codegen "examples/splitmix.kmp"
, codegen "lib/numbertheory.kmp"
, codegen "test/examples/bool.kmp"
, codegen "lib/gaussian.kmp"
, codegen "test/data/ccall.kmp"
, codegen "test/data/mutual.kmp"
, codegen "lib/rational.kmp"
, codegen "test/data/regAlloc.kmp"
, armCodegen "examples/factorial.kmp"
, armCodegen "lib/numbertheory.kmp"
, armCodegen "lib/gaussian.kmp"
, armCodegen "lib/rational.kmp"
, armCodegen "examples/splitmix.kmp"
, armCodegen "test/data/regAlloc.kmp"
]
codegen :: FilePath -> TestTree
codegen fp = testCase ("Generates code without throwing an exception (" ++ fp ++ ")") $ do
parsed <- parseProcess fp
let code = uncurry x86Alloc parsed
assertBool "Doesn't fail" (code `deepseq` True)
armCodegen :: FilePath -> TestTree
armCodegen fp = testCase ("Generates arm assembly without throwing exception (" ++ fp ++ ")") $ do
parsed <- parseProcess fp
let code = uncurry armAlloc parsed
assertBool "Doesn't fail" (code `deepseq` True)
livenessArm :: FilePath -> TestTree
livenessArm fp = testCase ("Aarch64 liveness analysis terminates (" ++ fp ++ ")") $ do
parsed <- parseProcess fp
let arm = uncurry armParsed parsed
cf = Arm.mkControlFlow arm
assertBool "Doesn't bottom" (reconstruct cf `deepseq` True)
liveness :: FilePath -> TestTree
liveness fp = testCase ("Liveness analysis terminates (" ++ fp ++ ")") $ do
parsed <- parseProcess fp
let x86 = uncurry x86Parsed parsed
cf = X86.mkControlFlow x86
assertBool "Doesn't bottom" (reconstruct cf `deepseq` True)
controlFlowGraph :: FilePath -> TestTree
controlFlowGraph fp = testCase ("Doesn't crash while creating control flow graph for " ++ fp) $ do
parsed <- parseProcess fp
let x86 = uncurry x86Parsed parsed
assertBool "Worked without exception" (X86.mkControlFlow x86 `deepseq` True)
controlFlowGraphArm :: FilePath -> TestTree
controlFlowGraphArm fp = testCase ("Doesn't crash while creating control flow graph for aarch64 assembly " ++ fp) $ do
parsed <- parseProcess fp
let arm = uncurry armParsed parsed
assertBool "Worked without exception" (Arm.mkControlFlow arm `deepseq` True)
armNoYeet :: FilePath -> TestTree
armNoYeet fp = testCase ("Selects instructions for " ++ fp) $ do
parsed <- parseProcess fp
let arm = uncurry armParsed parsed
assertBool "Worked without exception" (arm `deepseq` True)
x86NoYeet :: FilePath -> TestTree
x86NoYeet fp = testCase ("Selects instructions for " ++ fp) $ do
parsed <- parseProcess fp
let x86 = uncurry x86Parsed parsed
assertBool "Worked without exception" (x86 `deepseq` True)
irNoYeet :: FilePath -> TestTree
irNoYeet fp = testCase ("Generates IR without throwing an exception (" ++ fp ++ ")") $ do
(i, m) <- parseProcess fp
let (res, _, _) = irGen i m
assertBool "Worked without failure" (res `deepseq` True)
inlineTest :: FilePath -> TestTree
inlineTest fp = testCase ("Inlines " ++ fp ++ " without error") $ inlineFile fp
inlineFile :: FilePath -> Assertion
inlineFile fp = do
(_, m) <- parseProcess fp
let res = inline m
assertBool "Doesn't bottom when inlining" (res `deepseq` True)
monoTest :: FilePath -> TestTree
monoTest fp = testCase ("Monomorphizes " ++ fp ++ " without error") $ monoFile fp
monoFile :: FilePath -> Assertion
monoFile fp = do
(tyM, i) <- assignTypes fp
let res = runMonoM i (flattenModule tyM)
assertBool "Doesn't throw any exceptions" (res `deepseq` True)
pipelineWorks :: FilePath -> TestTree
pipelineWorks fp = testCase ("Functions in " ++ fp ++ " can be specialized") $ do
(maxU, m) <- parseProcess fp
let res = monomorphize maxU m
case res of
Left err -> assertFailure (show $ pretty err)
Right{} -> assertBool "Doesn't fail type-checking" True
| |
935c7a9494d9bbb021f77a200e0981c7bfbe1451b3f8c5af450ea305d7d3e1c1 | dnaeon/cl-wol | test-core.lisp | Copyright ( c ) 2021 Nikolov < >
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions
;; are met:
;;
1 . Redistributions of source code must retain the above copyright
;; notice, this list of conditions and the following disclaimer
;; in this position and unchanged.
2 . Redistributions in binary form must reproduce the above copyright
;; notice, this list of conditions and the following disclaimer in the
;; documentation and/or other materials provided with the distribution.
;;
THIS SOFTWARE IS PROVIDED BY THE AUTHOR(S ) ` ` AS IS '' AND ANY EXPRESS OR
;; IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
;; OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
;; IN NO EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT
NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
;; DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
;; (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
;; THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(in-package :cl-user)
(defpackage :cl-wol.test
(:use :cl :rove))
(in-package :cl-wol.test)
(deftest test-mac-addresses-from-strings
(testing "test supported MAC addresses from strings"
(let ((items (list "00-0B-F8-39-AC-A6"
"00-1C-42-0F-B2-4E"
"D0-F0-DB-97-46-67"
"00-15-5e-b7-10-32"
"00-a0-d1-e5-5e-a2"
"00-17-7f-45-5f-01"
"00:03:EE:73:D4:8F"
"00:0A:FD:15:05:9C"
"90:b9:7d:30:97:9f")))
(dolist (item items)
(ok (cl-wol.core:parse-hex-bytes item) (format nil "parse mac address ~A" item))
(ok (cl-wol.core:make-magic-packet item) (format nil "make-magic-packet with ~A" item)))))
(testing "test unsupported MAC addresses from strings"
(let ((items (list ""
"invalid mac address"
"00 A0 94 0B 14 66"
"08 00 33 5e 2d ea"
"00 21 dd 92 f6 e3"
"01-02-03-04-XX-YY")))
(dolist (item items)
(ng (cl-wol.core:parse-hex-bytes item) (format nil "parse ~A" item))
(ok (signals (cl-wol.core:make-magic-packet item)) (format nil "make-magic-packet signals with ~A" item))))))
(deftest mac-octets
(testing "test mac-octets with known addresses"
(let ((items '((:addr "00-0B-F8-39-AC-A6" :octets #(0 11 248 57 172 166))
(:addr "00-a0-d1-e5-5e-a2" :octets #(0 160 209 229 94 162))
(:addr "ff-ff-ff-ff-ff-ff" :octets #(255 255 255 255 255 255))
(:addr "00:03:EE:73:D4:8F" :octets #(0 3 238 115 212 143))
(:addr "00:0A:FD:15:05:9C" :octets #(0 10 253 21 5 156))
(:addr "90:b9:7d:30:97:9f" :octets #(144 185 125 48 151 159)))))
(dolist (item items)
(let* ((addr (getf item :addr))
(octets (getf item :octets))
(magic-packet (cl-wol.core:make-magic-packet addr)))
(ok (equalp (cl-wol.core:mac-octets magic-packet) octets)
(format nil "mac-octets match for ~A" addr)))))))
(deftest encode-payload
(testing "test encode-payload with known addresses"
(ok (equalp (cl-wol.core:encode-payload (cl-wol.core:make-magic-packet "00:01:02:03:04:05"))
#(#xFF #xFF #xFF #xFF #xFF #xFF ;; Header
1st repetition
2nd repetition
#x00 #x01 #x02 #x03 #x04 #x05 ;; ...
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05 ;; ...
16th repetition
"encode-payload matches for 00:01:02:03:04:05")
(ok (equalp (cl-wol.core:encode-payload (cl-wol.core:make-magic-packet "00:01:02:03:04:05" "00-00-00-00-00-00"))
#(#xFF #xFF #xFF #xFF #xFF #xFF ;; Header
1st repetition
2nd repetition
#x00 #x01 #x02 #x03 #x04 #x05 ;; ...
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05 ;; ...
16th repetition
SecureOn password
"encode-payload matches for 00:01:02:03:04:05 with SecureOn password")
(ok (equalp (cl-wol.core:encode-payload (cl-wol.core:make-magic-packet "aa-bb-cc-dd-ee-ff"))
#(#xFF #xFF #xFF #xFF #xFF #xFF ;; Header
1st repetition
2nd repetition
#xAA #xBB #xCC #xDD #xEE #xFF ;; ...
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF ;; ...
16th repetition
"encode-payload matches for aa-bb-cc-dd-ee-ff")
(ok (equalp (cl-wol.core:encode-payload (cl-wol.core:make-magic-packet "aa-bb-cc-dd-ee-ff" "01-02-03-04-05-06"))
#(#xFF #xFF #xFF #xFF #xFF #xFF ;; Header
1st repetition
2nd repetition
#xAA #xBB #xCC #xDD #xEE #xFF ;; ...
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF ;; ...
16th repetition
SecureOn password
"encode-payload matches for aa-bb-cc-dd-ee-ff with SecureOn password")))
(deftest make-magic-packet
(testing "test make-magic-packet with vectors"
(let ((items (list (list :addr (cl-wol.core:make-octet-vector '(0 0 0 0 0 0))
:octets #(0 0 0 0 0 0)
:password nil)
(list :addr (cl-wol.core:make-octet-vector '(255 255 255 255 255 255))
:octets #(255 255 255 255 255 255)
:password nil)
(list :addr (cl-wol.core:make-octet-vector '(1 2 3 4 5 6))
:octets #(1 2 3 4 5 6)
:password (cl-wol.core:make-octet-vector '(0 0 0 0 0 0))))))
(dolist (item items)
(let* ((addr (getf item :addr))
(octets (getf item :octets))
(password (getf item :password))
(magic-packet (cl-wol.core:make-magic-packet addr password)))
(ok (equalp (cl-wol.core:mac-octets magic-packet) octets)
(format nil "mac-octets match for ~A" addr))))))
(testing "test make-magic-packet with bad vectors"
(let ((items (list (list :addr (cl-wol.core:make-octet-vector '(0)))
(list :addr (cl-wol.core:make-octet-vector '(1 2 3))))))
(dolist (item items)
(let* ((addr (getf item :addr)))
(ok (signals (magic-packet (cl-wol.core:make-magic-packet addr)))
(format nil "signals on make-magic-packet with ~A" addr)))))))
| null | https://raw.githubusercontent.com/dnaeon/cl-wol/8f5cb9c4aeabb726b1991379d2a47eac3f38b2b6/tests/test-core.lisp | lisp | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer
in this position and unchanged.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
LOSS OF USE ,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Header
...
...
Header
...
...
Header
...
...
Header
...
... | Copyright ( c ) 2021 Nikolov < >
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
THIS SOFTWARE IS PROVIDED BY THE AUTHOR(S ) ` ` AS IS '' AND ANY EXPRESS OR
INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
(in-package :cl-user)
(defpackage :cl-wol.test
(:use :cl :rove))
(in-package :cl-wol.test)
(deftest test-mac-addresses-from-strings
(testing "test supported MAC addresses from strings"
(let ((items (list "00-0B-F8-39-AC-A6"
"00-1C-42-0F-B2-4E"
"D0-F0-DB-97-46-67"
"00-15-5e-b7-10-32"
"00-a0-d1-e5-5e-a2"
"00-17-7f-45-5f-01"
"00:03:EE:73:D4:8F"
"00:0A:FD:15:05:9C"
"90:b9:7d:30:97:9f")))
(dolist (item items)
(ok (cl-wol.core:parse-hex-bytes item) (format nil "parse mac address ~A" item))
(ok (cl-wol.core:make-magic-packet item) (format nil "make-magic-packet with ~A" item)))))
(testing "test unsupported MAC addresses from strings"
(let ((items (list ""
"invalid mac address"
"00 A0 94 0B 14 66"
"08 00 33 5e 2d ea"
"00 21 dd 92 f6 e3"
"01-02-03-04-XX-YY")))
(dolist (item items)
(ng (cl-wol.core:parse-hex-bytes item) (format nil "parse ~A" item))
(ok (signals (cl-wol.core:make-magic-packet item)) (format nil "make-magic-packet signals with ~A" item))))))
(deftest mac-octets
(testing "test mac-octets with known addresses"
(let ((items '((:addr "00-0B-F8-39-AC-A6" :octets #(0 11 248 57 172 166))
(:addr "00-a0-d1-e5-5e-a2" :octets #(0 160 209 229 94 162))
(:addr "ff-ff-ff-ff-ff-ff" :octets #(255 255 255 255 255 255))
(:addr "00:03:EE:73:D4:8F" :octets #(0 3 238 115 212 143))
(:addr "00:0A:FD:15:05:9C" :octets #(0 10 253 21 5 156))
(:addr "90:b9:7d:30:97:9f" :octets #(144 185 125 48 151 159)))))
(dolist (item items)
(let* ((addr (getf item :addr))
(octets (getf item :octets))
(magic-packet (cl-wol.core:make-magic-packet addr)))
(ok (equalp (cl-wol.core:mac-octets magic-packet) octets)
(format nil "mac-octets match for ~A" addr)))))))
(deftest encode-payload
(testing "test encode-payload with known addresses"
(ok (equalp (cl-wol.core:encode-payload (cl-wol.core:make-magic-packet "00:01:02:03:04:05"))
1st repetition
2nd repetition
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
16th repetition
"encode-payload matches for 00:01:02:03:04:05")
(ok (equalp (cl-wol.core:encode-payload (cl-wol.core:make-magic-packet "00:01:02:03:04:05" "00-00-00-00-00-00"))
1st repetition
2nd repetition
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
#x00 #x01 #x02 #x03 #x04 #x05
16th repetition
SecureOn password
"encode-payload matches for 00:01:02:03:04:05 with SecureOn password")
(ok (equalp (cl-wol.core:encode-payload (cl-wol.core:make-magic-packet "aa-bb-cc-dd-ee-ff"))
1st repetition
2nd repetition
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
16th repetition
"encode-payload matches for aa-bb-cc-dd-ee-ff")
(ok (equalp (cl-wol.core:encode-payload (cl-wol.core:make-magic-packet "aa-bb-cc-dd-ee-ff" "01-02-03-04-05-06"))
1st repetition
2nd repetition
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
#xAA #xBB #xCC #xDD #xEE #xFF
16th repetition
SecureOn password
"encode-payload matches for aa-bb-cc-dd-ee-ff with SecureOn password")))
;; Unit tests for CL-WOL.CORE:MAKE-MAGIC-PACKET (Rove test framework).
(deftest make-magic-packet
  (testing "test make-magic-packet with vectors"
    ;; Each item is a plist: :addr is the octet vector handed to the
    ;; constructor, :octets the value expected back from MAC-OCTETS, and
    ;; :password an optional SecureOn password octet vector.
    (let ((items (list (list :addr (cl-wol.core:make-octet-vector '(0 0 0 0 0 0))
                             :octets #(0 0 0 0 0 0)
                             :password nil)
                       (list :addr (cl-wol.core:make-octet-vector '(255 255 255 255 255 255))
                             :octets #(255 255 255 255 255 255)
                             :password nil)
                       (list :addr (cl-wol.core:make-octet-vector '(1 2 3 4 5 6))
                             :octets #(1 2 3 4 5 6)
                             :password (cl-wol.core:make-octet-vector '(0 0 0 0 0 0))))))
      (dolist (item items)
        (let* ((addr (getf item :addr))
               (octets (getf item :octets))
               (password (getf item :password))
               (magic-packet (cl-wol.core:make-magic-packet addr password)))
          ;; The constructed packet must echo back exactly the MAC octets
          ;; it was built from.
          (ok (equalp (cl-wol.core:mac-octets magic-packet) octets)
              (format nil "mac-octets match for ~A" addr))))))
  (testing "test make-magic-packet with bad vectors"
    ;; Octet vectors shorter than the 6 bytes of a MAC address are expected
    ;; to signal a condition instead of producing a packet.
    (let ((items (list (list :addr (cl-wol.core:make-octet-vector '(0)))
                       (list :addr (cl-wol.core:make-octet-vector '(1 2 3))))))
      (dolist (item items)
        (let* ((addr (getf item :addr)))
          (ok (signals (magic-packet (cl-wol.core:make-magic-packet addr)))
              (format nil "signals on make-magic-packet with ~A" addr)))))))
|
c69505e05e2327a5d65045275f27b091c987652c51cfc5419a324ee2713d9943 | hkuplg/fcore | Mini.hs | {-# LANGUAGE RankNTypes #-}
module Mini where
-- | PHOAS-style (parametric higher-order abstract syntax) System-F types:
-- the 'Forall' binder is represented by a Haskell function.
data Type t = Int | TVar t | Fun (Type t) (Type t) | Forall (t -> Type t)

-- | A closed type: well-formed for any variable representation @t@.
newtype CType = CType { unCType :: forall t. Type t }

-- | The type of the K combinator: @forall a. forall b. a -> b -> a@.
konstTy = Forall (\a -> Forall (\b -> Fun (TVar a) (Fun (TVar b) (TVar a))))

-- | Instantiate konstTy's outer quantifier at Int via substitution ('subst'').
t1 = let Forall f = konstTy in Forall (\a -> subst' a Int (f a))

-- | Instantiate konstTy's outer quantifier at Int directly, via 'join'.
t2 = let Forall f = konstTy in join (f Int)

-- | Convert a de-Bruijn-level type (variables are 'Int's counted from the
-- outermost binder, starting at @s@) back into PHOAS form.  @i@ is the next
-- binder level and @as@ collects the PHOAS variables bound on the way down.
dedeBruijnType :: Int -> Int -> [t] -> Type Int -> Type t
dedeBruijnType s _ as (TVar i) = TVar (reverse as !! (i-s))
dedeBruijnType s _ _ Int = Int
dedeBruijnType s i as (Fun t1 t2) = Fun (dedeBruijnType s i as t1) (dedeBruijnType s i as t2)
dedeBruijnType s i as (Forall f) = Forall (\a -> dedeBruijnType s (i+1) (a:as) (f i))

-- | Substitute type @r@ for the variable @x@ in a de-Bruijn-level type.
subst' :: Int -> Type Int -> Type Int -> Type Int
subst' x r Int = Int
subst' x r (TVar a)
  | a == x = r
  | otherwise = TVar a
subst' x r (Fun t1 t2) = Fun (subst' x r t1) (subst' x r t2)
subst' x r (Forall f) = Forall (\a -> subst' x r (f a))

-- subst :: Type Int -> Type Int -> Type t
-- NOTE(review): the next line was bare (invalid) text in the extracted
-- source; it appears to have been a commented-out sketch matching the type
-- signature above, so it is restored as a comment.
-- subst t1 t2 = dedeBruijnType subst' 0 t1 t2

-- | Pretty-print a de-Bruijn-level type; @i@ is the next fresh variable id,
-- so binders print as @a0@, @a1@, ... from the outside in.
pretty :: Int -> Type Int -> String
pretty _ Int = "Int"
pretty _ (TVar a) = "a" ++ show a
pretty i (Fun t1 t2) = "(" ++ pretty i t1 ++ " -> " ++ pretty i t2 ++ ")"
pretty i (Forall f) = "forall a" ++ show i ++ ". " ++ pretty (i+1) (f i)

-- | Monadic-join-like collapse for 'Type': a type whose variables are
-- themselves types has those types substituted in place.
join :: Type (Type t) -> Type t
join Int = Int
join (TVar a) = a
join (Fun t1 t2) = Fun (join t1) (join t2)
join (Forall f) = Forall (\a -> join (f (TVar a)))
| null | https://raw.githubusercontent.com/hkuplg/fcore/e27b6dec5bfd319edb8c3e90d94a993bcc7b4c95/frontend/Mini.hs | haskell | # LANGUAGE RankNTypes #
subst :: Type Int -> Type Int -> Type t |
module Mini where
data Type t = Int | TVar t | Fun (Type t) (Type t) | Forall (t -> Type t)
newtype CType = CType { unCType :: forall t. Type t }
konstTy = Forall (\a -> Forall (\b -> Fun (TVar a) (Fun (TVar b) (TVar a))))
t1 = let Forall f = konstTy in Forall (\a -> subst' a Int (f a))
t2 = let Forall f = konstTy in join (f Int)
dedeBruijnType :: Int -> Int -> [t] -> Type Int -> Type t
dedeBruijnType s _ as (TVar i) = TVar (reverse as !! (i-s))
dedeBruijnType s _ _ Int = Int
dedeBruijnType s i as (Fun t1 t2) = Fun (dedeBruijnType s i as t1) (dedeBruijnType s i as t2)
dedeBruijnType s i as (Forall f) = Forall (\a -> dedeBruijnType s (i+1) (a:as) (f i))
subst' :: Int -> Type Int -> Type Int -> Type Int
subst' x r Int = Int
subst' x r (TVar a)
| a == x = r
| otherwise = TVar a
subst' x r (Fun t1 t2) = Fun (subst' x r t1) (subst' x r t2)
subst' x r (Forall f) = Forall (\a -> subst' x r (f a))
-- NOTE(review): restored as a comment — this line was invalid Haskell in the
-- extracted source (a function applied to itself with a stray quote) and
-- appears to have been a commented-out sketch.
-- subst t1 t2 = dedeBruijnType subst' 0 t1 t2
pretty :: Int -> Type Int -> String
pretty _ Int = "Int"
pretty _ (TVar a) = "a" ++ show a
pretty i (Fun t1 t2) = "(" ++ pretty i t1 ++ " -> " ++ pretty i t2 ++ ")"
pretty i (Forall f) = "forall a" ++ show i ++ ". " ++ pretty (i+1) (f i)
join :: Type (Type t) -> Type t
join Int = Int
join (TVar a) = a
join (Fun t1 t2) = Fun (join t1) (join t2)
join (Forall f) = Forall (\a -> join (f (TVar a)))
|
b1199e544f7b2913dde7c3a29f7f89ec0e34c904a303ab03fbdcc67c092f0d44 | lambe-lang/nethra | proof.mli | val render :
?term_render:(Format.formatter -> 'a Nethra_lang_ast.Term.t -> unit)
-> Format.formatter
-> 'a Nethra_lang_ast.Proof.t
-> unit
| null | https://raw.githubusercontent.com/lambe-lang/nethra/7197277297ffeab769a4cc88a8bfee949def8d56/lib/nethra/lang/render/proof.mli | ocaml | val render :
?term_render:(Format.formatter -> 'a Nethra_lang_ast.Term.t -> unit)
-> Format.formatter
-> 'a Nethra_lang_ast.Proof.t
-> unit
| |
5662cdfbbdbff7e922a6356c1e196ca5ad10d493f8e1e5139549753c33d0e515 | google-research/dex-lang | JIT.hs | Copyright 2020 Google LLC
--
-- Use of this source code is governed by a BSD-style
-- license that can be found in the LICENSE file or at
-- -source/licenses/bsd
# OPTIONS_GHC -Wno - orphans #
module Dex.Foreign.JIT (
NativeFunction, ClosedExportedSignature,
ExportNativeFunction (..), ExportNativeFunctionAddr,
dexGetFunctionSignature, dexFreeFunctionSignature,
dexCompile, dexUnload
) where
import Control.Concurrent.MVar
import Control.Monad.State.Strict
import Foreign.Ptr
import Foreign.C.String
import Foreign.C.Types
import Foreign.Storable
import Foreign.Marshal.Alloc
import Data.Functor
import qualified Data.Map.Strict as M
import Export
import Name
import TopLevel
import Types.Core
import Types.Imp
import Dex.Foreign.Util
import Dex.Foreign.Context
-- | Decode the calling-convention tag received over the C FFI boundary.
-- Only 0 (standard) and 1 (XLA) are defined; anything else is a caller bug.
intAsCC :: CInt -> CallingConvention
intAsCC tag = case tag of
  0 -> StandardCC
  1 -> XLACC
  _ -> error "Unrecognized calling convention"
-- | Compile a Dex atom (expected to be a function value) to native code.
--
-- Resolves the atom behind the stable pointer, lowers it through the export
-- pipeline under the requested calling convention, JIT-loads the resulting
-- object code, and records the function pointer in the context's native
-- function table (so it can later be queried via 'dexGetFunctionSignature'
-- and released via 'dexUnload').  Failures are routed through 'catchErrors'.
dexCompile :: Ptr Context -> CInt -> Ptr AtomEx -> IO ExportNativeFunctionAddr
dexCompile ctxPtr ccInt funcAtomPtr = catchErrors do
  AtomEx funcAtom <- fromStablePtr funcAtomPtr
  let cc = intAsCC ccInt
  runTopperMFromContext ctxPtr do
    -- TODO: Check if atom is compatible with context! Use module name?
    (impFunc, nativeSignature) <- prepareFunctionForExport cc (unsafeCoerceE funcAtom)
    nativeFunction <- toCFunction "userFunc" impFunc >>= loadObject
    let funcPtr = nativeFunPtr $ nativeFunction
    let exportNativeFunction = ExportNativeFunction nativeFunction nativeSignature
    liftIO $ insertIntoNativeFunctionTable ctxPtr funcPtr exportNativeFunction
    return funcPtr
-- | Look up the exported signature of a previously compiled function.
--
-- On success the signature is copied onto the heap ('putOnHeap'); the caller
-- is responsible for releasing it with 'dexFreeFunctionSignature'.  When the
-- address is not in the context's function table, the error message is set
-- and 'nullPtr' is returned.
dexGetFunctionSignature :: Ptr Context -> ExportNativeFunctionAddr -> IO (Ptr (ExportedSignature 'VoidS))
dexGetFunctionSignature ctxPtr funcPtr = do
  Context _ _ ptrTabMVar <- fromStablePtr ctxPtr
  addrTable <- readMVar ptrTabMVar
  case M.lookup funcPtr addrTable of
    Nothing -> setError "Invalid function address" $> nullPtr
    Just ExportNativeFunction{..} -> putOnHeap nativeSignature
-- | Release a signature previously returned by 'dexGetFunctionSignature'.
-- The layout starts with three separately allocated C strings; each of those
-- is freed before the enclosing allocation itself.
dexFreeFunctionSignature :: Ptr (ExportedSignature 'VoidS) -> IO ()
dexFreeFunctionSignature sigPtr = do
  let strPtr = castPtr @(ExportedSignature 'VoidS) @CString sigPtr
  mapM_ (\i -> peekElemOff strPtr i >>= free) [0, 1, 2]
  free sigPtr
-- | Remove a compiled function from the context's native function table and
-- tear down its JIT resources.
dexUnload :: Ptr Context -> ExportNativeFunctionAddr -> IO ()
dexUnload ctxPtr funcPtr = do
  f <- popFromNativeFunctionTable ctxPtr funcPtr
  nativeFunTeardown $ nativeFunction f
Use of this source code is governed by a BSD-style
license that can be found in the LICENSE file or at
-source/licenses/bsd
TODO: Check if atom is compatible with context! Use module name? | Copyright 2020 Google LLC
# OPTIONS_GHC -Wno - orphans #
module Dex.Foreign.JIT (
NativeFunction, ClosedExportedSignature,
ExportNativeFunction (..), ExportNativeFunctionAddr,
dexGetFunctionSignature, dexFreeFunctionSignature,
dexCompile, dexUnload
) where
import Control.Concurrent.MVar
import Control.Monad.State.Strict
import Foreign.Ptr
import Foreign.C.String
import Foreign.C.Types
import Foreign.Storable
import Foreign.Marshal.Alloc
import Data.Functor
import qualified Data.Map.Strict as M
import Export
import Name
import TopLevel
import Types.Core
import Types.Imp
import Dex.Foreign.Util
import Dex.Foreign.Context
intAsCC :: CInt -> CallingConvention
intAsCC 0 = StandardCC
intAsCC 1 = XLACC
intAsCC _ = error "Unrecognized calling convention"
dexCompile :: Ptr Context -> CInt -> Ptr AtomEx -> IO ExportNativeFunctionAddr
dexCompile ctxPtr ccInt funcAtomPtr = catchErrors do
AtomEx funcAtom <- fromStablePtr funcAtomPtr
let cc = intAsCC ccInt
runTopperMFromContext ctxPtr do
(impFunc, nativeSignature) <- prepareFunctionForExport cc (unsafeCoerceE funcAtom)
nativeFunction <- toCFunction "userFunc" impFunc >>= loadObject
let funcPtr = nativeFunPtr $ nativeFunction
let exportNativeFunction = ExportNativeFunction nativeFunction nativeSignature
liftIO $ insertIntoNativeFunctionTable ctxPtr funcPtr exportNativeFunction
return funcPtr
dexGetFunctionSignature :: Ptr Context -> ExportNativeFunctionAddr -> IO (Ptr (ExportedSignature 'VoidS))
dexGetFunctionSignature ctxPtr funcPtr = do
Context _ _ ptrTabMVar <- fromStablePtr ctxPtr
addrTable <- readMVar ptrTabMVar
case M.lookup funcPtr addrTable of
Nothing -> setError "Invalid function address" $> nullPtr
Just ExportNativeFunction{..} -> putOnHeap nativeSignature
dexFreeFunctionSignature :: Ptr (ExportedSignature 'VoidS) -> IO ()
dexFreeFunctionSignature sigPtr = do
let strPtr = castPtr @(ExportedSignature 'VoidS) @CString sigPtr
free =<< peekElemOff strPtr 0
free =<< peekElemOff strPtr 1
free =<< peekElemOff strPtr 2
free sigPtr
dexUnload :: Ptr Context -> ExportNativeFunctionAddr -> IO ()
dexUnload ctxPtr funcPtr = do
f <- popFromNativeFunctionTable ctxPtr funcPtr
nativeFunTeardown $ nativeFunction f
|
10e12931bc0088caab386a879222d23f785fba7813a06aa0e53d7a4901124a68 | yetibot/yetibot | time.clj | (ns yetibot.commands.time
(:require
[yetibot.core.hooks :refer [cmd-hook]]
[clj-time.core :as time]
[clj-time.format :as f]))
(def date-time-formatter
(-> (f/formatter "yyyy-MM-dd HH:mm:ss")
(f/with-zone (time/default-time-zone))))
(defn list-timezones
"time zones # list known timezones"
[_] (vec (time/available-ids)))
(defn format-with [fmt]
(f/unparse fmt (time/now)))
(defn time-with-offset
"time +-<offset> # report current time with given UTC offset"
[{[_ offset-str] :match}]
(->> (read-string offset-str)
(time/time-zone-for-offset)
(f/with-zone date-time-formatter)
(format-with)))
(defn time-default
"time # report current time with server timezone"
[_] (format-with date-time-formatter))
(defn time-with-zoneid
"time <zoneid> # report current time with given zone"
[{[_ zone-id] :match}]
(->> zone-id
(time/time-zone-for-id)
(f/with-zone date-time-formatter)
(format-with)))
(cmd-hook #"time"
#"zones" list-timezones
#"([+-]\d+)" time-with-offset
#"(.+)" time-with-zoneid
_ time-default)
| null | https://raw.githubusercontent.com/yetibot/yetibot/2fb5c1182b1a53ab0e433d6bab2775ebd43367de/src/yetibot/commands/time.clj | clojure | (ns yetibot.commands.time
(:require
[yetibot.core.hooks :refer [cmd-hook]]
[clj-time.core :as time]
[clj-time.format :as f]))
(def date-time-formatter
(-> (f/formatter "yyyy-MM-dd HH:mm:ss")
(f/with-zone (time/default-time-zone))))
(defn list-timezones
"time zones # list known timezones"
[_] (vec (time/available-ids)))
(defn format-with [fmt]
(f/unparse fmt (time/now)))
(defn time-with-offset
"time +-<offset> # report current time with given UTC offset"
[{[_ offset-str] :match}]
(->> (read-string offset-str)
(time/time-zone-for-offset)
(f/with-zone date-time-formatter)
(format-with)))
(defn time-default
"time # report current time with server timezone"
[_] (format-with date-time-formatter))
(defn time-with-zoneid
"time <zoneid> # report current time with given zone"
[{[_ zone-id] :match}]
(->> zone-id
(time/time-zone-for-id)
(f/with-zone date-time-formatter)
(format-with)))
(cmd-hook #"time"
#"zones" list-timezones
#"([+-]\d+)" time-with-offset
#"(.+)" time-with-zoneid
_ time-default)
| |
dc11d33ecccda8d997aba083c64e213e5e6ba487cfeb7ece679bb05ae23a2198 | erebe/wstunnel | Socks5.hs | # LANGUAGE DuplicateRecordFields #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE StrictData #
module Socks5 where
import ClassyPrelude
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import qualified Data.ByteString as BC
import qualified Data.ByteString.Char8 as BC8
import Data.Either
import qualified Data.Text as T
import qualified Data.Text.Read as T
import qualified Data.Text.Encoding as E
import Network.Socket (HostName, PortNumber)
import Numeric (showHex)
import Control.Monad.Except (MonadError)
import qualified Data.Streaming.Network as N
socksVersion :: Word8
socksVersion = 0x05
data AuthMethod = NoAuth
| GSSAPI
| Login
| Reserved
| NotAllowed
deriving (Show, Read)
data AddressType = DOMAIN_NAME
| IPv4
deriving (Show, Read, Eq)
data RequestAuth = RequestAuth
{ version :: Int
, methods :: Vector AuthMethod
} deriving (Show, Read)
data ResponseAuth = ResponseAuth
{ version :: Int
, method :: AuthMethod
} deriving (Show, Read)
instance Binary ResponseAuth where
put ResponseAuth{..} = putWord8 (fromIntegral version) >> put method
get = ResponseAuth <$> (fromIntegral <$> getWord8)
<*> get
instance Binary AuthMethod where
put val = case val of
NoAuth -> putWord8 0x00
GSSAPI -> putWord8 0x01
Login -> putWord8 0x02
NotAllowed -> putWord8 0xFF
get = do
method <- getWord8
return $ case method of
0x00 -> NoAuth
0x01 -> GSSAPI
0x02 -> Login
0xFF -> NotAllowed
_ -> Reserved
-- Wire codec for the SOCKS5 method-negotiation request (RFC 1928 §3):
-- VER | NMETHODS | METHODS...
instance Binary RequestAuth where
  put RequestAuth{..} = do
    putWord8 (fromIntegral version)
    putWord8 (fromIntegral $ length methods)
    mapM_ put methods

  -- Check length <= 255: the method count travels in a single byte.
  -- NOTE(review): this line was bare text in the extracted source (its
  -- comment marker was stripped); restored as a comment.
  get = do
    version <- fromIntegral <$> getWord8
    guard (version == 0x05)
    nbMethods <- fromIntegral <$> getWord8
    guard (nbMethods > 0 && nbMethods <= 0xFF)
    methods <- replicateM nbMethods get
    return $ RequestAuth version methods
data Request = Request
{ version :: Int
, command :: Command
, addr :: HostName
, port :: PortNumber
, addrType :: AddressType
} deriving (Show)
data Command = Connect
| Bind
| UdpAssociate
deriving (Show, Eq, Enum, Bounded)
instance Binary Command where
put = putWord8 . (+1) . fromIntegral . fromEnum
get = do
cmd <- (\val -> fromIntegral val - 1) <$> getWord8
guard $ cmd >= fromEnum (minBound :: Command) && cmd <= fromEnum (maxBound :: Command)
return .toEnum $ cmd
-- Wire codec for the SOCKS5 request (RFC 1928 §4):
-- VER | CMD | RSV | ATYP | DST.ADDR | DST.PORT
instance Binary Request where
  put Request{..} = do
    putWord8 (fromIntegral version)
    put command
    putWord8 0x00 -- RESERVED
    _ <- if addrType == DOMAIN_NAME
    then do
      putWord8 0x03
      let host = BC8.pack addr
      putWord8 (fromIntegral . length $ host)
      traverse_ put host
    else do
      putWord8 0x01
      let ipv4 = fst . Data.Either.fromRight (0, mempty) . T.decimal . T.pack <$> splitElem '.' addr
      traverse_ putWord8 ipv4
    putWord16be (fromIntegral port)

  get = do
    version <- fromIntegral <$> getWord8
    guard (version == 5)
    cmd <- get :: Get Command
    _ <- getWord8 -- RESERVED
    -- Addr type: we support only DOMAINNAME (0x03) and IPv4 (0x01).
    -- NOTE(review): this read and its guard were missing in the extracted
    -- source ('opCode' was used but never bound — the lines were deleted
    -- along with their comments); restored here.
    opCode <- fromIntegral <$> getWord8
    guard (opCode == 0x03 || opCode == 0x01)
    host <- if opCode == 0x03
      then do
        length <- fromIntegral <$> getWord8
        fromRight T.empty . E.decodeUtf8' <$> replicateM length getWord8
      else do
        ipv4 <- replicateM 4 getWord8 :: Get [Word8]
        let ipv4Str = T.intercalate "." $ fmap (tshow . fromEnum) ipv4
        return ipv4Str
    guard (not $ null host)
    port <- fromIntegral <$> getWord16be
    return Request
      { version = version
      , command = cmd
      , addr = unpack host
      , port = port
      , addrType = if opCode == 0x03 then DOMAIN_NAME else IPv4
      }
toHex :: LByteString -> String
toHex = foldr showHex "" . unpack
data Response = Response
{ version :: Int
, returnCode :: RetCode
, serverAddr :: HostName
, serverPort :: PortNumber
, serverAddrType :: AddressType
} deriving (Show)
data RetCode = SUCCEEDED
| GENERAL_FAILURE
| NOT_ALLOWED
| NO_NETWORK
| HOST_UNREACHABLE
| CONNECTION_REFUSED
| TTL_EXPIRED
| UNSUPPORTED_COMMAND
| UNSUPPORTED_ADDRESS_TYPE
| UNASSIGNED
deriving (Show, Eq, Enum, Bounded)
instance Binary RetCode where
put = putWord8 . fromIntegral . fromEnum
get = toEnum . min maxBound . fromIntegral <$> getWord8
instance Binary Response where
put Response{..} = do
putWord8 socksVersion
put returnCode
putWord8 0x00 -- Reserved
_ <- if serverAddrType == DOMAIN_NAME
then do
putWord8 0x03
let host = BC8.pack serverAddr
putWord8 (fromIntegral . length $ host)
traverse_ put host
else do
putWord8 0x01
let ipv4 = fst . Data.Either.fromRight (0, mempty) . T.decimal . T.pack <$> splitElem '.' serverAddr
traverse_ putWord8 ipv4
putWord16be (fromIntegral serverPort)
get = do
version <- fromIntegral <$> getWord8
guard(version == fromIntegral socksVersion)
ret <- toEnum . min maxBound . fromIntegral <$> getWord8
getWord8 -- RESERVED
opCode <- fromIntegral <$> getWord8 -- Type
guard(opCode == 0x03 || opCode == 0x01)
host <- if opCode == 0x03
then do
length <- fromIntegral <$> getWord8
fromRight T.empty . E.decodeUtf8' <$> replicateM length getWord8
else do
ipv4 <- replicateM 4 getWord8 :: Get [Word8]
let ipv4Str = T.intercalate "." $ fmap (tshow . fromEnum) ipv4
return ipv4Str
guard (not $ null host)
port <- getWord16be
return Response
{ version = version
, returnCode = ret
, serverAddr = unpack host
, serverPort = fromIntegral port
, serverAddrType = if opCode == 0x03 then DOMAIN_NAME else IPv4
}
data ServerSettings = ServerSettings
{ listenOn :: PortNumber
, bindOn :: HostName
, onAuthentification : : ( MonadIO m , MonadError IOException m ) = > RequestAuth - > m ResponseAuth
, onRequest : : ( MonadIO m , MonadError IOException m ) = > Request - > m Response
} deriving (Show) | null | https://raw.githubusercontent.com/erebe/wstunnel/93f444c7554b05a247beb3cef3f9e9e67dbdf04e/src/Socks5.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
RESERVED
RESERVED
Reserved
RESERVED
Type | # LANGUAGE DuplicateRecordFields #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE StrictData #
module Socks5 where
import ClassyPrelude
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import qualified Data.ByteString as BC
import qualified Data.ByteString.Char8 as BC8
import Data.Either
import qualified Data.Text as T
import qualified Data.Text.Read as T
import qualified Data.Text.Encoding as E
import Network.Socket (HostName, PortNumber)
import Numeric (showHex)
import Control.Monad.Except (MonadError)
import qualified Data.Streaming.Network as N
socksVersion :: Word8
socksVersion = 0x05
data AuthMethod = NoAuth
| GSSAPI
| Login
| Reserved
| NotAllowed
deriving (Show, Read)
data AddressType = DOMAIN_NAME
| IPv4
deriving (Show, Read, Eq)
data RequestAuth = RequestAuth
{ version :: Int
, methods :: Vector AuthMethod
} deriving (Show, Read)
data ResponseAuth = ResponseAuth
{ version :: Int
, method :: AuthMethod
} deriving (Show, Read)
instance Binary ResponseAuth where
put ResponseAuth{..} = putWord8 (fromIntegral version) >> put method
get = ResponseAuth <$> (fromIntegral <$> getWord8)
<*> get
instance Binary AuthMethod where
put val = case val of
NoAuth -> putWord8 0x00
GSSAPI -> putWord8 0x01
Login -> putWord8 0x02
NotAllowed -> putWord8 0xFF
get = do
method <- getWord8
return $ case method of
0x00 -> NoAuth
0x01 -> GSSAPI
0x02 -> Login
0xFF -> NotAllowed
_ -> Reserved
-- Wire codec for the SOCKS5 method-negotiation request (RFC 1928 §3):
-- VER | NMETHODS | METHODS...
instance Binary RequestAuth where
  put RequestAuth{..} = do
    putWord8 (fromIntegral version)
    putWord8 (fromIntegral $ length methods)
    mapM_ put methods

  -- Check length <= 255: the method count travels in a single byte.
  -- NOTE(review): this line was bare text in the extracted source (its
  -- comment marker was stripped); restored as a comment.
  get = do
    version <- fromIntegral <$> getWord8
    guard (version == 0x05)
    nbMethods <- fromIntegral <$> getWord8
    guard (nbMethods > 0 && nbMethods <= 0xFF)
    methods <- replicateM nbMethods get
    return $ RequestAuth version methods
data Request = Request
{ version :: Int
, command :: Command
, addr :: HostName
, port :: PortNumber
, addrType :: AddressType
} deriving (Show)
data Command = Connect
| Bind
| UdpAssociate
deriving (Show, Eq, Enum, Bounded)
instance Binary Command where
put = putWord8 . (+1) . fromIntegral . fromEnum
get = do
cmd <- (\val -> fromIntegral val - 1) <$> getWord8
guard $ cmd >= fromEnum (minBound :: Command) && cmd <= fromEnum (maxBound :: Command)
return .toEnum $ cmd
-- Wire codec for the SOCKS5 request (RFC 1928 §4):
-- VER | CMD | RSV | ATYP | DST.ADDR | DST.PORT
-- NOTE(review): comment-stripping in the extracted source also deleted the
-- code lines that carried trailing comments (the RSV write, the RESERVED
-- read, and the opCode read + guard); they are restored below to match the
-- other copy of this file and RFC 1928.
instance Binary Request where
  put Request{..} = do
    putWord8 (fromIntegral version)
    put command
    putWord8 0x00 -- RESERVED
    _ <- if addrType == DOMAIN_NAME
    then do
      putWord8 0x03
      let host = BC8.pack addr
      putWord8 (fromIntegral . length $ host)
      traverse_ put host
    else do
      putWord8 0x01
      let ipv4 = fst . Data.Either.fromRight (0, mempty) . T.decimal . T.pack <$> splitElem '.' addr
      traverse_ putWord8 ipv4
    putWord16be (fromIntegral port)

  get = do
    version <- fromIntegral <$> getWord8
    guard (version == 5)
    cmd <- get :: Get Command
    _ <- getWord8 -- RESERVED
    -- Addr type: we support only DOMAINNAME (0x03) and IPv4 (0x01).
    opCode <- fromIntegral <$> getWord8
    guard (opCode == 0x03 || opCode == 0x01)
    host <- if opCode == 0x03
      then do
        length <- fromIntegral <$> getWord8
        fromRight T.empty . E.decodeUtf8' <$> replicateM length getWord8
      else do
        ipv4 <- replicateM 4 getWord8 :: Get [Word8]
        let ipv4Str = T.intercalate "." $ fmap (tshow . fromEnum) ipv4
        return ipv4Str
    guard (not $ null host)
    port <- fromIntegral <$> getWord16be
    return Request
      { version = version
      , command = cmd
      , addr = unpack host
      , port = port
      , addrType = if opCode == 0x03 then DOMAIN_NAME else IPv4
      }
toHex :: LByteString -> String
toHex = foldr showHex "" . unpack
data Response = Response
{ version :: Int
, returnCode :: RetCode
, serverAddr :: HostName
, serverPort :: PortNumber
, serverAddrType :: AddressType
} deriving (Show)
data RetCode = SUCCEEDED
| GENERAL_FAILURE
| NOT_ALLOWED
| NO_NETWORK
| HOST_UNREACHABLE
| CONNECTION_REFUSED
| TTL_EXPIRED
| UNSUPPORTED_COMMAND
| UNSUPPORTED_ADDRESS_TYPE
| UNASSIGNED
deriving (Show, Eq, Enum, Bounded)
instance Binary RetCode where
put = putWord8 . fromIntegral . fromEnum
get = toEnum . min maxBound . fromIntegral <$> getWord8
-- Wire codec for the SOCKS5 reply (RFC 1928 §6):
-- VER | REP | RSV | ATYP | BND.ADDR | BND.PORT
-- NOTE(review): comment-stripping in the extracted source deleted the code
-- lines that carried trailing comments (the RSV write, the RESERVED read,
-- and the opCode read); restored below to match the other copy of this file
-- and RFC 1928.
instance Binary Response where
  put Response{..} = do
    putWord8 socksVersion
    put returnCode
    putWord8 0x00 -- Reserved
    _ <- if serverAddrType == DOMAIN_NAME
    then do
      putWord8 0x03
      let host = BC8.pack serverAddr
      putWord8 (fromIntegral . length $ host)
      traverse_ put host
    else do
      putWord8 0x01
      let ipv4 = fst . Data.Either.fromRight (0, mempty) . T.decimal . T.pack <$> splitElem '.' serverAddr
      traverse_ putWord8 ipv4
    putWord16be (fromIntegral serverPort)

  get = do
    version <- fromIntegral <$> getWord8
    guard(version == fromIntegral socksVersion)
    ret <- toEnum . min maxBound . fromIntegral <$> getWord8
    getWord8 -- RESERVED
    opCode <- fromIntegral <$> getWord8 -- Type
    guard(opCode == 0x03 || opCode == 0x01)
    host <- if opCode == 0x03
      then do
        length <- fromIntegral <$> getWord8
        fromRight T.empty . E.decodeUtf8' <$> replicateM length getWord8
      else do
        ipv4 <- replicateM 4 getWord8 :: Get [Word8]
        let ipv4Str = T.intercalate "." $ fmap (tshow . fromEnum) ipv4
        return ipv4Str
    guard (not $ null host)
    port <- getWord16be
    return Response
      { version = version
      , returnCode = ret
      , serverAddr = unpack host
      , serverPort = fromIntegral port
      , serverAddrType = if opCode == 0x03 then DOMAIN_NAME else IPv4
      }
data ServerSettings = ServerSettings
{ listenOn :: PortNumber
, bindOn :: HostName
, onAuthentification : : ( MonadIO m , MonadError IOException m ) = > RequestAuth - > m ResponseAuth
, onRequest : : ( MonadIO m , MonadError IOException m ) = > Request - > m Response
} deriving (Show) |
5654830a75c548af451f325cd68338682d69f916004338209ad5d259a6b6e59d | hammerlab/secotrec | aws_efs.ml | open Common
let tr_remove_new_lines = Genspio.EDSL.exec ["tr"; "-d"; "\\n"]
type guess_value = [ `From_metadata | `Value of string ] [@@deriving yojson]
type t = {
name: string [@main];
guess_subnet: guess_value [@default `From_metadata];
guess_secgroup: guess_value [@default `From_metadata];
} [@@deriving yojson, make]
let default_mount_point t = sprintf "/mnt-%s" t.name
module To_genspio = struct
open Genspio_edsl
  (** Like {!Common.sayl} but with an "[EFS:<name>]" prefix/prompt. *)
  let saylp t fmt l =
    sayl ("[EFS:%s] " ^ fmt) (string t.name :: l)
  (* Invoke the AWS CLI's [aws efs] subcommand; [more] are EDSL string terms. *)
  let aws_efs more =
    call ([string "aws"; string "efs"] @ more)
  (* Like [aws_efs] but takes plain OCaml strings. *)
  let aws_efs_strings more =
    aws_efs (List.map more ~f:string)
  (** Capture [cmd]'s output with newlines and double-quotes stripped (e.g.
      turning something like ["\"129.32.23.11\"\n"] into [129.32.23.11]);
      when [cmd] fails, [or_else] is run in its place. *)
  let get_successful_single_string cmd ~or_else =
    output_as_string (cmd |> succeeds |> if_seq ~t:[] ~e:[or_else])
    >> exec ["tr"; "-d"; "\\n\""]
    |> output_as_string
let aws_get_or_create t name ~get ~create =
let tmp = tmp_file name in
object (self)
method fill =
tmp#set (get_successful_single_string get ~or_else:fail);
method fill_or_null =
tmp#set (get_successful_single_string
(with_redirections get [to_file (int 2) (string "/dev/null")])
~or_else:(exec ["printf";"%s\\n"; "null"]))
method build =
seq [
saylp t "Checking %s" [string name];
self#fill_or_null;
if_seq (
(tmp#get =$= string "null")
||| (tmp#get =$= string ""))
~t:[
saylp t "Building %s" [string name];
tmp#set (get_successful_single_string create ~or_else:(seq [
saylp t "Building %s failed" [string name];
fail
]));
saylp t " -> %s was just created: %s" [string name; tmp#get];
]
~e:[
saylp t " -> %s is already there: %s" [string name; tmp#get];
];
]
method get = tmp#get
end
let get_or_create_file_system_id t =
let make cmd query =
aws_efs_strings [cmd; "--creation-token"; t.name; "--query"; query] in
aws_get_or_create t "file-system-id"
~get:(make "describe-file-systems" "FileSystems[0].FileSystemId")
~create:(make "create-file-system" "FileSystemId")
let get_or_create_mount_target t ~fs_id ~subnet_id ~secgrp_id =
let get =
aws_efs [
string "describe-mount-targets";
string "--file-system-id"; fs_id;
string "--output"; string "text";
string "--query"; string "MountTargets[].MountTargetId";
] in
let create =
aws_efs [
string "create-mount-target";
string "--file-system-id"; fs_id;
string "--subnet-id"; subnet_id;
string "--security-groups"; secgrp_id;
string "--query"; string "MountTargetId";
] in
aws_get_or_create t "mount-target-id" ~get ~create
(**
Assuming you are on an EC2 instance, [curl] a piece of meta-data,
cf.
{{:-instance-metadata.html}EC2 Instance Metadata Docs}. *)
let curl_metadata_item path =
let uri =
string_concat [string "-data/"; path] in
let tmp_err =
let unique = Genspio.Language.to_one_liner path in
ksprintf tmp_file "curl-error-%s" Digest.(string unique |> to_hex) in
call [string "curl"; string "--stderr"; tmp_err#path; uri]
||> tr_remove_new_lines
|> output_as_string
let subnet_id ~aws_cli t =
match t.guess_subnet with
| `Value v -> string v
| `From_metadata ->
let macs_path = string "network/interfaces/macs/" in
let mac = curl_metadata_item macs_path in
curl_metadata_item
@@ string_concat [macs_path; mac; string "subnet-id"]
let security_group ~aws_cli t =
match t.guess_subnet with
| `Value v -> string v
| `From_metadata ->
let name = curl_metadata_item @@ string "security-groups/" in
call [
string "aws"; string "ec2"; string "describe-security-groups";
string "--group-names"; name;
string "--query"; string "SecurityGroups[0].GroupId";
]
|> get_successful_single_string ~or_else:fail
let mount_point t = string (default_mount_point t)
let ensure_nfs_traffic_in_security_group t ~security_group =
seq [
saylp t "Authorizing :2049 traffic within group %s" [security_group];
begin
let tmp_err = tmp_file "asgi-error" in
if_seq (
with_redirections
(call [
string "aws"; string "ec2"; string "authorize-security-group-ingress";
string "--group-id"; security_group;
string "--protocol"; string "tcp"; string "--port"; string "2049";
string "--source-group"; security_group;
]) [
to_file (int 2) tmp_err#path;
]
|> succeeds
)
~t:[saylp t " -> NFSv4 traffic authorized" []]
~e:[
if_seq (call [string "grep"; string "InvalidPermission.Duplicate";
tmp_err#path] |> succeeds_silently)
~t:[saylp t " -> NFSv4 traffic was already authorized" []]
~e:[
saylp t "ERROR while Authorizing NFSv4 traffic:" [];
call [string "cat"; tmp_err#path];
fail;
]
]
end;
]
let wait_for_mount_target_available t ~mount_target_id =
seq [
saylp t "Waiting for mount-target to be really available." [];
seq_succeeds_or
~name:"Waiting-for-mount-target"
~silent:false
~clean_up:[fail] [
loop_until_ok
(
(aws_efs [
string "describe-mount-targets";
string "--mount-target-id"; mount_target_id;
string "--output"; string "text";
string "--query"; string "MountTargets[].LifeCycleState";
]
||> tr_remove_new_lines
|> output_as_string)
=$= string "available")
~attempts:40
~sleep:4;
]
]
  (* Mount the EFS mount-target's NFS endpoint at [mount_point t], unless
     something is already mounted there from the same IP (idempotent). *)
  let mount t ~mount_target_id =
    (* Ask AWS for the mount target's IP address (trimmed single value). *)
    let mt_ip_address =
      aws_efs [
        string "describe-mount-targets";
        string "--mount-target-id"; mount_target_id;
        string "--output"; string "text";
        string "--query"; string "MountTargets[].IpAddress";
      ]
      ||> tr_remove_new_lines
      |> output_as_string in
    seq [
      (* sayl "IP Address to mount: %s" [mt_ip_address]; *)
      call [string "sudo"; string "mkdir"; string "-p"; mount_point t];
      (* Already mounted?  Check the [mount] table for both IP and path. *)
      if_seq (
        exec ["mount"] ||> call [string "grep"; mt_ip_address]
        ||> call [string "grep"; mount_point t]
        |> succeeds_silently
      )
        ~t:[
          saylp t "%s already mounted at %s:" [
            string_concat [mt_ip_address; string ":/"];
            mount_point t;
          ];
          output_markdown_code "" (
            exec ["mount"] ||> call [string "grep"; mt_ip_address]
            ||> call [string "grep"; mount_point t]
          );
        ]
        ~e:[
          saylp t "Mounting %s at %s" [
            string_concat [mt_ip_address; string ":/"];
            mount_point t;
          ];
          call [
            string "sudo"; string "mount"; string "-t"; string "nfs4";
            (* Options from:
               https://docs.aws.amazon.com/efs/latest/ug/mounting-fs-mount-cmd-ip-addr.html
               (URL was mangled by extraction; reconstructed from its tail). *)
            string "-o"; string "nfsvers=4.1,rsize=1048576,wsize=1048576,\
                                 hard,timeo=600,retrans=2";
            string_concat [mt_ip_address; string ":/"];
            mount_point t;
          ];
        ]
    ]
let ensure ~aws_cli t =
let file_system_id = get_or_create_file_system_id t in
let mount_target_id =
get_or_create_mount_target t ~fs_id:file_system_id#get
~subnet_id:(subnet_id ~aws_cli t)
~secgrp_id:(security_group ~aws_cli t) in
seq_succeeds_or ~silent:false ~name:(sprintf "Ensure-EFS-%s" t.name)
~clean_up:[fail] [
Aws_cli.configure aws_cli;
file_system_id#build;
(* sayl "EFS-%s: File-system-ID: %s" [string t.name; file_system_id#get]; *)
saylp t "Using: Subnet: %s, Secgrp: %s" [
subnet_id ~aws_cli t;
security_group ~aws_cli t;
];
mount_target_id#build;
ensure_nfs_traffic_in_security_group t
~security_group:(security_group ~aws_cli t);
wait_for_mount_target_available t ~mount_target_id:mount_target_id#get;
mount t ~mount_target_id:mount_target_id#get;
]
let full_mount_script ?owner t =
let aws_cli = Aws_cli.guess () in
let file_system_id = get_or_create_file_system_id t in
let mount_target_id =
get_or_create_mount_target t ~fs_id:file_system_id#get
~subnet_id:(subnet_id ~aws_cli t)
~secgrp_id:(security_group ~aws_cli t) in
Genspio_edsl.seq_succeeds_or
~silent:false
~clean_up:[fail]
~name:"Mount EFS" [
Aws_cli.configure aws_cli;
file_system_id#build;
mount_target_id#build;
mount t ~mount_target_id:mount_target_id#get;
begin match owner with
| None -> nop
| Some (user, grp) ->
call [string "sudo";
string "chown"; string (sprintf "%s:%s" user grp);
mount_point t]
end
]
let describe ~aws_cli t =
let file_system_id = get_or_create_file_system_id t in
seq [
file_system_id#fill;
if_seq (file_system_id#get =$= string "null")
~t:[
saylp t "File-system-id not available; \
list of all visible file-systems:" [];
output_markdown_code "" begin
aws_efs [string "describe-file-systems";
string "--output"; string "text"];
end;
]
~e:[
saylp t "File-system-id: %s:" [file_system_id#get];
output_markdown_code "json" begin
aws_efs [string "describe-file-systems";
string "--file-system-id"; file_system_id#get;
string "--output"; string "json"];
end;
saylp t "Mount-Targets:" [];
output_markdown_code "" begin
aws_efs [string "describe-mount-targets";
string "--file-system-id"; file_system_id#get;
string "--output"; string "text"];
end;
];
saylp t "Local-mount:" [];
output_markdown_code "" begin
exec ["mount"] ||> call [string "grep"; mount_point t];
end;
saylp t "Using: Subnet: %s, Secgrp: %s" [
subnet_id ~aws_cli t;
security_group ~aws_cli t;
];
saylp t "Mount-command:" [];
output_markdown_code "" begin
let mount_target_id =
get_or_create_mount_target t ~fs_id:file_system_id#get
~subnet_id:(subnet_id ~aws_cli t)
~secgrp_id:(security_group ~aws_cli t) in
let mt_ip_address =
aws_efs [
string "describe-mount-targets";
string "--mount-target-id"; mount_target_id#get;
string "--output"; string "text";
string "--query"; string "MountTargets[].IpAddress";
]
||> tr_remove_new_lines
|> output_as_string in
call [
string "printf"; string "%s\n";
string_concat [
string "sudo mount -t nfs4 \
-o nfsvers=4.1,rsize=1048576,wsize=1048576,\
hard,timeo=600,retrans=2 ";
mt_ip_address; string ":/ ";
mount_point t;
];
]
end;
saylp t "Done." [];
]
let destroy ~aws_cli t =
let file_system_id = get_or_create_file_system_id t in
let mount_target_id =
get_or_create_mount_target t ~fs_id:file_system_id#get
~subnet_id:(subnet_id ~aws_cli t)
~secgrp_id:(security_group ~aws_cli t) in
let fs_id = file_system_id#get in
let mt_id = mount_target_id#get in
let number_of_mount_targets =
get_successful_single_string
(aws_efs_strings ["describe-file-systems";
"--creation-token"; t.name;
"--query"; "FileSystems[0].NumberOfMountTargets"])
~or_else:(fail)
in
seq [
file_system_id#fill;
mount_target_id#fill_or_null;
saylp t "File-system-ID: %s, Mount-Target-ID: %s" [fs_id; mt_id];
saylp t "Unmounting `%s`..." [mount_point t];
output_markdown_code "" begin
call [string "sudo"; string "umount";
string "-f"; string "-l"; mount_point t];
end;
if_seq ((mt_id <$> string "") &&& (mt_id <$> string "null"))
~t:[
sayf "Deleting mount-target.";
aws_efs [string "delete-mount-target";
string "--mount-target-id"; mt_id];
];
sayl "Waiting for the FS to not be “in use”: `%s` user(s) now..." [
number_of_mount_targets;
];
seq_succeeds_or
~name:"Waiting-for-file-system-to-notice-deletion"
~silent:false
~clean_up:[fail] [
loop_until_ok
((number_of_mount_targets =$= string "0")
||| (number_of_mount_targets =$= string "null"))
~attempts:40
~sleep:4;
];
if_seq (fs_id <$> string "null")
~t:[
sayl "EFS-%s: Deleting file-system." [string t.name];
aws_efs [string "delete-file-system";
string "--file-system-id"; fs_id];
]
~e:[
sayl "EFS-%s: Already deleted." [string t.name];
];
]
end
| null | https://raw.githubusercontent.com/hammerlab/secotrec/c801a43fdb0feea98da6d3636145f948aed4e7be/src/lib/aws_efs.ml | ocaml | * Function {!Common.sayl} but ith an additional prefix/prompt.
* Get the interesting contents a command returns something like
["\"129.32.23.11\"\n"].
*
Assuming you are on an EC2 instance, [curl] a piece of meta-data,
cf.
{{:-instance-metadata.html}EC2 Instance Metadata Docs}.
sayl "IP Address to mount: %s" [mt_ip_address];
Options from:
-fs-mount-cmd-ip-addr.html
sayl "EFS-%s: File-system-ID: %s" [string t.name; file_system_id#get]; | open Common
let tr_remove_new_lines = Genspio.EDSL.exec ["tr"; "-d"; "\\n"]
type guess_value = [ `From_metadata | `Value of string ] [@@deriving yojson]
type t = {
name: string [@main];
guess_subnet: guess_value [@default `From_metadata];
guess_secgroup: guess_value [@default `From_metadata];
} [@@deriving yojson, make]
let default_mount_point t = sprintf "/mnt-%s" t.name
module To_genspio = struct
open Genspio_edsl
let saylp t fmt l =
sayl ("[EFS:%s] " ^ fmt) (string t.name :: l)
let aws_efs more =
call ([string "aws"; string "efs"] @ more)
let aws_efs_strings more =
aws_efs (List.map more ~f:string)
let get_successful_single_string cmd ~or_else =
output_as_string (cmd |> succeeds |> if_seq ~t:[] ~e:[or_else])
>> exec ["tr"; "-d"; "\\n\""]
|> output_as_string
let aws_get_or_create t name ~get ~create =
let tmp = tmp_file name in
object (self)
method fill =
tmp#set (get_successful_single_string get ~or_else:fail);
method fill_or_null =
tmp#set (get_successful_single_string
(with_redirections get [to_file (int 2) (string "/dev/null")])
~or_else:(exec ["printf";"%s\\n"; "null"]))
method build =
seq [
saylp t "Checking %s" [string name];
self#fill_or_null;
if_seq (
(tmp#get =$= string "null")
||| (tmp#get =$= string ""))
~t:[
saylp t "Building %s" [string name];
tmp#set (get_successful_single_string create ~or_else:(seq [
saylp t "Building %s failed" [string name];
fail
]));
saylp t " -> %s was just created: %s" [string name; tmp#get];
]
~e:[
saylp t " -> %s is already there: %s" [string name; tmp#get];
];
]
method get = tmp#get
end
let get_or_create_file_system_id t =
let make cmd query =
aws_efs_strings [cmd; "--creation-token"; t.name; "--query"; query] in
aws_get_or_create t "file-system-id"
~get:(make "describe-file-systems" "FileSystems[0].FileSystemId")
~create:(make "create-file-system" "FileSystemId")
let get_or_create_mount_target t ~fs_id ~subnet_id ~secgrp_id =
let get =
aws_efs [
string "describe-mount-targets";
string "--file-system-id"; fs_id;
string "--output"; string "text";
string "--query"; string "MountTargets[].MountTargetId";
] in
let create =
aws_efs [
string "create-mount-target";
string "--file-system-id"; fs_id;
string "--subnet-id"; subnet_id;
string "--security-groups"; secgrp_id;
string "--query"; string "MountTargetId";
] in
aws_get_or_create t "mount-target-id" ~get ~create
let curl_metadata_item path =
let uri =
string_concat [string "-data/"; path] in
let tmp_err =
let unique = Genspio.Language.to_one_liner path in
ksprintf tmp_file "curl-error-%s" Digest.(string unique |> to_hex) in
call [string "curl"; string "--stderr"; tmp_err#path; uri]
||> tr_remove_new_lines
|> output_as_string
let subnet_id ~aws_cli t =
match t.guess_subnet with
| `Value v -> string v
| `From_metadata ->
let macs_path = string "network/interfaces/macs/" in
let mac = curl_metadata_item macs_path in
curl_metadata_item
@@ string_concat [macs_path; mac; string "subnet-id"]
let security_group ~aws_cli t =
match t.guess_subnet with
| `Value v -> string v
| `From_metadata ->
let name = curl_metadata_item @@ string "security-groups/" in
call [
string "aws"; string "ec2"; string "describe-security-groups";
string "--group-names"; name;
string "--query"; string "SecurityGroups[0].GroupId";
]
|> get_successful_single_string ~or_else:fail
let mount_point t = string (default_mount_point t)
let ensure_nfs_traffic_in_security_group t ~security_group =
seq [
saylp t "Authorizing :2049 traffic within group %s" [security_group];
begin
let tmp_err = tmp_file "asgi-error" in
if_seq (
with_redirections
(call [
string "aws"; string "ec2"; string "authorize-security-group-ingress";
string "--group-id"; security_group;
string "--protocol"; string "tcp"; string "--port"; string "2049";
string "--source-group"; security_group;
]) [
to_file (int 2) tmp_err#path;
]
|> succeeds
)
~t:[saylp t " -> NFSv4 traffic authorized" []]
~e:[
if_seq (call [string "grep"; string "InvalidPermission.Duplicate";
tmp_err#path] |> succeeds_silently)
~t:[saylp t " -> NFSv4 traffic was already authorized" []]
~e:[
saylp t "ERROR while Authorizing NFSv4 traffic:" [];
call [string "cat"; tmp_err#path];
fail;
]
]
end;
]
let wait_for_mount_target_available t ~mount_target_id =
seq [
saylp t "Waiting for mount-target to be really available." [];
seq_succeeds_or
~name:"Waiting-for-mount-target"
~silent:false
~clean_up:[fail] [
loop_until_ok
(
(aws_efs [
string "describe-mount-targets";
string "--mount-target-id"; mount_target_id;
string "--output"; string "text";
string "--query"; string "MountTargets[].LifeCycleState";
]
||> tr_remove_new_lines
|> output_as_string)
=$= string "available")
~attempts:40
~sleep:4;
]
]
let mount t ~mount_target_id =
let mt_ip_address =
aws_efs [
string "describe-mount-targets";
string "--mount-target-id"; mount_target_id;
string "--output"; string "text";
string "--query"; string "MountTargets[].IpAddress";
]
||> tr_remove_new_lines
|> output_as_string in
seq [
call [string "sudo"; string "mkdir"; string "-p"; mount_point t];
if_seq (
exec ["mount"] ||> call [string "grep"; mt_ip_address]
||> call [string "grep"; mount_point t]
|> succeeds_silently
)
~t:[
saylp t "%s already mounted at %s:" [
string_concat [mt_ip_address; string ":/"];
mount_point t;
];
output_markdown_code "" (
exec ["mount"] ||> call [string "grep"; mt_ip_address]
||> call [string "grep"; mount_point t]
);
]
~e:[
saylp t "Mounting %s at %s" [
string_concat [mt_ip_address; string ":/"];
mount_point t;
];
call [
string "sudo"; string "mount"; string "-t"; string "nfs4";
string "-o"; string "nfsvers=4.1,rsize=1048576,wsize=1048576,\
hard,timeo=600,retrans=2";
string_concat [mt_ip_address; string ":/"];
mount_point t;
];
]
]
let ensure ~aws_cli t =
let file_system_id = get_or_create_file_system_id t in
let mount_target_id =
get_or_create_mount_target t ~fs_id:file_system_id#get
~subnet_id:(subnet_id ~aws_cli t)
~secgrp_id:(security_group ~aws_cli t) in
seq_succeeds_or ~silent:false ~name:(sprintf "Ensure-EFS-%s" t.name)
~clean_up:[fail] [
Aws_cli.configure aws_cli;
file_system_id#build;
saylp t "Using: Subnet: %s, Secgrp: %s" [
subnet_id ~aws_cli t;
security_group ~aws_cli t;
];
mount_target_id#build;
ensure_nfs_traffic_in_security_group t
~security_group:(security_group ~aws_cli t);
wait_for_mount_target_available t ~mount_target_id:mount_target_id#get;
mount t ~mount_target_id:mount_target_id#get;
]
let full_mount_script ?owner t =
let aws_cli = Aws_cli.guess () in
let file_system_id = get_or_create_file_system_id t in
let mount_target_id =
get_or_create_mount_target t ~fs_id:file_system_id#get
~subnet_id:(subnet_id ~aws_cli t)
~secgrp_id:(security_group ~aws_cli t) in
Genspio_edsl.seq_succeeds_or
~silent:false
~clean_up:[fail]
~name:"Mount EFS" [
Aws_cli.configure aws_cli;
file_system_id#build;
mount_target_id#build;
mount t ~mount_target_id:mount_target_id#get;
begin match owner with
| None -> nop
| Some (user, grp) ->
call [string "sudo";
string "chown"; string (sprintf "%s:%s" user grp);
mount_point t]
end
]
let describe ~aws_cli t =
let file_system_id = get_or_create_file_system_id t in
seq [
file_system_id#fill;
if_seq (file_system_id#get =$= string "null")
~t:[
saylp t "File-system-id not available; \
list of all visible file-systems:" [];
output_markdown_code "" begin
aws_efs [string "describe-file-systems";
string "--output"; string "text"];
end;
]
~e:[
saylp t "File-system-id: %s:" [file_system_id#get];
output_markdown_code "json" begin
aws_efs [string "describe-file-systems";
string "--file-system-id"; file_system_id#get;
string "--output"; string "json"];
end;
saylp t "Mount-Targets:" [];
output_markdown_code "" begin
aws_efs [string "describe-mount-targets";
string "--file-system-id"; file_system_id#get;
string "--output"; string "text"];
end;
];
saylp t "Local-mount:" [];
output_markdown_code "" begin
exec ["mount"] ||> call [string "grep"; mount_point t];
end;
saylp t "Using: Subnet: %s, Secgrp: %s" [
subnet_id ~aws_cli t;
security_group ~aws_cli t;
];
saylp t "Mount-command:" [];
output_markdown_code "" begin
let mount_target_id =
get_or_create_mount_target t ~fs_id:file_system_id#get
~subnet_id:(subnet_id ~aws_cli t)
~secgrp_id:(security_group ~aws_cli t) in
let mt_ip_address =
aws_efs [
string "describe-mount-targets";
string "--mount-target-id"; mount_target_id#get;
string "--output"; string "text";
string "--query"; string "MountTargets[].IpAddress";
]
||> tr_remove_new_lines
|> output_as_string in
call [
string "printf"; string "%s\n";
string_concat [
string "sudo mount -t nfs4 \
-o nfsvers=4.1,rsize=1048576,wsize=1048576,\
hard,timeo=600,retrans=2 ";
mt_ip_address; string ":/ ";
mount_point t;
];
]
end;
saylp t "Done." [];
]
let destroy ~aws_cli t =
let file_system_id = get_or_create_file_system_id t in
let mount_target_id =
get_or_create_mount_target t ~fs_id:file_system_id#get
~subnet_id:(subnet_id ~aws_cli t)
~secgrp_id:(security_group ~aws_cli t) in
let fs_id = file_system_id#get in
let mt_id = mount_target_id#get in
let number_of_mount_targets =
get_successful_single_string
(aws_efs_strings ["describe-file-systems";
"--creation-token"; t.name;
"--query"; "FileSystems[0].NumberOfMountTargets"])
~or_else:(fail)
in
seq [
file_system_id#fill;
mount_target_id#fill_or_null;
saylp t "File-system-ID: %s, Mount-Target-ID: %s" [fs_id; mt_id];
saylp t "Unmounting `%s`..." [mount_point t];
output_markdown_code "" begin
call [string "sudo"; string "umount";
string "-f"; string "-l"; mount_point t];
end;
if_seq ((mt_id <$> string "") &&& (mt_id <$> string "null"))
~t:[
sayf "Deleting mount-target.";
aws_efs [string "delete-mount-target";
string "--mount-target-id"; mt_id];
];
sayl "Waiting for the FS to not be “in use”: `%s` user(s) now..." [
number_of_mount_targets;
];
seq_succeeds_or
~name:"Waiting-for-file-system-to-notice-deletion"
~silent:false
~clean_up:[fail] [
loop_until_ok
((number_of_mount_targets =$= string "0")
||| (number_of_mount_targets =$= string "null"))
~attempts:40
~sleep:4;
];
if_seq (fs_id <$> string "null")
~t:[
sayl "EFS-%s: Deleting file-system." [string t.name];
aws_efs [string "delete-file-system";
string "--file-system-id"; fs_id];
]
~e:[
sayl "EFS-%s: Already deleted." [string t.name];
];
]
end
|
91cbf5606211391a08abb0519bb8465329f44dd333c332ff9dbdf9bbeb62c4ca | chef/chef-server | oc_chef_wm_named_cookbook_artifact.erl | -*- erlang - indent - level : 4;indent - tabs - mode : nil ; fill - column : 92 -*-
%% ex: ts=4 sw=4 et
@author < >
Copyright Chef Software , Inc. All Rights Reserved .
-module(oc_chef_wm_named_cookbook_artifact).
-include("oc_chef_wm.hrl").
-behaviour(chef_wm).
Webmachine resource callbacks
-mixin([{oc_chef_wm_base, [content_types_accepted/2,
content_types_provided/2,
finish_request/2,
malformed_request/2,
ping/2,
forbidden/2,
is_authorized/2,
service_available/2,
validate_request/3]}]).
-export([auth_info/2,
init_resource_state/1,
init/1,
request_type/0,
allowed_methods/2,
to_json/2,
malformed_request_message/3]).
init(Config) ->
oc_chef_wm_base:init(?MODULE, Config).
init_resource_state(_Config) ->
{ok, #cookbook_artifacts_state{}}.
request_type() ->
"named_cookbook_artifact".
allowed_methods(Req, State) ->
{['GET'], Req, State}.
-spec auth_info(wm_req(), chef_wm:base_state()) ->
chef_wm:auth_info_return().
auth_info(Req, #base_state{organization_guid = OrgId,
chef_db_context = DbContext,
resource_args = single_artifact} = State) ->
Name = wrq:path_info(name, Req),
BaseRec = #oc_chef_cookbook_artifact{org_id = OrgId,
name = Name},
case chef_db:fetch(BaseRec, DbContext) of
not_found ->
Message = chef_wm_util:error_message_envelope(<<"not_found">>),
Req1 = chef_wm_util:set_json_body(Req, Message),
{{halt, 404}, Req1, State#base_state{log_msg = cookbook_artifact_not_found}};
#oc_chef_cookbook_artifact{} = CookbookArtifactRec ->
auth_info_succeeded([CookbookArtifactRec], Req, State)
end;
auth_info(Req, #base_state{organization_guid = OrgId,
chef_db_context = DbContext,
resource_args = list_artifacts} = State) ->
case chef_db:fetch_multi(oc_chef_cookbook_artifact,
DbContext,
list_cookbook_artifacts_by_org_id,
[OrgId]) of
not_found ->
auth_info_succeeded([], Req, State);
CookbookArtifactRecs when erlang:is_list(CookbookArtifactRecs) ->
auth_info_succeeded(CookbookArtifactRecs, Req, State)
end.
auth_info_succeeded(Recs, Req, #base_state{resource_state = ResourceState} = State) ->
NewResourceState = ResourceState#cookbook_artifacts_state{oc_chef_cookbook_artifacts = Recs},
State1 = State#base_state{resource_state = NewResourceState},
{{container, cookbook_artifact}, Req, State1}.
to_json(Req, #base_state{resource_state = #cookbook_artifacts_state{
oc_chef_cookbook_artifacts = CookbookArtifactRecs
}} = State) ->
CAFun = oc_chef_wm_routes:bulk_route_fun(cookbook_artifact, Req),
ArtifactList = lists:map(
fun(#oc_chef_cookbook_artifact{version_identifiers = Identifiers,
name = CAVName}) ->
CAVFun = oc_chef_wm_routes:bulk_route_fun(cookbook_artifact_version, CAVName, Req),
VersionList = lists:map(
fun(Identifier) ->
{[{<<"url">>, CAVFun(Identifier)},
{<<"identifier">>, Identifier}]}
end,
Identifiers
),
ArtifactEjson = {[{<<"url">>, CAFun(CAVName)},
{<<"versions">>, VersionList}]},
{CAVName, ArtifactEjson}
end,
CookbookArtifactRecs
),
{jiffy:encode({ArtifactList}), Req, State}.
malformed_request_message(Any, _Req, _state) ->
error({unexpected_malformed_request_message, Any}).
| null | https://raw.githubusercontent.com/chef/chef-server/6d31841ecd73d984d819244add7ad6ebac284323/src/oc_erchef/apps/oc_chef_wm/src/oc_chef_wm_named_cookbook_artifact.erl | erlang | ex: ts=4 sw=4 et | -*- erlang - indent - level : 4;indent - tabs - mode : nil ; fill - column : 92 -*-
@author < >
Copyright Chef Software , Inc. All Rights Reserved .
-module(oc_chef_wm_named_cookbook_artifact).
-include("oc_chef_wm.hrl").
-behaviour(chef_wm).
Webmachine resource callbacks
-mixin([{oc_chef_wm_base, [content_types_accepted/2,
content_types_provided/2,
finish_request/2,
malformed_request/2,
ping/2,
forbidden/2,
is_authorized/2,
service_available/2,
validate_request/3]}]).
-export([auth_info/2,
init_resource_state/1,
init/1,
request_type/0,
allowed_methods/2,
to_json/2,
malformed_request_message/3]).
init(Config) ->
oc_chef_wm_base:init(?MODULE, Config).
init_resource_state(_Config) ->
{ok, #cookbook_artifacts_state{}}.
request_type() ->
"named_cookbook_artifact".
allowed_methods(Req, State) ->
{['GET'], Req, State}.
-spec auth_info(wm_req(), chef_wm:base_state()) ->
chef_wm:auth_info_return().
auth_info(Req, #base_state{organization_guid = OrgId,
chef_db_context = DbContext,
resource_args = single_artifact} = State) ->
Name = wrq:path_info(name, Req),
BaseRec = #oc_chef_cookbook_artifact{org_id = OrgId,
name = Name},
case chef_db:fetch(BaseRec, DbContext) of
not_found ->
Message = chef_wm_util:error_message_envelope(<<"not_found">>),
Req1 = chef_wm_util:set_json_body(Req, Message),
{{halt, 404}, Req1, State#base_state{log_msg = cookbook_artifact_not_found}};
#oc_chef_cookbook_artifact{} = CookbookArtifactRec ->
auth_info_succeeded([CookbookArtifactRec], Req, State)
end;
auth_info(Req, #base_state{organization_guid = OrgId,
chef_db_context = DbContext,
resource_args = list_artifacts} = State) ->
case chef_db:fetch_multi(oc_chef_cookbook_artifact,
DbContext,
list_cookbook_artifacts_by_org_id,
[OrgId]) of
not_found ->
auth_info_succeeded([], Req, State);
CookbookArtifactRecs when erlang:is_list(CookbookArtifactRecs) ->
auth_info_succeeded(CookbookArtifactRecs, Req, State)
end.
auth_info_succeeded(Recs, Req, #base_state{resource_state = ResourceState} = State) ->
NewResourceState = ResourceState#cookbook_artifacts_state{oc_chef_cookbook_artifacts = Recs},
State1 = State#base_state{resource_state = NewResourceState},
{{container, cookbook_artifact}, Req, State1}.
to_json(Req, #base_state{resource_state = #cookbook_artifacts_state{
oc_chef_cookbook_artifacts = CookbookArtifactRecs
}} = State) ->
CAFun = oc_chef_wm_routes:bulk_route_fun(cookbook_artifact, Req),
ArtifactList = lists:map(
fun(#oc_chef_cookbook_artifact{version_identifiers = Identifiers,
name = CAVName}) ->
CAVFun = oc_chef_wm_routes:bulk_route_fun(cookbook_artifact_version, CAVName, Req),
VersionList = lists:map(
fun(Identifier) ->
{[{<<"url">>, CAVFun(Identifier)},
{<<"identifier">>, Identifier}]}
end,
Identifiers
),
ArtifactEjson = {[{<<"url">>, CAFun(CAVName)},
{<<"versions">>, VersionList}]},
{CAVName, ArtifactEjson}
end,
CookbookArtifactRecs
),
{jiffy:encode({ArtifactList}), Req, State}.
malformed_request_message(Any, _Req, _state) ->
error({unexpected_malformed_request_message, Any}).
|
c98b2a632553fa6346952b93e9d73fa548c9268ebc2c5ec646279ae0212988a4 | FlowerWrong/mblog | lib_chan_cs.erl | %% ---
Excerpted from " Programming Erlang , Second Edition " ,
published by The Pragmatic Bookshelf .
%% Copyrights apply to this code. It may not be used to create training material,
%% courses, books, articles, and the like. Contact us if you are in doubt.
%% We make no guarantees that this code is fit for any purpose.
%% Visit for more book information.
%%---
-module(lib_chan_cs).
cs stands for client_server
-export([start_raw_server/4, start_raw_client/3]).
-export([stop/1]).
-export([children/1]).
start_raw_server(Port , Fun , Max , PacketLength )
This server accepts up to connections on Port
The * first * time a connection is made to Port
%% Then Fun(Socket) is called.
%% Thereafter messages to the socket result in messages to the handler.
PacketLength is usually 0,1,2 or 4 ( see the inet manual page for details ) .
%% tcp_is typically used as follows:
%% To setup a listener
%% start_agent(Port) ->
%% process_flag(trap_exit, true),
%% lib_chan_server:start_raw_server(Port,
%% fun(Socket) -> input_handler(Socket) end,
%% 15, 0).
start_raw_client(Host, Port, PacketLength) ->
gen_tcp:connect(Host, Port,
[binary, {active, true}, {packet, PacketLength}]).
%% Note when start_raw_server returns it should be ready to
%% Immediately accept connections
start_raw_server(Port, Fun, Max, PacketLength) ->
Name = port_name(Port),
case whereis(Name) of
undefined ->
Self = self(),
Pid = spawn_link(fun() ->
cold_start(Self,Port,Fun,Max,PacketLength)
end),
receive
{Pid, ok} ->
register(Name, Pid),
{ok, self()};
{Pid, Error} ->
Error
end;
_Pid ->
{error, already_started}
end.
stop(Port) when integer(Port) ->
Name = port_name(Port),
case whereis(Name) of
undefined ->
not_started;
Pid ->
exit(Pid, kill),
(catch unregister(Name)),
stopped
end.
children(Port) when integer(Port) ->
port_name(Port) ! {children, self()},
receive
{session_server, Reply} -> Reply
end.
port_name(Port) when integer(Port) ->
list_to_atom("portServer" ++ integer_to_list(Port)).
cold_start(Master, Port, Fun, Max, PacketLength) ->
process_flag(trap_exit, true),
%% io:format("Starting a port server on ~p...~n",[Port]),
case gen_tcp:listen(Port, [binary,
%% {dontroute, true},
{nodelay,true},
{packet, PacketLength},
{reuseaddr, true},
{active, true}]) of
{ok, Listen} ->
%% io:format("Listening to:~p~n",[Listen]),
Master ! {self(), ok},
New = start_accept(Listen, Fun),
%% Now we're ready to run
socket_loop(Listen, New, [], Fun, Max);
Error ->
Master ! {self(), Error}
end.
socket_loop(Listen, New, Active, Fun, Max) ->
receive
{istarted, New} ->
Active1 = [New|Active],
possibly_start_another(false,Listen,Active1,Fun,Max);
{'EXIT', New, _Why} ->
io : format("Child exit=~p ~ n",[Why ] ) ,
possibly_start_another(false,Listen,Active,Fun,Max);
{'EXIT', Pid, _Why} ->
io : format("Child exit=~p ~ n",[Why ] ) ,
Active1 = lists:delete(Pid, Active),
possibly_start_another(New,Listen,Active1,Fun,Max);
{children, From} ->
From ! {session_server, Active},
socket_loop(Listen,New,Active,Fun,Max);
_Other ->
socket_loop(Listen,New,Active,Fun,Max)
end.
possibly_start_another(New, Listen, Active, Fun, Max)
when pid(New) ->
socket_loop(Listen, New, Active, Fun, Max);
possibly_start_another(false, Listen, Active, Fun, Max) ->
case length(Active) of
N when N < Max ->
New = start_accept(Listen, Fun),
socket_loop(Listen, New, Active, Fun,Max);
_ ->
socket_loop(Listen, false, Active, Fun, Max)
end.
start_accept(Listen, Fun) ->
S = self(),
spawn_link(fun() -> start_child(S, Listen, Fun) end).
start_child(Parent, Listen, Fun) ->
case gen_tcp:accept(Listen) of
{ok, Socket} ->
Parent ! {istarted,self()}, % tell the controller
inet:setopts(Socket, [{packet,4},
binary,
{nodelay,true},
{active, true}]),
%% before we activate socket
%% io:format("running the child:~p Fun=~p~n", [Socket, Fun]),
process_flag(trap_exit, true),
case (catch Fun(Socket)) of
{'EXIT', normal} ->
true;
{'EXIT', Why} ->
io:format("Port process dies with exit:~p~n",[Why]),
true;
_ ->
%% not an exit so everything's ok
true
end
end.
| null | https://raw.githubusercontent.com/FlowerWrong/mblog/3233ede938d2019a7b57391405197ac19c805b27/categories/erlang/demo/jaerlang2_code/socket_dist/lib_chan_cs.erl | erlang | ---
Copyrights apply to this code. It may not be used to create training material,
courses, books, articles, and the like. Contact us if you are in doubt.
We make no guarantees that this code is fit for any purpose.
Visit for more book information.
---
Then Fun(Socket) is called.
Thereafter messages to the socket result in messages to the handler.
tcp_is typically used as follows:
To setup a listener
start_agent(Port) ->
process_flag(trap_exit, true),
lib_chan_server:start_raw_server(Port,
fun(Socket) -> input_handler(Socket) end,
15, 0).
Note when start_raw_server returns it should be ready to
Immediately accept connections
io:format("Starting a port server on ~p...~n",[Port]),
{dontroute, true},
io:format("Listening to:~p~n",[Listen]),
Now we're ready to run
tell the controller
before we activate socket
io:format("running the child:~p Fun=~p~n", [Socket, Fun]),
not an exit so everything's ok | Excerpted from " Programming Erlang , Second Edition " ,
published by The Pragmatic Bookshelf .
-module(lib_chan_cs).
cs stands for client_server
-export([start_raw_server/4, start_raw_client/3]).
-export([stop/1]).
-export([children/1]).
start_raw_server(Port , Fun , Max , PacketLength )
This server accepts up to connections on Port
The * first * time a connection is made to Port
PacketLength is usually 0,1,2 or 4 ( see the inet manual page for details ) .
start_raw_client(Host, Port, PacketLength) ->
gen_tcp:connect(Host, Port,
[binary, {active, true}, {packet, PacketLength}]).
start_raw_server(Port, Fun, Max, PacketLength) ->
Name = port_name(Port),
case whereis(Name) of
undefined ->
Self = self(),
Pid = spawn_link(fun() ->
cold_start(Self,Port,Fun,Max,PacketLength)
end),
receive
{Pid, ok} ->
register(Name, Pid),
{ok, self()};
{Pid, Error} ->
Error
end;
_Pid ->
{error, already_started}
end.
stop(Port) when integer(Port) ->
Name = port_name(Port),
case whereis(Name) of
undefined ->
not_started;
Pid ->
exit(Pid, kill),
(catch unregister(Name)),
stopped
end.
children(Port) when integer(Port) ->
port_name(Port) ! {children, self()},
receive
{session_server, Reply} -> Reply
end.
port_name(Port) when integer(Port) ->
list_to_atom("portServer" ++ integer_to_list(Port)).
cold_start(Master, Port, Fun, Max, PacketLength) ->
process_flag(trap_exit, true),
case gen_tcp:listen(Port, [binary,
{nodelay,true},
{packet, PacketLength},
{reuseaddr, true},
{active, true}]) of
{ok, Listen} ->
Master ! {self(), ok},
New = start_accept(Listen, Fun),
socket_loop(Listen, New, [], Fun, Max);
Error ->
Master ! {self(), Error}
end.
socket_loop(Listen, New, Active, Fun, Max) ->
receive
{istarted, New} ->
Active1 = [New|Active],
possibly_start_another(false,Listen,Active1,Fun,Max);
{'EXIT', New, _Why} ->
io : format("Child exit=~p ~ n",[Why ] ) ,
possibly_start_another(false,Listen,Active,Fun,Max);
{'EXIT', Pid, _Why} ->
io : format("Child exit=~p ~ n",[Why ] ) ,
Active1 = lists:delete(Pid, Active),
possibly_start_another(New,Listen,Active1,Fun,Max);
{children, From} ->
From ! {session_server, Active},
socket_loop(Listen,New,Active,Fun,Max);
_Other ->
socket_loop(Listen,New,Active,Fun,Max)
end.
possibly_start_another(New, Listen, Active, Fun, Max)
when pid(New) ->
socket_loop(Listen, New, Active, Fun, Max);
possibly_start_another(false, Listen, Active, Fun, Max) ->
case length(Active) of
N when N < Max ->
New = start_accept(Listen, Fun),
socket_loop(Listen, New, Active, Fun,Max);
_ ->
socket_loop(Listen, false, Active, Fun, Max)
end.
start_accept(Listen, Fun) ->
S = self(),
spawn_link(fun() -> start_child(S, Listen, Fun) end).
start_child(Parent, Listen, Fun) ->
case gen_tcp:accept(Listen) of
{ok, Socket} ->
inet:setopts(Socket, [{packet,4},
binary,
{nodelay,true},
{active, true}]),
process_flag(trap_exit, true),
case (catch Fun(Socket)) of
{'EXIT', normal} ->
true;
{'EXIT', Why} ->
io:format("Port process dies with exit:~p~n",[Why]),
true;
_ ->
true
end
end.
|
8a11f818fe33d7a5dd87abd54191fe56e4c8a2b08682b1a4c8e9ad5bcc7b12c9 | eslick/cl-stdutils | stdutils.lisp | -*- Mode : LISP ; Syntax : COMMON - LISP ; Package : CL - USER ; Base : 10 -*-
;;; ASD File for Think Utilities Module
(defpackage #:utils.system
(:use #:cl #:asdf))
(in-package #:utils.system)
mop hacks
# + ( or allegro cmu lispworks openmcl )
;;(pushnew :kmr-mop cl:*features*)
(defsystem #:utils
:description "Think Utilities: A set of helpful utilities used by the Think system"
:version "1.0"
:author "Ian Eslick <>"
:licence "Public Domain"
:components ((:file "package")
(:file "system" :depends-on ("package")) ;; global system macros
import and re - export : and : kmrcl
(:file "lists" :depends-on ("system")) ;; list related utilities, tree walking, searching, etc
Useful macros , such as aif , awhen , etc .
(:file "iteration" :depends-on ("system")) ;; iteration related utilities
(:file "conditionals" :depends-on ("lists")) ;; anaphoric macros,
(:file "map" :depends-on ("iteration")) ;; map related utilities
(:file "iteration2" :depends-on ("map")) ;; more iteration related utilities
(:file "shorthand" :depends-on ("map" "conditionals")) ;; abbreviations
(:file "functions" :depends-on ("shorthand")) ;; function utilities
(:file "math" :depends-on ("functions")) ;; math tools
shortcuts for dealing with places & setf macros
(:file "clos" :depends-on ("functions")) ;; utilities for clos
(:file "hashutil" :depends-on ("shorthand")) ;; a wrapper around the hash function
(:file "file" :depends-on ("shorthand")) ;; file utilities
(:file "bitvector" :depends-on ("shorthand")) ;; bitvector utilites (empty for now)
(:file "arrays" :depends-on ("shorthand")) ;; Various array-oriented utilities
(:file "random") ;; Random generation and manipulation
(:file "regex"
:depends-on ("imports" "arrays" "iteration")) ;; Regular expression enhancement
(:file "strings" :depends-on ("regex")) ;; String utilities of various kinds
Dependency for allegro libs
(:file "split-sequence") ;; Sequence manipulation
if *
(:file "interactive" :depends-on ("functions" "clos")) ;; useful repl tools
;; Larger scale utilities, but common enough to be included here
(:file "tokenizer" :depends-on ("lists")) ;; a simple configurable tokenizer
(:file "match" :depends-on ("shorthand")) ;; structure matching with variables
(:file "log") ;; simple generic logging facility for production and debugging use
(: file " plotutils " ) ; ; additions to cllibs gnuplot interface
(:file "time" :depends-on ("shorthand")) ;; Utility set for parsing time strings
(:file "prof" :depends-on ("shorthand")) ;; profiling support
;; (:file "monitor") ;; a perf monitoring system; package 'monitor'
(:file "threads")
(:file "matrix")
;; Data structures; move to a different library later?
(:file "queue" :depends-on ("shorthand"))
(:file "cache" :depends-on ("queue" "hashutil"))
(:file "collections" :depends-on ("shorthand" "clos" "hashutil")) ;; collection ds
(:file "wordseq" :depends-on ("shorthand")) ;; manipulate sequences of dictionary words
(:file "gds" :depends-on ("clos" "collections"))
(:file "table" :depends-on ("gds"))
(:file "iterator" :depends-on ("table"))
(:file "assoc-table" :depends-on ("iterator"))
(:file "hashed-table" :depends-on ("assoc-table"))
(:file "vector-keyed-table" :depends-on ("hashed-table"))
;; (:file "vechash" :depends-on ("shorthand")) ;; a fast hash table for vector keys
;; (:file "sbtree" :depends-on ("shorthand"))
)
:serial t
:in-order-to ((load-op (compile-op :utils)))
:depends-on (:cl-ppcre)) | null | https://raw.githubusercontent.com/eslick/cl-stdutils/4a4e5a4036b815318282da5dee2a22825369137b/src/stdutils.lisp | lisp | Syntax : COMMON - LISP ; Package : CL - USER ; Base : 10 -*-
ASD File for Think Utilities Module
(pushnew :kmr-mop cl:*features*)
global system macros
list related utilities, tree walking, searching, etc
iteration related utilities
anaphoric macros,
map related utilities
more iteration related utilities
abbreviations
function utilities
math tools
utilities for clos
a wrapper around the hash function
file utilities
bitvector utilites (empty for now)
Various array-oriented utilities
Random generation and manipulation
Regular expression enhancement
String utilities of various kinds
Sequence manipulation
useful repl tools
Larger scale utilities, but common enough to be included here
a simple configurable tokenizer
structure matching with variables
simple generic logging facility for production and debugging use
; additions to cllibs gnuplot interface
Utility set for parsing time strings
profiling support
(:file "monitor") ;; a perf monitoring system; package 'monitor'
Data structures; move to a different library later?
collection ds
manipulate sequences of dictionary words
(:file "vechash" :depends-on ("shorthand")) ;; a fast hash table for vector keys
(:file "sbtree" :depends-on ("shorthand"))
|
(defpackage #:utils.system
(:use #:cl #:asdf))
(in-package #:utils.system)
mop hacks
# + ( or allegro cmu lispworks openmcl )
(defsystem #:utils
:description "Think Utilities: A set of helpful utilities used by the Think system"
:version "1.0"
:author "Ian Eslick <>"
:licence "Public Domain"
:components ((:file "package")
import and re - export : and : kmrcl
Useful macros , such as aif , awhen , etc .
shortcuts for dealing with places & setf macros
(:file "regex"
Dependency for allegro libs
if *
(:file "threads")
(:file "matrix")
(:file "queue" :depends-on ("shorthand"))
(:file "cache" :depends-on ("queue" "hashutil"))
(:file "gds" :depends-on ("clos" "collections"))
(:file "table" :depends-on ("gds"))
(:file "iterator" :depends-on ("table"))
(:file "assoc-table" :depends-on ("iterator"))
(:file "hashed-table" :depends-on ("assoc-table"))
(:file "vector-keyed-table" :depends-on ("hashed-table"))
)
:serial t
:in-order-to ((load-op (compile-op :utils)))
:depends-on (:cl-ppcre)) |
7721883be227a26e131ed7ecb8f640905c26fa3d9e4d8bedc22ef40fe3824a50 | marick/fp-oo | klass-2.clj | Exercise 2
;; I'll mark classes invisible by tagging them with metadata.
(def invisible
(fn [class]
(assoc class :__invisible__ true)))
(def invisible?
(fn [class-symbol] (:__invisible__ (eval class-symbol))))
;; Change the already-defined metaclasses to be invisible:
(def MetaAnything (invisible MetaAnything))
(def MetaKlass (invisible MetaKlass))
(def MetaPoint (invisible MetaPoint))
;; Ancestors just removes invisible classes from the
;; reversed lineage.
(def Klass
(assoc-in Klass
[:__instance_methods__ :ancestors]
(fn [class]
(remove invisible?
(reverse (lineage (:__own_symbol__ class)))))))
;; New metaclasses need to be created to be invisible.
(def MetaKlass
(assoc-in MetaKlass
[:__instance_methods__ :new]
(fn [this
new-class-symbol superclass-symbol
instance-methods class-methods]
(install
VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV new
(invisible
;; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
(basic-class (metasymbol new-class-symbol)
:left 'Klass
:up 'MetaAnything
class-methods)))
;; Class
(install
(basic-class new-class-symbol
:left (metasymbol new-class-symbol)
:up superclass-symbol
instance-methods)))))
;; Test data:
(send-to Klass :new
'ColoredPoint 'Point
{
:color :color
:add-instance-values
(fn [this x y color]
;; This is a hack because we haven't implemented
;; `send-super` yet.
(merge (send-to Point :new x y)
(assoc this :color color)))
}
{
:origin (fn [class]
(send-to class :new 0 0 'white))
})
(prn (send-to Anything :ancestors))
(prn (send-to Klass :ancestors))
(prn (send-to Point :ancestors))
(prn (send-to ColoredPoint :ancestors))
| null | https://raw.githubusercontent.com/marick/fp-oo/434937826d794d6fe02b3e9a62cf5b4fbc314412/solutions/pieces/klass-2.clj | clojure | I'll mark classes invisible by tagging them with metadata.
Change the already-defined metaclasses to be invisible:
Ancestors just removes invisible classes from the
reversed lineage.
New metaclasses need to be created to be invisible.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Class
Test data:
This is a hack because we haven't implemented
`send-super` yet. | Exercise 2
(def invisible
(fn [class]
(assoc class :__invisible__ true)))
(def invisible?
(fn [class-symbol] (:__invisible__ (eval class-symbol))))
(def MetaAnything (invisible MetaAnything))
(def MetaKlass (invisible MetaKlass))
(def MetaPoint (invisible MetaPoint))
(def Klass
(assoc-in Klass
[:__instance_methods__ :ancestors]
(fn [class]
(remove invisible?
(reverse (lineage (:__own_symbol__ class)))))))
(def MetaKlass
(assoc-in MetaKlass
[:__instance_methods__ :new]
(fn [this
new-class-symbol superclass-symbol
instance-methods class-methods]
(install
VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV new
(invisible
(basic-class (metasymbol new-class-symbol)
:left 'Klass
:up 'MetaAnything
class-methods)))
(install
(basic-class new-class-symbol
:left (metasymbol new-class-symbol)
:up superclass-symbol
instance-methods)))))
(send-to Klass :new
'ColoredPoint 'Point
{
:color :color
:add-instance-values
(fn [this x y color]
(merge (send-to Point :new x y)
(assoc this :color color)))
}
{
:origin (fn [class]
(send-to class :new 0 0 'white))
})
(prn (send-to Anything :ancestors))
(prn (send-to Klass :ancestors))
(prn (send-to Point :ancestors))
(prn (send-to ColoredPoint :ancestors))
|
d8b33b1d11175b54a29d1f597fa94d007d2738df3bfe7033e016b0bb0c2da68a | lpgauth/marina | marina_profile.erl | -module(marina_profile).
-include("test.hrl").
-export([
fprofx/0
]).
-define(N, 1000).
-define(P, 20).
%% public
-spec fprofx() -> ok.
fprofx() ->
Filenames = filelib:wildcard("_build/default/lib/*/ebin/*.beam"),
Rootnames = [filename:rootname(Filename, ".beam") ||
Filename <- Filenames],
lists:foreach(fun code:load_abs/1, Rootnames),
marina_app:start(),
setup(),
marina_app:stop(),
fprofx:start(),
{ok, Tracer} = fprofx:profile(start),
fprofx:trace([start, {procs, new}, {tracer, Tracer}]),
Self = self(),
Query = <<"SELECT * FROM test.users WHERE key = ?;">>,
Uid = <<153, 73, 45, 254, 217, 74, 17, 228, 175, 57, 88,
244, 65, 16, 117, 125>>,
Opts = [{skip_metadata, true}],
marina_app:start(),
[spawn(fun () ->
[{ok, _} = marina:reusable_query(Query, [Uid], ?CONSISTENCY_LOCAL_ONE,
Opts, ?TIMEOUT) || _ <- lists:seq(1, ?N)],
Self ! exit
end) || _ <- lists:seq(1, ?P)],
wait(),
fprofx:trace(stop),
fprofx:analyse([totals, {dest, ""}]),
fprofx:stop(),
marina_app:stop(),
ok.
%% private
setup() ->
[marina:query(Query, [], ?CONSISTENCY_LOCAL_ONE, [], ?TIMEOUT) || Query <- [
<<"DROP KEYSPACE test;">>,
<<"CREATE KEYSPACE test WITH REPLICATION =
{'class':'SimpleStrategy', 'replication_factor':1};">>,
<<"CREATE TABLE test.users (key uuid, column1 text,
column2 text, value blob, PRIMARY KEY (key, column1, column2));">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test', 'test2',
intAsBlob(0))">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test2', 'test3',
intAsBlob(0))">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test3', 'test4',
intAsBlob(0))">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test4', 'test5',
intAsBlob(0))">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test5', 'test6',
intAsBlob(0))">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test6', 'test7',
intAsBlob(0))">>
]].
wait() ->
wait(?P).
wait(0) ->
ok;
wait(X) ->
receive
exit ->
wait(X - 1)
end.
| null | https://raw.githubusercontent.com/lpgauth/marina/2d775c003f58d125bb38e7c953c30c36aebc72c6/test/marina_profile.erl | erlang | public
private | -module(marina_profile).
-include("test.hrl").
-export([
fprofx/0
]).
-define(N, 1000).
-define(P, 20).
-spec fprofx() -> ok.
fprofx() ->
Filenames = filelib:wildcard("_build/default/lib/*/ebin/*.beam"),
Rootnames = [filename:rootname(Filename, ".beam") ||
Filename <- Filenames],
lists:foreach(fun code:load_abs/1, Rootnames),
marina_app:start(),
setup(),
marina_app:stop(),
fprofx:start(),
{ok, Tracer} = fprofx:profile(start),
fprofx:trace([start, {procs, new}, {tracer, Tracer}]),
Self = self(),
Query = <<"SELECT * FROM test.users WHERE key = ?;">>,
Uid = <<153, 73, 45, 254, 217, 74, 17, 228, 175, 57, 88,
244, 65, 16, 117, 125>>,
Opts = [{skip_metadata, true}],
marina_app:start(),
[spawn(fun () ->
[{ok, _} = marina:reusable_query(Query, [Uid], ?CONSISTENCY_LOCAL_ONE,
Opts, ?TIMEOUT) || _ <- lists:seq(1, ?N)],
Self ! exit
end) || _ <- lists:seq(1, ?P)],
wait(),
fprofx:trace(stop),
fprofx:analyse([totals, {dest, ""}]),
fprofx:stop(),
marina_app:stop(),
ok.
setup() ->
[marina:query(Query, [], ?CONSISTENCY_LOCAL_ONE, [], ?TIMEOUT) || Query <- [
<<"DROP KEYSPACE test;">>,
<<"CREATE KEYSPACE test WITH REPLICATION =
{'class':'SimpleStrategy', 'replication_factor':1};">>,
<<"CREATE TABLE test.users (key uuid, column1 text,
column2 text, value blob, PRIMARY KEY (key, column1, column2));">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test', 'test2',
intAsBlob(0))">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test2', 'test3',
intAsBlob(0))">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test3', 'test4',
intAsBlob(0))">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test4', 'test5',
intAsBlob(0))">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test5', 'test6',
intAsBlob(0))">>,
<<"INSERT INTO test.users (key, column1, column2, value)
values (99492dfe-d94a-11e4-af39-58f44110757d, 'test6', 'test7',
intAsBlob(0))">>
]].
wait() ->
wait(?P).
wait(0) ->
ok;
wait(X) ->
receive
exit ->
wait(X - 1)
end.
|
993c721f20319d6e6bf8b35e76aedbd5e64671169b0814e2dd353a9d6ff2ae70 | expipiplus1/vulkan | VK_NV_copy_memory_indirect.hs | {-# language CPP #-}
-- | = Name
--
-- VK_NV_copy_memory_indirect - device extension
--
= = VK_NV_copy_memory_indirect
--
-- [__Name String__]
-- @VK_NV_copy_memory_indirect@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
427
--
-- [__Revision__]
1
--
-- [__Extension and Version Dependencies__]
--
- Requires support for Vulkan 1.0
--
-- - Requires @VK_KHR_get_physical_device_properties2@ to be enabled
-- for any device-level functionality
--
-- - Requires @VK_KHR_buffer_device_address@ to be enabled for any
-- device-level functionality
--
-- [__Contact__]
--
- Kushwaha
< -Docs/issues/new?body=[VK_NV_copy_memory_indirect ] @vkushwaha - nv%0A*Here describe the issue or question you have about the VK_NV_copy_memory_indirect extension * >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
2022 - 10 - 14
--
-- [__Contributors__]
--
- , NVIDIA
--
- , NVIDIA
--
- , NVIDIA
--
- , NVIDIA
--
-- == Description
--
-- This extension adds support for performing copies between memory and
-- image regions using indirect parameters that are read by the device from
-- a buffer during execution. This functionality /may/ be useful for
-- performing copies where the copy parameters are not known during the
-- command buffer creation time.
--
-- == New Commands
--
- ' '
--
-- - 'cmdCopyMemoryToImageIndirectNV'
--
-- == New Structures
--
-- - 'CopyMemoryIndirectCommandNV'
--
- ' CopyMemoryToImageIndirectCommandNV '
--
-- - Extending
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2 ' ,
' Vulkan . Core10.Device . DeviceCreateInfo ' :
--
-- - 'PhysicalDeviceCopyMemoryIndirectFeaturesNV'
--
-- - Extending
' Vulkan . ' :
--
-- - 'PhysicalDeviceCopyMemoryIndirectPropertiesNV'
--
-- == New Enum Constants
--
-- - 'NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME'
--
- ' NV_COPY_MEMORY_INDIRECT_SPEC_VERSION '
--
- Extending ' Vulkan . Core10.Enums . StructureType . StructureType ' :
--
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV '
--
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV '
--
-- == Version History
--
- Revision 1 , 2022 - 10 - 14 ( )
--
-- - Initial draft
--
-- == See Also
--
' CopyMemoryIndirectCommandNV ' , ' CopyMemoryToImageIndirectCommandNV ' ,
-- 'PhysicalDeviceCopyMemoryIndirectFeaturesNV',
-- 'PhysicalDeviceCopyMemoryIndirectPropertiesNV',
' ' , ' cmdCopyMemoryToImageIndirectNV '
--
-- == Document Notes
--
-- For more information, see the
< -extensions/html/vkspec.html#VK_NV_copy_memory_indirect Vulkan Specification >
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_NV_copy_memory_indirect ( cmdCopyMemoryIndirectNV
, cmdCopyMemoryToImageIndirectNV
, CopyMemoryIndirectCommandNV(..)
, CopyMemoryToImageIndirectCommandNV(..)
, PhysicalDeviceCopyMemoryIndirectFeaturesNV(..)
, PhysicalDeviceCopyMemoryIndirectPropertiesNV(..)
, NV_COPY_MEMORY_INDIRECT_SPEC_VERSION
, pattern NV_COPY_MEMORY_INDIRECT_SPEC_VERSION
, NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME
, pattern NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME
) where
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import GHC.IO (throwIO)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import qualified Data.Vector (imapM_)
import qualified Data.Vector (length)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import Data.Word (Word32)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Data.Vector (Vector)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.NamedType ((:::))
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Handles (CommandBuffer)
import Vulkan.Core10.Handles (CommandBuffer(..))
import Vulkan.Core10.Handles (CommandBuffer(CommandBuffer))
import Vulkan.Core10.Handles (CommandBuffer_T)
import Vulkan.Core10.FundamentalTypes (DeviceAddress)
import Vulkan.Dynamic (DeviceCmds(pVkCmdCopyMemoryIndirectNV))
import Vulkan.Dynamic (DeviceCmds(pVkCmdCopyMemoryToImageIndirectNV))
import Vulkan.Core10.FundamentalTypes (DeviceSize)
import Vulkan.Core10.FundamentalTypes (Extent3D)
import Vulkan.Core10.Handles (Image)
import Vulkan.Core10.Handles (Image(..))
import Vulkan.Core10.Enums.ImageLayout (ImageLayout)
import Vulkan.Core10.Enums.ImageLayout (ImageLayout(..))
import Vulkan.Core10.CommandBufferBuilding (ImageSubresourceLayers)
import Vulkan.Core10.FundamentalTypes (Offset3D)
import Vulkan.Core10.Enums.QueueFlagBits (QueueFlags)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV))
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkCmdCopyMemoryIndirectNV
:: FunPtr (Ptr CommandBuffer_T -> DeviceAddress -> Word32 -> Word32 -> IO ()) -> Ptr CommandBuffer_T -> DeviceAddress -> Word32 -> Word32 -> IO ()
-- | vkCmdCopyMemoryIndirectNV - Copy data between memory regions
--
-- = Description
--
Each region read from @copyBufferAddress@ is copied from the source
-- region to the specified destination region. The results are undefined if
-- any of the source and destination regions overlap in memory.
--
-- == Valid Usage
--
-- - #VUID-vkCmdCopyMemoryIndirectNV-None-07653# The
-- <-extensions/html/vkspec.html#features-indirectCopy indirectCopy>
-- feature /must/ be enabled
--
-- - #VUID-vkCmdCopyMemoryIndirectNV-copyBufferAddress-07654#
@copyBufferAddress@ /must/ be 4 byte aligned
--
-- - #VUID-vkCmdCopyMemoryIndirectNV-stride-07655# @stride@ /must/ be a
-- multiple of @4@ and /must/ be greater than or equal to
-- sizeof('CopyMemoryIndirectCommandNV')
--
-- - #VUID-vkCmdCopyMemoryIndirectNV-commandBuffer-07656# The
' Vulkan . Core10.Handles . ' that @commandBuffer@ was
allocated from /must/ support at least one of the
' PhysicalDeviceCopyMemoryIndirectPropertiesNV'::@supportedQueues@
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCmdCopyMemoryIndirectNV-commandBuffer-parameter#
-- @commandBuffer@ /must/ be a valid
' Vulkan . Core10.Handles . CommandBuffer ' handle
--
-- - #VUID-vkCmdCopyMemoryIndirectNV-commandBuffer-recording#
-- @commandBuffer@ /must/ be in the
-- <-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
--
-- - #VUID-vkCmdCopyMemoryIndirectNV-commandBuffer-cmdpool# The
' Vulkan . Core10.Handles . ' that @commandBuffer@ was
-- allocated from /must/ support transfer, graphics, or compute
-- operations
--
-- - #VUID-vkCmdCopyMemoryIndirectNV-renderpass# This command /must/ only
-- be called outside of a render pass instance
--
-- - #VUID-vkCmdCopyMemoryIndirectNV-videocoding# This command /must/
-- only be called outside of a video coding scope
--
-- == Host Synchronization
--
-- - Host access to @commandBuffer@ /must/ be externally synchronized
--
- Host access to the ' Vulkan . Core10.Handles . ' that
-- @commandBuffer@ was allocated from /must/ be externally synchronized
--
-- == Command Properties
--
-- \'
--
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
| < -extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels > | < -extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope > | < -extensions/html/vkspec.html#vkCmdBeginVideoCodingKHR Video Coding Scope > | < -extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types > | < -extensions/html/vkspec.html#fundamentals-queueoperation-command-types Command Type > |
-- +============================================================================================================================+========================================================================================================================+=============================================================================================================================+=======================================================================================================================+========================================================================================================================================+
-- | Primary | Outside | Outside | Transfer | Action |
-- | Secondary | | | Graphics | |
-- | | | | Compute | |
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
--
-- = See Also
--
-- <-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
' Vulkan . Core10.Handles . CommandBuffer ' ,
' Vulkan . Core10.FundamentalTypes . DeviceAddress '
cmdCopyMemoryIndirectNV :: forall io
. (MonadIO io)
=> -- | @commandBuffer@ is the command buffer into which the command will be
-- recorded.
CommandBuffer
-> -- | @copyBufferAddress@ is the buffer address specifying the copy
-- parameters. This buffer is laid out in memory as an array of
-- 'CopyMemoryIndirectCommandNV' structures.
("copyBufferAddress" ::: DeviceAddress)
| @copyCount@ is the number of copies to execute , and can be zero .
("copyCount" ::: Word32)
-> -- | @stride@ is the stride in bytes between successive sets of copy
-- parameters.
("stride" ::: Word32)
-> io ()
cmdCopyMemoryIndirectNV commandBuffer
copyBufferAddress
copyCount
stride = liftIO $ do
let vkCmdCopyMemoryIndirectNVPtr = pVkCmdCopyMemoryIndirectNV (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
unless (vkCmdCopyMemoryIndirectNVPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdCopyMemoryIndirectNV is null" Nothing Nothing
let vkCmdCopyMemoryIndirectNV' = mkVkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNVPtr
traceAroundEvent "vkCmdCopyMemoryIndirectNV" (vkCmdCopyMemoryIndirectNV'
(commandBufferHandle (commandBuffer))
(copyBufferAddress)
(copyCount)
(stride))
pure $ ()
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkCmdCopyMemoryToImageIndirectNV
:: FunPtr (Ptr CommandBuffer_T -> DeviceAddress -> Word32 -> Word32 -> Image -> ImageLayout -> Ptr ImageSubresourceLayers -> IO ()) -> Ptr CommandBuffer_T -> DeviceAddress -> Word32 -> Word32 -> Image -> ImageLayout -> Ptr ImageSubresourceLayers -> IO ()
-- | vkCmdCopyMemoryToImageIndirectNV - Copy data from a memory region into
-- an image
--
-- = Description
--
Each region in @copyBufferAddress@ is copied from the source memory
-- region to an image region in the destination image. If the destination
image is of type ' Vulkan . Core10.Enums . ImageType . IMAGE_TYPE_3D ' , the
-- starting slice and number of slices to copy are specified in
-- @pImageSubresources@::@baseArrayLayer@ and
@pImageSubresources@::@layerCount@ respectively . The copy /must/ be
-- performed on a queue that supports indirect copy operations, see
-- 'PhysicalDeviceCopyMemoryIndirectPropertiesNV'.
--
-- == Valid Usage
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-None-07660# The
-- <-extensions/html/vkspec.html#features-indirectCopy indirectCopy>
-- feature /must/ be enabled
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-07661# @dstImage@
-- /must/ not be a protected image
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-aspectMask-07662# The
@aspectMask@ member for every subresource in @pImageSubresources@
-- /must/ only have a single bit set
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-07663# The image
region specified by each element in @copyBufferAddress@ /must/ be a
-- region that is contained within @dstImage@
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-07664# @dstImage@
-- /must/ have been created with
' Vulkan . Core10.Enums . ImageUsageFlagBits . IMAGE_USAGE_TRANSFER_DST_BIT '
-- usage flag
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-07665# If @dstImage@
-- is non-sparse then it /must/ be bound completely and contiguously to
a single ' Vulkan . Core10.Handles . DeviceMemory ' object
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-07666# @dstImage@
-- /must/ have a sample count equal to
' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_1_BIT '
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImageLayout-07667#
-- @dstImageLayout@ /must/ specify the layout of the image subresources
-- of @dstImage@ at the time this command is executed on a
' Vulkan . Core10.Handles . Device '
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImageLayout-07669#
-- @dstImageLayout@ /must/ be
' Vulkan . Core10.Enums . ImageLayout . IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL ' ,
' Vulkan . Core10.Enums . ImageLayout . IMAGE_LAYOUT_GENERAL ' , or
' Vulkan . Core10.Enums . ImageLayout . IMAGE_LAYOUT_SHARED_PRESENT_KHR '
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-mipLevel-07670# The specified
-- @mipLevel@ of each region /must/ be less than the @mipLevels@
specified in ' Vulkan . Core10.Image . ImageCreateInfo ' when @dstImage@
-- was created
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-baseArrayLayer-07671# The
-- specified @baseArrayLayer@ + @layerCount@ of each region /must/ be
-- less than or equal to the @arrayLayers@ specified in
' Vulkan . Core10.Image . ImageCreateInfo ' when was created
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-imageOffset-07672# The
-- @imageOffset@ and @imageExtent@ members of each region /must/
-- respect the image transfer granularity requirements of
-- @commandBuffer@’s command pool’s queue family, as described in
' Vulkan . Core10.DeviceInitialization . QueueFamilyProperties '
--
- # VUID - vkCmdCopyMemoryToImageIndirectNV - dstImage-07673 # @dstImage@
-- /must/ not have been created with @flags@ containing
' Vulkan . Core10.Enums . ImageCreateFlagBits . IMAGE_CREATE_SUBSAMPLED_BIT_EXT '
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-commandBuffer-07674# If the
queue family used to create the ' Vulkan . Core10.Handles . '
-- which @commandBuffer@ was allocated from does not support
' Vulkan . Core10.Enums . QueueFlagBits . QUEUE_GRAPHICS_BIT ' , for each
region , the @aspectMask@ member of @pImageSubresources@ /must/ not
be ' Vulkan . Core10.Enums . ImageAspectFlagBits . IMAGE_ASPECT_DEPTH_BIT '
-- or
' Vulkan . Core10.Enums . ImageAspectFlagBits . IMAGE_ASPECT_STENCIL_BIT '
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-imageOffset-07675# For each
region in @copyBufferAddress@ , @imageOffset.y@ and
-- (@imageExtent.height@ + @imageOffset.y@) /must/ both be greater than
or equal to @0@ and less than or equal to the height of the
specified
--
- # VUID - vkCmdCopyMemoryToImageIndirectNV - offset-07676 # @offset@ /must/
be 4 byte aligned
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-stride-07677# @stride@ /must/
-- be a multiple of @4@ and /must/ be greater than or equal to
-- sizeof('CopyMemoryToImageIndirectCommandNV')
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-commandBuffer-parameter#
-- @commandBuffer@ /must/ be a valid
' Vulkan . Core10.Handles . CommandBuffer ' handle
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-parameter#
@dstImage@ /must/ be a valid ' Vulkan . Core10.Handles . Image ' handle
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImageLayout-parameter#
-- @dstImageLayout@ /must/ be a valid
' Vulkan . Core10.Enums . ImageLayout . ImageLayout ' value
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-pImageSubresources-parameter#
-- @pImageSubresources@ /must/ be a valid pointer to an array of
@copyCount@ valid
' Vulkan . Core10.CommandBufferBuilding . ImageSubresourceLayers '
-- structures
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-commandBuffer-recording#
-- @commandBuffer@ /must/ be in the
-- <-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-commandBuffer-cmdpool# The
' Vulkan . Core10.Handles . ' that @commandBuffer@ was
-- allocated from /must/ support transfer, graphics, or compute
-- operations
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-renderpass# This command
-- /must/ only be called outside of a render pass instance
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-videocoding# This command
-- /must/ only be called outside of a video coding scope
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-copyCount-arraylength#
@copyCount@ /must/ be greater than @0@
--
-- - #VUID-vkCmdCopyMemoryToImageIndirectNV-commonparent# Both of
-- @commandBuffer@, and @dstImage@ /must/ have been created, allocated,
or retrieved from the same ' Vulkan . Core10.Handles . Device '
--
-- == Host Synchronization
--
-- - Host access to @commandBuffer@ /must/ be externally synchronized
--
- Host access to the ' Vulkan . Core10.Handles . ' that
-- @commandBuffer@ was allocated from /must/ be externally synchronized
--
-- == Command Properties
--
-- \'
--
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
| < -extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels > | < -extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope > | < -extensions/html/vkspec.html#vkCmdBeginVideoCodingKHR Video Coding Scope > | < -extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types > | < -extensions/html/vkspec.html#fundamentals-queueoperation-command-types Command Type > |
-- +============================================================================================================================+========================================================================================================================+=============================================================================================================================+=======================================================================================================================+========================================================================================================================================+
-- | Primary | Outside | Outside | Transfer | Action |
-- | Secondary | | | Graphics | |
-- | | | | Compute | |
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
--
-- = See Also
--
-- <-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
' Vulkan . Core10.Handles . CommandBuffer ' ,
' Vulkan . Core10.FundamentalTypes . DeviceAddress ' ,
' Vulkan . Core10.Handles . Image ' ,
' Vulkan . Core10.Enums . ImageLayout . ImageLayout ' ,
' Vulkan . Core10.CommandBufferBuilding . ImageSubresourceLayers '
-- Wrapper around @vkCmdCopyMemoryToImageIndirectNV@.  The only change from
-- the generated form is that the Haddock continuation lines in the
-- signature have had their missing @--@ prefixes restored (the stray text
-- previously broke compilation); the marshalling code is unchanged.
cmdCopyMemoryToImageIndirectNV :: forall io
                                . (MonadIO io)
                               => -- | @commandBuffer@ is the command buffer into which the command will be
                                  -- recorded.
                                  CommandBuffer
                               -> -- | @copyBufferAddress@ is the buffer address specifying the copy
                                  -- parameters. This buffer is laid out in memory as an array of
                                  -- 'CopyMemoryToImageIndirectCommandNV' structures.
                                  ("copyBufferAddress" ::: DeviceAddress)
                               -> -- | @stride@ is the byte stride between successive sets of copy parameters.
                                  ("stride" ::: Word32)
                               -> -- | @dstImage@ is the destination image.
                                  ("dstImage" ::: Image)
                               -> -- | @dstImageLayout@ is the layout of the destination image subresources for
                                  -- the copy.
                                  ("dstImageLayout" ::: ImageLayout)
                               -> -- | @pImageSubresources@ is a pointer to an array of size @copyCount@ of
                                  -- 'Vulkan.Core10.CommandBufferBuilding.ImageSubresourceLayers' used to
                                  -- specify the specific image subresource of the destination image data for
                                  -- that copy.
                                  ("imageSubresources" ::: Vector ImageSubresourceLayers)
                               -> io ()
cmdCopyMemoryToImageIndirectNV commandBuffer
                                 copyBufferAddress
                                 stride
                                 dstImage
                                 dstImageLayout
                                 imageSubresources = liftIO . evalContT $ do
  -- The function pointer is loaded dynamically per-device; it is null when
  -- the VK_NV_copy_memory_indirect extension was not enabled.
  let vkCmdCopyMemoryToImageIndirectNVPtr = pVkCmdCopyMemoryToImageIndirectNV (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
  lift $ unless (vkCmdCopyMemoryToImageIndirectNVPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdCopyMemoryToImageIndirectNV is null" Nothing Nothing
  let vkCmdCopyMemoryToImageIndirectNV' = mkVkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNVPtr
  -- Marshal the subresource vector into a temporary native array; each
  -- VkImageSubresourceLayers occupies 16 bytes.
  pPImageSubresources <- ContT $ allocaBytes @ImageSubresourceLayers ((Data.Vector.length (imageSubresources)) * 16)
  lift $ Data.Vector.imapM_ (\i e -> poke (pPImageSubresources `plusPtr` (16 * (i)) :: Ptr ImageSubresourceLayers) (e)) (imageSubresources)
  -- copyCount passed to the C side is derived from the vector length.
  lift $ traceAroundEvent "vkCmdCopyMemoryToImageIndirectNV" (vkCmdCopyMemoryToImageIndirectNV'
                                                               (commandBufferHandle (commandBuffer))
                                                               (copyBufferAddress)
                                                               ((fromIntegral (Data.Vector.length $ (imageSubresources)) :: Word32))
                                                               (stride)
                                                               (dstImage)
                                                               (dstImageLayout)
                                                               (pPImageSubresources))
  pure $ ()
-- | VkCopyMemoryIndirectCommandNV - Structure specifying indirect memory
-- region copy operation
--
-- == Valid Usage
--
-- - #VUID-VkCopyMemoryIndirectCommandNV-srcAddress-07657# The
--   @srcAddress@ /must/ be 4 byte aligned
--
-- - #VUID-VkCopyMemoryIndirectCommandNV-dstAddress-07658# The
--   @dstAddress@ /must/ be 4 byte aligned
--
-- - #VUID-VkCopyMemoryIndirectCommandNV-size-07659# The @size@ /must/ be
--   4 byte aligned
--
-- = See Also
--
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
-- 'Vulkan.Core10.FundamentalTypes.DeviceAddress',
-- 'Vulkan.Core10.FundamentalTypes.DeviceSize'
-- Host-side mirror of @VkCopyMemoryIndirectCommandNV@ (24 bytes, three
-- 8-byte fields).  Per the Valid Usage notes above, all three fields must
-- be 4-byte aligned values.
data CopyMemoryIndirectCommandNV = CopyMemoryIndirectCommandNV
  { -- | @srcAddress@ is the starting address of the source host or device memory
    -- to copy from.
    srcAddress :: DeviceAddress
  , -- | @dstAddress@ is the starting address of the destination host or device
    -- memory to copy to.
    dstAddress :: DeviceAddress
  , -- | @size@ is the size of the copy in bytes.
    size :: DeviceSize
  }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (CopyMemoryIndirectCommandNV)
#endif
deriving instance Show CopyMemoryIndirectCommandNV
instance ToCStruct CopyMemoryIndirectCommandNV where
  -- Allocate the 24-byte native struct on the stack, fill it, and hand the
  -- pointer to the continuation.
  withCStruct x f = allocaBytes 24 $ \ptr -> pokeCStruct ptr x (f ptr)
  -- Field layout: srcAddress @0, dstAddress @8, size @16.
  pokeCStruct ptr CopyMemoryIndirectCommandNV{..} cont =
       poke (ptr `plusPtr` 0 :: Ptr DeviceAddress) srcAddress
    *> poke (ptr `plusPtr` 8 :: Ptr DeviceAddress) dstAddress
    *> poke (ptr `plusPtr` 16 :: Ptr DeviceSize) size
    *> cont
  cStructSize = 24
  cStructAlignment = 8
  -- Same layout with every field zeroed.
  pokeZeroCStruct ptr cont =
       poke (ptr `plusPtr` 0 :: Ptr DeviceAddress) zero
    *> poke (ptr `plusPtr` 8 :: Ptr DeviceAddress) zero
    *> poke (ptr `plusPtr` 16 :: Ptr DeviceSize) zero
    *> cont
instance FromCStruct CopyMemoryIndirectCommandNV where
  -- Read the three 8-byte fields back at their fixed offsets
  -- (applicative form of the generated do-block).
  peekCStruct ptr =
    CopyMemoryIndirectCommandNV
      <$> peek @DeviceAddress (ptr `plusPtr` 0)
      <*> peek @DeviceAddress (ptr `plusPtr` 8)
      <*> peek @DeviceSize (ptr `plusPtr` 16)
instance Storable CopyMemoryIndirectCommandNV where
  -- Lazy patterns keep sizeOf/alignment usable on 'undefined'.
  sizeOf ~_ = 24
  alignment ~_ = 8
  -- Delegate to the CStruct (de)serialisers above.
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero CopyMemoryIndirectCommandNV where
  -- All-zero command: null source/destination addresses, zero-byte copy.
  zero = CopyMemoryIndirectCommandNV
           zero
           zero
           zero
-- | VkCopyMemoryToImageIndirectCommandNV - Structure specifying indirect
-- buffer image copy operation
--
-- == Valid Usage
--
-- - #VUID-VkCopyMemoryToImageIndirectCommandNV-srcAddress-07678# The
--   @srcAddress@ /must/ be 4 byte aligned
--
-- - #VUID-VkCopyMemoryToImageIndirectCommandNV-bufferRowLength-07679#
--   @bufferRowLength@ /must/ be @0@, or greater than or equal to the
--   @width@ member of @imageExtent@
--
-- - #VUID-VkCopyMemoryToImageIndirectCommandNV-bufferImageHeight-07680#
--   @bufferImageHeight@ /must/ be @0@, or greater than or equal to the
--   @height@ member of @imageExtent@
--
-- - #VUID-VkCopyMemoryToImageIndirectCommandNV-imageOffset-07681#
--   @imageOffset@ /must/ specify a valid offset in the destination image
--
-- - #VUID-VkCopyMemoryToImageIndirectCommandNV-imageExtent-07682#
--   @imageExtent@ /must/ specify a valid region in the destination image
--   and can be @0@
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkCopyMemoryToImageIndirectCommandNV-imageSubresource-parameter#
--   @imageSubresource@ /must/ be a valid
--   'Vulkan.Core10.CommandBufferBuilding.ImageSubresourceLayers'
--   structure
--
-- = See Also
--
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
-- 'Vulkan.Core10.FundamentalTypes.DeviceAddress',
-- 'Vulkan.Core10.FundamentalTypes.Extent3D',
-- 'Vulkan.Core10.CommandBufferBuilding.ImageSubresourceLayers',
-- 'Vulkan.Core10.FundamentalTypes.Offset3D'
-- Host-side mirror of @VkCopyMemoryToImageIndirectCommandNV@ (56 bytes).
-- Fix: several field comments had lost their @, -- @/@-- @ prefixes
-- (including the record commas before @bufferRowLength@, @bufferImageHeight@
-- and @imageExtent@), which broke compilation; they are restored below.
data CopyMemoryToImageIndirectCommandNV = CopyMemoryToImageIndirectCommandNV
  { -- | @srcAddress@ is the starting address of the source host or device memory
    -- to copy from.
    srcAddress :: DeviceAddress
  , -- | @bufferRowLength@ and @bufferImageHeight@ specify in texels a subregion
    -- of a larger two- or three-dimensional image in buffer memory, and
    -- control the addressing calculations. If either of these values is zero,
    -- that aspect of the buffer memory is considered to be tightly packed
    -- according to the @imageExtent@.
    bufferRowLength :: Word32
  , -- No documentation found for Nested "VkCopyMemoryToImageIndirectCommandNV" "bufferImageHeight"
    bufferImageHeight :: Word32
  , -- | @imageSubresource@ is a
    -- 'Vulkan.Core10.CommandBufferBuilding.ImageSubresourceLayers' used to
    -- specify the specific image subresources of the image used for the
    -- destination image data, which /must/ match the values specified in
    -- @pImageSubresources@ parameter of 'cmdCopyMemoryToImageIndirectNV'
    -- during command recording.
    imageSubresource :: ImageSubresourceLayers
  , -- | @imageOffset@ selects the initial @x@, @y@, @z@ offsets in texels of the
    -- sub-region of the destination image data.
    imageOffset :: Offset3D
  , -- | @imageExtent@ is the size in texels of the destination image in @width@,
    -- @height@ and @depth@.
    imageExtent :: Extent3D
  }
  deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (CopyMemoryToImageIndirectCommandNV)
#endif
deriving instance Show CopyMemoryToImageIndirectCommandNV
instance ToCStruct CopyMemoryToImageIndirectCommandNV where
  -- Allocate the 56-byte native struct on the stack, fill it, and hand the
  -- pointer to the continuation.
  withCStruct x f = allocaBytes 56 $ \ptr -> pokeCStruct ptr x (f ptr)
  -- Field layout: srcAddress @0, bufferRowLength @8, bufferImageHeight @12,
  -- imageSubresource @16, imageOffset @32, imageExtent @44.
  pokeCStruct ptr CopyMemoryToImageIndirectCommandNV{..} cont =
       poke (ptr `plusPtr` 0 :: Ptr DeviceAddress) srcAddress
    *> poke (ptr `plusPtr` 8 :: Ptr Word32) bufferRowLength
    *> poke (ptr `plusPtr` 12 :: Ptr Word32) bufferImageHeight
    *> poke (ptr `plusPtr` 16 :: Ptr ImageSubresourceLayers) imageSubresource
    *> poke (ptr `plusPtr` 32 :: Ptr Offset3D) imageOffset
    *> poke (ptr `plusPtr` 44 :: Ptr Extent3D) imageExtent
    *> cont
  cStructSize = 56
  cStructAlignment = 8
  -- Same layout with every field zeroed.
  pokeZeroCStruct ptr cont =
       poke (ptr `plusPtr` 0 :: Ptr DeviceAddress) zero
    *> poke (ptr `plusPtr` 8 :: Ptr Word32) zero
    *> poke (ptr `plusPtr` 12 :: Ptr Word32) zero
    *> poke (ptr `plusPtr` 16 :: Ptr ImageSubresourceLayers) zero
    *> poke (ptr `plusPtr` 32 :: Ptr Offset3D) zero
    *> poke (ptr `plusPtr` 44 :: Ptr Extent3D) zero
    *> cont
instance FromCStruct CopyMemoryToImageIndirectCommandNV where
  -- Read the fields back at their fixed offsets; plain 'peek' for the
  -- scalar fields, 'peekCStruct' for the nested structs
  -- (applicative form of the generated do-block).
  peekCStruct ptr =
    CopyMemoryToImageIndirectCommandNV
      <$> peek @DeviceAddress (ptr `plusPtr` 0)
      <*> peek @Word32 (ptr `plusPtr` 8)
      <*> peek @Word32 (ptr `plusPtr` 12)
      <*> peekCStruct @ImageSubresourceLayers (ptr `plusPtr` 16)
      <*> peekCStruct @Offset3D (ptr `plusPtr` 32)
      <*> peekCStruct @Extent3D (ptr `plusPtr` 44)
instance Storable CopyMemoryToImageIndirectCommandNV where
  -- Lazy patterns keep sizeOf/alignment usable on 'undefined'.
  sizeOf ~_ = 56
  alignment ~_ = 8
  -- Delegate to the CStruct (de)serialisers above.
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero CopyMemoryToImageIndirectCommandNV where
  -- All-zero command: every field takes its type's 'zero'.
  zero = CopyMemoryToImageIndirectCommandNV
           zero
           zero
           zero
           zero
           zero
           zero
-- | VkPhysicalDeviceCopyMemoryIndirectFeaturesNV - Structure describing
-- indirect copy features supported by an implementation
--
-- = Members
--
-- This structure describes the following features:
--
-- = Description
--
-- If the 'PhysicalDeviceCopyMemoryIndirectFeaturesNV' structure is
-- included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2',
-- it is filled in to indicate whether each corresponding feature is
-- supported. 'PhysicalDeviceCopyMemoryIndirectFeaturesNV' /can/ also be
-- used in the @pNext@ chain of 'Vulkan.Core10.Device.DeviceCreateInfo' to
-- selectively enable these features.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
-- Host-side mirror of @VkPhysicalDeviceCopyMemoryIndirectFeaturesNV@
-- (sType/pNext header + one boolean feature flag).
data PhysicalDeviceCopyMemoryIndirectFeaturesNV = PhysicalDeviceCopyMemoryIndirectFeaturesNV
  { -- | #features-indirectCopy# @indirectCopy@ indicates whether
    -- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#indirect-copies indirect copies>
    -- are supported.
    indirectCopy :: Bool }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceCopyMemoryIndirectFeaturesNV)
#endif
deriving instance Show PhysicalDeviceCopyMemoryIndirectFeaturesNV
instance ToCStruct PhysicalDeviceCopyMemoryIndirectFeaturesNV where
  -- Allocate the 24-byte native struct on the stack, fill it, and hand the
  -- pointer to the continuation.
  withCStruct x f = allocaBytes 24 $ \ptr -> pokeCStruct ptr x (f ptr)
  -- Layout: sType @0, pNext @8 (always null here), indirectCopy @16 as a
  -- VkBool32.
  pokeCStruct ptr PhysicalDeviceCopyMemoryIndirectFeaturesNV{..} cont =
       poke (ptr `plusPtr` 0 :: Ptr StructureType) STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV
    *> poke (ptr `plusPtr` 8 :: Ptr (Ptr ())) nullPtr
    *> poke (ptr `plusPtr` 16 :: Ptr Bool32) (boolToBool32 indirectCopy)
    *> cont
  cStructSize = 24
  cStructAlignment = 8
  -- Zero struct still carries the correct sType tag.
  pokeZeroCStruct ptr cont =
       poke (ptr `plusPtr` 0 :: Ptr StructureType) STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV
    *> poke (ptr `plusPtr` 8 :: Ptr (Ptr ())) nullPtr
    *> poke (ptr `plusPtr` 16 :: Ptr Bool32) (boolToBool32 zero)
    *> cont
instance FromCStruct PhysicalDeviceCopyMemoryIndirectFeaturesNV where
  -- Only the payload field is read back; the sType/pNext header is ignored.
  peekCStruct ptr = do
    flag <- peek @Bool32 (ptr `plusPtr` 16)
    pure (PhysicalDeviceCopyMemoryIndirectFeaturesNV (bool32ToBool flag))
instance Storable PhysicalDeviceCopyMemoryIndirectFeaturesNV where
  -- Lazy patterns keep sizeOf/alignment usable on 'undefined'.
  sizeOf ~_ = 24
  alignment ~_ = 8
  -- Delegate to the CStruct (de)serialisers above.
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceCopyMemoryIndirectFeaturesNV where
  -- Zero value: feature flag cleared (False).
  zero = PhysicalDeviceCopyMemoryIndirectFeaturesNV
           zero
-- | VkPhysicalDeviceCopyMemoryIndirectPropertiesNV - Structure describing
-- supported queues for indirect copy
--
-- = Description
--
-- If the
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#features-indirectCopy indirectCopy>
-- feature is supported, @supportedQueues@ /must/ return at least one
-- supported queue.
--
-- If the 'PhysicalDeviceCopyMemoryIndirectPropertiesNV' structure is
-- included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceProperties2',
-- it is filled in with each corresponding implementation-dependent
-- property.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
-- 'Vulkan.Core10.Enums.QueueFlagBits.QueueFlags',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
-- Host-side mirror of @VkPhysicalDeviceCopyMemoryIndirectPropertiesNV@.
-- Fix: the record's opening brace together with the leading field comment
-- (@{ -- | @supportedQueues@ ...@) had been garbled into stray text,
-- breaking compilation; it is restored below.
data PhysicalDeviceCopyMemoryIndirectPropertiesNV = PhysicalDeviceCopyMemoryIndirectPropertiesNV
  { -- | #properties-supportedQueues# @supportedQueues@ is a bitmask of
    -- 'Vulkan.Core10.Enums.QueueFlagBits.QueueFlagBits' indicating the queues
    -- on which
    -- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#indirect-copies indirect copy commands>
    -- are supported.
    supportedQueues :: QueueFlags }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceCopyMemoryIndirectPropertiesNV)
#endif
deriving instance Show PhysicalDeviceCopyMemoryIndirectPropertiesNV
instance ToCStruct PhysicalDeviceCopyMemoryIndirectPropertiesNV where
  -- Allocate the 24-byte native struct on the stack, fill it, and hand the
  -- pointer to the continuation.
  withCStruct x f = allocaBytes 24 $ \ptr -> pokeCStruct ptr x (f ptr)
  -- Layout: sType @0, pNext @8 (always null here), supportedQueues @16.
  pokeCStruct ptr PhysicalDeviceCopyMemoryIndirectPropertiesNV{..} cont =
       poke (ptr `plusPtr` 0 :: Ptr StructureType) STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV
    *> poke (ptr `plusPtr` 8 :: Ptr (Ptr ())) nullPtr
    *> poke (ptr `plusPtr` 16 :: Ptr QueueFlags) supportedQueues
    *> cont
  cStructSize = 24
  cStructAlignment = 8
  -- Zero struct still carries the correct sType tag.
  pokeZeroCStruct ptr cont =
       poke (ptr `plusPtr` 0 :: Ptr StructureType) STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV
    *> poke (ptr `plusPtr` 8 :: Ptr (Ptr ())) nullPtr
    *> poke (ptr `plusPtr` 16 :: Ptr QueueFlags) zero
    *> cont
instance FromCStruct PhysicalDeviceCopyMemoryIndirectPropertiesNV where
  -- Only the payload field is read back; the sType/pNext header is ignored.
  peekCStruct ptr =
    PhysicalDeviceCopyMemoryIndirectPropertiesNV
      <$> peek @QueueFlags (ptr `plusPtr` 16)
instance Storable PhysicalDeviceCopyMemoryIndirectPropertiesNV where
  -- Lazy patterns keep sizeOf/alignment usable on 'undefined'.
  sizeOf ~_ = 24
  alignment ~_ = 8
  -- Delegate to the CStruct (de)serialisers above.
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceCopyMemoryIndirectPropertiesNV where
  -- Zero value: empty queue-flags bitmask.
  zero = PhysicalDeviceCopyMemoryIndirectPropertiesNV
           zero
type NV_COPY_MEMORY_INDIRECT_SPEC_VERSION = 1

-- No documentation found for TopLevel "VK_NV_COPY_MEMORY_INDIRECT_SPEC_VERSION"
-- (the comment marker had been stripped, leaving stray text that broke
-- compilation; restored here).
pattern NV_COPY_MEMORY_INDIRECT_SPEC_VERSION :: forall a . Integral a => a
pattern NV_COPY_MEMORY_INDIRECT_SPEC_VERSION = 1
type NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME = "VK_NV_copy_memory_indirect"

-- No documentation found for TopLevel "VK_NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME"
-- (the comment marker had been stripped, leaving stray text that broke
-- compilation; restored here).
pattern NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME = "VK_NV_copy_memory_indirect"
| null | https://raw.githubusercontent.com/expipiplus1/vulkan/70d8cca16893f8de76c0eb89e79e73f5a455db76/src/Vulkan/Extensions/VK_NV_copy_memory_indirect.hs | haskell | # language CPP #
| = Name
VK_NV_copy_memory_indirect - device extension
[__Name String__]
@VK_NV_copy_memory_indirect@
[__Extension Type__]
Device extension
[__Registered Extension Number__]
[__Revision__]
[__Extension and Version Dependencies__]
- Requires @VK_KHR_get_physical_device_properties2@ to be enabled
for any device-level functionality
- Requires @VK_KHR_buffer_device_address@ to be enabled for any
device-level functionality
[__Contact__]
== Other Extension Metadata
[__Last Modified Date__]
[__Contributors__]
== Description
This extension adds support for performing copies between memory and
image regions using indirect parameters that are read by the device from
a buffer during execution. This functionality /may/ be useful for
performing copies where the copy parameters are not known during the
command buffer creation time.
== New Commands
- 'cmdCopyMemoryToImageIndirectNV'
== New Structures
- 'CopyMemoryIndirectCommandNV'
- Extending
- 'PhysicalDeviceCopyMemoryIndirectFeaturesNV'
- Extending
- 'PhysicalDeviceCopyMemoryIndirectPropertiesNV'
== New Enum Constants
- 'NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME'
== Version History
- Initial draft
== See Also
'PhysicalDeviceCopyMemoryIndirectFeaturesNV',
'PhysicalDeviceCopyMemoryIndirectPropertiesNV',
== Document Notes
For more information, see the
This page is a generated document. Fixes and changes should be made to
the generator scripts, not directly.
| vkCmdCopyMemoryIndirectNV - Copy data between memory regions
= Description
region to the specified destination region. The results are undefined if
any of the source and destination regions overlap in memory.
== Valid Usage
- #VUID-vkCmdCopyMemoryIndirectNV-None-07653# The
<-extensions/html/vkspec.html#features-indirectCopy indirectCopy>
feature /must/ be enabled
- #VUID-vkCmdCopyMemoryIndirectNV-copyBufferAddress-07654#
- #VUID-vkCmdCopyMemoryIndirectNV-stride-07655# @stride@ /must/ be a
multiple of @4@ and /must/ be greater than or equal to
sizeof('CopyMemoryIndirectCommandNV')
- #VUID-vkCmdCopyMemoryIndirectNV-commandBuffer-07656# The
== Valid Usage (Implicit)
- #VUID-vkCmdCopyMemoryIndirectNV-commandBuffer-parameter#
@commandBuffer@ /must/ be a valid
- #VUID-vkCmdCopyMemoryIndirectNV-commandBuffer-recording#
@commandBuffer@ /must/ be in the
<-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
- #VUID-vkCmdCopyMemoryIndirectNV-commandBuffer-cmdpool# The
allocated from /must/ support transfer, graphics, or compute
operations
- #VUID-vkCmdCopyMemoryIndirectNV-renderpass# This command /must/ only
be called outside of a render pass instance
- #VUID-vkCmdCopyMemoryIndirectNV-videocoding# This command /must/
only be called outside of a video coding scope
== Host Synchronization
- Host access to @commandBuffer@ /must/ be externally synchronized
@commandBuffer@ was allocated from /must/ be externally synchronized
== Command Properties
\'
+----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
+============================================================================================================================+========================================================================================================================+=============================================================================================================================+=======================================================================================================================+========================================================================================================================================+
| Primary | Outside | Outside | Transfer | Action |
| Secondary | | | Graphics | |
| | | | Compute | |
+----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
= See Also
<-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
| @commandBuffer@ is the command buffer into which the command will be
recorded.
| @copyBufferAddress@ is the buffer address specifying the copy
parameters. This buffer is laid out in memory as an array of
'CopyMemoryIndirectCommandNV' structures.
| @stride@ is the stride in bytes between successive sets of copy
parameters.
| vkCmdCopyMemoryToImageIndirectNV - Copy data from a memory region into
an image
= Description
region to an image region in the destination image. If the destination
starting slice and number of slices to copy are specified in
@pImageSubresources@::@baseArrayLayer@ and
performed on a queue that supports indirect copy operations, see
'PhysicalDeviceCopyMemoryIndirectPropertiesNV'.
== Valid Usage
- #VUID-vkCmdCopyMemoryToImageIndirectNV-None-07660# The
<-extensions/html/vkspec.html#features-indirectCopy indirectCopy>
feature /must/ be enabled
- #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-07661# @dstImage@
/must/ not be a protected image
- #VUID-vkCmdCopyMemoryToImageIndirectNV-aspectMask-07662# The
/must/ only have a single bit set
- #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-07663# The image
region that is contained within @dstImage@
- #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-07664# @dstImage@
/must/ have been created with
usage flag
- #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-07665# If @dstImage@
is non-sparse then it /must/ be bound completely and contiguously to
- #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-07666# @dstImage@
/must/ have a sample count equal to
- #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImageLayout-07667#
@dstImageLayout@ /must/ specify the layout of the image subresources
of @dstImage@ at the time this command is executed on a
- #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImageLayout-07669#
@dstImageLayout@ /must/ be
- #VUID-vkCmdCopyMemoryToImageIndirectNV-mipLevel-07670# The specified
@mipLevel@ of each region /must/ be less than the @mipLevels@
was created
- #VUID-vkCmdCopyMemoryToImageIndirectNV-baseArrayLayer-07671# The
specified @baseArrayLayer@ + @layerCount@ of each region /must/ be
less than or equal to the @arrayLayers@ specified in
- #VUID-vkCmdCopyMemoryToImageIndirectNV-imageOffset-07672# The
@imageOffset@ and @imageExtent@ members of each region /must/
respect the image transfer granularity requirements of
@commandBuffer@’s command pool’s queue family, as described in
/must/ not have been created with @flags@ containing
- #VUID-vkCmdCopyMemoryToImageIndirectNV-commandBuffer-07674# If the
which @commandBuffer@ was allocated from does not support
or
- #VUID-vkCmdCopyMemoryToImageIndirectNV-imageOffset-07675# For each
(@imageExtent.height@ + @imageOffset.y@) /must/ both be greater than
- #VUID-vkCmdCopyMemoryToImageIndirectNV-stride-07677# @stride@ /must/
be a multiple of @4@ and /must/ be greater than or equal to
sizeof('CopyMemoryToImageIndirectCommandNV')
== Valid Usage (Implicit)
- #VUID-vkCmdCopyMemoryToImageIndirectNV-commandBuffer-parameter#
@commandBuffer@ /must/ be a valid
- #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImage-parameter#
- #VUID-vkCmdCopyMemoryToImageIndirectNV-dstImageLayout-parameter#
@dstImageLayout@ /must/ be a valid
- #VUID-vkCmdCopyMemoryToImageIndirectNV-pImageSubresources-parameter#
@pImageSubresources@ /must/ be a valid pointer to an array of
structures
- #VUID-vkCmdCopyMemoryToImageIndirectNV-commandBuffer-recording#
@commandBuffer@ /must/ be in the
<-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
- #VUID-vkCmdCopyMemoryToImageIndirectNV-commandBuffer-cmdpool# The
allocated from /must/ support transfer, graphics, or compute
operations
- #VUID-vkCmdCopyMemoryToImageIndirectNV-renderpass# This command
/must/ only be called outside of a render pass instance
- #VUID-vkCmdCopyMemoryToImageIndirectNV-videocoding# This command
/must/ only be called outside of a video coding scope
- #VUID-vkCmdCopyMemoryToImageIndirectNV-copyCount-arraylength#
- #VUID-vkCmdCopyMemoryToImageIndirectNV-commonparent# Both of
@commandBuffer@, and @dstImage@ /must/ have been created, allocated,
== Host Synchronization
- Host access to @commandBuffer@ /must/ be externally synchronized
@commandBuffer@ was allocated from /must/ be externally synchronized
== Command Properties
\'
+----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
+============================================================================================================================+========================================================================================================================+=============================================================================================================================+=======================================================================================================================+========================================================================================================================================+
| Primary | Outside | Outside | Transfer | Action |
| Secondary | | | Graphics | |
| | | | Compute | |
+----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
= See Also
<-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
| @commandBuffer@ is the command buffer into which the command will be
recorded.
| @copyBufferAddress@ is the buffer address specifying the copy
parameters. This buffer is laid out in memory as an array of
| @stride@ is the byte stride between successive sets of copy parameters.
| @dstImage@ is the destination image.
| @dstImageLayout@ is the layout of the destination image subresources for
the copy.
| @pImageSubresources@ is a pointer to an array of size @copyCount@ of
specify the specific image subresource of the destination image data for
that copy.
| VkCopyMemoryIndirectCommandNV - Structure specifying indirect memory
region copy operation
== Valid Usage
4 byte aligned
= See Also
<-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
| @srcAddress@ is the starting address of the source host or device memory
to copy from.
| @dstAddress@ is the starting address of the destination host or device
memory to copy to.
| @size@ is the size of the copy in bytes.
| VkCopyMemoryToImageIndirectCommandNV - Structure specifying indirect
buffer image copy operation
== Valid Usage
- #VUID-VkCopyMemoryToImageIndirectCommandNV-srcAddress-07678# The
- #VUID-VkCopyMemoryToImageIndirectCommandNV-bufferImageHeight-07680#
@height@ member of @imageExtent@
- #VUID-VkCopyMemoryToImageIndirectCommandNV-imageOffset-07681#
@imageOffset@ /must/ specify a valid offset in the destination image
- #VUID-VkCopyMemoryToImageIndirectCommandNV-imageExtent-07682#
@imageExtent@ /must/ specify a valid region in the destination image
== Valid Usage (Implicit)
- #VUID-VkCopyMemoryToImageIndirectCommandNV-imageSubresource-parameter#
@imageSubresource@ /must/ be a valid
structure
= See Also
<-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
| @srcAddress@ is the starting address of the source host or device memory
to copy from.
that aspect of the buffer memory is considered to be tightly packed
| @imageSubresource@ is a
specify the specific image subresources of the image used for the
destination image data, which /must/ match the values specified in
@pImageSubresources@ parameter of 'cmdCopyMemoryToImageIndirectNV'
during command recording.
| @imageOffset@ selects the initial @x@, @y@, @z@ offsets in texels of the
sub-region of the destination image data.
@height@ and @depth@.
| VkPhysicalDeviceCopyMemoryIndirectFeaturesNV - Structure describing
indirect copy features supported by an implementation
= Members
This structure describes the following features:
= Description
If the 'PhysicalDeviceCopyMemoryIndirectFeaturesNV' structure is
included in the @pNext@ chain of the
structure passed to
it is filled in to indicate whether each corresponding feature is
supported. 'PhysicalDeviceCopyMemoryIndirectFeaturesNV' /can/ also be
selectively enable these features.
== Valid Usage (Implicit)
= See Also
<-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
| #features-indirectCopy# @indirectCopy@ indicates whether
<-extensions/html/vkspec.html#indirect-copies indirect copies>
are supported.
| VkPhysicalDeviceCopyMemoryIndirectPropertiesNV - Structure describing
supported queues for indirect copy
= Description
If the
<-extensions/html/vkspec.html#features-indirectCopy indirectCopy>
supported queue.
If the 'PhysicalDeviceCopyMemoryIndirectPropertiesNV' structure is
included in the @pNext@ chain of the
structure passed to
it is filled in with each corresponding implementation-dependent
property.
== Valid Usage (Implicit)
= See Also
<-extensions/html/vkspec.html#VK_NV_copy_memory_indirect VK_NV_copy_memory_indirect>,
on which
<-extensions/html/vkspec.html#indirect-copies indirect copy commands>
are supported. | = = VK_NV_copy_memory_indirect
427
1
- Requires support for Vulkan 1.0
- Kushwaha
< -Docs/issues/new?body=[VK_NV_copy_memory_indirect ] @vkushwaha - nv%0A*Here describe the issue or question you have about the VK_NV_copy_memory_indirect extension * >
2022 - 10 - 14
- , NVIDIA
- , NVIDIA
- , NVIDIA
- , NVIDIA
- ' '
- ' CopyMemoryToImageIndirectCommandNV '
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2 ' ,
' Vulkan . Core10.Device . DeviceCreateInfo ' :
' Vulkan . ' :
- ' NV_COPY_MEMORY_INDIRECT_SPEC_VERSION '
- Extending ' Vulkan . Core10.Enums . StructureType . StructureType ' :
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV '
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV '
- Revision 1 , 2022 - 10 - 14 ( )
' CopyMemoryIndirectCommandNV ' , ' CopyMemoryToImageIndirectCommandNV ' ,
' ' , ' cmdCopyMemoryToImageIndirectNV '
< -extensions/html/vkspec.html#VK_NV_copy_memory_indirect Vulkan Specification >
module Vulkan.Extensions.VK_NV_copy_memory_indirect ( cmdCopyMemoryIndirectNV
, cmdCopyMemoryToImageIndirectNV
, CopyMemoryIndirectCommandNV(..)
, CopyMemoryToImageIndirectCommandNV(..)
, PhysicalDeviceCopyMemoryIndirectFeaturesNV(..)
, PhysicalDeviceCopyMemoryIndirectPropertiesNV(..)
, NV_COPY_MEMORY_INDIRECT_SPEC_VERSION
, pattern NV_COPY_MEMORY_INDIRECT_SPEC_VERSION
, NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME
, pattern NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME
) where
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import GHC.IO (throwIO)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import qualified Data.Vector (imapM_)
import qualified Data.Vector (length)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import Data.Word (Word32)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Data.Vector (Vector)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.NamedType ((:::))
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Handles (CommandBuffer)
import Vulkan.Core10.Handles (CommandBuffer(..))
import Vulkan.Core10.Handles (CommandBuffer(CommandBuffer))
import Vulkan.Core10.Handles (CommandBuffer_T)
import Vulkan.Core10.FundamentalTypes (DeviceAddress)
import Vulkan.Dynamic (DeviceCmds(pVkCmdCopyMemoryIndirectNV))
import Vulkan.Dynamic (DeviceCmds(pVkCmdCopyMemoryToImageIndirectNV))
import Vulkan.Core10.FundamentalTypes (DeviceSize)
import Vulkan.Core10.FundamentalTypes (Extent3D)
import Vulkan.Core10.Handles (Image)
import Vulkan.Core10.Handles (Image(..))
import Vulkan.Core10.Enums.ImageLayout (ImageLayout)
import Vulkan.Core10.Enums.ImageLayout (ImageLayout(..))
import Vulkan.Core10.CommandBufferBuilding (ImageSubresourceLayers)
import Vulkan.Core10.FundamentalTypes (Offset3D)
import Vulkan.Core10.Enums.QueueFlagBits (QueueFlags)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV))
-- Dynamic FFI wrapper: converts the function pointer obtained from the Vulkan
-- loader into a callable Haskell function for @vkCmdCopyMemoryIndirectNV@.
-- The call is @unsafe@ unless SAFE_FOREIGN_CALLS is defined at build time.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkCmdCopyMemoryIndirectNV
  :: FunPtr (Ptr CommandBuffer_T -> DeviceAddress -> Word32 -> Word32 -> IO ()) -> Ptr CommandBuffer_T -> DeviceAddress -> Word32 -> Word32 -> IO ()
Each region read from @copyBufferAddress@ is copied from the source
@copyBufferAddress@ /must/ be 4 byte aligned
' Vulkan . Core10.Handles . ' that @commandBuffer@ was
allocated from /must/ support at least one of the
' PhysicalDeviceCopyMemoryIndirectPropertiesNV'::@supportedQueues@
' Vulkan . Core10.Handles . CommandBuffer ' handle
' Vulkan . Core10.Handles . ' that @commandBuffer@ was
- Host access to the ' Vulkan . Core10.Handles . ' that
| < -extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels > | < -extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope > | < -extensions/html/vkspec.html#vkCmdBeginVideoCodingKHR Video Coding Scope > | < -extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types > | < -extensions/html/vkspec.html#fundamentals-queueoperation-command-types Command Type > |
' Vulkan . Core10.Handles . CommandBuffer ' ,
' Vulkan . Core10.FundamentalTypes . DeviceAddress '
-- | vkCmdCopyMemoryIndirectNV - Copy data between memory regions, with the
-- copy parameters read indirectly from device memory at execution time.
--
-- Each region read from @copyBufferAddress@ is copied from the source to the
-- destination address; @copyBufferAddress@ /must/ be 4-byte aligned (see the
-- Vulkan specification for the full valid-usage rules).
cmdCopyMemoryIndirectNV :: forall io
                         . (MonadIO io)
                        => -- | @commandBuffer@ is the command buffer into which the command is
                           -- recorded.
                           CommandBuffer
                        -> -- | @copyBufferAddress@ is the device address of an array of
                           -- 'CopyMemoryIndirectCommandNV' structures describing the copies.
                           ("copyBufferAddress" ::: DeviceAddress)
                        -> -- | @copyCount@ is the number of copies to execute, and can be zero.
                           ("copyCount" ::: Word32)
                        -> -- | @stride@ is the byte stride between successive sets of copy
                           -- parameters.
                           ("stride" ::: Word32)
                        -> io ()
cmdCopyMemoryIndirectNV commandBuffer
                        copyBufferAddress
                        copyCount
                        stride = liftIO $ do
  -- Look up the dynamically loaded entry point; it is only non-null when the
  -- VK_NV_copy_memory_indirect extension was enabled on the device.
  let vkCmdCopyMemoryIndirectNVPtr = pVkCmdCopyMemoryIndirectNV (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
  unless (vkCmdCopyMemoryIndirectNVPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdCopyMemoryIndirectNV is null" Nothing Nothing
  let vkCmdCopyMemoryIndirectNV' = mkVkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNVPtr
  traceAroundEvent "vkCmdCopyMemoryIndirectNV" (vkCmdCopyMemoryIndirectNV'
                                                 (commandBufferHandle (commandBuffer))
                                                 (copyBufferAddress)
                                                 (copyCount)
                                                 (stride))
  pure $ ()
-- Dynamic FFI wrapper: converts the loader-provided function pointer into a
-- callable Haskell function for @vkCmdCopyMemoryToImageIndirectNV@.
-- The call is @unsafe@ unless SAFE_FOREIGN_CALLS is defined at build time.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkCmdCopyMemoryToImageIndirectNV
  :: FunPtr (Ptr CommandBuffer_T -> DeviceAddress -> Word32 -> Word32 -> Image -> ImageLayout -> Ptr ImageSubresourceLayers -> IO ()) -> Ptr CommandBuffer_T -> DeviceAddress -> Word32 -> Word32 -> Image -> ImageLayout -> Ptr ImageSubresourceLayers -> IO ()
Each region in @copyBufferAddress@ is copied from the source memory
image is of type ' Vulkan . Core10.Enums . ImageType . IMAGE_TYPE_3D ' , the
@pImageSubresources@::@layerCount@ respectively . The copy /must/ be
@aspectMask@ member for every subresource in @pImageSubresources@
region specified by each element in @copyBufferAddress@ /must/ be a
' Vulkan . Core10.Enums . ImageUsageFlagBits . IMAGE_USAGE_TRANSFER_DST_BIT '
a single ' Vulkan . Core10.Handles . DeviceMemory ' object
' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_1_BIT '
' Vulkan . Core10.Handles . Device '
' Vulkan . Core10.Enums . ImageLayout . IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL ' ,
' Vulkan . Core10.Enums . ImageLayout . IMAGE_LAYOUT_GENERAL ' , or
' Vulkan . Core10.Enums . ImageLayout . IMAGE_LAYOUT_SHARED_PRESENT_KHR '
specified in ' Vulkan . Core10.Image . ImageCreateInfo ' when @dstImage@
' Vulkan . Core10.Image . ImageCreateInfo ' when was created
' Vulkan . Core10.DeviceInitialization . QueueFamilyProperties '
- # VUID - vkCmdCopyMemoryToImageIndirectNV - dstImage-07673 # @dstImage@
' Vulkan . Core10.Enums . ImageCreateFlagBits . IMAGE_CREATE_SUBSAMPLED_BIT_EXT '
queue family used to create the ' Vulkan . Core10.Handles . '
' Vulkan . Core10.Enums . QueueFlagBits . QUEUE_GRAPHICS_BIT ' , for each
region , the @aspectMask@ member of @pImageSubresources@ /must/ not
be ' Vulkan . Core10.Enums . ImageAspectFlagBits . IMAGE_ASPECT_DEPTH_BIT '
' Vulkan . Core10.Enums . ImageAspectFlagBits . IMAGE_ASPECT_STENCIL_BIT '
region in @copyBufferAddress@ , @imageOffset.y@ and
or equal to @0@ and less than or equal to the height of the
specified
- # VUID - vkCmdCopyMemoryToImageIndirectNV - offset-07676 # @offset@ /must/
be 4 byte aligned
' Vulkan . Core10.Handles . CommandBuffer ' handle
@dstImage@ /must/ be a valid ' Vulkan . Core10.Handles . Image ' handle
' Vulkan . Core10.Enums . ImageLayout . ImageLayout ' value
@copyCount@ valid
' Vulkan . Core10.CommandBufferBuilding . ImageSubresourceLayers '
' Vulkan . Core10.Handles . ' that @commandBuffer@ was
@copyCount@ /must/ be greater than @0@
or retrieved from the same ' Vulkan . Core10.Handles . Device '
- Host access to the ' Vulkan . Core10.Handles . ' that
| < -extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels > | < -extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope > | < -extensions/html/vkspec.html#vkCmdBeginVideoCodingKHR Video Coding Scope > | < -extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types > | < -extensions/html/vkspec.html#fundamentals-queueoperation-command-types Command Type > |
' Vulkan . Core10.Handles . CommandBuffer ' ,
' Vulkan . Core10.FundamentalTypes . DeviceAddress ' ,
' Vulkan . Core10.Handles . Image ' ,
' Vulkan . Core10.Enums . ImageLayout . ImageLayout ' ,
' Vulkan . Core10.CommandBufferBuilding . ImageSubresourceLayers '
-- | vkCmdCopyMemoryToImageIndirectNV - Copy data from a memory region into an
-- image, with the copy parameters read indirectly from device memory.
--
-- Each region in @copyBufferAddress@ is copied from source memory to the
-- destination image; see the Vulkan specification for the full valid-usage
-- rules (alignment, image usage flags, layouts, queue support, ...).
cmdCopyMemoryToImageIndirectNV :: forall io
                                . (MonadIO io)
                               => -- | @commandBuffer@ is the command buffer into which the command is
                                  -- recorded.
                                  CommandBuffer
                               -> -- | @copyBufferAddress@ is the device address of an array of
                                  -- 'CopyMemoryToImageIndirectCommandNV' structures.
                                  ("copyBufferAddress" ::: DeviceAddress)
                               -> -- | @stride@ is the byte stride between successive sets of copy
                                  -- parameters.
                                  ("stride" ::: Word32)
                               -> -- | @dstImage@ is the destination image.
                                  ("dstImage" ::: Image)
                               -> -- | @dstImageLayout@ is the current layout of the destination image.
                                  ("dstImageLayout" ::: ImageLayout)
                               -> -- | @pImageSubresources@ is an array of
                                  -- 'Vulkan.Core10.CommandBufferBuilding.ImageSubresourceLayers' used to
                                  -- select the destination image subresource of each copy; its length
                                  -- also supplies @copyCount@.
                                  ("imageSubresources" ::: Vector ImageSubresourceLayers)
                               -> io ()
cmdCopyMemoryToImageIndirectNV commandBuffer
                               copyBufferAddress
                               stride
                               dstImage
                               dstImageLayout
                               imageSubresources = liftIO . evalContT $ do
  -- Look up the dynamically loaded entry point; it is only non-null when the
  -- VK_NV_copy_memory_indirect extension was enabled on the device.
  let vkCmdCopyMemoryToImageIndirectNVPtr = pVkCmdCopyMemoryToImageIndirectNV (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
  lift $ unless (vkCmdCopyMemoryToImageIndirectNVPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdCopyMemoryToImageIndirectNV is null" Nothing Nothing
  let vkCmdCopyMemoryToImageIndirectNV' = mkVkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNVPtr
  -- Marshal the subresource vector into a temporary C array (16 bytes per
  -- element, matching VkImageSubresourceLayers).
  pPImageSubresources <- ContT $ allocaBytes @ImageSubresourceLayers ((Data.Vector.length (imageSubresources)) * 16)
  lift $ Data.Vector.imapM_ (\i e -> poke (pPImageSubresources `plusPtr` (16 * (i)) :: Ptr ImageSubresourceLayers) (e)) (imageSubresources)
  lift $ traceAroundEvent "vkCmdCopyMemoryToImageIndirectNV" (vkCmdCopyMemoryToImageIndirectNV'
                                                               (commandBufferHandle (commandBuffer))
                                                               (copyBufferAddress)
                                                               ((fromIntegral (Data.Vector.length $ (imageSubresources)) :: Word32))
                                                               (stride)
                                                               (dstImage)
                                                               (dstImageLayout)
                                                               (pPImageSubresources))
  pure $ ()
- # VUID - VkCopyMemoryIndirectCommandNV - srcAddress-07657 # The
@srcAddress@ /must/ be 4 byte aligned
- # VUID - VkCopyMemoryIndirectCommandNV - dstAddress-07658 # The
@dstAddress@ /must/ be 4 byte aligned
- # VUID - VkCopyMemoryIndirectCommandNV - size-07659 # The @size@ /must/ be
' Vulkan . Core10.FundamentalTypes . DeviceAddress ' ,
' Vulkan . Core10.FundamentalTypes . DeviceSize '
-- | VkCopyMemoryIndirectCommandNV - Structure specifying an indirect memory
-- region copy operation.
--
-- @srcAddress@, @dstAddress@ and @size@ /must/ all be 4-byte aligned.
data CopyMemoryIndirectCommandNV = CopyMemoryIndirectCommandNV
  { -- | @srcAddress@ is the starting address of the source memory region.
    srcAddress :: DeviceAddress
  , -- | @dstAddress@ is the starting address of the destination memory region.
    dstAddress :: DeviceAddress
  , -- | @size@ is the size of the copied region, in bytes.
    size :: DeviceSize
  }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (CopyMemoryIndirectCommandNV)
#endif
deriving instance Show CopyMemoryIndirectCommandNV
-- Marshal 'CopyMemoryIndirectCommandNV' to its C layout: 24 bytes, 8-byte
-- aligned; fields at byte offsets 0 (srcAddress), 8 (dstAddress), 16 (size).
instance ToCStruct CopyMemoryIndirectCommandNV where
  withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p CopyMemoryIndirectCommandNV{..} f = do
    poke ((p `plusPtr` 0 :: Ptr DeviceAddress)) (srcAddress)
    poke ((p `plusPtr` 8 :: Ptr DeviceAddress)) (dstAddress)
    poke ((p `plusPtr` 16 :: Ptr DeviceSize)) (size)
    f
  cStructSize = 24
  cStructAlignment = 8
  -- Write an all-zero struct (used when only defaults are required).
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr DeviceAddress)) (zero)
    poke ((p `plusPtr` 8 :: Ptr DeviceAddress)) (zero)
    poke ((p `plusPtr` 16 :: Ptr DeviceSize)) (zero)
    f

-- Read the struct back from C memory, mirroring the offsets used above.
instance FromCStruct CopyMemoryIndirectCommandNV where
  peekCStruct p = do
    srcAddress <- peek @DeviceAddress ((p `plusPtr` 0 :: Ptr DeviceAddress))
    dstAddress <- peek @DeviceAddress ((p `plusPtr` 8 :: Ptr DeviceAddress))
    size <- peek @DeviceSize ((p `plusPtr` 16 :: Ptr DeviceSize))
    pure $ CopyMemoryIndirectCommandNV
             srcAddress dstAddress size

-- Storable delegates to the CStruct marshalling above.
instance Storable CopyMemoryIndirectCommandNV where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())

-- All-zero value, usable as a default.
instance Zero CopyMemoryIndirectCommandNV where
  zero = CopyMemoryIndirectCommandNV
           zero
           zero
           zero
@srcAddress@ /must/ be 4 byte aligned
- # VUID - VkCopyMemoryToImageIndirectCommandNV - bufferRowLength-07679 #
@bufferRowLength@ /must/ be @0@ , or greater than or equal to the
@width@ member of @imageExtent@
@bufferImageHeight@ /must/ be @0@ , or greater than or equal to the
and can be @0@
' Vulkan . Core10.CommandBufferBuilding . ImageSubresourceLayers '
' Vulkan . Core10.FundamentalTypes . DeviceAddress ' ,
' Vulkan . Core10.FundamentalTypes . Extent3D ' ,
' Vulkan . Core10.CommandBufferBuilding . ImageSubresourceLayers ' ,
' Vulkan . Core10.FundamentalTypes . Offset3D '
-- | VkCopyMemoryToImageIndirectCommandNV - Structure specifying an indirect
-- buffer-to-image copy operation.
data CopyMemoryToImageIndirectCommandNV = CopyMemoryToImageIndirectCommandNV
  { -- | @srcAddress@ is the starting address of the source memory; it /must/
    -- be 4-byte aligned.
    srcAddress :: DeviceAddress
  , -- | @bufferRowLength@ and @bufferImageHeight@ specify in texels a subregion
    -- of a larger two- or three-dimensional image in buffer memory, and
    -- control the addressing calculations. If either of these values is zero,
    -- that aspect of the buffer memory is considered to be tightly packed
    -- according to the @imageExtent@.
    bufferRowLength :: Word32
  , -- | No documentation found for Nested "VkCopyMemoryToImageIndirectCommandNV" "bufferImageHeight"
    bufferImageHeight :: Word32
  , -- | @imageSubresource@ is an
    -- 'Vulkan.Core10.CommandBufferBuilding.ImageSubresourceLayers' used to
    -- specify the specific image subresource of the destination image.
    imageSubresource :: ImageSubresourceLayers
  , -- | @imageOffset@ selects the initial @x@, @y@, @z@ offsets in texels of
    -- the sub-region of the destination image data.
    imageOffset :: Offset3D
  , -- | @imageExtent@ is the size in texels of the destination image in @width@,
    -- @height@ and @depth@.
    imageExtent :: Extent3D
  }
  deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (CopyMemoryToImageIndirectCommandNV)
#endif
deriving instance Show CopyMemoryToImageIndirectCommandNV
-- Marshal 'CopyMemoryToImageIndirectCommandNV' to its C layout: 56 bytes,
-- 8-byte aligned; offsets 0 (srcAddress), 8 (bufferRowLength),
-- 12 (bufferImageHeight), 16 (imageSubresource), 32 (imageOffset),
-- 44 (imageExtent).
instance ToCStruct CopyMemoryToImageIndirectCommandNV where
  withCStruct x f = allocaBytes 56 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p CopyMemoryToImageIndirectCommandNV{..} f = do
    poke ((p `plusPtr` 0 :: Ptr DeviceAddress)) (srcAddress)
    poke ((p `plusPtr` 8 :: Ptr Word32)) (bufferRowLength)
    poke ((p `plusPtr` 12 :: Ptr Word32)) (bufferImageHeight)
    poke ((p `plusPtr` 16 :: Ptr ImageSubresourceLayers)) (imageSubresource)
    poke ((p `plusPtr` 32 :: Ptr Offset3D)) (imageOffset)
    poke ((p `plusPtr` 44 :: Ptr Extent3D)) (imageExtent)
    f
  cStructSize = 56
  cStructAlignment = 8
  -- Write an all-zero struct (used when only defaults are required).
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr DeviceAddress)) (zero)
    poke ((p `plusPtr` 8 :: Ptr Word32)) (zero)
    poke ((p `plusPtr` 12 :: Ptr Word32)) (zero)
    poke ((p `plusPtr` 16 :: Ptr ImageSubresourceLayers)) (zero)
    poke ((p `plusPtr` 32 :: Ptr Offset3D)) (zero)
    poke ((p `plusPtr` 44 :: Ptr Extent3D)) (zero)
    f

-- Read the struct back from C memory, mirroring the offsets used above.
instance FromCStruct CopyMemoryToImageIndirectCommandNV where
  peekCStruct p = do
    srcAddress <- peek @DeviceAddress ((p `plusPtr` 0 :: Ptr DeviceAddress))
    bufferRowLength <- peek @Word32 ((p `plusPtr` 8 :: Ptr Word32))
    bufferImageHeight <- peek @Word32 ((p `plusPtr` 12 :: Ptr Word32))
    imageSubresource <- peekCStruct @ImageSubresourceLayers ((p `plusPtr` 16 :: Ptr ImageSubresourceLayers))
    imageOffset <- peekCStruct @Offset3D ((p `plusPtr` 32 :: Ptr Offset3D))
    imageExtent <- peekCStruct @Extent3D ((p `plusPtr` 44 :: Ptr Extent3D))
    pure $ CopyMemoryToImageIndirectCommandNV
             srcAddress
             bufferRowLength
             bufferImageHeight
             imageSubresource
             imageOffset
             imageExtent

-- Storable delegates to the CStruct marshalling above.
instance Storable CopyMemoryToImageIndirectCommandNV where
  sizeOf ~_ = 56
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())

-- All-zero value, usable as a default.
instance Zero CopyMemoryToImageIndirectCommandNV where
  zero = CopyMemoryToImageIndirectCommandNV
           zero
           zero
           zero
           zero
           zero
           zero
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2 '
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2 ' ,
used in the @pNext@ chain of ' Vulkan . Core10.Device . DeviceCreateInfo ' to
' Vulkan . Core10.FundamentalTypes . Bool32 ' ,
' Vulkan . Core10.Enums . StructureType . StructureType '
-- | VkPhysicalDeviceCopyMemoryIndirectFeaturesNV - Structure describing
-- whether indirect copy of memory can be supported on an implementation.
--
-- Chain into
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2'
-- to query support, or into the @pNext@ chain of
-- 'Vulkan.Core10.Device.DeviceCreateInfo' to enable the feature.
data PhysicalDeviceCopyMemoryIndirectFeaturesNV = PhysicalDeviceCopyMemoryIndirectFeaturesNV
  { -- | @indirectCopy@ indicates whether indirect copies are supported.
    indirectCopy :: Bool }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceCopyMemoryIndirectFeaturesNV)
#endif
deriving instance Show PhysicalDeviceCopyMemoryIndirectFeaturesNV
-- Marshal the features struct to its C layout: 24 bytes, 8-byte aligned;
-- sType at 0, pNext at 8 (always NULL here), the Bool32 feature flag at 16.
instance ToCStruct PhysicalDeviceCopyMemoryIndirectFeaturesNV where
  withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p PhysicalDeviceCopyMemoryIndirectFeaturesNV{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (indirectCopy))
    f
  cStructSize = 24
  cStructAlignment = 8
  -- Zero struct still carries the correct sType so Vulkan can identify it.
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
    f

-- Read back only the feature flag; sType/pNext are implied by the type.
instance FromCStruct PhysicalDeviceCopyMemoryIndirectFeaturesNV where
  peekCStruct p = do
    indirectCopy <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
    pure $ PhysicalDeviceCopyMemoryIndirectFeaturesNV
             (bool32ToBool indirectCopy)

-- Storable delegates to the CStruct marshalling above.
instance Storable PhysicalDeviceCopyMemoryIndirectFeaturesNV where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())

-- Default value with the feature disabled.
instance Zero PhysicalDeviceCopyMemoryIndirectFeaturesNV where
  zero = PhysicalDeviceCopyMemoryIndirectFeaturesNV
           zero
feature is supported , /must/ return at least one
' Vulkan . '
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceProperties2 ' ,
' Vulkan . Core10.Enums . QueueFlagBits . QueueFlags ' ,
' Vulkan . Core10.Enums . StructureType . StructureType '
-- | VkPhysicalDeviceCopyMemoryIndirectPropertiesNV - Structure describing the
-- queues on which indirect copy commands are supported.
--
-- If the @indirectCopy@ feature is supported, @supportedQueues@ /must/ return
-- at least one supported queue.
data PhysicalDeviceCopyMemoryIndirectPropertiesNV = PhysicalDeviceCopyMemoryIndirectPropertiesNV
  { -- | @supportedQueues@ is a bitmask of
    -- 'Vulkan.Core10.Enums.QueueFlagBits.QueueFlagBits' indicating the queues
    -- on which indirect copy commands are supported.
    supportedQueues :: QueueFlags }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceCopyMemoryIndirectPropertiesNV)
#endif
deriving instance Show PhysicalDeviceCopyMemoryIndirectPropertiesNV
-- Marshal the properties struct to its C layout: 24 bytes, 8-byte aligned;
-- sType at 0, pNext at 8 (always NULL here), the QueueFlags bitmask at 16.
instance ToCStruct PhysicalDeviceCopyMemoryIndirectPropertiesNV where
  withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p PhysicalDeviceCopyMemoryIndirectPropertiesNV{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr QueueFlags)) (supportedQueues)
    f
  cStructSize = 24
  cStructAlignment = 8
  -- Zero struct still carries the correct sType so Vulkan can identify it.
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr QueueFlags)) (zero)
    f

-- Read back only the queue bitmask; sType/pNext are implied by the type.
instance FromCStruct PhysicalDeviceCopyMemoryIndirectPropertiesNV where
  peekCStruct p = do
    supportedQueues <- peek @QueueFlags ((p `plusPtr` 16 :: Ptr QueueFlags))
    pure $ PhysicalDeviceCopyMemoryIndirectPropertiesNV
             supportedQueues

-- Storable delegates to the CStruct marshalling above.
instance Storable PhysicalDeviceCopyMemoryIndirectPropertiesNV where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())

-- Default value with no supported queues.
instance Zero PhysicalDeviceCopyMemoryIndirectPropertiesNV where
  zero = PhysicalDeviceCopyMemoryIndirectPropertiesNV
           zero
type NV_COPY_MEMORY_INDIRECT_SPEC_VERSION = 1

-- | The revision of the VK_NV_copy_memory_indirect extension implemented by
-- these bindings.
pattern NV_COPY_MEMORY_INDIRECT_SPEC_VERSION :: forall a . Integral a => a
pattern NV_COPY_MEMORY_INDIRECT_SPEC_VERSION = 1


type NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME = "VK_NV_copy_memory_indirect"

-- | The extension-name string, as passed to device creation.
pattern NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME = "VK_NV_copy_memory_indirect"
|
1089c49444d8bb4c7295e4007bfa8764601d0349f8e6bd202ae48a1071259026 | jscrane/jvml | kmeans.clj | (ns ml.kmeans
(:use (ml util)
(incanter core stats)))
(defn find-closest-centroid
  "Index of the centroid nearest to point, by squared Euclidean distance."
  [centroids point]
  (let [sq-dists (for [c centroids] (sum-of-squares (minus point c)))]
    (indexes-of? < sq-dists)))
(defn find-closest-centroids
  "Nearest-centroid index for every row of X, in row order."
  [centroids X]
  (map #(find-closest-centroid centroids %) (to-list X)))
(defn- update-sums [[sums counts] [point idx]]
  ;; Reduction step: fold one (point, assigned-centroid-index) pair into the
  ;; accumulator, adding the point into its centroid's running sum and
  ;; incrementing that centroid's member count.
  ; note this doall: otherwise the stack blows when the reduction is realised!
  [(assoc sums idx (doall (plus point (sums idx)))) (assoc counts idx (inc (counts idx)))])
(defn compute-centroids
  "Recompute the k centroids as the mean of the rows of X assigned to each.
  idx is the per-row assignment seq produced by find-closest-centroids."
  [X idx k]
  (let [zero-counts (zeroes k)
        zero-sums (vec (repeat k (zeroes (ncol X))))
        [sums counts] (reduce update-sums
                              [zero-sums zero-counts]
                              (map vector (to-list X) idx))]
    (matrix (map div sums counts))))
(defn- kmeans
  "One Lloyd iteration: assign each row of X to its nearest centroid, then
  recompute every centroid as the mean of its assigned rows."
  [X centroids]
  (let [assignments (find-closest-centroids centroids X)]
    (compute-centroids X assignments (nrow centroids))))
(defn run-kmeans
  "Run n k-means iterations on X starting from initial-centroids."
  [X initial-centroids n]
  (nth (iterate #(kmeans X %) initial-centroids) n))
(defn init-centroids
  "Choose k distinct rows of X at random as the initial centroids."
  [X k]
  (let [shuffled-rows (permute (range (nrow X)))]
    (sel X :rows (take k shuffled-rows))))
(:use (ml util)
(incanter core stats)))
(defn find-closest-centroid [centroids point]
(indexes-of? < (map #(sum-of-squares (minus point %)) centroids)))
(defn find-closest-centroids [centroids X]
(map (partial find-closest-centroid centroids) (to-list X)))
(defn- update-sums [[sums counts] [point idx]]
[(assoc sums idx (doall (plus point (sums idx)))) (assoc counts idx (inc (counts idx)))])
(defn compute-centroids [X idx k]
(let [ic (zeroes k)
is (vec (repeat k (zeroes (ncol X))))
[s c] (reduce update-sums [is ic] (map vector (to-list X) idx))]
(matrix (map div s c))))
(defn- kmeans [X centroids]
(compute-centroids X (find-closest-centroids centroids X) (nrow centroids)))
(defn run-kmeans [X initial-centroids n]
(nth (iterate (partial kmeans X) initial-centroids) n))
(defn init-centroids [X k]
(sel X :rows (take k (permute (range (nrow X)))))) |
b8c159036ab68ae0c8c8d4dfdedb42dab00448cbbf0b88c20bd2ed17976735a1 | garrigue/labltk | frx_listbox.ml | (***********************************************************************)
(*                                                                     *)
(*                 MLTk, Tcl/Tk interface of OCaml                     *)
(*                                                                     *)
(*    Francois Rouaix, Francois Pessaux, Jun Furuse and Pierre Weis    *)
(*               projet Cristal, INRIA Rocquencourt                    *)
(*            Jacques Garrigue, Kyoto University RIMS                  *)
(*                                                                     *)
(*  Copyright 2002 Institut National de Recherche en Informatique et   *)
(*  en Automatique and Kyoto University.  All rights reserved.         *)
(*  This file is distributed under the terms of the GNU Library        *)
(*  General Public License, with the special exception on linking      *)
(*  described in file LICENSE found in the OCaml source tree.          *)
(*                                                                     *)
(***********************************************************************)
open Camltk
let version = "$Id$"
(*
* Link a scrollbar and a listbox
*)
(* Tie a scrollbar and a listbox together: the listbox keeps the
   scrollbar's thumb up to date, and the scrollbar drives the
   listbox's vertical view. *)
let scroll_link sb lb =
  Listbox.configure lb [YScrollCommand (Scrollbar.set sb)];
  Scrollbar.configure sb [ScrollCommand (Listbox.yview lb)]
(*
 * Completion for listboxes, Macintosh style.
 * As long as you type fast enough, the listbox is repositioned to the
 * first entry "greater" than the typed prefix.
 * assumes:
 *    sorted list (otherwise it's stupid)
 *    fixed size, because we don't recompute size at each callback invocation
 *)
(* Attach Macintosh-style typing completion to listbox [lb], and bind
   [action] to the Return key. Requires keyboard focus, which is grabbed
   at the end. *)
let add_completion lb action =
  let prefx = ref "" (* current match prefix *)
  and maxi = Listbox.size lb - 1 (* maximum index (doesn't matter actually) *)
  and current = ref 0 (* current position *)
  and lastevent = ref 0 in
  (* Advance [current] to the first entry >= the typed prefix. *)
  let rec move_forward () =
    if Listbox.get lb (Number !current) < !prefx then
      if !current < maxi then begin incr current; move_forward() end
  and recenter () =
    let element = Number !current in
    (* Clean the selection *)
    Listbox.selection_clear lb (Number 0) End;
    (* Set it to our unique element *)
    Listbox.selection_set lb element element;
    (* Activate it, to keep consistent with Up/Down.
       You have to be in Extended or Browse mode *)
    Listbox.activate lb element;
    Listbox.selection_anchor lb element;
    Listbox.see lb element in
  let complete time s =
    if time - !lastevent < 500 then (* sorry, hard coded limit *)
      prefx := !prefx ^ s
    else begin (* reset *)
      current := 0;
      prefx := s
    end;
    lastevent := time;
    move_forward();
    recenter() in
  bind lb [[], KeyPress]
    (BindSet([Ev_Char; Ev_Time],
             (function ev ->
               (* consider only keys producing characters. The callback is called
                * even if you press Shift.
                *)
               if ev.ev_Char <> "" then complete ev.ev_Time ev.ev_Char)));
  (* Key specific bindings override KeyPress *)
  bind lb [[], KeyPressDetail "Return"] (BindSet([], action));
  (* Finally, we have to set focus, otherwise events dont get through *)
  Focus.set lb;
  recenter()  (* so that first item is selected *)
(* Build a frame containing a listbox (created with [options]) and an
   attached vertical scrollbar. Returns the enclosing frame, for the
   caller to pack, together with the listbox itself. *)
let new_scrollable_listbox top options =
  let frame = Frame.create top [] in
  let listbox = Listbox.create frame options in
  let scrollbar = Scrollbar.create frame [] in
  scroll_link scrollbar listbox;
  pack [listbox] [Side Side_Left; Fill Fill_Both; Expand true];
  pack [scrollbar] [Side Side_Left; Fill Fill_Y];
  frame, listbox
| null | https://raw.githubusercontent.com/garrigue/labltk/c7f50b4faed57f1ac03cb3c9aedc35b10d36bdb6/frx/frx_listbox.ml | ocaml | *********************************************************************
described in file LICENSE found in the OCaml source tree.
*********************************************************************
* Link a scrollbar and a listbox
current match prefix
maximum index (doesn't matter actually)
current position
Clean the selection
Set it to our unique element
sorry, hard coded limit
reset
consider only keys producing characters. The callback is called
* even if you press Shift.
Finally, we have to set focus, otherwise events dont get through | MLTk , Tcl / Tk interface of OCaml
, , and
projet Cristal , INRIA Rocquencourt
, Kyoto University RIMS
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
General Public License , with the special exception on linking
open Camltk
let version = "$Id$"
let scroll_link sb lb =
Listbox.configure lb
[YScrollCommand (Scrollbar.set sb)];
Scrollbar.configure sb
[ScrollCommand (Listbox.yview lb)]
* Completion for listboxes , Macintosh style .
* As long as you type fast enough , the listbox is repositioned to the
* first entry " greater " than the typed prefix .
* assumes :
* sorted list ( otherwise it 's stupid )
* fixed size , because we do n't recompute size at each callback invocation
* Completion for listboxes, Macintosh style.
* As long as you type fast enough, the listbox is repositioned to the
* first entry "greater" than the typed prefix.
* assumes:
* sorted list (otherwise it's stupid)
* fixed size, because we don't recompute size at each callback invocation
*)
let add_completion lb action =
and lastevent = ref 0 in
let rec move_forward () =
if Listbox.get lb (Number !current) < !prefx then
if !current < maxi then begin incr current; move_forward() end
and recenter () =
let element = Number !current in
Listbox.selection_clear lb (Number 0) End;
Listbox.selection_set lb element element;
Activate it , to keep consistent with Up / Down .
You have to be in Extended or Browse mode
You have to be in Extended or Browse mode *)
Listbox.activate lb element;
Listbox.selection_anchor lb element;
Listbox.see lb element in
let complete time s =
prefx := !prefx ^ s
current := 0;
prefx := s
end;
lastevent := time;
move_forward();
recenter() in
bind lb [[], KeyPress]
(BindSet([Ev_Char; Ev_Time],
(function ev ->
if ev.ev_Char <> "" then complete ev.ev_Time ev.ev_Char)));
Key specific bindings override KeyPress
bind lb [[], KeyPressDetail "Return"] (BindSet([], action));
Focus.set lb;
so that first item is selected
let new_scrollable_listbox top options =
let f = Frame.create top [] in
let lb = Listbox.create f options
and sb = Scrollbar.create f [] in
scroll_link sb lb;
pack [lb] [Side Side_Left; Fill Fill_Both; Expand true];
pack [sb] [Side Side_Left; Fill Fill_Y];
f, lb
|
4084eb79bb686795d617a9777fb8b8ba1e2222fd4d29a758bd7e356e3f42cf59 | racket/gui | include-bitmap.rkt | #lang racket/base
(require racket/gui/base
racket/class
racket/file
setup/main-collects)
(require (for-syntax racket/base
syntax/path-spec
compiler/cm-accomplice
setup/main-collects))
(provide include-bitmap
include-bitmap/relative-to)
;; Compile-time helper shared by `include-bitmap` and
;; `include-bitmap/relative-to`: resolves `path-spec` relative to `source`,
;; reads the image file's raw bytes at expansion time, registers the file as
;; an external dependency (so recompilation is triggered when it changes),
;; and expands into a run-time call that decodes the embedded bytes.
(define-syntax (-include-bitmap stx)
  (syntax-case stx ()
    [(_ orig-stx source path-spec type)
     (let* ([c-file (resolve-path-spec #'path-spec #'source #'orig-stx)]
            ;; Read the whole file now; any failure is reported as an
            ;; `include-bitmap` error mentioning the resolved path.
            [content
             (with-handlers ([exn:fail?
                              (lambda (exn)
                                (error 'include-bitmap
                                       "could not load ~e: ~a"
                                       c-file
                                       (if (exn? exn)
                                         (exn-message exn)
                                         (format "~e" exn))))])
               (with-input-from-file c-file
                 (lambda ()
                   (read-bytes (file-size c-file)))))])
       (register-external-file c-file)
       ;; Embed the bytes and a main-collects-relative path (for error
       ;; messages) into the expansion.
       (with-syntax ([content content]
                     [c-file (path->main-collects-relative c-file)])
         (syntax/loc stx
           (get-or-load-bitmap content 'path-spec type))))]))
;; (include-bitmap/relative-to source path-spec [type])
;; Like `include-bitmap`, but resolves `path-spec` relative to the source
;; location of `source` rather than the use site. `type` defaults to
;; 'unknown/mask (auto-detect the image format, keeping any mask).
(define-syntax (include-bitmap/relative-to stx)
  (syntax-case stx ()
    [(_ source path-spec) #`(-include-bitmap #,stx source path-spec 'unknown/mask)]
    [(_ source path-spec type) #`(-include-bitmap #,stx source path-spec type)]))
;; (include-bitmap path-spec [type])
;; Embeds the image at `path-spec` (resolved relative to the use site) into
;; the compiled code, producing a bitmap% at run time. `type` defaults to
;; 'unknown/mask (auto-detect the image format, keeping any mask).
(define-syntax (include-bitmap stx)
  (syntax-case stx ()
    [(_ path-spec) #`(-include-bitmap #,stx #,stx path-spec 'unknown/mask)]
    [(_ path-spec type) #`(-include-bitmap #,stx #,stx path-spec type)]))
;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Run-time support
;; Cache of decoded bitmaps, keyed by (cons raw-bytes type), so the same
;; embedded image data is decoded at most once per requested type.
(define cached (make-hash))

;; Decode `content` (the raw image bytes embedded at compile time) into a
;; bitmap%, consulting/filling the cache. `orig` is the main-collects-relative
;; source path, used only in the error message when decoding fails.
(define (get-or-load-bitmap content orig type)
  (define key (cons content type))
  (define (decode)
    ;; Feed the bytes to bitmap% through a pipe; the writer runs in its own
    ;; thread so the reader never blocks on a full pipe buffer.
    (define-values (in out) (make-pipe))
    (thread
     (λ ()
       (display content out)
       (close-output-port out)))
    (define bm (make-object bitmap% in type))
    (unless (send bm ok?)
      (error 'include-bitmap
             "unable to parse image, originated from: ~a"
             (path->string (main-collects-relative->path orig))))
    bm)
  (hash-ref! cached key decode))
| null | https://raw.githubusercontent.com/racket/gui/d1fef7a43a482c0fdd5672be9a6e713f16d8be5c/gui-lib/mrlib/include-bitmap.rkt | racket | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Run-time support | #lang racket/base
(require racket/gui/base
racket/class
racket/file
setup/main-collects)
(require (for-syntax racket/base
syntax/path-spec
compiler/cm-accomplice
setup/main-collects))
(provide include-bitmap
include-bitmap/relative-to)
(define-syntax (-include-bitmap stx)
(syntax-case stx ()
[(_ orig-stx source path-spec type)
(let* ([c-file (resolve-path-spec #'path-spec #'source #'orig-stx)]
[content
(with-handlers ([exn:fail?
(lambda (exn)
(error 'include-bitmap
"could not load ~e: ~a"
c-file
(if (exn? exn)
(exn-message exn)
(format "~e" exn))))])
(with-input-from-file c-file
(lambda ()
(read-bytes (file-size c-file)))))])
(register-external-file c-file)
(with-syntax ([content content]
[c-file (path->main-collects-relative c-file)])
(syntax/loc stx
(get-or-load-bitmap content 'path-spec type))))]))
(define-syntax (include-bitmap/relative-to stx)
(syntax-case stx ()
[(_ source path-spec) #`(-include-bitmap #,stx source path-spec 'unknown/mask)]
[(_ source path-spec type) #`(-include-bitmap #,stx source path-spec type)]))
(define-syntax (include-bitmap stx)
(syntax-case stx ()
[(_ path-spec) #`(-include-bitmap #,stx #,stx path-spec 'unknown/mask)]
[(_ path-spec type) #`(-include-bitmap #,stx #,stx path-spec type)]))
(define cached (make-hash))
(define (get-or-load-bitmap content orig type)
(hash-ref cached
(cons content type)
(λ ()
(define-values (in out) (make-pipe))
(thread
(λ ()
(display content out)
(close-output-port out)))
(define bm (make-object bitmap% in type))
(unless (send bm ok?)
(error 'include-bitmap
"unable to parse image, originated from: ~a"
(path->string (main-collects-relative->path orig))))
(hash-set! cached (cons content type) bm)
bm)))
|
eac7507eefaa8d84895b8beef5451e6ec5b89d77c3aa18900838439bda33a71d | JacquesCarette/Drasil | Print.hs | -- | Defines functions to help debug examples. Generated files appear in ./code/debug.
module Language.Drasil.Log.Print where
import Language.Drasil hiding (symbol)
import Language.Drasil.Development (showUID)
import qualified Language.Drasil as L (symbol)
import Database.Drasil
import Utils.Drasil (stringList)
import qualified Data.Map as Map
import Control.Lens ((^.), view)
import Data.List (nub, sort, sortBy)
import Data.Maybe (fromMaybe)
import Data.Bifunctor (second)
import Data.Function (on)
import Text.PrettyPrint.HughesPJ
import Language.Drasil.Plain.Print
import Language.Drasil.Printing.PrintingInformation
import Prelude hiding ((<>))
-- * Main Function
-- | Gathers all printing functions and creates the debugging tables from them.
printAllDebugInfo :: PrintingInformation -> [Doc]
printAllDebugInfo pinfo = map (cdbSection . ($ pinfo)) [mkTableReferencedChunks, mkTableDepChunks, mkTableDepReffedChunks,
mkTableSymb, mkTableOfTerms, mkTableConcepts, mkTableUnitDefn,
mkTableDataDef, mkTableGenDef, mkTableTMod, mkTableIMod, mkTableCI,
mkTableSec, mkTableLC, mkTableRef, renderUsedUIDs . mkListShowUsedUIDs]
-- * Helpers
-- ** Separators
-- | Debugging table separator.
cdbSection :: Doc -> Doc
cdbSection dd = text (replicate 100 '#' ++ "\n") $$ dd $$ text "\n"
-- | Header for debugging tables.
header :: Doc -> Doc
header d = text (replicate 100 '-') $$ d $$ text (replicate 100 '-')
-- ** Table Generators
-- | General function to make the debugging tables. Takes in printing information, a function
that extracts a certain field from the printing information , a title , three column headers ,
and three functions that sort the data from the printing information field into the
required display formats ( often ' UID 's , terms , shortnames , definitions , etc . ) .
mkTableFromLenses :: PrintingInformation -> (ChunkDB -> UMap a)
-> String -> String -> String -> String -> (a -> Doc) -> (a -> Doc) -> (a -> Doc) -> Doc
mkTableFromLenses PI{_ckdb = db} tableLens ttle h1 h2 h3 l1 l2 l3 =
text ttle <> colon
$$ header (text h1 $$ nest nestNum (text h2) $$ nest (nestNum*3) (text h3))
$$ vcat (map chunkLayout chunks)
where
chunkLayout x = l1 x $$ nest nestNum (l2 x)
$$ nest (nestNum*3) (l3 x)
chunks = map (fst.snd) (Map.assocs $ tableLens db)
nestNum = 30
| Makes a table with all symbolic quantities in the SRS .
mkTableSymb :: PrintingInformation -> Doc
mkTableSymb pinfo = mkTableFromLenses pinfo symbolTable
"Symbol Chunks" "UID" "Term" "Symbol"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(symbolDoc . flip L.symbol (pinfo ^. stg))
| Makes a table with terms in the SRS .
mkTableOfTerms :: PrintingInformation -> Doc
mkTableOfTerms pinfo = mkTableFromLenses pinfo termTable
"Term Chunks" "UID" "Term" "Abbreviation"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(text . fromMaybe "" . getA)
| Makes a table with all concepts in the SRS .
mkTableConcepts :: PrintingInformation -> Doc
mkTableConcepts pinfo = mkTableFromLenses pinfo defTable
"Concepts" "UID" "Term" "Definition"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . view defn)
| Makes a table with all units used in the SRS .
mkTableUnitDefn :: PrintingInformation -> Doc
mkTableUnitDefn pinfo = mkTableFromLenses pinfo (view unitTable)
"Unit Definitions" "UID" "Term" "Unit Symbol"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . Sy . usymb)
| Makes a table with all data definitions in the SRS .
mkTableDataDef :: PrintingInformation -> Doc
mkTableDataDef pinfo = mkTableFromLenses pinfo (view dataDefnTable)
"Data Definitions" "UID" "Term" "Symbol"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(symbolDoc . flip L.symbol (pinfo ^. stg))
| Makes a table with all general definitions in the SRS .
mkTableGenDef :: PrintingInformation -> Doc
mkTableGenDef pinfo = mkTableFromLenses pinfo (view gendefTable)
"General Definitions" "UID" "Term" "Definition"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . view defn)
| Makes a table with all theoretical models in the SRS .
mkTableTMod :: PrintingInformation -> Doc
mkTableTMod pinfo = mkTableFromLenses pinfo (view theoryModelTable)
"Theory Models" "UID" "Term" "Definition"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . view defn)
| Makes a table with all instance models in the SRS .
mkTableIMod :: PrintingInformation -> Doc
mkTableIMod pinfo = mkTableFromLenses pinfo (view insmodelTable)
"Instance Models" "UID" "Term" "Definition"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . view defn)
| Makes a table with all concept instances in the SRS .
mkTableCI :: PrintingInformation -> Doc
mkTableCI pinfo = mkTableFromLenses pinfo (view conceptinsTable)
"ConceptInstance" "UID" "Term" "ShortName"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . getSentSN . shortname)
| Makes a table with all sections in the SRS .
mkTableSec :: PrintingInformation -> Doc
mkTableSec pinfo = mkTableFromLenses pinfo (view sectionTable)
"Sections" "UID" "Title" "ShortName"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . tle)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . getSentSN . shortname)
| Makes a table with all labelled content in the SRS .
mkTableLC :: PrintingInformation -> Doc
mkTableLC pinfo = mkTableFromLenses pinfo (view labelledcontentTable)
"LabelledContent" "UID" "ShortName" "Type of Content"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . getSentSN . shortname)
(text . getContConst . view accessContents)
where
getContConst :: RawContent -> String
getContConst Table{} = "Table"
getContConst Paragraph{} = "Paragraph"
getContConst EqnBlock{} = "Equation"
getContConst DerivBlock{} = "Derivation"
getContConst Enumeration{} = "Enumeration"
getContConst Defini{} = "Definition or Model"
getContConst Figure{} = "Figure"
getContConst Bib{} = "Bibliography"
getContConst Graph{} = "Graph"
| Makes a table with all references in the SRS .
mkTableRef :: PrintingInformation -> Doc
mkTableRef pinfo = mkTableFromLenses pinfo (view refTable)
"Reference" "UID" "Reference Address" "ShortName"
(text . showUID)
(text . getAdd . getRefAdd)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . getSentSN . shortname)
-- | Chunks that depend on other chunks. An empty list means the chunks do not depend on anything.
mkTableDepChunks :: PrintingInformation -> Doc
mkTableDepChunks PI{_ckdb = db} = text "Dependent Chunks (the chunks on the left use the chunks on the right in some capacity)" <> colon
$$ header (text "UID" $$ nest nestNum (text "Dependent UIDs"))
$$ vcat (map testIndepLayout traceMapUIDs)
where
testIndepLayout :: (UID, [UID]) -> Doc
testIndepLayout (x, ys) = text (show x) $$ nest nestNum (text $ show ys)
traceMapUIDs :: [(UID, [UID])]
traceMapUIDs = Map.assocs $ db ^. traceTable
nestNum = 30
-- | Chunks that are referenced and used by other chunks.
-- Those chunks build on top of the ones listed here.
mkTableReferencedChunks :: PrintingInformation -> Doc
mkTableReferencedChunks PI{_ckdb = db} = text "Referenced Chunks (other chunks build from these)" <> colon
$$ header (text "UID" $$ nest nestNum (text "UIDs that use the left UID"))
$$ vcat (map testIsolateLayout refbyUIDs)
where
testIsolateLayout :: (UID, [UID]) -> Doc
testIsolateLayout (x, ys) = text (show x) $$ nest nestNum (text $ show ys)
refbyUIDs :: [(UID, [UID])]
refbyUIDs = Map.assocs $ db ^. refbyTable
nestNum = 30
-- | Chunks that use and are used by other chunks.
mkTableDepReffedChunks :: PrintingInformation -> Doc
mkTableDepReffedChunks PI{_ckdb = db} = text "Dependent and Referenced Chunks (chunks dependent on middle UIDs and used in the chunks on the right)" <> colon
$$ header (text "UID" $$ nest nestNum (text "Dependent Chunk") $$ nest (nestNum*3) (text "Used-in Chunk"))
$$ vcat (map traceRefLayout $ Map.assocs combinedMaps)
where
traceRefLayout :: (UID, ([UID], [UID])) -> Doc
traceRefLayout x = text (show $ fst x) $$ nest nestNum (text $ show $ fst $ snd x)
$$ nest (nestNum*3) (text $ show $ snd $ snd x)
combinedMaps = Map.unionWith (\x y -> (fst x, snd y)) traceMapUIDs refByUIDs
traceMapUIDs = Map.fromList $ map (\(x, y) -> (x, (y, []))) $ Map.assocs $ db ^. traceTable
refByUIDs = Map.fromList $ map (\(x, y) -> (x, ([], y))) $ Map.assocs $ db ^. refbyTable
nestNum = 30
-- ** 'UID' Manipulation
-- | Creates a table of all UIDs and their "highest" recorded level of information. See 'mkListShowUsedUIDs'
-- for more details.
renderUsedUIDs :: [(UID, String)] -> Doc
renderUsedUIDs chs = header (text "UIDs" $$ nest 40 (text "Associated Chunks")) $$ vcat (map renderUsedUID chs)
where
renderUsedUID (u, chks) = text (show u) $$ nest 40 (text chks)
| For the last section of the log output . Shows which chunk UID is being used at which stage .
Note that chunks used at a " higher stage " ( like ' Concept 's and ' QuantityDict 's ) will still be built off of the
-- more basic types (like 'IdeaDict's), they are just not explicitly used in that manner.
-- Also, some chunks may have been "downgraded" when put into the database (for example, mapping a
' QuantityDict ' wrapper onto things like Constrained and Unital chunks happens often ) .
mkListShowUsedUIDs :: PrintingInformation -> [(UID, String)]
mkListShowUsedUIDs PI{_ckdb = db} = sortBy (compare `on` fst) $ map (second stringList) $ Map.toList $ Map.fromListWith (++) $
map (\x -> (fst x, ["QuantityDict"])) (Map.assocs $ symbolTable db) ++
map (\x -> (fst x, ["IdeaDict"])) (Map.assocs $ termTable db) ++
map (\x -> (fst x, ["ConceptChunk"])) (Map.assocs $ defTable db) ++
map (\x -> (fst x, ["UnitDefn"])) (Map.assocs $ db ^. unitTable) ++
map (\x -> (fst x, ["DataDefinition"])) (Map.assocs $ db ^. dataDefnTable) ++
map (\x -> (fst x, ["InstanceModel"])) (Map.assocs $ db ^. insmodelTable) ++
map (\x -> (fst x, ["GeneralDefinition"])) (Map.assocs $ db ^. gendefTable) ++
map (\x -> (fst x, ["TheoryModel"])) (Map.assocs $ db ^. theoryModelTable) ++
map (\x -> (fst x, ["ConceptInstance"])) (Map.assocs $ db ^. conceptinsTable) ++
map (\x -> (fst x, ["Section"])) (Map.assocs $ db ^. sectionTable) ++
map (\x -> (fst x, ["LabelledContent"])) (Map.assocs $ db ^. labelledcontentTable) ++
map (\x -> (fst x, ["Reference"])) (Map.assocs $ db ^. refTable)
-- Currently Unused
| Get all ' UID 's from a database ( ' ' ) .
mkListAll :: ChunkDB -> [UID]
mkListAll db = nub $ sort $
map fst (Map.assocs $ symbolTable db) ++
map fst (Map.assocs $ termTable db) ++
map fst (Map.assocs $ defTable db) ++
map fst (Map.assocs $ db ^. unitTable) ++
map fst (Map.assocs $ db ^. traceTable) ++
map fst (Map.assocs $ db ^. refbyTable) ++
map fst (Map.assocs $ db ^. dataDefnTable) ++
map fst (Map.assocs $ db ^. insmodelTable) ++
map fst (Map.assocs $ db ^. gendefTable) ++
map fst (Map.assocs $ db ^. theoryModelTable) ++
map fst (Map.assocs $ db ^. conceptinsTable) ++
map fst (Map.assocs $ db ^. sectionTable) ++
map fst (Map.assocs $ db ^. labelledcontentTable) ++
map fst (Map.assocs $ db ^. refTable)
| null | https://raw.githubusercontent.com/JacquesCarette/Drasil/92dddf7a545ba5029f99ad5c5eddcd8dad56a2d8/code/drasil-printers/lib/Language/Drasil/Log/Print.hs | haskell | | Defines functions to help debug examples. Generated files appear in ./code/debug.
* Main Function
| Gathers all printing functions and creates the debugging tables from them.
* Helpers
** Separators
| Debugging table separator.
| Header for debugging tables.
** Table Generators
| General function to make the debugging tables. Takes in printing information, a function
| Chunks that depend on other chunks. An empty list means the chunks do not depend on anything.
| Chunks that are referenced and used by other chunks.
Those chunks build on top of the ones listed here.
| Chunks that use and are used by other chunks.
** 'UID' Manipulation
| Creates a table of all UIDs and their "highest" recorded level of information. See 'mkListShowUsedUIDs'
for more details.
more basic types (like 'IdeaDict's), they are just not explicitly used in that manner.
Also, some chunks may have been "downgraded" when put into the database (for example, mapping a
Currently Unused | module Language.Drasil.Log.Print where
import Language.Drasil hiding (symbol)
import Language.Drasil.Development (showUID)
import qualified Language.Drasil as L (symbol)
import Database.Drasil
import Utils.Drasil (stringList)
import qualified Data.Map as Map
import Control.Lens ((^.), view)
import Data.List (nub, sort, sortBy)
import Data.Maybe (fromMaybe)
import Data.Bifunctor (second)
import Data.Function (on)
import Text.PrettyPrint.HughesPJ
import Language.Drasil.Plain.Print
import Language.Drasil.Printing.PrintingInformation
import Prelude hiding ((<>))
printAllDebugInfo :: PrintingInformation -> [Doc]
printAllDebugInfo pinfo = map (cdbSection . ($ pinfo)) [mkTableReferencedChunks, mkTableDepChunks, mkTableDepReffedChunks,
mkTableSymb, mkTableOfTerms, mkTableConcepts, mkTableUnitDefn,
mkTableDataDef, mkTableGenDef, mkTableTMod, mkTableIMod, mkTableCI,
mkTableSec, mkTableLC, mkTableRef, renderUsedUIDs . mkListShowUsedUIDs]
cdbSection :: Doc -> Doc
cdbSection dd = text (replicate 100 '#' ++ "\n") $$ dd $$ text "\n"
header :: Doc -> Doc
header d = text (replicate 100 '-') $$ d $$ text (replicate 100 '-')
that extracts a certain field from the printing information , a title , three column headers ,
and three functions that sort the data from the printing information field into the
required display formats ( often ' UID 's , terms , shortnames , definitions , etc . ) .
mkTableFromLenses :: PrintingInformation -> (ChunkDB -> UMap a)
-> String -> String -> String -> String -> (a -> Doc) -> (a -> Doc) -> (a -> Doc) -> Doc
mkTableFromLenses PI{_ckdb = db} tableLens ttle h1 h2 h3 l1 l2 l3 =
text ttle <> colon
$$ header (text h1 $$ nest nestNum (text h2) $$ nest (nestNum*3) (text h3))
$$ vcat (map chunkLayout chunks)
where
chunkLayout x = l1 x $$ nest nestNum (l2 x)
$$ nest (nestNum*3) (l3 x)
chunks = map (fst.snd) (Map.assocs $ tableLens db)
nestNum = 30
| Makes a table with all symbolic quantities in the SRS .
mkTableSymb :: PrintingInformation -> Doc
mkTableSymb pinfo = mkTableFromLenses pinfo symbolTable
"Symbol Chunks" "UID" "Term" "Symbol"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(symbolDoc . flip L.symbol (pinfo ^. stg))
| Makes a table with terms in the SRS .
mkTableOfTerms :: PrintingInformation -> Doc
mkTableOfTerms pinfo = mkTableFromLenses pinfo termTable
"Term Chunks" "UID" "Term" "Abbreviation"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(text . fromMaybe "" . getA)
| Makes a table with all concepts in the SRS .
mkTableConcepts :: PrintingInformation -> Doc
mkTableConcepts pinfo = mkTableFromLenses pinfo defTable
"Concepts" "UID" "Term" "Definition"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . view defn)
| Makes a table with all units used in the SRS .
mkTableUnitDefn :: PrintingInformation -> Doc
mkTableUnitDefn pinfo = mkTableFromLenses pinfo (view unitTable)
"Unit Definitions" "UID" "Term" "Unit Symbol"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . Sy . usymb)
| Makes a table with all data definitions in the SRS .
mkTableDataDef :: PrintingInformation -> Doc
mkTableDataDef pinfo = mkTableFromLenses pinfo (view dataDefnTable)
"Data Definitions" "UID" "Term" "Symbol"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(symbolDoc . flip L.symbol (pinfo ^. stg))
| Makes a table with all general definitions in the SRS .
mkTableGenDef :: PrintingInformation -> Doc
mkTableGenDef pinfo = mkTableFromLenses pinfo (view gendefTable)
"General Definitions" "UID" "Term" "Definition"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . view defn)
| Makes a table with all theoretical models in the SRS .
mkTableTMod :: PrintingInformation -> Doc
mkTableTMod pinfo = mkTableFromLenses pinfo (view theoryModelTable)
"Theory Models" "UID" "Term" "Definition"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . view defn)
| Makes a table with all instance models in the SRS .
mkTableIMod :: PrintingInformation -> Doc
mkTableIMod pinfo = mkTableFromLenses pinfo (view insmodelTable)
"Instance Models" "UID" "Term" "Definition"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . view defn)
| Makes a table with all concept instances in the SRS .
mkTableCI :: PrintingInformation -> Doc
mkTableCI pinfo = mkTableFromLenses pinfo (view conceptinsTable)
"ConceptInstance" "UID" "Term" "ShortName"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . phraseNP . view term)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . getSentSN . shortname)
| Makes a table with all sections in the SRS .
mkTableSec :: PrintingInformation -> Doc
mkTableSec pinfo = mkTableFromLenses pinfo (view sectionTable)
"Sections" "UID" "Title" "ShortName"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Nonlinear . tle)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . getSentSN . shortname)
| Makes a table with all labelled content in the SRS .
mkTableLC :: PrintingInformation -> Doc
mkTableLC pinfo = mkTableFromLenses pinfo (view labelledcontentTable)
"LabelledContent" "UID" "ShortName" "Type of Content"
(text . showUID)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . getSentSN . shortname)
(text . getContConst . view accessContents)
where
getContConst :: RawContent -> String
getContConst Table{} = "Table"
getContConst Paragraph{} = "Paragraph"
getContConst EqnBlock{} = "Equation"
getContConst DerivBlock{} = "Derivation"
getContConst Enumeration{} = "Enumeration"
getContConst Defini{} = "Definition or Model"
getContConst Figure{} = "Figure"
getContConst Bib{} = "Bibliography"
getContConst Graph{} = "Graph"
| Makes a table with all references in the SRS .
mkTableRef :: PrintingInformation -> Doc
mkTableRef pinfo = mkTableFromLenses pinfo (view refTable)
"Reference" "UID" "Reference Address" "ShortName"
(text . showUID)
(text . getAdd . getRefAdd)
(sentenceDoc (pinfo ^. ckdb) (pinfo ^. stg) Linear . getSentSN . shortname)
mkTableDepChunks :: PrintingInformation -> Doc
mkTableDepChunks PI{_ckdb = db} = text "Dependent Chunks (the chunks on the left use the chunks on the right in some capacity)" <> colon
$$ header (text "UID" $$ nest nestNum (text "Dependent UIDs"))
$$ vcat (map testIndepLayout traceMapUIDs)
where
testIndepLayout :: (UID, [UID]) -> Doc
testIndepLayout (x, ys) = text (show x) $$ nest nestNum (text $ show ys)
traceMapUIDs :: [(UID, [UID])]
traceMapUIDs = Map.assocs $ db ^. traceTable
nestNum = 30
mkTableReferencedChunks :: PrintingInformation -> Doc
mkTableReferencedChunks PI{_ckdb = db} = text "Referenced Chunks (other chunks build from these)" <> colon
$$ header (text "UID" $$ nest nestNum (text "UIDs that use the left UID"))
$$ vcat (map testIsolateLayout refbyUIDs)
where
testIsolateLayout :: (UID, [UID]) -> Doc
testIsolateLayout (x, ys) = text (show x) $$ nest nestNum (text $ show ys)
refbyUIDs :: [(UID, [UID])]
refbyUIDs = Map.assocs $ db ^. refbyTable
nestNum = 30
mkTableDepReffedChunks :: PrintingInformation -> Doc
mkTableDepReffedChunks PI{_ckdb = db} = text "Dependent and Referenced Chunks (chunks dependent on middle UIDs and used in the chunks on the right)" <> colon
$$ header (text "UID" $$ nest nestNum (text "Dependent Chunk") $$ nest (nestNum*3) (text "Used-in Chunk"))
$$ vcat (map traceRefLayout $ Map.assocs combinedMaps)
where
traceRefLayout :: (UID, ([UID], [UID])) -> Doc
traceRefLayout x = text (show $ fst x) $$ nest nestNum (text $ show $ fst $ snd x)
$$ nest (nestNum*3) (text $ show $ snd $ snd x)
combinedMaps = Map.unionWith (\x y -> (fst x, snd y)) traceMapUIDs refByUIDs
traceMapUIDs = Map.fromList $ map (\(x, y) -> (x, (y, []))) $ Map.assocs $ db ^. traceTable
refByUIDs = Map.fromList $ map (\(x, y) -> (x, ([], y))) $ Map.assocs $ db ^. refbyTable
nestNum = 30
renderUsedUIDs :: [(UID, String)] -> Doc
renderUsedUIDs chs = header (text "UIDs" $$ nest 40 (text "Associated Chunks")) $$ vcat (map renderUsedUID chs)
where
renderUsedUID (u, chks) = text (show u) $$ nest 40 (text chks)
| For the last section of the log output . Shows which chunk UID is being used at which stage .
Note that chunks used at a " higher stage " ( like ' Concept 's and ' QuantityDict 's ) will still be built off of the
' QuantityDict ' wrapper onto things like Constrained and Unital chunks happens often ) .
mkListShowUsedUIDs :: PrintingInformation -> [(UID, String)]
mkListShowUsedUIDs PI{_ckdb = db} = sortBy (compare `on` fst) $ map (second stringList) $ Map.toList $ Map.fromListWith (++) $
map (\x -> (fst x, ["QuantityDict"])) (Map.assocs $ symbolTable db) ++
map (\x -> (fst x, ["IdeaDict"])) (Map.assocs $ termTable db) ++
map (\x -> (fst x, ["ConceptChunk"])) (Map.assocs $ defTable db) ++
map (\x -> (fst x, ["UnitDefn"])) (Map.assocs $ db ^. unitTable) ++
map (\x -> (fst x, ["DataDefinition"])) (Map.assocs $ db ^. dataDefnTable) ++
map (\x -> (fst x, ["InstanceModel"])) (Map.assocs $ db ^. insmodelTable) ++
map (\x -> (fst x, ["GeneralDefinition"])) (Map.assocs $ db ^. gendefTable) ++
map (\x -> (fst x, ["TheoryModel"])) (Map.assocs $ db ^. theoryModelTable) ++
map (\x -> (fst x, ["ConceptInstance"])) (Map.assocs $ db ^. conceptinsTable) ++
map (\x -> (fst x, ["Section"])) (Map.assocs $ db ^. sectionTable) ++
map (\x -> (fst x, ["LabelledContent"])) (Map.assocs $ db ^. labelledcontentTable) ++
map (\x -> (fst x, ["Reference"])) (Map.assocs $ db ^. refTable)
| Get all ' UID 's from a database ( ' ' ) .
mkListAll :: ChunkDB -> [UID]
mkListAll db = nub $ sort $
map fst (Map.assocs $ symbolTable db) ++
map fst (Map.assocs $ termTable db) ++
map fst (Map.assocs $ defTable db) ++
map fst (Map.assocs $ db ^. unitTable) ++
map fst (Map.assocs $ db ^. traceTable) ++
map fst (Map.assocs $ db ^. refbyTable) ++
map fst (Map.assocs $ db ^. dataDefnTable) ++
map fst (Map.assocs $ db ^. insmodelTable) ++
map fst (Map.assocs $ db ^. gendefTable) ++
map fst (Map.assocs $ db ^. theoryModelTable) ++
map fst (Map.assocs $ db ^. conceptinsTable) ++
map fst (Map.assocs $ db ^. sectionTable) ++
map fst (Map.assocs $ db ^. labelledcontentTable) ++
map fst (Map.assocs $ db ^. refTable)
|
acf29763641b71fc30bee79afc91244be5ac43457f8a48353f028f25dfb4fe19 | exoscale/interceptor | impl.cljc | (ns ^:no-doc exoscale.interceptor.impl
"Core implementation"
(:require [exoscale.interceptor.protocols :as p]))
(defrecord Interceptor [enter leave error])
(extend-protocol p/Interceptor
#?(:clj clojure.lang.IPersistentMap
:cljs cljs.core.PersistentHashMap)
(interceptor [m] (map->Interceptor m))
Interceptor
(interceptor [r] r)
#?(:clj clojure.lang.Fn
:cljs function)
(interceptor [f]
(p/interceptor {:enter f}))
#?(:clj clojure.lang.Keyword
:cljs cljs.core.Keyword)
(interceptor [f]
(p/interceptor {:enter f}))
#?(:bb sci.lang.Var
:clj clojure.lang.Var
:cljs cljs.core.Var)
(interceptor [v]
(p/interceptor (deref v)))
#?(:clj Object :cljs object)
(interceptor [x]
Fallback : Could already be ILookup'able , would cover custom types ( ex :
;; records)
(when-not (instance? #?(:clj clojure.lang.ILookup
:cljs cljs.core.ILookup)
x)
(throw (ex-info "Unsupported interceptor format/type"
{:exoscale.ex/type :exoscale.ex/invalid
:val x})))
x))
not working in cljs for some reason
#?(:clj
(extend-protocol p/Interceptor
clojure.lang.Symbol
(interceptor [s]
(p/interceptor (resolve s)))))
(def empty-queue
#?(:clj clojure.lang.PersistentQueue/EMPTY
:cljs #queue []))
(defn invoke-stage
[ctx interceptor stage err]
(if-let [f (get interceptor stage)]
(try
(let [ctx' (if err
(f (dissoc ctx :exoscale.interceptor/error) err)
(f ctx))]
(cond-> ctx'
(p/async? ctx')
(p/catch (fn [e] (assoc ctx :exoscale.interceptor/error e)))))
(catch #?(:clj Exception :cljs :default) e
(assoc ctx :exoscale.interceptor/error e)))
ctx))
(defn leave [ctx]
(if (p/async? ctx)
(p/then ctx leave)
(let [stack (:exoscale.interceptor/stack ctx)]
(if-let [interceptor (peek stack)]
(recur (let [err (:exoscale.interceptor/error ctx)]
(invoke-stage (assoc ctx :exoscale.interceptor/stack (pop stack))
interceptor
(if err :error :leave)
err)))
ctx))))
(defn enter [ctx]
(if (p/async? ctx)
(p/then ctx enter)
(let [queue (:exoscale.interceptor/queue ctx)
stack (:exoscale.interceptor/stack ctx)
interceptor (peek queue)]
(if (or (not interceptor)
(:exoscale.interceptor/error ctx))
ctx
(-> (assoc ctx
:exoscale.interceptor/queue (pop queue)
:exoscale.interceptor/stack (conj stack interceptor))
(invoke-stage interceptor :enter nil)
recur)))))
(defn complete
[ctx success error]
(if (p/async? ctx)
(p/then ctx #(complete % success error))
(if-let [err (:exoscale.interceptor/error ctx)]
(error err)
(success ctx))))
(defn into-queue
[q interceptors]
(into (or q empty-queue)
(map p/interceptor)
interceptors))
(defn enqueue
[ctx interceptors]
(update ctx
:exoscale.interceptor/queue
into-queue
interceptors))
(defn execute
[ctx success error]
(-> ctx
(enter)
(leave)
(complete success error)))
| null | https://raw.githubusercontent.com/exoscale/interceptor/c6b5b82d5624711972cd332a766351d5d410c9ae/src/exoscale/interceptor/impl.cljc | clojure | records) | (ns ^:no-doc exoscale.interceptor.impl
"Core implementation"
(:require [exoscale.interceptor.protocols :as p]))
(defrecord Interceptor [enter leave error])
(extend-protocol p/Interceptor
#?(:clj clojure.lang.IPersistentMap
:cljs cljs.core.PersistentHashMap)
(interceptor [m] (map->Interceptor m))
Interceptor
(interceptor [r] r)
#?(:clj clojure.lang.Fn
:cljs function)
(interceptor [f]
(p/interceptor {:enter f}))
#?(:clj clojure.lang.Keyword
:cljs cljs.core.Keyword)
(interceptor [f]
(p/interceptor {:enter f}))
#?(:bb sci.lang.Var
:clj clojure.lang.Var
:cljs cljs.core.Var)
(interceptor [v]
(p/interceptor (deref v)))
#?(:clj Object :cljs object)
(interceptor [x]
Fallback : Could already be ILookup'able , would cover custom types ( ex :
(when-not (instance? #?(:clj clojure.lang.ILookup
:cljs cljs.core.ILookup)
x)
(throw (ex-info "Unsupported interceptor format/type"
{:exoscale.ex/type :exoscale.ex/invalid
:val x})))
x))
not working in cljs for some reason
#?(:clj
(extend-protocol p/Interceptor
clojure.lang.Symbol
(interceptor [s]
(p/interceptor (resolve s)))))
(def empty-queue
#?(:clj clojure.lang.PersistentQueue/EMPTY
:cljs #queue []))
(defn invoke-stage
[ctx interceptor stage err]
(if-let [f (get interceptor stage)]
(try
(let [ctx' (if err
(f (dissoc ctx :exoscale.interceptor/error) err)
(f ctx))]
(cond-> ctx'
(p/async? ctx')
(p/catch (fn [e] (assoc ctx :exoscale.interceptor/error e)))))
(catch #?(:clj Exception :cljs :default) e
(assoc ctx :exoscale.interceptor/error e)))
ctx))
(defn leave [ctx]
(if (p/async? ctx)
(p/then ctx leave)
(let [stack (:exoscale.interceptor/stack ctx)]
(if-let [interceptor (peek stack)]
(recur (let [err (:exoscale.interceptor/error ctx)]
(invoke-stage (assoc ctx :exoscale.interceptor/stack (pop stack))
interceptor
(if err :error :leave)
err)))
ctx))))
(defn enter [ctx]
(if (p/async? ctx)
(p/then ctx enter)
(let [queue (:exoscale.interceptor/queue ctx)
stack (:exoscale.interceptor/stack ctx)
interceptor (peek queue)]
(if (or (not interceptor)
(:exoscale.interceptor/error ctx))
ctx
(-> (assoc ctx
:exoscale.interceptor/queue (pop queue)
:exoscale.interceptor/stack (conj stack interceptor))
(invoke-stage interceptor :enter nil)
recur)))))
(defn complete
[ctx success error]
(if (p/async? ctx)
(p/then ctx #(complete % success error))
(if-let [err (:exoscale.interceptor/error ctx)]
(error err)
(success ctx))))
(defn into-queue
[q interceptors]
(into (or q empty-queue)
(map p/interceptor)
interceptors))
(defn enqueue
[ctx interceptors]
(update ctx
:exoscale.interceptor/queue
into-queue
interceptors))
(defn execute
[ctx success error]
(-> ctx
(enter)
(leave)
(complete success error)))
|
c389fad09201f77452fb9a125c51f3e886c8dbbf0957c0ab35ce0049c171c551 | AdRoll/rebar3_format | simple_task.erl | -module(simple_task).
-vsn(1.0).
-author("test author").
-export([
stop/1,
start/1
]).
-behaviour(task).
stop(_Name) ->
ok.
start(Spec)->task:name(Spec).
| null | https://raw.githubusercontent.com/AdRoll/rebar3_format/5ffb11341796173317ae094d4e165b85fad6aa19/test_app/src/otp_samples/simple_task.erl | erlang | -module(simple_task).
-vsn(1.0).
-author("test author").
-export([
stop/1,
start/1
]).
-behaviour(task).
stop(_Name) ->
ok.
start(Spec)->task:name(Spec).
| |
57a21776e2d80bb94e649dbcf073917d2194d98105a07b0823b35958897fb3ca | xmonad/xmonad-contrib | DynamicProperty.hs | -- |
Module : XMonad . Hooks .
-- Description : Apply a ManageHook to an already-mapped window.
Copyright : ( c ) , 2015
-- License : BSD3-style (see LICENSE)
-- Maintainer :
--
module XMonad.Hooks.DynamicProperty {-# DEPRECATED "Use \"XMonad.Hooks.OnPropertyChange\" instead." #-}
( module XMonad.Hooks.OnPropertyChange
, dynamicPropertyChange
, dynamicTitle
) where
import XMonad
import XMonad.Hooks.OnPropertyChange
import XMonad.Prelude
-- | 'dynamicPropertyChange' = 'onXPropertyChange'
dynamicPropertyChange :: String -> ManageHook -> Event -> X All
dynamicPropertyChange = onXPropertyChange
| ' dynamicTitle ' = ' onTitleChange '
dynamicTitle :: ManageHook -> Event -> X All
dynamicTitle = onTitleChange
| null | https://raw.githubusercontent.com/xmonad/xmonad-contrib/c27a1f07915ac933f1feee301b47ae77cd55d8a6/XMonad/Hooks/DynamicProperty.hs | haskell | |
Description : Apply a ManageHook to an already-mapped window.
License : BSD3-style (see LICENSE)
Maintainer :
# DEPRECATED "Use \"XMonad.Hooks.OnPropertyChange\" instead." #
| 'dynamicPropertyChange' = 'onXPropertyChange' | Module : XMonad . Hooks .
Copyright : ( c ) , 2015
( module XMonad.Hooks.OnPropertyChange
, dynamicPropertyChange
, dynamicTitle
) where
import XMonad
import XMonad.Hooks.OnPropertyChange
import XMonad.Prelude
dynamicPropertyChange :: String -> ManageHook -> Event -> X All
dynamicPropertyChange = onXPropertyChange
| ' dynamicTitle ' = ' onTitleChange '
dynamicTitle :: ManageHook -> Event -> X All
dynamicTitle = onTitleChange
|
40e31db4f79bcee3d6d671dadec223a9b68854e720a0aa563a7e306be9374c4c | ocaml-multicore/tezos | mockup.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2020 Nomadic Labs < >
Copyright ( c ) 2022 , < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
Testing
-------
Component : Client - mockup mode
Invocation : dune exec / tests / main.exe -- --file mockup.ml
Subject : Unexhaustive tests of the client 's --mode mockup . Unexhaustive ,
because most tests of the mockup are written with the python
framework for now . It was important , though , to provide the
mockup 's API in tezt ; for other tests that use the mockup .
-------
Component: Client - mockup mode
Invocation: dune exec tezt/tests/main.exe -- --file mockup.ml
Subject: Unexhaustive tests of the client's --mode mockup. Unexhaustive,
because most tests of the mockup are written with the python
framework for now. It was important, though, to provide the
mockup's API in tezt; for other tests that use the mockup.
*)
(* Test.
   Call `tezos-client rpc list` and check that return code is 0. *)
let test_rpc_list =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) RPC list"
    ~tags:["mockup"; "client"; "rpc"]
    (fun protocol ->
      (* A freshly initialized mockup client is enough to serve [rpc list]:
         no node is involved. *)
      let* mockup_client = Client.init_mockup ~protocol () in
      let* _ = Client.rpc_list mockup_client in
      Lwt.return_unit)
(* Test.
   Call `tezos-client rpc /chains/<chain_id>/blocks/<block_id>/header/shell`
   and check that return code is 0. *)
let test_rpc_header_shell =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) RPC header/shell"
    ~tags:["mockup"; "client"; "rpc"]
    (fun protocol ->
      (* The mockup client answers the shell-header RPC locally. *)
      let* mockup_client = Client.init_mockup ~protocol () in
      let* _ = Client.shell_header mockup_client in
      Lwt.return_unit)
(* Shared fixture used by the transfer tests below:
   (giver alias, amount, receiver alias) = 1 tez from bootstrap1 to
   bootstrap2. *)
let transfer_data =
  (Constant.bootstrap1.alias, Tez.one, Constant.bootstrap2.alias)
(* Check that the two balances evolved as expected after a transfer of
   [amount]: the giver must have lost strictly more than [amount] (fees are
   also deducted), and the receiver must have gained exactly [amount].
   Fails the test with a descriptive message otherwise. *)
let test_balances_after_transfer giver amount receiver =
  let (giver_before, giver_after) = giver in
  let (receiver_before, receiver_after) = receiver in
  (* Strict inequality: the giver also pays fees on top of [amount]. *)
  if not (giver_after < giver_before -. amount) then
    Test.fail
      "Invalid balance of giver after transfer: %f (before it was %f)"
      giver_after
      giver_before ;
  Log.info "Balance of giver after transfer is valid: %f" giver_after ;
  (* The receiver pays nothing, so its gain must be exact. *)
  let expected_receiver_after = receiver_before +. amount in
  if receiver_after <> expected_receiver_after then
    Test.fail
      "Invalid balance of receiver after transfer: %f (expected %f)"
      receiver_after
      expected_receiver_after ;
  Log.info "Balance of receiver after transfer is valid: %f" receiver_after
(* Test.
   Transfer some tz and check balance changes are as expected. *)
let test_transfer =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Transfer"
    ~tags:["mockup"; "client"; "transfer"]
  @@ fun protocol ->
  let (giver, amount, receiver) = transfer_data in
  let* client = Client.init_mockup ~protocol () in
  (* Snapshot both balances before the transfer. *)
  let* giver_balance_before = Client.get_balance_for ~account:giver client in
  let* receiver_balance_before =
    Client.get_balance_for ~account:receiver client
  in
  Log.info
    "About to transfer %s from %s to %s"
    (Tez.to_string amount)
    giver
    receiver ;
  let* () = Client.transfer ~amount ~giver ~receiver client in
  let* giver_balance_after = Client.get_balance_for ~account:giver client in
  let* receiver_balance_after =
    Client.get_balance_for ~account:receiver client
  in
  (* [test_balances_after_transfer] accounts for fees on the giver side. *)
  test_balances_after_transfer
    (giver_balance_before, giver_balance_after)
    (Tez.to_float amount)
    (receiver_balance_before, receiver_balance_after) ;
  return ()
(* Running a script that refers to a registered global constant must
   succeed, and the resulting storage must be the constant's value. *)
let test_calling_contract_with_global_constant_success ~protocols =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Calling a contract with a global constant success"
    ~tags:["mockup"; "client"; "global_constant"]
    ~protocols
  @@ fun protocol ->
  let (src, _, _) = transfer_data in
  let* client = Client.init_mockup ~protocol () in
  (* Register the constant the script below refers to. *)
  let value = "999" in
  let burn_cap = Some (Tez.of_int 1) in
  let* _ = Client.register_global_constant ~src ~value ?burn_cap client in
  let script = "file:./tezt/tests/contracts/proto_alpha/constant_999.tz" in
  let storage = "0" in
  let input = "Unit" in
  let* result = Client.run_script ~prg:script ~storage ~input client in
  (* [run_script] output may carry surrounding whitespace. *)
  let result = String.trim result in
  Log.info "Contract with constant output storage %s" result ;
  if result = value then return ()
  else Test.fail "Expected storage '%s' but got '%s'" value result
(* Running a script that refers to a global constant that was never
   registered must fail with "No registered global was found". *)
let test_calling_contract_with_global_constant_failure ~protocols =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Calling a contract with a global constant failure"
    ~tags:["mockup"; "client"; "global_constant"]
    ~protocols
    (fun protocol ->
      let* client = Client.init_mockup ~protocol () in
      (* No constant is registered before running the script. *)
      let run =
        Client.spawn_run_script
          ~prg:"file:./tezt/tests/contracts/proto_alpha/constant_999.tz"
          ~storage:"0"
          ~input:"Unit"
          client
      in
      Process.check_error
        ~exit_code:1
        ~msg:(rex "No registered global was found")
        run)
(* Registering a well-formed global constant must succeed and return the
   constant's hash. *)
let test_register_global_constant_success ~protocols =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Register Global Constant success"
    ~tags:["mockup"; "client"; "global_constant"]
    ~protocols
  @@ fun protocol ->
  let (src, _, _) = transfer_data in
  let* client = Client.init_mockup ~protocol () in
  let value = "999" in
  let burn_cap = Some (Tez.of_int 1) in
  let* result = Client.register_global_constant ~src ~value ?burn_cap client in
  (* Fixed typo in the log message: "Connstant" -> "Constant". *)
  Log.info "Registered Global Constant %s with hash %s" value result ;
  return ()
(* Registering an invalid global constant value must make the simulation
   fail (the client process exits with code 1). *)
let test_register_global_constant_failure ~protocols =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Register Global Constant failure"
    ~tags:["mockup"; "client"; "global_constant"]
    ~protocols
  @@ fun protocol ->
  let (src, _, _) = transfer_data in
  let* client = Client.init_mockup ~protocol () in
  (* This value is expected to be rejected during simulation. *)
  let value = "Pair 1 (constant \"foobar\")" in
  let burn_cap = Some (Tez.of_int 1) in
  (* Fixed misspelled local variable: [proccess] -> [process]. *)
  let process =
    Client.spawn_register_global_constant ~src ~value ?burn_cap client
  in
  Process.check_error
    ~exit_code:1
    ~msg:(rex "register global constant simulation failed")
    process
(* Originating a contract whose code refers to a registered global
   constant must succeed. *)
let test_originate_contract_with_global_constant_success ~protocols =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Originate Contract with Global Constant success"
    ~tags:["mockup"; "client"; "global_constant"]
    ~protocols
  @@ fun protocol ->
  let (src, _, _) = transfer_data in
  let* client = Client.init_mockup ~protocol () in
  (* Register the constant referenced by constant_999.tz. *)
  let value = "999" in
  let burn_cap = Some (Tez.of_int 1) in
  let* _ = Client.register_global_constant ~src ~value ?burn_cap client in
  let* result =
    Client.originate_contract
      ~alias:"with_global_constant"
      ~amount:Tez.zero
      ~src:"bootstrap1"
      ~prg:"file:./tezt/tests/contracts/proto_alpha/constant_999.tz"
      ~init:"0"
      ~burn_cap:(Tez.of_int 2)
      client
  in
  Log.info "result %s" result ;
  return ()
(* Typechecking and normalizing a script that uses global constants must
   succeed once both constants (a type and a value) are registered. *)
let test_typechecking_and_normalization_work_with_constants ~protocols =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Typechecking and normalization work with constants"
    ~tags:["mockup"; "client"; "global_constant"]
    ~protocols
  @@ fun protocol ->
  let (src, _, _) = transfer_data in
  let* client = Client.init_mockup ~protocol () in
  (* Register the type *)
  let value = "unit" in
  let burn_cap = Some (Tez.of_int 1) in
  let* _ = Client.register_global_constant ~src ~value ?burn_cap client in
  (* Register the value *)
  let value = "Unit" in
  let* _ = Client.register_global_constant ~src ~value ?burn_cap client in
  let script = "file:./tezt/tests/contracts/proto_alpha/constant_unit.tz" in
  (* Both operations must succeed; their outputs are not inspected. *)
  let* _ = Client.normalize_script ~script client in
  let* _ = Client.typecheck_script ~script client in
  return ()
(* In asynchronous mode a transfer stays pending until a block is baked;
   check that baking the pending operation succeeds. *)
let test_simple_baking_event =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Transfer (asynchronous)"
    ~tags:["mockup"; "client"; "transfer"; "asynchronous"]
    (fun protocol ->
      let (giver, amount, receiver) = transfer_data in
      let* client =
        Client.init_mockup ~sync_mode:Client.Asynchronous ~protocol ()
      in
      Log.info
        "Transferring %s from %s to %s"
        (Tez.to_string amount)
        giver
        receiver ;
      let* () = Client.transfer ~amount ~giver ~receiver client in
      Log.info "Baking pending operations..." ;
      Client.bake_for ~keys:[giver] client)
(* Spawn a transfer and succeed only if the client process exits with a
   non-zero status, i.e. the transfer was rejected. *)
let transfer_expected_to_fail ~giver ~receiver ~amount client =
  let client_process = Client.spawn_transfer ~amount ~giver ~receiver client in
  let* status = Process.wait client_process in
  match status with
  | Unix.WEXITED 0 ->
      Test.fail "Last transfer was successful but was expected to fail ..."
  | _ -> return ()
(* Submitting the exact same transfer twice in asynchronous mode: the
   second submission must fail and the on-disk mempool must be unchanged. *)
let test_same_transfer_twice =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Same transfer twice (asynchronous)"
    ~tags:["mockup"; "client"; "transfer"; "asynchronous"]
  @@ fun protocol ->
  let (giver, amount, receiver) = transfer_data in
  let* client =
    Client.init_mockup ~sync_mode:Client.Asynchronous ~protocol ()
  in
  let mempool_file = Client.base_dir client // "mockup" // "mempool.json" in
  Log.info "Transfer %s from %s to %s" (Tez.to_string amount) giver receiver ;
  let* () = Client.transfer ~amount ~giver ~receiver client in
  let* mempool1 = read_file mempool_file in
  Log.info "Transfer %s from %s to %s" (Tez.to_string amount) giver receiver ;
  (* The duplicate operation must be rejected. *)
  let* () = transfer_expected_to_fail ~amount ~giver ~receiver client in
  let* mempool2 = read_file mempool_file in
  Log.info "Checking that mempool is unchanged" ;
  (* Fixed format string: added the missing newline before the second "--"
     separator, matching the same message in
     [test_transfer_same_participants]. *)
  if mempool1 <> mempool2 then
    Test.fail
      "Expected mempool to stay unchanged\n--\n%s\n--\n %s"
      mempool1
      mempool2 ;
  return ()
(* A second transfer between the same participants (with a different
   amount) must be rejected in asynchronous mode: the mempool stays
   unchanged and the rejected operation is written to the trashpool. *)
let test_transfer_same_participants =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Transfer same participants (asynchronous)"
    ~tags:["mockup"; "client"; "transfer"; "asynchronous"]
  @@ fun protocol ->
  let (giver, amount, receiver) = transfer_data in
  let* client =
    Client.init_mockup ~sync_mode:Client.Asynchronous ~protocol ()
  in
  let base_dir = Client.base_dir client in
  let mempool_file = base_dir // "mockup" // "mempool.json" in
  (* Fixed spelling: variable and failure message now say "trashpool",
     consistent with the actual file name "trashpool.json". *)
  let trashpool_file = base_dir // "mockup" // "trashpool.json" in
  Log.info "Transfer %s from %s to %s" (Tez.to_string amount) giver receiver ;
  let* () = Client.transfer ~amount ~giver ~receiver client in
  let* mempool1 = read_file mempool_file in
  (* Second transfer with a different amount between the same accounts. *)
  let amount = Tez.(amount + one) in
  Log.info "Transfer %s from %s to %s" (Tez.to_string amount) giver receiver ;
  let* () = transfer_expected_to_fail ~amount ~giver ~receiver client in
  let* mempool2 = read_file mempool_file in
  Log.info "Checking that mempool is unchanged" ;
  if mempool1 <> mempool2 then
    Test.fail
      "Expected mempool to stay unchanged\n--\n%s\n--\n %s"
      mempool1
      mempool2 ;
  Log.info
    "Checking that last operation was discarded into a newly created trashpool" ;
  let* str = read_file trashpool_file in
  if String.equal str "" then
    Test.fail "Expected trashpool to have one operation" ;
  return ()
let test_multiple_baking =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Multi transfer/multi baking (asynchronous)"
    ~tags:["mockup"; "client"; "transfer"; "asynchronous"]
  @@ fun protocol ->
  (* For the equality test below to hold, alice, bob and baker must be
     different accounts. Here, alice is bootstrap1, bob is bootstrap2 and
     baker is bootstrap3. *)
  let (alice, _amount, bob) = transfer_data and baker = "bootstrap3" in
  if String.(equal alice bob || equal bob baker || equal baker alice) then
    Test.fail "alice, bob and baker need to be different accounts" ;
  let* client =
    Client.init_mockup ~sync_mode:Client.Asynchronous ~protocol ()
  in
  (* Repeatedly send the same amount back and forth between alice and bob,
     bake with a third account, and check the two balances stay equal. *)
  Lwt_list.iteri_s
    (fun i amount ->
      let amount = Tez.of_int amount in
      let* () = Client.transfer ~amount ~giver:alice ~receiver:bob client in
      let* () = Client.transfer ~amount ~giver:bob ~receiver:alice client in
      let* () = Client.bake_for ~keys:[baker] client in
      let* alice_balance = Client.get_balance_for ~account:alice client in
      let* bob_balance = Client.get_balance_for ~account:bob client in
      Log.info
        "%d. Balances\n - Alice :: %f\n - Bob :: %f"
        i
        alice_balance
        bob_balance ;
      if alice_balance <> bob_balance then
        Test.fail
          "Unexpected balances for Alice (%f) and Bob (%f). They should be \
           equal."
          alice_balance
          bob_balance ;
      return ())
    (range 1 10)
(* Initialize a mockup client on [protocol] (with [next_constants]), run
   [pre_migration], migrate the mockup to [next_protocol], then run
   [post_migration] on the migrated client with the pre-migration result. *)
let perform_migration ~protocol ~next_protocol ~next_constants ~pre_migration
    ~post_migration =
  let* client = Client.init_mockup ~constants:next_constants ~protocol () in
  let* pre_result = pre_migration client in
  Log.info
    "Migrating from %s to %s"
    (Protocol.hash protocol)
    (Protocol.hash next_protocol) ;
  let* () = Client.migrate_mockup ~next_protocol client in
  post_migration client pre_result
(* Compute the (protocol, next_protocol) pairs usable for a mockup
   migration test: both the protocol and its declared successor must be
   registered in the mockup client. *)
let get_candidates_to_migration () =
  let* mockup_protocols =
    (* A throwaway mockup client, only used to list known protocols. *)
    let transient = Client.create_with_mode Client.Mockup in
    Client.list_protocols `Mockup transient
  in
  (* Find all registered mockup protocols which declare a next protocol *)
  let result =
    List.filter_map
      (fun (protocol : Protocol.t) ->
        match Protocol.next_protocol protocol with
        | None -> None
        | Some next ->
            let next_hash = Protocol.hash next in
            (* Keep the pair only if both hashes are known to the mockup. *)
            if
              List.exists
                (String.equal (Protocol.hash protocol))
                mockup_protocols
              && List.exists (String.equal next_hash) mockup_protocols
            then Some (protocol, next)
            else None)
      Protocol.all
  in
  return result
(* Test mockup migration. *)
(* Register a migration test named after [info]. When [migration_spec] is
   not provided, the first (protocol, next_protocol) candidate found at
   runtime is used. [pre_migration] runs before and [post_migration] after
   the migration (see [perform_migration]). *)
let test_migration ?(migration_spec : (Protocol.t * Protocol.t) option)
    ~pre_migration ~post_migration ~info () =
  Test.register
    ~__FILE__
    ~title:(sf "(Mockup) Migration (%s)" info)
    ~tags:["mockup"; "migration"]
    (fun () ->
      match migration_spec with
      | None -> (
          Log.info "Searching for protocols to migrate..." ;
          let* protocols = get_candidates_to_migration () in
          match protocols with
          | [] -> Test.fail "No protocol can be tested for migration!"
          | (protocol, next_protocol) :: _ ->
              perform_migration
                ~protocol
                ~next_protocol
                ~next_constants:Protocol.default_constants
                ~pre_migration
                ~post_migration)
      | Some (protocol, next_protocol) ->
          perform_migration
            ~protocol
            ~next_protocol
            ~next_constants:Protocol.default_constants
            ~pre_migration
            ~post_migration)
(* Migration scenario: create and fund two fresh accounts and start a
   transfer before the migration, then check the resulting balances after
   the migration. *)
let test_migration_transfer ?migration_spec () =
  let (giver, amount, receiver) = ("alice", Tez.of_int 1, "bob") in
  test_migration
    ?migration_spec
    ~pre_migration:(fun client ->
      Log.info
        "Creating two new accounts %s and %s and fund them sufficiently."
        giver
        receiver ;
      let* _ = Client.gen_keys ~alias:giver client in
      let* _ = Client.gen_keys ~alias:receiver client in
      (* Fund both accounts with more than [amount] so the transfer and
         its fees can be paid. *)
      let bigger_amount = Tez.of_int 2 in
      let* () =
        Client.transfer
          ~amount:bigger_amount
          ~giver:Constant.bootstrap1.alias
          ~receiver:giver
          ~burn_cap:Tez.one
          client
      in
      let* () =
        Client.transfer
          ~amount:bigger_amount
          ~giver:Constant.bootstrap1.alias
          ~receiver
          ~burn_cap:Tez.one
          client
      in
      Log.info
        "About to transfer %s from %s to %s"
        (Tez.to_string amount)
        giver
        receiver ;
      let* giver_balance_before =
        Client.get_balance_for ~account:giver client
      in
      let* receiver_balance_before =
        Client.get_balance_for ~account:receiver client
      in
      let* () = Client.transfer ~amount ~giver ~receiver client in
      (* Hand the pre-migration balances to the post-migration check. *)
      return (giver_balance_before, receiver_balance_before))
    ~post_migration:
      (fun client (giver_balance_before, receiver_balance_before) ->
      let* giver_balance_after = Client.get_balance_for ~account:giver client in
      let* receiver_balance_after =
        Client.get_balance_for ~account:receiver client
      in
      test_balances_after_transfer
        (giver_balance_before, giver_balance_after)
        (Tez.to_float amount)
        (receiver_balance_before, receiver_balance_after) ;
      return ())
    ~info:"transfer"
    ()
(* Check constants equality between that obtained by directly initializing
   a mockup context at alpha and that obtained by migrating from
   alpha~1 to alpha *)
let test_migration_constants ~migrate_from ~migrate_to =
  Test.register
    ~__FILE__
    ~title:
      (sf
         "(%s -> %s) constant migration"
         (Protocol.name migrate_from)
         (Protocol.name migrate_to))
    ~tags:["mockup"; "migration"]
    (fun () ->
      let constants_path =
        ["chains"; "main"; "blocks"; "head"; "context"; "constants"]
      in
      (* Constants obtained by initializing directly at [migrate_to]. *)
      let* client_to =
        Client.init_mockup
          ~constants:Protocol.Constants_mainnet
          ~protocol:migrate_to
          ()
      in
      let* const_to = Client.(rpc GET constants_path client_to) in
      (* Constants obtained by migrating from [migrate_from]. *)
      let* const_migrated =
        perform_migration
          ~protocol:migrate_from
          ~next_protocol:migrate_to
          ~next_constants:Protocol.Constants_mainnet
          ~pre_migration:(fun _ -> return ())
          ~post_migration:(fun client () ->
            Client.(rpc GET constants_path client))
      in
      if const_to = const_migrated then return ()
      else (
        (* Dump both constant sets to ease debugging the mismatch. *)
        Log.error
          "constants (%s):\n%s\n"
          (Protocol.tag migrate_to)
          (JSON.encode const_to) ;
        Log.error
          "constants (migrated from %s):\n%s\n"
          (Protocol.tag migrate_from)
          (JSON.encode const_migrated) ;
        Test.fail "Protocol constants mismatch"))
(* Regression test: originate and call a ticket-emitting contract before a
   migration, then capture the "ticket_balance" entry of the serialized
   context after the migration. *)
let test_migration_ticket_balance ~migrate_from ~migrate_to =
  Regression.register
    ~__FILE__
    ~title:
      (sf
         "(%s -> %s) ticket balance migration"
         (Protocol.name migrate_from)
         (Protocol.name migrate_to))
    ~tags:["mockup"; "migration"; "tickets"]
    ~output_file:("tickets" // "ticket_balance")
    (fun () ->
      let* context_json =
        perform_migration
          ~protocol:migrate_from
          ~next_protocol:migrate_to
          ~next_constants:Protocol.Constants_mainnet
          ~pre_migration:(fun client ->
            let* _ =
              Client.originate_contract
                ~alias:"with_tickets"
                ~amount:Tez.zero
                ~src:"bootstrap1"
                ~prg:
                  "file:./tezt/tests/contracts/proto_current_mainnet/tickets.tz"
                ~init:"{}"
                ~burn_cap:(Tez.of_int 2)
                client
            in
            (* Invoke the originated contract. *)
            Client.transfer
              ~amount:(Tez.of_int 0)
              ~giver:"bootstrap1"
              ~receiver:"with_tickets"
              ~burn_cap:(Tez.of_int 1)
              client)
          ~post_migration:(fun client _ ->
            (* Read the mockup's serialized context and extract the
               "ticket_balance" entry for the regression output. *)
            let context_file =
              Client.base_dir client // "mockup" // "context.json"
            in
            let json = JSON.parse_file context_file in
            let json =
              JSON.(
                json |-> "context" |-> "context" |=> 0 |=> 1 |> as_list
                |> List.find (fun item ->
                       item |=> 0 |> as_string = "ticket_balance"))
            in
            return json)
      in
      Regression.capture (JSON.encode context_json) ;
      return ())
(** Test. Reproduce the scenario of issue 1143 (link was garbled in the
    source; presumably https://gitlab.com/tezos/tezos/-/issues/1143):
    paying origination fees from an account whose key is not yet
    revealed. *)
let test_origination_from_unrevealed_fees =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) origination fees from unrevealed"
    ~tags:["mockup"; "client"; "transfer"]
  @@ fun protocol ->
  let* client = Client.init_mockup ~protocol () in
  (* Import a fresh key; the account is never explicitly revealed. *)
  let* () =
    Client.import_secret_key
      client
      {
        alias = "originator";
        public_key_hash = "";
        public_key = "";
        secret_key =
          Unencrypted
            "edskRiUZpqYpyBCUQmhpfCmzHfYahfiMqkKb9AaYKaEggXKaEKVUWPBz6RkwabTmLHXajbpiytRdMJb4v4f4T8zN9t6QCHLTjy";
      }
  in
  (* Fund the new account so it can pay the origination costs. *)
  let* () =
    Client.transfer
      ~burn_cap:Tez.one
      ~amount:(Tez.of_int 999999)
      ~giver:"bootstrap1"
      ~receiver:"originator"
      client
  in
  (* Origination from the unrevealed account must succeed. *)
  let* _ =
    Client.originate_contract
      ~wait:"none"
      ~alias:"contract_name"
      ~amount:Tez.zero
      ~src:"originator"
      ~prg:"file:./tezt/tests/contracts/proto_alpha/str_id.tz"
      ~init:"None"
      ~burn_cap:(Tez.of_int 20)
      client
  in
  return ()
(** Test. Reproduce the scenario fixed by merge request 3546 (link was
    garbled in the source; presumably
    https://gitlab.com/tezos/tezos/-/merge_requests/3546): simulating a
    large batch of transfers. *)
let test_multiple_transfers =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) multiple transfer simulation"
    ~tags:["mockup"; "client"; "multiple"; "transfer"]
  @@ fun protocol ->
  let* client = Client.init_mockup ~protocol () in
  (* One batch entry: send 0.02 tez to bootstrap1. *)
  let batch_line =
    `O
      [
        ("destination", `String Constant.bootstrap1.public_key_hash);
        ("amount", `String "0.02");
      ]
  in
  let batch n = `A (List.init n (fun _ -> batch_line)) in
  let file = Temp.file "batch.json" in
  (* Fixed resource handling: the channel is now closed even if
     [Ezjsonm.to_channel] raises. *)
  let oc = open_out file in
  Fun.protect
    ~finally:(fun () -> close_out oc)
    (fun () -> Ezjsonm.to_channel oc (batch 200)) ;
  Client.multiple_transfers ~giver:"bootstrap2" ~json_batch:file client
(* Baking with an empty mempool must succeed in asynchronous mode. *)
let test_empty_block_baking =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Transfer (empty, asynchronous)"
    ~tags:["mockup"; "client"; "empty"; "bake_for"; "asynchronous"]
    (fun protocol ->
      (* Only the baker's alias is needed; no operation is injected. *)
      let (baker, _amount, _receiver) = transfer_data in
      let* client =
        Client.init_mockup ~sync_mode:Client.Asynchronous ~protocol ()
      in
      Log.info "Baking pending operations..." ;
      Client.bake_for ~keys:[baker] client)
(* Register all per-protocol mockup tests. *)
let register ~protocols =
  test_rpc_list ~protocols ;
  test_same_transfer_twice ~protocols ;
  test_transfer_same_participants ~protocols ;
  test_transfer ~protocols ;
  test_empty_block_baking ~protocols ;
  test_simple_baking_event ~protocols ;
  test_multiple_baking ~protocols ;
  test_rpc_header_shell ~protocols ;
  test_origination_from_unrevealed_fees ~protocols ;
  test_multiple_transfers ~protocols
(* Register the global-constant tests (kept separate from [register] so
   callers can pick the protocols that support global constants). *)
let register_global_constants ~protocols =
  test_register_global_constant_success ~protocols ;
  test_register_global_constant_failure ~protocols ;
  test_calling_contract_with_global_constant_success ~protocols ;
  test_calling_contract_with_global_constant_failure ~protocols ;
  test_originate_contract_with_global_constant_success ~protocols ;
  test_typechecking_and_normalization_work_with_constants ~protocols
(* Register the constants-comparison migration test for a given pair. *)
let register_constant_migration ~migrate_from ~migrate_to =
  test_migration_constants ~migrate_from ~migrate_to

(* Register the ticket-balance migration regression test for a given pair. *)
let register_migration_ticket_balance ~migrate_from ~migrate_to =
  test_migration_ticket_balance ~migrate_from ~migrate_to

(* Register the protocol-independent tests (the migration pair is found at
   runtime; see [test_migration]). *)
let register_protocol_independent () = test_migration_transfer ()
| null | https://raw.githubusercontent.com/ocaml-multicore/tezos/e4fd21a1cb02d194b3162ab42d512b7c985ee8a9/tezt/tests/mockup.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
Test.
Call `tezos-client rpc list` and check that return code is 0.
Test.
Call `tezos-client rpc /chains/<chain_id>/blocks/<block_id>/header/shell` and check that return code is 0.
Test.
Transfer some tz and check balance changes are as expected.
Register the type
Register the value
Find all registered mockup protocols which declare a next protocol
Test mockup migration.
Check constants equality between that obtained by directly initializing
a mockup context at alpha and that obtained by migrating from
alpha~1 to alpha
* Test. Reproduce the scenario of /-/issues/1143
* Test. Reproduce the scenario fixed by /-/merge_requests/3546 | Copyright ( c ) 2020 Nomadic Labs < >
Copyright ( c ) 2022 , < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
Testing
-------
Component : Client - mockup mode
Invocation : dune exec / tests / main.exe -- --file mockup.ml
Subject : Unexhaustive tests of the client 's --mode mockup . Unexhaustive ,
because most tests of the mockup are written with the python
framework for now . It was important , though , to provide the
mockup 's API in tezt ; for other tests that use the mockup .
-------
Component: Client - mockup mode
Invocation: dune exec tezt/tests/main.exe -- --file mockup.ml
Subject: Unexhaustive tests of the client's --mode mockup. Unexhaustive,
because most tests of the mockup are written with the python
framework for now. It was important, though, to provide the
mockup's API in tezt; for other tests that use the mockup.
*)
let test_rpc_list =
Protocol.register_test
~__FILE__
~title:"(Mockup) RPC list"
~tags:["mockup"; "client"; "rpc"]
@@ fun protocol ->
let* client = Client.init_mockup ~protocol () in
let* _ = Client.rpc_list client in
Lwt.return_unit
let test_rpc_header_shell =
Protocol.register_test
~__FILE__
~title:"(Mockup) RPC header/shell"
~tags:["mockup"; "client"; "rpc"]
@@ fun protocol ->
let* client = Client.init_mockup ~protocol () in
let* _ = Client.shell_header client in
Lwt.return_unit
let transfer_data =
(Constant.bootstrap1.alias, Tez.one, Constant.bootstrap2.alias)
let test_balances_after_transfer giver amount receiver =
let (giver_balance_before, giver_balance_after) = giver in
let (receiver_balance_before, receiver_balance_after) = receiver in
if not (giver_balance_after < giver_balance_before -. amount) then
Test.fail
"Invalid balance of giver after transfer: %f (before it was %f)"
giver_balance_after
giver_balance_before ;
Log.info "Balance of giver after transfer is valid: %f" giver_balance_after ;
let receiver_expected_after = receiver_balance_before +. amount in
if receiver_balance_after <> receiver_expected_after then
Test.fail
"Invalid balance of receiver after transfer: %f (expected %f)"
receiver_balance_after
receiver_expected_after ;
Log.info
"Balance of receiver after transfer is valid: %f"
receiver_balance_after
let test_transfer =
Protocol.register_test
~__FILE__
~title:"(Mockup) Transfer"
~tags:["mockup"; "client"; "transfer"]
@@ fun protocol ->
let (giver, amount, receiver) = transfer_data in
let* client = Client.init_mockup ~protocol () in
let* giver_balance_before = Client.get_balance_for ~account:giver client in
let* receiver_balance_before =
Client.get_balance_for ~account:receiver client
in
Log.info
"About to transfer %s from %s to %s"
(Tez.to_string amount)
giver
receiver ;
let* () = Client.transfer ~amount ~giver ~receiver client in
let* giver_balance_after = Client.get_balance_for ~account:giver client in
let* receiver_balance_after =
Client.get_balance_for ~account:receiver client
in
test_balances_after_transfer
(giver_balance_before, giver_balance_after)
(Tez.to_float amount)
(receiver_balance_before, receiver_balance_after) ;
return ()
let test_calling_contract_with_global_constant_success ~protocols =
Protocol.register_test
~__FILE__
~title:"(Mockup) Calling a contract with a global constant success"
~tags:["mockup"; "client"; "global_constant"]
~protocols
@@ fun protocol ->
let (src, _, _) = transfer_data in
let* client = Client.init_mockup ~protocol () in
let value = "999" in
let burn_cap = Some (Tez.of_int 1) in
let* _ = Client.register_global_constant ~src ~value ?burn_cap client in
let script = "file:./tezt/tests/contracts/proto_alpha/constant_999.tz" in
let storage = "0" in
let input = "Unit" in
let* result = Client.run_script ~prg:script ~storage ~input client in
let result = String.trim result in
Log.info "Contract with constant output storage %s" result ;
if result = value then return ()
else Test.fail "Expected storage '%s' but got '%s'" value result
let test_calling_contract_with_global_constant_failure ~protocols =
Protocol.register_test
~__FILE__
~title:"(Mockup) Calling a contract with a global constant failure"
~tags:["mockup"; "client"; "global_constant"]
~protocols
@@ fun protocol ->
let* client = Client.init_mockup ~protocol () in
let script = "file:./tezt/tests/contracts/proto_alpha/constant_999.tz" in
let storage = "0" in
let input = "Unit" in
let process = Client.spawn_run_script ~prg:script ~storage ~input client in
Process.check_error
~exit_code:1
~msg:(rex "No registered global was found")
process
let test_register_global_constant_success ~protocols =
Protocol.register_test
~__FILE__
~title:"(Mockup) Register Global Constant success"
~tags:["mockup"; "client"; "global_constant"]
~protocols
@@ fun protocol ->
let (src, _, _) = transfer_data in
let* client = Client.init_mockup ~protocol () in
let value = "999" in
let burn_cap = Some (Tez.of_int 1) in
let* result = Client.register_global_constant ~src ~value ?burn_cap client in
Log.info "Registered Global Connstant %s with hash %s" value result ;
return ()
let test_register_global_constant_failure ~protocols =
Protocol.register_test
~__FILE__
~title:"(Mockup) Register Global Constant failure"
~tags:["mockup"; "client"; "global_constant"]
~protocols
@@ fun protocol ->
let (src, _, _) = transfer_data in
let* client = Client.init_mockup ~protocol () in
let value = "Pair 1 (constant \"foobar\")" in
let burn_cap = Some (Tez.of_int 1) in
let proccess =
Client.spawn_register_global_constant ~src ~value ?burn_cap client
in
Process.check_error
~exit_code:1
~msg:(rex "register global constant simulation failed")
proccess
let test_originate_contract_with_global_constant_success ~protocols =
Protocol.register_test
~__FILE__
~title:"(Mockup) Originate Contract with Global Constant success"
~tags:["mockup"; "client"; "global_constant"]
~protocols
@@ fun protocol ->
let (src, _, _) = transfer_data in
let* client = Client.init_mockup ~protocol () in
let value = "999" in
let burn_cap = Some (Tez.of_int 1) in
let* _ = Client.register_global_constant ~src ~value ?burn_cap client in
let* result =
Client.originate_contract
~alias:"with_global_constant"
~amount:Tez.zero
~src:"bootstrap1"
~prg:"file:./tezt/tests/contracts/proto_alpha/constant_999.tz"
~init:"0"
~burn_cap:(Tez.of_int 2)
client
in
Log.info "result %s" result ;
return ()
let test_typechecking_and_normalization_work_with_constants ~protocols =
Protocol.register_test
~__FILE__
~title:"(Mockup) Typechecking and normalization work with constants"
~tags:["mockup"; "client"; "global_constant"]
~protocols
@@ fun protocol ->
let (src, _, _) = transfer_data in
let* client = Client.init_mockup ~protocol () in
let value = "unit" in
let burn_cap = Some (Tez.of_int 1) in
let* _ = Client.register_global_constant ~src ~value ?burn_cap client in
let value = "Unit" in
let* _ = Client.register_global_constant ~src ~value ?burn_cap client in
let script = "file:./tezt/tests/contracts/proto_alpha/constant_unit.tz" in
let* _ = Client.normalize_script ~script client in
let* _ = Client.typecheck_script ~script client in
return ()
let test_simple_baking_event =
Protocol.register_test
~__FILE__
~title:"(Mockup) Transfer (asynchronous)"
~tags:["mockup"; "client"; "transfer"; "asynchronous"]
@@ fun protocol ->
let (giver, amount, receiver) = transfer_data in
let* client =
Client.init_mockup ~sync_mode:Client.Asynchronous ~protocol ()
in
Log.info "Transferring %s from %s to %s" (Tez.to_string amount) giver receiver ;
let* () = Client.transfer ~amount ~giver ~receiver client in
Log.info "Baking pending operations..." ;
Client.bake_for ~keys:[giver] client
let transfer_expected_to_fail ~giver ~receiver ~amount client =
let process = Client.spawn_transfer ~amount ~giver ~receiver client in
let* status = Process.wait process in
if status = Unix.WEXITED 0 then
Test.fail "Last transfer was successful but was expected to fail ..." ;
return ()
let test_same_transfer_twice =
Protocol.register_test
~__FILE__
~title:"(Mockup) Same transfer twice (asynchronous)"
~tags:["mockup"; "client"; "transfer"; "asynchronous"]
@@ fun protocol ->
let (giver, amount, receiver) = transfer_data in
let* client =
Client.init_mockup ~sync_mode:Client.Asynchronous ~protocol ()
in
let mempool_file = Client.base_dir client // "mockup" // "mempool.json" in
Log.info "Transfer %s from %s to %s" (Tez.to_string amount) giver receiver ;
let* () = Client.transfer ~amount ~giver ~receiver client in
let* mempool1 = read_file mempool_file in
Log.info "Transfer %s from %s to %s" (Tez.to_string amount) giver receiver ;
let* () = transfer_expected_to_fail ~amount ~giver ~receiver client in
let* mempool2 = read_file mempool_file in
Log.info "Checking that mempool is unchanged" ;
if mempool1 <> mempool2 then
Test.fail
"Expected mempool to stay unchanged\n--\n%s--\n %s"
mempool1
mempool2 ;
return ()
let test_transfer_same_participants =
Protocol.register_test
~__FILE__
~title:"(Mockup) Transfer same participants (asynchronous)"
~tags:["mockup"; "client"; "transfer"; "asynchronous"]
@@ fun protocol ->
let (giver, amount, receiver) = transfer_data in
let* client =
Client.init_mockup ~sync_mode:Client.Asynchronous ~protocol ()
in
let base_dir = Client.base_dir client in
let mempool_file = base_dir // "mockup" // "mempool.json" in
let thrashpool_file = base_dir // "mockup" // "trashpool.json" in
Log.info "Transfer %s from %s to %s" (Tez.to_string amount) giver receiver ;
let* () = Client.transfer ~amount ~giver ~receiver client in
let* mempool1 = read_file mempool_file in
let amount = Tez.(amount + one) in
Log.info "Transfer %s from %s to %s" (Tez.to_string amount) giver receiver ;
let* () = transfer_expected_to_fail ~amount ~giver ~receiver client in
let* mempool2 = read_file mempool_file in
Log.info "Checking that mempool is unchanged" ;
if mempool1 <> mempool2 then
Test.fail
"Expected mempool to stay unchanged\n--\n%s\n--\n %s"
mempool1
mempool2 ;
Log.info
"Checking that last operation was discarded into a newly created trashpool" ;
let* str = read_file thrashpool_file in
if String.equal str "" then
Test.fail "Expected thrashpool to have one operation" ;
return ()
(* Check that repeated symmetric transfers interleaved with baking keep the
   two compared accounts' balances equal, in asynchronous mockup mode. *)
let test_multiple_baking =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Multi transfer/multi baking (asynchronous)"
    ~tags:["mockup"; "client"; "transfer"; "asynchronous"]
  @@ fun protocol ->
  (* For the equality test below to hold, alice, bob and baker must be
     different accounts. Here, alice is bootstrap1, bob is bootstrap2 and
     baker is bootstrap3. *)
  let (alice, _amount, bob) = transfer_data and baker = "bootstrap3" in
  if String.(equal alice bob || equal bob baker || equal baker alice) then
    Test.fail "alice, bob and baker need to be different accounts" ;
  let* client =
    Client.init_mockup ~sync_mode:Client.Asynchronous ~protocol ()
  in
  (* Each round moves the same amount alice -> bob and bob -> alice, bakes a
     block with a third key, then requires both balances to be equal. *)
  Lwt_list.iteri_s
    (fun i amount ->
      let amount = Tez.of_int amount in
      let* () = Client.transfer ~amount ~giver:alice ~receiver:bob client in
      let* () = Client.transfer ~amount ~giver:bob ~receiver:alice client in
      let* () = Client.bake_for ~keys:[baker] client in
      let* alice_balance = Client.get_balance_for ~account:alice client in
      let* bob_balance = Client.get_balance_for ~account:bob client in
      Log.info
        "%d. Balances\n - Alice :: %f\n - Bob :: %f"
        i
        alice_balance
        bob_balance ;
      if alice_balance <> bob_balance then
        Test.fail
          "Unexpected balances for Alice (%f) and Bob (%f). They should be \
           equal."
          alice_balance
          bob_balance ;
      return ())
    (range 1 10)
(* Run [pre_migration] on a fresh mockup client for [protocol] (created with
   [next_constants]), migrate the mockup state to [next_protocol], then run
   [post_migration] with the client and the value [pre_migration] produced. *)
let perform_migration ~protocol ~next_protocol ~next_constants ~pre_migration
    ~post_migration =
  let* client = Client.init_mockup ~constants:next_constants ~protocol () in
  let* pre_result = pre_migration client in
  Log.info
    "Migrating from %s to %s"
    (Protocol.hash protocol)
    (Protocol.hash next_protocol) ;
  let* () = Client.migrate_mockup ~next_protocol client in
  post_migration client pre_result
(* List the (protocol, successor) pairs usable by a mockup migration test:
   both the protocol and its successor must be known to the mockup client. *)
let get_candidates_to_migration () =
  let* mockup_protocols =
    (* Throwaway client, only used to query the supported protocol list. *)
    let transient = Client.create_with_mode Client.Mockup in
    Client.list_protocols `Mockup transient
  in
  let result =
    List.filter_map
      (fun (protocol : Protocol.t) ->
        match Protocol.next_protocol protocol with
        | None -> None
        | Some next ->
            let next_hash = Protocol.hash next in
            (* Keep the pair only when both hashes are supported. *)
            if
              List.exists
                (String.equal (Protocol.hash protocol))
                mockup_protocols
              && List.exists (String.equal next_hash) mockup_protocols
            then Some (protocol, next)
            else None)
      Protocol.all
  in
  return result
(* Register a mockup migration test. When [migration_spec] is omitted, the
   first (protocol, successor) candidate pair found at runtime is migrated;
   otherwise the given pair is used. *)
let test_migration ?(migration_spec : (Protocol.t * Protocol.t) option)
    ~pre_migration ~post_migration ~info () =
  Test.register
    ~__FILE__
    ~title:(sf "(Mockup) Migration (%s)" info)
    ~tags:["mockup"; "migration"]
    (fun () ->
      match migration_spec with
      | None -> (
          Log.info "Searching for protocols to migrate..." ;
          let* protocols = get_candidates_to_migration () in
          match protocols with
          | [] -> Test.fail "No protocol can be tested for migration!"
          | (protocol, next_protocol) :: _ ->
              perform_migration
                ~protocol
                ~next_protocol
                ~next_constants:Protocol.default_constants
                ~pre_migration
                ~post_migration)
      | Some (protocol, next_protocol) ->
          perform_migration
            ~protocol
            ~next_protocol
            ~next_constants:Protocol.default_constants
            ~pre_migration
            ~post_migration)
(* Check that a transfer made just before a migration is reflected in the
   balances observed just after it. *)
let test_migration_transfer ?migration_spec () =
  let (giver, amount, receiver) = ("alice", Tez.of_int 1, "bob") in
  test_migration
    ?migration_spec
    ~pre_migration:(fun client ->
      Log.info
        "Creating two new accounts %s and %s and fund them sufficiently."
        giver
        receiver ;
      let* _ = Client.gen_keys ~alias:giver client in
      let* _ = Client.gen_keys ~alias:receiver client in
      (* Fund both fresh accounts from bootstrap1 with more tez than will
         be transferred below. *)
      let bigger_amount = Tez.of_int 2 in
      let* () =
        Client.transfer
          ~amount:bigger_amount
          ~giver:Constant.bootstrap1.alias
          ~receiver:giver
          ~burn_cap:Tez.one
          client
      in
      let* () =
        Client.transfer
          ~amount:bigger_amount
          ~giver:Constant.bootstrap1.alias
          ~receiver
          ~burn_cap:Tez.one
          client
      in
      Log.info
        "About to transfer %s from %s to %s"
        (Tez.to_string amount)
        giver
        receiver ;
      let* giver_balance_before =
        Client.get_balance_for ~account:giver client
      in
      let* receiver_balance_before =
        Client.get_balance_for ~account:receiver client
      in
      let* () = Client.transfer ~amount ~giver ~receiver client in
      (* Both pre-transfer balances are handed to [post_migration]. *)
      return (giver_balance_before, receiver_balance_before))
    ~post_migration:
      (fun client (giver_balance_before, receiver_balance_before) ->
      let* giver_balance_after = Client.get_balance_for ~account:giver client in
      let* receiver_balance_after =
        Client.get_balance_for ~account:receiver client
      in
      test_balances_after_transfer
        (giver_balance_before, giver_balance_after)
        (Tez.to_float amount)
        (receiver_balance_before, receiver_balance_after) ;
      return ())
    ~info:"transfer"
    ()
(* Check that the protocol constants obtained after migrating from
   [migrate_from] to [migrate_to] are identical to those of a mockup
   created directly on [migrate_to]. *)
let test_migration_constants ~migrate_from ~migrate_to =
  Test.register
    ~__FILE__
    ~title:
      (sf
         "(%s -> %s) constant migration"
         (Protocol.name migrate_from)
         (Protocol.name migrate_to))
    ~tags:["mockup"; "migration"]
    (fun () ->
      let constants_path =
        ["chains"; "main"; "blocks"; "head"; "context"; "constants"]
      in
      (* Reference: constants of a client created directly on [migrate_to]. *)
      let* client_to =
        Client.init_mockup
          ~constants:Protocol.Constants_mainnet
          ~protocol:migrate_to
          ()
      in
      let* const_to = Client.(rpc GET constants_path client_to) in
      (* Candidate: constants observed after migrating from [migrate_from]. *)
      let* const_migrated =
        perform_migration
          ~protocol:migrate_from
          ~next_protocol:migrate_to
          ~next_constants:Protocol.Constants_mainnet
          ~pre_migration:(fun _ -> return ())
          ~post_migration:(fun client () ->
            Client.(rpc GET constants_path client))
      in
      if const_to = const_migrated then return ()
      else (
        Log.error
          "constants (%s):\n%s\n"
          (Protocol.tag migrate_to)
          (JSON.encode const_to) ;
        Log.error
          "constants (migrated from %s):\n%s\n"
          (Protocol.tag migrate_from)
          (JSON.encode const_migrated) ;
        Test.fail "Protocol constants mismatch"))
(* Regression test: originate a ticket-holding contract, call it, migrate,
   then capture the "ticket_balance" entry of the migrated mockup context. *)
let test_migration_ticket_balance ~migrate_from ~migrate_to =
  Regression.register
    ~__FILE__
    ~title:
      (sf
         "(%s -> %s) ticket balance migration"
         (Protocol.name migrate_from)
         (Protocol.name migrate_to))
    ~tags:["mockup"; "migration"; "tickets"]
    ~output_file:("tickets" // "ticket_balance")
    (fun () ->
      let* context_json =
        perform_migration
          ~protocol:migrate_from
          ~next_protocol:migrate_to
          ~next_constants:Protocol.Constants_mainnet
          ~pre_migration:(fun client ->
            (* Originate the tickets contract and call it (presumably
               minting tickets) before the migration runs. *)
            let* _ =
              Client.originate_contract
                ~alias:"with_tickets"
                ~amount:Tez.zero
                ~src:"bootstrap1"
                ~prg:
                  "file:./tezt/tests/contracts/proto_current_mainnet/tickets.tz"
                ~init:"{}"
                ~burn_cap:(Tez.of_int 2)
                client
            in
            Client.transfer
              ~amount:(Tez.of_int 0)
              ~giver:"bootstrap1"
              ~receiver:"with_tickets"
              ~burn_cap:(Tez.of_int 1)
              client)
          ~post_migration:(fun client _ ->
            (* Read the migrated context from the mockup's on-disk state
               and keep only the "ticket_balance" item. *)
            let context_file =
              Client.base_dir client // "mockup" // "context.json"
            in
            let json = JSON.parse_file context_file in
            let json =
              JSON.(
                json |-> "context" |-> "context" |=> 0 |=> 1 |> as_list
                |> List.find (fun item ->
                       item |=> 0 |> as_string = "ticket_balance"))
            in
            return json)
      in
      Regression.capture (JSON.encode context_json) ;
      return ())
(* Check that a contract can be originated from an account imported with an
   unrevealed key (empty public key and public key hash). *)
let test_origination_from_unrevealed_fees =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) origination fees from unrevealed"
    ~tags:["mockup"; "client"; "transfer"]
  @@ fun protocol ->
  let* client = Client.init_mockup ~protocol () in
  let* () =
    Client.import_secret_key
      client
      {
        alias = "originator";
        public_key_hash = "";
        public_key = "";
        secret_key =
          Unencrypted
            "edskRiUZpqYpyBCUQmhpfCmzHfYahfiMqkKb9AaYKaEggXKaEKVUWPBz6RkwabTmLHXajbpiytRdMJb4v4f4T8zN9t6QCHLTjy";
      }
  in
  (* Fund the fresh account, then originate a contract from it. *)
  let* () =
    Client.transfer
      ~burn_cap:Tez.one
      ~amount:(Tez.of_int 999999)
      ~giver:"bootstrap1"
      ~receiver:"originator"
      client
  in
  let* _ =
    Client.originate_contract
      ~wait:"none"
      ~alias:"contract_name"
      ~amount:Tez.zero
      ~src:"originator"
      ~prg:"file:./tezt/tests/contracts/proto_alpha/str_id.tz"
      ~init:"None"
      ~burn_cap:(Tez.of_int 20)
      client
  in
  return ()
(* Simulate a 200-operation batch transfer loaded from a JSON batch file. *)
let test_multiple_transfers =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) multiple transfer simulation"
    ~tags:["mockup"; "client"; "multiple"; "transfer"]
  @@ fun protocol ->
  let* client = Client.init_mockup ~protocol () in
  (* One batch entry: 0.02 tez to bootstrap1. *)
  let batch_line =
    `O
      [
        ("destination", `String Constant.bootstrap1.public_key_hash);
        ("amount", `String "0.02");
      ]
  in
  let batch n = `A (List.init n (fun _ -> batch_line)) in
  let file = Temp.file "batch.json" in
  (* NOTE(review): the channel leaks if [Ezjsonm.to_channel] raises;
     acceptable in a test, but [Fun.protect] would make it airtight. *)
  let oc = open_out file in
  Ezjsonm.to_channel oc (batch 200) ;
  close_out oc ;
  Client.multiple_transfers ~giver:"bootstrap2" ~json_batch:file client
(* Bake a block while no operation is pending (asynchronous mockup mode). *)
let test_empty_block_baking =
  Protocol.register_test
    ~__FILE__
    ~title:"(Mockup) Transfer (empty, asynchronous)"
    ~tags:["mockup"; "client"; "empty"; "bake_for"; "asynchronous"]
  @@ fun protocol ->
  let (giver, _amount, _receiver) = transfer_data in
  let* client =
    Client.init_mockup ~sync_mode:Client.Asynchronous ~protocol ()
  in
  Log.info "Baking pending operations..." ;
  Client.bake_for ~keys:[giver] client
(* Register the protocol-parametrized mockup tests. *)
let register ~protocols =
  test_rpc_list ~protocols ;
  test_same_transfer_twice ~protocols ;
  test_transfer_same_participants ~protocols ;
  test_transfer ~protocols ;
  test_empty_block_baking ~protocols ;
  test_simple_baking_event ~protocols ;
  test_multiple_baking ~protocols ;
  test_rpc_header_shell ~protocols ;
  test_origination_from_unrevealed_fees ~protocols ;
  test_multiple_transfers ~protocols

(* Register the global-constant tests. *)
let register_global_constants ~protocols =
  test_register_global_constant_success ~protocols ;
  test_register_global_constant_failure ~protocols ;
  test_calling_contract_with_global_constant_success ~protocols ;
  test_calling_contract_with_global_constant_failure ~protocols ;
  test_originate_contract_with_global_constant_success ~protocols ;
  test_typechecking_and_normalization_work_with_constants ~protocols

(* Register the migration tests for one explicit protocol pair. *)
let register_constant_migration ~migrate_from ~migrate_to =
  test_migration_constants ~migrate_from ~migrate_to

let register_migration_ticket_balance ~migrate_from ~migrate_to =
  test_migration_ticket_balance ~migrate_from ~migrate_to

(* Protocol-independent: the migration pair is discovered at runtime. *)
let register_protocol_independent () = test_migration_transfer ()
|
07a97b4b4553121e28c6ee2345fcae3e239fc9a2efc5d37a32e99b48b76e87a6 | rads/up-clojure | server.clj | (ns up-clojure.server
(:require [ring.adapter.jetty :as jetty])
(:gen-class))
(defn handler
  "Ring handler: answers every request with 200 text/html \"Hello World\".
  The request map is ignored."
  [_request]
  {:status  200
   :headers {"Content-Type" "text/html"}
   :body    "Hello World"})
(defn -main
  "Entry point: serve `handler` with Jetty on the port from the PORT
  environment variable, defaulting to 3000. Command-line args are ignored.
  NOTE(review): run-jetty blocks the calling thread by default (:join?
  true) — confirm that is intended here."
  [& args]
  (let [port (or (some-> (System/getenv "PORT") Integer/parseInt)
                 3000)]
    (jetty/run-jetty handler {:port port})))
| null | https://raw.githubusercontent.com/rads/up-clojure/b533597371d57f2bfb640cd5f9da5d67ecfa0f26/src/up_clojure/server.clj | clojure | (ns up-clojure.server
(:require [ring.adapter.jetty :as jetty])
(:gen-class))
(defn handler
  "Ring handler: answers every request with 200 text/html \"Hello World\".
  The request map is ignored."
  [request]
  {:status 200
   :headers {"Content-Type" "text/html"}
   :body "Hello World"})
(defn -main
  "Entry point: serve `handler` with Jetty on $PORT (default 3000).
  Command-line args are ignored."
  [& args]
  (let [port (or (some-> (System/getenv "PORT") Integer/parseInt)
                 3000)]
    (jetty/run-jetty handler {:port port})))
| |
e1c80930f66e69584c9bd677ac487619b3676600fd9f4dd6218bfd84d3d00a15 | GlideAngle/flare-timing | Tag.hs | module Flight.Lookup.Tag
( TaskTimeLookup(..)
, TaskLeadingLookup(..)
, ArrivalRankLookup(..)
, TimeLookup(..)
, LeadingLookup(..)
, TagLookup(..)
, TickLookup(..)
, tagTaskTime
, tagTaskLeading
, tagArrivalRank
, tagPilotTime
, tagPilotTag
, tagTicked
) where
import Data.List (find, elemIndex)
import Data.Maybe (catMaybes, listToMaybe, isJust)
import Control.Lens ((^?), element)
import qualified Flight.Kml as Kml (MarkedFixes(..))
import Flight.Zone.SpeedSection (SpeedSection)
import Flight.Comp
( IxTask(..)
, Pilot(..)
, StartEnd(..)
, StartEndMark
, StartEndDown(..)
, StartEndDownMark
, FirstLead(..)
, LastArrival(..)
)
import Flight.Track.Time (ZoneIdx(..))
import Flight.Track.Tag
( CompTagging(..), TrackTime(..), TrackTag(..), PilotTrackTag(..)
, firstLead, lastArrival
)
import Flight.Mask (Ticked, RaceSections(..), slice, section)
import Flight.Track.Cross (InterpolatedFix(..), ZoneTag(..))
-- | Lookup keyed by task and speed section only (no pilot track needed).
type TaskTaggingLookup a = IxTask -> SpeedSection -> Maybe a

-- | Task-level start/end marks, absent when no tagging data was supplied.
newtype TaskTimeLookup =
    TaskTimeLookup
        (Maybe (TaskTaggingLookup StartEndMark))

-- | Task-level start/end/last-landing marks, absent without tagging data.
newtype TaskLeadingLookup =
    TaskLeadingLookup
        (Maybe (TaskTaggingLookup StartEndDownMark))

-- | Per-pilot lookup keyed by task, speed section, pilot and their track.
type TaggingLookup a = IxTask -> SpeedSection -> Pilot -> Kml.MarkedFixes -> Maybe a
-- | Task-level elapsed-time lookup over optional tagging data.
tagTaskTime :: Maybe CompTagging -> TaskTimeLookup
tagTaskTime = TaskTimeLookup . fmap taskTimeElapsed

-- | Task-level leading-times lookup over optional tagging data.
tagTaskLeading :: Maybe CompTagging -> TaskLeadingLookup
tagTaskLeading = TaskLeadingLookup . fmap taskLeadingTimes
-- | Start, end and last-landing marks for a task, taken from the first
-- lead and last arrival over the speed section. Nothing when the speed
-- section is undefined, the task index is out of range, or there is no
-- first lead.
taskLeadingTimes
    :: CompTagging
    -> IxTask
    -> SpeedSection
    -> Maybe StartEndDownMark
taskLeadingTimes _ _ Nothing = Nothing
taskLeadingTimes x (IxTask i) ss = do
    -- The (- 1) converts the 1-based task number to a 0-based list index.
    TrackTime{zonesFirst, zonesLast, lastLanding} <- timing x ^? element (fromIntegral i - 1)
    FirstLead start <- firstLead ss zonesFirst
    -- The end mark stays Nothing when no last arrival exists.
    let end = (\(LastArrival a) -> a) <$> lastArrival ss zonesLast
    return $ StartEndDown start end lastLanding
-- | Start and end marks for a task's speed section; the end is Nothing
-- when no last arrival exists. Nothing overall without a speed section,
-- with an out-of-range task index, or without a first lead.
taskTimeElapsed
    :: CompTagging
    -> IxTask
    -> SpeedSection
    -> Maybe StartEndMark
taskTimeElapsed _ _ Nothing = Nothing
taskTimeElapsed x (IxTask i) ss = do
    -- The (- 1) converts the 1-based task number to a 0-based list index.
    TrackTime{zonesFirst, zonesLast} <- timing x ^? element (fromIntegral i - 1)
    FirstLead start <- firstLead ss zonesFirst
    let end = (\(LastArrival a) -> a) <$> lastArrival ss zonesLast
    return $ StartEnd start end
-- | Per-pilot lookups; each wraps Nothing when no tagging data was given.
newtype ArrivalRankLookup = ArrivalRankLookup (Maybe (TaggingLookup Int))
newtype TimeLookup = TimeLookup (Maybe (TaggingLookup StartEndMark))
newtype LeadingLookup = LeadingLookup (Maybe (TaggingLookup StartEndDownMark))
newtype TagLookup = TagLookup (Maybe (TaggingLookup [Maybe ZoneTag]))
newtype TickLookup = TickLookup (Maybe (TaggingLookup Ticked))

-- | Which zones a pilot ticked.
tagTicked :: Maybe CompTagging -> TickLookup
tagTicked = TickLookup . fmap ticked

-- | A pilot's start/end times.
tagPilotTime :: Maybe CompTagging -> TimeLookup
tagPilotTime = TimeLookup . fmap timeElapsed

-- | A pilot's zone tags.
tagPilotTag :: Maybe CompTagging -> TagLookup
tagPilotTag = TagLookup . fmap tagged

-- | A pilot's arrival rank.
tagArrivalRank :: Maybe CompTagging -> ArrivalRankLookup
tagArrivalRank = ArrivalRankLookup . fmap arrivalRank
-- | The zones a pilot ticked in a task. Nothing without a speed section,
-- with an out-of-range task index, or when the pilot has no tagged track.
-- The pilot's fixes argument is unused here.
ticked
    :: CompTagging
    -> IxTask
    -> SpeedSection
    -> Pilot
    -> Kml.MarkedFixes
    -> Maybe Ticked
ticked _ _ Nothing _ _ = Nothing
ticked x (IxTask i) speedSection pilot _ =
    case tagging x ^? element (fromIntegral i - 1) of
        Nothing -> Nothing
        Just xs ->
            tickedPilot speedSection
            =<< find (\(PilotTrackTag p _) -> p == pilot) xs
-- | The time of the first and last fix in the list.
-- | Partition the zone tags into prolog/race/epilog by speed section and,
-- in each part, keep the leading run of tagged zones as fix indices
-- (rounded interpolated fix fractions wrapped in 'ZoneIdx').
tickedZones :: SpeedSection -> [Maybe ZoneTag] -> Ticked
tickedZones speedSection xs =
    RaceSections
        { prolog = f prolog
        , race = f race
        , epilog = f epilog
        }
    where
        -- Keep only the contiguous tagged prefix of each section.
        f = fmap ZoneIdx . catMaybes . takeWhile isJust
        RaceSections{..} =
            section speedSection
            $ (fmap . fmap) (round . fixFrac . inter) xs
-- | The zones ticked on one pilot's tagged track; Nothing when the pilot
-- has no track tags.
tickedPilot :: SpeedSection -> PilotTrackTag -> Maybe Ticked
tickedPilot _ (PilotTrackTag _ Nothing) = Nothing
tickedPilot speedSection (PilotTrackTag _ (Just TrackTag{zonesTag})) =
    Just $ tickedZones speedSection zonesTag
-- | One pilot's start/end marks for a task. Nothing without a speed
-- section, with an out-of-range task index, or when the pilot has no
-- tagged track. The pilot's fixes argument is unused here.
timeElapsed
    :: CompTagging
    -> IxTask
    -> SpeedSection
    -> Pilot
    -> Kml.MarkedFixes
    -> Maybe StartEndMark
timeElapsed _ _ Nothing _ _ = Nothing
timeElapsed x (IxTask i) speedSection pilot _ =
    case tagging x ^? element (fromIntegral i - 1) of
        Nothing -> Nothing
        Just xs ->
            timeElapsedPilot speedSection
            =<< find (\(PilotTrackTag p _) -> p == pilot) xs
-- | The time of the first and last fix in the list.
-- | The times of the first and the last tag in the list. Nothing when the
-- list is empty or any zone tag is missing.
startEnd :: [Maybe ZoneTag] -> Maybe StartEndMark
startEnd xs = do
    -- Every zone must have been tagged for an elapsed time to exist.
    ys <- sequence xs
    -- The former 'take 1' before 'listToMaybe' was redundant:
    -- 'listToMaybe' already looks only at the head.
    start <- listToMaybe ys
    end <- listToMaybe $ reverse ys
    return $ StartEnd (time . inter $ start) (Just . time . inter $ end)
-- | One pilot's start/end marks over the speed section; Nothing when the
-- pilot has no track tags or there is no speed section.
timeElapsedPilot :: SpeedSection -> PilotTrackTag -> Maybe StartEndMark
timeElapsedPilot _ (PilotTrackTag _ Nothing) = Nothing
timeElapsedPilot Nothing _ = Nothing
timeElapsedPilot speedSection (PilotTrackTag _ (Just TrackTag{zonesTag})) =
    startEnd $ slice speedSection zonesTag
-- | One pilot's zone tags for a task. Nothing without a speed section,
-- with an out-of-range task index, or when the pilot is not found. The
-- pilot's fixes argument is unused here.
tagged
    :: CompTagging
    -> IxTask
    -> SpeedSection
    -> Pilot
    -> Kml.MarkedFixes
    -> Maybe [Maybe ZoneTag]
tagged _ _ Nothing _ _ = Nothing
tagged x (IxTask i) speedSection pilot _ =
    case tagging x ^? element (fromIntegral i - 1) of
        Nothing -> Nothing
        Just xs ->
            taggedPilot speedSection
            <$> find (\(PilotTrackTag p _) -> p == pilot) xs
-- | One pilot's zone tags over the speed section; empty without track
-- tags or without a speed section.
taggedPilot :: SpeedSection -> PilotTrackTag -> [Maybe ZoneTag]
taggedPilot _ (PilotTrackTag _ Nothing) = []
taggedPilot Nothing _ = []
taggedPilot speedSection (PilotTrackTag _ (Just TrackTag{zonesTag})) =
    slice speedSection zonesTag
-- | A pilot's arrival rank for a task. Nothing without a speed section or
-- with an out-of-range task index. The pilot's fixes argument is unused.
arrivalRank
    :: CompTagging
    -> IxTask
    -> SpeedSection
    -> Pilot
    -> Kml.MarkedFixes
    -> Maybe Int
arrivalRank _ _ Nothing _ _ = Nothing
arrivalRank x (IxTask i) speedSection pilot _ =
    case timing x ^? element (fromIntegral i - 1) of
        Nothing -> Nothing
        Just TrackTime{..} -> arrivalRankPilot pilot speedSection zonesRankPilot
-- | A pilot's 1-based position in the pilot ordering of the last zone of
-- the speed section; Nothing without a speed section, with no zones in the
-- section, or when the pilot is absent from that ordering.
arrivalRankPilot :: Pilot -> SpeedSection -> [[Pilot]] -> Maybe Int
arrivalRankPilot _ Nothing _ = Nothing
arrivalRankPilot p speedSection xss =
    case reverse $ slice speedSection xss of
        [] -> Nothing
        -- 'elemIndex' is 0-based, ranks are 1-based.
        (ps : _) -> (+ 1) <$> elemIndex p ps
| null | https://raw.githubusercontent.com/GlideAngle/flare-timing/172a9b199eb1ff72c967669dc349cbf8d9c4bc52/lang-haskell/lookup/library/Flight/Lookup/Tag.hs | haskell | module Flight.Lookup.Tag
( TaskTimeLookup(..)
, TaskLeadingLookup(..)
, ArrivalRankLookup(..)
, TimeLookup(..)
, LeadingLookup(..)
, TagLookup(..)
, TickLookup(..)
, tagTaskTime
, tagTaskLeading
, tagArrivalRank
, tagPilotTime
, tagPilotTag
, tagTicked
) where
import Data.List (find, elemIndex)
import Data.Maybe (catMaybes, listToMaybe, isJust)
import Control.Lens ((^?), element)
import qualified Flight.Kml as Kml (MarkedFixes(..))
import Flight.Zone.SpeedSection (SpeedSection)
import Flight.Comp
( IxTask(..)
, Pilot(..)
, StartEnd(..)
, StartEndMark
, StartEndDown(..)
, StartEndDownMark
, FirstLead(..)
, LastArrival(..)
)
import Flight.Track.Time (ZoneIdx(..))
import Flight.Track.Tag
( CompTagging(..), TrackTime(..), TrackTag(..), PilotTrackTag(..)
, firstLead, lastArrival
)
import Flight.Mask (Ticked, RaceSections(..), slice, section)
import Flight.Track.Cross (InterpolatedFix(..), ZoneTag(..))
type TaskTaggingLookup a = IxTask -> SpeedSection -> Maybe a
newtype TaskTimeLookup =
TaskTimeLookup
(Maybe (TaskTaggingLookup StartEndMark))
newtype TaskLeadingLookup =
TaskLeadingLookup
(Maybe (TaskTaggingLookup StartEndDownMark))
type TaggingLookup a = IxTask -> SpeedSection -> Pilot -> Kml.MarkedFixes -> Maybe a
tagTaskTime :: Maybe CompTagging -> TaskTimeLookup
tagTaskTime = TaskTimeLookup . fmap taskTimeElapsed
tagTaskLeading :: Maybe CompTagging -> TaskLeadingLookup
tagTaskLeading = TaskLeadingLookup . fmap taskLeadingTimes
taskLeadingTimes
:: CompTagging
-> IxTask
-> SpeedSection
-> Maybe StartEndDownMark
taskLeadingTimes _ _ Nothing = Nothing
taskLeadingTimes x (IxTask i) ss = do
TrackTime{zonesFirst, zonesLast, lastLanding} <- timing x ^? element (fromIntegral i - 1)
FirstLead start <- firstLead ss zonesFirst
let end = (\(LastArrival a) -> a) <$> lastArrival ss zonesLast
return $ StartEndDown start end lastLanding
taskTimeElapsed
:: CompTagging
-> IxTask
-> SpeedSection
-> Maybe StartEndMark
taskTimeElapsed _ _ Nothing = Nothing
taskTimeElapsed x (IxTask i) ss = do
TrackTime{zonesFirst, zonesLast} <- timing x ^? element (fromIntegral i - 1)
FirstLead start <- firstLead ss zonesFirst
let end = (\(LastArrival a) -> a) <$> lastArrival ss zonesLast
return $ StartEnd start end
newtype ArrivalRankLookup = ArrivalRankLookup (Maybe (TaggingLookup Int))
newtype TimeLookup = TimeLookup (Maybe (TaggingLookup StartEndMark))
newtype LeadingLookup = LeadingLookup (Maybe (TaggingLookup StartEndDownMark))
newtype TagLookup = TagLookup (Maybe (TaggingLookup [Maybe ZoneTag]))
newtype TickLookup = TickLookup (Maybe (TaggingLookup Ticked))
tagTicked :: Maybe CompTagging -> TickLookup
tagTicked = TickLookup . fmap ticked
tagPilotTime :: Maybe CompTagging -> TimeLookup
tagPilotTime = TimeLookup . fmap timeElapsed
tagPilotTag :: Maybe CompTagging -> TagLookup
tagPilotTag = TagLookup . fmap tagged
tagArrivalRank :: Maybe CompTagging -> ArrivalRankLookup
tagArrivalRank = ArrivalRankLookup . fmap arrivalRank
ticked
:: CompTagging
-> IxTask
-> SpeedSection
-> Pilot
-> Kml.MarkedFixes
-> Maybe Ticked
ticked _ _ Nothing _ _ = Nothing
ticked x (IxTask i) speedSection pilot _ =
case tagging x ^? element (fromIntegral i - 1) of
Nothing -> Nothing
Just xs ->
tickedPilot speedSection
=<< find (\(PilotTrackTag p _) -> p == pilot) xs
-- | The time of the first and last fix in the list.
tickedZones :: SpeedSection -> [Maybe ZoneTag] -> Ticked
tickedZones speedSection xs =
RaceSections
{ prolog = f prolog
, race = f race
, epilog = f epilog
}
where
f = fmap ZoneIdx . catMaybes . takeWhile isJust
RaceSections{..} =
section speedSection
$ (fmap . fmap) (round . fixFrac . inter) xs
tickedPilot :: SpeedSection -> PilotTrackTag -> Maybe Ticked
tickedPilot _ (PilotTrackTag _ Nothing) = Nothing
tickedPilot speedSection (PilotTrackTag _ (Just TrackTag{zonesTag})) =
Just $ tickedZones speedSection zonesTag
timeElapsed
:: CompTagging
-> IxTask
-> SpeedSection
-> Pilot
-> Kml.MarkedFixes
-> Maybe StartEndMark
timeElapsed _ _ Nothing _ _ = Nothing
timeElapsed x (IxTask i) speedSection pilot _ =
case tagging x ^? element (fromIntegral i - 1) of
Nothing -> Nothing
Just xs ->
timeElapsedPilot speedSection
=<< find (\(PilotTrackTag p _) -> p == pilot) xs
-- | The time of the first and last fix in the list.
startEnd :: [Maybe ZoneTag] -> Maybe StartEndMark
startEnd xs = do
ys <- sequence xs
start <- listToMaybe $ take 1 ys
end <- listToMaybe $ take 1 $ reverse ys
return $ StartEnd (time . inter $ start) (Just . time . inter $ end)
timeElapsedPilot :: SpeedSection -> PilotTrackTag -> Maybe StartEndMark
timeElapsedPilot _ (PilotTrackTag _ Nothing) = Nothing
timeElapsedPilot Nothing _ = Nothing
timeElapsedPilot speedSection (PilotTrackTag _ (Just TrackTag{zonesTag})) =
startEnd $ slice speedSection zonesTag
tagged
:: CompTagging
-> IxTask
-> SpeedSection
-> Pilot
-> Kml.MarkedFixes
-> Maybe [Maybe ZoneTag]
tagged _ _ Nothing _ _ = Nothing
tagged x (IxTask i) speedSection pilot _ =
case tagging x ^? element (fromIntegral i - 1) of
Nothing -> Nothing
Just xs ->
taggedPilot speedSection
<$> find (\(PilotTrackTag p _) -> p == pilot) xs
taggedPilot :: SpeedSection -> PilotTrackTag -> [Maybe ZoneTag]
taggedPilot _ (PilotTrackTag _ Nothing) = []
taggedPilot Nothing _ = []
taggedPilot speedSection (PilotTrackTag _ (Just TrackTag{zonesTag})) =
slice speedSection zonesTag
arrivalRank
:: CompTagging
-> IxTask
-> SpeedSection
-> Pilot
-> Kml.MarkedFixes
-> Maybe Int
arrivalRank _ _ Nothing _ _ = Nothing
arrivalRank x (IxTask i) speedSection pilot _ =
case timing x ^? element (fromIntegral i - 1) of
Nothing -> Nothing
Just TrackTime{..} -> arrivalRankPilot pilot speedSection zonesRankPilot
arrivalRankPilot :: Pilot -> SpeedSection -> [[Pilot]] -> Maybe Int
arrivalRankPilot _ Nothing _ = Nothing
arrivalRankPilot p speedSection xss =
case pss of
[] -> Nothing
(ps : _) -> (+ 1) <$> elemIndex p ps
where
pss :: [[Pilot]] = reverse $ slice speedSection xss
| |
990c2e24f21bac6a082d6cc61d76dd36291e9e284cc1d7b01f906fb6775eb250 | basho/riak_cs | riak_cs_list_objects_utils.erl | %% ---------------------------------------------------------------------
%%
%% Copyright (c) 2007-2013 Basho Technologies, Inc.  All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License.  You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied.  See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------
%% @doc
-module(riak_cs_list_objects_utils).
-include("riak_cs.hrl").
-include("list_objects.hrl").
%%%===================================================================
%%% Types
%%%===================================================================
%% A single entry in a flattened object listing: either a common prefix or
%% a manifest (optionally paired with its key).
-type tagged_item() :: {prefix, binary()} |
                       {manifest, lfs_manifest()} |
                       {manifest, {Key :: binary(), lfs_manifest()}}.

-type tagged_item_list() :: list(tagged_item()).

%% Manifests alongside the ordset of rolled-up common prefixes.
-type manifests_and_prefixes() :: {list(lfs_manifest()), ordsets:ordset(binary())}.

-export_type([tagged_item/0,
              tagged_item_list/0,
              manifests_and_prefixes/0]).
%%%===================================================================
%%% Exports
%%%===================================================================
%% API
-export([start_link/5,
get_object_list/1,
get_internal_state/1]).
%% Shared Helpers
-export([manifests_and_prefix_length/1,
tagged_manifest_and_prefix/1,
untagged_manifest_and_prefix/1,
manifests_and_prefix_slice/2,
filter_prefix_keys/2,
extract_group/2]).
%% Observability / Configuration
-export([get_key_list_multiplier/0,
set_key_list_multiplier/1,
fold_objects_for_list_keys/0,
fold_objects_timeout/0]).
%%%===================================================================
%%% API
%%%===================================================================
-spec start_link(riak_client(), pid(), list_object_request(), term(),
                 UseCache :: boolean()) ->
    {ok, pid()} | {error, term()}.
%% @doc An abstraction between the old and new list-keys mechanism. Uses the
%% old mechanism if `fold_objects_for_list_keys' is false, otherwise uses
%% the new one. After getting a pid back, the API is the same, so users don't
%% need to differentiate.
%% Note: the v2 (fold-objects) path does not pass `CallerPid', `CacheKey'
%% or `UseCache' along.
start_link(RcPid, CallerPid, ListKeysRequest, CacheKey, UseCache) ->
    case fold_objects_for_list_keys() of
        true ->
            riak_cs_list_objects_fsm_v2:start_link(RcPid, ListKeysRequest);
        false ->
            riak_cs_list_objects_fsm:start_link(RcPid, CallerPid,
                                                ListKeysRequest, CacheKey,
                                                UseCache)
    end.
%% @doc Synchronously fetch the object list from a list-objects FSM started
%% with start_link/5. Blocks without timeout.
-spec get_object_list(pid()) ->
    {ok, list_object_response()} |
    {error, term()}.
get_object_list(FSMPid) ->
    gen_fsm:sync_send_all_state_event(FSMPid, get_object_list, infinity).
%% @doc Fetch the FSM's internal state (for debugging/introspection).
%% Blocks without timeout, like get_object_list/1. A -spec and @doc were
%% added for consistency with the sibling API functions.
-spec get_internal_state(pid()) -> term().
get_internal_state(FSMPid) ->
    gen_fsm:sync_send_all_state_event(FSMPid, get_internal_state, infinity).
%%%===================================================================
%%% Shared Helpers
%%%===================================================================
%% @doc Combined count of manifests and common prefixes.
-spec manifests_and_prefix_length({list(), ordsets:ordset(term())}) ->
    non_neg_integer().
manifests_and_prefix_length({Manifests, Prefixes}) ->
    ManifestCount = length(Manifests),
    PrefixCount = ordsets:size(Prefixes),
    ManifestCount + PrefixCount.
%% @doc Flatten a {Manifests, Prefixes} pair into one tagged item list,
%% manifests first, prefixes after.
-spec tagged_manifest_and_prefix(manifests_and_prefixes()) ->
    riak_cs_list_objects_utils:tagged_item_list().
tagged_manifest_and_prefix({Manifests, Prefixes}) ->
    tagged_manifest_list(Manifests) ++ tagged_prefix_list(Prefixes).
%% @doc Tag each manifest as {manifest, M}, keeping the input order.
-spec tagged_manifest_list(list()) ->
    list({manifest, term()}).
tagged_manifest_list(KeyAndManifestList) ->
    [{manifest, M} || M <- KeyAndManifestList].

%% @doc Tag each common prefix as {prefix, P}, in ordset (sorted) order.
-spec tagged_prefix_list(list(binary())) ->
    list({prefix, binary()}).
tagged_prefix_list(Prefixes) ->
    [{prefix, P} || P <- ordsets:to_list(Prefixes)].
%% @doc Inverse of tagged_manifest_and_prefix/1: split a tagged item list
%% back into a {Manifests, Prefixes} pair, preserving relative order.
-spec untagged_manifest_and_prefix(riak_cs_list_objects_utils:tagged_item_list()) ->
    manifests_and_prefixes().
untagged_manifest_and_prefix(TaggedItems) ->
    IsManifest = fun({manifest, _}) -> true;
                    (_) -> false
                 end,
    {ManifestTags, PrefixTags} = lists:partition(IsManifest, TaggedItems),
    {[element(2, Tagged) || Tagged <- ManifestTags],
     [element(2, Tagged) || Tagged <- PrefixTags]}.
%% @doc Merge manifests and prefixes into one key-ordered tagged list and
%% keep at most MaxObjects items.
-spec manifests_and_prefix_slice(riak_cs_list_objects_utils:manifests_and_prefixes(),
                                 non_neg_integer()) ->
    riak_cs_list_objects_utils:tagged_item_list().
manifests_and_prefix_slice(ManifestsAndPrefixes, MaxObjects) ->
    TaggedList =
        riak_cs_list_objects_utils:tagged_manifest_and_prefix(ManifestsAndPrefixes),
    %% lists:sort/2 is stable, so items with equal keys keep their order.
    Sorted = lists:sort(fun tagged_sort_fun/2, TaggedList),
    lists:sublist(Sorted, MaxObjects).
%% @doc Order tagged items by their object key, ascending.
-spec tagged_sort_fun(riak_cs_list_objects_utils:tagged_item(),
                      riak_cs_list_objects_utils:tagged_item()) ->
    boolean().
tagged_sort_fun(A, B) ->
    AKey = key_from_tag(A),
    BKey = key_from_tag(B),
    AKey =< BKey.

%% @doc The object key carried by a tagged item.
%% NOTE(review): the {manifest, {Key, Manifest}} variant of tagged_item()
%% has no clause here — confirm it cannot reach this function.
-spec key_from_tag(riak_cs_list_objects_utils:tagged_item()) -> binary().
key_from_tag({manifest, ?MANIFEST{bkey={_Bucket, Key}}}) ->
    Key;
key_from_tag({prefix, Key}) ->
    Key.
%% @doc Apply the list-objects request's prefix/delimiter to a manifest
%% list, splitting matches into kept manifests and rolled-up common
%% prefixes. With neither prefix nor delimiter the input passes through
%% unchanged.
%% Note: prefix_filter/4 accumulates manifests head-first, so the returned
%% manifest list is in reverse of the input order.
-spec filter_prefix_keys({ManifestList :: list(lfs_manifest()),
                          CommonPrefixes :: ordsets:ordset(binary())},
                         list_object_request()) ->
    riak_cs_list_objects_utils:manifests_and_prefixes().
filter_prefix_keys({_ManifestList, _CommonPrefixes}=Input,
                   ?LOREQ{prefix=undefined,
                          delimiter=undefined}) ->
    Input;
filter_prefix_keys({ManifestList, CommonPrefixes},
                   ?LOREQ{prefix=Prefix,
                          delimiter=Delimiter}) ->
    PrefixFilter =
        fun(Manifest, Acc) ->
                prefix_filter(Manifest, Acc, Prefix, Delimiter)
        end,
    lists:foldl(PrefixFilter, {[], CommonPrefixes}, ManifestList).
%% @doc Fold step for filter_prefix_keys/2:
%% - no prefix: group the whole key by the delimiter;
%% - prefix, no delimiter: keep manifests whose key carries the prefix;
%% - prefix and delimiter: for keys carrying the prefix, group what follows
%%   the prefix by the delimiter.
%% Keys not matching the prefix are dropped from the accumulator.
prefix_filter(Manifest=?MANIFEST{bkey={_Bucket, Key}},
              Acc, undefined, Delimiter) ->
    Group = extract_group(Key, Delimiter),
    update_keys_and_prefixes(Acc, Manifest, <<>>, 0, Group);
prefix_filter(Manifest=?MANIFEST{bkey={_Bucket, Key}},
              {ManifestList, Prefixes}=Acc, Prefix, undefined) ->
    PrefixLen = byte_size(Prefix),
    case Key of
        << Prefix:PrefixLen/binary, _/binary >> ->
            {[Manifest | ManifestList], Prefixes};
        _ ->
            Acc
    end;
prefix_filter(Manifest=?MANIFEST{bkey={_Bucket, Key}},
              {_ManifestList, _Prefixes}=Acc, Prefix, Delimiter) ->
    PrefixLen = byte_size(Prefix),
    case Key of
        << Prefix:PrefixLen/binary, Rest/binary >> ->
            Group = extract_group(Rest, Delimiter),
            update_keys_and_prefixes(Acc, Manifest, Prefix, PrefixLen, Group);
        _ ->
            Acc
    end.
%% @doc The leading segment of Key up to and including the first occurrence
%% of Delimiter, or nomatch when the delimiter does not occur in Key.
extract_group(Key, Delimiter) ->
    case binary:split(Key, Delimiter) of
        [Key] ->
            %% Delimiter absent: binary:split/2 returns the input unchanged.
            nomatch;
        [Leading, _Rest] ->
            <<Leading/binary, Delimiter/binary>>
    end.
%% @doc Accumulate one prefix_filter/4 result: with no delimiter group the
%% manifest is kept as-is; with a group, the rolled-up common prefix
%% (Prefix followed by Group) is added to the prefix set instead, and the
%% manifest is dropped.
%% PrefixLen must equal byte_size(Prefix); a smaller value would truncate.
update_keys_and_prefixes({ManifestList, Prefixes},
                         Manifest, _, _, nomatch) ->
    {[Manifest | ManifestList], Prefixes};
update_keys_and_prefixes({ManifestList, Prefixes},
                         _, Prefix, PrefixLen, Group) ->
    NewPrefix = << Prefix:PrefixLen/binary, Group/binary >>,
    {ManifestList, ordsets:add_element(NewPrefix, Prefixes)}.
%%%===================================================================
%%% Observability / Configuration
%%%===================================================================
%% @doc Multiplier used when sizing key-list requests; falls back to the
%% compiled-in ?KEY_LIST_MULTIPLIER default.
-spec get_key_list_multiplier() -> float().
get_key_list_multiplier() ->
    riak_cs_config:get_env(riak_cs, key_list_multiplier,
                           ?KEY_LIST_MULTIPLIER).

%% @doc Override the key-list multiplier at runtime (node-local env).
-spec set_key_list_multiplier(float()) -> 'ok'.
set_key_list_multiplier(Multiplier) ->
    application:set_env(riak_cs, key_list_multiplier,
                        Multiplier).

%% @doc Whether the newer fold-objects based listing (v2 FSM) is enabled.
-spec fold_objects_for_list_keys() -> boolean().
fold_objects_for_list_keys() ->
    riak_cs_config:get_env(riak_cs, fold_objects_for_list_keys,
                           ?FOLD_OBJECTS_FOR_LIST_KEYS).

%% @doc Timeout for fold-objects requests, delegated to riak_cs_config.
-spec fold_objects_timeout() -> non_neg_integer().
fold_objects_timeout() ->
    riak_cs_config:fold_objects_timeout().
| null | https://raw.githubusercontent.com/basho/riak_cs/c0c1012d1c9c691c74c8c5d9f69d388f5047bcd2/src/riak_cs_list_objects_utils.erl | erlang | ---------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
---------------------------------------------------------------------
@doc
===================================================================
Types
===================================================================
===================================================================
Exports
===================================================================
API
Shared Helpers
===================================================================
API
===================================================================
@doc An abstraction between the old and new list-keys mechanism. Uses the
old mechanism if `fold_objects_for_list_keys' is false, otherwise uses
the new one. After getting a pid back, the API is the same, so users don't
need to differentiate.
===================================================================
Shared Helpers
===================================================================
===================================================================
=================================================================== | Copyright ( c ) 2007 - 2013 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% Utilities shared by the riak_cs list-objects FSM implementations
%% (the original "list keys" FSM and the v2 "fold objects" FSM).
-module(riak_cs_list_objects_utils).
-include("riak_cs.hrl").
-include("list_objects.hrl").

%% A listing entry tagged with its kind: a common prefix, a bare
%% manifest, or a {Key, Manifest} pair.
-type tagged_item() :: {prefix, binary()} |
                       {manifest, lfs_manifest()} |
                       {manifest, {Key :: binary(), lfs_manifest()}}.
-type tagged_item_list() :: list(tagged_item()).
%% Manifests plus the ordset of common prefixes computed for a request.
-type manifests_and_prefixes() :: {list(lfs_manifest()), ordsets:ordset(binary())}.

-export_type([tagged_item/0,
              tagged_item_list/0,
              manifests_and_prefixes/0]).

%% FSM API
-export([start_link/5,
         get_object_list/1,
         get_internal_state/1]).
%% Shared helpers
-export([manifests_and_prefix_length/1,
         tagged_manifest_and_prefix/1,
         untagged_manifest_and_prefix/1,
         manifests_and_prefix_slice/2,
         filter_prefix_keys/2,
         extract_group/2]).
%% Observability / Configuration
-export([get_key_list_multiplier/0,
         set_key_list_multiplier/1,
         fold_objects_for_list_keys/0,
         fold_objects_timeout/0]).
-spec start_link(riak_client(), pid(), list_object_request(), term(),
                 UseCache :: boolean()) ->
    {ok, pid()} | {error, term()}.
%% @doc Spawn the listing FSM matching the configured strategy: the v2
%% (fold-objects based) FSM when enabled, otherwise the original
%% list-keys FSM.  Either way the caller gets back a pid usable with
%% `get_object_list/1'.
start_link(RcPid, CallerPid, ListKeysRequest, CacheKey, UseCache) ->
    UseFoldObjects = fold_objects_for_list_keys(),
    case UseFoldObjects of
        false ->
            riak_cs_list_objects_fsm:start_link(RcPid, CallerPid,
                                                ListKeysRequest,
                                                CacheKey, UseCache);
        true ->
            riak_cs_list_objects_fsm_v2:start_link(RcPid, ListKeysRequest)
    end.
%% @doc Retrieve the finished object listing from an FSM started with
%% `start_link/5'.  Blocks without a timeout until the FSM replies.
-spec get_object_list(pid()) ->
    {ok, list_object_response()} |
    {error, term()}.
get_object_list(FSMPid) ->
    gen_fsm:sync_send_all_state_event(FSMPid, get_object_list, infinity).
%% @doc Fetch the FSM's internal state, for debugging/observability.
%% Blocks without a timeout, like `get_object_list/1'.  The reply is
%% whatever state term the FSM chooses to return, hence `term()'.
%% (Spec added for consistency with the other exported functions.)
-spec get_internal_state(pid()) -> term().
get_internal_state(FSMPid) ->
    gen_fsm:sync_send_all_state_event(FSMPid, get_internal_state, infinity).
-spec manifests_and_prefix_length({list(), ordsets:ordset(term())}) ->
    non_neg_integer().
%% Total number of entries a listing response will contain: object
%% manifests plus common prefixes.
manifests_and_prefix_length({Manifests, Prefixes}) ->
    ordsets:size(Prefixes) + length(Manifests).
-spec tagged_manifest_and_prefix(manifests_and_prefixes()) ->
    riak_cs_list_objects_utils:tagged_item_list().
%% Merge manifests and common prefixes into a single list of tagged
%% items (manifests first, prefixes after).
tagged_manifest_and_prefix({Manifests, Prefixes}) ->
    lists:append(tagged_manifest_list(Manifests),
                 tagged_prefix_list(Prefixes)).
-spec tagged_manifest_list(list()) ->
    list({manifest, term()}).
%% Tag every entry (a manifest, or a {Key, Manifest} pair) with the
%% `manifest' atom, preserving order.
tagged_manifest_list(Entries) ->
    lists:map(fun(Entry) -> {manifest, Entry} end, Entries).
-spec tagged_prefix_list(list(binary())) ->
    list({prefix, binary()}).
%% Tag every common prefix with the `prefix' atom, in ordset
%% (ascending) order.
tagged_prefix_list(PrefixSet) ->
    lists:foldr(fun(Prefix, Acc) -> [{prefix, Prefix} | Acc] end,
                [],
                ordsets:to_list(PrefixSet)).
-spec untagged_manifest_and_prefix(riak_cs_list_objects_utils:tagged_item_list()) ->
    manifests_and_prefixes().
%% Inverse of `tagged_manifest_and_prefix/1': strip the tags and split
%% the items back into {Manifests, Prefixes}, preserving the relative
%% order within each group.  Per the `tagged_item()' type every item
%% is tagged either `manifest' or `prefix'.
untagged_manifest_and_prefix(TaggedItems) ->
    {[M || {manifest, M} <- TaggedItems],
     [P || {prefix, P} <- TaggedItems]}.
%% @doc Merge manifests and prefixes into one tagged list, sort it by
%% object key / prefix, and truncate to at most MaxObjects entries --
%% the page of results a list-objects response should carry.
-spec manifests_and_prefix_slice(riak_cs_list_objects_utils:manifests_and_prefixes(),
                                 non_neg_integer()) ->
    riak_cs_list_objects_utils:tagged_item_list().
manifests_and_prefix_slice(ManifestsAndPrefixes, MaxObjects) ->
    %% NOTE(review): the call is module-qualified even though this is
    %% that very module -- presumably for tracing/mocking; confirm.
    TaggedList =
        riak_cs_list_objects_utils:tagged_manifest_and_prefix(ManifestsAndPrefixes),
    Sorted = lists:sort(fun tagged_sort_fun/2, TaggedList),
    lists:sublist(Sorted, MaxObjects).
-spec tagged_sort_fun(riak_cs_list_objects_utils:tagged_item(),
                      riak_cs_list_objects_utils:tagged_item()) ->
    boolean().
%% Ordering predicate for `lists:sort/2': tagged items compare by
%% their object key / prefix binary, ascending.
tagged_sort_fun(ItemA, ItemB) ->
    key_from_tag(ItemA) =< key_from_tag(ItemB).
%% @doc Extract the sort key from a tagged item: the object key of a
%% manifest, or the prefix binary itself.
-spec key_from_tag(riak_cs_list_objects_utils:tagged_item()) -> binary().
key_from_tag({manifest, ?MANIFEST{bkey={_Bucket, Key}}}) ->
    Key;
key_from_tag({manifest, {Key, _Manifest}}) when is_binary(Key) ->
    %% The `tagged_item()' type also admits `{manifest, {Key, Manifest}}'
    %% pairs; without this clause such items crashed with
    %% function_clause.  Safe to add after the record clause: a 2-tuple
    %% can never match the ?MANIFEST record pattern.
    Key;
key_from_tag({prefix, Key}) ->
    Key.
%% @doc Split raw manifests into {Manifests, CommonPrefixes} according
%% to the request's `prefix' and `delimiter' options, mirroring S3
%% list-objects semantics.  With neither option set the input passes
%% through unchanged.
-spec filter_prefix_keys({ManifestList :: list(lfs_manifest()),
                          CommonPrefixes :: ordsets:ordset(binary())},
                         list_object_request()) ->
    riak_cs_list_objects_utils:manifests_and_prefixes().
filter_prefix_keys({_ManifestList, _CommonPrefixes}=Input,
                   ?LOREQ{prefix=undefined,
                          delimiter=undefined}) ->
    Input;
filter_prefix_keys({ManifestList, CommonPrefixes},
                   ?LOREQ{prefix=Prefix,
                          delimiter=Delimiter}) ->
    PrefixFilter =
        fun(Manifest, Acc) ->
                prefix_filter(Manifest, Acc, Prefix, Delimiter)
        end,
    %% The fold starts from an empty manifest list but keeps the
    %% caller-supplied CommonPrefixes -- presumably prefixes may
    %% already have been accumulated upstream; confirm at call sites.
    lists:foldl(PrefixFilter, {[], CommonPrefixes}, ManifestList).
%% Classify one manifest against the request's prefix/delimiter.
%% Clause 1: delimiter only -- group on the whole key.
prefix_filter(Manifest=?MANIFEST{bkey={_Bucket, Key}},
              Acc, undefined, Delimiter) ->
    Group = extract_group(Key, Delimiter),
    update_keys_and_prefixes(Acc, Manifest, <<>>, 0, Group);
%% Clause 2: prefix only -- keep keys that start with Prefix (binary
%% prefix match); drop everything else.  Note the cons: kept manifests
%% come out in reverse input order.
prefix_filter(Manifest=?MANIFEST{bkey={_Bucket, Key}},
              {ManifestList, Prefixes}=Acc, Prefix, undefined) ->
    PrefixLen = byte_size(Prefix),
    case Key of
        << Prefix:PrefixLen/binary, _/binary >> ->
            {[Manifest | ManifestList], Prefixes};
        _ ->
            Acc
    end;
%% Clause 3: prefix and delimiter -- strip the prefix, then group the
%% remainder of the key on the delimiter.
prefix_filter(Manifest=?MANIFEST{bkey={_Bucket, Key}},
              {_ManifestList, _Prefixes}=Acc, Prefix, Delimiter) ->
    PrefixLen = byte_size(Prefix),
    case Key of
        << Prefix:PrefixLen/binary, Rest/binary >> ->
            Group = extract_group(Rest, Delimiter),
            update_keys_and_prefixes(Acc, Manifest, Prefix, PrefixLen, Group);
        _ ->
            Acc
    end.
%% @doc Return the leading portion of Key up to and including the
%% first occurrence of Delimiter, or `nomatch' when the delimiter does
%% not appear.  E.g. extract_group(<<"a/b/c">>, <<"/">>) -> <<"a/">>.
extract_group(Key, Delimiter) ->
    case binary:match(Key, [Delimiter]) of
        {Pos, Len} ->
            binary:part(Key, 0, Pos + Len);
        nomatch ->
            nomatch
    end.
%% Fold step shared by the prefix_filter/4 clauses.
%% No delimiter found past the prefix: the key is a plain object --
%% keep its manifest (consed, so order is reversed relative to input).
update_keys_and_prefixes({ManifestList, Prefixes},
                         Manifest, _, _, nomatch) ->
    {[Manifest | ManifestList], Prefixes};
%% Delimiter found: collapse the key into the common prefix
%% Prefix ++ Group and drop the individual manifest.
update_keys_and_prefixes({ManifestList, Prefixes},
                         _, Prefix, PrefixLen, Group) ->
    NewPrefix = << Prefix:PrefixLen/binary, Group/binary >>,
    {ManifestList, ordsets:add_element(NewPrefix, Prefixes)}.
%% Observability / Configuration
%% @doc Multiplier presumably applied when sizing key-list fetches
%% from Riak -- confirm at call sites.  Read from the
%% `key_list_multiplier' application env; defaults to
%% ?KEY_LIST_MULTIPLIER.
-spec get_key_list_multiplier() -> float().
get_key_list_multiplier() ->
    riak_cs_config:get_env(riak_cs, key_list_multiplier,
                           ?KEY_LIST_MULTIPLIER).

%% @doc Override the key-list multiplier at runtime.
-spec set_key_list_multiplier(float()) -> 'ok'.
set_key_list_multiplier(Multiplier) ->
    application:set_env(riak_cs, key_list_multiplier,
                        Multiplier).

%% @doc Whether the fold-objects based (v2) listing FSM should be
%% used; defaults to ?FOLD_OBJECTS_FOR_LIST_KEYS.
-spec fold_objects_for_list_keys() -> boolean().
fold_objects_for_list_keys() ->
    riak_cs_config:get_env(riak_cs, fold_objects_for_list_keys,
                           ?FOLD_OBJECTS_FOR_LIST_KEYS).

%% @doc Timeout for the fold-objects request; units not visible here
%% (presumably milliseconds -- confirm in riak_cs_config).
-spec fold_objects_timeout() -> non_neg_integer().
fold_objects_timeout() ->
    riak_cs_config:fold_objects_timeout().
|
96968c2bbc0b6cfb0cacb06a9c750ec9863e34496d809ca5a09c99f48740f41e | wilkerlucio/pathom | planner_readers_test.cljc | (ns com.wsscode.pathom.connect.planner-readers-test
(:require
[clojure.core.async :as async :refer [go]]
[clojure.test :refer [deftest is are run-tests testing]]
[com.wsscode.pathom.connect :as pc]
[com.wsscode.pathom.connect.foreign :as pcf]
[com.wsscode.pathom.core :as p]
[com.wsscode.pathom.misc :as p.misc]
[com.wsscode.pathom.sugar :as ps]
[com.wsscode.pathom.test-helpers :as th]))
(defn index-query?
  "True when the transaction tx is a Pathom index request, i.e. the key
  of its first join is ::pc/indexes. Any error raised while inspecting
  tx (e.g. a non-seqable shape) yields false instead of propagating."
  [tx]
  (try
    (-> tx ffirst first (= ::pc/indexes))
    (catch #?(:clj Throwable :cljs :default) _
      false)))
(defn run-parser
  "Build a serial connect parser from ::resolvers (plus optional
  ::foreign parser specs) and run ::query against it.

  Each foreign spec becomes its own serial parser whose received
  transactions are recorded in the `foreign-calls` atom under its
  ::foreign-id (or a gensym); the atom is exposed via the
  ::foreign-calls attribute and reset at the start of every top-level
  parse.  ::entity seeds the root entity; ::error-stack? installs a
  printing error handler; ::plugins wraps the plugin vector."
  [{::keys [resolvers query entity foreign error-stack? plugins]}]
  (let [foreign-calls (atom {})
        plugins'      (or plugins identity)
        parser        (ps/connect-serial-parser
                        (cond-> {::ps/connect-reader [pc/reader3
                                                      ;; virtual attribute so tests can read the recorded calls
                                                      {::foreign-calls (fn [_] @foreign-calls)}]
                                 ::ps/plugins        (fn [p]
                                                       (plugins'
                                                         (conj p
                                                               {::p/wrap-parser
                                                                (fn [parser]
                                                                  (fn [env tx]
                                                                    ;; fresh call log per top-level parse
                                                                    (reset! foreign-calls {})
                                                                    (parser env tx)))})))}
                          foreign
                          (assoc ::ps/foreign-parsers
                                 (mapv
                                   (fn [{::keys [resolvers foreign-id fatal-error?]}]
                                     (let [source-id (or foreign-id (gensym "foreign-source-"))]
                                       (ps/connect-serial-parser
                                         {::ps/connect-reader pc/reader3
                                          ::ps/plugins        (fn [p]
                                                                (conj p
                                                                      {::p/wrap-parser
                                                                       (fn [parser]
                                                                         (fn [env tx]
                                                                           ;; ::fatal-error? simulates a hard crash of the
                                                                           ;; foreign parser for any non-index query
                                                                           (if (and fatal-error? (not (index-query? tx)))
                                                                             (throw (ex-info "Parser Error" {:foo "bar"})))
                                                                           (swap! foreign-calls update source-id p.misc/vconj tx)
                                                                           (parser env tx)))}))}
                                         resolvers)))
                                   foreign)))
                        resolvers)]
    (parser (cond-> {}
              entity (assoc ::p/entity (atom entity))
              error-stack? (assoc ::p/process-error (fn [_ e] (.printStackTrace e) (p/error-str e))))
            query)))
#?(:clj
   (defn run-parser-async
     "Async twin of `run-parser`: builds a connect-async-parser (and
     async foreign parsers), runs ::query, and blocks on the result
     channel with <!!.  JVM only.

     NOTE(review): unlike `run-parser` this does not destructure or
     honor ::fatal-error? on foreign specs -- confirm whether that
     asymmetry is intentional."
     [{::keys [resolvers query entity foreign error-stack? plugins]}]
     (let [foreign-calls (atom {})
           pplugins      (or plugins identity)
           parser        (ps/connect-async-parser
                           (cond-> {::ps/connect-reader [pc/reader3
                                                         ;; virtual attribute exposing the recorded foreign calls
                                                         {::foreign-calls (fn [_] @foreign-calls)}]
                                    ::ps/plugins        (fn [p]
                                                          (pplugins
                                                            (conj p
                                                                  {::p/wrap-parser
                                                                   (fn [parser]
                                                                     (fn [env tx]
                                                                       ;; fresh call log per top-level parse
                                                                       (reset! foreign-calls {})
                                                                       (parser env tx)))})))}
                             foreign
                             (assoc ::ps/foreign-parsers
                                    (mapv
                                      (fn [{::keys [resolvers foreign-id]}]
                                        (let [source-id (or foreign-id (gensym "foreign-source-"))]
                                          (ps/connect-async-parser
                                            {::ps/connect-reader pc/reader3
                                             ::ps/plugins        (fn [p]
                                                                   (conj p
                                                                         {::p/wrap-parser
                                                                          (fn [parser]
                                                                            (fn [env tx]
                                                                              (swap! foreign-calls update source-id p.misc/vconj tx)
                                                                              (parser env tx)))}))}
                                            resolvers)))
                                      foreign)))
                           resolvers)]
       (async/<!!
         (parser (cond-> {}
                   entity (assoc ::p/entity (atom entity))
                   error-stack? (assoc ::p/process-error (fn [_ e] (.printStackTrace e) (p/error-str e))))
                 query)))))
(deftest test-reader3
(testing "single attribute"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)]
::query [:a]})
{:a 42}))
(testing "params"
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [env _]
{:a (p/params env)}))]
::query '[(:a {:x 42})]})
{:a {:x 42}}))
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a :b]}
(fn [env _]
{:a (p/params env)
:b "foo"}))]
::query '[:b (:a {:x 42})]})
{:a {:x 42} :b "foo"}))
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [env _]
{:a (p/params env)}))]
::query '[{:>/ph [(:a {:x 42})]}]})
{:>/ph {:a {:x 42}}})))
(testing "missed output"
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {}))]
::query [:a]})
{:a ::p/not-found})))
(testing "don't call when data is already available"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)]
::entity {:a "value"}
::query [:a]})
{:a "value"})))
(testing "resolver error"
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] (throw (ex-info "Error" {:error "detail"}))))]
::query [:a]})
{:a ::p/reader-error
::p/errors {[:a] #?(:clj "class clojure.lang.ExceptionInfo: Error - {:error \"detail\"}"
:cljs "Error - {:error \"detail\"}")}})))
(testing "invalid response"
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] 42))]
::query [:a]})
{:a ::p/not-found}))))
(testing "multiple attributes on the same resolver"
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a :b]}
(fn [_ _] {:a 42 :b "foo"}))]
::query [:a :b]})
{:a 42
:b "foo"})))
(testing "and branches"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)
(pc/constantly-resolver :b "boo")]
::query [:a :b]})
{:a 42
:b "boo"}))
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)
(pc/constantly-resolver :b "boo")
(pc/resolver 'a-b-dep
{::pc/input #{:a :b}
::pc/output [:c]}
(fn [_ {:keys [a b]}] {:c [a b]}))]
::query [:c]})
{:c [42 "boo"]})))
(testing "or branches"
(is (= (run-parser
{::resolvers [(assoc (pc/constantly-resolver :a 42)
::pc/sym 'a)
(assoc (pc/constantly-resolver :a 44)
::pc/sym 'a2)]
::query [:a]})
{:a 42}))
(testing "run next node"
(is (= (run-parser
{::resolvers [(assoc (pc/constantly-resolver :a 42)
::pc/sym 'a)
(assoc (pc/constantly-resolver :a 44)
::pc/sym 'a2)
(pc/single-attr-resolver :a :b inc)]
::query [:b]})
#?(:clj {:b 45}
:cljs {:b 43}))))
(testing "missed output"
(is (= (run-parser
{::resolvers [[(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {}))]
(assoc (pc/constantly-resolver :a 44)
::pc/sym 'a2)]
::query [:a]})
{:a 44}))))
(testing "mixed or and"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)
(assoc (pc/constantly-resolver :a 43) ::pc/sym 'a2)
(pc/constantly-resolver :b "boo")]
::query [:a :b]})
#?(:clj {:a 43 :b "boo"}
:cljs {:a 42 :b "boo"}))))
(testing "ident query"
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :b :c #(str % "-C"))]
::query [{[:b "boo"] [:c]}]})
{[:b "boo"] {:c "boo-C"}})))
(testing "chained call"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)
(pc/single-attr-resolver :a :b str)]
::query [:b]})
{:b "42"}))
(testing "skip resolver call when all require attributes are available"
(let [mock (th/mock)]
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {:a "ready" :b "foo"}))
(pc/resolver 'ab
{::pc/input #{:a}
::pc/output [:b]}
(comp (constantly {:b "bar"}) mock))]
::query [:b]})
{:b "foo"}))
(is (= @mock [])))
(let [mock (th/mock)]
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {:a "ready" :b "foo"}))
(pc/resolver 'b
{::pc/input #{:a}
::pc/output [:b]}
(comp (constantly {}) mock))
(pc/single-attr-resolver :b :c #(str % "-C"))]
::query [:c]})
{:c "foo-C"}))
(is (= @mock [])))))
(testing "resolver cache"
(testing "reads from cache"
(is (= (run-parser
{::resolvers [(assoc (pc/constantly-resolver :a 42) ::pc/sym 'a)]
::query [:a]
::plugins #(conj %
(p/env-wrap-plugin
(fn [e]
(assoc e ::p/request-cache (atom '{[a {} {}] {:a 44}})))))})
{:a 44}))))
(testing "batching"
(is (= (run-parser
{::resolvers [(pc/resolver 'users
{::pc/output [{:users [:id]}]}
(fn [_ _] {:users [{:id 1}
{:id 2}
{:id 3}]}))
(pc/resolver 'batcher
{::pc/input #{:id}
::pc/output [:name]
::pc/batch? true}
(fn [_ ids]
(if (sequential? ids)
(mapv #(hash-map :name (str (:id %))) ids)
{:name (str (:id ids))})))]
::error-stack? true
::query [{:users [:name]}]})
{:users [{:name "1"} {:name "2"} {:name "3"}]}))
(is (= (run-parser
{::resolvers [(pc/resolver 'users
{::pc/output [{:users [:id]}]}
(fn [_ _] {:users [{:id ::p/not-found}]}))
(pc/resolver 'batcher
{::pc/input #{:id}
::pc/output [:name]
::pc/batch? true}
(fn [_ ids]
(if (sequential? ids)
(mapv #(hash-map :name (str (:id %))) ids)
{:name (str (:id ids))})))]
::error-stack? true
::query [{:users [:name]}]})
{:users [{:name ::p/not-found}]})))
(testing "placeholders"
(is (= (run-parser
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::entity {:x 3}
::query [{:>/foo [:x]} :y]})
{:>/foo {:x 3}, :y 2}))
(is (= (run-parser
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::entity {:x 3}
::query [:y {:>/foo [:x]}]})
{:>/foo {:x 3}, :y 2}))
(is (= (run-parser
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::query [{[:x 3] [:y {:>/foo [:x :y]}]}]})
{[:x 3] {:y 2 :>/foo {:x 3 :y 2}}}))))
(deftest test-runner3-dynamic-resolvers
(testing "integration with local parser"
(testing "local dependency first"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :b "boo")]
::foreign [{::foreign-id 'remote
::resolvers [(pc/single-attr-resolver :b :c #(str % "-C"))]}]
::query [:c ::foreign-calls]})
{:c "boo-C"
::foreign-calls '{remote [[{([:b "boo"] #:pathom{:context {}}) [:c]}]]}}))
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :b "boo")]
::foreign [{::foreign-id 'remote-b
::resolvers [(pc/single-attr-resolver :b :c #(str % "-C"))]}
{::foreign-id 'remote-c
::resolvers [(pc/single-attr-resolver :c :d #(str % "-D"))]}]
::query [:d ::foreign-calls]})
'{:d "boo-C-D"
::foreign-calls {remote-b [[{([:b "boo"] {:pathom/context {}}) [:c]}]]
remote-c [[{([:c "boo-C"] {:pathom/context {}}) [:d]}]]}})))
(testing "nested dependency with local and remote things depending on it"
(is (= (run-parser
{::resolvers [(pc/alias-resolver :list-provided :local-name)]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'list-of-things
{::pc/output [{:list [:list-provided]}]}
(fn [_ _]
{:list {:list-provided 10}}))
(pc/alias-resolver :list-provided :remote-name)]}]
::query [{:list [:local-name :remote-name]} ::foreign-calls]})
{:list {:remote-name 10, :local-name 10}
::foreign-calls '{remote [[{:list [:list-provided :remote-name]}]]}})))
(testing "foreign dependency first"
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :b :c #(str % "-C"))]
::foreign [{::foreign-id 'remote
::resolvers [(pc/constantly-resolver :b "boo")]}]
::query [:c ::foreign-calls]})
{:c "boo-C"
::foreign-calls '{remote [[:b]]}})))
(testing "with multiple local dependencies"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a "baa")
(pc/constantly-resolver :b "boo")]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'remote/c
{::pc/input #{:a :b}
::pc/output [:c]}
(fn [_ input]
{:c (str (:a input) "-" (:b input) "-C")}))]}]
::query [:c ::foreign-calls]})
'{:c
"baa-boo-C"
::foreign-calls
{remote [[{([::pcf/foreign-call nil] {:pathom/context {:b "boo" :a "baa"}})
[:c]}]]}})))
#_(testing "with multiple foreign dependencies"
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :b :D #(str % "-DD"))]
::foreign [{::foreign-id 'remote
::resolvers [(pc/constantly-resolver :a "foo")
(pc/single-attr-resolver :a :b #(str % "-B"))
(pc/constantly-resolver :c "CCC")]}]
::query [:D ::foreign-calls]})
'{:D
"foo-B-DD"
::foreign-calls
{remote [[:b]]}})))
#_(testing "batch - test not ready"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :items [{:item/id 1} {:item/id 2}])]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'item-by-id
{::pc/input #{:item/id}
::pc/output [:item/prop]}
(fn [_ {:keys [item/id]}]
{:item/prop (str "x-" id)}))]}]
::query [{:items [:item/prop]} ::foreign-calls]})
'{:c
"baa-boo-C"
::foreign-calls
{remote [[{([::pcf/foreign-call nil] {:pathom/context {:b "boo" :a "baa"}})
[:c]}]]}})))
(testing "distribution"
(is (= (run-parser
{::resolvers [(pc/alias-resolver :video/id :great-video-service.video/id)
(pc/alias-resolver :video/id :other-video-thing.video/id)]
::foreign [{::foreign-id 'great-video-service
::resolvers [(pc/resolver 'great-video-service/video-by-id
{::pc/input #{:great-video-service.video/id}
::pc/output [:great-video-service.video/title
:great-video-service.video/duration
:great-video-service.video/like-count
:great-video-service.video/channel-title]}
(fn [_ _]
{:great-video-service.video/title "Great Video Title"
:great-video-service.video/duration 420
:great-video-service.video/like-count 42
:great-video-service.video/channel-title "Channel"}))]}
{::foreign-id 'other-video-thing
::resolvers [(pc/resolver 'other-video-thing/video-by-id
{::pc/input #{:other-video-thing.video/id}
::pc/output [:other-video-thing.video/title
:other-video-thing.video/duration
:other-video-thing.video/like-count
:other-video-thing.video/channel-title]}
(fn [_ _]
{:other-video-thing.video/title "Other Video Thing Title"
:other-video-thing.video/duration 860
:other-video-thing.video/like-count 88
:other-video-thing.video/channel-title "VChannel"}))]}]
::query [{[:video/id 123]
[:great-video-service.video/title
:other-video-thing.video/like-count
:great-video-service.video/duration
:other-video-thing.video/title]}
::foreign-calls]})
'{[:video/id 123] {:great-video-service.video/title "Great Video Title"
:other-video-thing.video/like-count 88
:great-video-service.video/duration 420
:other-video-thing.video/title "Other Video Thing Title"}
::foreign-calls {other-video-thing [[{([:other-video-thing.video/id 123] {:pathom/context {}})
[:other-video-thing.video/title
:other-video-thing.video/like-count]}]]
great-video-service [[{([:great-video-service.video/id 123] {:pathom/context {}})
[:great-video-service.video/duration
:great-video-service.video/title]}]]}})))
(testing "error propagation"
(is (= (run-parser
{::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] (throw (ex-info "Error" {:error "detail"}))))]}]
::query [:a ::foreign-calls]})
{:a ::p/reader-error
::p/errors {[:a] #?(:clj "class clojure.lang.ExceptionInfo: Error - {:error \"detail\"}"
:cljs "Error - {:error \"detail\"}")}
::foreign-calls {'remote [[:a]]}}))
(testing "ident request"
(is (= (run-parser
{::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'a
{::pc/input #{:x}
::pc/output [:a]}
(fn [_ _] (throw (ex-info "Error" {:error "detail"}))))]}]
::entity {:x 5}
::query [:a ::foreign-calls]})
{:a ::p/reader-error
::p/errors {[:a] #?(:clj "class clojure.lang.ExceptionInfo: Error - {:error \"detail\"}"
:cljs "Error - {:error \"detail\"}")}
::foreign-calls '{remote [[{([:x 5] {:pathom/context {}}) [:a]}]]}})))
(testing "error on nested path"
(is (= (run-parser
{::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] (throw (ex-info "Error" {:error "detail"}))))]}]
::query [{[:x 5] [:a]} ::foreign-calls]})
{[:x 5] {:a ::p/reader-error}
::p/errors {[[:x 5] :a] #?(:clj "class clojure.lang.ExceptionInfo: Error - {:error \"detail\"}"
:cljs "Error - {:error \"detail\"}")}
::foreign-calls {'remote [[:a]]}})))
(testing "fatal error on remote parser"
(is (= (run-parser
{::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'a
{::pc/output [:critical-error]}
(fn [_ _] {:critical-error 4}))]
::fatal-error? true}]
::query [:critical-error]})
{:critical-error :com.wsscode.pathom.core/reader-error,
:com.wsscode.pathom.core/errors {[:critical-error] #?(:clj "class clojure.lang.ExceptionInfo: Parser Error - {:foo \"bar\"}"
:cljs "Parser Error - {:foo \"bar\"}")}}))
(testing "in ident request"
(is (= (run-parser
{::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'a
{::pc/input #{:id}
::pc/output [:critical-error]}
(fn [_ _] {:critical-error 4}))]
::fatal-error? true}]
::query [{[:id 123] [:critical-error]}]})
{[:id 123] {:critical-error :com.wsscode.pathom.core/reader-error},
:com.wsscode.pathom.core/errors {[[:id 123] :critical-error] #?(:clj "class clojure.lang.ExceptionInfo: Parser Error - {:foo \"bar\"}"
:cljs "Parser Error - {:foo \"bar\"}")}})))))
(testing "nested queries"
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :user/id :user/name str)]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'users
{::pc/output [{:users [:user/id]}]}
(fn [_ _] {:users {:user/id 1}}))]}]
::query [{:users [:user/name]} ::foreign-calls]})
{:users {:user/name "1"}
::foreign-calls {'remote [[{:users [:user/id]}]]}}))
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :user/id :user/name str)]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'users
{::pc/output [{:users [:user/id]}]}
(fn [_ _] {:users [{:user/id 1}
{:user/id 2}
{:user/id 3}]}))]}]
::query [{:users [:user/name]} ::foreign-calls]})
{:users [{:user/name "1"}
{:user/name "2"}
{:user/name "3"}]
::foreign-calls {'remote [[{:users [:user/id]}]]}}))
(testing "deep nesting"
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :user/id :user/name str)]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'users
{::pc/output [{:nest [{:users [:user/id
:user/email]}]}]}
(fn [_ _] {:nest {:users {:user/id 1}}}))]}]
::query [{:nest [{:users [:user/name]}]} ::foreign-calls]})
{:nest {:users {:user/name "1"}}
::foreign-calls {'remote [[{:nest [{:users [:user/id]}]}]]}}))))
(testing "delegating params"
(is (= (run-parser
{::resolvers []
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'users
{::pc/output [:param-value]}
(fn [env _] {:param-value (-> env :ast :params :param-x)}))]}]
::query '[(:param-value {:param-x 42}) ::foreign-calls]})
{:param-value 42
::foreign-calls '{remote [[(:param-value {:param-x 42})]]}}))
(is (= (run-parser
{::resolvers []
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'param1
{::pc/output [:param-value]}
(fn [env _] {:param-value (-> env :ast :params :param-x)}))
(pc/resolver 'param2
{::pc/output [:param-value2]}
(fn [env _] {:param-value2 (-> env :ast :params :param-x)}))]}]
::query '[(:param-value {:param-x 42})
(:param-value2 {:param-x "foo"}) ::foreign-calls]})
{:param-value 42
:param-value2 "foo"
::foreign-calls '{remote [[(:param-value {:param-x 42})
(:param-value2 {:param-x "foo"})]]}})))
(testing "union queries"
(is (= (run-parser
{::resolvers []
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'users
{::pc/output [{:joined-item [:id :id2 :name :age]}]}
(fn [_ _] {:joined-item [{:id 1 :name "name" :age 24}
{:id2 2 :name "other" :age 42}]}))]}]
::query '[{:joined-item {:id [:id :name]
:id2 [:id2 :age]}}
::foreign-calls]})
{:joined-item [{:id 1 :name "name"}
{:id2 2 :age 42}]
::foreign-calls '{remote [[{:joined-item [:id
:name
:id2
:age]}]]}})))))
(defn constantly-resolver-async
  "Like pc/constantly-resolver, but returns an async response (the
  resolve fn yields a core.async channel via `go`)."
  ;; 2-arity sugar: wrap positional args into the map form.
  ([attribute value]
   (constantly-resolver-async {::attribute attribute
                               :value value}))
  ([{::keys [attribute sym] :keys [value]}]
   ;; Default resolver symbol is derived from the attribute keyword:
   ;; drop the leading \":\" and munge, e.g. :a -> a-constant.
   (let [sym (or sym (symbol (str (munge (subs (str attribute) 1)) "-constant")))]
     (pc/resolver sym
       {::pc/output [attribute]}
       (fn [_ _] (go {attribute value}))))))
#?(:clj
(deftest test-reader3-async
(testing "single attribute"
(is (= (run-parser-async
{::resolvers [(pc/constantly-resolver :a 42)]
::query [:a]})
{:a 42}))
(is (= (run-parser-async
{::resolvers [(constantly-resolver-async :a 42)]
::query [:a]})
{:a 42}))
(testing "missed output"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {}))]
::query [:a]})
{:a ::p/not-found})))
(testing "don't call when data is already available"
(is (= (run-parser-async
{::resolvers [(pc/constantly-resolver :a 42)]
::entity {:a "value"}
::query [:a]})
{:a "value"})))
(testing "resolver error"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] (throw (ex-info "Error" {:error "detail"}))))]
::query [:a]})
{:a ::p/reader-error
::p/errors {[:a] "class clojure.lang.ExceptionInfo: Error - {:error \"detail\"}"}})))
(testing "invalid response"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] 42))]
::query [:a]})
{:a ::p/not-found}))))
(testing "multiple attributes on the same resolver"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a :b]}
(fn [_ _] {:a 42 :b "foo"}))]
::query [:a :b]})
{:a 42
:b "foo"})))
(testing "and branches"
(is (= (run-parser-async
{::resolvers [(pc/constantly-resolver :a 42)
(pc/constantly-resolver :b "boo")]
::query [:a :b]})
{:a 42
:b "boo"}))
(is (= (run-parser-async
{::resolvers [(constantly-resolver-async :a 42)
(constantly-resolver-async :b "boo")]
::query [:a :b]})
{:a 42
:b "boo"}))
(is (= (run-parser-async
{::resolvers [(constantly-resolver-async :a 42)
(constantly-resolver-async :b "boo")
(pc/resolver 'a-b-dep
{::pc/input #{:a :b}
::pc/output [:c]}
(fn [_ {:keys [a b]}] (go {:c [a b]})))]
::query [:c]})
{:c [42 "boo"]})))
(testing "or branches"
(is (= (run-parser-async
{::resolvers [(assoc (pc/constantly-resolver :a 42)
::pc/sym 'a)
(assoc (pc/constantly-resolver :a 44)
::pc/sym 'a2)]
::query [:a]})
{:a 42}))
(is (= (run-parser-async
{::resolvers [(assoc (constantly-resolver-async :a 42)
::pc/sym 'a)
(assoc (constantly-resolver-async :a 44)
::pc/sym 'a2)]
::query [:a]})
{:a 42}))
(testing "run next node"
(is (= (run-parser-async
{::resolvers [(assoc (constantly-resolver-async :a 42)
::pc/sym 'a)
(assoc (constantly-resolver-async :a 44)
::pc/sym 'a2)
(pc/single-attr-resolver :a :b inc)]
::query [:b]})
{:b 45})))
(testing "missed output"
(is (= (run-parser-async
{::resolvers [[(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {}))]
(assoc (pc/constantly-resolver :a 44)
::pc/sym 'a2)]
::query [:a]})
{:a 44}))
(is (= (run-parser-async
{::resolvers [(assoc (constantly-resolver-async :a 44)
::pc/sym 'a)
[(pc/resolver 'a2
{::pc/output [:a]}
(fn [_ _] (go {})))]]
::query [:a]})
{:a 44}))))
(testing "mixed or and"
(is (= (run-parser-async
{::resolvers [(pc/constantly-resolver :a 42)
(assoc (pc/constantly-resolver :a 43) ::pc/sym 'a2)
(pc/constantly-resolver :b "boo")]
::query [:a :b]})
{:a 43 :b "boo"}))
(is (= (run-parser-async
{::resolvers [(constantly-resolver-async :a 42)
(assoc (constantly-resolver-async :a 43) ::pc/sym 'a2)
(constantly-resolver-async :b "boo")]
::query [:a :b]})
{:a 43 :b "boo"})))
(testing "ident query"
(is (= (run-parser-async
{::resolvers [(pc/single-attr-resolver :b :c #(str % "-C"))]
::query [{[:b "boo"] [:c]}]})
{[:b "boo"] {:c "boo-C"}})))
(testing "chained call"
(is (= (run-parser-async
{::resolvers [(pc/constantly-resolver :a 42)
(pc/single-attr-resolver :a :b str)]
::query [:b]})
{:b "42"}))
(is (= (run-parser-async
{::resolvers [(constantly-resolver-async :a 42)
(pc/single-attr-resolver :a :b str)]
::query [:b]})
{:b "42"}))
(testing "skip resolver call when all require attributes are available"
(let [mock (th/mock)]
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {:a "ready" :b "foo"}))
(pc/resolver 'ab
{::pc/input #{:a}
::pc/output [:b]}
(comp (constantly {:b "bar"}) mock))]
::query [:b]})
{:b "foo"}))
(is (= @mock [])))
(let [mock (th/mock)]
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {:a "ready" :b "foo"}))
(pc/resolver 'b
{::pc/input #{:a}
::pc/output [:b]}
(comp (constantly {}) mock))
(pc/single-attr-resolver :b :c #(str % "-C"))]
::query [:c]})
{:c "foo-C"}))
(is (= @mock [])))))
(testing "resolver cache"
(testing "reads from cache"
(is (= (run-parser-async
{::resolvers [(assoc (pc/constantly-resolver :a 42) ::pc/sym 'a)]
::query [:a]
::plugins #(conj %
(p/env-wrap-plugin
(fn [e]
(assoc e ::p/request-cache (atom '{[a {} {}] {:a 44}})))))})
{:a 44}))))
(testing "batching"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'users
{::pc/output [{:users [:id]}]}
(fn [_ _] {:users [{:id 1}
{:id 2}
{:id 3}]}))
(pc/resolver 'batcher
{::pc/input #{:id}
::pc/output [:name]
::pc/batch? true}
(fn [_ ids]
(if (sequential? ids)
(mapv #(hash-map :name (str (:id %))) ids)
{:name (str (:id ids))})))]
::error-stack? true
::query [{:users [:name]}]})
{:users [{:name "1"} {:name "2"} {:name "3"}]}))
(is (= (run-parser-async
{::resolvers [(pc/resolver 'users
{::pc/output [{:users [:id]}]}
(fn [_ _] {:users [{:id ::p/not-found}]}))
(pc/resolver 'batcher
{::pc/input #{:id}
::pc/output [:name]
::pc/batch? true}
(fn [_ ids]
(if (sequential? ids)
(mapv #(hash-map :name (str (:id %))) ids)
{:name (str (:id ids))})))]
::error-stack? true
::query [{:users [:name]}]})
{:users [{:name ::p/not-found}]})))
(testing "placeholders"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::entity {:x 3}
::query [{:>/foo [:x]} :y]})
{:>/foo {:x 3}, :y 2}))
(is (= (run-parser-async
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::entity {:x 3}
::query [:y {:>/foo [:x]}]})
{:>/foo {:x 3}, :y 2}))
(is (= (run-parser-async
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::query [{[:x 3] [:y {:>/foo [:x :y]}]}]})
{[:x 3] {:y 2 :>/foo {:x 3 :y 2}}})))))
(comment
; just to make linter happy
pcf/index-query)
| null | https://raw.githubusercontent.com/wilkerlucio/pathom/4ec25055d3d156241e9174d68ec438c93c971b9b/test/com/wsscode/pathom/connect/planner_readers_test.cljc | clojure | just to make linter happy | (ns com.wsscode.pathom.connect.planner-readers-test
(:require
[clojure.core.async :as async :refer [go]]
[clojure.test :refer [deftest is are run-tests testing]]
[com.wsscode.pathom.connect :as pc]
[com.wsscode.pathom.connect.foreign :as pcf]
[com.wsscode.pathom.core :as p]
[com.wsscode.pathom.misc :as p.misc]
[com.wsscode.pathom.sugar :as ps]
[com.wsscode.pathom.test-helpers :as th]))
(defn index-query? [tx]
(try
(= (first (ffirst tx)) ::pc/indexes)
(catch #?(:clj Throwable :cljs :default) _ false)))
(defn run-parser
  "Builds a serial connect parser over ::resolvers (plus optional embedded
  foreign parsers) and runs ::query against it.

  Option keys:
    ::resolvers    resolvers registered on the local parser
    ::query        EQL transaction to run
    ::entity       initial entity map (wrapped in an atom)
    ::foreign      specs {::foreign-id ::resolvers ::fatal-error?}, each spun
                   up as its own serial parser
    ::error-stack? when true, print stack traces while stringifying errors
    ::plugins      fn transforming the plugin vector before use

  Every transaction forwarded to a foreign parser is recorded in the
  `foreign-calls` atom, which queries can read back via ::foreign-calls.
  The atom is reset at the start of every top-level parse."
  [{::keys [resolvers query entity foreign error-stack? plugins]}]
  (let [foreign-calls (atom {})
        plugins'      (or plugins identity)
        parser        (ps/connect-serial-parser
                        (cond-> {::ps/connect-reader [pc/reader3
                                                     {::foreign-calls (fn [_] @foreign-calls)}]
                                 ::ps/plugins        (fn [p]
                                                       (plugins'
                                                         (conj p
                                                           {::p/wrap-parser
                                                            (fn [parser]
                                                              (fn [env tx]
                                                                ;; fresh call log per top-level parse
                                                                (reset! foreign-calls {})
                                                                (parser env tx)))})))}
                          foreign
                          (assoc ::ps/foreign-parsers
                            (mapv
                              (fn [{::keys [resolvers foreign-id fatal-error?]}]
                                (let [source-id (or foreign-id (gensym "foreign-source-"))]
                                  (ps/connect-serial-parser
                                    {::ps/connect-reader pc/reader3
                                     ::ps/plugins        (fn [p]
                                                           (conj p
                                                             {::p/wrap-parser
                                                              (fn [parser]
                                                                (fn [env tx]
                                                                  ;; simulate a hard remote crash on every
                                                                  ;; non-index query when ::fatal-error? is set
                                                                  (if (and fatal-error? (not (index-query? tx)))
                                                                    (throw (ex-info "Parser Error" {:foo "bar"})))
                                                                  ;; record the tx so tests can assert on it
                                                                  (swap! foreign-calls update source-id p.misc/vconj tx)
                                                                  (parser env tx)))}))}
                                    resolvers)))
                              foreign)))
                        resolvers)]
    (parser (cond-> {}
              entity       (assoc ::p/entity (atom entity))
              error-stack? (assoc ::p/process-error (fn [_ e] (.printStackTrace e) (p/error-str e))))
      query)))
#?(:clj
   (defn run-parser-async
     "Async (core.async) variant of run-parser: builds an async connect parser
     over ::resolvers plus optional embedded async foreign parsers, runs
     ::query, and blocks on the result channel with <!!.

     Takes the same option map as run-parser, except foreign specs here only
     honor ::foreign-id and ::resolvers (no ::fatal-error? simulation).
     Foreign transactions are logged in `foreign-calls` (queryable via
     ::foreign-calls), reset at the start of every top-level parse."
     [{::keys [resolvers query entity foreign error-stack? plugins]}]
     (let [foreign-calls (atom {})
           pplugins      (or plugins identity)
           parser        (ps/connect-async-parser
                           (cond-> {::ps/connect-reader [pc/reader3
                                                        {::foreign-calls (fn [_] @foreign-calls)}]
                                    ::ps/plugins        (fn [p]
                                                          (pplugins
                                                            (conj p
                                                              {::p/wrap-parser
                                                               (fn [parser]
                                                                 (fn [env tx]
                                                                   ;; fresh call log per top-level parse
                                                                   (reset! foreign-calls {})
                                                                   (parser env tx)))})))}
                             foreign
                             (assoc ::ps/foreign-parsers
                               (mapv
                                 (fn [{::keys [resolvers foreign-id]}]
                                   (let [source-id (or foreign-id (gensym "foreign-source-"))]
                                     (ps/connect-async-parser
                                       {::ps/connect-reader pc/reader3
                                        ::ps/plugins        (fn [p]
                                                              (conj p
                                                                {::p/wrap-parser
                                                                 (fn [parser]
                                                                   (fn [env tx]
                                                                     ;; record the tx so tests can assert on it
                                                                     (swap! foreign-calls update source-id p.misc/vconj tx)
                                                                     (parser env tx)))}))}
                                       resolvers)))
                                 foreign)))
                           resolvers)]
       ;; parser returns a channel; block until the result is ready
       (async/<!!
         (parser (cond-> {}
                   entity       (assoc ::p/entity (atom entity))
                   error-stack? (assoc ::p/process-error (fn [_ e] (.printStackTrace e) (p/error-str e))))
           query)))))
(deftest test-reader3
(testing "single attribute"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)]
::query [:a]})
{:a 42}))
(testing "params"
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [env _]
{:a (p/params env)}))]
::query '[(:a {:x 42})]})
{:a {:x 42}}))
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a :b]}
(fn [env _]
{:a (p/params env)
:b "foo"}))]
::query '[:b (:a {:x 42})]})
{:a {:x 42} :b "foo"}))
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [env _]
{:a (p/params env)}))]
::query '[{:>/ph [(:a {:x 42})]}]})
{:>/ph {:a {:x 42}}})))
(testing "missed output"
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {}))]
::query [:a]})
{:a ::p/not-found})))
(testing "don't call when data is already available"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)]
::entity {:a "value"}
::query [:a]})
{:a "value"})))
(testing "resolver error"
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] (throw (ex-info "Error" {:error "detail"}))))]
::query [:a]})
{:a ::p/reader-error
::p/errors {[:a] #?(:clj "class clojure.lang.ExceptionInfo: Error - {:error \"detail\"}"
:cljs "Error - {:error \"detail\"}")}})))
(testing "invalid response"
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] 42))]
::query [:a]})
{:a ::p/not-found}))))
(testing "multiple attributes on the same resolver"
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a :b]}
(fn [_ _] {:a 42 :b "foo"}))]
::query [:a :b]})
{:a 42
:b "foo"})))
(testing "and branches"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)
(pc/constantly-resolver :b "boo")]
::query [:a :b]})
{:a 42
:b "boo"}))
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)
(pc/constantly-resolver :b "boo")
(pc/resolver 'a-b-dep
{::pc/input #{:a :b}
::pc/output [:c]}
(fn [_ {:keys [a b]}] {:c [a b]}))]
::query [:c]})
{:c [42 "boo"]})))
(testing "or branches"
(is (= (run-parser
{::resolvers [(assoc (pc/constantly-resolver :a 42)
::pc/sym 'a)
(assoc (pc/constantly-resolver :a 44)
::pc/sym 'a2)]
::query [:a]})
{:a 42}))
(testing "run next node"
(is (= (run-parser
{::resolvers [(assoc (pc/constantly-resolver :a 42)
::pc/sym 'a)
(assoc (pc/constantly-resolver :a 44)
::pc/sym 'a2)
(pc/single-attr-resolver :a :b inc)]
::query [:b]})
#?(:clj {:b 45}
:cljs {:b 43}))))
(testing "missed output"
(is (= (run-parser
{::resolvers [[(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {}))]
(assoc (pc/constantly-resolver :a 44)
::pc/sym 'a2)]
::query [:a]})
{:a 44}))))
(testing "mixed or and"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)
(assoc (pc/constantly-resolver :a 43) ::pc/sym 'a2)
(pc/constantly-resolver :b "boo")]
::query [:a :b]})
#?(:clj {:a 43 :b "boo"}
:cljs {:a 42 :b "boo"}))))
(testing "ident query"
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :b :c #(str % "-C"))]
::query [{[:b "boo"] [:c]}]})
{[:b "boo"] {:c "boo-C"}})))
(testing "chained call"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a 42)
(pc/single-attr-resolver :a :b str)]
::query [:b]})
{:b "42"}))
(testing "skip resolver call when all require attributes are available"
(let [mock (th/mock)]
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {:a "ready" :b "foo"}))
(pc/resolver 'ab
{::pc/input #{:a}
::pc/output [:b]}
(comp (constantly {:b "bar"}) mock))]
::query [:b]})
{:b "foo"}))
(is (= @mock [])))
(let [mock (th/mock)]
(is (= (run-parser
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {:a "ready" :b "foo"}))
(pc/resolver 'b
{::pc/input #{:a}
::pc/output [:b]}
(comp (constantly {}) mock))
(pc/single-attr-resolver :b :c #(str % "-C"))]
::query [:c]})
{:c "foo-C"}))
(is (= @mock [])))))
(testing "resolver cache"
(testing "reads from cache"
(is (= (run-parser
{::resolvers [(assoc (pc/constantly-resolver :a 42) ::pc/sym 'a)]
::query [:a]
::plugins #(conj %
(p/env-wrap-plugin
(fn [e]
(assoc e ::p/request-cache (atom '{[a {} {}] {:a 44}})))))})
{:a 44}))))
(testing "batching"
(is (= (run-parser
{::resolvers [(pc/resolver 'users
{::pc/output [{:users [:id]}]}
(fn [_ _] {:users [{:id 1}
{:id 2}
{:id 3}]}))
(pc/resolver 'batcher
{::pc/input #{:id}
::pc/output [:name]
::pc/batch? true}
(fn [_ ids]
(if (sequential? ids)
(mapv #(hash-map :name (str (:id %))) ids)
{:name (str (:id ids))})))]
::error-stack? true
::query [{:users [:name]}]})
{:users [{:name "1"} {:name "2"} {:name "3"}]}))
(is (= (run-parser
{::resolvers [(pc/resolver 'users
{::pc/output [{:users [:id]}]}
(fn [_ _] {:users [{:id ::p/not-found}]}))
(pc/resolver 'batcher
{::pc/input #{:id}
::pc/output [:name]
::pc/batch? true}
(fn [_ ids]
(if (sequential? ids)
(mapv #(hash-map :name (str (:id %))) ids)
{:name (str (:id ids))})))]
::error-stack? true
::query [{:users [:name]}]})
{:users [{:name ::p/not-found}]})))
(testing "placeholders"
(is (= (run-parser
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::entity {:x 3}
::query [{:>/foo [:x]} :y]})
{:>/foo {:x 3}, :y 2}))
(is (= (run-parser
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::entity {:x 3}
::query [:y {:>/foo [:x]}]})
{:>/foo {:x 3}, :y 2}))
(is (= (run-parser
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::query [{[:x 3] [:y {:>/foo [:x :y]}]}]})
{[:x 3] {:y 2 :>/foo {:x 3 :y 2}}}))))
(deftest test-runner3-dynamic-resolvers
(testing "integration with local parser"
(testing "local dependency first"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :b "boo")]
::foreign [{::foreign-id 'remote
::resolvers [(pc/single-attr-resolver :b :c #(str % "-C"))]}]
::query [:c ::foreign-calls]})
{:c "boo-C"
::foreign-calls '{remote [[{([:b "boo"] #:pathom{:context {}}) [:c]}]]}}))
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :b "boo")]
::foreign [{::foreign-id 'remote-b
::resolvers [(pc/single-attr-resolver :b :c #(str % "-C"))]}
{::foreign-id 'remote-c
::resolvers [(pc/single-attr-resolver :c :d #(str % "-D"))]}]
::query [:d ::foreign-calls]})
'{:d "boo-C-D"
::foreign-calls {remote-b [[{([:b "boo"] {:pathom/context {}}) [:c]}]]
remote-c [[{([:c "boo-C"] {:pathom/context {}}) [:d]}]]}})))
(testing "nested dependency with local and remote things depending on it"
(is (= (run-parser
{::resolvers [(pc/alias-resolver :list-provided :local-name)]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'list-of-things
{::pc/output [{:list [:list-provided]}]}
(fn [_ _]
{:list {:list-provided 10}}))
(pc/alias-resolver :list-provided :remote-name)]}]
::query [{:list [:local-name :remote-name]} ::foreign-calls]})
{:list {:remote-name 10, :local-name 10}
::foreign-calls '{remote [[{:list [:list-provided :remote-name]}]]}})))
(testing "foreign dependency first"
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :b :c #(str % "-C"))]
::foreign [{::foreign-id 'remote
::resolvers [(pc/constantly-resolver :b "boo")]}]
::query [:c ::foreign-calls]})
{:c "boo-C"
::foreign-calls '{remote [[:b]]}})))
(testing "with multiple local dependencies"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :a "baa")
(pc/constantly-resolver :b "boo")]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'remote/c
{::pc/input #{:a :b}
::pc/output [:c]}
(fn [_ input]
{:c (str (:a input) "-" (:b input) "-C")}))]}]
::query [:c ::foreign-calls]})
'{:c
"baa-boo-C"
::foreign-calls
{remote [[{([::pcf/foreign-call nil] {:pathom/context {:b "boo" :a "baa"}})
[:c]}]]}})))
#_(testing "with multiple foreign dependencies"
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :b :D #(str % "-DD"))]
::foreign [{::foreign-id 'remote
::resolvers [(pc/constantly-resolver :a "foo")
(pc/single-attr-resolver :a :b #(str % "-B"))
(pc/constantly-resolver :c "CCC")]}]
::query [:D ::foreign-calls]})
'{:D
"foo-B-DD"
::foreign-calls
{remote [[:b]]}})))
#_(testing "batch - test not ready"
(is (= (run-parser
{::resolvers [(pc/constantly-resolver :items [{:item/id 1} {:item/id 2}])]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'item-by-id
{::pc/input #{:item/id}
::pc/output [:item/prop]}
(fn [_ {:keys [item/id]}]
{:item/prop (str "x-" id)}))]}]
::query [{:items [:item/prop]} ::foreign-calls]})
'{:c
"baa-boo-C"
::foreign-calls
{remote [[{([::pcf/foreign-call nil] {:pathom/context {:b "boo" :a "baa"}})
[:c]}]]}})))
(testing "distribution"
(is (= (run-parser
{::resolvers [(pc/alias-resolver :video/id :great-video-service.video/id)
(pc/alias-resolver :video/id :other-video-thing.video/id)]
::foreign [{::foreign-id 'great-video-service
::resolvers [(pc/resolver 'great-video-service/video-by-id
{::pc/input #{:great-video-service.video/id}
::pc/output [:great-video-service.video/title
:great-video-service.video/duration
:great-video-service.video/like-count
:great-video-service.video/channel-title]}
(fn [_ _]
{:great-video-service.video/title "Great Video Title"
:great-video-service.video/duration 420
:great-video-service.video/like-count 42
:great-video-service.video/channel-title "Channel"}))]}
{::foreign-id 'other-video-thing
::resolvers [(pc/resolver 'other-video-thing/video-by-id
{::pc/input #{:other-video-thing.video/id}
::pc/output [:other-video-thing.video/title
:other-video-thing.video/duration
:other-video-thing.video/like-count
:other-video-thing.video/channel-title]}
(fn [_ _]
{:other-video-thing.video/title "Other Video Thing Title"
:other-video-thing.video/duration 860
:other-video-thing.video/like-count 88
:other-video-thing.video/channel-title "VChannel"}))]}]
::query [{[:video/id 123]
[:great-video-service.video/title
:other-video-thing.video/like-count
:great-video-service.video/duration
:other-video-thing.video/title]}
::foreign-calls]})
'{[:video/id 123] {:great-video-service.video/title "Great Video Title"
:other-video-thing.video/like-count 88
:great-video-service.video/duration 420
:other-video-thing.video/title "Other Video Thing Title"}
::foreign-calls {other-video-thing [[{([:other-video-thing.video/id 123] {:pathom/context {}})
[:other-video-thing.video/title
:other-video-thing.video/like-count]}]]
great-video-service [[{([:great-video-service.video/id 123] {:pathom/context {}})
[:great-video-service.video/duration
:great-video-service.video/title]}]]}})))
(testing "error propagation"
(is (= (run-parser
{::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] (throw (ex-info "Error" {:error "detail"}))))]}]
::query [:a ::foreign-calls]})
{:a ::p/reader-error
::p/errors {[:a] #?(:clj "class clojure.lang.ExceptionInfo: Error - {:error \"detail\"}"
:cljs "Error - {:error \"detail\"}")}
::foreign-calls {'remote [[:a]]}}))
(testing "ident request"
(is (= (run-parser
{::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'a
{::pc/input #{:x}
::pc/output [:a]}
(fn [_ _] (throw (ex-info "Error" {:error "detail"}))))]}]
::entity {:x 5}
::query [:a ::foreign-calls]})
{:a ::p/reader-error
::p/errors {[:a] #?(:clj "class clojure.lang.ExceptionInfo: Error - {:error \"detail\"}"
:cljs "Error - {:error \"detail\"}")}
::foreign-calls '{remote [[{([:x 5] {:pathom/context {}}) [:a]}]]}})))
(testing "error on nested path"
(is (= (run-parser
{::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] (throw (ex-info "Error" {:error "detail"}))))]}]
::query [{[:x 5] [:a]} ::foreign-calls]})
{[:x 5] {:a ::p/reader-error}
::p/errors {[[:x 5] :a] #?(:clj "class clojure.lang.ExceptionInfo: Error - {:error \"detail\"}"
:cljs "Error - {:error \"detail\"}")}
::foreign-calls {'remote [[:a]]}})))
(testing "fatal error on remote parser"
(is (= (run-parser
{::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'a
{::pc/output [:critical-error]}
(fn [_ _] {:critical-error 4}))]
::fatal-error? true}]
::query [:critical-error]})
{:critical-error :com.wsscode.pathom.core/reader-error,
:com.wsscode.pathom.core/errors {[:critical-error] #?(:clj "class clojure.lang.ExceptionInfo: Parser Error - {:foo \"bar\"}"
:cljs "Parser Error - {:foo \"bar\"}")}}))
(testing "in ident request"
(is (= (run-parser
{::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'a
{::pc/input #{:id}
::pc/output [:critical-error]}
(fn [_ _] {:critical-error 4}))]
::fatal-error? true}]
::query [{[:id 123] [:critical-error]}]})
{[:id 123] {:critical-error :com.wsscode.pathom.core/reader-error},
:com.wsscode.pathom.core/errors {[[:id 123] :critical-error] #?(:clj "class clojure.lang.ExceptionInfo: Parser Error - {:foo \"bar\"}"
:cljs "Parser Error - {:foo \"bar\"}")}})))))
(testing "nested queries"
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :user/id :user/name str)]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'users
{::pc/output [{:users [:user/id]}]}
(fn [_ _] {:users {:user/id 1}}))]}]
::query [{:users [:user/name]} ::foreign-calls]})
{:users {:user/name "1"}
::foreign-calls {'remote [[{:users [:user/id]}]]}}))
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :user/id :user/name str)]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'users
{::pc/output [{:users [:user/id]}]}
(fn [_ _] {:users [{:user/id 1}
{:user/id 2}
{:user/id 3}]}))]}]
::query [{:users [:user/name]} ::foreign-calls]})
{:users [{:user/name "1"}
{:user/name "2"}
{:user/name "3"}]
::foreign-calls {'remote [[{:users [:user/id]}]]}}))
(testing "deep nesting"
(is (= (run-parser
{::resolvers [(pc/single-attr-resolver :user/id :user/name str)]
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'users
{::pc/output [{:nest [{:users [:user/id
:user/email]}]}]}
(fn [_ _] {:nest {:users {:user/id 1}}}))]}]
::query [{:nest [{:users [:user/name]}]} ::foreign-calls]})
{:nest {:users {:user/name "1"}}
::foreign-calls {'remote [[{:nest [{:users [:user/id]}]}]]}}))))
(testing "delegating params"
(is (= (run-parser
{::resolvers []
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'users
{::pc/output [:param-value]}
(fn [env _] {:param-value (-> env :ast :params :param-x)}))]}]
::query '[(:param-value {:param-x 42}) ::foreign-calls]})
{:param-value 42
::foreign-calls '{remote [[(:param-value {:param-x 42})]]}}))
(is (= (run-parser
{::resolvers []
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'param1
{::pc/output [:param-value]}
(fn [env _] {:param-value (-> env :ast :params :param-x)}))
(pc/resolver 'param2
{::pc/output [:param-value2]}
(fn [env _] {:param-value2 (-> env :ast :params :param-x)}))]}]
::query '[(:param-value {:param-x 42})
(:param-value2 {:param-x "foo"}) ::foreign-calls]})
{:param-value 42
:param-value2 "foo"
::foreign-calls '{remote [[(:param-value {:param-x 42})
(:param-value2 {:param-x "foo"})]]}})))
(testing "union queries"
(is (= (run-parser
{::resolvers []
::foreign [{::foreign-id 'remote
::resolvers [(pc/resolver 'users
{::pc/output [{:joined-item [:id :id2 :name :age]}]}
(fn [_ _] {:joined-item [{:id 1 :name "name" :age 24}
{:id2 2 :name "other" :age 42}]}))]}]
::query '[{:joined-item {:id [:id :name]
:id2 [:id2 :age]}}
::foreign-calls]})
{:joined-item [{:id 1 :name "name"}
{:id2 2 :age 42}]
::foreign-calls '{remote [[{:joined-item [:id
:name
:id2
:age]}]]}})))))
(defn constantly-resolver-async
  "Builds a resolver that always yields `value` under `attribute`, delivering
  the result on a core.async channel (async response). Accepts either the
  attribute/value pair directly or a config map with ::attribute, :value and
  an optional ::sym override for the generated resolver symbol."
  ([attribute value]
   (constantly-resolver-async {::attribute attribute :value value}))
  ([{::keys [attribute sym] :keys [value]}]
   (let [resolver-sym (or sym
                          ;; e.g. :user/name -> user_SLASH_name-constant
                          (-> (str attribute)
                              (subs 1)
                              munge
                              str
                              (str "-constant")
                              symbol))]
     (pc/resolver resolver-sym
       {::pc/output [attribute]}
       (fn [_ _] (go {attribute value}))))))
#?(:clj
(deftest test-reader3-async
(testing "single attribute"
(is (= (run-parser-async
{::resolvers [(pc/constantly-resolver :a 42)]
::query [:a]})
{:a 42}))
(is (= (run-parser-async
{::resolvers [(constantly-resolver-async :a 42)]
::query [:a]})
{:a 42}))
(testing "missed output"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {}))]
::query [:a]})
{:a ::p/not-found})))
(testing "don't call when data is already available"
(is (= (run-parser-async
{::resolvers [(pc/constantly-resolver :a 42)]
::entity {:a "value"}
::query [:a]})
{:a "value"})))
(testing "resolver error"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] (throw (ex-info "Error" {:error "detail"}))))]
::query [:a]})
{:a ::p/reader-error
::p/errors {[:a] "class clojure.lang.ExceptionInfo: Error - {:error \"detail\"}"}})))
(testing "invalid response"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] 42))]
::query [:a]})
{:a ::p/not-found}))))
(testing "multiple attributes on the same resolver"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a :b]}
(fn [_ _] {:a 42 :b "foo"}))]
::query [:a :b]})
{:a 42
:b "foo"})))
(testing "and branches"
(is (= (run-parser-async
{::resolvers [(pc/constantly-resolver :a 42)
(pc/constantly-resolver :b "boo")]
::query [:a :b]})
{:a 42
:b "boo"}))
(is (= (run-parser-async
{::resolvers [(constantly-resolver-async :a 42)
(constantly-resolver-async :b "boo")]
::query [:a :b]})
{:a 42
:b "boo"}))
(is (= (run-parser-async
{::resolvers [(constantly-resolver-async :a 42)
(constantly-resolver-async :b "boo")
(pc/resolver 'a-b-dep
{::pc/input #{:a :b}
::pc/output [:c]}
(fn [_ {:keys [a b]}] (go {:c [a b]})))]
::query [:c]})
{:c [42 "boo"]})))
(testing "or branches"
(is (= (run-parser-async
{::resolvers [(assoc (pc/constantly-resolver :a 42)
::pc/sym 'a)
(assoc (pc/constantly-resolver :a 44)
::pc/sym 'a2)]
::query [:a]})
{:a 42}))
(is (= (run-parser-async
{::resolvers [(assoc (constantly-resolver-async :a 42)
::pc/sym 'a)
(assoc (constantly-resolver-async :a 44)
::pc/sym 'a2)]
::query [:a]})
{:a 42}))
(testing "run next node"
(is (= (run-parser-async
{::resolvers [(assoc (constantly-resolver-async :a 42)
::pc/sym 'a)
(assoc (constantly-resolver-async :a 44)
::pc/sym 'a2)
(pc/single-attr-resolver :a :b inc)]
::query [:b]})
{:b 45})))
(testing "missed output"
(is (= (run-parser-async
{::resolvers [[(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {}))]
(assoc (pc/constantly-resolver :a 44)
::pc/sym 'a2)]
::query [:a]})
{:a 44}))
(is (= (run-parser-async
{::resolvers [(assoc (constantly-resolver-async :a 44)
::pc/sym 'a)
[(pc/resolver 'a2
{::pc/output [:a]}
(fn [_ _] (go {})))]]
::query [:a]})
{:a 44}))))
(testing "mixed or and"
(is (= (run-parser-async
{::resolvers [(pc/constantly-resolver :a 42)
(assoc (pc/constantly-resolver :a 43) ::pc/sym 'a2)
(pc/constantly-resolver :b "boo")]
::query [:a :b]})
{:a 43 :b "boo"}))
(is (= (run-parser-async
{::resolvers [(constantly-resolver-async :a 42)
(assoc (constantly-resolver-async :a 43) ::pc/sym 'a2)
(constantly-resolver-async :b "boo")]
::query [:a :b]})
{:a 43 :b "boo"})))
(testing "ident query"
(is (= (run-parser-async
{::resolvers [(pc/single-attr-resolver :b :c #(str % "-C"))]
::query [{[:b "boo"] [:c]}]})
{[:b "boo"] {:c "boo-C"}})))
(testing "chained call"
(is (= (run-parser-async
{::resolvers [(pc/constantly-resolver :a 42)
(pc/single-attr-resolver :a :b str)]
::query [:b]})
{:b "42"}))
(is (= (run-parser-async
{::resolvers [(constantly-resolver-async :a 42)
(pc/single-attr-resolver :a :b str)]
::query [:b]})
{:b "42"}))
(testing "skip resolver call when all require attributes are available"
(let [mock (th/mock)]
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {:a "ready" :b "foo"}))
(pc/resolver 'ab
{::pc/input #{:a}
::pc/output [:b]}
(comp (constantly {:b "bar"}) mock))]
::query [:b]})
{:b "foo"}))
(is (= @mock [])))
(let [mock (th/mock)]
(is (= (run-parser-async
{::resolvers [(pc/resolver 'a
{::pc/output [:a]}
(fn [_ _] {:a "ready" :b "foo"}))
(pc/resolver 'b
{::pc/input #{:a}
::pc/output [:b]}
(comp (constantly {}) mock))
(pc/single-attr-resolver :b :c #(str % "-C"))]
::query [:c]})
{:c "foo-C"}))
(is (= @mock [])))))
(testing "resolver cache"
(testing "reads from cache"
(is (= (run-parser-async
{::resolvers [(assoc (pc/constantly-resolver :a 42) ::pc/sym 'a)]
::query [:a]
::plugins #(conj %
(p/env-wrap-plugin
(fn [e]
(assoc e ::p/request-cache (atom '{[a {} {}] {:a 44}})))))})
{:a 44}))))
(testing "batching"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'users
{::pc/output [{:users [:id]}]}
(fn [_ _] {:users [{:id 1}
{:id 2}
{:id 3}]}))
(pc/resolver 'batcher
{::pc/input #{:id}
::pc/output [:name]
::pc/batch? true}
(fn [_ ids]
(if (sequential? ids)
(mapv #(hash-map :name (str (:id %))) ids)
{:name (str (:id ids))})))]
::error-stack? true
::query [{:users [:name]}]})
{:users [{:name "1"} {:name "2"} {:name "3"}]}))
(is (= (run-parser-async
{::resolvers [(pc/resolver 'users
{::pc/output [{:users [:id]}]}
(fn [_ _] {:users [{:id ::p/not-found}]}))
(pc/resolver 'batcher
{::pc/input #{:id}
::pc/output [:name]
::pc/batch? true}
(fn [_ ids]
(if (sequential? ids)
(mapv #(hash-map :name (str (:id %))) ids)
{:name (str (:id ids))})))]
::error-stack? true
::query [{:users [:name]}]})
{:users [{:name ::p/not-found}]})))
(testing "placeholders"
(is (= (run-parser-async
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::entity {:x 3}
::query [{:>/foo [:x]} :y]})
{:>/foo {:x 3}, :y 2}))
(is (= (run-parser-async
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::entity {:x 3}
::query [:y {:>/foo [:x]}]})
{:>/foo {:x 3}, :y 2}))
(is (= (run-parser-async
{::resolvers [(pc/resolver 'y
{::pc/output [:y]}
(fn [_ _] {:y 2}))]
::query [{[:x 3] [:y {:>/foo [:x :y]}]}]})
{[:x 3] {:y 2 :>/foo {:x 3 :y 2}}})))))
(comment
pcf/index-query)
|
094543fa7e9d96cb29c8573a9031539a932ba562ccce8f53e9b33f012acd1fc0 | leonidas/lambda-webdev | Connection.hs | {-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
module Network.WebSockets.Messaging.Connection where
import Network.WebSockets hiding (send, Request, Message)
import Control.Concurrent (forkIO)
import Control.Concurrent.STM
import Control.Applicative
import Control.Monad (guard, forever, void, (>=>), mplus)
import Control.Monad.IO.Class
import Data.Aeson (encode, decode, ToJSON(..), FromJSON(..), fromJSON, Result(..))
import qualified Data.Aeson as Json
import Data.Traversable (traverse)
import Data.Foldable (traverse_)
import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Control.Exception (catch)
import Prelude hiding (catch)
import Network.WebSockets.Messaging.Container
import Network.WebSockets.Messaging.Message
-- | A container whose stream is closed by writing an explicit 'Nothing'.
type Closable c a = c (Maybe a)
-- | Subscription handler: matches an incoming JSON payload inside STM
-- ('retry' means "not mine") and yields the IO action to run for it.
type Handler r = Json.Value -> STM (IO r)
-- | Identifier handed out to each request/notification subscription.
type SubId = Int
-- | All mutable state of one live messaging connection, multiplexing
-- requests, responses and notifications over a single websocket.
data Connection = Connection
    { outbox :: !(Closable TQueue Json.Value)         -- ^ outgoing messages; 'Nothing' closes the writer loop
    , disconnected :: !(TVar Bool)                    -- ^ set once the peer goes away
    , subId :: !(TVar SubId)                          -- ^ next subscription id to hand out
    , requestSubs :: !(TVar (IntMap (Handler Json.Value))) -- ^ request handlers by subscription id
    , notifySubs :: !(TVar (IntMap (Handler ())))     -- ^ notification handlers by subscription id
    , reqId :: !(TVar ReqId)                          -- ^ next outgoing request id
    , reqMap :: !(TVar (IntMap (TMVar Json.Value)))   -- ^ in-flight requests awaiting a response
    }
-- | A write-once slot for a response that may not have arrived yet.
newtype Future a = Future (TMVar a)
-- | Block (STM retry) until the future has been fulfilled, then read it.
get :: Future a -> STM a
get (Future var) = readTMVar var
-- | Allocate a connection's mutable state: empty outbox, not disconnected,
-- id counters at zero, and no subscriptions or in-flight requests.
newConnection :: STM Connection
newConnection = do
    out     <- newTQueue
    disc    <- newTVar False
    sids    <- newTVar 0
    reqSubs <- newTVar IntMap.empty
    ntfSubs <- newTVar IntMap.empty
    rids    <- newTVar 0
    pending <- newTVar IntMap.empty
    return Connection
        { outbox       = out
        , disconnected = disc
        , subId        = sids
        , requestSubs  = reqSubs
        , notifySubs   = ntfSubs
        , reqId        = rids
        , reqMap       = pending
        }
-- | Send a request and return immediately with a 'Future' that is fulfilled
-- once the matching response arrives (consume it with 'get').
--
-- NOTE(review): if the peer's response fails to decode as @resp@, the forked
-- worker reports a 'ProtocolError' and dies via 'error' WITHOUT ever filling
-- the future, so a caller blocked in 'get' waits forever — confirm whether
-- that is intended.
requestAsync :: (Message req, FromJSON resp) => Connection -> req -> IO (Future resp)
requestAsync conn@(Connection {..}) !req = do
    resp <- newEmptyTMVarIO
    fut <- newEmptyTMVarIO
    void $ forkIO $ do
        -- allocate the id, register the response slot and ship the request
        -- in one transaction, so a response can never race the registration
        rqId <- atomically $ do
            rqId <- nextReqId conn
            modifyTVar' reqMap $! IntMap.insert rqId resp
            send conn $! Request rqId $! msgToJSON req
            return rqId
        -- blocks (STM retry) until dispatch fills `resp`; the delete is
        -- rolled back on retry, so the mapping stays alive until then
        js <- atomically $ do
            modifyTVar' reqMap $! IntMap.delete rqId
            readTMVar resp
        case fromJSON js of
            Json.Success dat -> atomically $! putTMVar fut $! dat
            Json.Error msg -> do
                atomically $! send conn $! ProtocolError $! T.pack msg
                error "malformed response"
    return $ Future fut
-- | Send a request and block until the matching response arrives, decoded
-- as @resp@. On a malformed response, reports a 'ProtocolError' to the peer
-- and throws via 'error'.
--
-- Consistency fix: previously this duplicated the id-allocation logic inline
-- (instead of calling 'nextReqId') and allocated the id in a transaction
-- separate from registering/sending — unlike 'requestAsync'. Both now share
-- one atomic allocate/register/send step, so a response can never race the
-- registration of its TMVar.
request :: (Message req, FromJSON resp) => Connection -> req -> IO resp
request conn@(Connection {..}) !req = do
    resp <- newEmptyTMVarIO
    rqId <- atomically $ do
        rqId <- nextReqId conn
        modifyTVar' reqMap $! IntMap.insert rqId resp
        send conn $! Request rqId $! msgToJSON req
        return rqId
    -- blocks (STM retry) until dispatch fills `resp`; the delete is rolled
    -- back on retry, so the mapping stays alive until the response lands
    js <- atomically $ do
        modifyTVar' reqMap $! IntMap.delete rqId
        readTMVar resp
    case fromJSON js of
        Json.Success dat -> return dat
        Json.Error msg -> do
            atomically $! send conn $! ProtocolError $! T.pack msg
            error "malformed response"
-- | Fire-and-forget: wrap the message in a 'Notification' container and
-- queue it for the peer.
notify :: Message ntfy => Connection -> ntfy -> STM ()
notify conn ntfy = send conn (Notification (msgToJSON ntfy))
-- | Hand out the next subscription id (post-increment of 'subId').
nextSubId :: Connection -> STM SubId
nextSubId (Connection {..}) = do
    current <- readTVar subId
    writeTVar subId $! current + 1
    return current
-- | Hand out the next request id (post-increment of 'reqId').
--
-- Signature fix: this previously claimed @STM SubId@ even though it
-- allocates request ids; both aliases are 'Int' here (the id is used as an
-- 'IntMap' key), so the change is purely documentary at the type level.
nextReqId :: Connection -> STM ReqId
nextReqId (Connection {..}) = do
    rqId <- readTVar reqId
    writeTVar reqId $! rqId + 1
    return rqId
-- | Subscribe a request handler. 'dispatch' tries every registered handler
-- in turn; a payload that does not parse as @req@ makes this handler 'retry',
-- which lets dispatch fall through to the next subscriber.
onRequest :: (Message req, Message resp) => Connection -> (req -> IO resp) -> STM ()
onRequest conn@(Connection {..}) !handler = do
    sid <- nextSubId conn
    modifyTVar' requestSubs (IntMap.insert sid handler') where
    handler' js = case msgFromJSON js of
        -- matched: run the user handler and re-encode its reply to JSON
        Json.Success rq -> return $! msgToJSON <$> handler rq
        -- not our message shape: STM retry signals "no match" to dispatch
        Error _ -> retry
-- | Subscribe a notification handler. As with 'onRequest', a payload that
-- does not parse as @req@ makes the wrapped handler 'retry' so that
-- 'dispatch' can try the next subscriber instead.
onNotify :: Message req => Connection -> (req -> IO ()) -> STM ()
onNotify conn@(Connection{..}) !handler = do
    sid <- nextSubId conn
    modifyTVar' notifySubs (IntMap.insert sid wrapped)
  where
    wrapped js =
        case msgFromJSON js of
            Json.Success ntfy -> return $! handler ntfy
            Error _           -> retry
-- | Run `handler` once the connection has been marked disconnected; while
-- still connected the transaction retries (via 'guard' on a False flag).
onDisconnect :: Connection -> STM () -> STM ()
onDisconnect !(Connection {..}) !handler = do
    gone <- readTVar disconnected
    guard gone
    handler
-- | Queue an outgoing container on the outbox (as 'Just', i.e. without
-- closing the writer loop).
send :: Connection -> Container -> STM ()
send (Connection {..}) c = writeTQueue outbox (Just (toJSON c))
-- | Receive one websocket text frame and decode it from JSON;
-- 'Nothing' on a parse failure.
recvJson :: (TextProtocol p, FromJSON a) => WebSockets p (Maybe a)
recvJson = fmap decode receiveData
-- | Encode a JSON value and send it as a websocket text frame.
sendJson :: TextProtocol p => Json.Value -> WebSockets p ()
sendJson js = sendTextData (encode js)
-- | Encode a JSON value and push it through a sink as a text data message
-- (used by the writer thread, outside the 'WebSockets' monad).
sinkJson :: TextProtocol p => Sink p -> Json.Value -> IO ()
sinkJson sink js = sendSink sink (DataMessage (Text (encode js)))
-- | Drain a closable queue: for each 'Just' item run `handler` inside the
-- read transaction and then `after` in IO, looping until a 'Nothing'
-- (close marker) is read.
untilClosed :: Closable TQueue a -> (a -> STM b) -> (b -> IO c) -> IO ()
untilClosed chan handler after = loop where
    loop =
        -- `traverse` over Maybe: a 'Nothing' skips the handler and, via
        -- `traverse_` below, ends the loop
        atomically (readTQueue chan >>= traverse handler)
        >>= traverse_ (after >=> const loop)
-- | Route one incoming container: requests go to a matching 'requestSubs'
-- handler, notifications to 'notifySubs', responses to the TMVar registered
-- by 'request'/'requestAsync'. Unknown containers are silently dropped.
dispatch :: Connection -> Container -> IO ()
dispatch conn@(Connection {..}) !c = case c of
    Request rqId js -> do
        handler <- atomically $ do
            subs <- readTVar requestSubs
            -- try each subscriber; a non-match retries and `mplus` moves on
            -- to the next; `orElse` turns "nobody matched" into Nothing
            let trySubs = foldr mplus retry $ map ($ js) $ IntMap.elems subs
            fmap Just trySubs `orElse` return Nothing
        -- run the handler off-thread so dispatch never blocks the read loop
        void $ forkIO $ maybe invalidRequest respond handler
        where
            invalidRequest = atomically . send conn
                $ ProtocolError "unrecognized request"
            respond h = h >>= atomically . send conn . Response rqId
    Notification js -> do
        handler <- atomically $ do
            subs <- readTVar notifySubs
            -- same first-match-wins scan as for requests
            let trySubs = foldr mplus retry $ map ($ js) $ IntMap.elems subs
            fmap Just trySubs `orElse` return Nothing
        void $ forkIO $ fromMaybe noHandler handler
        where
            noHandler = atomically . send conn
                $ ProtocolDebug "ignored notification"
    Response rqId js -> atomically $ do
        -- deliver to the in-flight request's slot, if it is still registered
        h <- IntMap.lookup rqId <$> readTVar reqMap
        case h of
            Nothing -> responseIgnored
            Just var -> putTMVar var js
        where
            responseIgnored = send conn $ ProtocolDebug "ignored response"
    _ -> return () -- TODO: print/log error?
-- | Run a websocket session: creates a fresh 'Connection', forks a writer
-- thread that drains 'outbox' into the socket sink and a thread running the
-- user `handler`, then loops reading and dispatching incoming JSON. Either a
-- write failure ('ConnectionError') or any read failure closes the outbox
-- and flips 'disconnected', which wakes 'onDisconnect' subscribers.
onConnect :: TextProtocol p => (Connection -> IO ()) -> WebSockets p ()
onConnect handler = do
    conn@(Connection {..}) <- liftIO $ atomically newConnection
    let replyInvalid = send conn $ ProtocolError "invalid message"
        handleWriteError (_ :: ConnectionError) = signalDisconnect
        handleReadError _ = liftIO signalDisconnect
        -- close the writer loop and mark the connection dead
        signalDisconnect = do
            atomically $ do
                writeTQueue outbox Nothing
                writeTVar disconnected True
        -- undecodable frames get a protocol error; decodable ones dispatch
        readLoop = forever $ do
            recvJson >>= liftIO . maybe (atomically $ replyInvalid) (dispatch conn)
    sink <- getSink
    liftIO $ do
        -- writer thread: runs until the outbox is closed or the sink errors
        void . forkIO $ untilClosed outbox return (sinkJson sink)
            `catch` handleWriteError
        void . forkIO $ handler conn
    catchWsError readLoop handleReadError
| null | https://raw.githubusercontent.com/leonidas/lambda-webdev/0213745e6d2cd071007a158b1dc7a83f238bdc00/lib/Network/WebSockets/Messaging/Connection.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE BangPatterns #
sinkJson sink js = sendSink sink . DataMessage . Text . encode $ (trace (show js) js)
TODO: print/log error? | # LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
module Network.WebSockets.Messaging.Connection where
import Network.WebSockets hiding (send, Request, Message)
import Control.Concurrent (forkIO)
import Control.Concurrent.STM
import Control.Applicative
import Control.Monad (guard, forever, void, (>=>), mplus)
import Control.Monad.IO.Class
import Data.Aeson (encode, decode, ToJSON(..), FromJSON(..), fromJSON, Result(..))
import qualified Data.Aeson as Json
import Data.Traversable (traverse)
import Data.Foldable (traverse_)
import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Control.Exception (catch)
import Prelude hiding (catch)
import Network.WebSockets.Messaging.Container
import Network.WebSockets.Messaging.Message
type Closable c a = c (Maybe a)
type Handler r = Json.Value -> STM (IO r)
type SubId = Int
data Connection = Connection
{ outbox :: !(Closable TQueue Json.Value)
, disconnected :: !(TVar Bool)
, subId :: !(TVar SubId)
, requestSubs :: !(TVar (IntMap (Handler Json.Value)))
, notifySubs :: !(TVar (IntMap (Handler ())))
, reqId :: !(TVar ReqId)
, reqMap :: !(TVar (IntMap (TMVar Json.Value)))
}
newtype Future a = Future (TMVar a)
get :: Future a -> STM a
get (Future var) = readTMVar var
newConnection :: STM Connection
newConnection = Connection
<$> newTQueue
<*> newTVar False
<*> newTVar 0
<*> newTVar IntMap.empty
<*> newTVar IntMap.empty
<*> newTVar 0
<*> newTVar IntMap.empty
requestAsync :: (Message req, FromJSON resp) => Connection -> req -> IO (Future resp)
requestAsync conn@(Connection {..}) !req = do
resp <- newEmptyTMVarIO
fut <- newEmptyTMVarIO
void $ forkIO $ do
rqId <- atomically $ do
rqId <- nextReqId conn
modifyTVar' reqMap $! IntMap.insert rqId resp
send conn $! Request rqId $! msgToJSON req
return rqId
js <- atomically $ do
modifyTVar' reqMap $! IntMap.delete rqId
readTMVar resp
case fromJSON js of
Json.Success dat -> atomically $! putTMVar fut $! dat
Json.Error msg -> do
atomically $! send conn $! ProtocolError $! T.pack msg
error "malformed response"
return $ Future fut
request :: (Message req, FromJSON resp) => Connection -> req -> IO resp
request conn@(Connection {..}) !req = do
rqId <- atomically $ do
rqId' <- readTVar reqId
writeTVar reqId $! rqId' + 1
return rqId'
resp <- newEmptyTMVarIO
atomically $ do
modifyTVar' reqMap $! IntMap.insert rqId resp
send conn $! Request rqId $! msgToJSON req
js <- atomically $ do
modifyTVar' reqMap $! IntMap.delete rqId
readTMVar resp
case fromJSON js of
Json.Success dat -> return dat
Json.Error msg -> do
atomically $! send conn $! ProtocolError $! T.pack msg
error "malformed response"
notify :: Message ntfy => Connection -> ntfy -> STM ()
notify conn = send conn . Notification . msgToJSON
nextSubId :: Connection -> STM SubId
nextSubId (Connection {..}) = do
sId <- readTVar subId
writeTVar subId $! sId + 1
return sId
nextReqId :: Connection -> STM SubId
nextReqId (Connection {..}) = do
rqId <- readTVar reqId
writeTVar reqId $! rqId + 1
return rqId
onRequest :: (Message req, Message resp) => Connection -> (req -> IO resp) -> STM ()
onRequest conn@(Connection {..}) !handler = do
sid <- nextSubId conn
modifyTVar' requestSubs (IntMap.insert sid handler') where
handler' js = case msgFromJSON js of
Json.Success rq -> return $! msgToJSON <$> handler rq
Error _ -> retry
onNotify :: Message req => Connection -> (req -> IO ()) -> STM ()
onNotify conn@(Connection{..}) !handler = do
sid <- nextSubId conn
modifyTVar' notifySubs (IntMap.insert sid handler') where
handler' js = case msgFromJSON js of
Json.Success ntfy -> return $! handler ntfy
Error _ -> retry
onDisconnect :: Connection -> STM () -> STM ()
onDisconnect !(Connection {..}) !handler =
readTVar disconnected >>= guard >> handler
send :: Connection -> Container -> STM ()
send (Connection {..}) = writeTQueue outbox . Just . toJSON
recvJson :: (TextProtocol p, FromJSON a) => WebSockets p (Maybe a)
recvJson = decode <$> receiveData
sendJson :: TextProtocol p => Json.Value -> WebSockets p ()
sendJson = sendTextData . encode
sinkJson :: TextProtocol p => Sink p -> Json.Value -> IO ()
sinkJson sink = sendSink sink . DataMessage . Text . encode
untilClosed :: Closable TQueue a -> (a -> STM b) -> (b -> IO c) -> IO ()
untilClosed chan handler after = loop where
loop =
atomically (readTQueue chan >>= traverse handler)
>>= traverse_ (after >=> const loop)
dispatch :: Connection -> Container -> IO ()
dispatch conn@(Connection {..}) !c = case c of
Request rqId js -> do
handler <- atomically $ do
subs <- readTVar requestSubs
let trySubs = foldr mplus retry $ map ($ js) $ IntMap.elems subs
fmap Just trySubs `orElse` return Nothing
void $ forkIO $ maybe invalidRequest respond handler
where
invalidRequest = atomically . send conn
$ ProtocolError "unrecognized request"
respond h = h >>= atomically . send conn . Response rqId
Notification js -> do
handler <- atomically $ do
subs <- readTVar notifySubs
let trySubs = foldr mplus retry $ map ($ js) $ IntMap.elems subs
fmap Just trySubs `orElse` return Nothing
void $ forkIO $ fromMaybe noHandler handler
where
noHandler = atomically . send conn
$ ProtocolDebug "ignored notification"
Response rqId js -> atomically $ do
h <- IntMap.lookup rqId <$> readTVar reqMap
case h of
Nothing -> responseIgnored
Just var -> putTMVar var js
where
responseIgnored = send conn $ ProtocolDebug "ignored response"
onConnect :: TextProtocol p => (Connection -> IO ()) -> WebSockets p ()
onConnect handler = do
conn@(Connection {..}) <- liftIO $ atomically newConnection
let replyInvalid = send conn $ ProtocolError "invalid message"
handleWriteError (_ :: ConnectionError) = signalDisconnect
handleReadError _ = liftIO signalDisconnect
signalDisconnect = do
atomically $ do
writeTQueue outbox Nothing
writeTVar disconnected True
readLoop = forever $ do
recvJson >>= liftIO . maybe (atomically $ replyInvalid) (dispatch conn)
sink <- getSink
liftIO $ do
void . forkIO $ untilClosed outbox return (sinkJson sink)
`catch` handleWriteError
void . forkIO $ handler conn
catchWsError readLoop handleReadError
|
6280092095aa725df37556792d72712f031a4494ee19f0597df1534d9e4ca394 | privet-kitty/cl-competitive | smawk.lisp | (defpackage :cp/smawk
(:use :cl)
(:export #:smawk))
(in-package :cp/smawk)
(declaim (inline smawk))
(defun smawk (function x1 y1 x2 y2 &key (order #'<))
(declare (fixnum x1 y1 x2 y2))
(assert (and (<= x1 x2) (<= y1 y2)))
(let* ((rowlen (- x2 x1))
(collen (- y2 y1))
(res (make-array rowlen :element-type 'fixnum))
(cols (make-array (+ (* 2 rowlen) collen) :element-type 'fixnum)))
(declare ((mod #.array-dimension-limit) rowlen collen))
(dotimes (i collen)
(setf (aref cols i) (+ y1 i)))
(labels
((recur (xinit cstart cend step)
(declare (fixnum xinit step cstart cend))
(when (>= xinit x2)
(return-from recur (make-array 0 :element-type 'fixnum)))
(let ((rend (ceiling (the fixnum (- x2 xinit)) step))
(new-end cend))
(declare ((mod #.array-dimension-limit) rend new-end))
(loop
with x of-type fixnum = (- xinit step)
for pos from cstart below cend
for c = (aref cols pos)
do (loop
while (and (/= new-end cend)
(funcall order
(funcall function x c)
(funcall function x (aref cols (- new-end 1)))))
do (decf new-end)
(decf x step))
(when (< (- new-end cend) rend)
(setf (aref cols new-end) c)
(incf new-end)
(incf x step)))
(recur (+ xinit step) cend new-end (* 2 step))
(let ((pos cend))
(declare ((mod #.array-dimension-limit) pos))
(loop for x of-type fixnum from xinit below x2 by (* 2 step)
for end = (if (< (+ x step) x2)
(aref res (- (the fixnum (+ x step)) x1))
(aref cols (- new-end 1)))
for col = (aref cols pos)
do (loop while (< (aref cols pos) end)
do (incf pos)
when (funcall order
(funcall function x (aref cols pos))
(funcall function x col))
do (setq col (aref cols pos)))
(setf (aref res (- x x1)) col))
res))))
(recur x1 0 collen 1)
res)))
| null | https://raw.githubusercontent.com/privet-kitty/cl-competitive/2c4d4e82af2a1672eef334a0a229c67d4fb37188/module/smawk.lisp | lisp | (defpackage :cp/smawk
(:use :cl)
(:export #:smawk))
(in-package :cp/smawk)
(declaim (inline smawk))
(defun smawk (function x1 y1 x2 y2 &key (order #'<))
(declare (fixnum x1 y1 x2 y2))
(assert (and (<= x1 x2) (<= y1 y2)))
(let* ((rowlen (- x2 x1))
(collen (- y2 y1))
(res (make-array rowlen :element-type 'fixnum))
(cols (make-array (+ (* 2 rowlen) collen) :element-type 'fixnum)))
(declare ((mod #.array-dimension-limit) rowlen collen))
(dotimes (i collen)
(setf (aref cols i) (+ y1 i)))
(labels
((recur (xinit cstart cend step)
(declare (fixnum xinit step cstart cend))
(when (>= xinit x2)
(return-from recur (make-array 0 :element-type 'fixnum)))
(let ((rend (ceiling (the fixnum (- x2 xinit)) step))
(new-end cend))
(declare ((mod #.array-dimension-limit) rend new-end))
(loop
with x of-type fixnum = (- xinit step)
for pos from cstart below cend
for c = (aref cols pos)
do (loop
while (and (/= new-end cend)
(funcall order
(funcall function x c)
(funcall function x (aref cols (- new-end 1)))))
do (decf new-end)
(decf x step))
(when (< (- new-end cend) rend)
(setf (aref cols new-end) c)
(incf new-end)
(incf x step)))
(recur (+ xinit step) cend new-end (* 2 step))
(let ((pos cend))
(declare ((mod #.array-dimension-limit) pos))
(loop for x of-type fixnum from xinit below x2 by (* 2 step)
for end = (if (< (+ x step) x2)
(aref res (- (the fixnum (+ x step)) x1))
(aref cols (- new-end 1)))
for col = (aref cols pos)
do (loop while (< (aref cols pos) end)
do (incf pos)
when (funcall order
(funcall function x (aref cols pos))
(funcall function x col))
do (setq col (aref cols pos)))
(setf (aref res (- x x1)) col))
res))))
(recur x1 0 collen 1)
res)))
| |
fa1a54335b247e0590016a74ed11071bd35c58cd38e6556c4b08f6ebd9008707 | jyh/metaprl | sil_state_type.ml |
* Type judgments for the state .
*
* ----------------------------------------------------------------
*
* This file is part of MetaPRL , a modular , higher order
* logical framework that provides a logical programming
* environment for OCaml and other languages .
*
* See the file doc / htmlman / default.html or visit /
* for more information .
*
* Copyright ( C ) 1999 , Cornell University
*
* This program is free software ; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation ; either version 2
* of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 675 Mass Ave , Cambridge , , USA .
*
* Author :
*
* Type judgments for the state.
*
* ----------------------------------------------------------------
*
* This file is part of MetaPRL, a modular, higher order
* logical framework that provides a logical programming
* environment for OCaml and other languages.
*
* See the file doc/htmlman/default.html or visit /
* for more information.
*
* Copyright (C) 1999 Jason Hickey, Cornell University
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* Author: Jason Hickey
*
*)
extends Sil_state
open Tactic_type
(************************************************************************
* SYNTAX *
************************************************************************)
(*
* Type definitions.
*)
declare label_type
declare decl_type[i:l]
declare state_empty_decl
declare state_alloc_decl{'r; 't}
declare state_store_decl{'r; 'l; 't}
declare in_domain{'r; 'decl}
declare state_type{'decl}
(************************************************************************
* DISPLAY *
************************************************************************)
dform label_type_df : label_type =
`"Label"
dform decl_type_df : decl_type[i:l] =
`"Decl[" slot[i:l] `"]"
dform in_domain_df : in_domain{'r; 'l} =
slot{'l} " " Mpsymbols!member `" Dom(" slot{'r} `")"
dform state_empty_decl_df : state_empty_decl =
`"[]"
dform state_alloc_decl_df : state_alloc_decl{'r; 't} =
slot{'r} `"@" slot{'t}
dform state_store_decl_df : state_store_decl{'r; 'l; 't} =
slot{'r} `"." slot{'l} `"=" slot{'t}
dform state_type_df : state_type{'decl} =
`"{" slot{'decl} `"}"
(************************************************************************
* DEFINITIONS *
************************************************************************)
prim_rw unfold_label_type : label_type <--> int
prim_rw unfold_decl_type : decl_type[i:l] <--> list{univ[i:l]}
prim_rw unfold_in_domain : in_domain{'r; 'l} <--> (ge{'l; 0} & lt{'l; length{'r}})
prim_rw unfold_state_empty_decl : state_empty_decl <--> nil
prim_rw unfold_state_alloc_decl : state_alloc_decl{'r; 't} <-->
append{'r; cons{'t; nil}}
prim_rw unfold_state_store_decl : state_store_decl{'r; 'l; 't} <-->
replace_nth{'r; 'l; 't}
prim_rw unfold_state_type : state_type{'decl} <-->
(l: { i: label_type | in_domain{'decl; 'i} } -> nth{'decl; 'l})
(************************************************************************
* RULES *
************************************************************************)
(*
* Need this unhiding.
*)
interactive unhide_in_domain {| elim [] |} 'H :
sequent { <H>; u: in_domain{'decl; 'l}; <J['u]> >- 'C['u] } -->
sequent { <H>; u: hide{in_domain{'decl; 'l}}; <J['u]> >- 'C['u] }
(*
* Typing rules.
*)
interactive label_type_member {| intro [] |} :
sequent { <H> >- member{univ[i:l]; label_type} }
interactive label_type_type {| intro [] |} :
sequent { <H> >- "type"{label_type} }
interactive in_domain_member {| intro [] |} :
[wf] sequent { <H> >- member{decl_type[i:l]; 'r} } -->
[wf] sequent { <H> >- member{label_type; 'l} } -->
sequent { <H> >- member{univ[i:l]; in_domain{'r; 'l}} }
interactive in_domain_type {| intro [] |} decl_type[i:l] :
[wf] sequent { <H> >- member{decl_type[i:l]; 'r} } -->
[wf] sequent { <H> >- member{label_type; 'l} } -->
sequent { <H> >- "type"{in_domain{'r; 'l}} }
interactive empty_member {| intro [] |} :
sequent { <H> >- member{decl_type[i:l]; state_empty_decl} }
interactive alloc_member {| intro [] |} :
[wf] sequent { <H> >- member{decl_type[i:l]; 'r} } -->
[wf] sequent { <H> >- member{univ[i:l]; 't} } -->
sequent { <H> >- member{decl_type[i:l]; state_alloc_decl{'r; 't}} }
interactive store_member {| intro [] |} :
[wf] sequent { <H> >- member{decl_type[i:l]; 'r} } -->
[wf] sequent { <H> >- in_domain{'r; 'l} } -->
[wf] sequent { <H> >- member{univ[i:l]; 't} } -->
sequent { <H> >- member{decl_type[i:l]; state_store_decl{'r; 'l; 't}} }
interactive state_type_member {| intro [] |} :
[wf] sequent { <H> >- member{decl_type[i:l]; 'decl} } -->
sequent { <H> >- member{univ[i:l]; state_type{'decl}} }
interactive state_type_type {| intro [] |} decl_type[i:l] :
[wf] sequent { <H> >- member{decl_type[i:l]; 'decl} } -->
sequent { <H> >- "type"{state_type{'decl}} }
(*
* Membership of state operations.
*)
interactive empty_member2 {| intro [] |} :
sequent { <H> >- member{state_type{state_empty_decl}; empty} }
* -*-
* Local Variables :
* Caml - master : " nl "
* End :
* -*-
* -*-
* Local Variables:
* Caml-master: "nl"
* End:
* -*-
*)
| null | https://raw.githubusercontent.com/jyh/metaprl/51ba0bbbf409ecb7f96f5abbeb91902fdec47a19/theories/sil/sil_state_type.ml | ocaml | ***********************************************************************
* SYNTAX *
***********************************************************************
* Type definitions.
***********************************************************************
* DISPLAY *
***********************************************************************
***********************************************************************
* DEFINITIONS *
***********************************************************************
***********************************************************************
* RULES *
***********************************************************************
* Need this unhiding.
* Typing rules.
* Membership of state operations.
|
* Type judgments for the state .
*
* ----------------------------------------------------------------
*
* This file is part of MetaPRL , a modular , higher order
* logical framework that provides a logical programming
* environment for OCaml and other languages .
*
* See the file doc / htmlman / default.html or visit /
* for more information .
*
* Copyright ( C ) 1999 , Cornell University
*
* This program is free software ; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation ; either version 2
* of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 675 Mass Ave , Cambridge , , USA .
*
* Author :
*
* Type judgments for the state.
*
* ----------------------------------------------------------------
*
* This file is part of MetaPRL, a modular, higher order
* logical framework that provides a logical programming
* environment for OCaml and other languages.
*
* See the file doc/htmlman/default.html or visit /
* for more information.
*
* Copyright (C) 1999 Jason Hickey, Cornell University
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* Author: Jason Hickey
*
*)
extends Sil_state
open Tactic_type
declare label_type
declare decl_type[i:l]
declare state_empty_decl
declare state_alloc_decl{'r; 't}
declare state_store_decl{'r; 'l; 't}
declare in_domain{'r; 'decl}
declare state_type{'decl}
dform label_type_df : label_type =
`"Label"
dform decl_type_df : decl_type[i:l] =
`"Decl[" slot[i:l] `"]"
dform in_domain_df : in_domain{'r; 'l} =
slot{'l} " " Mpsymbols!member `" Dom(" slot{'r} `")"
dform state_empty_decl_df : state_empty_decl =
`"[]"
dform state_alloc_decl_df : state_alloc_decl{'r; 't} =
slot{'r} `"@" slot{'t}
dform state_store_decl_df : state_store_decl{'r; 'l; 't} =
slot{'r} `"." slot{'l} `"=" slot{'t}
dform state_type_df : state_type{'decl} =
`"{" slot{'decl} `"}"
prim_rw unfold_label_type : label_type <--> int
prim_rw unfold_decl_type : decl_type[i:l] <--> list{univ[i:l]}
prim_rw unfold_in_domain : in_domain{'r; 'l} <--> (ge{'l; 0} & lt{'l; length{'r}})
prim_rw unfold_state_empty_decl : state_empty_decl <--> nil
prim_rw unfold_state_alloc_decl : state_alloc_decl{'r; 't} <-->
append{'r; cons{'t; nil}}
prim_rw unfold_state_store_decl : state_store_decl{'r; 'l; 't} <-->
replace_nth{'r; 'l; 't}
prim_rw unfold_state_type : state_type{'decl} <-->
(l: { i: label_type | in_domain{'decl; 'i} } -> nth{'decl; 'l})
interactive unhide_in_domain {| elim [] |} 'H :
sequent { <H>; u: in_domain{'decl; 'l}; <J['u]> >- 'C['u] } -->
sequent { <H>; u: hide{in_domain{'decl; 'l}}; <J['u]> >- 'C['u] }
interactive label_type_member {| intro [] |} :
sequent { <H> >- member{univ[i:l]; label_type} }
interactive label_type_type {| intro [] |} :
sequent { <H> >- "type"{label_type} }
interactive in_domain_member {| intro [] |} :
[wf] sequent { <H> >- member{decl_type[i:l]; 'r} } -->
[wf] sequent { <H> >- member{label_type; 'l} } -->
sequent { <H> >- member{univ[i:l]; in_domain{'r; 'l}} }
interactive in_domain_type {| intro [] |} decl_type[i:l] :
[wf] sequent { <H> >- member{decl_type[i:l]; 'r} } -->
[wf] sequent { <H> >- member{label_type; 'l} } -->
sequent { <H> >- "type"{in_domain{'r; 'l}} }
interactive empty_member {| intro [] |} :
sequent { <H> >- member{decl_type[i:l]; state_empty_decl} }
interactive alloc_member {| intro [] |} :
[wf] sequent { <H> >- member{decl_type[i:l]; 'r} } -->
[wf] sequent { <H> >- member{univ[i:l]; 't} } -->
sequent { <H> >- member{decl_type[i:l]; state_alloc_decl{'r; 't}} }
interactive store_member {| intro [] |} :
[wf] sequent { <H> >- member{decl_type[i:l]; 'r} } -->
[wf] sequent { <H> >- in_domain{'r; 'l} } -->
[wf] sequent { <H> >- member{univ[i:l]; 't} } -->
sequent { <H> >- member{decl_type[i:l]; state_store_decl{'r; 'l; 't}} }
interactive state_type_member {| intro [] |} :
[wf] sequent { <H> >- member{decl_type[i:l]; 'decl} } -->
sequent { <H> >- member{univ[i:l]; state_type{'decl}} }
interactive state_type_type {| intro [] |} decl_type[i:l] :
[wf] sequent { <H> >- member{decl_type[i:l]; 'decl} } -->
sequent { <H> >- "type"{state_type{'decl}} }
interactive empty_member2 {| intro [] |} :
sequent { <H> >- member{state_type{state_empty_decl}; empty} }
* -*-
* Local Variables :
* Caml - master : " nl "
* End :
* -*-
* -*-
* Local Variables:
* Caml-master: "nl"
* End:
* -*-
*)
|
feb381d8ca3e5a2c0988394a209b290518a9d3aebfc0a20d9c9a156c4fbd51b8 | RutledgePaulV/ring-firewall-middleware | core_test.clj | (ns ring-firewall-middleware.core-test
(:require [clojure.test :refer :all]
[ring-firewall-middleware.core :refer :all]))
(deftest wrap-allow-ips-test
(let [handler (fn [req] {:status 200 :body "You have access!"})
protected (wrap-allow-ips handler {:allow-list ["10.0.0.0/8"]})]
(testing "remote-addr only"
(is (= 200 (:status (protected {:remote-addr "10.20.206.46"}))))
(is (= 403 (:status (protected {:remote-addr "192.1.1.1"})))))
(testing "remote and forwarded"
(is (= 200 (:status (protected {:headers {"x-forwarded-for" "10.20.205.24"}
:remote-addr "10.20.206.46"}))))
(is (= 403 (:status (protected {:headers {"x-forwarded-for" "10.20.205.24,192.10.1.1"}
:remote-addr "10.20.206.46"})))))))
(deftest wrap-deny-ips-test
(let [handler (fn [req] {:status 200 :body "You have access!"})
protected (wrap-deny-ips handler {:deny-list ["10.0.0.0/8"]})]
(testing "remote-addr only"
(is (= 403 (:status (protected {:remote-addr "10.20.206.46"}))))
(is (= 200 (:status (protected {:remote-addr "192.1.1.1"})))))
(testing "remote and forwarded"
(is (= 200 (:status (protected {:headers {"x-forwarded-for" "192.1.1.2"}
:remote-addr "192.1.1.1"}))))
(is (= 403 (:status (protected {:headers {"x-forwarded-for" "10.20.205.24,192.10.1.2"}
:remote-addr "192.1.1.1"})))))))
(deftest wrap-blocking-concurrency-limit-test
(let [handler (fn [req] (Thread/sleep 1000) {:status 200 :body "Response!"})
protected (wrap-concurrency-throttle handler {:max-concurrent 1})
start (System/currentTimeMillis)
one (future (protected {}))
two (future (protected {}))]
(deref one)
(deref two)
(is (<= 2000 (- (System/currentTimeMillis) start)))))
(deftest wrap-rejecting-concurrency-limit-test
(let [handler (fn [req] (Thread/sleep 1000) {:status 200 :body "Response!"})
protected (wrap-concurrency-limit handler {:max-concurrent 1})
one (future (protected {}))
two (future (protected {}))
responses [(deref one) (deref two)]]
(is (not-empty (filter #(= 429 (:status %)) responses)))
(is (not-empty (filter #(= 200 (:status %)) responses)))))
(deftest wrap-maintenance-mode-test
(let [handler (fn [request]
(when (number? request)
(Thread/sleep request))
{:status 200 :body "Under the hood"})
protected (wrap-maintenance-limit handler)
started (promise)
finished (promise)]
(is (= 200 (:status (protected {}))))
(future
(with-maintenance-mode :world
(deliver started true)
(Thread/sleep 2000))
(deliver finished true))
(deref started)
(is (= 503 (:status (protected {}))))
(deref finished)
(is (= 200 (:status (protected {}))))))
| null | https://raw.githubusercontent.com/RutledgePaulV/ring-firewall-middleware/bdd85a41ef3350d9e2bfda2d5f814177b0e7c9b1/test/ring_firewall_middleware/core_test.clj | clojure | (ns ring-firewall-middleware.core-test
(:require [clojure.test :refer :all]
[ring-firewall-middleware.core :refer :all]))
(deftest wrap-allow-ips-test
(let [handler (fn [req] {:status 200 :body "You have access!"})
protected (wrap-allow-ips handler {:allow-list ["10.0.0.0/8"]})]
(testing "remote-addr only"
(is (= 200 (:status (protected {:remote-addr "10.20.206.46"}))))
(is (= 403 (:status (protected {:remote-addr "192.1.1.1"})))))
(testing "remote and forwarded"
(is (= 200 (:status (protected {:headers {"x-forwarded-for" "10.20.205.24"}
:remote-addr "10.20.206.46"}))))
(is (= 403 (:status (protected {:headers {"x-forwarded-for" "10.20.205.24,192.10.1.1"}
:remote-addr "10.20.206.46"})))))))
(deftest wrap-deny-ips-test
(let [handler (fn [req] {:status 200 :body "You have access!"})
protected (wrap-deny-ips handler {:deny-list ["10.0.0.0/8"]})]
(testing "remote-addr only"
(is (= 403 (:status (protected {:remote-addr "10.20.206.46"}))))
(is (= 200 (:status (protected {:remote-addr "192.1.1.1"})))))
(testing "remote and forwarded"
(is (= 200 (:status (protected {:headers {"x-forwarded-for" "192.1.1.2"}
:remote-addr "192.1.1.1"}))))
(is (= 403 (:status (protected {:headers {"x-forwarded-for" "10.20.205.24,192.10.1.2"}
:remote-addr "192.1.1.1"})))))))
(deftest wrap-blocking-concurrency-limit-test
(let [handler (fn [req] (Thread/sleep 1000) {:status 200 :body "Response!"})
protected (wrap-concurrency-throttle handler {:max-concurrent 1})
start (System/currentTimeMillis)
one (future (protected {}))
two (future (protected {}))]
(deref one)
(deref two)
(is (<= 2000 (- (System/currentTimeMillis) start)))))
(deftest wrap-rejecting-concurrency-limit-test
(let [handler (fn [req] (Thread/sleep 1000) {:status 200 :body "Response!"})
protected (wrap-concurrency-limit handler {:max-concurrent 1})
one (future (protected {}))
two (future (protected {}))
responses [(deref one) (deref two)]]
(is (not-empty (filter #(= 429 (:status %)) responses)))
(is (not-empty (filter #(= 200 (:status %)) responses)))))
(deftest wrap-maintenance-mode-test
(let [handler (fn [request]
(when (number? request)
(Thread/sleep request))
{:status 200 :body "Under the hood"})
protected (wrap-maintenance-limit handler)
started (promise)
finished (promise)]
(is (= 200 (:status (protected {}))))
(future
(with-maintenance-mode :world
(deliver started true)
(Thread/sleep 2000))
(deliver finished true))
(deref started)
(is (= 503 (:status (protected {}))))
(deref finished)
(is (= 200 (:status (protected {}))))))
| |
2c7c2f0ce6c2f3fd48569f384b7731d4efd4c5a560121185d90deb6924705ff7 | NorfairKing/cursor | PromoteSpec.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
module Cursor.Simple.Tree.PromoteSpec
( spec,
)
where
import Cursor.Simple.Tree hiding (TreeCursor)
import Cursor.Simple.Tree.Gen ()
import Cursor.Simple.Tree.TestUtils
import Cursor.Tree
( TreeCursor (..),
closedForest,
emptyCForest,
openForest,
)
import Data.Tree
import Test.Hspec
import Test.Validity
spec :: Spec
spec = do
functorSpec @PromoteElemResult
applicativeSpec @PromoteElemResult
monadSpec @PromoteElemResult
describe "treeCursorPromoteElem" $ do
it "produces valids on valids" $ producesValid $ treeCursorPromoteElem @Bool
it "Works on the example from the docs" $
let promoteStart =
TreeCursor
{ treeAbove =
Just
TreeAbove
{ treeAboveLefts = [node 'b' [node 'c' []]],
treeAboveAbove =
Just
TreeAbove
{ treeAboveLefts = [],
treeAboveAbove = Nothing,
treeAboveNode = 'p',
treeAboveRights = [node 'h' []]
},
treeAboveNode = 'a',
treeAboveRights = [node 'f' [node 'g' []]]
},
treeCurrent = 'd',
treeBelow = closedForest [Node 'e' []]
}
promoteEnd =
TreeCursor
{ treeAbove =
Just
TreeAbove
{ treeAboveLefts =
[ CNode 'a' $
openForest
[ CNode 'b' $
openForest [CNode 'c' emptyCForest, CNode 'e' emptyCForest],
CNode 'f' $ closedForest [Node 'g' []]
]
],
treeAboveAbove = Nothing,
treeAboveNode = 'p',
treeAboveRights = [CNode 'h' emptyCForest]
},
treeCurrent = 'd',
treeBelow = emptyCForest
}
in case treeCursorPromoteElem promoteStart of
PromotedElem tc' -> tc' `treeShouldBe` promoteEnd
_ -> expectationFailure "treeCursorPromoteElem should not have failed"
it "promotes the current node to the level of its parent" pending
functorSpec @PromoteResult
applicativeSpec @PromoteResult
monadSpec @PromoteResult
describe "treeCursorPromoteSubTree" $ do
it "produces valids on valids" $ producesValid $ treeCursorPromoteSubTree @Bool
it "Works on the example from the docs" $
let promoteStart =
TreeCursor
{ treeAbove =
Just
TreeAbove
{ treeAboveLefts = [CNode 'b' $ closedForest [Node 'c' []]],
treeAboveAbove =
Just
TreeAbove
{ treeAboveLefts = [],
treeAboveAbove = Nothing,
treeAboveNode = 'p',
treeAboveRights = [node 'h' []]
},
treeAboveNode = 'a',
treeAboveRights = [CNode 'f' $ closedForest [Node 'g' []]]
},
treeCurrent = 'd',
treeBelow = closedForest [Node 'e' []]
}
promoteEnd =
TreeCursor
{ treeAbove =
Just
TreeAbove
{ treeAboveLefts =
[ CNode 'a' $
openForest
[ CNode 'b' $ closedForest [Node 'c' []],
CNode 'f' $ closedForest [Node 'g' []]
]
],
treeAboveAbove = Nothing,
treeAboveNode = 'p',
treeAboveRights = [CNode 'h' $ closedForest []]
},
treeCurrent = 'd',
treeBelow = closedForest [Node 'e' []]
}
in case treeCursorPromoteSubTree promoteStart of
Promoted tc' -> tc' `treeShouldBe` promoteEnd
_ -> expectationFailure "treeCursorPromoteSubTree should not have failed"
it "promotes the current subtree to the level of its parent" pending
| null | https://raw.githubusercontent.com/NorfairKing/cursor/ff27e78281430c298a25a7805c9c61ca1e69f4c5/cursor-gen/test/Cursor/Simple/Tree/PromoteSpec.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes # | # LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
module Cursor.Simple.Tree.PromoteSpec
( spec,
)
where
import Cursor.Simple.Tree hiding (TreeCursor)
import Cursor.Simple.Tree.Gen ()
import Cursor.Simple.Tree.TestUtils
import Cursor.Tree
( TreeCursor (..),
closedForest,
emptyCForest,
openForest,
)
import Data.Tree
import Test.Hspec
import Test.Validity
spec :: Spec
spec = do
functorSpec @PromoteElemResult
applicativeSpec @PromoteElemResult
monadSpec @PromoteElemResult
describe "treeCursorPromoteElem" $ do
it "produces valids on valids" $ producesValid $ treeCursorPromoteElem @Bool
it "Works on the example from the docs" $
let promoteStart =
TreeCursor
{ treeAbove =
Just
TreeAbove
{ treeAboveLefts = [node 'b' [node 'c' []]],
treeAboveAbove =
Just
TreeAbove
{ treeAboveLefts = [],
treeAboveAbove = Nothing,
treeAboveNode = 'p',
treeAboveRights = [node 'h' []]
},
treeAboveNode = 'a',
treeAboveRights = [node 'f' [node 'g' []]]
},
treeCurrent = 'd',
treeBelow = closedForest [Node 'e' []]
}
promoteEnd =
TreeCursor
{ treeAbove =
Just
TreeAbove
{ treeAboveLefts =
[ CNode 'a' $
openForest
[ CNode 'b' $
openForest [CNode 'c' emptyCForest, CNode 'e' emptyCForest],
CNode 'f' $ closedForest [Node 'g' []]
]
],
treeAboveAbove = Nothing,
treeAboveNode = 'p',
treeAboveRights = [CNode 'h' emptyCForest]
},
treeCurrent = 'd',
treeBelow = emptyCForest
}
in case treeCursorPromoteElem promoteStart of
PromotedElem tc' -> tc' `treeShouldBe` promoteEnd
_ -> expectationFailure "treeCursorPromoteElem should not have failed"
it "promotes the current node to the level of its parent" pending
functorSpec @PromoteResult
applicativeSpec @PromoteResult
monadSpec @PromoteResult
describe "treeCursorPromoteSubTree" $ do
it "produces valids on valids" $ producesValid $ treeCursorPromoteSubTree @Bool
it "Works on the example from the docs" $
let promoteStart =
TreeCursor
{ treeAbove =
Just
TreeAbove
{ treeAboveLefts = [CNode 'b' $ closedForest [Node 'c' []]],
treeAboveAbove =
Just
TreeAbove
{ treeAboveLefts = [],
treeAboveAbove = Nothing,
treeAboveNode = 'p',
treeAboveRights = [node 'h' []]
},
treeAboveNode = 'a',
treeAboveRights = [CNode 'f' $ closedForest [Node 'g' []]]
},
treeCurrent = 'd',
treeBelow = closedForest [Node 'e' []]
}
promoteEnd =
TreeCursor
{ treeAbove =
Just
TreeAbove
{ treeAboveLefts =
[ CNode 'a' $
openForest
[ CNode 'b' $ closedForest [Node 'c' []],
CNode 'f' $ closedForest [Node 'g' []]
]
],
treeAboveAbove = Nothing,
treeAboveNode = 'p',
treeAboveRights = [CNode 'h' $ closedForest []]
},
treeCurrent = 'd',
treeBelow = closedForest [Node 'e' []]
}
in case treeCursorPromoteSubTree promoteStart of
Promoted tc' -> tc' `treeShouldBe` promoteEnd
_ -> expectationFailure "treeCursorPromoteSubTree should not have failed"
it "promotes the current subtree to the level of its parent" pending
|
cd35691c5873e29b41bfd28761b0274fb745aa12256533feb359793ec83ce994 | art-w/sherlodoc | www.ml | module Storage = Db.Storage
module Succ = Query.Succ
module Sort = Query.Sort
let db_filename = Sys.argv.(1)
let shards =
let h = Storage.db_open_in db_filename in
Array.to_list h.Storage.shards
let search (has_typ, query_name, query_typ) =
let open Lwt.Syntax in
let* results_name = Query.find_names ~shards query_name in
let+ results =
if has_typ
then
let+ results_typ = Query.find_inter ~shards query_typ in
Succ.inter results_name results_typ
else Lwt.return results_name
in
results
open Lwt.Syntax
module H = Tyxml.Html
let api raw_query =
let has_typ, query_name, query_typ, query_typ_arrow, pretty =
Query.Parser.of_string raw_query
in
let* results = search (has_typ, query_name, query_typ) in
let+ results = Succ.to_list results in
let results = Sort.list query_name query_typ_arrow results in
Ui.render ~pretty results
let api query =
if String.trim query = "" then Lwt.return Ui.explain else api query
open Lwt.Syntax
let get_query params = Option.value ~default:"" (Dream.query params "q")
let root ~query fn _params =
let* result = fn query in
Dream.html result
let string_of_tyxml html = Format.asprintf "%a" (Tyxml.Html.pp ()) html
let string_of_tyxml' html = Format.asprintf "%a" (Tyxml.Html.pp_elt ()) html
let root fn params =
let query = get_query params in
try root ~query fn params
with err ->
Format.printf "ERROR: %S@." (Printexc.to_string err) ;
Dream.html (string_of_tyxml @@ Ui.template query Ui.explain)
let root fn params =
try root fn params
with _ -> Dream.html (string_of_tyxml @@ Ui.template "" Ui.explain)
let cache : int -> Dream.middleware =
fun max_age f req ->
let+ response = f req in
Dream.add_header response "Cache-Control"
("public, max-age=" ^ string_of_int max_age) ;
response
let () =
Dream.run ~interface:"127.0.0.1" ~port:1234
@@ Dream.logger @@ cache 3600
@@ Dream.router
[ Dream.get "/"
(root (fun q ->
let+ result = api q in
string_of_tyxml @@ Ui.template q result))
; Dream.get "/api"
(root (fun q ->
let+ result = api q in
string_of_tyxml' result))
; Dream.get "/s.css" (Dream.from_filesystem "static" "style.css")
; Dream.get "/robots.txt" (Dream.from_filesystem "static" "robots.txt")
; Dream.get "/favicon.ico" (Dream.from_filesystem "static" "favicon.ico")
]
| null | https://raw.githubusercontent.com/art-w/sherlodoc/e560cd7448ec61723afda5f2e5d94cb52635fd0c/www/www.ml | ocaml | module Storage = Db.Storage
module Succ = Query.Succ
module Sort = Query.Sort
let db_filename = Sys.argv.(1)
let shards =
let h = Storage.db_open_in db_filename in
Array.to_list h.Storage.shards
let search (has_typ, query_name, query_typ) =
let open Lwt.Syntax in
let* results_name = Query.find_names ~shards query_name in
let+ results =
if has_typ
then
let+ results_typ = Query.find_inter ~shards query_typ in
Succ.inter results_name results_typ
else Lwt.return results_name
in
results
open Lwt.Syntax
module H = Tyxml.Html
let api raw_query =
let has_typ, query_name, query_typ, query_typ_arrow, pretty =
Query.Parser.of_string raw_query
in
let* results = search (has_typ, query_name, query_typ) in
let+ results = Succ.to_list results in
let results = Sort.list query_name query_typ_arrow results in
Ui.render ~pretty results
let api query =
if String.trim query = "" then Lwt.return Ui.explain else api query
open Lwt.Syntax
let get_query params = Option.value ~default:"" (Dream.query params "q")
let root ~query fn _params =
let* result = fn query in
Dream.html result
let string_of_tyxml html = Format.asprintf "%a" (Tyxml.Html.pp ()) html
let string_of_tyxml' html = Format.asprintf "%a" (Tyxml.Html.pp_elt ()) html
let root fn params =
let query = get_query params in
try root ~query fn params
with err ->
Format.printf "ERROR: %S@." (Printexc.to_string err) ;
Dream.html (string_of_tyxml @@ Ui.template query Ui.explain)
let root fn params =
try root fn params
with _ -> Dream.html (string_of_tyxml @@ Ui.template "" Ui.explain)
let cache : int -> Dream.middleware =
fun max_age f req ->
let+ response = f req in
Dream.add_header response "Cache-Control"
("public, max-age=" ^ string_of_int max_age) ;
response
let () =
Dream.run ~interface:"127.0.0.1" ~port:1234
@@ Dream.logger @@ cache 3600
@@ Dream.router
[ Dream.get "/"
(root (fun q ->
let+ result = api q in
string_of_tyxml @@ Ui.template q result))
; Dream.get "/api"
(root (fun q ->
let+ result = api q in
string_of_tyxml' result))
; Dream.get "/s.css" (Dream.from_filesystem "static" "style.css")
; Dream.get "/robots.txt" (Dream.from_filesystem "static" "robots.txt")
; Dream.get "/favicon.ico" (Dream.from_filesystem "static" "favicon.ico")
]
| |
689466f94f15176f3be1e7fbfb762fbdb62fe59932227757de426165d5bc6b54 | clash-lang/clash-compiler | TestIndex.hs | module TestIndex where
import Clash.Prelude
type NrI = Index 8
topEntity = c1
c1 :: Signal System (Maybe NrI) -> Signal System (Maybe NrI)
c1 = fmap (fmap (+1))
| null | https://raw.githubusercontent.com/clash-lang/clash-compiler/8e461a910f2f37c900705a0847a9b533bce4d2ea/tests/shouldwork/Basic/TestIndex.hs | haskell | module TestIndex where
import Clash.Prelude
type NrI = Index 8
topEntity = c1
c1 :: Signal System (Maybe NrI) -> Signal System (Maybe NrI)
c1 = fmap (fmap (+1))
| |
7f2a80f5728981db4e95411425fa7e4c9954bde840ab97d8464107cd8fd627e8 | orionsbelt-battlegrounds/obb-rules | attack.cljc | (ns obb-rules.actions.attack
(:require [obb-rules.actions.direction :as dir]
[obb-rules.element :as element]
[obb-rules.game :as game]
[obb-rules.actions.hooks :as hooks]
[obb-rules.simplifier :as simplify]
[obb-rules.actions.damage-calculator :as calculator]
[obb-rules.result :as result]
[obb-rules.board :as board]
[obb-rules.element :as element]
[obb-rules.unit :as unit]))
(defn- advance-and-check-target
"Goes to the next coordinate and checks if the target is valid"
[board attacker target current-coordinate distance bypassed-element?]
(let [unit (element/element-unit attacker)
direction (element/element-direction attacker)
next-coordinate (dir/update direction current-coordinate)
next-element (board/get-element-consider-removed board next-coordinate)
may-try-next? (or (nil? next-element) (element/catapult-attack? attacker))
bypassed? (or bypassed-element? (and (some? next-element) (not= next-element target)))]
(cond
(= next-element target) (if bypassed-element? :catapult :direct)
(>= distance (unit/unit-range unit)) :out-of-range
may-try-next? (recur board attacker target next-coordinate (+ 1 distance) bypassed?)
:else :out-of-range)))
(defn- attack-restrictions
"Checks if the attack is possible"
[board player attacker target]
(cond
(nil? attacker) "EmptyAttacker"
(not (game/player-turn? board player)) "StateMismatch"
(element/frozen? attacker) "FrozenElement"
(nil? target) "EmptyTarget"
(simplify/not-name= player (element/element-player attacker)) "NotOwnedElement"
(= (element/element-player attacker) (element/element-player target)) "SamePlayer"))
(defn- resolve-attack
"Checks if the target element is in range"
[board player attacker target]
(if-let [error-msg (attack-restrictions board player attacker target)]
[false error-msg]
(let [lock-target (advance-and-check-target board attacker target (element/element-coordinate attacker) 1 false)]
(if (= :out-of-range lock-target)
[false "OutOfRange"]
[true lock-target]))))
(defn- build-basic-attack-info
"Builds information about the concrete attack, without the side effects"
[attack-type destroyed target]
(let [target-unit (element/element-unit target)]
[{:attack-type attack-type
:destroyed destroyed
:unit (unit/unit-name target-unit)
:target (element/element-player target)}]))
(defn- process-after-attack
"Processes registered handlers for after-attack"
[board attacker target unused-damage info]
(hooks/process :after-attack {:board board
:attacker attacker
:target target
:unused-damage unused-damage
:info info}))
(defn- process-after-hit
"Processes registered handlers for after-hit"
[board attacker target unused-damage info]
(hooks/process :after-hit {:board board
:attacker attacker
:target target
:unused-damage unused-damage
:info info}))
(defn- process-hooks
"Processes registered handlers on hooks"
[board attacker target unused-damage info]
(let [[board info] (process-after-attack board attacker target unused-damage info)]
(process-after-hit board attacker target unused-damage info)))
(defn- update-board-state
"Updates the board with state from this action, that's relevant to next
actions on the turn"
[board attacker target destroyed]
(let [coordinate (element/element-coordinate target)
attacker-coordinate (element/element-coordinate attacker)]
(-> board
(board/swap-element attacker-coordinate (element/freeze attacker))
(board/destroy-from-element coordinate destroyed))))
(defn- process-attack
"Processes the attack"
[board attacker target attack-type]
(let [[destroyed unused-damage] (calculator/destroyed-with-unused-damage board attacker target)
attacked-board (update-board-state board attacker target destroyed)
attack-info (build-basic-attack-info attack-type destroyed target)
[final-board final-info] (process-hooks attacked-board attacker target unused-damage attack-info)]
(result/action-success final-board 1 "OK" final-info)))
(defn build-attack
"Builds an attack action on a board"
[[coord target-coord]]
(fn attacker [board player]
(let [attacker (board/get-element board coord)
target (board/get-element board target-coord)
[success? info] (resolve-attack board player attacker target)]
(if-not success?
(result/action-failed info)
(process-attack board attacker target info)))))
| null | https://raw.githubusercontent.com/orionsbelt-battlegrounds/obb-rules/97fad6506eb81142f74f4722aca58b80d618bf45/src/obb_rules/actions/attack.cljc | clojure | (ns obb-rules.actions.attack
(:require [obb-rules.actions.direction :as dir]
[obb-rules.element :as element]
[obb-rules.game :as game]
[obb-rules.actions.hooks :as hooks]
[obb-rules.simplifier :as simplify]
[obb-rules.actions.damage-calculator :as calculator]
[obb-rules.result :as result]
[obb-rules.board :as board]
[obb-rules.element :as element]
[obb-rules.unit :as unit]))
(defn- advance-and-check-target
"Goes to the next coordinate and checks if the target is valid"
[board attacker target current-coordinate distance bypassed-element?]
(let [unit (element/element-unit attacker)
direction (element/element-direction attacker)
next-coordinate (dir/update direction current-coordinate)
next-element (board/get-element-consider-removed board next-coordinate)
may-try-next? (or (nil? next-element) (element/catapult-attack? attacker))
bypassed? (or bypassed-element? (and (some? next-element) (not= next-element target)))]
(cond
(= next-element target) (if bypassed-element? :catapult :direct)
(>= distance (unit/unit-range unit)) :out-of-range
may-try-next? (recur board attacker target next-coordinate (+ 1 distance) bypassed?)
:else :out-of-range)))
(defn- attack-restrictions
"Checks if the attack is possible"
[board player attacker target]
(cond
(nil? attacker) "EmptyAttacker"
(not (game/player-turn? board player)) "StateMismatch"
(element/frozen? attacker) "FrozenElement"
(nil? target) "EmptyTarget"
(simplify/not-name= player (element/element-player attacker)) "NotOwnedElement"
(= (element/element-player attacker) (element/element-player target)) "SamePlayer"))
(defn- resolve-attack
"Checks if the target element is in range"
[board player attacker target]
(if-let [error-msg (attack-restrictions board player attacker target)]
[false error-msg]
(let [lock-target (advance-and-check-target board attacker target (element/element-coordinate attacker) 1 false)]
(if (= :out-of-range lock-target)
[false "OutOfRange"]
[true lock-target]))))
(defn- build-basic-attack-info
"Builds information about the concrete attack, without the side effects"
[attack-type destroyed target]
(let [target-unit (element/element-unit target)]
[{:attack-type attack-type
:destroyed destroyed
:unit (unit/unit-name target-unit)
:target (element/element-player target)}]))
(defn- process-after-attack
"Processes registered handlers for after-attack"
[board attacker target unused-damage info]
(hooks/process :after-attack {:board board
:attacker attacker
:target target
:unused-damage unused-damage
:info info}))
(defn- process-after-hit
"Processes registered handlers for after-hit"
[board attacker target unused-damage info]
(hooks/process :after-hit {:board board
:attacker attacker
:target target
:unused-damage unused-damage
:info info}))
(defn- process-hooks
"Processes registered handlers on hooks"
[board attacker target unused-damage info]
(let [[board info] (process-after-attack board attacker target unused-damage info)]
(process-after-hit board attacker target unused-damage info)))
(defn- update-board-state
"Updates the board with state from this action, that's relevant to next
actions on the turn"
[board attacker target destroyed]
(let [coordinate (element/element-coordinate target)
attacker-coordinate (element/element-coordinate attacker)]
(-> board
(board/swap-element attacker-coordinate (element/freeze attacker))
(board/destroy-from-element coordinate destroyed))))
(defn- process-attack
"Processes the attack"
[board attacker target attack-type]
(let [[destroyed unused-damage] (calculator/destroyed-with-unused-damage board attacker target)
attacked-board (update-board-state board attacker target destroyed)
attack-info (build-basic-attack-info attack-type destroyed target)
[final-board final-info] (process-hooks attacked-board attacker target unused-damage attack-info)]
(result/action-success final-board 1 "OK" final-info)))
(defn build-attack
"Builds an attack action on a board"
[[coord target-coord]]
(fn attacker [board player]
(let [attacker (board/get-element board coord)
target (board/get-element board target-coord)
[success? info] (resolve-attack board player attacker target)]
(if-not success?
(result/action-failed info)
(process-attack board attacker target info)))))
| |
eacc548c70a09ff508b3927e5b743d071041321315ef8f6ec7ca995dffa84103 | static-analysis-engineering/codehawk | xSumTypeSerializer.ml | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Analyzer Infrastructure Utilities
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Copyright ( c ) 2020 ( c ) 2021 - 2022 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Analyzer Infrastructure Utilities
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Copyright (c) 2020 Henny Sipma
Copyright (c) 2021-2022 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
(** Serialization of sum types *)
chlib
open CHCommon
(* chutil *)
open CHPrettyUtil
open CHSumTypeSerializer
(* xprlib *)
open XprTypes
let xop_mfts:xop_t mfts_int =
mk_fn_mfts
"xop_t"
[(XNeg, "neg");
(XBNot, "bnot");
(XLNot, "lnot");
(XPlus, "plus");
(XMinus, "minus");
(XMult, "mult");
(XDiv, "div");
(XMod, "mod");
(XPow, "pow");
(XShiftlt, "shiftlt");
(XShiftrt, "shiftrt");
(XLsr, "lsr");
(XAsr, "asr");
(XLsl, "lsl");
(XLt, "lt");
(XGt, "gt");
(XLe, "le");
(XGe, "ge");
(XEq, "eq");
(XNe, "ne");
(XSubset, "subset");
(XDisjoint, "disjoint");
(XBAnd, "band");
(XBXor,"bxor");
(XBOr, "bor");
(XBNor, "bnor");
(XLAnd, "land");
(XLOr, "lor");
(XXlsb, "lsb");
(XXlsh, "lsh");
(XXbyte, "xbyte");
(XNumJoin, "numjoin");
(XNumRange, "range")]
(fun x ->
match x with
| Xf f -> "xf_" ^ f
| _ ->
raise
(CHFailure
(LBLOCK [
STR "internal error in xop_t sumtype";
STR " serializer"])))
(fun s ->
match (nsplit '_' s) with
| ["xf"; f] -> Xf f
| _ ->
raise
(CHFailure
(LBLOCK [
STR "String ";
STR s;
STR " not recognized as a valid xop_t type"])))
class xcst_mcts: [xcst_t] mfts_int =
object
inherit [xcst_t] mcts_t "xcst_t"
method ts (c:xcst_t) =
match c with
| SymSet _ -> "ss"
| IntConst _ -> "ic"
| BoolConst _ -> "bc"
| XRandom -> "r"
| XUnknownInt -> "ui"
| XUnknownSet -> "us"
method tags = [ "bc"; "ic"; "r"; "ss"; "ui"; "us" ]
end
let xcst_mcts = new xcst_mcts
class xpr_mcts_t: [xpr_t] mfts_int =
object
inherit [xpr_t] mcts_t "xpr_t"
method ts (x:xpr_t) =
match x with
| XVar _ -> "v"
| XConst _ -> "c"
| XOp _ -> "x"
| XAttr _ -> "a"
method tags = [ "a"; "c"; "v"; "x" ]
end
let xpr_mcts = new xpr_mcts_t
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/45f39248b404eeab91ae225243d79ec0583c3331/CodeHawk/CH/xprlib/xSumTypeSerializer.ml | ocaml | * Serialization of sum types
chutil
xprlib | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Analyzer Infrastructure Utilities
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Copyright ( c ) 2020 ( c ) 2021 - 2022 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Analyzer Infrastructure Utilities
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Copyright (c) 2020 Henny Sipma
Copyright (c) 2021-2022 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHCommon
open CHPrettyUtil
open CHSumTypeSerializer
open XprTypes
let xop_mfts:xop_t mfts_int =
mk_fn_mfts
"xop_t"
[(XNeg, "neg");
(XBNot, "bnot");
(XLNot, "lnot");
(XPlus, "plus");
(XMinus, "minus");
(XMult, "mult");
(XDiv, "div");
(XMod, "mod");
(XPow, "pow");
(XShiftlt, "shiftlt");
(XShiftrt, "shiftrt");
(XLsr, "lsr");
(XAsr, "asr");
(XLsl, "lsl");
(XLt, "lt");
(XGt, "gt");
(XLe, "le");
(XGe, "ge");
(XEq, "eq");
(XNe, "ne");
(XSubset, "subset");
(XDisjoint, "disjoint");
(XBAnd, "band");
(XBXor,"bxor");
(XBOr, "bor");
(XBNor, "bnor");
(XLAnd, "land");
(XLOr, "lor");
(XXlsb, "lsb");
(XXlsh, "lsh");
(XXbyte, "xbyte");
(XNumJoin, "numjoin");
(XNumRange, "range")]
(fun x ->
match x with
| Xf f -> "xf_" ^ f
| _ ->
raise
(CHFailure
(LBLOCK [
STR "internal error in xop_t sumtype";
STR " serializer"])))
(fun s ->
match (nsplit '_' s) with
| ["xf"; f] -> Xf f
| _ ->
raise
(CHFailure
(LBLOCK [
STR "String ";
STR s;
STR " not recognized as a valid xop_t type"])))
class xcst_mcts: [xcst_t] mfts_int =
object
inherit [xcst_t] mcts_t "xcst_t"
method ts (c:xcst_t) =
match c with
| SymSet _ -> "ss"
| IntConst _ -> "ic"
| BoolConst _ -> "bc"
| XRandom -> "r"
| XUnknownInt -> "ui"
| XUnknownSet -> "us"
method tags = [ "bc"; "ic"; "r"; "ss"; "ui"; "us" ]
end
let xcst_mcts = new xcst_mcts
class xpr_mcts_t: [xpr_t] mfts_int =
object
inherit [xpr_t] mcts_t "xpr_t"
method ts (x:xpr_t) =
match x with
| XVar _ -> "v"
| XConst _ -> "c"
| XOp _ -> "x"
| XAttr _ -> "a"
method tags = [ "a"; "c"; "v"; "x" ]
end
let xpr_mcts = new xpr_mcts_t
|
eb4ab49ec58eb2169d287a77ec370cb07e4d8d06c119ae7c20baa2aa892f761e | mariari/Misc-ML-Scripts | NameSymb.hs | module NameSymb where
--------------------------------------------------------------------------------
import Mari.Library
import qualified Mari.Library.NameSymbol as NameSymbol
import qualified Test.Tasty as T
import qualified Test.Tasty.HUnit as T
import qualified Test.Tasty.QuickCheck as T
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- Top Level Test
--------------------------------------------------------------------------------
top :: T.TestTree
top =
T.testGroup
"NameSymbol tests:"
[idIsId, infixSymbolCase]
--------------------------------------------------------------------------------
-- Functions
--------------------------------------------------------------------------------
idL :: Symbol -> Symbol
idL = NameSymbol.toSymbol . NameSymbol.fromSymbol
idR :: NameSymbol.T -> NameSymbol.T
idR = NameSymbol.fromSymbol . NameSymbol.toSymbol
--------------------------------------------------------------------------------
-- Tests
--------------------------------------------------------------------------------
idIsId :: T.TestTree
idIsId =
T.forAll (T.listOf T.arbitraryUnicodeChar) (appenDot . intern)
|> T.testProperty "toSymbol and fromSymbol are inverses"
------------------
IdL subset
------------------
infixSymbolCase :: T.TestTree
infixSymbolCase =
let str = "Foo.Bar._Foo_-_.-..->.Bar.(Foo)...-..>.."
in T.testCase
"infix functions are properly reserved"
(idL str T.@=? str)
--------------------------------------------------------------------------------
-- property Helpers
--------------------------------------------------------------------------------
appenDot :: Symbol -> T.Property
appenDot symb =
eq symb T..&&. eq dotEnd T..&&. eq dotMiddle T..&&. eq dotStart
where
eq s = idL s T.=== s
--
dotEnd = symb <> "."
dotStart = "." <> symb
dotMiddle = symb <> "." <> symb
| null | https://raw.githubusercontent.com/mariari/Misc-ML-Scripts/376a7d55b565bf9205e697c5c3b78e1d6b6aedcd/Haskell/StandardLibrary/test/NameSymb.hs | haskell | ------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Top Level Test
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Functions
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Tests
------------------------------------------------------------------------------
----------------
----------------
------------------------------------------------------------------------------
property Helpers
------------------------------------------------------------------------------
| module NameSymb where
import Mari.Library
import qualified Mari.Library.NameSymbol as NameSymbol
import qualified Test.Tasty as T
import qualified Test.Tasty.HUnit as T
import qualified Test.Tasty.QuickCheck as T
top :: T.TestTree
top =
T.testGroup
"NameSymbol tests:"
[idIsId, infixSymbolCase]
idL :: Symbol -> Symbol
idL = NameSymbol.toSymbol . NameSymbol.fromSymbol
idR :: NameSymbol.T -> NameSymbol.T
idR = NameSymbol.fromSymbol . NameSymbol.toSymbol
idIsId :: T.TestTree
idIsId =
T.forAll (T.listOf T.arbitraryUnicodeChar) (appenDot . intern)
|> T.testProperty "toSymbol and fromSymbol are inverses"
IdL subset
infixSymbolCase :: T.TestTree
infixSymbolCase =
let str = "Foo.Bar._Foo_-_.-..->.Bar.(Foo)...-..>.."
in T.testCase
"infix functions are properly reserved"
(idL str T.@=? str)
appenDot :: Symbol -> T.Property
appenDot symb =
eq symb T..&&. eq dotEnd T..&&. eq dotMiddle T..&&. eq dotStart
where
eq s = idL s T.=== s
dotEnd = symb <> "."
dotStart = "." <> symb
dotMiddle = symb <> "." <> symb
|
a95cb861d01cc5f0b24c5b4d993635797b1154b1a2c7660e6b667b1c7a7cdcf6 | erlang/otp | fix.erl | -module(fix).
-export([m/0]).
-spec m() -> integer().
-ifdef(error).
m() -> 3.14.
-else.
m() -> 3.
-endif.
| null | https://raw.githubusercontent.com/erlang/otp/1633a01279ec782273e8c100280a5d3f10ac36a8/lib/dialyzer/test/incremental_SUITE_data/fix.erl | erlang | -module(fix).
-export([m/0]).
-spec m() -> integer().
-ifdef(error).
m() -> 3.14.
-else.
m() -> 3.
-endif.
| |
3da8f94408aafdd730dc1f3b05e61f1c5bbe53846322472fea9bcfdfb592d45e | links-lang/links | phrase_typesugar.mli | type 'a error = { msg : string; data : 'a }
val tc_columns :
columns:Column.t list ->
'a Phrase_sugar.phrase ->
(Phrase_type.t, 'a error) result
| null | https://raw.githubusercontent.com/links-lang/links/2923893c80677b67cacc6747a25b5bcd65c4c2b6/lens/phrase_typesugar.mli | ocaml | type 'a error = { msg : string; data : 'a }
val tc_columns :
columns:Column.t list ->
'a Phrase_sugar.phrase ->
(Phrase_type.t, 'a error) result
| |
b797c127f0cbc5c85545821c5605b43166bd5be420d85166894177413958418b | rtoy/ansi-cl-tests | nsubstitute-if-not.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Sat Aug 31 19:00:55 2002
;;;; Contains: Tests for NSUBSTITUTE-IF-NOT
(in-package :cl-test)
(deftest nsubstitute-if-not-list.1
(nsubstitute-if-not 'b 'identity nil)
nil)
(deftest nsubstitute-if-not-list.2
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x) x)
(b b b c))
(deftest nsubstitute-if-not-list.3
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count nil))
(b b b c))
(deftest nsubstitute-if-not-list.4
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 2))
(b b b c))
(deftest nsubstitute-if-not-list.5
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 1))
(b b a c))
(deftest nsubstitute-if-not-list.6
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 0))
(a b a c))
(deftest nsubstitute-if-not-list.7
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count -1))
(a b a c))
(deftest nsubstitute-if-not-list.8
(nsubstitute-if-not 'b (is-not-eql-p 'a) nil :from-end t)
nil)
(deftest nsubstitute-if-not-list.9
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :from-end t))
(b b b c))
(deftest nsubstitute-if-not-list.10
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :from-end t :count nil))
(b b b c))
(deftest nsubstitute-if-not-list.11
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 2 :from-end t))
(b b b c))
(deftest nsubstitute-if-not-list.12
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 1 :from-end t))
(a b b c))
(deftest nsubstitute-if-not-list.13
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 0 :from-end t))
(a b a c))
(deftest nsubstitute-if-not-list.14
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count -1 :from-end t))
(a b a c))
(deftest nsubstitute-if-not-list.15
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig '(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j)))
(equal y (nconc (make-list i :initial-element 'a)
(make-list (- j i) :initial-element 'x)
(make-list (- 10 j) :initial-element 'a))))))
t)
(deftest nsubstitute-if-not-list.16
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig '(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :from-end t)))
(equal y (nconc (make-list i :initial-element 'a)
(make-list (- j i) :initial-element 'x)
(make-list (- 10 j) :initial-element 'a))))))
t)
(deftest nsubstitute-if-not-list.17
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig '(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :count c)))
(equal y (nconc (make-list i :initial-element 'a)
(make-list c :initial-element 'x)
(make-list (- 10 (+ i c)) :initial-element 'a)))))))
t)
(deftest nsubstitute-if-not-list.18
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig '(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :count c :from-end t)))
(equal y (nconc (make-list (- j c) :initial-element 'a)
(make-list c :initial-element 'x)
(make-list (- 10 j) :initial-element 'a)))))))
t)
;;; Tests on vectors
(deftest nsubstitute-if-not-vector.1
(let ((x #())) (nsubstitute-if-not 'b (is-not-eql-p 'a) x))
#())
(deftest nsubstitute-if-not-vector.2
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x))
#(b b b c))
(deftest nsubstitute-if-not-vector.3
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count nil) x)
#(b b b c))
(deftest nsubstitute-if-not-vector.4
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 2))
#(b b b c))
(deftest nsubstitute-if-not-vector.5
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 1))
#(b b a c))
(deftest nsubstitute-if-not-vector.6
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 0))
#(a b a c))
(deftest nsubstitute-if-not-vector.7
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count -1))
#(a b a c))
(deftest nsubstitute-if-not-vector.8
(let ((x #())) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :from-end t))
#())
(deftest nsubstitute-if-not-vector.9
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :from-end t))
#(b b b c))
(deftest nsubstitute-if-not-vector.10
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :from-end t :count nil))
#(b b b c))
(deftest nsubstitute-if-not-vector.11
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 2 :from-end t))
#(b b b c))
(deftest nsubstitute-if-not-vector.12
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 1 :from-end t))
#(a b b c))
(deftest nsubstitute-if-not-vector.13
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 0 :from-end t))
#(a b a c))
(deftest nsubstitute-if-not-vector.14
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count -1 :from-end t))
#(a b a c))
(deftest nsubstitute-if-not-vector.15
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig #(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j)))
(equalp y (concatenate 'simple-vector
(make-array i :initial-element 'a)
(make-array (- j i) :initial-element 'x)
(make-array (- 10 j) :initial-element 'a))))))
t)
(deftest nsubstitute-if-not-vector.16
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig #(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :from-end t)))
(equalp y (concatenate 'simple-vector
(make-array i :initial-element 'a)
(make-array (- j i) :initial-element 'x)
(make-array (- 10 j) :initial-element 'a))))))
t)
(deftest nsubstitute-if-not-vector.17
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig #(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :count c)))
(equalp y (concatenate 'simple-vector
(make-array i :initial-element 'a)
(make-array c :initial-element 'x)
(make-array (- 10 (+ i c)) :initial-element 'a)))))))
t)
(deftest nsubstitute-if-not-vector.18
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig #(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :count c :from-end t)))
(equalp y (concatenate 'simple-vector
(make-array (- j c) :initial-element 'a)
(make-array c :initial-element 'x)
(make-array (- 10 j) :initial-element 'a)))))))
t)
(deftest nsubstitute-if-not-vector.28
(let* ((x (make-array '(10) :initial-contents '(a b a c b a d e a f)
:fill-pointer 5))
(result (nsubstitute-if-not 'z (is-not-eql-p 'a) x)))
result)
#(z b z c b))
(deftest nsubstitute-if-not-vector.29
(let* ((x (make-array '(10) :initial-contents '(a b a c b a d e a f)
:fill-pointer 5))
(result (nsubstitute-if-not 'z (is-not-eql-p 'a) x :from-end t)))
result)
#(z b z c b))
(deftest nsubstitute-if-not-vector.30
(let* ((x (make-array '(10) :initial-contents '(a b a c b a d e a f)
:fill-pointer 5))
(result (nsubstitute-if-not 'z (is-not-eql-p 'a) x :count 1)))
result)
#(z b a c b))
(deftest nsubstitute-if-not-vector.31
(let* ((x (make-array '(10) :initial-contents '(a b a c b a d e a f)
:fill-pointer 5))
(result (nsubstitute-if-not 'z (is-not-eql-p 'a) x
:from-end t :count 1)))
result)
#(a b z c b))
(deftest nsubstitute-if-not-vector.32
(let* ((v1 (copy-seq #(a b c d a b c d a b c d a b c d)))
(v2 (make-array '(8) :displaced-to v1
:displaced-index-offset 3)))
(nsubstitute-if-not 'x (is-not-eql-p 'c) v2 :count 1))
#(d a b x d a b c))
(deftest nsubstitute-if-not-vector.33
(let* ((v1 (copy-seq #(a b c d a b c d a b c d a b c d)))
(v2 (make-array '(8) :displaced-to v1
:displaced-index-offset 3)))
(nsubstitute-if-not 'x (is-not-eql-p 'c) v2 :count 1 :from-end t))
#(d a b c d a b x))
;;; Tests on strings
(deftest nsubstitute-if-not-string.1
(let ((x "")) (nsubstitute-if-not #\b (is-not-eql-p #\a) x))
"")
(deftest nsubstitute-if-not-string.2
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x))
"bbbc")
(deftest nsubstitute-if-not-string.3
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count nil))
"bbbc")
(deftest nsubstitute-if-not-string.4
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 2))
"bbbc")
(deftest nsubstitute-if-not-string.5
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 1))
"bbac")
(deftest nsubstitute-if-not-string.6
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 0))
"abac")
(deftest nsubstitute-if-not-string.7
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count -1))
"abac")
(deftest nsubstitute-if-not-string.8
(let ((x "")) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :from-end t))
"")
(deftest nsubstitute-if-not-string.9
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :from-end t))
"bbbc")
(deftest nsubstitute-if-not-string.10
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :from-end t :count nil))
"bbbc")
(deftest nsubstitute-if-not-string.11
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 2 :from-end t))
"bbbc")
(deftest nsubstitute-if-not-string.12
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 1 :from-end t))
"abbc")
(deftest nsubstitute-if-not-string.13
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 0 :from-end t))
"abac")
(deftest nsubstitute-if-not-string.14
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count -1 :from-end t))
"abac")
(deftest nsubstitute-if-not-string.15
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig "aaaaaaaaaa")
(x (copy-seq orig))
(y (nsubstitute-if-not #\x (is-not-eql-p #\a) x :start i :end j)))
(equalp y (concatenate 'simple-string
(make-array i :initial-element #\a)
(make-array (- j i) :initial-element #\x)
(make-array (- 10 j) :initial-element #\a))))))
t)
(deftest nsubstitute-if-not-string.16
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig "aaaaaaaaaa")
(x (copy-seq orig))
(y (nsubstitute-if-not #\x (is-not-eql-p #\a) x :start i :end j :from-end t)))
(equalp y (concatenate 'simple-string
(make-array i :initial-element #\a)
(make-array (- j i) :initial-element #\x)
(make-array (- 10 j) :initial-element #\a))))))
t)
(deftest nsubstitute-if-not-string.17
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig "aaaaaaaaaa")
(x (copy-seq orig))
(y (nsubstitute-if-not #\x (is-not-eql-p #\a) x :start i :end j :count c)))
(equalp y (concatenate 'simple-string
(make-array i :initial-element #\a)
(make-array c :initial-element #\x)
(make-array (- 10 (+ i c)) :initial-element #\a)))))))
t)
(deftest nsubstitute-if-not-string.18
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig "aaaaaaaaaa")
(x (copy-seq orig))
(y (nsubstitute-if-not #\x (is-not-eql-p #\a) x :start i :end j :count c :from-end t)))
(equalp y (concatenate 'simple-string
(make-array (- j c) :initial-element #\a)
(make-array c :initial-element #\x)
(make-array (- 10 j) :initial-element #\a)))))))
t)
(deftest nsubstitute-if-not-string.28
(let* ((x (make-array '(10) :initial-contents "abacbadeaf"
:fill-pointer 5 :element-type 'character))
(result (nsubstitute-if-not #\z (is-not-eql-p #\a) x)))
result)
"zbzcb")
(deftest nsubstitute-if-not-string.29
(let* ((x (make-array '(10) :initial-contents "abacbadeaf"
:fill-pointer 5 :element-type 'character))
(result (nsubstitute-if-not #\z (is-not-eql-p #\a) x :from-end t)))
result)
"zbzcb")
(deftest nsubstitute-if-not-string.30
(let* ((x (make-array '(10) :initial-contents "abacbadeaf"
:fill-pointer 5 :element-type 'character))
(result (nsubstitute-if-not #\z (is-not-eql-p #\a) x :count 1)))
result)
"zbacb")
(deftest nsubstitute-if-not-string.31
(let* ((x (make-array '(10) :initial-contents "abacbadeaf"
:fill-pointer 5 :element-type 'character))
(result (nsubstitute-if-not #\z (is-not-eql-p #\a) x
:from-end t :count 1)))
result)
"abzcb")
(deftest nsubstitute-if-not-string.32
(do-special-strings
(s "xyzabcxyzabc" nil)
(assert (string= (nsubstitute-if-not #\! (is-not-eql-p #\a) s) "xyz!bcxyz!bc")))
nil)
(deftest nsubstitute-if-not-string.33
(do-special-strings
(s "xyzabcxyzabc" nil)
(assert (string= (nsubstitute-if-not #\! (is-not-eql-p #\a) s :count 1) "xyz!bcxyzabc")))
nil)
(deftest nsubstitute-if-not-string.34
(do-special-strings
(s "xyzabcxyzabc" nil)
(assert (string= (nsubstitute-if-not #\! (is-not-eql-p #\a) s :count 1 :from-end t) "xyzabcxyz!bc")))
nil)
;;; Tests on bit-vectors
(deftest nsubstitute-if-not-bit-vector.1
(let* ((orig #*)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x)))
result)
#*)
(deftest nsubstitute-if-not-bit-vector.2
(let* ((orig #*)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x)))
result)
#*)
(deftest nsubstitute-if-not-bit-vector.3
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x)))
result)
#*000000)
(deftest nsubstitute-if-not-bit-vector.4
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x)))
result)
#*111111)
(deftest nsubstitute-if-not-bit-vector.5
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :start 1)))
result)
#*011111)
(deftest nsubstitute-if-not-bit-vector.6
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x :start 2 :end nil)))
result)
#*010000)
(deftest nsubstitute-if-not-bit-vector.7
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :end 4)))
result)
#*111101)
(deftest nsubstitute-if-not-bit-vector.8
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x :end nil)))
result)
#*000000)
(deftest nsubstitute-if-not-bit-vector.9
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x :end 3)))
result)
#*000101)
(deftest nsubstitute-if-not-bit-vector.10
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x :start 2 :end 4)))
result)
#*010001)
(deftest nsubstitute-if-not-bit-vector.11
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :start 2 :end 4)))
result)
#*011101)
(deftest nsubstitute-if-not-bit-vector.12
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count 1)))
result)
#*110101)
(deftest nsubstitute-if-not-bit-vector.13
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count 0)))
result)
#*010101)
(deftest nsubstitute-if-not-bit-vector.14
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count -1)))
result)
#*010101)
(deftest nsubstitute-if-not-bit-vector.15
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count 1 :from-end t)))
result)
#*010111)
(deftest nsubstitute-if-not-bit-vector.16
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count 0 :from-end t)))
result)
#*010101)
(deftest nsubstitute-if-not-bit-vector.17
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count -1 :from-end t)))
result)
#*010101)
(deftest nsubstitute-if-not-bit-vector.18
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count nil)))
result)
#*111111)
(deftest nsubstitute-if-not-bit-vector.19
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count nil :from-end t)))
result)
#*111111)
(deftest nsubstitute-if-not-bit-vector.20
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig #*0000000000)
(x (copy-seq orig))
(y (nsubstitute-if-not 1 (is-not-eql-p 0) x :start i :end j :count c)))
(equalp y (concatenate
'simple-bit-vector
(make-list i :initial-element 0)
(make-list c :initial-element 1)
(make-list (- 10 (+ i c)) :initial-element 0)))))))
t)
(deftest nsubstitute-if-not-bit-vector.21
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig #*1111111111)
(x (copy-seq orig))
(y (nsubstitute-if-not 0 (is-not-eql-p 1) x :start i :end j :count c :from-end t)))
(equalp y (concatenate
'simple-bit-vector
(make-list (- j c) :initial-element 1)
(make-list c :initial-element 0)
(make-list (- 10 j) :initial-element 1)))))))
t)
;;; More tests
(deftest nsubstitute-if-not-list.24
(let* ((orig '((a 1) (b 2) (a 3) (c 4) (d 5) (a 6) (e 7)))
(x (copy-seq orig))
(result (nsubstitute-if-not '(a 10) (is-not-eql-p 'a) x :key #'car)))
result)
((a 10) (b 2) (a 10) (c 4) (d 5) (a 10) (e 7)))
(deftest nsubstitute-if-not-list.25
(let* ((orig '((a 1) (b 2) (a 3) (c 4) (d 5) (a 6) (e 7)))
(x (copy-seq orig))
(result (nsubstitute-if-not '(a 10) (is-not-eql-p 'a) x
:key #'car :start 1 :end 5)))
result)
((a 1) (b 2) (a 10) (c 4) (d 5) (a 6) (e 7)))
(deftest nsubstitute-if-not-vector.24
(let* ((orig #((a 1) (b 2) (a 3) (c 4) (d 5) (a 6) (e 7)))
(x (copy-seq orig))
(result (nsubstitute-if-not '(a 10) (is-not-eql-p 'a) x :key #'car)))
result)
#((a 10) (b 2) (a 10) (c 4) (d 5) (a 10) (e 7)))
(deftest nsubstitute-if-not-vector.25
(let* ((orig #((a 1) (b 2) (a 3) (c 4) (d 5) (a 6) (e 7)))
(x (copy-seq orig))
(result (nsubstitute-if-not '(a 10) (is-not-eql-p 'a) x :key #'car :start 1 :end 5)))
result)
#((a 1) (b 2) (a 10) (c 4) (d 5) (a 6) (e 7)))
(deftest nsubstitute-if-not-string.24
(let* ((orig "0102342015")
(x (copy-seq orig))
(result (nsubstitute-if-not #\a (is-not-eql-p #\1) x :key #'nextdigit)))
result)
"a1a2342a15")
(deftest nsubstitute-if-not-string.25
(let* ((orig "0102342015")
(x (copy-seq orig))
(result (nsubstitute-if-not #\a (is-not-eql-p #\1) x :key #'nextdigit :start 1 :end 6)))
result)
"01a2342015")
(deftest nsubstitute-if-not-bit-vector.26
(let* ((orig #*00111001011010110)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 1) x :key #'1+)))
result)
#*11111111111111111)
(deftest nsubstitute-if-not-bit-vector.27
(let* ((orig #*00111001011010110)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 1) x :key #'1+ :start 1 :end 10)))
result)
#*01111111111010110)
(deftest nsubstitute-if-not-bit-vector.30
(let* ((x (make-array '(10) :initial-contents '(0 1 0 1 1 0 1 1 0 1)
:fill-pointer 5 :element-type 'bit))
(result (nsubstitute-if-not 1 #'onep x)))
result)
#*11111)
(deftest nsubstitute-if-not-bit-vector.31
(let* ((x (make-array '(10) :initial-contents '(0 1 0 1 1 0 1 1 0 1)
:fill-pointer 5 :element-type 'bit))
(result (nsubstitute-if-not 1 #'onep x :from-end t)))
result)
#*11111)
(deftest nsubstitute-if-not-bit-vector.32
(let* ((x (make-array '(10) :initial-contents '(0 1 0 1 1 0 1 1 0 1)
:fill-pointer 5 :element-type 'bit))
(result (nsubstitute-if-not 1 #'onep x :count 1)))
result)
#*11011)
(deftest nsubstitute-if-not-bit-vector.33
(let* ((x (make-array '(10) :initial-contents '(0 1 0 1 1 0 1 1 0 1)
:fill-pointer 5 :element-type 'bit))
(result (nsubstitute-if-not 1 #'onep x :from-end t :count 1)))
result)
#*01111)
(deftest nsubstitute-if-not.order.1
(let ((i 0) a b c d e f g h)
(values
(nsubstitute-if-not
(progn (setf a (incf i)) 'a)
(progn (setf b (incf i)) #'identity)
(progn (setf c (incf i)) (list nil 1 2 nil 3 4 nil 5))
:count (progn (setf d (incf i)) 2)
:start (progn (setf e (incf i)) 0)
:end (progn (setf f (incf i)) 7)
:key (progn (setf g (incf i)) #'identity)
:from-end (setf h (incf i))
)
i a b c d e f g h))
(nil 1 2 a 3 4 a 5)
8 1 2 3 4 5 6 7 8)
(deftest nsubstitute-if-not.order.2
(let ((i 0) a b c d e f g h)
(values
(nsubstitute-if-not
(progn (setf a (incf i)) 'a)
(progn (setf b (incf i)) #'identity)
(progn (setf c (incf i)) (list nil 1 2 nil 3 4 nil 5))
:from-end (setf h (incf i))
:key (progn (setf g (incf i)) #'identity)
:end (progn (setf f (incf i)) 7)
:start (progn (setf e (incf i)) 0)
:count (progn (setf d (incf i)) 2)
)
i a b c d e f g h))
(nil 1 2 a 3 4 a 5)
8 1 2 3 8 7 6 5 4)
;;; Keyword tests
(deftest nsubstitute-if-not.allow-other-keys.1
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3)
:allow-other-keys t :bad t)
(a a 0 a a 0 a))
(deftest nsubstitute-if-not.allow-other-keys.2
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3)
:bad t :allow-other-keys t)
(a a 0 a a 0 a))
(deftest nsubstitute-if-not.allow-other-keys.3
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3) :allow-other-keys t
:allow-other-keys nil :bad t)
(a a 0 a a 0 a))
(deftest nsubstitute-if-not.allow-other-keys.4
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3) :bad t
:allow-other-keys t :allow-other-keys nil)
(a a 0 a a 0 a))
(deftest nsubstitute-if-not.allow-other-keys.5
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3)
:allow-other-keys t :key #'1-)
(1 a a a 1 a a))
(deftest nsubstitute-if-not.keywords.6
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3)
:key #'1- :key #'identity)
(1 a a a 1 a a))
(deftest nsubstitute-if-not.allow-other-keys.7
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3) :allow-other-keys t
:bad t :allow-other-keys nil)
(a a 0 a a 0 a))
(deftest nsubstitute-if-not.allow-other-keys.8
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3) :allow-other-keys nil)
(a a 0 a a 0 a))
;;; Error cases
(deftest nsubstitute-if-not.error.1
(signals-error (nsubstitute-if-not) program-error)
t)
(deftest nsubstitute-if-not.error.2
(signals-error (nsubstitute-if-not 'a) program-error)
t)
(deftest nsubstitute-if-not.error.3
(signals-error (nsubstitute-if-not 'a #'null) program-error)
t)
(deftest nsubstitute-if-not.error.4
(signals-error (nsubstitute-if-not 'a #'null nil 'bad t) program-error)
t)
(deftest nsubstitute-if-not.error.5
(signals-error (nsubstitute-if-not 'a #'null nil
'bad t :allow-other-keys nil)
program-error)
t)
(deftest nsubstitute-if-not.error.6
(signals-error (nsubstitute-if-not 'a #'null nil :key) program-error)
t)
(deftest nsubstitute-if-not.error.7
(signals-error (nsubstitute-if-not 'a #'null nil 1 2) program-error)
t)
(deftest nsubstitute-if-not.error.8
(signals-error (nsubstitute-if-not 'a #'cons (list 'a 'b 'c)) program-error)
t)
(deftest nsubstitute-if-not.error.9
(signals-error (nsubstitute-if-not 'a #'car (list 'a 'b 'c)) type-error)
t)
(deftest nsubstitute-if-not.error.10
(signals-error (nsubstitute-if-not 'a #'identity (list 'a 'b 'c)
:key #'car)
type-error)
t)
(deftest nsubstitute-if-not.error.11
(signals-error (nsubstitute-if-not 'a #'identity (list 'a 'b 'c)
:key #'cons)
program-error)
t)
(deftest nsubstitute-if-not.error.12
(check-type-error #'(lambda (x) (nsubstitute-if-not 1 #'null x)) #'sequencep)
nil)
| null | https://raw.githubusercontent.com/rtoy/ansi-cl-tests/9708f3977220c46def29f43bb237e97d62033c1d/nsubstitute-if-not.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests for NSUBSTITUTE-IF-NOT
Tests on vectors
Tests on strings
Tests on bit-vectors
More tests
Keyword tests
Error cases | Author :
Created : Sat Aug 31 19:00:55 2002
(in-package :cl-test)
(deftest nsubstitute-if-not-list.1
(nsubstitute-if-not 'b 'identity nil)
nil)
(deftest nsubstitute-if-not-list.2
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x) x)
(b b b c))
(deftest nsubstitute-if-not-list.3
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count nil))
(b b b c))
(deftest nsubstitute-if-not-list.4
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 2))
(b b b c))
(deftest nsubstitute-if-not-list.5
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 1))
(b b a c))
(deftest nsubstitute-if-not-list.6
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 0))
(a b a c))
(deftest nsubstitute-if-not-list.7
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count -1))
(a b a c))
(deftest nsubstitute-if-not-list.8
(nsubstitute-if-not 'b (is-not-eql-p 'a) nil :from-end t)
nil)
(deftest nsubstitute-if-not-list.9
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :from-end t))
(b b b c))
(deftest nsubstitute-if-not-list.10
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :from-end t :count nil))
(b b b c))
(deftest nsubstitute-if-not-list.11
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 2 :from-end t))
(b b b c))
(deftest nsubstitute-if-not-list.12
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 1 :from-end t))
(a b b c))
(deftest nsubstitute-if-not-list.13
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 0 :from-end t))
(a b a c))
(deftest nsubstitute-if-not-list.14
(let ((x (copy-seq '(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count -1 :from-end t))
(a b a c))
(deftest nsubstitute-if-not-list.15
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig '(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j)))
(equal y (nconc (make-list i :initial-element 'a)
(make-list (- j i) :initial-element 'x)
(make-list (- 10 j) :initial-element 'a))))))
t)
(deftest nsubstitute-if-not-list.16
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig '(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :from-end t)))
(equal y (nconc (make-list i :initial-element 'a)
(make-list (- j i) :initial-element 'x)
(make-list (- 10 j) :initial-element 'a))))))
t)
(deftest nsubstitute-if-not-list.17
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig '(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :count c)))
(equal y (nconc (make-list i :initial-element 'a)
(make-list c :initial-element 'x)
(make-list (- 10 (+ i c)) :initial-element 'a)))))))
t)
(deftest nsubstitute-if-not-list.18
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig '(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :count c :from-end t)))
(equal y (nconc (make-list (- j c) :initial-element 'a)
(make-list c :initial-element 'x)
(make-list (- 10 j) :initial-element 'a)))))))
t)
(deftest nsubstitute-if-not-vector.1
(let ((x #())) (nsubstitute-if-not 'b (is-not-eql-p 'a) x))
#())
(deftest nsubstitute-if-not-vector.2
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x))
#(b b b c))
(deftest nsubstitute-if-not-vector.3
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count nil) x)
#(b b b c))
(deftest nsubstitute-if-not-vector.4
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 2))
#(b b b c))
(deftest nsubstitute-if-not-vector.5
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 1))
#(b b a c))
(deftest nsubstitute-if-not-vector.6
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 0))
#(a b a c))
(deftest nsubstitute-if-not-vector.7
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count -1))
#(a b a c))
(deftest nsubstitute-if-not-vector.8
(let ((x #())) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :from-end t))
#())
(deftest nsubstitute-if-not-vector.9
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :from-end t))
#(b b b c))
(deftest nsubstitute-if-not-vector.10
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :from-end t :count nil))
#(b b b c))
(deftest nsubstitute-if-not-vector.11
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 2 :from-end t))
#(b b b c))
(deftest nsubstitute-if-not-vector.12
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 1 :from-end t))
#(a b b c))
(deftest nsubstitute-if-not-vector.13
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count 0 :from-end t))
#(a b a c))
(deftest nsubstitute-if-not-vector.14
(let ((x (copy-seq #(a b a c)))) (nsubstitute-if-not 'b (is-not-eql-p 'a) x :count -1 :from-end t))
#(a b a c))
(deftest nsubstitute-if-not-vector.15
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig #(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j)))
(equalp y (concatenate 'simple-vector
(make-array i :initial-element 'a)
(make-array (- j i) :initial-element 'x)
(make-array (- 10 j) :initial-element 'a))))))
t)
(deftest nsubstitute-if-not-vector.16
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig #(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :from-end t)))
(equalp y (concatenate 'simple-vector
(make-array i :initial-element 'a)
(make-array (- j i) :initial-element 'x)
(make-array (- 10 j) :initial-element 'a))))))
t)
(deftest nsubstitute-if-not-vector.17
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig #(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :count c)))
(equalp y (concatenate 'simple-vector
(make-array i :initial-element 'a)
(make-array c :initial-element 'x)
(make-array (- 10 (+ i c)) :initial-element 'a)))))))
t)
(deftest nsubstitute-if-not-vector.18
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig #(a a a a a a a a a a))
(x (copy-seq orig))
(y (nsubstitute-if-not 'x (is-not-eql-p 'a) x :start i :end j :count c :from-end t)))
(equalp y (concatenate 'simple-vector
(make-array (- j c) :initial-element 'a)
(make-array c :initial-element 'x)
(make-array (- 10 j) :initial-element 'a)))))))
t)
(deftest nsubstitute-if-not-vector.28
(let* ((x (make-array '(10) :initial-contents '(a b a c b a d e a f)
:fill-pointer 5))
(result (nsubstitute-if-not 'z (is-not-eql-p 'a) x)))
result)
#(z b z c b))
(deftest nsubstitute-if-not-vector.29
(let* ((x (make-array '(10) :initial-contents '(a b a c b a d e a f)
:fill-pointer 5))
(result (nsubstitute-if-not 'z (is-not-eql-p 'a) x :from-end t)))
result)
#(z b z c b))
(deftest nsubstitute-if-not-vector.30
(let* ((x (make-array '(10) :initial-contents '(a b a c b a d e a f)
:fill-pointer 5))
(result (nsubstitute-if-not 'z (is-not-eql-p 'a) x :count 1)))
result)
#(z b a c b))
(deftest nsubstitute-if-not-vector.31
(let* ((x (make-array '(10) :initial-contents '(a b a c b a d e a f)
:fill-pointer 5))
(result (nsubstitute-if-not 'z (is-not-eql-p 'a) x
:from-end t :count 1)))
result)
#(a b z c b))
(deftest nsubstitute-if-not-vector.32
(let* ((v1 (copy-seq #(a b c d a b c d a b c d a b c d)))
(v2 (make-array '(8) :displaced-to v1
:displaced-index-offset 3)))
(nsubstitute-if-not 'x (is-not-eql-p 'c) v2 :count 1))
#(d a b x d a b c))
(deftest nsubstitute-if-not-vector.33
(let* ((v1 (copy-seq #(a b c d a b c d a b c d a b c d)))
(v2 (make-array '(8) :displaced-to v1
:displaced-index-offset 3)))
(nsubstitute-if-not 'x (is-not-eql-p 'c) v2 :count 1 :from-end t))
#(d a b c d a b x))
(deftest nsubstitute-if-not-string.1
(let ((x "")) (nsubstitute-if-not #\b (is-not-eql-p #\a) x))
"")
(deftest nsubstitute-if-not-string.2
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x))
"bbbc")
(deftest nsubstitute-if-not-string.3
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count nil))
"bbbc")
(deftest nsubstitute-if-not-string.4
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 2))
"bbbc")
(deftest nsubstitute-if-not-string.5
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 1))
"bbac")
(deftest nsubstitute-if-not-string.6
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 0))
"abac")
(deftest nsubstitute-if-not-string.7
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count -1))
"abac")
(deftest nsubstitute-if-not-string.8
(let ((x "")) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :from-end t))
"")
(deftest nsubstitute-if-not-string.9
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :from-end t))
"bbbc")
(deftest nsubstitute-if-not-string.10
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :from-end t :count nil))
"bbbc")
(deftest nsubstitute-if-not-string.11
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 2 :from-end t))
"bbbc")
(deftest nsubstitute-if-not-string.12
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 1 :from-end t))
"abbc")
(deftest nsubstitute-if-not-string.13
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count 0 :from-end t))
"abac")
(deftest nsubstitute-if-not-string.14
(let ((x (copy-seq "abac"))) (nsubstitute-if-not #\b (is-not-eql-p #\a) x :count -1 :from-end t))
"abac")
(deftest nsubstitute-if-not-string.15
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig "aaaaaaaaaa")
(x (copy-seq orig))
(y (nsubstitute-if-not #\x (is-not-eql-p #\a) x :start i :end j)))
(equalp y (concatenate 'simple-string
(make-array i :initial-element #\a)
(make-array (- j i) :initial-element #\x)
(make-array (- 10 j) :initial-element #\a))))))
t)
(deftest nsubstitute-if-not-string.16
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(let* ((orig "aaaaaaaaaa")
(x (copy-seq orig))
(y (nsubstitute-if-not #\x (is-not-eql-p #\a) x :start i :end j :from-end t)))
(equalp y (concatenate 'simple-string
(make-array i :initial-element #\a)
(make-array (- j i) :initial-element #\x)
(make-array (- 10 j) :initial-element #\a))))))
t)
(deftest nsubstitute-if-not-string.17
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig "aaaaaaaaaa")
(x (copy-seq orig))
(y (nsubstitute-if-not #\x (is-not-eql-p #\a) x :start i :end j :count c)))
(equalp y (concatenate 'simple-string
(make-array i :initial-element #\a)
(make-array c :initial-element #\x)
(make-array (- 10 (+ i c)) :initial-element #\a)))))))
t)
(deftest nsubstitute-if-not-string.18
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig "aaaaaaaaaa")
(x (copy-seq orig))
(y (nsubstitute-if-not #\x (is-not-eql-p #\a) x :start i :end j :count c :from-end t)))
(equalp y (concatenate 'simple-string
(make-array (- j c) :initial-element #\a)
(make-array c :initial-element #\x)
(make-array (- 10 j) :initial-element #\a)))))))
t)
(deftest nsubstitute-if-not-string.28
(let* ((x (make-array '(10) :initial-contents "abacbadeaf"
:fill-pointer 5 :element-type 'character))
(result (nsubstitute-if-not #\z (is-not-eql-p #\a) x)))
result)
"zbzcb")
(deftest nsubstitute-if-not-string.29
(let* ((x (make-array '(10) :initial-contents "abacbadeaf"
:fill-pointer 5 :element-type 'character))
(result (nsubstitute-if-not #\z (is-not-eql-p #\a) x :from-end t)))
result)
"zbzcb")
(deftest nsubstitute-if-not-string.30
(let* ((x (make-array '(10) :initial-contents "abacbadeaf"
:fill-pointer 5 :element-type 'character))
(result (nsubstitute-if-not #\z (is-not-eql-p #\a) x :count 1)))
result)
"zbacb")
(deftest nsubstitute-if-not-string.31
(let* ((x (make-array '(10) :initial-contents "abacbadeaf"
:fill-pointer 5 :element-type 'character))
(result (nsubstitute-if-not #\z (is-not-eql-p #\a) x
:from-end t :count 1)))
result)
"abzcb")
(deftest nsubstitute-if-not-string.32
(do-special-strings
(s "xyzabcxyzabc" nil)
(assert (string= (nsubstitute-if-not #\! (is-not-eql-p #\a) s) "xyz!bcxyz!bc")))
nil)
(deftest nsubstitute-if-not-string.33
(do-special-strings
(s "xyzabcxyzabc" nil)
(assert (string= (nsubstitute-if-not #\! (is-not-eql-p #\a) s :count 1) "xyz!bcxyzabc")))
nil)
(deftest nsubstitute-if-not-string.34
(do-special-strings
(s "xyzabcxyzabc" nil)
(assert (string= (nsubstitute-if-not #\! (is-not-eql-p #\a) s :count 1 :from-end t) "xyzabcxyz!bc")))
nil)
(deftest nsubstitute-if-not-bit-vector.1
(let* ((orig #*)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x)))
result)
#*)
(deftest nsubstitute-if-not-bit-vector.2
(let* ((orig #*)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x)))
result)
#*)
(deftest nsubstitute-if-not-bit-vector.3
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x)))
result)
#*000000)
(deftest nsubstitute-if-not-bit-vector.4
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x)))
result)
#*111111)
(deftest nsubstitute-if-not-bit-vector.5
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :start 1)))
result)
#*011111)
(deftest nsubstitute-if-not-bit-vector.6
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x :start 2 :end nil)))
result)
#*010000)
(deftest nsubstitute-if-not-bit-vector.7
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :end 4)))
result)
#*111101)
(deftest nsubstitute-if-not-bit-vector.8
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x :end nil)))
result)
#*000000)
(deftest nsubstitute-if-not-bit-vector.9
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x :end 3)))
result)
#*000101)
(deftest nsubstitute-if-not-bit-vector.10
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 0 (is-not-eql-p 1) x :start 2 :end 4)))
result)
#*010001)
(deftest nsubstitute-if-not-bit-vector.11
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :start 2 :end 4)))
result)
#*011101)
(deftest nsubstitute-if-not-bit-vector.12
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count 1)))
result)
#*110101)
(deftest nsubstitute-if-not-bit-vector.13
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count 0)))
result)
#*010101)
(deftest nsubstitute-if-not-bit-vector.14
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count -1)))
result)
#*010101)
(deftest nsubstitute-if-not-bit-vector.15
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count 1 :from-end t)))
result)
#*010111)
(deftest nsubstitute-if-not-bit-vector.16
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count 0 :from-end t)))
result)
#*010101)
(deftest nsubstitute-if-not-bit-vector.17
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count -1 :from-end t)))
result)
#*010101)
(deftest nsubstitute-if-not-bit-vector.18
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count nil)))
result)
#*111111)
(deftest nsubstitute-if-not-bit-vector.19
(let* ((orig #*010101)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 0) x :count nil :from-end t)))
result)
#*111111)
(deftest nsubstitute-if-not-bit-vector.20
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig #*0000000000)
(x (copy-seq orig))
(y (nsubstitute-if-not 1 (is-not-eql-p 0) x :start i :end j :count c)))
(equalp y (concatenate
'simple-bit-vector
(make-list i :initial-element 0)
(make-list c :initial-element 1)
(make-list (- 10 (+ i c)) :initial-element 0)))))))
t)
(deftest nsubstitute-if-not-bit-vector.21
(loop for i from 0 to 9 always
(loop for j from i to 10 always
(loop for c from 0 to (- j i) always
(let* ((orig #*1111111111)
(x (copy-seq orig))
(y (nsubstitute-if-not 0 (is-not-eql-p 1) x :start i :end j :count c :from-end t)))
(equalp y (concatenate
'simple-bit-vector
(make-list (- j c) :initial-element 1)
(make-list c :initial-element 0)
(make-list (- 10 j) :initial-element 1)))))))
t)
(deftest nsubstitute-if-not-list.24
(let* ((orig '((a 1) (b 2) (a 3) (c 4) (d 5) (a 6) (e 7)))
(x (copy-seq orig))
(result (nsubstitute-if-not '(a 10) (is-not-eql-p 'a) x :key #'car)))
result)
((a 10) (b 2) (a 10) (c 4) (d 5) (a 10) (e 7)))
(deftest nsubstitute-if-not-list.25
(let* ((orig '((a 1) (b 2) (a 3) (c 4) (d 5) (a 6) (e 7)))
(x (copy-seq orig))
(result (nsubstitute-if-not '(a 10) (is-not-eql-p 'a) x
:key #'car :start 1 :end 5)))
result)
((a 1) (b 2) (a 10) (c 4) (d 5) (a 6) (e 7)))
(deftest nsubstitute-if-not-vector.24
(let* ((orig #((a 1) (b 2) (a 3) (c 4) (d 5) (a 6) (e 7)))
(x (copy-seq orig))
(result (nsubstitute-if-not '(a 10) (is-not-eql-p 'a) x :key #'car)))
result)
#((a 10) (b 2) (a 10) (c 4) (d 5) (a 10) (e 7)))
(deftest nsubstitute-if-not-vector.25
(let* ((orig #((a 1) (b 2) (a 3) (c 4) (d 5) (a 6) (e 7)))
(x (copy-seq orig))
(result (nsubstitute-if-not '(a 10) (is-not-eql-p 'a) x :key #'car :start 1 :end 5)))
result)
#((a 1) (b 2) (a 10) (c 4) (d 5) (a 6) (e 7)))
(deftest nsubstitute-if-not-string.24
(let* ((orig "0102342015")
(x (copy-seq orig))
(result (nsubstitute-if-not #\a (is-not-eql-p #\1) x :key #'nextdigit)))
result)
"a1a2342a15")
(deftest nsubstitute-if-not-string.25
(let* ((orig "0102342015")
(x (copy-seq orig))
(result (nsubstitute-if-not #\a (is-not-eql-p #\1) x :key #'nextdigit :start 1 :end 6)))
result)
"01a2342015")
(deftest nsubstitute-if-not-bit-vector.26
(let* ((orig #*00111001011010110)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 1) x :key #'1+)))
result)
#*11111111111111111)
(deftest nsubstitute-if-not-bit-vector.27
(let* ((orig #*00111001011010110)
(x (copy-seq orig))
(result (nsubstitute-if-not 1 (is-not-eql-p 1) x :key #'1+ :start 1 :end 10)))
result)
#*01111111111010110)
(deftest nsubstitute-if-not-bit-vector.30
(let* ((x (make-array '(10) :initial-contents '(0 1 0 1 1 0 1 1 0 1)
:fill-pointer 5 :element-type 'bit))
(result (nsubstitute-if-not 1 #'onep x)))
result)
#*11111)
(deftest nsubstitute-if-not-bit-vector.31
(let* ((x (make-array '(10) :initial-contents '(0 1 0 1 1 0 1 1 0 1)
:fill-pointer 5 :element-type 'bit))
(result (nsubstitute-if-not 1 #'onep x :from-end t)))
result)
#*11111)
(deftest nsubstitute-if-not-bit-vector.32
(let* ((x (make-array '(10) :initial-contents '(0 1 0 1 1 0 1 1 0 1)
:fill-pointer 5 :element-type 'bit))
(result (nsubstitute-if-not 1 #'onep x :count 1)))
result)
#*11011)
(deftest nsubstitute-if-not-bit-vector.33
(let* ((x (make-array '(10) :initial-contents '(0 1 0 1 1 0 1 1 0 1)
:fill-pointer 5 :element-type 'bit))
(result (nsubstitute-if-not 1 #'onep x :from-end t :count 1)))
result)
#*01111)
(deftest nsubstitute-if-not.order.1
(let ((i 0) a b c d e f g h)
(values
(nsubstitute-if-not
(progn (setf a (incf i)) 'a)
(progn (setf b (incf i)) #'identity)
(progn (setf c (incf i)) (list nil 1 2 nil 3 4 nil 5))
:count (progn (setf d (incf i)) 2)
:start (progn (setf e (incf i)) 0)
:end (progn (setf f (incf i)) 7)
:key (progn (setf g (incf i)) #'identity)
:from-end (setf h (incf i))
)
i a b c d e f g h))
(nil 1 2 a 3 4 a 5)
8 1 2 3 4 5 6 7 8)
(deftest nsubstitute-if-not.order.2
(let ((i 0) a b c d e f g h)
(values
(nsubstitute-if-not
(progn (setf a (incf i)) 'a)
(progn (setf b (incf i)) #'identity)
(progn (setf c (incf i)) (list nil 1 2 nil 3 4 nil 5))
:from-end (setf h (incf i))
:key (progn (setf g (incf i)) #'identity)
:end (progn (setf f (incf i)) 7)
:start (progn (setf e (incf i)) 0)
:count (progn (setf d (incf i)) 2)
)
i a b c d e f g h))
(nil 1 2 a 3 4 a 5)
8 1 2 3 8 7 6 5 4)
(deftest nsubstitute-if-not.allow-other-keys.1
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3)
:allow-other-keys t :bad t)
(a a 0 a a 0 a))
(deftest nsubstitute-if-not.allow-other-keys.2
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3)
:bad t :allow-other-keys t)
(a a 0 a a 0 a))
(deftest nsubstitute-if-not.allow-other-keys.3
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3) :allow-other-keys t
:allow-other-keys nil :bad t)
(a a 0 a a 0 a))
(deftest nsubstitute-if-not.allow-other-keys.4
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3) :bad t
:allow-other-keys t :allow-other-keys nil)
(a a 0 a a 0 a))
(deftest nsubstitute-if-not.allow-other-keys.5
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3)
:allow-other-keys t :key #'1-)
(1 a a a 1 a a))
(deftest nsubstitute-if-not.keywords.6
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3)
:key #'1- :key #'identity)
(1 a a a 1 a a))
(deftest nsubstitute-if-not.allow-other-keys.7
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3) :allow-other-keys t
:bad t :allow-other-keys nil)
(a a 0 a a 0 a))
(deftest nsubstitute-if-not.allow-other-keys.8
(nsubstitute-if-not 'a #'zerop (list 1 2 0 3 1 0 3) :allow-other-keys nil)
(a a 0 a a 0 a))
(deftest nsubstitute-if-not.error.1
(signals-error (nsubstitute-if-not) program-error)
t)
(deftest nsubstitute-if-not.error.2
(signals-error (nsubstitute-if-not 'a) program-error)
t)
(deftest nsubstitute-if-not.error.3
(signals-error (nsubstitute-if-not 'a #'null) program-error)
t)
(deftest nsubstitute-if-not.error.4
(signals-error (nsubstitute-if-not 'a #'null nil 'bad t) program-error)
t)
(deftest nsubstitute-if-not.error.5
(signals-error (nsubstitute-if-not 'a #'null nil
'bad t :allow-other-keys nil)
program-error)
t)
(deftest nsubstitute-if-not.error.6
(signals-error (nsubstitute-if-not 'a #'null nil :key) program-error)
t)
(deftest nsubstitute-if-not.error.7
(signals-error (nsubstitute-if-not 'a #'null nil 1 2) program-error)
t)
(deftest nsubstitute-if-not.error.8
(signals-error (nsubstitute-if-not 'a #'cons (list 'a 'b 'c)) program-error)
t)
(deftest nsubstitute-if-not.error.9
(signals-error (nsubstitute-if-not 'a #'car (list 'a 'b 'c)) type-error)
t)
(deftest nsubstitute-if-not.error.10
(signals-error (nsubstitute-if-not 'a #'identity (list 'a 'b 'c)
:key #'car)
type-error)
t)
(deftest nsubstitute-if-not.error.11
(signals-error (nsubstitute-if-not 'a #'identity (list 'a 'b 'c)
:key #'cons)
program-error)
t)
(deftest nsubstitute-if-not.error.12
(check-type-error #'(lambda (x) (nsubstitute-if-not 1 #'null x)) #'sequencep)
nil)
|
252ca749a06d3f257faa1b743bda10f490da7050cda1b53af7803ffbfa24273f | c4-project/c4f | reify_stm.ml | This file is part of c4f .
Copyright ( c ) 2018 - 2022 C4 Project
c4 t itself is licensed under the MIT License . See the LICENSE file in the
project root for more information .
Parts of c4 t are based on code from the Herdtools7 project
( ) : see the LICENSE.herd file in the
project root for more information .
Copyright (c) 2018-2022 C4 Project
c4t itself is licensed under the MIT License. See the LICENSE file in the
project root for more information.
Parts of c4t are based on code from the Herdtools7 project
() : see the LICENSE.herd file in the
project root for more information. *)
open Base
open Import
A lot of this module involves dealing with the prospect of one FIR
statement expanding into multiple C statements ; hence why reified blocks
have statements that themselves are lists of statements . At time of
writing ( note that comments lie , including this one ) , the types of FIR
statement that introduce multiple C statements are :
- implicit flow blocks ( these expand to the statements they contain )
statement expanding into multiple C statements; hence why reified blocks
have statements that themselves are lists of statements. At time of
writing (note that comments lie, including this one), the types of FIR
statement that introduce multiple C statements are:
- implicit flow blocks (these expand to the statements they contain) *)
let atomic = Reify_atomic.reify_stm ~expr:Reify_expr.reify
let assign_expr (asn : Fir.Assign.t) : Ast.Expr.t =
let dst = Reify_prim.lvalue asn.@(Fir.Assign.dst) in
match asn.@(Fir.Assign.src) with
| Inc -> Reify_expr.postfix `Inc dst
| Dec -> Reify_expr.postfix `Dec dst
| Expr e -> Binary (dst, `Assign, Reify_expr.reify e)
let assign (asn : Fir.Assign.t) : Ast.Stm.t = Expr (Some (assign_expr asn))
let lift_stms : Ast.Stm.t list -> Ast.Compound_stm.t =
List.map ~f:(fun s -> `Stm s)
let flat_statements =
[%accessor Fir.Block.each_statement @> Accessor.List.each]
let merge_stms (b : ('meta, Ast.Stm.t list) Fir.Block.t) : Ast.Stm.t list =
b.@*(flat_statements)
let block_compound (type meta) (b : (meta, Ast.Stm.t list) Fir.Block.t) :
Ast.Compound_stm.t =
lift_stms (merge_stms b)
let block (type meta) (b : (meta, Ast.Stm.t list) Fir.Block.t) : Ast.Stm.t =
Compound (block_compound b)
let ne_block (type meta) (b : (meta, Ast.Stm.t list) Fir.Block.t) :
Ast.Stm.t option =
We ca n't use Fir . Block.is_empty here , as it 'd suggest a block whose
statement list is [ [ ] ] is not empty .
statement list is [[]] is not empty. *)
if Accessor.is_empty flat_statements b then None else Some (block b)
let nop (_ : 'meta) : Ast.Stm.t = Ast.Stm.Expr None
let early_out : Fir.Early_out.t -> Ast.Stm.t = function
| Break -> Ast.Stm.Break
| Continue -> Ast.Stm.Continue
| Return -> Ast.Stm.Return None
let label (l : C4f_common.C_id.t) : Ast.Stm.t =
(* This might need revisiting later. *)
Label (Normal l, Expr None)
let goto (l : C4f_common.C_id.t) : Ast.Stm.t = Goto l
let procedure_call (c : Fir.Call.t) : Ast.Stm.t =
Ast.Stm.Expr
(Some
(Ast.Expr.Call
{ func= Identifier (Fir.Call.function_id c)
; arguments= List.map ~f:Reify_expr.reify (Fir.Call.arguments c) }
) )
let prim ({value; _} : (_, Fir.Prim_statement.t) Fir.With_meta.t) :
Ast.Stm.t list =
[ Fir.Prim_statement.value_map value ~assign ~atomic ~early_out
~procedure_call ~label ~goto ~nop ]
let if_stm ({cond; t_branch; f_branch} : (_, Ast.Stm.t list) Fir.If.t) :
Ast.Stm.t list =
[ If
{ cond= Reify_expr.reify cond
; t_branch= block t_branch
; f_branch= ne_block f_branch } ]
let for_loop (header : Fir.Flow_block.For.t) (body : Ast.Compound_stm.t) :
Ast.Stm.t =
let init = Option.map ~f:assign_expr header.init in
let cond = Option.map ~f:Reify_expr.reify header.cmp in
let update = Option.map ~f:assign_expr header.update in
let body = Ast.Stm.Compound body in
For {init; cond; update; body}
let while_loop (kind : Fir.Flow_block.While.t) (cond : Fir.Expression.t)
(body : Ast.Compound_stm.t) : Ast.Stm.t =
let cond' = Reify_expr.reify cond in
let body' = Ast.Stm.Compound body in
match kind with
| While -> While (cond', body')
| Do_while -> Do_while (body', cond')
let lock (kind : Fir.Flow_block.Lock.t) (body : Ast.Compound_stm.t) :
Ast.Stm.t =
match kind with Atomic -> Atomic body | Synchronized -> Synchronized body
let flow ({header; body} : (_, Ast.Stm.t list) Fir.Flow_block.t) :
Ast.Stm.t list =
let body' = block_compound body in
match header with
| For f -> [for_loop f body']
| Lock l -> [lock l body']
| While (w, c) -> [while_loop w c body']
| Explicit -> [Compound body']
| Implicit -> merge_stms body
let reify (type meta) (m : meta Fir.Statement.t) : Ast.Stm.t list =
Fir.Statement.reduce m ~prim ~if_stm ~flow
let pp : type meta. meta Fir.Statement.t Fmt.t =
fun x -> Fmt.(using reify (list ~sep:sp Ast.Stm.pp)) x
(* Yay, value restriction... *)
let reify_compound (type meta) (m : meta Fir.Statement.t list) :
Ast.Compound_stm.t =
List.concat_map ~f:(fun x -> lift_stms (reify x)) m
| null | https://raw.githubusercontent.com/c4-project/c4f/8939477732861789abc807c8c1532a302b2848a5/lib/litmus_c/src/reify_stm.ml | ocaml | This might need revisiting later.
Yay, value restriction... | This file is part of c4f .
Copyright ( c ) 2018 - 2022 C4 Project
c4 t itself is licensed under the MIT License . See the LICENSE file in the
project root for more information .
Parts of c4 t are based on code from the Herdtools7 project
( ) : see the LICENSE.herd file in the
project root for more information .
Copyright (c) 2018-2022 C4 Project
c4t itself is licensed under the MIT License. See the LICENSE file in the
project root for more information.
Parts of c4t are based on code from the Herdtools7 project
() : see the LICENSE.herd file in the
project root for more information. *)
open Base
open Import
A lot of this module involves dealing with the prospect of one FIR
statement expanding into multiple C statements ; hence why reified blocks
have statements that themselves are lists of statements . At time of
writing ( note that comments lie , including this one ) , the types of FIR
statement that introduce multiple C statements are :
- implicit flow blocks ( these expand to the statements they contain )
statement expanding into multiple C statements; hence why reified blocks
have statements that themselves are lists of statements. At time of
writing (note that comments lie, including this one), the types of FIR
statement that introduce multiple C statements are:
- implicit flow blocks (these expand to the statements they contain) *)
let atomic = Reify_atomic.reify_stm ~expr:Reify_expr.reify
let assign_expr (asn : Fir.Assign.t) : Ast.Expr.t =
let dst = Reify_prim.lvalue asn.@(Fir.Assign.dst) in
match asn.@(Fir.Assign.src) with
| Inc -> Reify_expr.postfix `Inc dst
| Dec -> Reify_expr.postfix `Dec dst
| Expr e -> Binary (dst, `Assign, Reify_expr.reify e)
let assign (asn : Fir.Assign.t) : Ast.Stm.t = Expr (Some (assign_expr asn))
let lift_stms : Ast.Stm.t list -> Ast.Compound_stm.t =
List.map ~f:(fun s -> `Stm s)
let flat_statements =
[%accessor Fir.Block.each_statement @> Accessor.List.each]
let merge_stms (b : ('meta, Ast.Stm.t list) Fir.Block.t) : Ast.Stm.t list =
b.@*(flat_statements)
let block_compound (type meta) (b : (meta, Ast.Stm.t list) Fir.Block.t) :
Ast.Compound_stm.t =
lift_stms (merge_stms b)
let block (type meta) (b : (meta, Ast.Stm.t list) Fir.Block.t) : Ast.Stm.t =
Compound (block_compound b)
let ne_block (type meta) (b : (meta, Ast.Stm.t list) Fir.Block.t) :
Ast.Stm.t option =
We ca n't use Fir . Block.is_empty here , as it 'd suggest a block whose
statement list is [ [ ] ] is not empty .
statement list is [[]] is not empty. *)
if Accessor.is_empty flat_statements b then None else Some (block b)
let nop (_ : 'meta) : Ast.Stm.t = Ast.Stm.Expr None
let early_out : Fir.Early_out.t -> Ast.Stm.t = function
| Break -> Ast.Stm.Break
| Continue -> Ast.Stm.Continue
| Return -> Ast.Stm.Return None
let label (l : C4f_common.C_id.t) : Ast.Stm.t =
Label (Normal l, Expr None)
let goto (l : C4f_common.C_id.t) : Ast.Stm.t = Goto l
let procedure_call (c : Fir.Call.t) : Ast.Stm.t =
Ast.Stm.Expr
(Some
(Ast.Expr.Call
{ func= Identifier (Fir.Call.function_id c)
; arguments= List.map ~f:Reify_expr.reify (Fir.Call.arguments c) }
) )
let prim ({value; _} : (_, Fir.Prim_statement.t) Fir.With_meta.t) :
Ast.Stm.t list =
[ Fir.Prim_statement.value_map value ~assign ~atomic ~early_out
~procedure_call ~label ~goto ~nop ]
let if_stm ({cond; t_branch; f_branch} : (_, Ast.Stm.t list) Fir.If.t) :
Ast.Stm.t list =
[ If
{ cond= Reify_expr.reify cond
; t_branch= block t_branch
; f_branch= ne_block f_branch } ]
let for_loop (header : Fir.Flow_block.For.t) (body : Ast.Compound_stm.t) :
Ast.Stm.t =
let init = Option.map ~f:assign_expr header.init in
let cond = Option.map ~f:Reify_expr.reify header.cmp in
let update = Option.map ~f:assign_expr header.update in
let body = Ast.Stm.Compound body in
For {init; cond; update; body}
let while_loop (kind : Fir.Flow_block.While.t) (cond : Fir.Expression.t)
(body : Ast.Compound_stm.t) : Ast.Stm.t =
let cond' = Reify_expr.reify cond in
let body' = Ast.Stm.Compound body in
match kind with
| While -> While (cond', body')
| Do_while -> Do_while (body', cond')
let lock (kind : Fir.Flow_block.Lock.t) (body : Ast.Compound_stm.t) :
Ast.Stm.t =
match kind with Atomic -> Atomic body | Synchronized -> Synchronized body
let flow ({header; body} : (_, Ast.Stm.t list) Fir.Flow_block.t) :
Ast.Stm.t list =
let body' = block_compound body in
match header with
| For f -> [for_loop f body']
| Lock l -> [lock l body']
| While (w, c) -> [while_loop w c body']
| Explicit -> [Compound body']
| Implicit -> merge_stms body
let reify (type meta) (m : meta Fir.Statement.t) : Ast.Stm.t list =
Fir.Statement.reduce m ~prim ~if_stm ~flow
let pp : type meta. meta Fir.Statement.t Fmt.t =
fun x -> Fmt.(using reify (list ~sep:sp Ast.Stm.pp)) x
let reify_compound (type meta) (m : meta Fir.Statement.t list) :
Ast.Compound_stm.t =
List.concat_map ~f:(fun x -> lift_stms (reify x)) m
|
4dfbcf8fd407447ba1012c10abc80fc1c1953b8f5657ddedec0ab18b871e9a18 | nuvla/api-server | configuration_template_session_mitreid.cljc | (ns sixsq.nuvla.server.resources.spec.configuration-template-session-mitreid
(:require
[clojure.spec.alpha :as s]
[sixsq.nuvla.server.resources.spec.configuration-template :as ps]
[sixsq.nuvla.server.resources.spec.core :as cimi-core]
[sixsq.nuvla.server.util.spec :as su]
[spec-tools.core :as st]))
(s/def ::client-id
(-> (st/spec ::cimi-core/token)
(assoc :name "client-id"
:json-schema/displayName "client ID"
:json-schema/description "MITREid client ID"
:json-schema/group "body"
:json-schema/order 20
:json-schema/hidden false
:json-schema/sensitive false)))
(s/def ::client-secret
(-> (st/spec ::cimi-core/token)
(assoc :name "client-secret"
:json-schema/displayName "client secret"
:json-schema/description "MITREid client secret associated with registered application"
:json-schema/group "body"
:json-schema/order 21
:json-schema/hidden false
:json-schema/sensitive true)))
(s/def ::authorize-url
(-> (st/spec ::cimi-core/token)
(assoc :name "authorize-url"
:json-schema/displayName "authorization URL"
:json-schema/description "URL for the authorization phase of the OIDC protocol"
:json-schema/group "body"
:json-schema/order 22
:json-schema/hidden false
:json-schema/sensitive true)))
(s/def ::token-url
(-> (st/spec ::cimi-core/token)
(assoc :name "token-url"
:json-schema/displayName "token URL"
:json-schema/description "URL for the obtaining a token in the OIDC protocol"
:json-schema/group "body"
:json-schema/order 23
:json-schema/hidden false
:json-schema/sensitive true)))
(s/def ::user-profile-url
(-> (st/spec ::cimi-core/token)
(assoc :name "user-profile-url"
:json-schema/displayName "user profile URL"
:json-schema/description "URL for user profile attributes"
:json-schema/group "body"
:json-schema/order 24
:json-schema/hidden false
:json-schema/sensitive true)))
(s/def ::public-key
(-> (st/spec ::cimi-core/nonblank-string) ;; allows jwk JSON representation
(assoc :name "public-key"
:json-schema/displayName "public key"
:json-schema/description "public key of the server in PEM or JWK JSON format"
:json-schema/group "body"
:json-schema/order 25
:json-schema/hidden false
:json-schema/sensitive true)))
(def configuration-template-keys-spec-req
{:req-un [::ps/instance ::client-id ::client-secret ::public-key ::authorize-url ::token-url ::user-profile-url]})
(def configuration-template-keys-spec-create
{:req-un [::ps/instance ::client-id ::client-secret ::public-key ::authorize-url ::token-url ::user-profile-url]})
Defines the contents of the Mi authentication configuration - template resource itself .
(s/def ::schema
(su/only-keys-maps ps/resource-keys-spec
configuration-template-keys-spec-req))
Defines the contents of the MitreId authentication template used in a create resource .
(s/def ::template
(su/only-keys-maps ps/template-keys-spec
configuration-template-keys-spec-create))
(s/def ::schema-create
(su/only-keys-maps ps/create-keys-spec
{:req-un [::template]}))
| null | https://raw.githubusercontent.com/nuvla/api-server/a64a61b227733f1a0a945003edf5abaf5150a15c/code/src/sixsq/nuvla/server/resources/spec/configuration_template_session_mitreid.cljc | clojure | allows jwk JSON representation | (ns sixsq.nuvla.server.resources.spec.configuration-template-session-mitreid
(:require
[clojure.spec.alpha :as s]
[sixsq.nuvla.server.resources.spec.configuration-template :as ps]
[sixsq.nuvla.server.resources.spec.core :as cimi-core]
[sixsq.nuvla.server.util.spec :as su]
[spec-tools.core :as st]))
(s/def ::client-id
(-> (st/spec ::cimi-core/token)
(assoc :name "client-id"
:json-schema/displayName "client ID"
:json-schema/description "MITREid client ID"
:json-schema/group "body"
:json-schema/order 20
:json-schema/hidden false
:json-schema/sensitive false)))
(s/def ::client-secret
(-> (st/spec ::cimi-core/token)
(assoc :name "client-secret"
:json-schema/displayName "client secret"
:json-schema/description "MITREid client secret associated with registered application"
:json-schema/group "body"
:json-schema/order 21
:json-schema/hidden false
:json-schema/sensitive true)))
(s/def ::authorize-url
(-> (st/spec ::cimi-core/token)
(assoc :name "authorize-url"
:json-schema/displayName "authorization URL"
:json-schema/description "URL for the authorization phase of the OIDC protocol"
:json-schema/group "body"
:json-schema/order 22
:json-schema/hidden false
:json-schema/sensitive true)))
(s/def ::token-url
(-> (st/spec ::cimi-core/token)
(assoc :name "token-url"
:json-schema/displayName "token URL"
:json-schema/description "URL for the obtaining a token in the OIDC protocol"
:json-schema/group "body"
:json-schema/order 23
:json-schema/hidden false
:json-schema/sensitive true)))
(s/def ::user-profile-url
(-> (st/spec ::cimi-core/token)
(assoc :name "user-profile-url"
:json-schema/displayName "user profile URL"
:json-schema/description "URL for user profile attributes"
:json-schema/group "body"
:json-schema/order 24
:json-schema/hidden false
:json-schema/sensitive true)))
(s/def ::public-key
(assoc :name "public-key"
:json-schema/displayName "public key"
:json-schema/description "public key of the server in PEM or JWK JSON format"
:json-schema/group "body"
:json-schema/order 25
:json-schema/hidden false
:json-schema/sensitive true)))
(def configuration-template-keys-spec-req
{:req-un [::ps/instance ::client-id ::client-secret ::public-key ::authorize-url ::token-url ::user-profile-url]})
(def configuration-template-keys-spec-create
{:req-un [::ps/instance ::client-id ::client-secret ::public-key ::authorize-url ::token-url ::user-profile-url]})
Defines the contents of the Mi authentication configuration - template resource itself .
(s/def ::schema
(su/only-keys-maps ps/resource-keys-spec
configuration-template-keys-spec-req))
Defines the contents of the MitreId authentication template used in a create resource .
(s/def ::template
(su/only-keys-maps ps/template-keys-spec
configuration-template-keys-spec-create))
(s/def ::schema-create
(su/only-keys-maps ps/create-keys-spec
{:req-un [::template]}))
|
118b376469a7957b6dab17a13b11cac101a7b831621072ae51220ae3e5def65e | change-metrics/monocle | Lentille.hs | {-# LANGUAGE DeriveAnyClass #-}
| A shared library between lentilles and macroscope
module Lentille (
-- * The lentille context
CrawlerEnv (..),
LentilleStream,
unlessStopped,
-- * Lentille Errors
LentilleError (..),
RequestLog (..),
GraphQLError (..),
-- * Facilities
getChangeId,
isMerged,
isClosed,
nobody,
toIdent,
ghostIdent,
sanitizeID,
isChangeTooOld,
swapDuration,
-- * Re-export
module Monocle.Class,
module Monocle.Logging,
) where
import Data.Text qualified as T
import Google.Protobuf.Timestamp qualified as T
import Monocle.Class
import Monocle.Client (MonocleClient)
import Monocle.Logging
import Monocle.Prelude
import Monocle.Protob.Change (
Change (changeUpdatedAt),
ChangeEvent,
ChangeEventOptionalDuration (ChangeEventOptionalDurationDuration),
ChangeOptionalDuration (ChangeOptionalDurationDuration),
Change_ChangeState (Change_ChangeStateClosed, Change_ChangeStateMerged),
Ident (..),
)
import Network.HTTP.Client qualified as HTTP
import Proto3.Suite (Enumerated (Enumerated))
import Effectful.Reader.Static qualified as E
-------------------------------------------------------------------------------
-- The Lentille context
data CrawlerEnv = CrawlerEnv
{ crawlerClient :: MonocleClient
, crawlerStop :: IORef Bool
}
-- | unlessStopped skips the action when the config is changed
unlessStopped :: E.Reader CrawlerEnv :> es => Eff es () -> Eff es ()
unlessStopped action = do
stopRef <- E.asks crawlerStop
-- TODO: replace IORef with Concurrent TVar
stopped <- unsafeEff_ (readIORef stopRef)
unless stopped action
data RequestLog = RequestLog
{ rlRequest :: HTTP.Request
, rlRequestBody :: LByteString
, rlResponse :: HTTP.Response LByteString
, rlResponseBody :: LByteString
}
deriving (Show)
instance ToJSON RequestLog where
toJSON (RequestLog _ body _ resp) =
object
["body" .= decodeUtf8 @Text body, "resp" .= decodeUtf8 @Text resp]
| ErrorGraphQL is a wrapper around the morpheus 's FetchError .
data GraphQLError = GraphQLError
{ -- TODO: keep the original error data type (instead of the Text)
err :: Text
, request :: RequestLog
}
deriving (Show, Generic, ToJSON)
data LentilleError
= DecodeError [Text]
| GraphError GraphQLError
deriving (Show, Generic, ToJSON)
instance Exception LentilleError
type LentilleStream es a = Stream (Of (Either LentilleError a)) (Eff es) ()
-------------------------------------------------------------------------------
-- Utility functions for crawlers
-------------------------------------------------------------------------------
getChangeId :: Text -> Text -> LText
getChangeId fullName iid = from . stripSpaces $ T.replace "/" "@" fullName <> "@" <> from iid
isMerged :: Enumerated Change_ChangeState -> Bool
isMerged state' = case state' of
Enumerated (Right Change_ChangeStateMerged) -> True
_otherwise -> False
isClosed :: Enumerated Change_ChangeState -> Bool
isClosed state' = case state' of
Enumerated (Right Change_ChangeStateClosed) -> True
_otherwise -> False
sanitizeID :: Text -> Text
sanitizeID = T.replace ":" "@" . T.replace "/" "@"
nobody :: Text
nobody = "ghost"
toIdent :: Text -> (Text -> Maybe Text) -> Text -> Ident
toIdent host cb username = Ident {..}
where
uid = host <> "/" <> username
identUid = from uid
identMuid = from $ fromMaybe username (cb uid)
ghostIdent :: Text -> Ident
ghostIdent host = toIdent host (const Nothing) nobody
isChangeTooOld :: UTCTime -> Either LentilleError (Change, [ChangeEvent]) -> Bool
isChangeTooOld _ (Left _) = True
isChangeTooOld date (Right (change, _)) =
case changeUpdatedAt change of
Just changeDate -> T.toUTCTime changeDate < date
_ -> True
swapDuration :: ChangeOptionalDuration -> ChangeEventOptionalDuration
swapDuration (ChangeOptionalDurationDuration v) = ChangeEventOptionalDurationDuration v
| null | https://raw.githubusercontent.com/change-metrics/monocle/a746679b01814ded5737a2584cd19cad574e0581/src/Lentille.hs | haskell | # LANGUAGE DeriveAnyClass #
* The lentille context
* Lentille Errors
* Facilities
* Re-export
-----------------------------------------------------------------------------
The Lentille context
| unlessStopped skips the action when the config is changed
TODO: replace IORef with Concurrent TVar
TODO: keep the original error data type (instead of the Text)
-----------------------------------------------------------------------------
Utility functions for crawlers
----------------------------------------------------------------------------- |
| A shared library between lentilles and macroscope
module Lentille (
CrawlerEnv (..),
LentilleStream,
unlessStopped,
LentilleError (..),
RequestLog (..),
GraphQLError (..),
getChangeId,
isMerged,
isClosed,
nobody,
toIdent,
ghostIdent,
sanitizeID,
isChangeTooOld,
swapDuration,
module Monocle.Class,
module Monocle.Logging,
) where
import Data.Text qualified as T
import Google.Protobuf.Timestamp qualified as T
import Monocle.Class
import Monocle.Client (MonocleClient)
import Monocle.Logging
import Monocle.Prelude
import Monocle.Protob.Change (
Change (changeUpdatedAt),
ChangeEvent,
ChangeEventOptionalDuration (ChangeEventOptionalDurationDuration),
ChangeOptionalDuration (ChangeOptionalDurationDuration),
Change_ChangeState (Change_ChangeStateClosed, Change_ChangeStateMerged),
Ident (..),
)
import Network.HTTP.Client qualified as HTTP
import Proto3.Suite (Enumerated (Enumerated))
import Effectful.Reader.Static qualified as E
data CrawlerEnv = CrawlerEnv
{ crawlerClient :: MonocleClient
, crawlerStop :: IORef Bool
}
unlessStopped :: E.Reader CrawlerEnv :> es => Eff es () -> Eff es ()
unlessStopped action = do
stopRef <- E.asks crawlerStop
stopped <- unsafeEff_ (readIORef stopRef)
unless stopped action
data RequestLog = RequestLog
{ rlRequest :: HTTP.Request
, rlRequestBody :: LByteString
, rlResponse :: HTTP.Response LByteString
, rlResponseBody :: LByteString
}
deriving (Show)
instance ToJSON RequestLog where
toJSON (RequestLog _ body _ resp) =
object
["body" .= decodeUtf8 @Text body, "resp" .= decodeUtf8 @Text resp]
| ErrorGraphQL is a wrapper around the morpheus 's FetchError .
data GraphQLError = GraphQLError
err :: Text
, request :: RequestLog
}
deriving (Show, Generic, ToJSON)
data LentilleError
= DecodeError [Text]
| GraphError GraphQLError
deriving (Show, Generic, ToJSON)
instance Exception LentilleError
type LentilleStream es a = Stream (Of (Either LentilleError a)) (Eff es) ()
getChangeId :: Text -> Text -> LText
getChangeId fullName iid = from . stripSpaces $ T.replace "/" "@" fullName <> "@" <> from iid
isMerged :: Enumerated Change_ChangeState -> Bool
isMerged state' = case state' of
Enumerated (Right Change_ChangeStateMerged) -> True
_otherwise -> False
isClosed :: Enumerated Change_ChangeState -> Bool
isClosed state' = case state' of
Enumerated (Right Change_ChangeStateClosed) -> True
_otherwise -> False
sanitizeID :: Text -> Text
sanitizeID = T.replace ":" "@" . T.replace "/" "@"
nobody :: Text
nobody = "ghost"
toIdent :: Text -> (Text -> Maybe Text) -> Text -> Ident
toIdent host cb username = Ident {..}
where
uid = host <> "/" <> username
identUid = from uid
identMuid = from $ fromMaybe username (cb uid)
ghostIdent :: Text -> Ident
ghostIdent host = toIdent host (const Nothing) nobody
isChangeTooOld :: UTCTime -> Either LentilleError (Change, [ChangeEvent]) -> Bool
isChangeTooOld _ (Left _) = True
isChangeTooOld date (Right (change, _)) =
case changeUpdatedAt change of
Just changeDate -> T.toUTCTime changeDate < date
_ -> True
swapDuration :: ChangeOptionalDuration -> ChangeEventOptionalDuration
swapDuration (ChangeOptionalDurationDuration v) = ChangeEventOptionalDurationDuration v
|
923901f29adbc858d42d5acc195f4828d10ecf5e7a4ebd127e76beb756be3fc9 | phylogeography/spread | svg_renderer.cljs | (ns analysis-viewer.svg-renderer
"
Render svg hiccup structure from geo-json maps.
Api :
- geojson->svg
"
(:require [clojure.spec.alpha :as s]
[clojure.string :as str]
[shared.geojson :as geojson]
[shared.math-utils :as math-utils]))
(def ^:dynamic *coord-transform-fn* identity)
(s/def ::geojson any?)
(s/def :html/color string?)
(s/def ::poly-stroke-color :html/color)
(s/def ::poly-fill-color :html/color)
(s/def ::poly-stroke-width number?)
(s/def ::point-color :html/color)
(s/def ::point-radius number?)
(s/def ::line-color :html/color)
(s/def ::line-width number?)
(s/def ::text-color :html/color)
(s/def ::text-size number?)
(s/def ::opts (s/keys :opt-un [::poly-stroke-color
::poly-fill-color
::poly-stroke-width
::point-color
::point-radius
::line-color
::line-width
::text-color]))
(s/def ::svg any?)
;; (s/fdef geojson->svg
: args ( s / cat : : : geojson
;; :opts ::opts)
;; :ret ::svg)
(declare geojson->svg)
(defn geojson->svg-point [{:keys [coordinates]} opts]
(let [[long lat] (*coord-transform-fn* coordinates)]
[:circle {:cx long :cy lat :r (:point-radius opts) :fill (:data-point-color opts)}]))
(defn svg-polygon [coords opts]
(let [all-polys (->> coords
(mapv (fn [cs]
[:polygon
{:points (->> cs
(mapv (fn [coord]
(->> (*coord-transform-fn* coord)
(str/join " "))))
(str/join ","))
:stroke (:poly-stroke-color opts)
:fill (:poly-fill-color opts)
:stroke-width (:poly-stroke-width opts)}])))]
(into [:g {}] all-polys)))
(defn geojson->svg-polygon [{:keys [coordinates]} opts]
(svg-polygon coordinates opts))
(defn geojson->svg-multi-polygon [{:keys [coordinates]} opts]
(let [all-paths (->> coordinates
(map (fn [poly-coords]
(svg-polygon poly-coords opts))))]
(into [:g {}] all-paths)))
(defn svg-line [[[x1 y1] [x2 y2]] opts]
[:line {:x1 x1 :y1 y1
:x2 x2 :y2 y2
:stroke (:line-color opts)
:stroke-width (:line-width opts)}])
(defn geojson->svg-line-string [{:keys [coordinates]} opts]
(svg-line (map *coord-transform-fn* coordinates) opts))
(defn geojson->svg-multi-line-string [{:keys [coordinates]} opts]
(let [all-lines (->> coordinates
(map (fn [coor]
(svg-line (*coord-transform-fn* coor) opts))))]
(into [:g {} all-lines])))
(defn text-for-box [box text opts]
(let [{:keys [min-x min-y max-x max-y]} box
[x1 y1] (*coord-transform-fn* [min-x min-y])
[x2 y2] (*coord-transform-fn* [max-x max-y])
[text-x text-y] [(+ (/ (Math/abs (- x1 x2)) 2) (min x1 x2))
(+ (/ (Math/abs (- y1 y2)) 2) (min y1 y2))]]
[:text.label {:x text-x :y text-y
;; :font-size (str (:text-size opts) "px")
:fill (:text-color opts)
:text-anchor "middle"} text]))
(defn geojson->svg-feature [{:keys [geometry properties]} opts]
(when geometry
(let [geo-box (geojson/geo-json-bounding-box geometry) ;; this is in [long lat]
feature-text (:name properties)]
(when (or (nil? (:clip-box opts))
(math-utils/box-overlap? (:clip-box opts)
(math-utils/map-box->proj-box geo-box)))
(into [:g {}] (cond-> [(geojson->svg geometry opts)]
feature-text (into [(text-for-box geo-box feature-text opts)])))))))
(defn geojson->svg-feature-collection [{:keys [features]} opts]
(into [:g {}] (mapv (fn [feat] (geojson->svg feat opts)) features)))
(defn geojson->svg-default [x _]
(throw (ex-info "Not implemented yet" {:type (:type x)})))
(defn geojson->svg [{:keys [type] :as geoj} opts]
(case (keyword type)
:Point (geojson->svg-point geoj opts)
:Polygon (geojson->svg-polygon geoj opts)
:MultiPolygon (geojson->svg-multi-polygon geoj opts)
:LineString (geojson->svg-line-string geoj opts)
:MultiLineString (geojson->svg-multi-line-string geoj opts)
:Feature (geojson->svg-feature geoj opts)
:FeatureCollection (geojson->svg-feature-collection geoj opts)
(geojson->svg-default geoj opts)
))
| null | https://raw.githubusercontent.com/phylogeography/spread/56f3500e6d83e0ebd50041dc336ffa0697d7baf8/src/cljs/analysis_viewer/svg_renderer.cljs | clojure | (s/fdef geojson->svg
:opts ::opts)
:ret ::svg)
:font-size (str (:text-size opts) "px")
this is in [long lat] | (ns analysis-viewer.svg-renderer
"
Render svg hiccup structure from geo-json maps.
Api :
- geojson->svg
"
(:require [clojure.spec.alpha :as s]
[clojure.string :as str]
[shared.geojson :as geojson]
[shared.math-utils :as math-utils]))
(def ^:dynamic *coord-transform-fn* identity)
(s/def ::geojson any?)
(s/def :html/color string?)
(s/def ::poly-stroke-color :html/color)
(s/def ::poly-fill-color :html/color)
(s/def ::poly-stroke-width number?)
(s/def ::point-color :html/color)
(s/def ::point-radius number?)
(s/def ::line-color :html/color)
(s/def ::line-width number?)
(s/def ::text-color :html/color)
(s/def ::text-size number?)
(s/def ::opts (s/keys :opt-un [::poly-stroke-color
::poly-fill-color
::poly-stroke-width
::point-color
::point-radius
::line-color
::line-width
::text-color]))
(s/def ::svg any?)
: args ( s / cat : : : geojson
(declare geojson->svg)
(defn geojson->svg-point [{:keys [coordinates]} opts]
(let [[long lat] (*coord-transform-fn* coordinates)]
[:circle {:cx long :cy lat :r (:point-radius opts) :fill (:data-point-color opts)}]))
(defn svg-polygon [coords opts]
(let [all-polys (->> coords
(mapv (fn [cs]
[:polygon
{:points (->> cs
(mapv (fn [coord]
(->> (*coord-transform-fn* coord)
(str/join " "))))
(str/join ","))
:stroke (:poly-stroke-color opts)
:fill (:poly-fill-color opts)
:stroke-width (:poly-stroke-width opts)}])))]
(into [:g {}] all-polys)))
(defn geojson->svg-polygon [{:keys [coordinates]} opts]
(svg-polygon coordinates opts))
(defn geojson->svg-multi-polygon [{:keys [coordinates]} opts]
(let [all-paths (->> coordinates
(map (fn [poly-coords]
(svg-polygon poly-coords opts))))]
(into [:g {}] all-paths)))
(defn svg-line [[[x1 y1] [x2 y2]] opts]
[:line {:x1 x1 :y1 y1
:x2 x2 :y2 y2
:stroke (:line-color opts)
:stroke-width (:line-width opts)}])
(defn geojson->svg-line-string [{:keys [coordinates]} opts]
(svg-line (map *coord-transform-fn* coordinates) opts))
(defn geojson->svg-multi-line-string [{:keys [coordinates]} opts]
(let [all-lines (->> coordinates
(map (fn [coor]
(svg-line (*coord-transform-fn* coor) opts))))]
(into [:g {} all-lines])))
(defn text-for-box [box text opts]
(let [{:keys [min-x min-y max-x max-y]} box
[x1 y1] (*coord-transform-fn* [min-x min-y])
[x2 y2] (*coord-transform-fn* [max-x max-y])
[text-x text-y] [(+ (/ (Math/abs (- x1 x2)) 2) (min x1 x2))
(+ (/ (Math/abs (- y1 y2)) 2) (min y1 y2))]]
[:text.label {:x text-x :y text-y
:fill (:text-color opts)
:text-anchor "middle"} text]))
(defn geojson->svg-feature [{:keys [geometry properties]} opts]
(when geometry
feature-text (:name properties)]
(when (or (nil? (:clip-box opts))
(math-utils/box-overlap? (:clip-box opts)
(math-utils/map-box->proj-box geo-box)))
(into [:g {}] (cond-> [(geojson->svg geometry opts)]
feature-text (into [(text-for-box geo-box feature-text opts)])))))))
(defn geojson->svg-feature-collection [{:keys [features]} opts]
(into [:g {}] (mapv (fn [feat] (geojson->svg feat opts)) features)))
(defn geojson->svg-default [x _]
(throw (ex-info "Not implemented yet" {:type (:type x)})))
(defn geojson->svg [{:keys [type] :as geoj} opts]
(case (keyword type)
:Point (geojson->svg-point geoj opts)
:Polygon (geojson->svg-polygon geoj opts)
:MultiPolygon (geojson->svg-multi-polygon geoj opts)
:LineString (geojson->svg-line-string geoj opts)
:MultiLineString (geojson->svg-multi-line-string geoj opts)
:Feature (geojson->svg-feature geoj opts)
:FeatureCollection (geojson->svg-feature-collection geoj opts)
(geojson->svg-default geoj opts)
))
|
766ddad48f6a4f4e85a2b31b0e34913c9be150792f21a073558e097556472691 | Zilliqa/scilla | discover.ml | open List
module C = Configurator.V1
let () =
C.main ~name:"foo" (fun c ->
let default : C.Pkg_config.package_conf = { libs = []; cflags = [] } in
let conf =
match C.Pkg_config.get c with
| None -> default
| Some pc -> (
match C.Pkg_config.query pc ~package:"openssl" with
| None -> default
| Some deps -> deps)
in
let lflags =
if Sys.os_type = "Unix" then
let ic = Unix.open_process_in "uname" in
let uname = input_line ic in
let () = close_in ic in
(* macOS requires -keep_dwarf_unwind for exceptions to work. *)
if uname = "Darwin" then [ "-cclib"; "-Wl,-keep_dwarf_unwind" ]
else []
else []
in
Find the absolute path to vcpkg 's libraries & headers .
Unfortunately , % { project_root } in the dune file always returns a relative path and in order
for -L to work properly , we need to give an absolute path .
We rely on a simple python program ( in vcpkg_cmd ) to do so portably .
Find the absolute path to vcpkg's libraries & headers.
Unfortunately, %{project_root} in the dune file always returns a relative path and in order
for -L to work properly, we need to give an absolute path.
We rely on a simple python program (in vcpkg_cmd) to do so portably.
*)
let vcpkg_cmd =
"python3 -c 'import os, sys; print(os.path.realpath(\"../../../../../"
^^ "%s" ^^ "\"))'"
in
let vcpkg_triplet_path =
input_line
(Unix.open_process_in
(Printf.sprintf vcpkg_cmd "scripts/vcpkg_triplet.sh"))
in
let vcpkg_triplet =
input_line (Unix.open_process_in vcpkg_triplet_path)
in
let vcpkg_include_dir =
input_line
(Unix.open_process_in
(Printf.sprintf vcpkg_cmd
("vcpkg_installed/" ^ vcpkg_triplet ^ "/include")))
in
let vcpkg_lib_dir =
input_line
(Unix.open_process_in
(Printf.sprintf vcpkg_cmd
("vcpkg_installed/" ^ vcpkg_triplet ^ "/lib")))
in
let c_flags = List.append conf.cflags [ "-I" ^ vcpkg_include_dir ] in
let clib_flags = List.append conf.libs [ "-L" ^ vcpkg_lib_dir ] in
C.Flags.write_sexp "c_flags.sexp" c_flags;
C.Flags.write_sexp "c_library_flags.sexp" clib_flags;
C.Flags.write_sexp "library_flags.sexp" lflags)
| null | https://raw.githubusercontent.com/Zilliqa/scilla/0f5ba940511f16f08197d509b121d724defe21c1/src/base/cpp/config/discover.ml | ocaml | macOS requires -keep_dwarf_unwind for exceptions to work. | open List
module C = Configurator.V1
let () =
C.main ~name:"foo" (fun c ->
let default : C.Pkg_config.package_conf = { libs = []; cflags = [] } in
let conf =
match C.Pkg_config.get c with
| None -> default
| Some pc -> (
match C.Pkg_config.query pc ~package:"openssl" with
| None -> default
| Some deps -> deps)
in
let lflags =
if Sys.os_type = "Unix" then
let ic = Unix.open_process_in "uname" in
let uname = input_line ic in
let () = close_in ic in
if uname = "Darwin" then [ "-cclib"; "-Wl,-keep_dwarf_unwind" ]
else []
else []
in
Find the absolute path to vcpkg 's libraries & headers .
Unfortunately , % { project_root } in the dune file always returns a relative path and in order
for -L to work properly , we need to give an absolute path .
We rely on a simple python program ( in vcpkg_cmd ) to do so portably .
Find the absolute path to vcpkg's libraries & headers.
Unfortunately, %{project_root} in the dune file always returns a relative path and in order
for -L to work properly, we need to give an absolute path.
We rely on a simple python program (in vcpkg_cmd) to do so portably.
*)
let vcpkg_cmd =
"python3 -c 'import os, sys; print(os.path.realpath(\"../../../../../"
^^ "%s" ^^ "\"))'"
in
let vcpkg_triplet_path =
input_line
(Unix.open_process_in
(Printf.sprintf vcpkg_cmd "scripts/vcpkg_triplet.sh"))
in
let vcpkg_triplet =
input_line (Unix.open_process_in vcpkg_triplet_path)
in
let vcpkg_include_dir =
input_line
(Unix.open_process_in
(Printf.sprintf vcpkg_cmd
("vcpkg_installed/" ^ vcpkg_triplet ^ "/include")))
in
let vcpkg_lib_dir =
input_line
(Unix.open_process_in
(Printf.sprintf vcpkg_cmd
("vcpkg_installed/" ^ vcpkg_triplet ^ "/lib")))
in
let c_flags = List.append conf.cflags [ "-I" ^ vcpkg_include_dir ] in
let clib_flags = List.append conf.libs [ "-L" ^ vcpkg_lib_dir ] in
C.Flags.write_sexp "c_flags.sexp" c_flags;
C.Flags.write_sexp "c_library_flags.sexp" clib_flags;
C.Flags.write_sexp "library_flags.sexp" lflags)
|
ba00f19948df42bc6f5fb4f8378b340df292e606b32fbd125bd16fddf7a27922 | ocsigen/js_of_ocaml | print_seq.ml | (* TEST
include testing
*)
(*
A test file for the Format module.
*)
open Testing;;
open Format;;
let say s = Printf.printf s;;
let pp_print_intseq = pp_print_seq ~pp_sep:(fun fmt () -> pp_print_char fmt ' ') pp_print_int;;
try
say "empty\n%!";
test (asprintf "%a%!" pp_print_intseq Seq.empty = "");
say "\nmisc\n%!";
test (asprintf "%a" pp_print_intseq (List.to_seq [0]) = "0");
test (asprintf "%a" pp_print_intseq (List.to_seq [0;1;2]) = "0 1 2");
test (asprintf "%a" pp_print_intseq (List.to_seq [0;0]) = "0 0");
say "\nend of tests\n%!";
with e ->
say "unexpected exception: %s\n%!" (Printexc.to_string e);
test false;
;;
| null | https://raw.githubusercontent.com/ocsigen/js_of_ocaml/31c8a3d9d4e34f3fd573dd5056e733233ca4f4f6/compiler/tests-ocaml/lib-format/print_seq.ml | ocaml | TEST
include testing
A test file for the Format module.
|
open Testing;;
open Format;;
let say s = Printf.printf s;;
let pp_print_intseq = pp_print_seq ~pp_sep:(fun fmt () -> pp_print_char fmt ' ') pp_print_int;;
try
say "empty\n%!";
test (asprintf "%a%!" pp_print_intseq Seq.empty = "");
say "\nmisc\n%!";
test (asprintf "%a" pp_print_intseq (List.to_seq [0]) = "0");
test (asprintf "%a" pp_print_intseq (List.to_seq [0;1;2]) = "0 1 2");
test (asprintf "%a" pp_print_intseq (List.to_seq [0;0]) = "0 0");
say "\nend of tests\n%!";
with e ->
say "unexpected exception: %s\n%!" (Printexc.to_string e);
test false;
;;
|
dce936d0e0c2bad7aad576edfa1ca2956b3cbe827956982b4b62a83fd85ac116 | ropas/sparrow | itv.ml | (***********************************************************************)
(* *)
Copyright ( c ) 2007 - present .
Programming Research Laboratory ( ROPAS ) , Seoul National University .
(* All rights reserved. *)
(* *)
This software is distributed under the term of the BSD license .
(* See the LICENSE file for details. *)
(* *)
(***********************************************************************)
open Vocab
(* ****************** *
* Widening threshold *
* ****************** *)
let threshold = BatSet.of_list [0;1;16;64]
(* ************************ *
* Integer = Z + {-oo, +oo} *
* ************************ *)
module Integer =
struct
type t = Int of int | MInf | PInf [@@deriving compare]
let pinf = PInf
let minf = MInf
let zero = Int 0
let of_int i = Int i
let to_string : t -> string = function
| Int i -> string_of_int i
| PInf -> "+oo"
| MInf -> "-oo"
let pp fmt x = Format.fprintf fmt "%s" (to_string x)
let le : t -> t -> bool = fun x y ->
match x, y with
| MInf, _ -> true
| _, PInf -> true
| Int i, Int j -> i <= j
| _, _ -> false
let eq : t -> t -> bool = fun x y ->
match x, y with
| MInf, MInf
| PInf, PInf -> true
| Int i, Int j -> i = j
| _, _ -> false
let absolute = function
| Int i -> Int (abs i)
| MInf -> PInf
| PInf -> PInf
let min : t -> t -> t = fun x y -> if le x y then x else y
let max : t -> t -> t = fun x y -> if le x y then y else x
let lower_widen : t -> t -> t = fun x y ->
if le y x then
if eq y x then y else
let filtered = BatSet.filter (fun k -> le (Int k) y) threshold in
if BatSet.is_empty filtered
then MInf else Int (BatSet.max_elt filtered)
else x
let upper_widen : t -> t -> t = fun x y ->
if le x y then
if eq x y then y else
let filtered = BatSet.filter (fun k -> le y (Int k)) threshold in
if BatSet.is_empty filtered
then PInf else Int (BatSet.min_elt filtered)
else x
let lower_narrow : t -> t -> t = fun x y ->
if le x y then
if eq x MInf || BatSet.exists (fun k -> x = Int k) threshold then y
else x
else invalid_arg ("itv.ml: Integer.lower_narrow (x, y). y < x : "^(to_string y)^" < "^(to_string x))
let upper_narrow : t -> t -> t = fun x y ->
if le y x then
if eq x PInf || BatSet.exists (fun k -> x = Int k) threshold then y
else x
else invalid_arg "itv.ml: Integer.upper_narrow (x, y). x < y"
let plus x y =
match x, y with
| Int n1, Int n2 -> Int (n1 + n2)
| PInf, MInf | MInf, PInf -> invalid_arg "itv.ml: Integer.plus"
| PInf, _ -> PInf
| MInf, _ -> MInf
| _, PInf -> PInf
| _, MInf -> MInf
let minus x y =
match x, y with
| Int n1, Int n2 -> Int (n1 - n2)
| PInf, PInf | MInf, MInf -> invalid_arg "itv.ml: Integer.minus"
| PInf, _ -> PInf
| MInf, _ -> MInf
| _, PInf -> MInf
| _, MInf -> PInf
let times x y =
match x, y with
| Int n1, Int n2 -> Int (n1 * n2)
| PInf, PInf
| MInf, MInf -> PInf
| PInf, MInf
| MInf, PInf -> MInf
| PInf, Int n
| Int n, PInf ->
if n < 0 then MInf else
if n > 0 then PInf else
Int 0
| (MInf, Int n)
| (Int n, MInf) ->
if n < 0 then PInf else
if n > 0 then MInf else
Int 0
let divide x y =
match x, y with
| Int n1, Int n2 ->
if n2 = 0 then invalid_arg "itv.ml: Integer.divide (_, 0)" else Int (n1 / n2)
| PInf, PInf
| MInf, MInf -> PInf
| MInf, PInf
| PInf, MInf -> MInf
| MInf, Int n ->
if n < 0 then PInf else
if n > 0 then MInf else
invalid_arg "itv.ml: Integer.divide (-oo, 0)"
| PInf, Int n ->
if n < 0 then MInf else
if n > 0 then PInf else
invalid_arg "itv.ml: Integer.divide (+oo, 0)"
| Int _, PInf
| Int _, MInf -> Int 0
let min4 : t -> t -> t -> t -> t = fun x y z w ->
min (min x y) (min z w)
let max4 : t -> t -> t -> t -> t = fun x y z w ->
max (max x y) (max z w)
end
* { 6 Main definitions of interval }
open Integer
type t = V of Integer.t * Integer.t | Bot [@@deriving compare]
let zero = V (Int 0, Int 0)
let one = V (Int 1, Int 1)
let pos = V (Int 1, PInf)
let neg = V (MInf, Int (-1))
let nat = V (Int 0, PInf)
let upper = function V (_, Int x) -> x | _ -> invalid_arg "Itv.upper"
let lower = function V (Int x, _) -> x | _ -> invalid_arg "Itv.lower"
let upper_integer = function V (_, x) -> x | _ -> invalid_arg "Itv.upper_integer"
let lower_integer = function V (x, _) -> x | _ -> invalid_arg "Itv.lower_integer"
let of_int : int -> t = fun i -> V (Int i, Int i)
let of_ints : int -> int -> t = fun lb ub -> V (Int lb, Int ub)
let of_integer : Integer.t -> Integer.t -> t = fun l u -> V (l, u)
let to_string : t -> string = function
| Bot -> "bot"
| V (l, u) -> "["^(Integer.to_string l)^", "^(Integer.to_string u)^"]"
let pp fmt = function
| Bot -> Format.fprintf fmt "bot"
| V (l, u) -> Format.fprintf fmt "[%a, %a]" Integer.pp l Integer.pp u
let to_json : t -> Yojson.Safe.json = fun itv ->
`String (to_string itv)
let is_bot : t -> bool = function
| Bot -> true
| V (l, u) -> l = PInf || u = MInf || not (Integer.le l u)
* Normalizes invalid intervals such as [ \[u , l\ ] ] with [ u > l ] to
[ Bot ] .
[Bot].*)
let normalize x = if is_bot x then Bot else x
let absolute = function
| Bot -> Bot
| V (l, u) ->
if Integer.le Integer.zero l then V (l, u)
else if Integer.le l Integer.zero && Integer.le Integer.zero u then V (Integer.zero, Integer.max (Integer.absolute l) u)
else V (Integer.absolute u, Integer.absolute l)
let le : t -> t -> bool = fun x y ->
if is_bot x then true else
if is_bot y then false else
match x, y with
| V (l1, u1), V (l2, u2) -> Integer.le l2 l1 && Integer.le u1 u2
| _, _ -> assert false
let eq : t -> t -> bool = fun x y ->
if is_bot x && is_bot y then true else
if is_bot x || is_bot y then false else
match x, y with
| V (l1, u1), V (l2, u2) -> Integer.eq l2 l1 && Integer.eq u1 u2
| _, _ -> assert false
let top : t = V (MInf, PInf)
let bot : t = Bot
let join : t -> t -> t = fun x y ->
if le x y then y else
if le y x then x else
if is_bot x then normalize y else
if is_bot y then normalize x else
match x, y with
| V (l1, u1), V (l2, u2) -> V (Integer.min l1 l2, Integer.max u1 u2)
| _, _ -> assert false
let meet : t -> t -> t = fun x y ->
if le x y then x else
if le y x then y else
if is_bot x then Bot else
if is_bot y then Bot else
match x, y with
| V (l1, u1), V (l2, u2) ->
normalize (V (Integer.max l1 l2, Integer.min u1 u2))
| _, _ -> assert false
let widen : t -> t -> t = fun x y ->
if x == y then x else
if is_bot x then normalize y else
if is_bot y then normalize x else
match x, y with
| V (l1, u1), V (l2, u2) ->
V (Integer.lower_widen l1 l2, Integer.upper_widen u1 u2)
| _, _ -> assert false
let narrow : t -> t -> t = fun x y ->
if x == y then x else
if is_bot y then Bot else
if is_bot x then invalid_arg "itv.ml: narrow(bot, _)" else
match x, y with
| V (l1, u1), V (l2, u2) ->
V (Integer.lower_narrow l1 l2, Integer.upper_narrow u1 u2)
| _, _ -> assert false
* { 6 Auxiliary functions for interval }
let open_right (x:t) : bool =
if is_bot x then false else
match x with
| V (_, PInf) -> true
| _ -> false
let close_left (x:t) : bool =
if is_bot x then false else
match x with
| V (Int _, _) -> true
| _ -> false
let open_left (x:t) : bool =
if is_bot x then false else
match x with
| V (MInf, _) -> true
| _ -> false
let is_range (x:t) : bool = not (is_bot x)
let is_const (x:t) : bool =
match x with
| V (Int x, Int y) -> x = y
| _ -> false
let is_finite (x:t) : bool =
if is_bot x then false else
match x with
| V (Int _, Int _) -> true
| _ -> false
let is_infinite x = not (is_finite x)
let is_negative (x:t) : bool =
if is_bot x then false else
match x with
| V (MInf, _) -> true
| V (Int x, Int _) -> x < 0
| _ -> false
let height (x:t) : int =
let h_bound = 1000 in
if is_bot x then 0 else
match x with
| V (Int l, Int u) -> if u - l + 1 > h_bound then h_bound else u - l + 1
| _ -> h_bound
let diff (x:t) : int =
if is_bot x then 0 else
match x with
| V (Int l, Int u) -> u - l
| _ -> 0
* { 6 Binary / Unary operations for interval }
let plus (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (l1, u1), V (l2, u2) -> V (Integer.plus l1 l2, Integer.plus u1 u2)
| _, _ -> assert false
let minus (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (l1, u1), V (l2, u2) -> V (Integer.minus l1 u2, Integer.minus u1 l2)
| _, _ -> assert false
let times (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (l1, u1), V (l2, u2) ->
let x1 = Integer.times l1 l2 in
let x2 = Integer.times l1 u2 in
let x3 = Integer.times u1 l2 in
let x4 = Integer.times u1 u2 in
V (Integer.min4 x1 x2 x3 x4, Integer.max4 x1 x2 x3 x4)
| _, _ -> assert false
let divide (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
if le (V (Int 0, Int 0)) y then top else
match x, y with
| V (l1, u1), V (l2, u2) ->
let x1 = Integer.divide l1 l2 in
let x2 = Integer.divide l1 u2 in
let x3 = Integer.divide u1 l2 in
let x4 = Integer.divide u1 u2 in
V (Integer.min4 x1 x2 x3 x4, Integer.max4 x1 x2 x3 x4)
| _, _ -> assert false
let false_itv : t = V (Int 0, Int 0)
let true_itv : t = V (Int 1, Int 1)
let unknown_bool_itv : t = V (Int 0, Int 1)
let l_and (x:t) (y:t) : t =
if is_bot x || is_bot y then
Bot
else if eq false_itv x || eq false_itv y then
false_itv
else if not (le false_itv x) && not (le false_itv y) then
true_itv
else
unknown_bool_itv
let l_or (x:t) (y:t) : t =
if is_bot x || is_bot y then
Bot
else if eq false_itv x && eq false_itv y then
false_itv
else if not (le false_itv x) || not (le false_itv y) then
true_itv
else
unknown_bool_itv
let eq_itv (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (Int l1, Int u1), V (Int l2, Int u2)
when l1 = u1 && u1 = l2 && l2 = u2 -> true_itv
| V (_, Int u1), V (Int l2, _) when u1 < l2 -> false_itv
| V (Int l1, _), V (_, Int u2) when u2 < l1 -> false_itv
| _, _ -> unknown_bool_itv
let ne_itv (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (Int l1, Int u1), V (Int l2, Int u2)
when l1 = u1 && u1 = l2 && l2 = u2 -> false_itv
| V (_, Int u1), V (Int l2, _) when u1 < l2 -> true_itv
| V (Int l1, _), V (_, Int u2) when u2 < l1 -> true_itv
| _, _ -> unknown_bool_itv
let lt_itv (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (_, Int u1), V (Int l2, _) when u1 < l2 -> true_itv
| V (Int l1, _), V (_, Int u2) when u2 <= l1 -> false_itv
| _, _ -> unknown_bool_itv
let le_itv (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (_, Int u1), V (Int l2, _) when u1 <= l2 -> true_itv
| V (Int l1, _), V (_, Int u2) when u2 < l1 -> false_itv
| _, _ -> unknown_bool_itv
let gt_itv (x:t) (y:t) : t = lt_itv y x
let ge_itv (x:t) (y:t) : t = le_itv y x
let l_not (x:t) : t =
if is_bot x then Bot else
if eq false_itv x then true_itv else
if le false_itv x then unknown_bool_itv else
false_itv
let unknown_binary (x:t) (y:t) : t =
if is_bot x || is_bot y then bot
else top
let unknown_unary (x:t) : t =
if is_bot x then Bot
else top
let l_shift (x:t) (y:t) : t =
match x, y with
V (Int l1, Int u1), V (Int l2, Int u2) when l1 = u1 && l2 = u2 ->
let x = l1 lsl l2 in
V (Int x, Int x)
| _ -> unknown_binary x y
(* Value range of a C integral type, as an interval.
   NOTE(review): some ranges look deliberately over-approximated so that
   one entry covers both signed and unsigned interpretations (e.g.
   [IChar] spans [-128, 255]), and 64-bit types are given 32-bit ranges.
   However, the upper bound 2147483648 on the signed 32-bit entries
   exceeds INT_MAX (2147483647) -- confirm whether that is intentional
   slack or an off-by-one. *)
let itv_of_type : Cil.typ -> t = function
  | Cil.TInt (Cil.IUChar, _) -> of_ints 0 255
  | Cil.TInt (Cil.IUShort, _) -> of_ints 0 65535
  | Cil.TInt (Cil.IUInt, _) | Cil.TInt (Cil.ILong, _)
  | Cil.TInt (Cil.IULongLong, _) -> of_ints 0 4294967295
  | Cil.TInt (Cil.IChar, _) -> of_ints (-128) 255
  | Cil.TInt (Cil.IShort, _) -> of_ints (-32768) 32767
  | Cil.TInt (Cil.IInt, _) | Cil.TInt (Cil.IULong, _)
  | Cil.TInt (Cil.ILongLong, _) -> of_ints (-2147483648) 2147483648
  (* non-integral (or unknown) types: no range information *)
  | _ -> top
(* Abstract semantics of a C integer cast from [from_typ] to [to_typ].
   When overflow tracking is enabled ([!Options.int_overflow]) the
   result is widened to [top] whenever the value may not fit in the
   destination type; otherwise only signedness is adjusted.
   NOTE(review): [from_size <= to_size] compares two values of the
   interval type [t] with OCaml's polymorphic comparison, which only
   behaves like a numeric comparison when both sides are finite
   [V (Int _, Int _)] intervals -- confirm the [top] fallback from
   [byteSizeOf] failures is handled as intended. *)
let cast : Cil.typ -> Cil.typ -> t -> t
= fun from_typ to_typ itv ->
  if !Options.int_overflow then
    begin
      match itv with
        Bot -> Bot
      | _ ->
        (* byte sizes of the two types, lifted into intervals; [top] if
           the size cannot be computed *)
        let (from_size, to_size) =
          ((try CilHelper.byteSizeOf from_typ |> of_int with _ -> top),
           (try CilHelper.byteSizeOf to_typ |> of_int with _ -> top))
        in
        (* unsigned -> unsigned *)
        if CilHelper.is_unsigned from_typ && CilHelper.is_unsigned to_typ then
          if from_size <= to_size then itv
          else if Integer.le (upper_integer itv) (upper_integer (itv_of_type to_typ)) then itv
          else top (* possibly overflow *)
        (* signed -> unsigned: a widening cast reinterprets negatives *)
        else if not (CilHelper.is_unsigned from_typ) && CilHelper.is_unsigned to_typ then
          if from_size <= to_size then absolute itv
          else if Integer.le (upper_integer itv) (upper_integer (itv_of_type to_typ)) then itv
          else top (* possibly overflow *)
        (* unsigned -> signed: only a strictly wider type is always safe *)
        else if CilHelper.is_unsigned from_typ && not (CilHelper.is_unsigned to_typ) then
          if from_size < to_size then itv
          else if Integer.le (upper_integer itv) (upper_integer (itv_of_type to_typ)) then itv
          else top (* possibly overflow *)
        (* signed -> signed *)
        else
          if from_size <= to_size then itv
          else if Integer.le (upper_integer itv) (upper_integer (itv_of_type to_typ)) then itv
          else top (* possibly overflow *)
    end
  else
    begin
      (* overflow tracking disabled: only adjust for signedness *)
      if CilHelper.is_unsigned to_typ then absolute itv
      else itv
    end
(* Refine the interval [x] under the assumption that the condition
   [x op y] holds (used when analysing branch conditions).  The result
   is re-normalised, so an unsatisfiable condition yields [Bot].
   Raises [Invalid_argument] for operators without a pruning rule. *)
let prune : Cil.binop -> t -> t -> t = fun op x y ->
  if is_bot x || is_bot y then Bot else
    let pruned =
      match op, x, y with
      (* x < y: cap the upper bound of x at (upper bound of y) - 1 *)
      | Cil.Lt, V (a, b), V (c, d) -> V (a, Integer.min b (Integer.minus d (Int 1)))
      (* x > y: raise the lower bound of x to (lower bound of y) + 1 *)
      | Cil.Gt, V (a, b), V (c, d) -> V (Integer.max a (Integer.plus c (Int 1)), b)
      | Cil.Le, V (a, b), V (c, d) -> V (a, Integer.min b d)
      | Cil.Ge, V (a, b), V (c, d) -> V (Integer.max a c, b)
      (* x = y: intersect the two intervals *)
      | Cil.Eq, V (a, b), V (c, d) -> meet x y
      (* x <> c for a singleton c touching a bound of x: shrink that bound *)
      | Cil.Ne, V (a, b), V (c, d) when Integer.eq b c && Integer.eq c d ->
        V (a, Integer.minus b (Int 1))
      | Cil.Ne, V (a, b), V (c, d) when Integer.eq a c && Integer.eq c d ->
        V (Integer.plus a (Int 1), b)
      (* x <> y with y not a boundary singleton: no refinement possible *)
      | Cil.Ne, V _, V _ -> x
      | _ -> invalid_arg "itv.ml:prune" in
    normalize pruned
| null | https://raw.githubusercontent.com/ropas/sparrow/3ec055b8c87b5c8340ef3ed6cde34f5835865b31/src/domain/itv.ml | ocaml | *********************************************************************
All rights reserved.
See the LICENSE file for details.
*********************************************************************
****************** *
* Widening threshold *
* ******************
************************ *
* Integer = Z + {-oo, +oo} *
* ************************
possibly overflow
possibly overflow
possibly overflow
possibly overflow | Copyright ( c ) 2007 - present .
Programming Research Laboratory ( ROPAS ) , Seoul National University .
This software is distributed under the term of the BSD license .
open Vocab
let threshold = BatSet.of_list [0;1;16;64]
module Integer =
struct
type t = Int of int | MInf | PInf [@@deriving compare]
let pinf = PInf
let minf = MInf
let zero = Int 0
let of_int i = Int i
let to_string : t -> string = function
| Int i -> string_of_int i
| PInf -> "+oo"
| MInf -> "-oo"
let pp fmt x = Format.fprintf fmt "%s" (to_string x)
let le : t -> t -> bool = fun x y ->
match x, y with
| MInf, _ -> true
| _, PInf -> true
| Int i, Int j -> i <= j
| _, _ -> false
let eq : t -> t -> bool = fun x y ->
match x, y with
| MInf, MInf
| PInf, PInf -> true
| Int i, Int j -> i = j
| _, _ -> false
let absolute = function
| Int i -> Int (abs i)
| MInf -> PInf
| PInf -> PInf
let min : t -> t -> t = fun x y -> if le x y then x else y
let max : t -> t -> t = fun x y -> if le x y then y else x
(* Widening on the lower bound with thresholds: if the new bound [y]
   descends below [x], jump down to the largest threshold still below
   [y], or to -oo when no threshold qualifies.  Guarantees termination
   of the fixpoint iteration. *)
let lower_widen : t -> t -> t = fun x y ->
  if le y x then
    if eq y x then y else
      let filtered = BatSet.filter (fun k -> le (Int k) y) threshold in
      if BatSet.is_empty filtered
      then MInf else Int (BatSet.max_elt filtered)
  else x
(* Widening on the upper bound with thresholds: symmetric to
   [lower_widen], jumping up to the smallest threshold above [y], or to
   +oo when no threshold qualifies. *)
let upper_widen : t -> t -> t = fun x y ->
  if le x y then
    if eq x y then y else
      let filtered = BatSet.filter (fun k -> le y (Int k)) threshold in
      if BatSet.is_empty filtered
      then PInf else Int (BatSet.min_elt filtered)
  else x
let lower_narrow : t -> t -> t = fun x y ->
if le x y then
if eq x MInf || BatSet.exists (fun k -> x = Int k) threshold then y
else x
else invalid_arg ("itv.ml: Integer.lower_narrow (x, y). y < x : "^(to_string y)^" < "^(to_string x))
let upper_narrow : t -> t -> t = fun x y ->
if le y x then
if eq x PInf || BatSet.exists (fun k -> x = Int k) threshold then y
else x
else invalid_arg "itv.ml: Integer.upper_narrow (x, y). x < y"
let plus x y =
match x, y with
| Int n1, Int n2 -> Int (n1 + n2)
| PInf, MInf | MInf, PInf -> invalid_arg "itv.ml: Integer.plus"
| PInf, _ -> PInf
| MInf, _ -> MInf
| _, PInf -> PInf
| _, MInf -> MInf
let minus x y =
match x, y with
| Int n1, Int n2 -> Int (n1 - n2)
| PInf, PInf | MInf, MInf -> invalid_arg "itv.ml: Integer.minus"
| PInf, _ -> PInf
| MInf, _ -> MInf
| _, PInf -> MInf
| _, MInf -> PInf
let times x y =
match x, y with
| Int n1, Int n2 -> Int (n1 * n2)
| PInf, PInf
| MInf, MInf -> PInf
| PInf, MInf
| MInf, PInf -> MInf
| PInf, Int n
| Int n, PInf ->
if n < 0 then MInf else
if n > 0 then PInf else
Int 0
| (MInf, Int n)
| (Int n, MInf) ->
if n < 0 then PInf else
if n > 0 then MInf else
Int 0
(* Division on Z extended with -oo/+oo.
   Raises [Invalid_argument] when the divisor is zero (finite or when an
   infinite dividend meets a zero divisor); a finite value divided by an
   infinity is 0. *)
let divide x y =
  match x, y with
  | Int n1, Int n2 ->
    if n2 = 0 then invalid_arg "itv.ml: Integer.divide (_, 0)" else Int (n1 / n2)
  | PInf, PInf
  | MInf, MInf -> PInf
  | MInf, PInf
  | PInf, MInf -> MInf
  | MInf, Int n ->
    if n < 0 then PInf else
    if n > 0 then MInf else
      invalid_arg "itv.ml: Integer.divide (-oo, 0)"
  | PInf, Int n ->
    if n < 0 then MInf else
    if n > 0 then PInf else
      invalid_arg "itv.ml: Integer.divide (+oo, 0)"
  | Int _, PInf
  | Int _, MInf -> Int 0
let min4 : t -> t -> t -> t -> t = fun x y z w ->
min (min x y) (min z w)
let max4 : t -> t -> t -> t -> t = fun x y z w ->
max (max x y) (max z w)
end
(** {6 Main definitions of interval} *)
open Integer
type t = V of Integer.t * Integer.t | Bot [@@deriving compare]
let zero = V (Int 0, Int 0)
let one = V (Int 1, Int 1)
let pos = V (Int 1, PInf)
let neg = V (MInf, Int (-1))
let nat = V (Int 0, PInf)
let upper = function V (_, Int x) -> x | _ -> invalid_arg "Itv.upper"
let lower = function V (Int x, _) -> x | _ -> invalid_arg "Itv.lower"
let upper_integer = function V (_, x) -> x | _ -> invalid_arg "Itv.upper_integer"
let lower_integer = function V (x, _) -> x | _ -> invalid_arg "Itv.lower_integer"
let of_int : int -> t = fun i -> V (Int i, Int i)
let of_ints : int -> int -> t = fun lb ub -> V (Int lb, Int ub)
let of_integer : Integer.t -> Integer.t -> t = fun l u -> V (l, u)
let to_string : t -> string = function
| Bot -> "bot"
| V (l, u) -> "["^(Integer.to_string l)^", "^(Integer.to_string u)^"]"
let pp fmt = function
| Bot -> Format.fprintf fmt "bot"
| V (l, u) -> Format.fprintf fmt "[%a, %a]" Integer.pp l Integer.pp u
let to_json : t -> Yojson.Safe.json = fun itv ->
`String (to_string itv)
let is_bot : t -> bool = function
| Bot -> true
| V (l, u) -> l = PInf || u = MInf || not (Integer.le l u)
(** Normalizes invalid intervals such as [u, l] with [u > l] to
    [Bot]. *)
let normalize x = if is_bot x then Bot else x
let absolute = function
| Bot -> Bot
| V (l, u) ->
if Integer.le Integer.zero l then V (l, u)
else if Integer.le l Integer.zero && Integer.le Integer.zero u then V (Integer.zero, Integer.max (Integer.absolute l) u)
else V (Integer.absolute u, Integer.absolute l)
let le : t -> t -> bool = fun x y ->
if is_bot x then true else
if is_bot y then false else
match x, y with
| V (l1, u1), V (l2, u2) -> Integer.le l2 l1 && Integer.le u1 u2
| _, _ -> assert false
let eq : t -> t -> bool = fun x y ->
if is_bot x && is_bot y then true else
if is_bot x || is_bot y then false else
match x, y with
| V (l1, u1), V (l2, u2) -> Integer.eq l2 l1 && Integer.eq u1 u2
| _, _ -> assert false
let top : t = V (MInf, PInf)
let bot : t = Bot
let join : t -> t -> t = fun x y ->
if le x y then y else
if le y x then x else
if is_bot x then normalize y else
if is_bot y then normalize x else
match x, y with
| V (l1, u1), V (l2, u2) -> V (Integer.min l1 l2, Integer.max u1 u2)
| _, _ -> assert false
let meet : t -> t -> t = fun x y ->
if le x y then x else
if le y x then y else
if is_bot x then Bot else
if is_bot y then Bot else
match x, y with
| V (l1, u1), V (l2, u2) ->
normalize (V (Integer.max l1 l2, Integer.min u1 u2))
| _, _ -> assert false
let widen : t -> t -> t = fun x y ->
if x == y then x else
if is_bot x then normalize y else
if is_bot y then normalize x else
match x, y with
| V (l1, u1), V (l2, u2) ->
V (Integer.lower_widen l1 l2, Integer.upper_widen u1 u2)
| _, _ -> assert false
let narrow : t -> t -> t = fun x y ->
if x == y then x else
if is_bot y then Bot else
if is_bot x then invalid_arg "itv.ml: narrow(bot, _)" else
match x, y with
| V (l1, u1), V (l2, u2) ->
V (Integer.lower_narrow l1 l2, Integer.upper_narrow u1 u2)
| _, _ -> assert false
(** {6 Auxiliary functions for interval} *)
let open_right (x:t) : bool =
if is_bot x then false else
match x with
| V (_, PInf) -> true
| _ -> false
let close_left (x:t) : bool =
if is_bot x then false else
match x with
| V (Int _, _) -> true
| _ -> false
let open_left (x:t) : bool =
if is_bot x then false else
match x with
| V (MInf, _) -> true
| _ -> false
let is_range (x:t) : bool = not (is_bot x)
let is_const (x:t) : bool =
match x with
| V (Int x, Int y) -> x = y
| _ -> false
let is_finite (x:t) : bool =
if is_bot x then false else
match x with
| V (Int _, Int _) -> true
| _ -> false
let is_infinite x = not (is_finite x)
let is_negative (x:t) : bool =
if is_bot x then false else
match x with
| V (MInf, _) -> true
| V (Int x, Int _) -> x < 0
| _ -> false
let height (x:t) : int =
let h_bound = 1000 in
if is_bot x then 0 else
match x with
| V (Int l, Int u) -> if u - l + 1 > h_bound then h_bound else u - l + 1
| _ -> h_bound
let diff (x:t) : int =
if is_bot x then 0 else
match x with
| V (Int l, Int u) -> u - l
| _ -> 0
(** {6 Binary / Unary operations for interval} *)
let plus (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (l1, u1), V (l2, u2) -> V (Integer.plus l1 l2, Integer.plus u1 u2)
| _, _ -> assert false
let minus (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (l1, u1), V (l2, u2) -> V (Integer.minus l1 u2, Integer.minus u1 l2)
| _, _ -> assert false
let times (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (l1, u1), V (l2, u2) ->
let x1 = Integer.times l1 l2 in
let x2 = Integer.times l1 u2 in
let x3 = Integer.times u1 l2 in
let x4 = Integer.times u1 u2 in
V (Integer.min4 x1 x2 x3 x4, Integer.max4 x1 x2 x3 x4)
| _, _ -> assert false
let divide (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
if le (V (Int 0, Int 0)) y then top else
match x, y with
| V (l1, u1), V (l2, u2) ->
let x1 = Integer.divide l1 l2 in
let x2 = Integer.divide l1 u2 in
let x3 = Integer.divide u1 l2 in
let x4 = Integer.divide u1 u2 in
V (Integer.min4 x1 x2 x3 x4, Integer.max4 x1 x2 x3 x4)
| _, _ -> assert false
let false_itv : t = V (Int 0, Int 0)
let true_itv : t = V (Int 1, Int 1)
let unknown_bool_itv : t = V (Int 0, Int 1)
let l_and (x:t) (y:t) : t =
if is_bot x || is_bot y then
Bot
else if eq false_itv x || eq false_itv y then
false_itv
else if not (le false_itv x) && not (le false_itv y) then
true_itv
else
unknown_bool_itv
let l_or (x:t) (y:t) : t =
if is_bot x || is_bot y then
Bot
else if eq false_itv x && eq false_itv y then
false_itv
else if not (le false_itv x) || not (le false_itv y) then
true_itv
else
unknown_bool_itv
let eq_itv (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (Int l1, Int u1), V (Int l2, Int u2)
when l1 = u1 && u1 = l2 && l2 = u2 -> true_itv
| V (_, Int u1), V (Int l2, _) when u1 < l2 -> false_itv
| V (Int l1, _), V (_, Int u2) when u2 < l1 -> false_itv
| _, _ -> unknown_bool_itv
let ne_itv (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (Int l1, Int u1), V (Int l2, Int u2)
when l1 = u1 && u1 = l2 && l2 = u2 -> false_itv
| V (_, Int u1), V (Int l2, _) when u1 < l2 -> true_itv
| V (Int l1, _), V (_, Int u2) when u2 < l1 -> true_itv
| _, _ -> unknown_bool_itv
let lt_itv (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (_, Int u1), V (Int l2, _) when u1 < l2 -> true_itv
| V (Int l1, _), V (_, Int u2) when u2 <= l1 -> false_itv
| _, _ -> unknown_bool_itv
let le_itv (x:t) (y:t) : t =
if is_bot x || is_bot y then Bot else
match x, y with
| V (_, Int u1), V (Int l2, _) when u1 <= l2 -> true_itv
| V (Int l1, _), V (_, Int u2) when u2 < l1 -> false_itv
| _, _ -> unknown_bool_itv
let gt_itv (x:t) (y:t) : t = lt_itv y x
let ge_itv (x:t) (y:t) : t = le_itv y x
let l_not (x:t) : t =
if is_bot x then Bot else
if eq false_itv x then true_itv else
if le false_itv x then unknown_bool_itv else
false_itv
let unknown_binary (x:t) (y:t) : t =
if is_bot x || is_bot y then bot
else top
let unknown_unary (x:t) : t =
if is_bot x then Bot
else top
let l_shift (x:t) (y:t) : t =
match x, y with
V (Int l1, Int u1), V (Int l2, Int u2) when l1 = u1 && l2 = u2 ->
let x = l1 lsl l2 in
V (Int x, Int x)
| _ -> unknown_binary x y
let itv_of_type : Cil.typ -> t = function
| Cil.TInt (Cil.IUChar, _) -> of_ints 0 255
| Cil.TInt (Cil.IUShort, _) -> of_ints 0 65535
| Cil.TInt (Cil.IUInt, _) | Cil.TInt (Cil.ILong, _)
| Cil.TInt (Cil.IULongLong, _) -> of_ints 0 4294967295
| Cil.TInt (Cil.IChar, _) -> of_ints (-128) 255
| Cil.TInt (Cil.IShort, _) -> of_ints (-32768) 32767
| Cil.TInt (Cil.IInt, _) | Cil.TInt (Cil.IULong, _)
| Cil.TInt (Cil.ILongLong, _) -> of_ints (-2147483648) 2147483648
| _ -> top
let cast : Cil.typ -> Cil.typ -> t -> t
= fun from_typ to_typ itv ->
if !Options.int_overflow then
begin
match itv with
Bot -> Bot
| _ ->
let (from_size, to_size) =
((try CilHelper.byteSizeOf from_typ |> of_int with _ -> top),
(try CilHelper.byteSizeOf to_typ |> of_int with _ -> top))
in
if CilHelper.is_unsigned from_typ && CilHelper.is_unsigned to_typ then
if from_size <= to_size then itv
else if Integer.le (upper_integer itv) (upper_integer (itv_of_type to_typ)) then itv
else if not (CilHelper.is_unsigned from_typ) && CilHelper.is_unsigned to_typ then
if from_size <= to_size then absolute itv
else if Integer.le (upper_integer itv) (upper_integer (itv_of_type to_typ)) then itv
else if CilHelper.is_unsigned from_typ && not (CilHelper.is_unsigned to_typ) then
if from_size < to_size then itv
else if Integer.le (upper_integer itv) (upper_integer (itv_of_type to_typ)) then itv
else
if from_size <= to_size then itv
else if Integer.le (upper_integer itv) (upper_integer (itv_of_type to_typ)) then itv
end
else
begin
if CilHelper.is_unsigned to_typ then absolute itv
else itv
end
let prune : Cil.binop -> t -> t -> t = fun op x y ->
if is_bot x || is_bot y then Bot else
let pruned =
match op, x, y with
| Cil.Lt, V (a, b), V (c, d) -> V (a, Integer.min b (Integer.minus d (Int 1)))
| Cil.Gt, V (a, b), V (c, d) -> V (Integer.max a (Integer.plus c (Int 1)), b)
| Cil.Le, V (a, b), V (c, d) -> V (a, Integer.min b d)
| Cil.Ge, V (a, b), V (c, d) -> V (Integer.max a c, b)
| Cil.Eq, V (a, b), V (c, d) -> meet x y
| Cil.Ne, V (a, b), V (c, d) when Integer.eq b c && Integer.eq c d ->
V (a, Integer.minus b (Int 1))
| Cil.Ne, V (a, b), V (c, d) when Integer.eq a c && Integer.eq c d ->
V (Integer.plus a (Int 1), b)
| Cil.Ne, V _, V _ -> x
| _ -> invalid_arg "itv.ml:prune" in
normalize pruned
|
776c96379a87741a537bf9d013af40dcad9f6be8f57ceebca5317033ab683337 | input-output-hk/cardano-rest | test.hs |
-- | Placeholder entry point for the cardano-tx-submit test suite.
main :: IO ()
main = putStrLn banner
  where
    banner = "cardano-tx-submit test"
| null | https://raw.githubusercontent.com/input-output-hk/cardano-rest/040b123b45af06060aae04479d92fada68820f12/submit-api/test/test.hs | haskell |
-- | Test-suite entry point; currently just prints a marker string.
main :: IO ()
main = putStrLn "cardano-tx-submit test"
| |
e9c1ea0b0a6b9c3783f986ce678cf72e1ee13685d5b043f4d7033d10877be661 | GU-CLASP/TypedFlow | Types.hs | # LANGUAGE QuantifiedConstraints #
# LANGUAGE CPP #
#if __GLASGOW_HASKELL__ >= 806
# LANGUAGE NoStarIsType #
#endif
# LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE DataKinds #
# LANGUAGE DeriveFoldable #
# LANGUAGE DeriveFunctor #
{-# LANGUAGE DeriveTraversable #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE InstanceSigs #
# LANGUAGE MagicHash #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeInType #-}
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE UndecidableSuperClasses #
# LANGUAGE UnicodeSyntax #
# LANGUAGE LambdaCase #
# LANGUAGE ApplicativeDo #
# OPTIONS_GHC -fplugin GHC.TypeLits . KnownNat . Solver #
module TypedFlow.Types where
import GHC.TypeLits
import Data.Proxy
import Control . Monad . State
import Control . Monad . RWS ( RWS ( .. ) , local , ask , tell )
import Data.Kind (Constraint,Type)
import qualified Data.Int as Hask
import Data.Type.Equality
import Data.Monoid hiding (Sum,Product,Last,All,Ap)
import Data.Complex
newtype (∘) f (g :: k -> k2) (a::k) where
Comp :: forall f g a. f (g a) -> (f ∘ g) a
type Sat = (∘) Dict
type Sat' f x = f x
data Dict :: Constraint -> Type where
Dict :: a => Dict a
the second context is the PROVIDED constraint !
pattern Sat = Comp Dict
instance (Show (Sat a b)) where
show _ = "Sat"
proxySat :: forall k (b::k) (a :: k -> Constraint) proxy. a b => proxy b -> Sat a b
proxySat _ = Sat
natSat :: forall n. KnownNat n => Sat KnownNat n
natSat = Sat
type i < j = CmpNat i j ~ ' LT
type i < j = (i+1) <= j
-- type i <= j = (i <=? j) ~ 'True
type family Product xs where
Product '[] = 1
Product (x ': xs) = x * Product xs
type family Sum xs where
Sum '[] = 0
Sum (x ': xs) = x + Sum xs
type family (++) xs ys where
'[] ++ xs = xs
(x ': xs) ++ ys = x ': (xs ++ ys)
type family Tail xs where
Tail (x ': xs) = xs
type family Last xs where
Last '[x] = x
Last (x ': xs) = Last xs
type family Init xs where
Init '[x] = '[]
Init (x ': xs) = x ': Init xs
type family Length xs where
Length '[] = 0
Length (x ': xs) = 1 + Length xs
type family Reverse' xs ys where
Reverse' '[] ys = ys
Reverse' (x ': xs) ys = Reverse' xs (x ': ys )
type family Reverse xs where
Reverse xs = Reverse' xs '[]
-- From:
data NP f (xs :: [k]) where
Unit :: NP f '[]
(:*) :: f x -> NP f xs -> NP f (x ': xs)
deriving instance (forall x. Show (f x)) => Show (NP f xs)
type SList' = NP
(.+.) = appSList
infixr 5 .+.
infixr 5 *:
infixr 5 :*
(*:) :: forall x xs f. NP f xs -> f x -> NP f (xs ++ '[x])
xs *: x = appSList xs (x :* Unit)
-- | Look up the element of a heterogeneous product at the given axis.
hlookup :: Axis n xs -> NP f xs -> f (At n xs)
hlookup ax np = case ax of
  AxZero -> case np of y :* _ -> y
  AxSucc ax' -> case np of _ :* ys -> hlookup ax' ys
newtype I a = I a
newtype K a x = K a
type HList = NP I
pattern HSingle :: f a -> NP f '[a]
pattern HSingle x = x :* Unit
pattern VecSing :: Tensor s t -> HTV t '[s]
pattern VecSing t1 = F t1 :* Unit
pattern VecPair :: Tensor s t -> Tensor s' t -> HTV t '[s,s']
pattern VecPair t1 t2 = F t1 :* F t2 :* Unit
pattern VecTriple :: Tensor s t -> Tensor s' t -> Tensor s3 t -> HTV t '[s,s',s3]
pattern VecTriple t1 t2 t3 = F t1 :* F t2 :* F t3 :* Unit
type family All (c :: k -> Constraint) (xs :: [k]) :: Constraint where
All c '[] = ()
All c (x ': xs) = (c x, All c xs)
knownAll :: forall constraint s k. NP (Sat constraint) s -> (All constraint s => KnownLen s => k) -> k
knownAll Unit k = k
knownAll (Sat :* xs) k = knownAll xs $ k
allKnown' :: forall constraint s proxy. All constraint s => NP proxy s -> NP (Sat constraint) s
allKnown' Unit = Unit
allKnown' (_ :* xs) = Sat :* allKnown' xs
allKnown :: forall k s. KnownLen s => All k s => NP (Sat k) s
allKnown = allKnown' typeSList
data SomeSuch k f where
SomeSuch :: k x => f x -> SomeSuch k f
FIXME : use type , not constraint ?
type Ap c (t :: k) :: l
class Cons (x :: k) (xs :: [k])
instance Fun (Cons x) where type Ap (Cons x) xs = x ': xs
class Snoc (x :: k) (xs :: [k])
instance Fun (Snoc x) where
type Ap (Snoc x) '[] = '[x]
type Ap (Snoc x) (y ': ys) = y ': Ap (Snoc x) ys
class FMap (c :: k -> Constraint) (xs :: [k]) where
instance Fun c => Fun (FMap c) where
type Ap (FMap c) '[] = '[]
type Ap (FMap c) (x ': xs) = Ap c x ': Ap (FMap c) xs
mapFMap :: forall g f xs. (forall x. f x -> f (Ap g x)) -> NP f xs -> NP f (Ap (FMap g) xs)
mapFMap _ Unit = Unit
mapFMap f (x :* xs) = f x :* mapFMap @g @f f xs
type family All2 ( c : : k - > l - > Constraint ) ( xs : : [ k ] ) ( ys : : [ l ] ) : : Constraint where
-- All2 c '[] '[] = ()
All2 c ( x ' : xs ) ( y ' : ys ) = ( c x y , All2 c xs ys )
-- All2 c '[] (y ': ys) = 'True ~ 'False
-- All2 c (y ': ys) '[] = 'True ~ 'False
-- | Flip at type level
newtype F g t s = F {fromF :: g s t}
-- | Tensor vector. (Elements in the indexing list are ignored.)
type TV s t = NP (K (Tensor s t))
-- | Heterogeneous tensor vector with varying shapes and the same kind of elements
type HTV t = NP (F T t)
class Scnd' (x::(a,b))
instance Fun (Scnd') where type Ap Scnd' '(a,b) = b
class Frst' (x::(a,b))
instance Fun (Frst') where type Ap Frst' '(a,b) = a
type family Frst (x :: (a,b)) where Frst '(x,y) = x
type family Scnd (x :: (a,b)) where Scnd '(x,y) = y
type family Frst3 (x :: (a,b,c)) where Frst3 '(x,y,z) = x
type family Scnd3 (x :: (a,b,c)) where Scnd3 '(x,y,z) = y
type family Thrd3 (x :: (a,b,c)) where Thrd3 '(x,y,z) = z
class (KnownShape (Scnd3 r), KnownTyp (Thrd3 r), KnownSymbol (Frst3 r)) => KnownPlaceholder r where
placeHolderRef :: proxy r -> Ref String (Scnd3 r) (Thrd3 r)
instance (KnownShape y, KnownTyp z, KnownSymbol x) => KnownPlaceholder '(x,y,z) where
placeHolderRef _ = Ref (symbolVal (Proxy @x)) typeSShape typeSTyp
class (KnownShape (Frst r), KnownTyp (Scnd r)) => KnownPair r
instance (KnownShape x, KnownTyp y) => KnownPair '(x,y)
newtype Uncurry g (s :: (a,b)) = Uncurry {fromUncurry :: g (Frst s) (Scnd s)}
-- | Tensor vector heterogenous in types and shapes.
type HHTV = NP (Uncurry T)
type Placeholders = NP Placeholder
type PH = (Symbol,Shape,Typ)
newtype Placeholder (s :: PH) = PHT (T (Scnd3 s) (Thrd3 s))
hhead :: NP f (x ': xs) -> f x
hhead (x :* _) = x
htail :: NP f (x ': xs) -> NP f xs
htail (_ :* xs) = xs
htmap :: forall f ss t u. (forall s. Tensor s t -> Tensor (Ap f s) u) -> HTV t ss -> HTV u (Ap (FMap f) ss)
htmap _ Unit = Unit
htmap f (F x :* xs) = F (f x) :* htmap @f f xs
htmap ' : : forall f ss t u. All KnownShape ss = > ( forall s = > Tensor ( Ap f s ) t - > Tensor s u ) - > SList ss - > HTV t ( Ap ( FMap f ) ss ) - > HTV u ss
-- htmap' _ Unit Unit = Unit
-- htmap' f ((:*) _ n)(F x :* xs) = F (f x) :* htmap' @f f n xs
-- | Map a natural transformation over each element of a product.
hmap :: (forall x. f x -> g x) -> NP f xs -> NP g xs
hmap nt = \case
  Unit -> Unit
  y :* ys -> nt y :* hmap nt ys
-- | Effectful traversal of a product, preserving its structure and
-- running the effects left to right.
hTraverse :: forall f g xs m. Applicative m => (forall x. f x -> m (g x)) -> NP f xs -> m (NP g xs)
hTraverse act = \case
  Unit -> pure Unit
  y :* ys -> (:*) <$> act y <*> hTraverse act ys
-- | Variant of 'hmap' with a constraint on the elements.
hmapK :: forall k f g xs. All k xs => (forall x. k x => f x -> g x) -> NP f xs -> NP g xs
hmapK _ Unit = Unit
hmapK f (x :* xs) = f x :* hmapK @k f xs
hMapToList :: forall k f xs a. All k xs => (forall x. k x => f x -> a) -> NP f xs -> [a]
hMapToList f = htoList . hmapK @k (K . f)
| If NP is in fact a vector , we have a " usual " map .
kmap :: (a -> b) -> NP (K a) xs -> NP (K b) xs
kmap _ Unit = Unit
kmap f (K x :* xs) = K (f x) :* kmap f xs
| If NP is in fact a tuple , we can apply a tuple of endomorphisms . ( special case of < * > )
hendo :: NP Endo xs -> HList xs -> HList xs
hendo Unit Unit = Unit
hendo (Endo f :* fs) (I x :* xs) = (I (f x) :* hendo fs xs)
appSList, (.+.), happ :: NP f xs -> NP f ys -> NP f (xs ++ ys)
happ Unit xs = xs
happ (x :* xs) ys = x :* (happ xs ys)
appSList = happ
data Both f g x = Both {frst :: f x, scnd :: g x}
bothFromPair :: (f x, g x) -> Both f g x
bothFromPair (x,y) = (Both x y)
bothToPair :: Both f g x -> (f x, g x)
bothToPair (Both x y) = (x,y)
hzip :: NP f xs -> NP g xs -> NP (Both f g) xs
hzip = hzipWith Both
-- | Zip two products pointwise with a binary natural transformation.
hzipWith :: (forall x. f x -> g x -> h x) -> NP f xs -> NP g xs -> NP h xs
hzipWith op as bs = case (as, bs) of
  (Unit, Unit) -> Unit
  (a :* as', b :* bs') -> op a b :* hzipWith op as' bs'
hfor :: forall k f xs m a. All k xs => Applicative m => NP f xs -> (forall x. k x => f x -> m a) -> m [a]
hfor Unit _ = pure []
hfor (x :* xs) f = (:) <$> f x <*> hfor @k xs f
-- | Collapse a product of constants into an ordinary list.
htoList :: NP (K a) xs -> [a]
htoList = \case
  Unit -> []
  K v :* rest -> v : htoList rest
hsplit' :: SPeano n -> NP f xs -> (NP f (Take n xs), NP f (Drop n xs))
hsplit' SZero xs = (Unit,xs)
hsplit' (SSucc _n) Unit = (Unit,Unit)
hsplit' (SSucc n) (x :* xs) = case hsplit' n xs of
(l,r) -> (x :* l,r)
hsplit :: forall xs ys f. KnownLen xs => NP f (xs++ys) -> (NP f xs, NP f ys)
hsplit xys = splitApp @xs @ys (hsplit' (shapePeano @xs) xys)
splitApp' :: forall ys xs k. SList xs -> ((Take (PeanoLength xs) (xs ++ ys) ~ xs,
Drop (PeanoLength xs) (xs ++ ys) ~ ys) => k) -> k
splitApp' Unit k = k
splitApp' ((:*) _ n) k = splitApp' @ys n k
splitApp :: forall xs ys k. KnownLen xs => ((Take (PeanoLength xs) (xs ++ ys) ~ xs,
Drop (PeanoLength xs) (xs ++ ys) ~ ys) => k) -> k
splitApp = splitApp' @ys (typeSList @xs)
hsnoc :: NP f xs -> f x -> NP f (xs ++ '[x])
hsnoc xs x = happ xs (x :* Unit)
TODO : type = ' [ ( ) ] ( And then SPeano = NP ) ?
axis0 :: Axis 'Zero (x ': xs)
axis0 = AxZero
axis1 :: Axis ('Succ 'Zero) (x0 ': (x1 ': xs))
axis1 = AxSucc axis0
axis2 :: Axis ('Succ ('Succ 'Zero)) (x0 ': (x1 ': (x2 ': xs)))
axis2 = AxSucc axis1
axis3 :: Axis ('Succ ('Succ ('Succ 'Zero))) (x0 ': (x1 ': (x2 ': (x3 ': xs))))
axis3 = AxSucc axis2
data Axis n xs where
AxZero :: Axis 'Zero (x ': xs)
AxSucc :: Axis n xs -> Axis ('Succ n) (x ': xs)
-- | Numeric value of an axis (its depth as a natural number).
axisInt :: Axis n xs -> Integer
axisInt = \case
  AxZero -> 0
  AxSucc deeper -> succ (axisInt deeper)
-- | Numeric value of a singleton Peano number.
sPeanoInt :: SPeano n -> Integer
sPeanoInt = \case
  SZero -> 0
  SSucc m -> succ (sPeanoInt m)
type family PeanoNat (n::Peano) :: Nat where
PeanoNat 'Zero = 0
PeanoNat ('Succ n) = PeanoNat n + 1
data SPeano n where
SZero :: SPeano 'Zero
SSucc :: SPeano n -> SPeano ('Succ n)
class KnownPeano n where
knownPeano :: SPeano n
instance KnownPeano 'Zero where
knownPeano = SZero
instance KnownPeano n => KnownPeano ('Succ n) where
knownPeano = SSucc knownPeano
type family Take n xs where
Take 'Zero xs = '[]
Take ('Succ n) '[] = '[]
Take ('Succ n) (x ': xs) = x ': Take n xs
type family Drop n xs where
Drop 'Zero xs = xs
Drop _ '[] = '[]
Drop ('Succ n) (x ': xs) = Drop n xs
type family At n xs where
At 'Zero (x ': xs) = x
At ('Succ n) (x ': xs) = At n xs
-- type family Drop n xs where
-- Drop 'Zero xs = xs
-- Drop _ '[] = '[]
( ' Succ n ) ( x ' : xs ) = Drop n xs
-- type family At n xs where
At ' Zero ( x ' : xs ) = x
-- At ('Succ n) (x ': xs) = At n xs
data Kind = Float | Cmplx | Int | Bool deriving (Show,Eq,Ord)
data SKind (s::Kind) where
SFloat :: SKind 'Float
SCmplx :: SKind 'Cmplx
SInt :: SKind 'Int
SBool :: SKind 'Bool
data NBits = B32 | B64 | B1 deriving (Show,Eq,Ord)
data SNBits s where
SB32 :: SNBits 'B32
SB64 :: SNBits 'B64
data Typ = Typ Kind NBits deriving (Eq,Ord)
type family TypKind (t :: Typ) where TypKind ('Typ k b) = k
type family TypBits (t :: Typ) where TypBits ('Typ k b) = b
type KnownNumeric t = (NumericKind (TypKind t), KnownBits (TypBits t), t ~ 'Typ (TypKind t) (TypBits t))
type KnownFloat t = (TypKind t ~ 'Float, KnownBits (TypBits t), t ~ 'Typ 'Float (TypBits t))
type KnownAlgebraic t = (AlgebraicKind (TypKind t), KnownBits (TypBits t), t ~ 'Typ (TypKind t) (TypBits t))
class KnownKind t => NumericKind t where
instance NumericKind 'Float
instance NumericKind 'Cmplx
instance NumericKind 'Int
class NumericKind t => AlgebraicKind t where
instance AlgebraicKind 'Float
instance AlgebraicKind 'Cmplx
kVal :: SKind t1 -> Kind
kVal SFloat = Float
kVal SInt = Int
kVal SBool = Bool
kVal SCmplx = Cmplx
instance Eq (SKind t) where x == y = kVal x == kVal y
instance Ord (SKind t) where compare x y = compare (kVal x) (kVal y)
nbitsVal :: SNBits w -> NBits
nbitsVal SB64 = B64
nbitsVal SB32 = B32
instance Eq (SNBits t) where x == y = nbitsVal x == nbitsVal y
instance Ord (SNBits t) where compare x y = compare (nbitsVal x) (nbitsVal y)
sTypTyp :: STyp t1 -> Typ
sTypTyp (STyp k b Refl) = Typ (kVal k) (nbitsVal b)
instance Eq (STyp t) where x == y = sTypTyp x == sTypTyp y
instance Ord (STyp t) where compare x y = compare (sTypTyp x) (sTypTyp y)
data STyp t where
STyp :: SKind (TypKind t) -> SNBits (TypBits t) -> (t :~: 'Typ (TypKind t) (TypBits t)) -> STyp t
type Flt t = 'Typ 'Float t
type Float32 = 'Typ 'Float 'B32
type Complex32 = 'Typ 'Cmplx 'B32
type Int32 = 'Typ 'Int 'B32
type Int64 = 'Typ 'Int 'B64
type TFBool = 'Typ 'Bool 'B32
type Scalar t = T '[] t
type Shape = [Nat]
class (KnownLen s, All KnownNat s) => KnownShape s where
instance KnownShape '[]
instance (KnownNat x, KnownShape xs) => KnownShape (x ': xs)
type KnownTyp t = (KnownBits (TypBits t), KnownKind (TypKind t), t ~ 'Typ (TypKind t) (TypBits t))
typeSTyp :: forall t. KnownTyp t => STyp t
typeSTyp = STyp (kindVal @(TypKind t)) (bitsVal @(TypBits t)) Refl
type family HaskType t where
HaskType Float32 = Float
HaskType ('Typ 'Float 'B64) = Double
HaskType ('Typ 'Cmplx 'B32) = Complex Float
HaskType ('Typ 'Cmplx 'B64) = Complex Double
HaskType ('Typ 'Int 'B64) = Hask.Int64
HaskType ('Typ 'Int 'B32) = Hask.Int32
HaskType ('Typ 'Bool w) = Bool
class KnownBits t where
bitsVal :: SNBits t
instance KnownBits 'B32 where bitsVal = SB32
instance KnownBits 'B64 where bitsVal = SB64
typVal :: forall t. KnownTyp t => Typ
typVal = Typ (kVal k) (nbitsVal b)
where k = kindVal @(TypKind t)
b = bitsVal @(TypBits t)
knownBits :: SNBits t -> (KnownBits t => Fractional (HaskType ('Typ 'Float t)) => Floating (HaskType ('Typ 'Float t)) => k) -> k
knownBits SB32 k = k
knownBits SB64 k = k
knownKind :: SKind t -> (KnownKind t => k) -> k
knownKind SFloat k = k
knownKind SInt k = k
knownKind SBool k = k
knownKind SCmplx k = k
knownTyp :: STyp t -> (KnownTyp t => k) -> k
knownTyp (STyp k b Refl) r = knownKind k $ knownBits b r
knownAlgebraic :: forall t k. KnownAlgebraic t => ((Fractional (HaskType t), Floating (HaskType t)) => k) -> k
knownAlgebraic k = case kindVal @(TypKind t) of
SFloat -> case bitsVal @(TypBits t) of
SB32 -> k
SB64 -> k
SCmplx -> case bitsVal @(TypBits t) of
SB32 -> k
SB64 -> k
_ -> error "KnownAlgebraic bug"
knownNum :: forall t k. KnownNumeric t => (KnownTyp t => Num (HaskType t) => k) -> k
knownNum k = case kindVal @(TypKind t) of
SFloat -> case bitsVal @(TypBits t) of
SB32 -> k
SB64 -> k
SCmplx -> case bitsVal @(TypBits t) of
SB32 -> k
SB64 -> k
SBool -> error "KnownNumeric bug"
SInt -> case bitsVal @(TypBits t) of
SB32 -> k
SB64 -> k
class KnownKind t where kindVal :: SKind t
instance KnownKind 'Bool where kindVal = SBool
instance KnownKind 'Cmplx where kindVal = SCmplx
instance KnownKind 'Float where kindVal = SFloat
instance KnownKind 'Int where kindVal = SInt
type SList = NP Proxy
instance Ord (Sat KnownNat t) where
compare x@Sat y@Sat = compare (natVal x) (natVal y)
instance Eq (Sat KnownNat t) where
x@Sat == y@Sat = (natVal x) == (natVal y)
type SShape = NP (Sat KnownNat)
instance Ord (SShape s) where
compare x y = compare (shapeToList' x) (shapeToList' y)
instance Eq (SShape s) where
Unit == Unit = True
((:*) x xs) == ((:*) y ys) = x == y && xs == ys
instance {-# OVERLAPPING #-} Show (SShape s) where
show x = show (shapeToList' x)
sListLength :: NP f s -> Integer
sListLength Unit = 0
sListLength ((:*) _ s) = 1+sListLength s
sListLen :: NP f s -> Int
sListLen = fromIntegral . sListLength
sListLenAsNat :: NP f s -> Sat KnownNat (Length s)
sListLenAsNat Unit = Sat
sListLenAsNat ((:*) _ s) = case sListLenAsNat s of
Sat -> Sat
type family PeanoLength xs :: Peano where
PeanoLength '[] = 'Zero
PeanoLength (x ': xs) = 'Succ (PeanoLength xs)
withKnownNat :: forall k. Int -> (forall (n::Nat). KnownNat n => Proxy n -> k) -> k
withKnownNat 0 f = f (Proxy @0)
withKnownNat 1 f = f (Proxy @1)
withKnownNat n f = withKnownNat (n `div` 2) (if n `mod` 2 == 0 then f2x else f2x1)
where f2x,f2x1 :: forall (n::Nat). KnownNat n => Proxy n -> k
f2x _ = f (Proxy @(n*2))
f2x1 _ = f (Proxy @(n*2+1))
Probably a GHC bug :
withKnownNat '' : : forall ( forall ( n::Nat ) . KnownNat n = > k ) - > k
-- withKnownNat'' 0 f = f @0
withKnownNat '' n f = withKnownNat '' ( n-1 ) fsucc
where fsucc : : forall ( n::Nat ) . KnownNat n = > k
= f @(n+1 )
-- This also fails:
appProxy : : forall ( n::Nat ) n = > Proxy n - > ( forall ( m::Nat ) . KnownNat m = > k ) - > k
-- appProxy f _ = f @n
withKnownNat : : forall ( forall ( n::Nat ) . KnownNat n = > k ) - > k
-- withKnownNat n f = withKnownNat' n (\proxy -> appProxy proxy f)
class KnownNat (Length s) => KnownLen s where
shapePeano :: SPeano (PeanoLength s)
typeSList :: SList s
instance KnownLen '[] where
shapePeano = SZero
typeSList = Unit
instance KnownLen xs => KnownLen (x ': xs) where
shapePeano = SSucc (shapePeano @xs)
typeSList = (:*) Proxy (typeSList @xs)
listTypeLen :: forall xs. KnownLen xs => Integer
listTypeLen = sListLength (typeSList @xs)
typeSListProxy :: KnownLen xs => proxy xs -> SList xs
typeSListProxy _ = typeSList
sListProxy :: NP f xs -> Proxy xs
sListProxy _ = Proxy
knownNatVal :: forall x. Sat KnownNat x -> Integer
knownNatVal Sat = natVal (Proxy @x)
shapeToList' :: SShape s -> [Integer]
shapeToList' Unit = []
shapeToList' ((:*) x xs) = knownNatVal x : shapeToList' xs
shapeToList'' :: All KnownNat s => NP proxy s -> [Integer]
shapeToList'' Unit = []
shapeToList'' ((:*) x xs) = natVal x : shapeToList'' xs
shapeToList :: ∀(s::Shape). KnownShape s => [Integer]
shapeToList = shapeToList'' (typeSList @ s)
typeSShape :: forall s. KnownShape s => SShape s
typeSShape = sListSShape (typeSList @s)
proxySShape :: forall s. KnownShape s => Proxy s -> SShape s
proxySShape _ = typeSShape
sListSShape :: forall s. All KnownNat s => SList s -> SShape s
sListSShape = allKnown'
type None = 514229 -- fibonnaci prime.
type None = 0 - 1 -- GHC does not like negative .
-- Using a maybe type would be a RPITA.
--------------------------------
-- Generation Effects (TODO: move to other module)
data VarInfo = forall s t. (KnownShape s, KnownTyp t) => VarInfo {varTrainable :: Bool,
varRef :: Ref String s t,
varInitial :: Maybe (T s t)}
varName :: VarInfo -> String
varName VarInfo {varRef = Ref {..}} = refName
data GState = GState {nextVar :: Integer, -- ^ next free variable
genRegularizers :: [Scalar Float32] -- ^ accumulated regularizers
}
initialGstate :: GState
initialGstate = (GState {nextVar = 0
,genRegularizers=[]
})
data Gen a where
GPId :: Gen Integer
GPVariable :: forall (shape :: Shape) t. (KnownTyp t,KnownShape shape) => Bool -> String -> Maybe (T shape t) -> Gen (Ref String shape t)
GPModify :: (KnownShape s,KnownTyp t) => Ref Int s t -> T s t -> Gen (T s t)
GPReturn :: a -> Gen a
GPState :: (GState -> (a,GState)) -> Gen a
GPApp :: (Gen (a -> b)) -> Gen a -> Gen b
GPBind :: Gen a -> (a -> Gen b) -> Gen b
genGets :: (GState -> a) -> Gen a
genGets f = GPState (\s -> (f s, s))
instance Applicative Gen where
(<*>) = GPApp
pure = GPReturn
instance Monad Gen where
(>>=) = GPBind
instance Functor Gen where
fmap f = (pure f <*>)
--------------------------
-- Tensors
-- | An indexing tensor in the format expected by GatherND
type IndexTensor indexShape containerShape w = T (indexShape ++ '[Length containerShape]) ('Typ 'Int w)
-- | Description of a random distribution
data Distribution (s :: Shape) (t :: Typ) where
-- | Each element is from a truncated normal distribution with given standard dev.
TruncatedNormalD :: Float -> Distribution s ('Typ 'Float w)
-- | Each element is from a uniform distribution with given bounds (low, high)
UniformD :: Float -> Float -> Distribution s ('Typ 'Float w)
OrthogonalD :: Distribution '[m,n] ('Typ 'Float w)
data Ref r s t = Ref {refName :: r,
refShape :: SShape s,
refTyp :: STyp t}
data NilOp s t where
ExternalVar :: Ref String s t -> NilOp s t
Variable :: Ref Int s t -> NilOp s t
Constant :: HaskType t -> NilOp '[] t
Range :: KnownBits w => Sat KnownNat n -> NilOp '[n] ('Typ 'Int w)
data Catable s1 s2 t n = Catable (Sat KnownNat n) (T (s1 ++ (n ': s2)) t)
-- deriving Show
type Unique = Int
data T (s :: Shape) (t :: Typ) where
BroadcastT :: KnownTyp t => Maybe Unique -> Bool -> Sat KnownNat n -> SShape s -> T s t -> T (n ': s) t
MapT :: KnownTyp t => Sat KnownNat n -> SShape s -> (T s t -> T r u) -> T (n ': s) t -> T (n ': r) u
ZipT :: (KnownTyp t, KnownTyp u) => Sat KnownNat n -> SShape s -> SShape r -> (T s t -> T r u -> T q v) -> T (n ': s) t -> T (n ': r) u -> T (n ': q) v
Zip3T :: (KnownTyp t, KnownTyp u, KnownTyp v) => Sat KnownNat n -> SShape s -> SShape r -> SShape q -> (T s t -> T r u -> T q v -> T p w) -> T (n ': s) t -> T (n ': r) u -> T (n ': q) v -> T (n ': p) w
T :: NilOp s t -> T s t
this is the unique noise identifier , preventing two different noises to ever be re - shared .
SShape s0 -> SShape s1 ->
Distribution s1 t ->
T (s0 ++ s1) t
BinOp :: (KnownTyp t,KnownTyp u) => BinOp s1 t s2 u s3 v -> SShape s0 -> SShape s1 -> STyp t -> SShape s2 -> STyp u -> T (s0 ++ s1) t -> T (s0 ++ s2) u -> T (s0 ++ s3) v
UnOp :: KnownTyp t => UnOp s1 t s2 u -> SShape s0 -> T (s0 ++ s1) t -> T (s0 ++ s2) u
Unbroadcast :: Sat KnownNat n -> Unique -> T (n ': s) t -> T s t
DirectBroadcast :: SShape s0 -> NP proxy' s1 -> SShape s2 -> NP proxy' s3 -> T (s0 ++ s2) t -> T (s0 ++ (s1 ++ (s2 ++ s3))) t
ReshapeFrom :: Product s ~ Product s0 => SShape s0 -> T s0 t -> T s t
Transpose :: SShape s0 -> Permutation s0 s -> T s0 t -> T s t
Concat :: SShape s0 -> SShape s1 -> NP (Catable s0 s1 t) ns -> T (s0 ++ (Sum ns ': s1)) t
Gather :: KnownTyp ('Typ 'Int w) => SShape indexShape -> SShape s0 -> Sat KnownNat m -> SShape s1
-> T (s0 ++ (m ': s1)) t -> T (s0 ++ indexShape) ('Typ 'Int w) -> T (s0 ++ indexShape ++ s1) t
GatherND :: KnownTyp ('Typ 'Int w) => SShape containerShape -> SShape elementShape -> SShape indexShape
-> T (containerShape ++ elementShape) t -> IndexTensor indexShape containerShape w -> T (indexShape ++ elementShape) t
MatMul :: forall s m n o t. KnownNumeric t => SShape s -> Sat KnownNat n -> Sat KnownNat o -> Sat KnownNat m -> T (s ++ '[n,o]) t -> T (s ++ [o,m]) t -> T (s ++ [n,m]) t
Where :: T s TFBool -> T s t -> T s t -> T s t
If :: Scalar TFBool -> T s t -> T s t -> T s t
Convolution :: KnownAlgebraic t => Sat KnownNat bs -> Sat KnownNat inChannels -> Sat KnownNat outChannels -> SShape filterSpatialShape -> SShape s
-> T (bs ': s ++ '[inChannels]) t -- input tensor (batched)
-> T (filterSpatialShape ++ '[inChannels,outChannels]) t -- filters
-> T (bs ': s ++ '[outChannels]) t
Pool :: Length outSpatial ~ Length window =>
Sat KnownNat bs -> SShape window -> PoolingType -> Sat KnownNat numChannels -> SShape outSpatial
-> T (bs ': ZipWithMulShapes window outSpatial ++ '[numChannels]) t
-> T (bs ': outSpatial ++ '[numChannels]) t
Softmax :: Sat KnownNat bs -> Sat KnownNat n -> T '[bs,n] (Flt w) -> T '[bs,n] (Flt w)
-- yes, softmax is shape-fixed:
instance Show Unique where
-- show _ = "<Unique>"
-- deriving instance (Show (T s t))
type family ZipWithMulShapes (xs::Shape) (xy::Shape) :: Shape
type instance ZipWithMulShapes (x ': xs) (y ': ys) = x*y ': ZipWithMulShapes xs ys
type instance ZipWithMulShapes '[] _ = '[]
type instance ZipWithMulShapes _ '[] = '[]
satMul :: forall n m. Sat KnownNat n -> Sat KnownNat m -> Sat KnownNat (n*m)
satMul Sat Sat = Sat
satProd :: SShape s -> Sat KnownNat (Product s)
satProd Unit = natSat @1
satProd (x :* xs) = satMul x (satProd xs)
satAdd :: forall n m. Sat KnownNat n -> Sat KnownNat m -> Sat KnownNat (n+m)
satAdd Sat Sat = Sat
zipWithMulSShapes :: SShape xs -> SShape ys -> SShape (ZipWithMulShapes xs ys)
zipWithMulSShapes Unit _ = Unit
zipWithMulSShapes _ Unit = Unit
zipWithMulSShapes ((:*) x xs) ((:*) y ys) = (:*) (satMul x y) (zipWithMulSShapes xs ys)
data PoolingType = MaxPool | AvgPool deriving Show
type Tensor shape = T shape
data ReduceOp = Mean | Max | Min | Sum
data Axis1Op s1 t s2 u where
ReverseT :: Sat KnownNat n -> Axis1Op '[n] t '[n] t
ArgMax :: KnownNumeric t => Sat KnownNat n -> Axis1Op '[n] t '[] ('Typ 'Int b)
OneHot :: KnownNumeric t => Sat KnownNat n -> Axis1Op '[] ('Typ 'Int b) '[n] t
ReduceOp :: KnownNumeric t => Sat KnownNat n -> ReduceOp -> Axis1Op '[n] t '[] t
SliceOp :: forall m n t proxy. proxy m -> Sat KnownNat n -> Integer -> Integer -> Axis1Op '[n] t '[m] t
AccessOp :: forall n t. Sat KnownNat n -> Integer -> Axis1Op '[n] t '[] t
data Float1Op
= ClipByValue Float Float
| Tanh
| Sin
| Exp
| Sigmoid
| HardSigmoid
| Relu
| Floor
| Round
| Cos
| Log
| Asin
| Acos
| Sinh
| Cosh
| Asinh
| Acosh
| Atan
| Atanh
| Sqrt
deriving Show
data Num1Op = Square | Negate | Abs | Sign
deriving Show
data Side = Upper | Lower
data UnOp (s1 :: Shape) (t :: Typ) (s2 :: Shape) (u :: Typ) where
ExpM :: KnownNumeric t => Sat KnownNat n -> UnOp '[n,n] t '[n,n] t
Diag :: Sat KnownNat n -> UnOp '[n] t '[n,n] t
StopGradient :: UnOp '[] t '[] t
Cast :: UnOp '[] t '[] u
Conjugate :: UnOp '[] ('Typ 'Cmplx w) '[] ('Typ 'Cmplx w)
RealPart :: UnOp '[] ('Typ 'Cmplx w) '[] ('Typ 'Float w)
Num1Op :: KnownNumeric t => Num1Op -> UnOp '[] t '[] t
Float1Op :: Float1Op -> UnOp '[] (Flt w) '[] (Flt w)
Axis1Op :: SShape s -> Axis1Op s1 t s2 u -> UnOp (s1 ++ s) t (s2 ++ s) u
-- deriving Show
data CompOp = Less | Greater | LessOrEqual | GreaterOrEqual
data LogicOp = And | Or
data Simple2Op t u where
Divide :: KnownAlgebraic t => Simple2Op t t
IntegerDiv :: Simple2Op ('Typ 'Int w) ('Typ 'Int w)
Equal :: KnownTyp t => Simple2Op t TFBool
Subtract :: KnownNumeric t => Simple2Op t t
Multiply :: KnownNumeric t => Simple2Op t t
Add :: KnownNumeric t => Simple2Op t t
Minimum :: KnownNumeric t => Simple2Op t t
Maximum :: KnownNumeric t => Simple2Op t t
FloorMod :: KnownNumeric t => Simple2Op t t
Comparision :: KnownNumeric t => CompOp -> Simple2Op t TFBool
Logic :: LogicOp -> Simple2Op TFBool TFBool
MkComplex :: Simple2Op (Flt w) ('Typ 'Cmplx w)
-- deriving instance Show (Simple2Op t u)
data BinOp s1 t1 s2 t2 s3 t3 where
Simple2Op :: Simple2Op t u -> BinOp '[] t '[] t '[] u
SigmoidCrossEntropyWithLogits :: KnownFloat t => BinOp '[] t '[] t '[] t
SoftmaxCrossEntropyWithLogits :: KnownFloat t => BinOp '[n] t '[n] t '[] t
SparseSoftmaxCrossEntropyWithLogits :: BinOp '[] Int32 '[n] (Flt w) '[] (Flt w)
deriving instance Show ( a b c d e f )
data Permutation (s :: [k]) (t :: [k]) where
PermId :: Permutation s s
PermSkip :: Permutation s t -> Permutation (n ': s) (n ': t)
PermSwap :: Permutation (n ': m ': s) (m ': n ': s)
PermTrans :: Permutation s t -> Permutation t u -> Permutation s u
deriving instance Show (Permutation s t)
class KnownTensors p where -- TODO: delete
-- | traverse all the tensors contained in p.
travTensor :: Applicative m => (forall s t. (KnownTyp t, KnownShape s) => String -> (T s t) -> m (T s t)) -> String -> p -> m p
instance (KnownTyp t, KnownShape shape) => KnownTensors (T shape t) where
travTensor f = f
instance (All KnownPair ys) => KnownTensors (HHTV ys) where
travTensor :: forall m. Applicative m => (forall s t'. (KnownTyp t', KnownShape s) => String -> T s t' -> m (T s t')) -> String -> HHTV ys -> m (HHTV ys)
travTensor f s = ttr 0
where ttr :: forall xs. All KnownPair xs => Int -> HHTV xs -> m (HHTV xs)
ttr _ Unit = pure Unit
ttr n (Uncurry x :* xs) = do
x' <- f (s <> "_" <> show n) x
xs' <- ttr (n + 1) xs
return (Uncurry x' :* xs')
instance (KnownTyp t, All KnownShape ys) => KnownTensors (HTV t ys) where
travTensor :: forall m. Applicative m => (forall s t'. (KnownTyp t', KnownShape s) => String -> T s t' -> m (T s t')) -> String -> (HTV t ys) -> m (HTV t ys)
travTensor f s = ttr 0
where ttr :: forall xs. All KnownShape xs => Int -> HTV t xs -> m (HTV t xs)
ttr _ Unit = pure Unit
ttr n (F x :* xs) = do
x' <- f (s <> "_" <> show n) x
xs' <- ttr (n + 1) xs
return (F x' :* xs')
instance (KnownTensors p, KnownTensors q) => KnownTensors (p,q) where
travTensor f s (x,y) = (,) <$> travTensor f (s<>"_fst") x <*> travTensor f (s<>"_snd") y
instance (KnownTensors p1, KnownTensors p2, KnownTensors p3) => KnownTensors (p1,p2,p3) where
travTensor f s (x,y,z) = (,,) <$> travTensor f (s<>"_1") x <*> travTensor f (s<>"_2") y <*> travTensor f (s<>"_3") z
instance (KnownTensors p1, KnownTensors p2, KnownTensors p3, KnownTensors p4) => KnownTensors (p1,p2,p3,p4) where
travTensor f s (x,y,z,w) = (,,,) <$> travTensor f (s<>"_1") x <*> travTensor f (s<>"_2") y <*> travTensor f (s<>"_3") z <*> travTensor f (s<>"_4") w
class KnownTensors p => ParamWithDefault p where
defaultInitializer :: Gen p
| null | https://raw.githubusercontent.com/GU-CLASP/TypedFlow/875bf1b26474e38ca99e25055990466d36af6377/TypedFlow/Types.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DeriveTraversable #
# LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
# LANGUAGE TypeInType #
type i <= j = (i <=? j) ~ 'True
From:
All2 c '[] '[] = ()
All2 c '[] (y ': ys) = 'True ~ 'False
All2 c (y ': ys) '[] = 'True ~ 'False
| Flip at type level
| Tensor vector. (Elements in the indexing list are ignored.)
| Heterogeneous tensor vector with varying shapes and the same kind of elements
| Tensor vector heterogenous in types and shapes.
htmap' _ Unit Unit = Unit
htmap' f ((:*) _ n)(F x :* xs) = F (f x) :* htmap' @f f n xs
| Map a natural transformation
type family Drop n xs where
Drop 'Zero xs = xs
Drop _ '[] = '[]
type family At n xs where
At ('Succ n) (x ': xs) = At n xs
# OVERLAPPING #
withKnownNat'' 0 f = f @0
This also fails:
appProxy f _ = f @n
withKnownNat n f = withKnownNat' n (\proxy -> appProxy proxy f)
fibonnaci prime.
GHC does not like negative .
Using a maybe type would be a RPITA.
------------------------------
Generation Effects (TODO: move to other module)
^ next free variable
^ accumulated regularizers
------------------------
Tensors
| An indexing tensor in the format expected by GatherND
| Description of a random distribution
| Each element is from a truncated normal distribution with given standard dev.
| Each element is from a uniform distribution with given bounds (low, high)
deriving Show
input tensor (batched)
filters
yes, softmax is shape-fixed:
show _ = "<Unique>"
deriving instance (Show (T s t))
deriving Show
deriving instance Show (Simple2Op t u)
TODO: delete
| traverse all the tensors contained in p. | # LANGUAGE QuantifiedConstraints #
# LANGUAGE CPP #
#if __GLASGOW_HASKELL__ >= 806
# LANGUAGE NoStarIsType #
#endif
# LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
# LANGUAGE DeriveFoldable #
# LANGUAGE DeriveFunctor #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE InstanceSigs #
# LANGUAGE MagicHash #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE UndecidableSuperClasses #
# LANGUAGE UnicodeSyntax #
# LANGUAGE LambdaCase #
# LANGUAGE ApplicativeDo #
# OPTIONS_GHC -fplugin GHC.TypeLits . KnownNat . Solver #
module TypedFlow.Types where
import GHC.TypeLits
import Data.Proxy
import Control . Monad . State
import Control . Monad . RWS ( RWS ( .. ) , local , ask , tell )
import Data.Kind (Constraint,Type)
import qualified Data.Int as Hask
import Data.Type.Equality
import Data.Monoid hiding (Sum,Product,Last,All,Ap)
import Data.Complex
newtype (∘) f (g :: k -> k2) (a::k) where
Comp :: forall f g a. f (g a) -> (f ∘ g) a
type Sat = (∘) Dict
type Sat' f x = f x
data Dict :: Constraint -> Type where
Dict :: a => Dict a
the second context is the PROVIDED constraint !
pattern Sat = Comp Dict
instance (Show (Sat a b)) where
show _ = "Sat"
proxySat :: forall k (b::k) (a :: k -> Constraint) proxy. a b => proxy b -> Sat a b
proxySat _ = Sat
natSat :: forall n. KnownNat n => Sat KnownNat n
natSat = Sat
type i < j = CmpNat i j ~ ' LT
type i < j = (i+1) <= j
type family Product xs where
Product '[] = 1
Product (x ': xs) = x * Product xs
type family Sum xs where
Sum '[] = 0
Sum (x ': xs) = x + Sum xs
type family (++) xs ys where
'[] ++ xs = xs
(x ': xs) ++ ys = x ': (xs ++ ys)
type family Tail xs where
Tail (x ': xs) = xs
type family Last xs where
Last '[x] = x
Last (x ': xs) = Last xs
type family Init xs where
Init '[x] = '[]
Init (x ': xs) = x ': Init xs
type family Length xs where
Length '[] = 0
Length (x ': xs) = 1 + Length xs
type family Reverse' xs ys where
Reverse' '[] ys = ys
Reverse' (x ': xs) ys = Reverse' xs (x ': ys )
type family Reverse xs where
Reverse xs = Reverse' xs '[]
data NP f (xs :: [k]) where
Unit :: NP f '[]
(:*) :: f x -> NP f xs -> NP f (x ': xs)
deriving instance (forall x. Show (f x)) => Show (NP f xs)
type SList' = NP
(.+.) = appSList
infixr 5 .+.
infixr 5 *:
infixr 5 :*
(*:) :: forall x xs f. NP f xs -> f x -> NP f (xs ++ '[x])
xs *: x = appSList xs (x :* Unit)
hlookup :: Axis n xs -> NP f xs -> f (At n xs)
hlookup AxZero (x :* _) = x
hlookup (AxSucc n) (_ :* xs) = hlookup n xs
newtype I a = I a
newtype K a x = K a
type HList = NP I
pattern HSingle :: f a -> NP f '[a]
pattern HSingle x = x :* Unit
pattern VecSing :: Tensor s t -> HTV t '[s]
pattern VecSing t1 = F t1 :* Unit
pattern VecPair :: Tensor s t -> Tensor s' t -> HTV t '[s,s']
pattern VecPair t1 t2 = F t1 :* F t2 :* Unit
pattern VecTriple :: Tensor s t -> Tensor s' t -> Tensor s3 t -> HTV t '[s,s',s3]
pattern VecTriple t1 t2 t3 = F t1 :* F t2 :* F t3 :* Unit
type family All (c :: k -> Constraint) (xs :: [k]) :: Constraint where
All c '[] = ()
All c (x ': xs) = (c x, All c xs)
knownAll :: forall constraint s k. NP (Sat constraint) s -> (All constraint s => KnownLen s => k) -> k
knownAll Unit k = k
knownAll (Sat :* xs) k = knownAll xs $ k
allKnown' :: forall constraint s proxy. All constraint s => NP proxy s -> NP (Sat constraint) s
allKnown' Unit = Unit
allKnown' (_ :* xs) = Sat :* allKnown' xs
allKnown :: forall k s. KnownLen s => All k s => NP (Sat k) s
allKnown = allKnown' typeSList
data SomeSuch k f where
SomeSuch :: k x => f x -> SomeSuch k f
FIXME : use type , not constraint ?
type Ap c (t :: k) :: l
class Cons (x :: k) (xs :: [k])
instance Fun (Cons x) where type Ap (Cons x) xs = x ': xs
class Snoc (x :: k) (xs :: [k])
instance Fun (Snoc x) where
type Ap (Snoc x) '[] = '[x]
type Ap (Snoc x) (y ': ys) = y ': Ap (Snoc x) ys
class FMap (c :: k -> Constraint) (xs :: [k]) where
instance Fun c => Fun (FMap c) where
type Ap (FMap c) '[] = '[]
type Ap (FMap c) (x ': xs) = Ap c x ': Ap (FMap c) xs
mapFMap :: forall g f xs. (forall x. f x -> f (Ap g x)) -> NP f xs -> NP f (Ap (FMap g) xs)
mapFMap _ Unit = Unit
mapFMap f (x :* xs) = f x :* mapFMap @g @f f xs
type family All2 ( c : : k - > l - > Constraint ) ( xs : : [ k ] ) ( ys : : [ l ] ) : : Constraint where
All2 c ( x ' : xs ) ( y ' : ys ) = ( c x y , All2 c xs ys )
newtype F g t s = F {fromF :: g s t}
type TV s t = NP (K (Tensor s t))
type HTV t = NP (F T t)
class Scnd' (x::(a,b))
instance Fun (Scnd') where type Ap Scnd' '(a,b) = b
class Frst' (x::(a,b))
instance Fun (Frst') where type Ap Frst' '(a,b) = a
type family Frst (x :: (a,b)) where Frst '(x,y) = x
type family Scnd (x :: (a,b)) where Scnd '(x,y) = y
type family Frst3 (x :: (a,b,c)) where Frst3 '(x,y,z) = x
type family Scnd3 (x :: (a,b,c)) where Scnd3 '(x,y,z) = y
type family Thrd3 (x :: (a,b,c)) where Thrd3 '(x,y,z) = z
class (KnownShape (Scnd3 r), KnownTyp (Thrd3 r), KnownSymbol (Frst3 r)) => KnownPlaceholder r where
placeHolderRef :: proxy r -> Ref String (Scnd3 r) (Thrd3 r)
instance (KnownShape y, KnownTyp z, KnownSymbol x) => KnownPlaceholder '(x,y,z) where
placeHolderRef _ = Ref (symbolVal (Proxy @x)) typeSShape typeSTyp
class (KnownShape (Frst r), KnownTyp (Scnd r)) => KnownPair r
instance (KnownShape x, KnownTyp y) => KnownPair '(x,y)
newtype Uncurry g (s :: (a,b)) = Uncurry {fromUncurry :: g (Frst s) (Scnd s)}
type HHTV = NP (Uncurry T)
type Placeholders = NP Placeholder
type PH = (Symbol,Shape,Typ)
newtype Placeholder (s :: PH) = PHT (T (Scnd3 s) (Thrd3 s))
hhead :: NP f (x ': xs) -> f x
hhead (x :* _) = x
htail :: NP f (x ': xs) -> NP f xs
htail (_ :* xs) = xs
htmap :: forall f ss t u. (forall s. Tensor s t -> Tensor (Ap f s) u) -> HTV t ss -> HTV u (Ap (FMap f) ss)
htmap _ Unit = Unit
htmap f (F x :* xs) = F (f x) :* htmap @f f xs
htmap ' : : forall f ss t u. All KnownShape ss = > ( forall s = > Tensor ( Ap f s ) t - > Tensor s u ) - > SList ss - > HTV t ( Ap ( FMap f ) ss ) - > HTV u ss
hmap :: (forall x. f x -> g x) -> NP f xs -> NP g xs
hmap _ Unit = Unit
hmap f (x :* xs) = f x :* hmap f xs
hTraverse :: forall f g xs m. Applicative m => (forall x. f x -> m (g x)) -> NP f xs -> m (NP g xs)
hTraverse _ Unit = pure Unit
hTraverse f (x :* xs) = do
x' <- f x
xs' <- hTraverse f xs
return (x' :* xs')
| Variant of hmap with a constraint
hmapK :: forall k f g xs. All k xs => (forall x. k x => f x -> g x) -> NP f xs -> NP g xs
hmapK _ Unit = Unit
hmapK f (x :* xs) = f x :* hmapK @k f xs
hMapToList :: forall k f xs a. All k xs => (forall x. k x => f x -> a) -> NP f xs -> [a]
hMapToList f = htoList . hmapK @k (K . f)
| If NP is in fact a vector , we have a " usual " map .
kmap :: (a -> b) -> NP (K a) xs -> NP (K b) xs
kmap _ Unit = Unit
kmap f (K x :* xs) = K (f x) :* kmap f xs
| If NP is in fact a tuple , we can apply a tuple of endomorphisms . ( special case of < * > )
hendo :: NP Endo xs -> HList xs -> HList xs
hendo Unit Unit = Unit
hendo (Endo f :* fs) (I x :* xs) = (I (f x) :* hendo fs xs)
appSList, (.+.), happ :: NP f xs -> NP f ys -> NP f (xs ++ ys)
happ Unit xs = xs
happ (x :* xs) ys = x :* (happ xs ys)
appSList = happ
data Both f g x = Both {frst :: f x, scnd :: g x}
bothFromPair :: (f x, g x) -> Both f g x
bothFromPair (x,y) = (Both x y)
bothToPair :: Both f g x -> (f x, g x)
bothToPair (Both x y) = (x,y)
hzip :: NP f xs -> NP g xs -> NP (Both f g) xs
hzip = hzipWith Both
hzipWith :: (forall x. f x -> g x -> h x) -> NP f xs -> NP g xs -> NP h xs
hzipWith _ Unit Unit = Unit
hzipWith f (x :* xs) (y :* ys) = f x y :* hzipWith f xs ys
hfor :: forall k f xs m a. All k xs => Applicative m => NP f xs -> (forall x. k x => f x -> m a) -> m [a]
hfor Unit _ = pure []
hfor (x :* xs) f = (:) <$> f x <*> hfor @k xs f
htoList :: NP (K a) xs -> [a]
htoList Unit = []
htoList (K x :* xs) = x : htoList xs
hsplit' :: SPeano n -> NP f xs -> (NP f (Take n xs), NP f (Drop n xs))
hsplit' SZero xs = (Unit,xs)
hsplit' (SSucc _n) Unit = (Unit,Unit)
hsplit' (SSucc n) (x :* xs) = case hsplit' n xs of
(l,r) -> (x :* l,r)
hsplit :: forall xs ys f. KnownLen xs => NP f (xs++ys) -> (NP f xs, NP f ys)
hsplit xys = splitApp @xs @ys (hsplit' (shapePeano @xs) xys)
splitApp' :: forall ys xs k. SList xs -> ((Take (PeanoLength xs) (xs ++ ys) ~ xs,
Drop (PeanoLength xs) (xs ++ ys) ~ ys) => k) -> k
splitApp' Unit k = k
splitApp' ((:*) _ n) k = splitApp' @ys n k
splitApp :: forall xs ys k. KnownLen xs => ((Take (PeanoLength xs) (xs ++ ys) ~ xs,
Drop (PeanoLength xs) (xs ++ ys) ~ ys) => k) -> k
splitApp = splitApp' @ys (typeSList @xs)
hsnoc :: NP f xs -> f x -> NP f (xs ++ '[x])
hsnoc xs x = happ xs (x :* Unit)
TODO : type = ' [ ( ) ] ( And then SPeano = NP ) ?
axis0 :: Axis 'Zero (x ': xs)
axis0 = AxZero
axis1 :: Axis ('Succ 'Zero) (x0 ': (x1 ': xs))
axis1 = AxSucc axis0
axis2 :: Axis ('Succ ('Succ 'Zero)) (x0 ': (x1 ': (x2 ': xs)))
axis2 = AxSucc axis1
axis3 :: Axis ('Succ ('Succ ('Succ 'Zero))) (x0 ': (x1 ': (x2 ': (x3 ': xs))))
axis3 = AxSucc axis2
data Axis n xs where
AxZero :: Axis 'Zero (x ': xs)
AxSucc :: Axis n xs -> Axis ('Succ n) (x ': xs)
axisInt :: Axis n xs -> Integer
axisInt AxZero = 0
axisInt (AxSucc n) = 1 + axisInt n
sPeanoInt :: SPeano n -> Integer
sPeanoInt (SSucc n) = 1 + sPeanoInt n
sPeanoInt SZero = 0
type family PeanoNat (n::Peano) :: Nat where
PeanoNat 'Zero = 0
PeanoNat ('Succ n) = PeanoNat n + 1
data SPeano n where
SZero :: SPeano 'Zero
SSucc :: SPeano n -> SPeano ('Succ n)
class KnownPeano n where
knownPeano :: SPeano n
instance KnownPeano 'Zero where
knownPeano = SZero
instance KnownPeano n => KnownPeano ('Succ n) where
knownPeano = SSucc knownPeano
type family Take n xs where
Take 'Zero xs = '[]
Take ('Succ n) '[] = '[]
Take ('Succ n) (x ': xs) = x ': Take n xs
type family Drop n xs where
Drop 'Zero xs = xs
Drop _ '[] = '[]
Drop ('Succ n) (x ': xs) = Drop n xs
type family At n xs where
At 'Zero (x ': xs) = x
At ('Succ n) (x ': xs) = At n xs
( ' Succ n ) ( x ' : xs ) = Drop n xs
At ' Zero ( x ' : xs ) = x
data Kind = Float | Cmplx | Int | Bool deriving (Show,Eq,Ord)
data SKind (s::Kind) where
SFloat :: SKind 'Float
SCmplx :: SKind 'Cmplx
SInt :: SKind 'Int
SBool :: SKind 'Bool
data NBits = B32 | B64 | B1 deriving (Show,Eq,Ord)
data SNBits s where
SB32 :: SNBits 'B32
SB64 :: SNBits 'B64
data Typ = Typ Kind NBits deriving (Eq,Ord)
type family TypKind (t :: Typ) where TypKind ('Typ k b) = k
type family TypBits (t :: Typ) where TypBits ('Typ k b) = b
type KnownNumeric t = (NumericKind (TypKind t), KnownBits (TypBits t), t ~ 'Typ (TypKind t) (TypBits t))
type KnownFloat t = (TypKind t ~ 'Float, KnownBits (TypBits t), t ~ 'Typ 'Float (TypBits t))
type KnownAlgebraic t = (AlgebraicKind (TypKind t), KnownBits (TypBits t), t ~ 'Typ (TypKind t) (TypBits t))
class KnownKind t => NumericKind t where
instance NumericKind 'Float
instance NumericKind 'Cmplx
instance NumericKind 'Int
class NumericKind t => AlgebraicKind t where
instance AlgebraicKind 'Float
instance AlgebraicKind 'Cmplx
kVal :: SKind t1 -> Kind
kVal SFloat = Float
kVal SInt = Int
kVal SBool = Bool
kVal SCmplx = Cmplx
instance Eq (SKind t) where x == y = kVal x == kVal y
instance Ord (SKind t) where compare x y = compare (kVal x) (kVal y)
nbitsVal :: SNBits w -> NBits
nbitsVal SB64 = B64
nbitsVal SB32 = B32
instance Eq (SNBits t) where x == y = nbitsVal x == nbitsVal y
instance Ord (SNBits t) where compare x y = compare (nbitsVal x) (nbitsVal y)
sTypTyp :: STyp t1 -> Typ
sTypTyp (STyp k b Refl) = Typ (kVal k) (nbitsVal b)
instance Eq (STyp t) where x == y = sTypTyp x == sTypTyp y
instance Ord (STyp t) where compare x y = compare (sTypTyp x) (sTypTyp y)
data STyp t where
STyp :: SKind (TypKind t) -> SNBits (TypBits t) -> (t :~: 'Typ (TypKind t) (TypBits t)) -> STyp t
type Flt t = 'Typ 'Float t
type Float32 = 'Typ 'Float 'B32
type Complex32 = 'Typ 'Cmplx 'B32
type Int32 = 'Typ 'Int 'B32
type Int64 = 'Typ 'Int 'B64
type TFBool = 'Typ 'Bool 'B32
type Scalar t = T '[] t
type Shape = [Nat]
class (KnownLen s, All KnownNat s) => KnownShape s where
instance KnownShape '[]
instance (KnownNat x, KnownShape xs) => KnownShape (x ': xs)
type KnownTyp t = (KnownBits (TypBits t), KnownKind (TypKind t), t ~ 'Typ (TypKind t) (TypBits t))
typeSTyp :: forall t. KnownTyp t => STyp t
typeSTyp = STyp (kindVal @(TypKind t)) (bitsVal @(TypBits t)) Refl
type family HaskType t where
HaskType Float32 = Float
HaskType ('Typ 'Float 'B64) = Double
HaskType ('Typ 'Cmplx 'B32) = Complex Float
HaskType ('Typ 'Cmplx 'B64) = Complex Double
HaskType ('Typ 'Int 'B64) = Hask.Int64
HaskType ('Typ 'Int 'B32) = Hask.Int32
HaskType ('Typ 'Bool w) = Bool
class KnownBits t where
bitsVal :: SNBits t
instance KnownBits 'B32 where bitsVal = SB32
instance KnownBits 'B64 where bitsVal = SB64
typVal :: forall t. KnownTyp t => Typ
typVal = Typ (kVal k) (nbitsVal b)
where k = kindVal @(TypKind t)
b = bitsVal @(TypBits t)
knownBits :: SNBits t -> (KnownBits t => Fractional (HaskType ('Typ 'Float t)) => Floating (HaskType ('Typ 'Float t)) => k) -> k
knownBits SB32 k = k
knownBits SB64 k = k
knownKind :: SKind t -> (KnownKind t => k) -> k
knownKind SFloat k = k
knownKind SInt k = k
knownKind SBool k = k
knownKind SCmplx k = k
knownTyp :: STyp t -> (KnownTyp t => k) -> k
knownTyp (STyp k b Refl) r = knownKind k $ knownBits b r
knownAlgebraic :: forall t k. KnownAlgebraic t => ((Fractional (HaskType t), Floating (HaskType t)) => k) -> k
knownAlgebraic k = case kindVal @(TypKind t) of
SFloat -> case bitsVal @(TypBits t) of
SB32 -> k
SB64 -> k
SCmplx -> case bitsVal @(TypBits t) of
SB32 -> k
SB64 -> k
_ -> error "KnownAlgebraic bug"
knownNum :: forall t k. KnownNumeric t => (KnownTyp t => Num (HaskType t) => k) -> k
knownNum k = case kindVal @(TypKind t) of
SFloat -> case bitsVal @(TypBits t) of
SB32 -> k
SB64 -> k
SCmplx -> case bitsVal @(TypBits t) of
SB32 -> k
SB64 -> k
SBool -> error "KnownNumeric bug"
SInt -> case bitsVal @(TypBits t) of
SB32 -> k
SB64 -> k
class KnownKind t where kindVal :: SKind t
instance KnownKind 'Bool where kindVal = SBool
instance KnownKind 'Cmplx where kindVal = SCmplx
instance KnownKind 'Float where kindVal = SFloat
instance KnownKind 'Int where kindVal = SInt
type SList = NP Proxy
instance Ord (Sat KnownNat t) where
compare x@Sat y@Sat = compare (natVal x) (natVal y)
instance Eq (Sat KnownNat t) where
x@Sat == y@Sat = (natVal x) == (natVal y)
type SShape = NP (Sat KnownNat)
instance Ord (SShape s) where
compare x y = compare (shapeToList' x) (shapeToList' y)
instance Eq (SShape s) where
Unit == Unit = True
((:*) x xs) == ((:*) y ys) = x == y && xs == ys
show x = show (shapeToList' x)
sListLength :: NP f s -> Integer
sListLength Unit = 0
sListLength ((:*) _ s) = 1+sListLength s
sListLen :: NP f s -> Int
sListLen = fromIntegral . sListLength
sListLenAsNat :: NP f s -> Sat KnownNat (Length s)
sListLenAsNat Unit = Sat
sListLenAsNat ((:*) _ s) = case sListLenAsNat s of
Sat -> Sat
type family PeanoLength xs :: Peano where
PeanoLength '[] = 'Zero
PeanoLength (x ': xs) = 'Succ (PeanoLength xs)
withKnownNat :: forall k. Int -> (forall (n::Nat). KnownNat n => Proxy n -> k) -> k
withKnownNat 0 f = f (Proxy @0)
withKnownNat 1 f = f (Proxy @1)
withKnownNat n f = withKnownNat (n `div` 2) (if n `mod` 2 == 0 then f2x else f2x1)
where f2x,f2x1 :: forall (n::Nat). KnownNat n => Proxy n -> k
f2x _ = f (Proxy @(n*2))
f2x1 _ = f (Proxy @(n*2+1))
Probably a GHC bug :
withKnownNat '' : : forall ( forall ( n::Nat ) . KnownNat n = > k ) - > k
withKnownNat '' n f = withKnownNat '' ( n-1 ) fsucc
where fsucc : : forall ( n::Nat ) . KnownNat n = > k
= f @(n+1 )
appProxy : : forall ( n::Nat ) n = > Proxy n - > ( forall ( m::Nat ) . KnownNat m = > k ) - > k
withKnownNat : : forall ( forall ( n::Nat ) . KnownNat n = > k ) - > k
class KnownNat (Length s) => KnownLen s where
shapePeano :: SPeano (PeanoLength s)
typeSList :: SList s
instance KnownLen '[] where
shapePeano = SZero
typeSList = Unit
instance KnownLen xs => KnownLen (x ': xs) where
shapePeano = SSucc (shapePeano @xs)
typeSList = (:*) Proxy (typeSList @xs)
listTypeLen :: forall xs. KnownLen xs => Integer
listTypeLen = sListLength (typeSList @xs)
typeSListProxy :: KnownLen xs => proxy xs -> SList xs
typeSListProxy _ = typeSList
sListProxy :: NP f xs -> Proxy xs
sListProxy _ = Proxy
knownNatVal :: forall x. Sat KnownNat x -> Integer
knownNatVal Sat = natVal (Proxy @x)
shapeToList' :: SShape s -> [Integer]
shapeToList' Unit = []
shapeToList' ((:*) x xs) = knownNatVal x : shapeToList' xs
shapeToList'' :: All KnownNat s => NP proxy s -> [Integer]
shapeToList'' Unit = []
shapeToList'' ((:*) x xs) = natVal x : shapeToList'' xs
shapeToList :: ∀(s::Shape). KnownShape s => [Integer]
shapeToList = shapeToList'' (typeSList @ s)
typeSShape :: forall s. KnownShape s => SShape s
typeSShape = sListSShape (typeSList @s)
proxySShape :: forall s. KnownShape s => Proxy s -> SShape s
proxySShape _ = typeSShape
sListSShape :: forall s. All KnownNat s => SList s -> SShape s
sListSShape = allKnown'
data VarInfo = forall s t. (KnownShape s, KnownTyp t) => VarInfo {varTrainable :: Bool,
varRef :: Ref String s t,
varInitial :: Maybe (T s t)}
varName :: VarInfo -> String
varName VarInfo {varRef = Ref {..}} = refName
}
initialGstate :: GState
initialGstate = (GState {nextVar = 0
,genRegularizers=[]
})
data Gen a where
GPId :: Gen Integer
GPVariable :: forall (shape :: Shape) t. (KnownTyp t,KnownShape shape) => Bool -> String -> Maybe (T shape t) -> Gen (Ref String shape t)
GPModify :: (KnownShape s,KnownTyp t) => Ref Int s t -> T s t -> Gen (T s t)
GPReturn :: a -> Gen a
GPState :: (GState -> (a,GState)) -> Gen a
GPApp :: (Gen (a -> b)) -> Gen a -> Gen b
GPBind :: Gen a -> (a -> Gen b) -> Gen b
genGets :: (GState -> a) -> Gen a
genGets f = GPState (\s -> (f s, s))
instance Applicative Gen where
(<*>) = GPApp
pure = GPReturn
instance Monad Gen where
(>>=) = GPBind
instance Functor Gen where
fmap f = (pure f <*>)
type IndexTensor indexShape containerShape w = T (indexShape ++ '[Length containerShape]) ('Typ 'Int w)
data Distribution (s :: Shape) (t :: Typ) where
TruncatedNormalD :: Float -> Distribution s ('Typ 'Float w)
UniformD :: Float -> Float -> Distribution s ('Typ 'Float w)
OrthogonalD :: Distribution '[m,n] ('Typ 'Float w)
data Ref r s t = Ref {refName :: r,
refShape :: SShape s,
refTyp :: STyp t}
data NilOp s t where
ExternalVar :: Ref String s t -> NilOp s t
Variable :: Ref Int s t -> NilOp s t
Constant :: HaskType t -> NilOp '[] t
Range :: KnownBits w => Sat KnownNat n -> NilOp '[n] ('Typ 'Int w)
data Catable s1 s2 t n = Catable (Sat KnownNat n) (T (s1 ++ (n ': s2)) t)
type Unique = Int
data T (s :: Shape) (t :: Typ) where
BroadcastT :: KnownTyp t => Maybe Unique -> Bool -> Sat KnownNat n -> SShape s -> T s t -> T (n ': s) t
MapT :: KnownTyp t => Sat KnownNat n -> SShape s -> (T s t -> T r u) -> T (n ': s) t -> T (n ': r) u
ZipT :: (KnownTyp t, KnownTyp u) => Sat KnownNat n -> SShape s -> SShape r -> (T s t -> T r u -> T q v) -> T (n ': s) t -> T (n ': r) u -> T (n ': q) v
Zip3T :: (KnownTyp t, KnownTyp u, KnownTyp v) => Sat KnownNat n -> SShape s -> SShape r -> SShape q -> (T s t -> T r u -> T q v -> T p w) -> T (n ': s) t -> T (n ': r) u -> T (n ': q) v -> T (n ': p) w
T :: NilOp s t -> T s t
this is the unique noise identifier , preventing two different noises to ever be re - shared .
SShape s0 -> SShape s1 ->
Distribution s1 t ->
T (s0 ++ s1) t
BinOp :: (KnownTyp t,KnownTyp u) => BinOp s1 t s2 u s3 v -> SShape s0 -> SShape s1 -> STyp t -> SShape s2 -> STyp u -> T (s0 ++ s1) t -> T (s0 ++ s2) u -> T (s0 ++ s3) v
UnOp :: KnownTyp t => UnOp s1 t s2 u -> SShape s0 -> T (s0 ++ s1) t -> T (s0 ++ s2) u
Unbroadcast :: Sat KnownNat n -> Unique -> T (n ': s) t -> T s t
DirectBroadcast :: SShape s0 -> NP proxy' s1 -> SShape s2 -> NP proxy' s3 -> T (s0 ++ s2) t -> T (s0 ++ (s1 ++ (s2 ++ s3))) t
ReshapeFrom :: Product s ~ Product s0 => SShape s0 -> T s0 t -> T s t
Transpose :: SShape s0 -> Permutation s0 s -> T s0 t -> T s t
Concat :: SShape s0 -> SShape s1 -> NP (Catable s0 s1 t) ns -> T (s0 ++ (Sum ns ': s1)) t
Gather :: KnownTyp ('Typ 'Int w) => SShape indexShape -> SShape s0 -> Sat KnownNat m -> SShape s1
-> T (s0 ++ (m ': s1)) t -> T (s0 ++ indexShape) ('Typ 'Int w) -> T (s0 ++ indexShape ++ s1) t
GatherND :: KnownTyp ('Typ 'Int w) => SShape containerShape -> SShape elementShape -> SShape indexShape
-> T (containerShape ++ elementShape) t -> IndexTensor indexShape containerShape w -> T (indexShape ++ elementShape) t
MatMul :: forall s m n o t. KnownNumeric t => SShape s -> Sat KnownNat n -> Sat KnownNat o -> Sat KnownNat m -> T (s ++ '[n,o]) t -> T (s ++ [o,m]) t -> T (s ++ [n,m]) t
Where :: T s TFBool -> T s t -> T s t -> T s t
If :: Scalar TFBool -> T s t -> T s t -> T s t
Convolution :: KnownAlgebraic t => Sat KnownNat bs -> Sat KnownNat inChannels -> Sat KnownNat outChannels -> SShape filterSpatialShape -> SShape s
-> T (bs ': s ++ '[outChannels]) t
Pool :: Length outSpatial ~ Length window =>
Sat KnownNat bs -> SShape window -> PoolingType -> Sat KnownNat numChannels -> SShape outSpatial
-> T (bs ': ZipWithMulShapes window outSpatial ++ '[numChannels]) t
-> T (bs ': outSpatial ++ '[numChannels]) t
Softmax :: Sat KnownNat bs -> Sat KnownNat n -> T '[bs,n] (Flt w) -> T '[bs,n] (Flt w)
instance Show Unique where
type family ZipWithMulShapes (xs::Shape) (xy::Shape) :: Shape
type instance ZipWithMulShapes (x ': xs) (y ': ys) = x*y ': ZipWithMulShapes xs ys
type instance ZipWithMulShapes '[] _ = '[]
type instance ZipWithMulShapes _ '[] = '[]
satMul :: forall n m. Sat KnownNat n -> Sat KnownNat m -> Sat KnownNat (n*m)
satMul Sat Sat = Sat
satProd :: SShape s -> Sat KnownNat (Product s)
satProd Unit = natSat @1
satProd (x :* xs) = satMul x (satProd xs)
satAdd :: forall n m. Sat KnownNat n -> Sat KnownNat m -> Sat KnownNat (n+m)
satAdd Sat Sat = Sat
zipWithMulSShapes :: SShape xs -> SShape ys -> SShape (ZipWithMulShapes xs ys)
zipWithMulSShapes Unit _ = Unit
zipWithMulSShapes _ Unit = Unit
zipWithMulSShapes ((:*) x xs) ((:*) y ys) = (:*) (satMul x y) (zipWithMulSShapes xs ys)
data PoolingType = MaxPool | AvgPool deriving Show
type Tensor shape = T shape
data ReduceOp = Mean | Max | Min | Sum
data Axis1Op s1 t s2 u where
ReverseT :: Sat KnownNat n -> Axis1Op '[n] t '[n] t
ArgMax :: KnownNumeric t => Sat KnownNat n -> Axis1Op '[n] t '[] ('Typ 'Int b)
OneHot :: KnownNumeric t => Sat KnownNat n -> Axis1Op '[] ('Typ 'Int b) '[n] t
ReduceOp :: KnownNumeric t => Sat KnownNat n -> ReduceOp -> Axis1Op '[n] t '[] t
SliceOp :: forall m n t proxy. proxy m -> Sat KnownNat n -> Integer -> Integer -> Axis1Op '[n] t '[m] t
AccessOp :: forall n t. Sat KnownNat n -> Integer -> Axis1Op '[n] t '[] t
data Float1Op
= ClipByValue Float Float
| Tanh
| Sin
| Exp
| Sigmoid
| HardSigmoid
| Relu
| Floor
| Round
| Cos
| Log
| Asin
| Acos
| Sinh
| Cosh
| Asinh
| Acosh
| Atan
| Atanh
| Sqrt
deriving Show
data Num1Op = Square | Negate | Abs | Sign
deriving Show
data Side = Upper | Lower
data UnOp (s1 :: Shape) (t :: Typ) (s2 :: Shape) (u :: Typ) where
ExpM :: KnownNumeric t => Sat KnownNat n -> UnOp '[n,n] t '[n,n] t
Diag :: Sat KnownNat n -> UnOp '[n] t '[n,n] t
StopGradient :: UnOp '[] t '[] t
Cast :: UnOp '[] t '[] u
Conjugate :: UnOp '[] ('Typ 'Cmplx w) '[] ('Typ 'Cmplx w)
RealPart :: UnOp '[] ('Typ 'Cmplx w) '[] ('Typ 'Float w)
Num1Op :: KnownNumeric t => Num1Op -> UnOp '[] t '[] t
Float1Op :: Float1Op -> UnOp '[] (Flt w) '[] (Flt w)
Axis1Op :: SShape s -> Axis1Op s1 t s2 u -> UnOp (s1 ++ s) t (s2 ++ s) u
data CompOp = Less | Greater | LessOrEqual | GreaterOrEqual
data LogicOp = And | Or
data Simple2Op t u where
Divide :: KnownAlgebraic t => Simple2Op t t
IntegerDiv :: Simple2Op ('Typ 'Int w) ('Typ 'Int w)
Equal :: KnownTyp t => Simple2Op t TFBool
Subtract :: KnownNumeric t => Simple2Op t t
Multiply :: KnownNumeric t => Simple2Op t t
Add :: KnownNumeric t => Simple2Op t t
Minimum :: KnownNumeric t => Simple2Op t t
Maximum :: KnownNumeric t => Simple2Op t t
FloorMod :: KnownNumeric t => Simple2Op t t
Comparision :: KnownNumeric t => CompOp -> Simple2Op t TFBool
Logic :: LogicOp -> Simple2Op TFBool TFBool
MkComplex :: Simple2Op (Flt w) ('Typ 'Cmplx w)
data BinOp s1 t1 s2 t2 s3 t3 where
Simple2Op :: Simple2Op t u -> BinOp '[] t '[] t '[] u
SigmoidCrossEntropyWithLogits :: KnownFloat t => BinOp '[] t '[] t '[] t
SoftmaxCrossEntropyWithLogits :: KnownFloat t => BinOp '[n] t '[n] t '[] t
SparseSoftmaxCrossEntropyWithLogits :: BinOp '[] Int32 '[n] (Flt w) '[] (Flt w)
deriving instance Show ( a b c d e f )
data Permutation (s :: [k]) (t :: [k]) where
PermId :: Permutation s s
PermSkip :: Permutation s t -> Permutation (n ': s) (n ': t)
PermSwap :: Permutation (n ': m ': s) (m ': n ': s)
PermTrans :: Permutation s t -> Permutation t u -> Permutation s u
deriving instance Show (Permutation s t)
travTensor :: Applicative m => (forall s t. (KnownTyp t, KnownShape s) => String -> (T s t) -> m (T s t)) -> String -> p -> m p
instance (KnownTyp t, KnownShape shape) => KnownTensors (T shape t) where
travTensor f = f
instance (All KnownPair ys) => KnownTensors (HHTV ys) where
travTensor :: forall m. Applicative m => (forall s t'. (KnownTyp t', KnownShape s) => String -> T s t' -> m (T s t')) -> String -> HHTV ys -> m (HHTV ys)
travTensor f s = ttr 0
where ttr :: forall xs. All KnownPair xs => Int -> HHTV xs -> m (HHTV xs)
ttr _ Unit = pure Unit
ttr n (Uncurry x :* xs) = do
x' <- f (s <> "_" <> show n) x
xs' <- ttr (n + 1) xs
return (Uncurry x' :* xs')
instance (KnownTyp t, All KnownShape ys) => KnownTensors (HTV t ys) where
travTensor :: forall m. Applicative m => (forall s t'. (KnownTyp t', KnownShape s) => String -> T s t' -> m (T s t')) -> String -> (HTV t ys) -> m (HTV t ys)
travTensor f s = ttr 0
where ttr :: forall xs. All KnownShape xs => Int -> HTV t xs -> m (HTV t xs)
ttr _ Unit = pure Unit
ttr n (F x :* xs) = do
x' <- f (s <> "_" <> show n) x
xs' <- ttr (n + 1) xs
return (F x' :* xs')
instance (KnownTensors p, KnownTensors q) => KnownTensors (p,q) where
travTensor f s (x,y) = (,) <$> travTensor f (s<>"_fst") x <*> travTensor f (s<>"_snd") y
instance (KnownTensors p1, KnownTensors p2, KnownTensors p3) => KnownTensors (p1,p2,p3) where
travTensor f s (x,y,z) = (,,) <$> travTensor f (s<>"_1") x <*> travTensor f (s<>"_2") y <*> travTensor f (s<>"_3") z
instance (KnownTensors p1, KnownTensors p2, KnownTensors p3, KnownTensors p4) => KnownTensors (p1,p2,p3,p4) where
travTensor f s (x,y,z,w) = (,,,) <$> travTensor f (s<>"_1") x <*> travTensor f (s<>"_2") y <*> travTensor f (s<>"_3") z <*> travTensor f (s<>"_4") w
class KnownTensors p => ParamWithDefault p where
defaultInitializer :: Gen p
|
9e05789ff2a10263513b138b5f4e89906f0fc2409443e2a84f61dab5641cb93d | paurkedal/ocaml-bitpath | pkg.ml | #! /usr/bin/env ocaml
#use "topfind"
#require "topkg"
open Topkg
let licenses = List.map Pkg.std_file ["COPYING.LESSER"; "COPYING"]
let () = Pkg.describe ~licenses "bitpath" @@ fun c ->
Ok [
Pkg.mllib "lib/bitpath.mllib";
Pkg.clib "lib/libbitpath_stubs.clib";
Pkg.lib "lib/bitpath_prereq.h";
Pkg.lib "lib/bitpath.h";
Pkg.test "tests/testsuite";
]
| null | https://raw.githubusercontent.com/paurkedal/ocaml-bitpath/28ffff6fc0d211faa8776dfa12de82a34acfa847/pkg/pkg.ml | ocaml | #! /usr/bin/env ocaml
#use "topfind"
#require "topkg"
open Topkg
let licenses = List.map Pkg.std_file ["COPYING.LESSER"; "COPYING"]
let () = Pkg.describe ~licenses "bitpath" @@ fun c ->
Ok [
Pkg.mllib "lib/bitpath.mllib";
Pkg.clib "lib/libbitpath_stubs.clib";
Pkg.lib "lib/bitpath_prereq.h";
Pkg.lib "lib/bitpath.h";
Pkg.test "tests/testsuite";
]
| |
3841c5a233b03086d0b8c8706ef13cc02aa37257b9feeea2ea9f5d38c82d0567 | MaxwellBo/scratches | handler.hs | import System.Environment
import System.IO
import System.IO.Error
main = toTry `catch` handler
toTry :: IO ()
toTry = do (fileName:_) <- getArgs
contents <- readFile fileName
putStrLn $ "The file has " ++ show (length (lines contents)) ++ " lines!"
handler :: IOError -> IO ()
handler e
| isDoesNotExistError e = putStrLn "The file doesn't exist!"
| otherwise = ioError e | null | https://raw.githubusercontent.com/MaxwellBo/scratches/3ba8f44eaa40b86d4dc1c2514b10b910fbd38f5a/handler.hs | haskell | import System.Environment
import System.IO
import System.IO.Error
main = toTry `catch` handler
toTry :: IO ()
toTry = do (fileName:_) <- getArgs
contents <- readFile fileName
putStrLn $ "The file has " ++ show (length (lines contents)) ++ " lines!"
handler :: IOError -> IO ()
handler e
| isDoesNotExistError e = putStrLn "The file doesn't exist!"
| otherwise = ioError e | |
7c1475115465fff9d1c0ef80c9215bc0b9d471a52fb9574573b7a371cb052a14 | Netflix/PigPen | join.clj | ;;
;;
Copyright 2013 - 2015 Netflix , Inc.
;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;;
;;
(ns pigpen.join
"Commands to join and group data.
Note: Most of these are present in pigpen.core. Normally you should use those instead.
"
(:refer-clojure :exclude [group-by into reduce])
(:require [pigpen.extensions.core :refer [pp-str forcat]]
[pigpen.raw :as raw]
[pigpen.code :as code]))
(set! *warn-on-reflection* true)
(defn ^:private select->bind
"Performs the key selection prior to a join. If join-nils is true, we leave
nils as frozen nils so they appear as values. Otherwise we return a nil value as
nil and let the join take its course. If sentinel-nil is true, nil keys are
coerced to ::nil so they can be differentiated from outer joins later."
;; TODO - If this is an inner join, we can filter nil keys before the join
[{:keys [join-nils sentinel-nil]} {:keys [from key-selector on by]}]
(let [key-selector (or key-selector on by 'identity)]
(raw/bind$
(if sentinel-nil
`(pigpen.runtime/key-selector->bind (comp pigpen.runtime/sentinel-nil ~key-selector))
`(pigpen.runtime/key-selector->bind ~key-selector))
{:field-type (if join-nils :frozen :frozen-with-nils)
:alias ['key 'value]}
from)))
TODO verify these are vetted at compile time
(defn fold-fn*
"See pigpen.core/fold-fn"
[pre combinef reducef post]
{:pre [pre combinef reducef post]}
(code/assert-arity* pre 1)
(code/assert-arity* combinef 0)
(code/assert-arity* combinef 2)
(code/assert-arity* reducef 2)
(code/assert-arity* post 1)
{:type :fold
:pre pre
:combinef combinef
:reducef reducef
:post post})
(defn ^:private projection-fold [fold field alias]
(if fold
(raw/projection-func$ alias false (raw/code$ :fold "" fold [field]))
(raw/projection-field$ field alias)))
(defn seq-groups
"Calls seq on the result of all co-groupings to enforce consistency across platforms"
[f]
(fn [key & groups]
(apply f key (map #(if (seq? %) (seq %) %) groups))))
(defmethod raw/ancestors->fields :group
[_ id ancestors]
(vec (cons (symbol (name id) "group") (mapcat :fields ancestors))))
(defmethod raw/fields->keys :group
[_ fields]
(filterv (comp '#{key} symbol name) fields))
(defn group*
"Similar to pigpen.core/cogroup, but is a function and takes a quoted function
as an argument. Also takes select clauses as maps.
Example:
(group*
[{:from data1, :by (trap (fn [x] (* x x)))}
{:from data2, :by 'identity}]
(trap (fn [k l r] {:key k, :left l, :right r})))
See also: pigpen.core/group-by, pigpen.core/cogroup, pigpen.core.fn/trap
"
{:added "0.3.0"}
([selects f]
(group* selects f {}))
([selects f opts]
(let [relations (mapv (partial select->bind opts) selects)
join-types (mapv #(get % :type :optional) selects)
fields (mapcat :fields relations)
{:keys [fields], :as c} (raw/group$ :group join-types (dissoc opts :fold) relations)
values (filter (comp '#{group value} symbol name) fields)]
(code/assert-arity f (count values))
(if (some :fold selects)
(let [folds (mapv projection-fold
(cons nil (map :fold selects))
values
(map #(vector (symbol (str "value" %))) (range)))]
(->> c
(raw/project$ folds {})
(raw/bind$ '[pigpen.join] `(pigpen.runtime/map->bind (seq-groups ~f)) {})))
; no folds
(->> c
(raw/bind$ '[pigpen.join] `(pigpen.runtime/map->bind (seq-groups ~f))
{:args values}))))))
(defn reduce*
"Reduces all data into a single collection and applies f to that collection.
The function `f` must be quoted prior to calling reduce*.
Example:
(reduce*
(trap (fn [xs] (count xs)))
data)
See also: pigpen.core/into, pigpen.core/reduce, pigpen.core.fn/trap
"
{:added "0.3.0"}
([f relation]
(reduce* f {} relation))
([f opts relation]
(code/assert-arity f 1)
(->> relation
(raw/reduce$ opts)
(raw/bind$ `(pigpen.runtime/map->bind ~f) {}))))
(defn fold*
"Applies the fold function `fold` to the data. Similar to pigpen.core/fold,
but is a function and `fold` must be quoted.
Example:
(fold* '(fold/count) data)
See also: pigpen.core/fold, pigpen.core.fn/trap
"
{:added "0.3.0"}
([fold relation]
(fold* fold {} relation))
([fold opts relation]
(let [{:keys [fields], :as c} (raw/reduce$ opts relation)]
(->> c
(raw/project$ [(projection-fold fold (first fields) '[value])] {})))))
(defmethod raw/ancestors->fields :join
[_ id ancestors]
(vec (mapcat :fields ancestors)))
(defmethod raw/fields->keys :join
[_ fields]
(filterv (comp '#{key} symbol name) fields))
(defn join*
"Similar to pigpen.core/join, but is a function and takes a quoted function
as an argument. Also takes select clauses as maps.
Example:
(join*
[{:from data1, :by (trap (fn [x] (* x x)))}
{:from data2, :by 'identity}]
(trap (fn [l r] {:left l, :right r})))
See also: pigpen.core/join, pigpen.core.fn/trap
"
{:arglists '([selects f] [selects f opts])
:added "0.3.0"}
([selects f]
(join* selects f {}))
([selects f {:keys [all-args] :as opts}]
(let [relations (mapv (partial select->bind opts) selects)
join-types (mapv #(get % :type :required) selects)
fields (mapcat :fields relations)
values (if all-args
fields
(filter (comp '#{value} symbol name) fields))]
(code/assert-arity f (count values))
(->> relations
(raw/join$ :join join-types opts)
(raw/bind$ `(pigpen.runtime/map->bind ~f) {:args values})))))
(defmacro group-by
"Groups relation by the result of calling (key-selector item) for each item.
This produces a sequence of map entry values, similar to using seq with a
map. Each value will be a lazy sequence of the values that match key.
Optionally takes a map of options, including :parallel and :fold.
Example:
(pig/group-by :a foo)
(pig/group-by count {:parallel 20} foo)
Options:
:parallel - The degree of parallelism to use (pig only)
See also: pigpen.core/cogroup
See pigpen.fold for more info on :fold options.
"
{:added "0.1.0"}
([key-selector relation] `(group-by ~key-selector {} ~relation))
([key-selector opts relation]
`(group* [(merge
{:from ~relation
:key-selector (code/trap ~key-selector)
:type :optional}
~(code/trap-values #{:on :by :key-selector :fold} opts))]
'(fn [~'k ~'v] (clojure.lang.MapEntry. ~'k ~'v))
(assoc ~opts :description ~(pp-str key-selector)))))
(defmacro into
"Returns a new relation with all values from relation conjoined onto to.
Note: This operation uses a single reducer and won't work for large datasets.
See also: pigpen.core/reduce
Note: Reducing an empty sequence will always return an empty sequence:
=> (->>
(pig/return [])
(pig/into {})
(pig/dump))
[]
"
{:added "0.1.0"}
[to relation]
`(reduce* (quote (partial clojure.core/into ~to))
{:description (str "into " ~to)}
~relation))
;; TODO If reduce returns a seq, should it be flattened for further processing?
(defmacro reduce
"Reduce all items in relation into a single value. Follows semantics of
clojure.core/reduce. If a sequence is returned, it is kept as a single value
for further processing.
Example:
(pig/reduce + foo)
(pig/reduce conj [] foo)
Note: This operation uses a single reducer and won't work for large datasets.
Use pig/fold to do a parallel reduce.
See also: pigpen.core/fold, pigpen.core/into
Note: Reducing an empty sequence will always return an empty sequence:
=> (->>
(pig/return [])
(pig/reduce +)
(pig/dump))
[]
"
{:added "0.1.0"}
([f relation]
`(reduce* (code/trap (partial clojure.core/reduce ~f))
{:description ~(pp-str f)}
~relation))
([f val relation]
`(reduce* (code/trap (partial clojure.core/reduce ~f ~val))
{:description ~(pp-str f)}
~relation)))
(defmacro fold
"Computes a parallel reduce of the relation. This is done in multiple stages
using reducef and combinef. First, combinef is called with no args to produce a
seed value. Then, reducef reduces portions of the data using that seed value.
Finally, combinef is used to reduce each of the intermediate values. If combinef
is not specified, reducef is used for both. Fold functions defined using
pigpen.fold/fold-fn can also be used.
Example:
(pig/fold + foo)
(pig/fold + (fn [acc _] (inc acc)) foo)
(pig/fold (fold/fold-fn + (fn [acc _] (inc acc))) foo)
See pigpen.fold for more info on fold functions.
Note: Folding an empty sequence will always return an empty sequence:
=> (->>
(pig/return [])
(pig/fold (fold/count))
(pig/dump))
[]
"
{:added "0.2.0"}
([reducef relation]
`(if (-> ~reducef :type #{:fold})
(fold* (code/trap ~reducef)
{}
~relation)
(fold ~reducef ~reducef ~relation)))
([combinef reducef relation]
`(fold* (code/trap (fold-fn* identity ~combinef ~reducef identity))
{}
~relation)))
(defmacro cogroup
"Joins many relations together by a common key. Each relation specifies a
key-selector function on which to join. A combiner function is applied to each
join key and all values from each relation that match that join key. This is
similar to join, without flattening the data. Optionally takes a map of options.
Example:
(pig/cogroup [(foo :on :a)
(bar :on :b, :type :required, :fold (fold/count))]
(fn [key foos bar-count] ...)
{:parallel 20})
In this example, foo and bar are other pig queries and :a and :b are the
key-selector functions for foo and bar, respectively. These can be any
functions - not just keywords. There can be more than two select clauses.
By default, a matching key value from eatch source relation is optional,
meaning that keys don't have to exist in all source relations to be part of the
output. To specify a relation as required, add 'required' to the select clause.
The third argument is a function used to consolidate matching key values. For
each uniqe key value, this function is called with the value of the key and all
values with that key from foo and bar. As such, foos and bars are both
collections. The last argument is an optional map of options. A fold function
can be specified to aggregate groupings in parallel. See pigpen.fold for more
info on fold functions.
Options:
:parallel - The degree of parallelism to use (pig only)
:join-nils - Whether nil keys from each relation should be treated as equal
See also: pigpen.core/join, pigpen.core/group-by
"
{:added "0.1.0"}
([selects f] `(cogroup ~selects ~f {}))
([selects f opts]
(let [selects# (->> selects
(map (partial cons :from))
(map (partial code/trap-values #{:on :by :key-selector :fold}))
vec)]
`(group* ~selects#
(code/trap ~f)
(assoc ~opts :description ~(pp-str f))))))
(defmacro join
"Joins many relations together by a common key. Each relation specifies a
key-selector function on which to join. A function is applied to each join
key and each pair of values from each relation that match that join key.
Optionally takes a map of options.
Example:
(pig/join [(foo :on :a)
(bar :on :b :type :optional)]
(fn [f b] ...)
{:parallel 20})
In this example, foo and bar are other pig queries and :a and :b are the
key-selector functions for foo and bar, respectively. These can be any
functions - not just keywords. There can be more than two select clauses.
By default, a matching key value from each source relation is required,
meaning that they must exist in all source relations to be part of the output.
To specify a relation as optional, add 'optional' to the select clause. The
third argument is a function used to consolidate matching key values. For each
uniqe key value, this function is called with each set of values from the cross
product of each source relation. By default, this does a standard inner join.
Use 'optional' to do outer joins. The last argument is an optional map of
options.
Options:
:parallel - The degree of parallelism to use (pig only)
:join-nils - Whether nil keys from each relation should be treated as equal
See also: pigpen.core/cogroup, pigpen.core/union
"
{:added "0.1.0"}
([selects f] `(join ~selects ~f {}))
([selects f opts]
(let [selects# (->> selects
(map (partial cons :from))
(map (partial code/trap-values #{:on :by :key-selector}))
vec)]
`(join* ~selects#
(code/trap ~f)
(assoc ~opts :description ~(pp-str f))))))
(defmacro filter-by
"Filters a relation by the keys in another relation. The key-selector function
is applied to each element of relation. If the resulting key is present in keys,
the value is kept. Otherwise it is dropped. nils are dropped or preserved based
on whether there is a nil value present in keys. This operation is referred to
as a semi-join in relational databases.
Example:
(let [keys (pig/return [1 3 5])
data (pig/return [{:k 1, :v \"a\"}
{:k 2, :v \"b\"}
{:k 3, :v \"c\"}
{:k 4, :v \"d\"}
{:k 5, :v \"e\"}])]
(pig/filter-by :k keys data))
=> (pig/dump *1)
[{:k 1, :v \"a\"}
{:k 3, :v \"c\"}
{:k 5, :v \"e\"}]
Options:
:parallel - The degree of parallelism to use (pig only)
Note: keys must be distinct before this is used or you will get duplicate values.
Note: Unlike filter, this joins relation with keys and can be potentially expensive.
See also: pigpen.core/filter, pigpen.core/remove-by, pigpen.core/intersection
"
{:added "0.2.3"}
([key-selector keys relation] `(filter-by ~key-selector ~keys {} ~relation))
([key-selector keys opts relation]
`(join* [{:from ~keys :key-selector 'identity}
{:from ~relation :key-selector (code/trap ~key-selector)}]
'(fn [~'k ~'v] ~'v)
(assoc ~opts :description ~(pp-str key-selector)
:sentinel-nil true))))
(defmacro remove-by
"Filters a relation by the keys in another relation. The key-selector function
is applied to each element of relation. If the resulting key is _not_ present in
keys, the value is kept. Otherwise it is dropped. nils are dropped or preserved
based on whether there is a nil value present in keys. This operation is
referred to as an anti-join in relational databases.
Example:
(let [keys (pig/return [1 3 5])
data (pig/return [{:k 1, :v \"a\"}
{:k 2, :v \"b\"}
{:k 3, :v \"c\"}
{:k 4, :v \"d\"}
{:k 5, :v \"e\"}])]
(pig/remove-by :k keys data))
=> (pig/dump *1)
[{:k 2, :v \"b\"}
{:k 4, :v \"d\"}]
Options:
:parallel - The degree of parallelism to use (pig only)
Note: Unlike remove, this joins relation with keys and can be potentially expensive.
See also: pigpen.core/remove, pigpen.core/filter-by, pigpen.core/difference
"
{:added "0.2.3"}
([key-selector keys relation] `(remove-by ~key-selector ~keys {} ~relation))
([key-selector keys opts relation]
(let [f '(fn [[k _ _ v]] (when (nil? k) [v]))]
`(->>
(join* [{:from ~keys :key-selector 'identity :type :optional}
{:from ~relation :key-selector (code/trap ~key-selector)}]
'vector
(assoc ~opts :description ~(pp-str key-selector)
:all-args true
:sentinel-nil true))
(raw/bind$ '(pigpen.runtime/mapcat->bind ~f) {})))))
| null | https://raw.githubusercontent.com/Netflix/PigPen/18d461d9b2ee6c1bb7eee7324889d32757fc7513/pigpen-core/src/main/clojure/pigpen/join.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
TODO - If this is an inner join, we can filter nil keys before the join
no folds
TODO If reduce returns a seq, should it be flattened for further processing? | Copyright 2013 - 2015 Netflix , Inc.
distributed under the License is distributed on an " AS IS " BASIS ,
(ns pigpen.join
"Commands to join and group data.
Note: Most of these are present in pigpen.core. Normally you should use those instead.
"
(:refer-clojure :exclude [group-by into reduce])
(:require [pigpen.extensions.core :refer [pp-str forcat]]
[pigpen.raw :as raw]
[pigpen.code :as code]))
(set! *warn-on-reflection* true)
(defn ^:private select->bind
"Performs the key selection prior to a join. If join-nils is true, we leave
nils as frozen nils so they appear as values. Otherwise we return a nil value as
nil and let the join take its course. If sentinel-nil is true, nil keys are
coerced to ::nil so they can be differentiated from outer joins later."
[{:keys [join-nils sentinel-nil]} {:keys [from key-selector on by]}]
(let [key-selector (or key-selector on by 'identity)]
(raw/bind$
(if sentinel-nil
`(pigpen.runtime/key-selector->bind (comp pigpen.runtime/sentinel-nil ~key-selector))
`(pigpen.runtime/key-selector->bind ~key-selector))
{:field-type (if join-nils :frozen :frozen-with-nils)
:alias ['key 'value]}
from)))
TODO verify these are vetted at compile time
(defn fold-fn*
"See pigpen.core/fold-fn"
[pre combinef reducef post]
{:pre [pre combinef reducef post]}
(code/assert-arity* pre 1)
(code/assert-arity* combinef 0)
(code/assert-arity* combinef 2)
(code/assert-arity* reducef 2)
(code/assert-arity* post 1)
{:type :fold
:pre pre
:combinef combinef
:reducef reducef
:post post})
(defn ^:private projection-fold [fold field alias]
(if fold
(raw/projection-func$ alias false (raw/code$ :fold "" fold [field]))
(raw/projection-field$ field alias)))
(defn seq-groups
"Calls seq on the result of all co-groupings to enforce consistency across platforms"
[f]
(fn [key & groups]
(apply f key (map #(if (seq? %) (seq %) %) groups))))
(defmethod raw/ancestors->fields :group
[_ id ancestors]
(vec (cons (symbol (name id) "group") (mapcat :fields ancestors))))
(defmethod raw/fields->keys :group
[_ fields]
(filterv (comp '#{key} symbol name) fields))
(defn group*
"Similar to pigpen.core/cogroup, but is a function and takes a quoted function
as an argument. Also takes select clauses as maps.
Example:
(group*
[{:from data1, :by (trap (fn [x] (* x x)))}
{:from data2, :by 'identity}]
(trap (fn [k l r] {:key k, :left l, :right r})))
See also: pigpen.core/group-by, pigpen.core/cogroup, pigpen.core.fn/trap
"
{:added "0.3.0"}
([selects f]
(group* selects f {}))
([selects f opts]
(let [relations (mapv (partial select->bind opts) selects)
join-types (mapv #(get % :type :optional) selects)
fields (mapcat :fields relations)
{:keys [fields], :as c} (raw/group$ :group join-types (dissoc opts :fold) relations)
values (filter (comp '#{group value} symbol name) fields)]
(code/assert-arity f (count values))
(if (some :fold selects)
(let [folds (mapv projection-fold
(cons nil (map :fold selects))
values
(map #(vector (symbol (str "value" %))) (range)))]
(->> c
(raw/project$ folds {})
(raw/bind$ '[pigpen.join] `(pigpen.runtime/map->bind (seq-groups ~f)) {})))
(->> c
(raw/bind$ '[pigpen.join] `(pigpen.runtime/map->bind (seq-groups ~f))
{:args values}))))))
(defn reduce*
"Reduces all data into a single collection and applies f to that collection.
The function `f` must be quoted prior to calling reduce*.
Example:
(reduce*
(trap (fn [xs] (count xs)))
data)
See also: pigpen.core/into, pigpen.core/reduce, pigpen.core.fn/trap
"
{:added "0.3.0"}
([f relation]
(reduce* f {} relation))
([f opts relation]
(code/assert-arity f 1)
(->> relation
(raw/reduce$ opts)
(raw/bind$ `(pigpen.runtime/map->bind ~f) {}))))
(defn fold*
"Applies the fold function `fold` to the data. Similar to pigpen.core/fold,
but is a function and `fold` must be quoted.
Example:
(fold* '(fold/count) data)
See also: pigpen.core/fold, pigpen.core.fn/trap
"
{:added "0.3.0"}
([fold relation]
(fold* fold {} relation))
([fold opts relation]
(let [{:keys [fields], :as c} (raw/reduce$ opts relation)]
(->> c
(raw/project$ [(projection-fold fold (first fields) '[value])] {})))))
(defmethod raw/ancestors->fields :join
[_ id ancestors]
(vec (mapcat :fields ancestors)))
(defmethod raw/fields->keys :join
[_ fields]
(filterv (comp '#{key} symbol name) fields))
(defn join*
"Similar to pigpen.core/join, but is a function and takes a quoted function
as an argument. Also takes select clauses as maps.
Example:
(join*
[{:from data1, :by (trap (fn [x] (* x x)))}
{:from data2, :by 'identity}]
(trap (fn [l r] {:left l, :right r})))
See also: pigpen.core/join, pigpen.core.fn/trap
"
{:arglists '([selects f] [selects f opts])
:added "0.3.0"}
([selects f]
(join* selects f {}))
([selects f {:keys [all-args] :as opts}]
(let [relations (mapv (partial select->bind opts) selects)
join-types (mapv #(get % :type :required) selects)
fields (mapcat :fields relations)
values (if all-args
fields
(filter (comp '#{value} symbol name) fields))]
(code/assert-arity f (count values))
(->> relations
(raw/join$ :join join-types opts)
(raw/bind$ `(pigpen.runtime/map->bind ~f) {:args values})))))
(defmacro group-by
"Groups relation by the result of calling (key-selector item) for each item.
This produces a sequence of map entry values, similar to using seq with a
map. Each value will be a lazy sequence of the values that match key.
Optionally takes a map of options, including :parallel and :fold.
Example:
(pig/group-by :a foo)
(pig/group-by count {:parallel 20} foo)
Options:
:parallel - The degree of parallelism to use (pig only)
See also: pigpen.core/cogroup
See pigpen.fold for more info on :fold options.
"
{:added "0.1.0"}
([key-selector relation] `(group-by ~key-selector {} ~relation))
([key-selector opts relation]
`(group* [(merge
{:from ~relation
:key-selector (code/trap ~key-selector)
:type :optional}
~(code/trap-values #{:on :by :key-selector :fold} opts))]
'(fn [~'k ~'v] (clojure.lang.MapEntry. ~'k ~'v))
(assoc ~opts :description ~(pp-str key-selector)))))
(defmacro into
"Returns a new relation with all values from relation conjoined onto to.
Note: This operation uses a single reducer and won't work for large datasets.
See also: pigpen.core/reduce
Note: Reducing an empty sequence will always return an empty sequence:
=> (->>
(pig/return [])
(pig/into {})
(pig/dump))
[]
"
{:added "0.1.0"}
[to relation]
`(reduce* (quote (partial clojure.core/into ~to))
{:description (str "into " ~to)}
~relation))
(defmacro reduce
"Reduce all items in relation into a single value. Follows semantics of
clojure.core/reduce. If a sequence is returned, it is kept as a single value
for further processing.
Example:
(pig/reduce + foo)
(pig/reduce conj [] foo)
Note: This operation uses a single reducer and won't work for large datasets.
Use pig/fold to do a parallel reduce.
See also: pigpen.core/fold, pigpen.core/into
Note: Reducing an empty sequence will always return an empty sequence:
=> (->>
(pig/return [])
(pig/reduce +)
(pig/dump))
[]
"
{:added "0.1.0"}
([f relation]
`(reduce* (code/trap (partial clojure.core/reduce ~f))
{:description ~(pp-str f)}
~relation))
([f val relation]
`(reduce* (code/trap (partial clojure.core/reduce ~f ~val))
{:description ~(pp-str f)}
~relation)))
(defmacro fold
  "Computes a parallel reduce of the relation. This is done in multiple stages
   using reducef and combinef. First, combinef is called with no args to produce a
   seed value. Then, reducef reduces portions of the data using that seed value.
   Finally, combinef is used to reduce each of the intermediate values. If combinef
   is not specified, reducef is used for both. Fold functions defined using
   pigpen.fold/fold-fn can also be used.

   Example:

     (pig/fold + foo)
     (pig/fold + (fn [acc _] (inc acc)) foo)
     (pig/fold (fold/fold-fn + (fn [acc _] (inc acc))) foo)

   See pigpen.fold for more info on fold functions.

   Note: Folding an empty sequence will always return an empty sequence:

     => (->>
          (pig/return [])
          (pig/fold (fold/count))
          (pig/dump))
     []
"
  {:added "0.2.0"}
  ;; Single-arg arity: if reducef is already a fold-fn map (:type :fold),
  ;; use it directly; otherwise reuse it as both combinef and reducef.
  ;; Note the :type check happens at runtime, inside the expansion.
  ([reducef relation]
    `(if (-> ~reducef :type #{:fold})
       (fold* (code/trap ~reducef)
              {}
              ~relation)
       (fold ~reducef ~reducef ~relation)))
  ;; Two-fn arity: assemble an ad-hoc fold-fn with identity pre/post stages.
  ([combinef reducef relation]
    `(fold* (code/trap (fold-fn* identity ~combinef ~reducef identity))
            {}
            ~relation)))
(defmacro cogroup
  "Joins many relations together by a common key. Each relation specifies a
   key-selector function on which to join. A combiner function is applied to each
   join key and all values from each relation that match that join key. This is
   similar to join, without flattening the data. Optionally takes a map of options.

   Example:

     (pig/cogroup [(foo :on :a)
                   (bar :on :b, :type :required, :fold (fold/count))]
                  (fn [key foos bar-count] ...)
                  {:parallel 20})

   In this example, foo and bar are other pig queries and :a and :b are the
   key-selector functions for foo and bar, respectively. These can be any
   functions - not just keywords. There can be more than two select clauses.
   By default, a matching key value from each source relation is optional,
   meaning that keys don't have to exist in all source relations to be part of the
   output. To specify a relation as required, add 'required' to the select clause.
   The third argument is a function used to consolidate matching key values. For
   each unique key value, this function is called with the value of the key and all
   values with that key from foo and bar. As such, foos and bars are both
   collections. The last argument is an optional map of options. A fold function
   can be specified to aggregate groupings in parallel. See pigpen.fold for more
   info on fold functions.

   Options:

     :parallel - The degree of parallelism to use (pig only)
     :join-nils - Whether nil keys from each relation should be treated as equal

   See also: pigpen.core/join, pigpen.core/group-by
"
  {:added "0.1.0"}
  ([selects f] `(cogroup ~selects ~f {}))
  ([selects f opts]
    ;; Each select clause (rel :on k ...) becomes a {:from rel :on k ...} map
    ;; with its fn-valued entries trapped for serialization.
    (let [selects# (->> selects
                     (map (partial cons :from))
                     (map (partial code/trap-values #{:on :by :key-selector :fold}))
                     vec)]
      `(group* ~selects#
               (code/trap ~f)
               (assoc ~opts :description ~(pp-str f))))))
(defmacro join
  "Joins many relations together by a common key. Each relation specifies a
   key-selector function on which to join. A function is applied to each join
   key and each pair of values from each relation that match that join key.
   Optionally takes a map of options.

   Example:

     (pig/join [(foo :on :a)
                (bar :on :b :type :optional)]
               (fn [f b] ...)
               {:parallel 20})

   In this example, foo and bar are other pig queries and :a and :b are the
   key-selector functions for foo and bar, respectively. These can be any
   functions - not just keywords. There can be more than two select clauses.
   By default, a matching key value from each source relation is required,
   meaning that they must exist in all source relations to be part of the output.
   To specify a relation as optional, add 'optional' to the select clause. The
   third argument is a function used to consolidate matching key values. For each
   unique key value, this function is called with each set of values from the cross
   product of each source relation. By default, this does a standard inner join.
   Use 'optional' to do outer joins. The last argument is an optional map of
   options.

   Options:

     :parallel - The degree of parallelism to use (pig only)
     :join-nils - Whether nil keys from each relation should be treated as equal

   See also: pigpen.core/cogroup, pigpen.core/union
"
  {:added "0.1.0"}
  ([selects f] `(join ~selects ~f {}))
  ([selects f opts]
    ;; Same clause normalization as cogroup, but no :fold support and the
    ;; expansion targets join* (flattened cross-product) instead of group*.
    (let [selects# (->> selects
                     (map (partial cons :from))
                     (map (partial code/trap-values #{:on :by :key-selector}))
                     vec)]
      `(join* ~selects#
              (code/trap ~f)
              (assoc ~opts :description ~(pp-str f))))))
(defmacro filter-by
  "Filters a relation by the keys in another relation. The key-selector function
   is applied to each element of relation. If the resulting key is present in keys,
   the value is kept. Otherwise it is dropped. nils are dropped or preserved based
   on whether there is a nil value present in keys. This operation is referred to
   as a semi-join in relational databases.

   Example:

     (let [keys (pig/return [1 3 5])
           data (pig/return [{:k 1, :v \"a\"}
                             {:k 2, :v \"b\"}
                             {:k 3, :v \"c\"}
                             {:k 4, :v \"d\"}
                             {:k 5, :v \"e\"}])]
       (pig/filter-by :k keys data))

     => (pig/dump *1)
     [{:k 1, :v \"a\"}
      {:k 3, :v \"c\"}
      {:k 5, :v \"e\"}]

   Options:

     :parallel - The degree of parallelism to use (pig only)

   Note: keys must be distinct before this is used or you will get duplicate values.
   Note: Unlike filter, this joins relation with keys and can be potentially expensive.

   See also: pigpen.core/filter, pigpen.core/remove-by, pigpen.core/intersection
"
  {:added "0.2.3"}
  ([key-selector keys relation] `(filter-by ~key-selector ~keys {} ~relation))
  ([key-selector keys opts relation]
    ;; Implemented as an inner join against the keys relation; the combiner
    ;; discards the key side and returns the data value. :sentinel-nil lets
    ;; nil keys participate in the match.
    `(join* [{:from ~keys :key-selector 'identity}
             {:from ~relation :key-selector (code/trap ~key-selector)}]
            '(fn [~'k ~'v] ~'v)
            (assoc ~opts :description ~(pp-str key-selector)
                   :sentinel-nil true))))
(defmacro remove-by
  "Filters a relation by the keys in another relation. The key-selector function
   is applied to each element of relation. If the resulting key is _not_ present in
   keys, the value is kept. Otherwise it is dropped. nils are dropped or preserved
   based on whether there is a nil value present in keys. This operation is
   referred to as an anti-join in relational databases.

   Example:

     (let [keys (pig/return [1 3 5])
           data (pig/return [{:k 1, :v \"a\"}
                             {:k 2, :v \"b\"}
                             {:k 3, :v \"c\"}
                             {:k 4, :v \"d\"}
                             {:k 5, :v \"e\"}])]
       (pig/remove-by :k keys data))

     => (pig/dump *1)
     [{:k 2, :v \"b\"}
      {:k 4, :v \"d\"}]

   Options:

     :parallel - The degree of parallelism to use (pig only)

   Note: Unlike remove, this joins relation with keys and can be potentially expensive.

   See also: pigpen.core/remove, pigpen.core/filter-by, pigpen.core/difference
"
  {:added "0.2.3"}
  ([key-selector keys relation] `(remove-by ~key-selector ~keys {} ~relation))
  ([key-selector keys opts relation]
    ;; Anti-join: outer-join against keys (:type :optional on the keys side),
    ;; keep :all-args so the joined tuple is [k _ _ v], then keep only rows
    ;; where the keys side is nil, i.e. the key had no match.
    (let [f '(fn [[k _ _ v]] (when (nil? k) [v]))]
      `(->>
         (join* [{:from ~keys :key-selector 'identity :type :optional}
                 {:from ~relation :key-selector (code/trap ~key-selector)}]
                'vector
                (assoc ~opts :description ~(pp-str key-selector)
                       :all-args true
                       :sentinel-nil true))
         (raw/bind$ '(pigpen.runtime/mapcat->bind ~f) {})))))
|
819c202439d81c5da1f292c67345121f03925046b41fee8da94099e760f31a27 | racket/htdp | img-err.rkt | #lang racket/base
(provide define/chk
x-place?
y-place?
mode?
angle?
side-count?
image-color?
pen-style?
pen-cap?
pen-join?
real-valued-posn?
step-count?
check-mode/color-combination)
(require htdp/error
racket/class
racket/contract
lang/posn
(except-in racket/draw
make-pen make-color)
mrlib/image-core
(for-syntax racket/base
racket/list))
;
;
;
;
;
;
; ;; ;; ;;
; ;; ;; ;;
; ;;;; ;;;;;;;;; ;;;; ;;;; ;;;; ;;;;;; ;;;; ;;;; ;; ;;; ;; ;; ;;; ;;;;;;
; ;; ;; ;;;; ;;;; ;;;;;; ;;;; ;;;;;; ;;;;;; ;; ;; ;;;;;; ;;;;; ;; ;;;;;; ;;;;;;
; ;;;;;;;; ;; ;; ;;; ;;; ;; ;;; ;; ;; ;;;;;;;;;;; ;;;;; ;; ;; ;; ;;; ;;
; ;;; ;; ;; ;;; ;;; ;; ;;; ;; ;; ;;; ;;; ;;;;; ;; ;; ;; ;;; ;;
; ;;; ;; ;; ;; ;;;;;; ;; ;;;;;; ;; ;; ;;; ;; ;;;;;; ;; ;; ;; ;; ;; ;;;;;;
; ;;;; ;; ;; ;;;; ;; ;;;; ;; ;; ;;;; ;;;; ;; ;;; ;; ;; ;; ;;;;;
; ;; ;;;
; ;;;;;
;
;
;; define/chk: like `define`, but every argument is validated and normalized
;; by check/normalize according to the argument's *name* before the body runs.
;; Supports both plain (define/chk (f x y) ...) and case-lambda forms, plus a
;; rest argument whose elements are each checked individually.
(define-syntax define/chk
  (λ (stx)
    ;; adjust-case : identifier syntax (listof syntax) -> syntax
    ;; Rewrites one argument list + body so each argument is rebound to
    ;; (check/normalize 'fn-name 'arg arg i), i being its 1-based position.
    (define (adjust-case fn-name case-args bodies)
      (syntax-case case-args ()
        ;; Improper arg list: fixed args followed by a rest argument.
        [(args ... . final-arg)
         (identifier? #'final-arg)
         (let ([len (length (syntax->list #'(args ...)))])
           (with-syntax ([(i ...) (build-list len add1)])
             #`((args ... . final-arg)
                (let ([args (check/normalize '#,fn-name 'args args i)] ...
                      [final-arg
                       ;; Rest elements are numbered after the fixed args.
                       (for/list ([x (in-list final-arg)]
                                  [j (in-naturals #,(+ len 1))])
                         (check/normalize '#,fn-name 'final-arg x j))])
                  #,@bodies))))]
        ;; Proper arg list; each arg may be an identifier or an (x y) pair
        ;; (only the first identifier of a pair is checked).
        [(args ...)
         (with-syntax ([(i ...) (build-list (length (syntax->list #'(args ...))) add1)]
                       [(arg-ids ...)
                        (map (λ (arg)
                               (syntax-case arg ()
                                 [x
                                  (identifier? #'x)
                                  #'x]
                                 [(x y)
                                  (identifier? #'x)
                                  #'x]
                                 [_
                                  (raise-syntax-error 'define/chk "unknown argument spec" stx arg)]))
                             (syntax->list #'(args ...)))])
           #`((args ...)
              (let ([arg-ids (check/normalize '#,fn-name 'arg-ids arg-ids i)] ...)
                #,@bodies)))]))
    (syntax-case stx (case-lambda)
      ;; (define/chk name (case-lambda [...] ...)) — adjust every clause.
      [(define/chk fn-name (case-lambda [in-args in-body ...] ...))
       (with-syntax ([((args body) ...) (map (lambda (a b) (adjust-case #'fn-name a b))
                                             (syntax->list #'(in-args ...))
                                             (syntax->list #'((in-body ...) ...)))])
         #'(define fn-name
             (case-lambda
               [args body] ...)))]
      ;; Plain (define/chk (name . args) body ...).
      [(define/chk (fn-name . args) body ...)
       (with-syntax ([(args body) (adjust-case #'fn-name #'args #'(body ...))])
         (quasisyntax/loc stx (define (fn-name . args) body)))])))
;; check/normalize : symbol symbol any number -> any
;; based on the name of the argument, checks to see if the input
;; is valid and, if so, transforms it to a specific kind of value
;; width, height -> number
;; mode -> 'outline 'solid
;; color -> (is-a?/c color<%>)
;; check/normalize : symbol symbol any number -> any
;; Validates argument `arg` (the i-th argument of `fn-name`) based purely on
;; the argument's name, raising a student-friendly error via check-arg on
;; failure, and returns a normalized value (e.g. strings -> symbols,
;; inexact integers -> exact).  Unknown argument names are a programming
;; error in this library, not a user error.
(define (check/normalize fn-name argname arg i)
  (case argname
    ;; Placement arguments: strings are converted to symbols and
    ;; 'center is canonicalized to 'middle.
    [(x-place)
     (check-arg fn-name
                (x-place? arg)
                'x-place
                i
                arg)
     (let ([sym (if (string? arg)
                    (string->symbol arg)
                    arg)])
       (if (eq? sym 'center)
           'middle
           sym))]
    [(y-place)
     (check-arg fn-name
                (y-place? arg)
                'y-place
                i
                arg)
     (let ([sym (if (string? arg)
                    (string->symbol arg)
                    arg)])
       (if (eq? sym 'center)
           'middle
           sym))]
    ;; Image arguments are normalized via to-img (from mrlib/image-core).
    [(image image1 image2 image3)
     (check-arg fn-name
                (image? arg)
                'image
                i
                arg)
     (to-img arg)]
    [(images)
     (check-arg fn-name (and (list? arg) (andmap image? arg)) 'image-list i arg)
     (for/list ([i (in-list arg)])
       (to-img i))]
    ;; 'solid becomes the alpha value 255; outline stays symbolic;
    ;; inexact alpha values become exact.
    [(mode)
     (check-arg fn-name
                (mode? arg)
                'mode
                i
                arg)
     (cond
       [(or (equal? arg "solid")
            (equal? arg 'solid))
        255]
       [(equal? arg "outline")
        'outline]
       [(and (integer? arg)
            (not (exact? arg)))
        (inexact->exact arg)]
       [else arg])]
    ;; Numeric size arguments.
    [(width height radius radius1 radius2 side-length side-length1 side-length2
            side-a side-b side-c)
     (check-arg fn-name
                (and (real? arg)
                     (not (negative? arg)))
                'non\ negative\ real\ number
                i arg)
     arg]
    [(point-count)
     (check-arg fn-name
                (and (integer? arg)
                     (>= arg 2))
                'integer\ greater\ than\ or\ equal\ to\ 2
                i arg)
     (inexact->exact arg)]
    [(dx dy x y x1 y1 x2 y2 pull pull1 pull2)
     (check-arg fn-name
                (real? arg)
                'real\ number
                i arg)
     arg]
    [(factor x-factor y-factor non-zero-radius)
     (check-arg fn-name
                (and (real? arg)
                     (positive? arg))
                'positive\ real\ number
                i arg)
     arg]
    [(side-count)
     (check-arg fn-name
                (side-count? arg)
                'side-count
                i arg)
     (inexact->exact arg)]
    [(step-count)
     (check-arg fn-name
                (step-count? arg)
                'step-count
                i arg)
     (inexact->exact arg)]
    ;; Angles are normalized into their proper range; the special
    ;; angle-between-0-and-360 name additionally excludes 0 and 360.
    [(angle angle1 angle2 angle-a angle-b angle-c)
     (check-arg fn-name
                (angle? arg)
                'angle\ in\ degrees
                i arg)
     (angle->proper-range arg)]
    [(angle-between-0-and-360)
     (check-arg fn-name
                (angle? arg)
                'angle\ in\ degrees
                i arg)
     (check-arg fn-name
                (<= 0 arg 360)
                '|angle between 0 and 360|
                i arg)
     (check-arg fn-name
                (not (= 0 arg))
                '|angle that is not 0|
                i arg)
     (check-arg fn-name
                (not (= 360 arg))
                '|angle that is not 360|
                i arg)
     arg]
    ;; Colors: symbols become strings; color structs pass through.
    [(color-only)
     (check-arg fn-name (image-color? arg) 'image-color i arg)
     (cond
       [(color? arg) arg]
       [(string? arg) arg]
       [(symbol? arg) (symbol->string arg)])]
    [(color)
     (check-arg fn-name (or (image-color? arg) (pen? arg)) 'image-color-or-pen i arg)
     ;; return either a string, color, or a pen,
     ;; (technically, the string case is redundant,
     ;; but since there may be saved files that have
     ;; strings in the color positions we leave them
     ;; here too; note that using a pen struct means
     ;; 'smoothed mode, but a color (or string) means
     ;; 'aligned mode, so that's not redundant).
     (cond
       [(color? arg) arg]
       [(pen? arg) arg]
       [(symbol? arg) (symbol->string arg)]
       [(string? arg) arg])]
    [(color-list)
     (check-arg fn-name (and (list? arg) (andmap image-color? arg)) 'color-list i arg)
     arg]
    ;; Text/font arguments.
    [(string)
     (check-arg fn-name (string? arg) 'string i arg)
     arg]
    [(font-size)
     (check-arg fn-name (and (integer? arg) (<= 1 arg 255)) 'font-size i arg)
     (inexact->exact arg)]
    [(face)
     (check-arg fn-name (or (not arg) (string? arg)) 'face i arg)
     arg]
    [(family)
     (let ([syms '(default decorative roman script swiss modern symbol system)])
       (check-arg fn-name
                  (or (memq arg syms)
                      (and (string? arg)
                           (memq (string->symbol arg) syms)))
                  'family i arg))
     (if (string? arg) (string->symbol arg) arg)]
    [(style)
     (let ([syms '(normal italic slant)])
       (check-arg fn-name (or (memq arg syms)
                              (and (string? arg)
                                   (memq (string->symbol arg) syms)))
                  'style i arg))
     (if (string? arg) (string->symbol arg) arg)]
    [(weight)
     (let ([syms '(normal bold light)])
       (check-arg fn-name (or (memq arg syms)
                              (and (string? arg)
                                   (memq (string->symbol arg) syms)))
                  'weight i arg))
     (if (string? arg) (string->symbol arg) arg)]
    ;; underline accepts any value; it is coerced to a boolean.
    [(underline)
     (and arg #t)]
    ;; Lists of posns (polygon vertices etc.).
    [(posns)
     (check-arg fn-name
                (and (list? arg)
                     (andmap posn? arg))
                'list-of-posns
                i arg)
     (check-arg fn-name
                (andmap real-valued-posn? arg)
                'list-of-posns-with-real-valued-x-and-y-coordinates
                i arg)
     (check-arg fn-name
                (>= (length arg) 3)
                'list-of-at-least-three-posns
                i arg)
     arg]
    [(posns-or-pulled-points)
     (check-arg fn-name
                (and (list? arg)
                     (andmap (or/c posn? pulled-point?) arg))
                'list-of-posns-or-pulled-points
                i arg)
     (check-arg fn-name
                (andmap (or/c pulled-point? real-valued-posn?) arg)
                'list-of-posns-with-real-valued-x-and-y-coordinates
                i arg)
     (check-arg fn-name
                (>= (length arg) 3)
                'list-of-at-least-three-posns-or-pulled-points
                i arg)
     arg]
    [(zero-or-more-posns)
     (check-arg fn-name
                (and (list? arg)
                     (andmap posn? arg))
                'list-of-posns
                i arg)
     (check-arg fn-name
                (andmap real-valued-posn? arg)
                'list-of-posns-with-real-valued-x-and-y-coordinates
                i arg)
     arg]
    ;; Color-component bytes.
    [(int-0-255 int0-255-1 int0-255-2 int0-255-3 int0-255-4)
     (check-arg fn-name (and (integer? arg) (<= 0 arg 255))
                'integer\ between\ 0\ and\ 255 i arg)
     (inexact->exact arg)]
    ;; Pen attribute arguments: strings become symbols.
    [(pen-style)
     (check-arg fn-name (pen-style? arg) 'pen-style i arg)
     (if (string? arg)
         (string->symbol arg)
         arg)]
    [(pen-cap)
     (check-arg fn-name (pen-cap? arg) 'pen-cap i arg)
     (if (string? arg)
         (string->symbol arg)
         arg)]
    [(pen-join)
     (check-arg fn-name (pen-join? arg) 'pen-join i arg)
     (if (string? arg)
         (string->symbol arg)
         arg)]
    [(filename)
     (check-arg fn-name (path-string? arg) 'path-string i arg)
     arg]
    [else
     (error 'check "the function ~a has an argument with an unknown name: ~s"
            fn-name
            argname)]))
;; y-place? : any -> boolean
;; Recognizes a vertical-placement designator, given either as a symbol
;; or as a string.
(define (y-place? arg)
  (if (member arg '("top" top "bottom" bottom "middle" middle "center" center
                    "baseline" baseline "pinhole" pinhole))
      #t
      #f))
;; x-place? : any -> boolean
;; Recognizes a horizontal-placement designator, given either as a symbol
;; or as a string.
(define (x-place? arg)
  (if (member arg '("left" left "right" right "middle" middle
                    "center" center "pinhole" pinhole))
      #t
      #f))
;; mode? : any -> boolean
;; A drawing mode is 'solid/'outline (symbol or string) or an integer
;; alpha value in [0, 255].
(define (mode? arg)
  (cond
    [(member arg '(solid outline "solid" "outline")) #t]
    [(and (integer? arg) (<= 0 arg 255)) #t]
    [else #f]))
;; side-count? : any -> boolean
;; A valid polygon side count: an integer that is at least 3.
(define (side-count? n)
  (and (integer? n)
       (<= 3 n)))
;; step-count? : any -> boolean
;; A valid step count: an integer that is at least 1.
(define (step-count? n)
  (and (integer? n)
       (<= 1 n)))
;; image-color? : any -> boolean
;; A color struct always qualifies; a symbol or string qualifies when it
;; names a known color (per string->color-object/f).
(define (image-color? c)
  (or (color? c)
      (and (symbol? c)
           (and (string->color-object/f (symbol->string c)) #t))
      (and (string? c)
           (and (string->color-object/f c) #t))))
;; pen-style? : any -> boolean
;; Recognizes a pen style (symbol or string).
(define (pen-style? arg)
  (let ([sym (if (string? arg) (string->symbol arg) arg)])
    (if (member sym '(solid dot long-dash short-dash dot-dash))
        #t
        #f)))
;; pen-cap? : any -> boolean
;; Recognizes a pen cap (symbol or string).
(define (pen-cap? arg)
  (let ([sym (if (string? arg) (string->symbol arg) arg)])
    (if (member sym '(round projecting butt))
        #t
        #f)))
;; pen-join? : any -> boolean
;; Recognizes a pen join (symbol or string).
(define (pen-join? arg)
  (let ([sym (if (string? arg) (string->symbol arg) arg)])
    (if (member sym '(round bevel miter))
        #t
        #f)))
;; real-valued-posn? : any -> boolean
;; A posn whose x and y coordinates are both real numbers.
(define (real-valued-posn? arg)
  (cond
    [(not (posn? arg)) #f]
    [else (and (real? (posn-x arg))
               (real? (posn-y arg)))]))
;; checks the dependent part of the 'color' specification
;; check-mode/color-combination : symbol number mode color -> void
;; Enforces the dependent part of the mode/color contract: a filled shape
;; ('solid, or a numeric alpha as produced by check/normalize) requires an
;; actual image color; 'outline places no extra requirement.  `mode` is
;; assumed to be already normalized, so only 'outline or a number is
;; expected here ('solid is handled for safety).
(define (check-mode/color-combination fn-name i mode color)
  (cond
    [(or (eq? mode 'solid)
         (number? mode))
     (check-arg fn-name (image-color? color) 'image-color i color)]
    [(eq? mode 'outline)
     (void)]))
;; angle? : any -> boolean
;; A finite real number (rejects +inf.0, -inf.0, and +nan.0).
;; NB: +nan.0 must be tested with equal?, since (= +nan.0 x) is always #f.
(define (angle? arg)
  (and (real? arg)
       (not (= arg +inf.0))
       (not (= arg -inf.0))
       (not (equal? arg +nan.0))))
| null | https://raw.githubusercontent.com/racket/htdp/aa78794fa1788358d6abd11dad54b3c9f4f5a80b/htdp-lib/2htdp/private/img-err.rkt | racket |
;; ;; ;;
;; ;; ;;
;;;; ;;;;;;;;; ;;;; ;;;; ;;;; ;;;;;; ;;;; ;;;; ;; ;;; ;; ;; ;;; ;;;;;;
;; ;; ;;;; ;;;; ;;;;;; ;;;; ;;;;;; ;;;;;; ;; ;; ;;;;;; ;;;;; ;; ;;;;;; ;;;;;;
;;;;;;;; ;; ;; ;;; ;;; ;; ;;; ;; ;; ;;;;;;;;;;; ;;;;; ;; ;; ;; ;;; ;;
;;; ;; ;; ;;; ;;; ;; ;;; ;; ;; ;;; ;;; ;;;;; ;; ;; ;; ;;; ;;
;;; ;; ;; ;; ;;;;;; ;; ;;;;;; ;; ;; ;;; ;; ;;;;;; ;; ;; ;; ;; ;; ;;;;;;
;;;; ;; ;; ;;;; ;; ;;;; ;; ;; ;;;; ;;;; ;; ;;; ;; ;; ;; ;;;;;
;; ;;;
;;;;;
check/normalize : symbol symbol any number -> any
based on the name of the argument, checks to see if the input
is valid and, if so, transforms it to a specific kind of value
width, height -> number
mode -> 'outline 'solid
color -> (is-a?/c color<%>)
return either a string, color, or a pen,
(technically, the string case is redundant,
but since there may be saved files that have
strings in the color positions we leave them
here too; note that using a pen struct means
'smoothed mode, but a color (or string) means
'aligned mode, so that's not redundant).
checks the dependent part of the 'color' specification | #lang racket/base
(provide define/chk
x-place?
y-place?
mode?
angle?
side-count?
image-color?
pen-style?
pen-cap?
pen-join?
real-valued-posn?
step-count?
check-mode/color-combination)
(require htdp/error
racket/class
racket/contract
lang/posn
(except-in racket/draw
make-pen make-color)
mrlib/image-core
(for-syntax racket/base
racket/list))
(define-syntax define/chk
(λ (stx)
(define (adjust-case fn-name case-args bodies)
(syntax-case case-args ()
[(args ... . final-arg)
(identifier? #'final-arg)
(let ([len (length (syntax->list #'(args ...)))])
(with-syntax ([(i ...) (build-list len add1)])
#`((args ... . final-arg)
(let ([args (check/normalize '#,fn-name 'args args i)] ...
[final-arg
(for/list ([x (in-list final-arg)]
[j (in-naturals #,(+ len 1))])
(check/normalize '#,fn-name 'final-arg x j))])
#,@bodies))))]
[(args ...)
(with-syntax ([(i ...) (build-list (length (syntax->list #'(args ...))) add1)]
[(arg-ids ...)
(map (λ (arg)
(syntax-case arg ()
[x
(identifier? #'x)
#'x]
[(x y)
(identifier? #'x)
#'x]
[_
(raise-syntax-error 'define/chk "unknown argument spec" stx arg)]))
(syntax->list #'(args ...)))])
#`((args ...)
(let ([arg-ids (check/normalize '#,fn-name 'arg-ids arg-ids i)] ...)
#,@bodies)))]))
(syntax-case stx (case-lambda)
[(define/chk fn-name (case-lambda [in-args in-body ...] ...))
(with-syntax ([((args body) ...) (map (lambda (a b) (adjust-case #'fn-name a b))
(syntax->list #'(in-args ...))
(syntax->list #'((in-body ...) ...)))])
#'(define fn-name
(case-lambda
[args body] ...)))]
[(define/chk (fn-name . args) body ...)
(with-syntax ([(args body) (adjust-case #'fn-name #'args #'(body ...))])
(quasisyntax/loc stx (define (fn-name . args) body)))])))
(define (check/normalize fn-name argname arg i)
(case argname
[(x-place)
(check-arg fn-name
(x-place? arg)
'x-place
i
arg)
(let ([sym (if (string? arg)
(string->symbol arg)
arg)])
(if (eq? sym 'center)
'middle
sym))]
[(y-place)
(check-arg fn-name
(y-place? arg)
'y-place
i
arg)
(let ([sym (if (string? arg)
(string->symbol arg)
arg)])
(if (eq? sym 'center)
'middle
sym))]
[(image image1 image2 image3)
(check-arg fn-name
(image? arg)
'image
i
arg)
(to-img arg)]
[(images)
(check-arg fn-name (and (list? arg) (andmap image? arg)) 'image-list i arg)
(for/list ([i (in-list arg)])
(to-img i))]
[(mode)
(check-arg fn-name
(mode? arg)
'mode
i
arg)
(cond
[(or (equal? arg "solid")
(equal? arg 'solid))
255]
[(equal? arg "outline")
'outline]
[(and (integer? arg)
(not (exact? arg)))
(inexact->exact arg)]
[else arg])]
[(width height radius radius1 radius2 side-length side-length1 side-length2
side-a side-b side-c)
(check-arg fn-name
(and (real? arg)
(not (negative? arg)))
'non\ negative\ real\ number
i arg)
arg]
[(point-count)
(check-arg fn-name
(and (integer? arg)
(>= arg 2))
'integer\ greater\ than\ or\ equal\ to\ 2
i arg)
(inexact->exact arg)]
[(dx dy x y x1 y1 x2 y2 pull pull1 pull2)
(check-arg fn-name
(real? arg)
'real\ number
i arg)
arg]
[(factor x-factor y-factor non-zero-radius)
(check-arg fn-name
(and (real? arg)
(positive? arg))
'positive\ real\ number
i arg)
arg]
[(side-count)
(check-arg fn-name
(side-count? arg)
'side-count
i arg)
(inexact->exact arg)]
[(step-count)
(check-arg fn-name
(step-count? arg)
'step-count
i arg)
(inexact->exact arg)]
[(angle angle1 angle2 angle-a angle-b angle-c)
(check-arg fn-name
(angle? arg)
'angle\ in\ degrees
i arg)
(angle->proper-range arg)]
[(angle-between-0-and-360)
(check-arg fn-name
(angle? arg)
'angle\ in\ degrees
i arg)
(check-arg fn-name
(<= 0 arg 360)
'|angle between 0 and 360|
i arg)
(check-arg fn-name
(not (= 0 arg))
'|angle that is not 0|
i arg)
(check-arg fn-name
(not (= 360 arg))
'|angle that is not 360|
i arg)
arg]
[(color-only)
(check-arg fn-name (image-color? arg) 'image-color i arg)
(cond
[(color? arg) arg]
[(string? arg) arg]
[(symbol? arg) (symbol->string arg)])]
[(color)
(check-arg fn-name (or (image-color? arg) (pen? arg)) 'image-color-or-pen i arg)
(cond
[(color? arg) arg]
[(pen? arg) arg]
[(symbol? arg) (symbol->string arg)]
[(string? arg) arg])]
[(color-list)
(check-arg fn-name (and (list? arg) (andmap image-color? arg)) 'color-list i arg)
arg]
[(string)
(check-arg fn-name (string? arg) 'string i arg)
arg]
[(font-size)
(check-arg fn-name (and (integer? arg) (<= 1 arg 255)) 'font-size i arg)
(inexact->exact arg)]
[(face)
(check-arg fn-name (or (not arg) (string? arg)) 'face i arg)
arg]
[(family)
(let ([syms '(default decorative roman script swiss modern symbol system)])
(check-arg fn-name
(or (memq arg syms)
(and (string? arg)
(memq (string->symbol arg) syms)))
'family i arg))
(if (string? arg) (string->symbol arg) arg)]
[(style)
(let ([syms '(normal italic slant)])
(check-arg fn-name (or (memq arg syms)
(and (string? arg)
(memq (string->symbol arg) syms)))
'style i arg))
(if (string? arg) (string->symbol arg) arg)]
[(weight)
(let ([syms '(normal bold light)])
(check-arg fn-name (or (memq arg syms)
(and (string? arg)
(memq (string->symbol arg) syms)))
'weight i arg))
(if (string? arg) (string->symbol arg) arg)]
[(underline)
(and arg #t)]
[(posns)
(check-arg fn-name
(and (list? arg)
(andmap posn? arg))
'list-of-posns
i arg)
(check-arg fn-name
(andmap real-valued-posn? arg)
'list-of-posns-with-real-valued-x-and-y-coordinates
i arg)
(check-arg fn-name
(>= (length arg) 3)
'list-of-at-least-three-posns
i arg)
arg]
[(posns-or-pulled-points)
(check-arg fn-name
(and (list? arg)
(andmap (or/c posn? pulled-point?) arg))
'list-of-posns-or-pulled-points
i arg)
(check-arg fn-name
(andmap (or/c pulled-point? real-valued-posn?) arg)
'list-of-posns-with-real-valued-x-and-y-coordinates
i arg)
(check-arg fn-name
(>= (length arg) 3)
'list-of-at-least-three-posns-or-pulled-points
i arg)
arg]
[(zero-or-more-posns)
(check-arg fn-name
(and (list? arg)
(andmap posn? arg))
'list-of-posns
i arg)
(check-arg fn-name
(andmap real-valued-posn? arg)
'list-of-posns-with-real-valued-x-and-y-coordinates
i arg)
arg]
[(int-0-255 int0-255-1 int0-255-2 int0-255-3 int0-255-4)
(check-arg fn-name (and (integer? arg) (<= 0 arg 255))
'integer\ between\ 0\ and\ 255 i arg)
(inexact->exact arg)]
[(pen-style)
(check-arg fn-name (pen-style? arg) 'pen-style i arg)
(if (string? arg)
(string->symbol arg)
arg)]
[(pen-cap)
(check-arg fn-name (pen-cap? arg) 'pen-cap i arg)
(if (string? arg)
(string->symbol arg)
arg)]
[(pen-join)
(check-arg fn-name (pen-join? arg) 'pen-join i arg)
(if (string? arg)
(string->symbol arg)
arg)]
[(filename)
(check-arg fn-name (path-string? arg) 'path-string i arg)
arg]
[else
(error 'check "the function ~a has an argument with an unknown name: ~s"
fn-name
argname)]))
(define (y-place? arg)
(and (member arg '("top" top "bottom" bottom "middle" middle "center" center
"baseline" baseline "pinhole" pinhole))
#t))
(define (x-place? arg)
(and (member arg '("left" left "right" right "middle" middle
"center" center "pinhole" pinhole))
#t))
(define (mode? arg)
(or (and (member arg '(solid outline "solid" "outline")) #t)
(and (integer? arg)
(<= 0 arg 255))))
(define (side-count? i)
(and (integer? i)
(3 . <= . i)))
(define (step-count? i)
(and (integer? i)
(1 . <= . i)))
(define (image-color? c)
(cond
[(color? c) #t]
[(symbol? c)
(and (string->color-object/f (symbol->string c)) #t)]
[(string? c)
(and (string->color-object/f c) #t)]
[else #f]))
(define (pen-style? arg)
(and (member (if (string? arg) (string->symbol arg) arg)
'(solid dot long-dash short-dash dot-dash))
#t))
(define (pen-cap? arg)
(and (member (if (string? arg) (string->symbol arg) arg)
'(round projecting butt))
#t))
(define (pen-join? arg)
(and (member (if (string? arg) (string->symbol arg) arg)
'(round bevel miter))
#t))
(define (real-valued-posn? arg)
(and (posn? arg)
(real? (posn-x arg))
(real? (posn-y arg))))
(define (check-mode/color-combination fn-name i mode color)
(cond
[(or (eq? mode 'solid)
(number? mode))
(check-arg fn-name (image-color? color) 'image-color i color)]
[(eq? mode 'outline)
(void)]))
(define (angle? arg)
(and (real? arg)
(not (or (= arg +inf.0)
(= arg -inf.0)
(equal? arg +nan.0)))))
|
7caac23f33c313a2663dae89d5a77a9adf7cf5efd6cddaae204f5afe36a8968f | jmingtan/clonings | require2.clj | ;; require2.clj
;; Make the code compile! No hints, you can do it :)
(ns namespaces.require2)
;; Another feature of `require` is aliasing, so we don't have to type the
;; fully-qualified name of the library every time
(require '[clojure.test :as t])
(t/deftest example-test
  (t/is (= (+ 1 1) 2)))
;; Fix the following code!
;; NOTE: the forms below are intentionally broken -- that is the exercise.
;; The learner must pick a real alias in place of `???` and use it (instead
;; of the bogus `abcdef` prefix, and with the `t/` alias on the test macros).
(require '[clojure.test :as t])
(require '[clojure.string :as ???])
(deftest my-test
  (testing "should return the joined strings"
    (is (= (abcdef/join " " ["hey" "nonny" "nonny"])
           "hey nonny nonny"))))
| null | https://raw.githubusercontent.com/jmingtan/clonings/ca64b031ab26a1924bed91f5c9c98b6dd69fc129/exercises/namespaces/require2.clj | clojure | require2.clj
Make the code compile! No hints, you can do it :)
Another feature of `require` is aliasing, so we don't have to type the
fully-qualified name of the library every time
Fix the following code! |
(ns namespaces.require2)
(require '[clojure.test :as t])
(t/deftest example-test
(t/is (= (+ 1 1) 2)))
(require '[clojure.test :as t])
(require '[clojure.string :as ???])
(deftest my-test
(testing "should return the joined strings"
(is (= (abcdef/join " " ["hey" "nonny" "nonny"])
"hey nonny nonny"))))
|
106acebe34a78b52ddf1531655e0ba996f2095efe58bd2737262d69b8e998ada | reanimate/reanimate | Balloon.hs | {-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
{-|
Module      : Reanimate.Math.Balloon
Copyright   : Written by David Himmelstrup
License     : Unlicense
Stability   : experimental
Portability : POSIX
-}
module Reanimate.Math.Balloon
( balloon
, balloon'
) where
import Control.Lens
import qualified Data.Vector as V
import Graphics.SvgTree (drawAttributes)
import Linear.V2
import Linear.Vector
import Reanimate.Animation
import Reanimate.Math.Common
import Reanimate.Math.Polygon
import Reanimate.Morph.Common (toShapes)
import Reanimate.Svg.Constructors
-- import Debug.Trace
-- | Inflate SVG shapes like a balloon. This works by hiding corners
-- that are more than @t@ percent distant from the starting point
-- relative to the maximum diameter of the shape.
--
-- Example:
--
-- @
-- 'animate' $ 'balloon' ('scale' 8 $ 'center' $ 'Reanimate.LaTeX.latex' "X")
-- @
--
-- <<docs/gifs/doc_balloon.gif>>
balloon :: SVG -> (Double -> SVG)
balloon svg = balloon' 0.01 svg  -- 0.01 is the default shape->polygon tolerance
-- | Same as 'balloon' but with a given tolerance for converting
-- SVG shapes to polygons.
balloon' :: Double -> SVG -> (Double -> SVG)
-- Convert every shape in the SVG to a polygon (at the given tolerance),
-- then, at time @t@, render each polygon inflated to @t@ of its diameter
-- while reattaching the shape's original draw attributes.
balloon' tol svg = \t ->
  mkGroup
  [ polygonShape (gen t) & drawAttributes .~ attr
  | (attr, gen) <- lst ]
  where
    -- Render a polygon as a closed SVG line path.
    polygonShape :: Polygon -> SVG
    polygonShape p = mkLinePathClosed
      [ (x,y) | V2 x y <- map (fmap realToFrac) $ V.toList (polygonPoints p) ]
    -- One (attributes, time->polygon) pair per shape; each polygon is
    -- re-anchored first so vertex 0 starts a longest diameter.
    lst =
      [ (attr, balloonP $ shiftLongestDiameter poly)
      | (attr, poly) <- toShapes tol svg
      ]
-- x <= 1
-- diameter (balloonP p x) = diameter p * x
-- | Inflate a polygon to a fraction @t@ (0 <= t <= 1) of its full size:
-- only vertices whose shortest-path distance from vertex 0 is within
-- @t * diameter@ remain visible; edges that cross that radius are clipped
-- and pulled back along their shortest paths.
--
-- NOTE(review): the three @Nothing ->@ alternatives below were garbled in
-- the checked-in text (comment-extraction damage); they are restored here
-- so every @case@ over 'rayIntersect' is exhaustive.
balloonP :: Polygon -> Double -> Polygon
balloonP p = \t ->
  let targetLength = d * t
      -- A vertex is visible when its SSSP distance from vertex 0 is
      -- within the current target radius.
      nodeVisible x = ds V.! x <= targetLength
      -- Move 'a' closer to 'target' such that the length from point 0 to 'a'
      -- is targetLength.
      moveCloser :: Int -> V2 Rational -> V2 Rational
      moveCloser target a =
        let targetDist = ds V.! target
            aDist = distance' (pAccess p target) a
            frac = min 1 $ realToFrac $ (targetLength - targetDist) / aDist
        in lerp frac a (pAccess p target)
      -- Emit the (possibly clipped) points contributed by edge (a, pNext a).
      worker 0 = [pAccess p 0]
      worker a =
        let b = pNext p a in
        if nodeVisible a && nodeVisible b
          then [pAccess p a, pAccess p b]
          else
            chunkRight a b (pAccess p a) (pAccess p b) (fst $ getFunnel a b) ++
            chunkCenter a b ++
            chunkLeft a b (pAccess p a) (pAccess p b) (snd $ getFunnel a b)
      chunkRight ai bi a b (x:y:xs) =
        case rayIntersect (a,b) (pAccess p x,pAccess p y) of
          Just u ->
            if nodeVisible x
              then
                map (moveCloser x) (split a u) ++
                chunkRight ai bi u b (y:xs)
              else chunkRight ai bi u b (y:xs)
          Nothing -> -- error $ ": urk: " ++ show (ai,bi,x,y)
            if nodeVisible x
            then map (moveCloser x) [a]
            else []
      chunkRight _ai _bi _a _b _ = []
      chunkLeft ai bi a b (x:y:xs) =
        case rayIntersect (a,b) (pAccess p x,pAccess p y) of
          Just u ->
            if nodeVisible x
              then
                chunkLeft ai bi a u (y:xs) ++
                map (moveCloser x) (split u b)
              else chunkLeft ai bi a u (y:xs)
          Nothing -> -- error $ "chunkLeft: : " ++ show (ai,bi,x,y)
            if nodeVisible x
            then map (moveCloser x) [b]
            else []
      chunkLeft _ai _bi _a _b _ = []
      -- Points contributed by the apex of the funnel between a and b.
      chunkCenter a b =
        let (aF, bF) = getFunnel a b
            aP = pAccess p a
            bP = pAccess p b in
        case (reverse aF, reverse bF) of
          ([x], [_]) | nodeVisible x ->
            map (moveCloser x) (split aP bP)
          ([x], _:left:_) | nodeVisible x ->
            case rayIntersect (aP,bP) (pAccess p x,pAccess p left) of
              Just v ->
                map (moveCloser x) (split aP v)
              Nothing -> map (moveCloser x) [aP,bP]
          (x:right:_, [_]) | nodeVisible x ->
            case rayIntersect (aP,bP) (pAccess p x,pAccess p right) of
              Just u -> map (moveCloser x) (split u bP)
              Nothing -> error $ "urk: " ++ show (a,b,right)
          (x:right:_, _:left:_) | nodeVisible x ->
            case rayIntersect (aP,bP) (pAccess p x,pAccess p right) of
              Just u ->
                case rayIntersect (aP,bP) (pAccess p x,pAccess p left) of
                  Just v -> map (moveCloser x) (split u v)
                  Nothing -> map (moveCloser x) [aP,bP]
              Nothing -> map (moveCloser x) [aP,bP]
          _ -> []
      in mkPolygon $ V.fromList $ clearDups $
         concatMap worker [0..pSize p-1]
  where
    -- Drop consecutive duplicate points.
    clearDups (x:y:xs)
      | x == y = clearDups (x:xs)
    clearDups (x:xs) = x : clearDups xs
    clearDups [] = []
    -- SSSP-tree ancestors of a vertex (nearest first), rooted at vertex 0.
    getParents 0 = []
    getParents x =
      let parent = pParent p 0 x
      in parent : getParents parent
    -- The two sides of the funnel between a and b: each ancestor chain,
    -- cut at the first vertex shared with the other chain.
    getFunnel a b =
      let aP = getParents a
          bP = getParents b in
      (takeUntil (`elem` bP) aP
      ,takeUntil (`elem` aP) bP)
    -- Interpolate 'steps' intermediate points between two endpoints.
    split aP bP =
      let steps = 50 in
      [ lerp (t/steps) bP aP
      | t <- [0 .. steps]
      ]
    d = V.maximum ds
    ds = ssspDistances p
-- | Take elements up to and including the first one that satisfies the
-- predicate; if no element satisfies it, the whole list is returned.
takeUntil :: (a -> Bool) -> [a] -> [a]
takeUntil stop = go
  where
    go []     = []
    go (e:es)
      | stop e    = [e]
      | otherwise = e : go es
-- | Largest shortest-path distance from vertex 0 to any vertex.
diameter :: Polygon -> Double
diameter = V.maximum . ssspDistances
-- | Re-anchor the polygon: among all rotations of the vertex order
-- ('pCycles'), pick the one whose vertex 0 gives the largest shortest-path
-- diameter.  Candidates within 1% of each other are considered ties and
-- broken by 'isTopLeft', keeping the choice stable.
shiftLongestDiameter :: Polygon -> Polygon
shiftLongestDiameter p = findBest 0 p (pCycles p)
  where
    -- Relative tolerance when comparing two candidate diameters.
    margin = 0.01
    findBest _score elt [] = elt
    findBest score elt (x:xs) =
      let newScore = diameter x in
      if
        | newScore-score > score * margin -> findBest newScore x xs
        | score-newScore > newScore * margin -> findBest score elt xs
        | isTopLeft x elt -> findBest newScore x xs
        | otherwise -> findBest score elt xs
    -- Tie-break on the offset between the two candidates' start vertices.
    isTopLeft a b =
      case pAccess a 0-pAccess b 0 of
        V2 x y -> y > x
-- Shortest distances from point 0 to all other points.
-- | Shortest-path distance from vertex 0 to every vertex, read off the
-- polygon's SSSP parent tree ('pParent'): entry i is its parent's distance
-- plus the edge length to the parent.  The boxed vector refers to itself,
-- so each entry is computed lazily and at most once.
ssspDistances :: Polygon -> V.Vector Double
ssspDistances p = arr
  where
    arr = V.generate (pSize p) $ \i ->
      case i of
        0 -> 0
        _ ->
          let parent = pParent p 0 i in
          arr V.! parent + distance' (pAccess p i) (pAccess p parent)
| null | https://raw.githubusercontent.com/reanimate/reanimate/5ea023980ff7f488934d40593cc5069f5fd038b0/src/Reanimate/Math/Balloon.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
import Debug.Trace
| Inflate SVG shapes like a balloon. This works by hiding corners
that are more than @t@ percent distant from the starting point
relative to the maximum diameter of the shape.
Example:
@
@
<<docs/gifs/doc_balloon.gif>>
SVG shapes to polygons.
diameter (balloonP x p) = diameter p * x
Move 'a' closer to 'target' such that the length from point 0 to 'a' | |
Copyright : Written by
License : Unlicense
Maintainer :
Stability : experimental
Portability : POSIX
Copyright : Written by David Himmelstrup
License : Unlicense
Maintainer :
Stability : experimental
Portability : POSIX
-}
module Reanimate.Math.Balloon
( balloon
, balloon'
) where
import Control.Lens
import qualified Data.Vector as V
import Graphics.SvgTree (drawAttributes)
import Linear.V2
import Linear.Vector
import Reanimate.Animation
import Reanimate.Math.Common
import Reanimate.Math.Polygon
import Reanimate.Morph.Common (toShapes)
import Reanimate.Svg.Constructors
' animate ' $ ' balloon ' ( ' scale ' 8 $ ' center ' $ ' Reanimate.LaTeX.latex ' \"X\ " )
balloon :: SVG -> (Double -> SVG)
balloon = balloon' 0.01
| Same as @balloon'@ but with a given tolerance for converting
balloon' :: Double -> SVG -> (Double -> SVG)
balloon' tol svg = \t ->
mkGroup
[ polygonShape (gen t) & drawAttributes .~ attr
| (attr, gen) <- lst ]
where
polygonShape :: Polygon -> SVG
polygonShape p = mkLinePathClosed
[ (x,y) | V2 x y <- map (fmap realToFrac) $ V.toList (polygonPoints p) ]
lst =
[ (attr, balloonP $ shiftLongestDiameter poly)
| (attr, poly) <- toShapes tol svg
]
x < = 1
balloonP :: Polygon -> Double -> Polygon
balloonP p = \t ->
let targetLength = d * t
nodeVisible x = ds V.! x <= targetLength
is .
moveCloser :: Int -> V2 Rational -> V2 Rational
moveCloser target a =
let targetDist = ds V.! target
aDist = distance' (pAccess p target) a
frac = min 1 $ realToFrac $ (targetLength - targetDist) / aDist
in lerp frac a (pAccess p target)
worker 0 = [pAccess p 0]
worker a =
let b = pNext p a in
if nodeVisible a && nodeVisible b
then [pAccess p a, pAccess p b]
else
chunkRight a b (pAccess p a) (pAccess p b) (fst $ getFunnel a b) ++
chunkCenter a b ++
chunkLeft a b (pAccess p a) (pAccess p b) (snd $ getFunnel a b)
chunkRight ai bi a b (x:y:xs) =
case rayIntersect (a,b) (pAccess p x,pAccess p y) of
Just u ->
if nodeVisible x
then
map (moveCloser x) (split a u) ++
chunkRight ai bi u b (y:xs)
else chunkRight ai bi u b (y:xs)
error $ " : urk : " + + show ( ai , bi , x , y )
if nodeVisible x
then map (moveCloser x) [a]
else []
chunkRight _ai _bi _a _b _ = []
chunkLeft ai bi a b (x:y:xs) =
case rayIntersect (a,b) (pAccess p x,pAccess p y) of
Just u ->
if nodeVisible x
then
chunkLeft ai bi a u (y:xs) ++
map (moveCloser x) (split u b)
else chunkLeft ai bi a u (y:xs)
error $ " chunkLeft : : " + + show ( ai , bi , x , y )
if nodeVisible x
then map (moveCloser x) [b]
else []
chunkLeft _ai _bi _a _b _ = []
chunkCenter a b =
let (aF, bF) = getFunnel a b
aP = pAccess p a
bP = pAccess p b in
case (reverse aF, reverse bF) of
([x], [_]) | nodeVisible x ->
map (moveCloser x) (split aP bP)
([x], _:left:_) | nodeVisible x ->
case rayIntersect (aP,bP) (pAccess p x,pAccess p left) of
Just v ->
map (moveCloser x) (split aP v)
Nothing -> map (moveCloser x) [aP,bP]
(x:right:_, [_]) | nodeVisible x ->
case rayIntersect (aP,bP) (pAccess p x,pAccess p right) of
Just u -> map (moveCloser x) (split u bP)
error $ " urk : " + + show ( a , b , right )
(x:right:_, _:left:_) | nodeVisible x ->
case rayIntersect (aP,bP) (pAccess p x,pAccess p right) of
Just u ->
case rayIntersect (aP,bP) (pAccess p x,pAccess p left) of
Just v -> map (moveCloser x) (split u v)
Nothing -> map (moveCloser x) [aP,bP]
Nothing -> map (moveCloser x) [aP,bP]
_ -> []
in mkPolygon $ V.fromList $ clearDups $
concatMap worker [0..pSize p-1]
where
clearDups (x:y:xs)
| x == y = clearDups (x:xs)
clearDups (x:xs) = x : clearDups xs
clearDups [] = []
getParents 0 = []
getParents x =
let parent = pParent p 0 x
in parent : getParents parent
getFunnel a b =
let aP = getParents a
bP = getParents b in
(takeUntil (`elem` bP) aP
,takeUntil (`elem` aP) bP)
split aP bP =
let steps = 50 in
[ lerp (t/steps) bP aP
| t <- [0 .. steps]
]
d = V.maximum ds
ds = ssspDistances p
takeUntil :: (a -> Bool) -> [a] -> [a]
takeUntil _fn [] = []
takeUntil fn (x:xs)
| fn x = [x]
| otherwise = x : takeUntil fn xs
diameter :: Polygon -> Double
diameter p = V.maximum (ssspDistances p)
shiftLongestDiameter :: Polygon -> Polygon
shiftLongestDiameter p = findBest 0 p (pCycles p)
where
margin = 0.01
findBest _score elt [] = elt
findBest score elt (x:xs) =
let newScore = diameter x in
if
| newScore-score > score * margin -> findBest newScore x xs
| score-newScore > newScore * margin -> findBest score elt xs
| isTopLeft x elt -> findBest newScore x xs
| otherwise -> findBest score elt xs
isTopLeft a b =
case pAccess a 0-pAccess b 0 of
V2 x y -> y > x
Shortest distances from point 0 to all other points .
ssspDistances :: Polygon -> V.Vector Double
ssspDistances p = arr
where
arr = V.generate (pSize p) $ \i ->
case i of
0 -> 0
_ ->
let parent = pParent p 0 i in
arr V.! parent + distance' (pAccess p i) (pAccess p parent)
|
3b3c3dc63e6072037f5d39c6d5bc27c707a8ee89a4146e763265038ebbbdc56d | hpdeifel/hledger-iadd | CommentDialog.hs | {-# LANGUAGE OverloadedStrings #-}
module Brick.Widgets.CommentDialog
( CommentWidget
, commentWidget
, renderCommentWidget
, commentDialogComment
, CommentAction(..)
, handleCommentEvent
) where
import Data.Semigroup ((<>))
import Brick
import Brick.Widgets.Dialog
import Brick.Widgets.Center
import Data.Text.Zipper
import Graphics.Vty.Input
import qualified Data.Text as T
import Data.Text (Text)
import Brick.Widgets.Edit.EmacsBindings
data CommentWidget n = CommentWidget
{ origComment :: Text
, textArea :: Editor n
, dialogWidget :: Dialog ()
, promptPrefix :: Text
}
commentWidget :: n -> Text -> Text -> CommentWidget n
commentWidget name prompt comment =
let
title = "ESC: cancel, RET: accept, Alt-RET: New line"
maxWidth = 80
diag = dialog (Just title) Nothing maxWidth
edit = editorText name (txt . T.unlines) Nothing comment
in
CommentWidget
{ origComment = comment
, textArea = applyEdit gotoEnd edit
, dialogWidget = diag
, promptPrefix = prompt
}
data CommentAction n = CommentContinue (CommentWidget n)
| CommentFinished Text
handleCommentEvent :: Event -> CommentWidget n -> EventM n (CommentAction n)
handleCommentEvent ev widget = case ev of
EvKey KEsc [] -> return $ CommentFinished (origComment widget)
EvKey KEnter [] -> return $ CommentFinished (commentDialogComment widget)
EvKey KEnter [MMeta] -> return $ CommentContinue $
widget { textArea = applyEdit breakLine (textArea widget) }
_ -> do
textArea' <- handleEditorEvent ev (textArea widget)
return $ CommentContinue $
CommentWidget (origComment widget) textArea' (dialogWidget widget) (promptPrefix widget)
renderCommentWidget :: (Ord n, Show n) => CommentWidget n -> Widget n
renderCommentWidget widget =
let
height = min (length (getEditContents (textArea widget)) + 4) 24
textArea' = padTop (Pad 1) $
txt (promptPrefix widget <> ": ") <+> renderEditor True (textArea widget)
in
vCenterLayer $ vLimit height $ renderDialog (dialogWidget widget) textArea'
commentDialogComment :: CommentWidget n -> Text
commentDialogComment = T.intercalate "\n" . getEditContents . textArea
gotoEnd :: Monoid a => TextZipper a -> TextZipper a
gotoEnd zipper =
let
lengths = lineLengths zipper
(row, col) = (length lengths, last lengths)
in
moveCursor (row-1, col) zipper
| null | https://raw.githubusercontent.com/hpdeifel/hledger-iadd/782239929d411bce4714e65dd5c7bb97b2ba4e75/src/Brick/Widgets/CommentDialog.hs | haskell | # LANGUAGE OverloadedStrings # |
module Brick.Widgets.CommentDialog
( CommentWidget
, commentWidget
, renderCommentWidget
, commentDialogComment
, CommentAction(..)
, handleCommentEvent
) where
import Data.Semigroup ((<>))
import Brick
import Brick.Widgets.Dialog
import Brick.Widgets.Center
import Data.Text.Zipper
import Graphics.Vty.Input
import qualified Data.Text as T
import Data.Text (Text)
import Brick.Widgets.Edit.EmacsBindings
data CommentWidget n = CommentWidget
{ origComment :: Text
, textArea :: Editor n
, dialogWidget :: Dialog ()
, promptPrefix :: Text
}
commentWidget :: n -> Text -> Text -> CommentWidget n
commentWidget name prompt comment =
let
title = "ESC: cancel, RET: accept, Alt-RET: New line"
maxWidth = 80
diag = dialog (Just title) Nothing maxWidth
edit = editorText name (txt . T.unlines) Nothing comment
in
CommentWidget
{ origComment = comment
, textArea = applyEdit gotoEnd edit
, dialogWidget = diag
, promptPrefix = prompt
}
data CommentAction n = CommentContinue (CommentWidget n)
| CommentFinished Text
handleCommentEvent :: Event -> CommentWidget n -> EventM n (CommentAction n)
handleCommentEvent ev widget = case ev of
EvKey KEsc [] -> return $ CommentFinished (origComment widget)
EvKey KEnter [] -> return $ CommentFinished (commentDialogComment widget)
EvKey KEnter [MMeta] -> return $ CommentContinue $
widget { textArea = applyEdit breakLine (textArea widget) }
_ -> do
textArea' <- handleEditorEvent ev (textArea widget)
return $ CommentContinue $
CommentWidget (origComment widget) textArea' (dialogWidget widget) (promptPrefix widget)
renderCommentWidget :: (Ord n, Show n) => CommentWidget n -> Widget n
renderCommentWidget widget =
let
height = min (length (getEditContents (textArea widget)) + 4) 24
textArea' = padTop (Pad 1) $
txt (promptPrefix widget <> ": ") <+> renderEditor True (textArea widget)
in
vCenterLayer $ vLimit height $ renderDialog (dialogWidget widget) textArea'
commentDialogComment :: CommentWidget n -> Text
commentDialogComment = T.intercalate "\n" . getEditContents . textArea
gotoEnd :: Monoid a => TextZipper a -> TextZipper a
gotoEnd zipper =
let
lengths = lineLengths zipper
(row, col) = (length lengths, last lengths)
in
moveCursor (row-1, col) zipper
|
781c799a9c2be522206212c43463b58eda4ea68cf4357e1547ed07e37c34e281 | avsm/mirage-duniverse | static_ipv4.mli |
* Copyright ( c ) 2010 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2010 Anil Madhavapeddy <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
module Make (R: Mirage_random.C) (C: Mirage_clock.MCLOCK) (E: Mirage_protocols_lwt.ETHIF) (A: Mirage_protocols_lwt.ARP) : sig
include Mirage_protocols_lwt.IPV4
val connect :
?ip:Ipaddr.V4.t ->
?network:Ipaddr.V4.Prefix.t ->
?gateway:Ipaddr.V4.t option ->
C.t -> E.t -> A.t -> t Lwt.t
* Connect to an ipv4 device .
Default ip is { ! }
Default network is { ! Ipaddr . V4.any}/0
Default gateway is None .
Default ip is {!Ipaddr.V4.any}
Default network is {!Ipaddr.V4.any}/0
Default gateway is None. *)
end
| null | https://raw.githubusercontent.com/avsm/mirage-duniverse/983e115ff5a9fb37e3176c373e227e9379f0d777/ocaml_modules/tcpip/src/ipv4/static_ipv4.mli | ocaml |
* Copyright ( c ) 2010 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2010 Anil Madhavapeddy <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
module Make (R: Mirage_random.C) (C: Mirage_clock.MCLOCK) (E: Mirage_protocols_lwt.ETHIF) (A: Mirage_protocols_lwt.ARP) : sig
include Mirage_protocols_lwt.IPV4
val connect :
?ip:Ipaddr.V4.t ->
?network:Ipaddr.V4.Prefix.t ->
?gateway:Ipaddr.V4.t option ->
C.t -> E.t -> A.t -> t Lwt.t
* Connect to an ipv4 device .
Default ip is { ! }
Default network is { ! Ipaddr . V4.any}/0
Default gateway is None .
Default ip is {!Ipaddr.V4.any}
Default network is {!Ipaddr.V4.any}/0
Default gateway is None. *)
end
| |
b59453d44a11d1c54a2d47767bebf5ece25bf9709b5d9c0cf0f1c6e0ec70e59e | blindglobe/clocc | section22.lisp | section 22 : printer -*- mode : lisp -*-
(in-package :cl-user)
from : < >
(check-for-bug :section22-legacy-6
(format nil "~V,,,'-A" 10 "abc")
"abc-------")
0123456789
(check-for-bug :section22-legacy-11
(format nil "foo")
"foo")
(check-for-bug :section22-legacy-15
(setq x 5)
5)
(check-for-bug :section22-legacy-19
(format nil "The answer is ~D." x)
"The answer is 5.")
(check-for-bug :section22-legacy-23
(format nil "The answer is ~3D." x)
"The answer is 5.")
(check-for-bug :section22-legacy-27
(format nil "The answer is ~3,'0D." x)
"The answer is 005.")
(check-for-bug :section22-legacy-31
(format nil "The answer is ~:D." (expt 47 x))
"The answer is 229,345,007.")
(check-for-bug :section22-legacy-35
(setq y "elephant")
"elephant")
(check-for-bug :section22-legacy-39
(format nil "Look at the ~A!" y)
"Look at the elephant!")
(check-for-bug :section22-legacy-43
(setq n 3)
3)
(check-for-bug :section22-legacy-47
(format nil "~D item~:P found." n)
"3 items found.")
(check-for-bug :section22-legacy-51
(format nil "~R dog~:[s are~; is~] here." n (= n 1))
"three dogs are here.")
(check-for-bug :section22-legacy-55
(format nil "~R dog~:*~[s are~; is~:;s are~] here." n)
"three dogs are here.")
(check-for-bug :section22-legacy-59
(format nil "Here ~[are~;is~:;are~] ~:*~R pupp~:@P." n)
"Here are three puppies.")
(check-for-bug :section22-legacy-63
(defun foo (x)
(format nil "~6,2F|~6,2,1,'*F|~6,2,,'?F|~6F|~,2F|~F"
x x x x x x))
FOO)
(check-for-bug :section22-legacy-69
(foo 3.14159)
" 3.14| 31.42| 3.14|3.1416|3.14|3.14159")
(check-for-bug :section22-legacy-73
(foo -3.14159)
" -3.14|-31.42| -3.14|-3.142|-3.14|-3.14159")
(check-for-bug :section22-legacy-77
(foo 100.0)
"100.00|******|100.00| 100.0|100.00|100.0")
(check-for-bug :section22-legacy-81
(foo 1234.0)
"1234.00|******|??????|1234.0|1234.00|1234.0")
(check-for-bug :section22-legacy-85
(foo 0.006)
" 0.01| 0.06| 0.01| 0.006|0.01|0.006")
(check-for-bug :section22-legacy-89
(defun foo (x)
(format nil
"~9,2,1,,'*E|~10,3,2,2,'?,,'$E|~
~9,3,2,-2,'%@E|~9,2E"
x x x x))
FOO)
(check-for-bug :section22-legacy-97
(foo 3.14159)
" 3.14E+0| 31.42$-01|+.003E+03| 3.14E+0")
(check-for-bug :section22-legacy-101
(foo -3.14159)
" -3.14E+0|-31.42$-01|-.003E+03| -3.14E+0")
(check-for-bug :section22-legacy-105
(foo 1100.0)
" 1.10E+3| 11.00$+02|+.001E+06| 1.10E+3")
(check-for-bug :section22-legacy-109
(foo 1100.0L0)
#-(or cmu sbcl) " 1.10L+3| 11.00$+02|+.001L+06| 1.10L+3"
#+(or cmu sbcl) " 1.10d+3| 11.00$+02|+.001d+06| 1.10d+3")
(check-for-bug :section22-legacy-114
(foo 1.1E13)
"*********| 11.00$+12|+.001E+16| 1.10E+13")
(check-for-bug :section22-legacy-118
(foo 1.1L120)
#-(or cmu sbcl) "*********|??????????|%%%%%%%%%|1.10L+120"
#+(or cmu sbcl) "*********|??????????|%%%%%%%%%|1.10d+120")
(check-for-bug :section22-legacy-123
(defun foo (x)
(format nil "~9,2,1,,'*G|~9,3,2,3,'?,,'$G|~9,3,2,0,'%G|~9,2G"
x x x x))
foo)
(check-for-bug :section22-legacy-129
(foo 0.0314159)
" 3.14E-2|314.2$-04|0.314E-01| 3.14E-2")
(check-for-bug :section22-legacy-133
(foo 0.314159)
" 0.31 |0.314 |0.314 | 0.31 ")
(check-for-bug :section22-legacy-137
(foo 3.14159)
" 3.1 | 3.14 | 3.14 | 3.1 ")
(check-for-bug :section22-legacy-141
(foo 31.4159)
" 31. | 31.4 | 31.4 | 31. ")
(check-for-bug :section22-legacy-145
(foo 314.159)
" 3.14E+2| 314. | 314. | 3.14E+2")
(check-for-bug :section22-legacy-149
(foo 3141.59)
" 3.14E+3|314.2$+01|0.314E+04| 3.14E+3")
(check-for-bug :section22-legacy-153
(foo 3141.59L0)
#-(or cmu sbcl) " 3.14L+3|314.2$+01|0.314L+04| 3.14L+3"
#+(or cmu sbcl) " 3.14d+3|314.2$+01|0.314d+04| 3.14d+3")
(check-for-bug :section22-legacy-158
(foo 3.14E12)
"*********|314.0$+10|0.314E+13| 3.14E+12")
(check-for-bug :section22-legacy-162
(foo 3.14L120)
#-(or cmu sbcl) "*********|?????????|%%%%%%%%%|3.14L+120"
#+(or cmu sbcl) "*********|?????????|%%%%%%%%%|3.14d+120")
(check-for-bug :section22-legacy-167
(format nil "~10<foo~;bar~>")
"foo bar")
(check-for-bug :section22-legacy-171
(format nil "~10:<foo~;bar~>")
" foo bar")
(check-for-bug :section22-legacy-175
(format nil "~10<foobar~>")
" foobar")
(check-for-bug :section22-legacy-179
(format nil "~10:<foobar~>")
" foobar")
(check-for-bug :section22-legacy-183
(format nil "~10:@<foo~;bar~>")
#+(or sbcl cmu ecls)
" foo bar "
#+clisp
" foo bar "
#-(or sbcl cmu clisp ecls)
fill-this-in)
(check-for-bug :section22-legacy-192
(format nil "~10@<foobar~>")
"foobar ")
(check-for-bug :section22-legacy-196
(format nil "~10:@<foobar~>")
" foobar ")
(check-for-bug :section22-legacy-200
(FORMAT NIL "Written to ~A." #P"foo.bin")
"Written to foo.bin.")
| null | https://raw.githubusercontent.com/blindglobe/clocc/a50bb75edb01039b282cf320e4505122a59c59a7/src/tools/ansi-test/section22.lisp | lisp | section 22 : printer -*- mode : lisp -*-
(in-package :cl-user)
from : < >
(check-for-bug :section22-legacy-6
(format nil "~V,,,'-A" 10 "abc")
"abc-------")
0123456789
(check-for-bug :section22-legacy-11
(format nil "foo")
"foo")
(check-for-bug :section22-legacy-15
(setq x 5)
5)
(check-for-bug :section22-legacy-19
(format nil "The answer is ~D." x)
"The answer is 5.")
(check-for-bug :section22-legacy-23
(format nil "The answer is ~3D." x)
"The answer is 5.")
(check-for-bug :section22-legacy-27
(format nil "The answer is ~3,'0D." x)
"The answer is 005.")
(check-for-bug :section22-legacy-31
(format nil "The answer is ~:D." (expt 47 x))
"The answer is 229,345,007.")
(check-for-bug :section22-legacy-35
(setq y "elephant")
"elephant")
(check-for-bug :section22-legacy-39
(format nil "Look at the ~A!" y)
"Look at the elephant!")
(check-for-bug :section22-legacy-43
(setq n 3)
3)
(check-for-bug :section22-legacy-47
(format nil "~D item~:P found." n)
"3 items found.")
(check-for-bug :section22-legacy-51
(format nil "~R dog~:[s are~; is~] here." n (= n 1))
"three dogs are here.")
(check-for-bug :section22-legacy-55
(format nil "~R dog~:*~[s are~; is~:;s are~] here." n)
"three dogs are here.")
(check-for-bug :section22-legacy-59
(format nil "Here ~[are~;is~:;are~] ~:*~R pupp~:@P." n)
"Here are three puppies.")
(check-for-bug :section22-legacy-63
(defun foo (x)
(format nil "~6,2F|~6,2,1,'*F|~6,2,,'?F|~6F|~,2F|~F"
x x x x x x))
FOO)
(check-for-bug :section22-legacy-69
(foo 3.14159)
" 3.14| 31.42| 3.14|3.1416|3.14|3.14159")
(check-for-bug :section22-legacy-73
(foo -3.14159)
" -3.14|-31.42| -3.14|-3.142|-3.14|-3.14159")
(check-for-bug :section22-legacy-77
(foo 100.0)
"100.00|******|100.00| 100.0|100.00|100.0")
(check-for-bug :section22-legacy-81
(foo 1234.0)
"1234.00|******|??????|1234.0|1234.00|1234.0")
(check-for-bug :section22-legacy-85
(foo 0.006)
" 0.01| 0.06| 0.01| 0.006|0.01|0.006")
(check-for-bug :section22-legacy-89
(defun foo (x)
(format nil
"~9,2,1,,'*E|~10,3,2,2,'?,,'$E|~
~9,3,2,-2,'%@E|~9,2E"
x x x x))
FOO)
(check-for-bug :section22-legacy-97
(foo 3.14159)
" 3.14E+0| 31.42$-01|+.003E+03| 3.14E+0")
(check-for-bug :section22-legacy-101
(foo -3.14159)
" -3.14E+0|-31.42$-01|-.003E+03| -3.14E+0")
(check-for-bug :section22-legacy-105
(foo 1100.0)
" 1.10E+3| 11.00$+02|+.001E+06| 1.10E+3")
(check-for-bug :section22-legacy-109
(foo 1100.0L0)
#-(or cmu sbcl) " 1.10L+3| 11.00$+02|+.001L+06| 1.10L+3"
#+(or cmu sbcl) " 1.10d+3| 11.00$+02|+.001d+06| 1.10d+3")
(check-for-bug :section22-legacy-114
(foo 1.1E13)
"*********| 11.00$+12|+.001E+16| 1.10E+13")
(check-for-bug :section22-legacy-118
(foo 1.1L120)
#-(or cmu sbcl) "*********|??????????|%%%%%%%%%|1.10L+120"
#+(or cmu sbcl) "*********|??????????|%%%%%%%%%|1.10d+120")
(check-for-bug :section22-legacy-123
(defun foo (x)
(format nil "~9,2,1,,'*G|~9,3,2,3,'?,,'$G|~9,3,2,0,'%G|~9,2G"
x x x x))
foo)
(check-for-bug :section22-legacy-129
(foo 0.0314159)
" 3.14E-2|314.2$-04|0.314E-01| 3.14E-2")
(check-for-bug :section22-legacy-133
(foo 0.314159)
" 0.31 |0.314 |0.314 | 0.31 ")
(check-for-bug :section22-legacy-137
(foo 3.14159)
" 3.1 | 3.14 | 3.14 | 3.1 ")
(check-for-bug :section22-legacy-141
(foo 31.4159)
" 31. | 31.4 | 31.4 | 31. ")
(check-for-bug :section22-legacy-145
(foo 314.159)
" 3.14E+2| 314. | 314. | 3.14E+2")
(check-for-bug :section22-legacy-149
(foo 3141.59)
" 3.14E+3|314.2$+01|0.314E+04| 3.14E+3")
(check-for-bug :section22-legacy-153
(foo 3141.59L0)
#-(or cmu sbcl) " 3.14L+3|314.2$+01|0.314L+04| 3.14L+3"
#+(or cmu sbcl) " 3.14d+3|314.2$+01|0.314d+04| 3.14d+3")
(check-for-bug :section22-legacy-158
(foo 3.14E12)
"*********|314.0$+10|0.314E+13| 3.14E+12")
(check-for-bug :section22-legacy-162
(foo 3.14L120)
#-(or cmu sbcl) "*********|?????????|%%%%%%%%%|3.14L+120"
#+(or cmu sbcl) "*********|?????????|%%%%%%%%%|3.14d+120")
(check-for-bug :section22-legacy-167
(format nil "~10<foo~;bar~>")
"foo bar")
(check-for-bug :section22-legacy-171
(format nil "~10:<foo~;bar~>")
" foo bar")
(check-for-bug :section22-legacy-175
(format nil "~10<foobar~>")
" foobar")
(check-for-bug :section22-legacy-179
(format nil "~10:<foobar~>")
" foobar")
(check-for-bug :section22-legacy-183
(format nil "~10:@<foo~;bar~>")
#+(or sbcl cmu ecls)
" foo bar "
#+clisp
" foo bar "
#-(or sbcl cmu clisp ecls)
fill-this-in)
(check-for-bug :section22-legacy-192
(format nil "~10@<foobar~>")
"foobar ")
(check-for-bug :section22-legacy-196
(format nil "~10:@<foobar~>")
" foobar ")
(check-for-bug :section22-legacy-200
(FORMAT NIL "Written to ~A." #P"foo.bin")
"Written to foo.bin.")
| |
f7ab55f00b224f02fbf7c794da9127f67434353ac090d5c719db0c952c5c1313 | libre-man/cl-transmission | util.lisp | (in-package :cl-user)
(uiop:define-package cl-transmission.util
(:use :cl :rutils.readtable)
(:import-from :alexandria
#:define-constant))
(in-package :cl-transmission.util)
(named-readtables:in-readtable rutils.readtable:rutils-readtable)
(defmacro define-export-macro (type)
(let* ((name (intern (format nil "~A-EXPORT" (symbol-name type)))))
`(progn
(export ',name)
(defmacro ,name (exported-name args &body body)
`(progn
(export ',exported-name)
(,',type ,exported-name ,args ,@body))))))
(define-export-macro defclass)
(define-export-macro defmacro)
(define-export-macro defun)
(define-export-macro defgeneric)
(define-export-macro define-constant)
(define-export-macro define-condition)
(defmacro the-check (type val)
(rutils:once-only (val)
`(progn
(check-type ,val ,type)
(the ,type ,val))))
(defun-export make-keyword (str)
(check-type str string)
(intern str #.(find-package :keyword)))
(defun-export string->keyword (string)
(check-type string string)
(the-check symbol
(make-keyword
(string-upcase
(cl-ppcre:regex-replace-all "[A-Z]" string "-\\&")))))
(defun-export plist-to-hash-table (plist
&rest
rest
&key
(convert-key #'identity)
(convert-value #'identity)
&allow-other-keys)
(remf rest :convert-key)
(remf rest :convert-value)
(loop :with ht = (apply #'make-hash-table rest)
:for key :in plist :by #'cddr
:for val :in (cdr plist) :by #'cddr
:do (rutils:sethash (funcall convert-key key)
ht
(funcall convert-value val))
:finally (return ht)))
(declaim (inline contains-key))
(defun-export contains-key (key hash-table)
(rutils:2nd (gethash key hash-table)))
| null | https://raw.githubusercontent.com/libre-man/cl-transmission/4bbf1d2761bfa5dfa79b7bc12c3238089b994d95/src/util.lisp | lisp | (in-package :cl-user)
(uiop:define-package cl-transmission.util
(:use :cl :rutils.readtable)
(:import-from :alexandria
#:define-constant))
(in-package :cl-transmission.util)
(named-readtables:in-readtable rutils.readtable:rutils-readtable)
(defmacro define-export-macro (type)
(let* ((name (intern (format nil "~A-EXPORT" (symbol-name type)))))
`(progn
(export ',name)
(defmacro ,name (exported-name args &body body)
`(progn
(export ',exported-name)
(,',type ,exported-name ,args ,@body))))))
(define-export-macro defclass)
(define-export-macro defmacro)
(define-export-macro defun)
(define-export-macro defgeneric)
(define-export-macro define-constant)
(define-export-macro define-condition)
(defmacro the-check (type val)
(rutils:once-only (val)
`(progn
(check-type ,val ,type)
(the ,type ,val))))
(defun-export make-keyword (str)
(check-type str string)
(intern str #.(find-package :keyword)))
(defun-export string->keyword (string)
(check-type string string)
(the-check symbol
(make-keyword
(string-upcase
(cl-ppcre:regex-replace-all "[A-Z]" string "-\\&")))))
(defun-export plist-to-hash-table (plist
&rest
rest
&key
(convert-key #'identity)
(convert-value #'identity)
&allow-other-keys)
(remf rest :convert-key)
(remf rest :convert-value)
(loop :with ht = (apply #'make-hash-table rest)
:for key :in plist :by #'cddr
:for val :in (cdr plist) :by #'cddr
:do (rutils:sethash (funcall convert-key key)
ht
(funcall convert-value val))
:finally (return ht)))
(declaim (inline contains-key))
(defun-export contains-key (key hash-table)
(rutils:2nd (gethash key hash-table)))
| |
eb64b25fd8d3afee2917738a14ae99c1c71b3d8bc6bbc38efd8a304662b6e9ad | opqdonut/haskell-exercises | W4Test.hs | module W4Test where
import W4
import Impl.Test
import Control.Monad
import Data.List
import Data.IORef
import System.IO
import System.Directory
import Test.QuickCheck hiding (Result,reason,(===))
import Test.QuickCheck.Monadic
main = testExsArgs tests stdArgs {maxSize = 40, maxSuccess = 40}
tests = [[ex1_hello]
,[ex2_greet]
,[ex3_greet2]
,[ex4_readWords]
,[ex5_readUntil]
,[ex6_printFibs]
,[ex7_isums]
,[ex8_whenM_True, ex8_whenM_False]
,[ex9_while]
,[ex10_debug]
,[ex11_mapM_]
,[ex12_forM]
,[ex13_doubleCall]
,[ex14_compose]
,[ex15_mkCounter]
,[ex16_hFetchLines]
,[ex17_readCSV]
,[ex18_compareFiles]
,[ex19_interact_terminates, ex19_interact_loop]
]
-- -- -- -- --
ex1_hello = monadicIO $ do
(text,()) <- runc' hello
stop_ $ text === "HELLO\nWORLD\n"
word = listOf1 (choose ('a','z'))
ex2_greet = monadicIO $ do
name <- pick word
(text,()) <- runc' $ greet name
stop_ $ text === ("HELLO "++name++"\n")
ex3_greet2 = monadicIO $ do
name <- pick word
(text,()) <- runc (name++"\n") greet2
stop_ $ text === ("HELLO "++name++"\n")
ex4_readWords = monadicIO $ do
words <- pick $ listOf1 word
(_,ret) <- runc (unlines words) (readWords (length words - 1))
stop_ $ ret === sort (init words)
ex5_readUntil = monadicIO $ do
end <- pick word
words <- pick $ listOf1 (word `suchThat` (/=end))
let input = unlines $ words ++ [end]
(_,ret) <- runc input (readUntil (==end))
stop_ . counterexample ("readUntil (==" ++ show end ++ ")\nInput: "++show input) $
ret === words
ex6_printFibs = monadicIO $ do
n <- pick $ choose (0,40)
(text,_) <- runc' $ printFibs n
stop_ . counterexample ("printFibs "++show n) $
text === unlines (map show (take n fibs))
where fibs = 1:1:zipWith (+) fibs (tail fibs)
ex7_isums = monadicIO $ do
numbers <- pick . listOf1 $ choose (-10,10)
let n = length numbers
(text,ret) <- runc (unlines $ map show numbers) $ isums n
stop_ . counterexample ("isums "++show n) $
conjoin [counterexample "returning" $
ret === sum numbers,
counterexample "printing" $
text === unlines (map show $ scanl1 (+) numbers)]
ex8_whenM_True = monadicIO $ do
r <- run $ newIORef False
let op = writeIORef r True
let cond = return True
run $ whenM cond op
v <- run $ readIORef r
stop_ $ counterexample "whenM (return True)" $
v
ex8_whenM_False = monadicIO $ do
r <- run $ newIORef False
let op = writeIORef r True
let cond = return False
run $ whenM cond op
v <- run $ readIORef r
stop_ $ counterexample "whenM (return False)" $
not v
ex9_while = monadicIO $ do
i <- pick $ choose (0,10 :: Int)
a <- run $ newIORef 0
b <- run $ newIORef 0
let ehto = modifyIORef a (+1) >> fmap (<=i) (readIORef a)
op = modifyIORef b (+1)
run $ while ehto op
af <- run $ readIORef a
bf <- run $ readIORef b
stop_ $ counterexample "while" $
conjoin [counterexample "number of calls to condition" $ af === i+1,
counterexample "number of calls to operation" $ bf === i]
ex10_debug = monadicIO $ do
token <- pick word
value <- pick word
print <- pick word
(text,ret) <- runc' $ debug token (putStrLn print >> return value)
stop_ $ counterexample ("debug "++show token++" (do putStrLn "++show print++"; return "++show value++")") $
conjoin [counterexample "tulostus" $ text === (token ++ "\n" ++ print ++ "\n" ++ token ++ "\n"),
counterexample "palautus" $ ret === value]
ex11_mapM_ = monadicIO $ do
r <- run $ (newIORef [] :: IO (IORef [Int]))
lis <- pick $ listOf1 arbitrary
let op x = modifyIORef r (x:)
run $ mymapM_ op lis
ret <- run $ readIORef r
stop_ $ counterexample ("mapM op "++show lis) $
ret === reverse lis
ex12_forM = monadicIO $ do
r <- run $ (newIORef [] :: IO (IORef [Int]))
lis <- pick $ listOf1 arbitrary
let op x = do modifyIORef r (x:)
return $ x+1
ret <- run $ myforM lis op
out <- run $ readIORef r
stop_ $ counterexample ("forM "++show lis++" op") $
conjoin [counterexample "return value" $ ret === map (+1) lis,
counterexample "side effects" $ out === reverse lis]
ex13_doubleCall = monadicIO $ do
i <- pick $ (choose (0,20) :: Gen Int)
let op = return (return i)
out <- run $ doubleCall $ op
stop_ $ counterexample ("doubleCall (return (return "++show i++"))") $
out === i
ex14_compose = monadicIO $ do
i <- pick $ (choose (0,20) :: Gen Int)
let op1 = return . (*2)
op2 = return . (+1)
out <- run $ compose op1 op2 i
stop_ $ counterexample "compose (return . (*2)) (return . (+1))" $
out === (i+1)*2
ex15_mkCounter = monadicIO $ do
n <- pick $ choose (0,20)
m <- run $ do (i,g) <- mkCounter
replicateM_ n i
g
stop_ $ m === n
ex16_hFetchLines = monadicIO $ do
lines <- pick $ listOf1 word
inds <- fmap (nub.sort) . pick . listOf1 $ choose (1,length lines)
dir <- run $ getTemporaryDirectory
(path,h) <- run $ openTempFile dir "hFetchLines.in"
run $ hPutStr h $ unlines lines
run $ hSeek h AbsoluteSeek 0
outs <- run $ hFetchLines h inds
stop_ $ counterexample ("hFetchLines h "++show inds++"\nContents:\n"++unlines lines) $
conjoin [outs !! j === lines !! (i-1) | (j,i) <- zip [0..] inds]
toCSV = unlines . map (intercalate ",")
tmpSpit pattern conts = do
dir <- getTemporaryDirectory
(path,h) <- openTempFile dir pattern
hPutStr h conts
hClose h
return path
ex17_readCSV = monadicIO $ do
dat <- pick $ listOf1 (listOf1 word)
let dat' = toCSV dat
path <- run $ tmpSpit "readCSV.in" dat'
ret <- run $ readCSV path
stop_ $ counterexample ("File contents: "++show dat') $ ret === dat
-- | Property for exercise 18: 'compareFiles' must print, for every line
-- index where file A and file B differ, the pair
--   @"< <line from A>"@ then @"> <line from B>"@, in file order.
-- File B is built from A by appending extra text at a chosen set of
-- indices, so exactly those indices differ.
ex18_compareFiles = monadicIO $ do
  alines <- pick $ listOf1 word
  lines2 <- pick $ vectorOf (length alines) word
  -- 0-based indices at which B's line is altered (sorted, deduplicated).
  diffs <- pick $ fmap (nub.sort) $ listOf1 (choose (0,length alines-1))
  let blines = [ if elem i diffs then s1++s2 else s1 | (i,s1,s2) <- zip3 [0..] alines lines2]
      ac = unlines alines
      bc = unlines blines
      -- Expected report: one "<"/">" pair per differing index, ascending.
      should = concatMap (\i -> ["< "++alines!!i,"> "++alines!!i++lines2!!i]) diffs
  path1 <- run $ tmpSpit "compareFilesA.in" ac
  path2 <- run $ tmpSpit "compareFilesB.in" bc
  (outp,()) <- runc' $ compareFiles path1 path2
  let ls = lines outp
  stop_ $ counterexample ("compareFiles\nFile A:\n"++ac++"File B:\n"++bc) $
    conjoin [counterexample "number of lines printed" $ length ls === 2*length diffs,
             counterexample "lines printed" $ ls === should]
ex19_interact_terminates = monadicIO $ do
let f :: (String,String) -> (Bool,String,String)
f (s,_) = (False,s,s)
w <- pick $ word
(text,ret) <- runc w $ interact' f ""
stop_ $ conjoin [counterexample "tulostus" $ text === w,
counterexample "palautus" $ ret === w]
ex19_interact_loop = monadicIO $ do
is <- pick $ listOf1 (arbitrary :: Gen Int)
let f :: (String,[Int]) -> (Bool,String,[Int])
f ("END",lis) = (False,"END\n", lis)
f (x,lis) = (True, "PICK\n", read x : lis)
eret = reverse $ 0:is
etext = unlines $ replicate (length is) "PICK" ++ ["END"]
(text,ret) <- runc (unlines $ map show is ++ ["END"]) $ interact' f [0]
stop_ $ conjoin [counterexample "printing" $ text === etext,
counterexample "return value" $ ret === eret]
| null | https://raw.githubusercontent.com/opqdonut/haskell-exercises/87be496ccb0125a52386e979c2502fe9695c6498/W4Test.hs | haskell | -- -- -- -- | module W4Test where
import W4
import Impl.Test
import Control.Monad
import Data.List
import Data.IORef
import System.IO
import System.Directory
import Test.QuickCheck hiding (Result,reason,(===))
import Test.QuickCheck.Monadic
main = testExsArgs tests stdArgs {maxSize = 40, maxSuccess = 40}
-- | All graded properties, grouped per exercise: sub-list @k@ holds the
-- checks for exercise @k+1@.
tests = [[ex1_hello]
        ,[ex2_greet]
        ,[ex3_greet2]
        ,[ex4_readWords]
        ,[ex5_readUntil]
        ,[ex6_printFibs]
        ,[ex7_isums]
        ,[ex8_whenM_True, ex8_whenM_False]
        ,[ex9_while]
        ,[ex10_debug]
        ,[ex11_mapM_]
        ,[ex12_forM]
        ,[ex13_doubleCall]
        ,[ex14_compose]
        ,[ex15_mkCounter]
        ,[ex16_hFetchLines]
        ,[ex17_readCSV]
        ,[ex18_compareFiles]
        ,[ex19_interact_terminates, ex19_interact_loop]
        ]
-- | Exercise 1: 'hello' must print exactly HELLO and WORLD on separate
-- lines.
ex1_hello = monadicIO $ do
  (printed, ()) <- runc' hello
  stop_ (printed === "HELLO\nWORLD\n")
word = listOf1 (choose ('a','z'))
ex2_greet = monadicIO $ do
name <- pick word
(text,()) <- runc' $ greet name
stop_ $ text === ("HELLO "++name++"\n")
ex3_greet2 = monadicIO $ do
name <- pick word
(text,()) <- runc (name++"\n") greet2
stop_ $ text === ("HELLO "++name++"\n")
ex4_readWords = monadicIO $ do
words <- pick $ listOf1 word
(_,ret) <- runc (unlines words) (readWords (length words - 1))
stop_ $ ret === sort (init words)
ex5_readUntil = monadicIO $ do
end <- pick word
words <- pick $ listOf1 (word `suchThat` (/=end))
let input = unlines $ words ++ [end]
(_,ret) <- runc input (readUntil (==end))
stop_ . counterexample ("readUntil (==" ++ show end ++ ")\nInput: "++show input) $
ret === words
-- | Property for exercise 6: 'printFibs' n must print the first n Fibonacci
-- numbers, one per line.
ex6_printFibs = monadicIO $ do
  n <- pick $ choose (0,40)
  (text,_) <- runc' $ printFibs n
  stop_ . counterexample ("printFibs "++show n) $
    text === unlines (map show (take n fibs))
  where
    -- Reference stream: the classic self-referential Fibonacci definition.
    fibs = 1:1:zipWith (+) fibs (tail fibs)
-- | Property for exercise 7: 'isums' n reads n numbers from input, prints
-- the running sum after each one, and returns the final total.
ex7_isums = monadicIO $ do
  numbers <- pick . listOf1 $ choose (-10,10)
  let n = length numbers
  (text,ret) <- runc (unlines $ map show numbers) $ isums n
  stop_ . counterexample ("isums "++show n) $
    -- Both the return value and every intermediate printed sum are checked.
    conjoin [counterexample "returning" $
               ret === sum numbers,
             counterexample "printing" $
               text === unlines (map show $ scanl1 (+) numbers)]
ex8_whenM_True = monadicIO $ do
r <- run $ newIORef False
let op = writeIORef r True
let cond = return True
run $ whenM cond op
v <- run $ readIORef r
stop_ $ counterexample "whenM (return True)" $
v
ex8_whenM_False = monadicIO $ do
r <- run $ newIORef False
let op = writeIORef r True
let cond = return False
run $ whenM cond op
v <- run $ readIORef r
stop_ $ counterexample "whenM (return False)" $
not v
-- | Property for exercise 9: 'while' must re-evaluate the condition before
-- every iteration and run the body exactly as long as the condition holds.
-- IORef @a@ counts condition evaluations, @b@ counts body executions.
ex9_while = monadicIO $ do
  i <- pick $ choose (0,10 :: Int)
  a <- run $ newIORef 0
  b <- run $ newIORef 0
  -- The condition itself has a side effect so its invocations are countable:
  -- it is true for the first i evaluations, false on evaluation i+1.
  let ehto = modifyIORef a (+1) >> fmap (<=i) (readIORef a)
      op = modifyIORef b (+1)
  run $ while ehto op
  af <- run $ readIORef a
  bf <- run $ readIORef b
  stop_ $ counterexample "while" $
    conjoin [counterexample "number of calls to condition" $ af === i+1,
             counterexample "number of calls to operation" $ bf === i]
ex10_debug = monadicIO $ do
token <- pick word
value <- pick word
print <- pick word
(text,ret) <- runc' $ debug token (putStrLn print >> return value)
stop_ $ counterexample ("debug "++show token++" (do putStrLn "++show print++"; return "++show value++")") $
conjoin [counterexample "tulostus" $ text === (token ++ "\n" ++ print ++ "\n" ++ token ++ "\n"),
counterexample "palautus" $ ret === value]
ex11_mapM_ = monadicIO $ do
r <- run $ (newIORef [] :: IO (IORef [Int]))
lis <- pick $ listOf1 arbitrary
let op x = modifyIORef r (x:)
run $ mymapM_ op lis
ret <- run $ readIORef r
stop_ $ counterexample ("mapM op "++show lis) $
ret === reverse lis
ex12_forM = monadicIO $ do
r <- run $ (newIORef [] :: IO (IORef [Int]))
lis <- pick $ listOf1 arbitrary
let op x = do modifyIORef r (x:)
return $ x+1
ret <- run $ myforM lis op
out <- run $ readIORef r
stop_ $ counterexample ("forM "++show lis++" op") $
conjoin [counterexample "return value" $ ret === map (+1) lis,
counterexample "side effects" $ out === reverse lis]
ex13_doubleCall = monadicIO $ do
i <- pick $ (choose (0,20) :: Gen Int)
let op = return (return i)
out <- run $ doubleCall $ op
stop_ $ counterexample ("doubleCall (return (return "++show i++"))") $
out === i
ex14_compose = monadicIO $ do
i <- pick $ (choose (0,20) :: Gen Int)
let op1 = return . (*2)
op2 = return . (+1)
out <- run $ compose op1 op2 i
stop_ $ counterexample "compose (return . (*2)) (return . (+1))" $
out === (i+1)*2
ex15_mkCounter = monadicIO $ do
n <- pick $ choose (0,20)
m <- run $ do (i,g) <- mkCounter
replicateM_ n i
g
stop_ $ m === n
-- | Property for exercise 16: 'hFetchLines' must return, for each (sorted,
-- 1-based) line index, the corresponding line of the file read from the
-- handle.
--
-- Fix: the temp-file handle was never closed and the file never removed,
-- leaking one handle and one file per generated test case; both are cleaned
-- up before the property reports its result.
ex16_hFetchLines = monadicIO $ do
  lines <- pick $ listOf1 word
  inds <- fmap (nub.sort) . pick . listOf1 $ choose (1,length lines)
  dir <- run $ getTemporaryDirectory
  (path,h) <- run $ openTempFile dir "hFetchLines.in"
  run $ hPutStr h $ unlines lines
  -- Rewind so hFetchLines reads what was just written.
  run $ hSeek h AbsoluteSeek 0
  outs <- run $ hFetchLines h inds
  -- Release the temp resources before finishing (hClose on an
  -- already-closed handle is a no-op, so this is safe either way).
  run $ hClose h
  run $ removeFile path
  stop_ $ counterexample ("hFetchLines h "++show inds++"\nContents:\n"++unlines lines) $
    conjoin [outs !! j === lines !! (i-1) | (j,i) <- zip [0..] inds]
toCSV = unlines . map (intercalate ",")
tmpSpit pattern conts = do
dir <- getTemporaryDirectory
(path,h) <- openTempFile dir pattern
hPutStr h conts
hClose h
return path
ex17_readCSV = monadicIO $ do
dat <- pick $ listOf1 (listOf1 word)
let dat' = toCSV dat
path <- run $ tmpSpit "readCSV.in" dat'
ret <- run $ readCSV path
stop_ $ counterexample ("File contents: "++show dat') $ ret === dat
-- | Property for exercise 18: 'compareFiles' must print, for every line
-- index where file A and file B differ, the pair
--   @"< <line from A>"@ then @"> <line from B>"@, in file order.
-- File B is built from A by appending extra text at a chosen set of
-- indices, so exactly those indices differ.
ex18_compareFiles = monadicIO $ do
  alines <- pick $ listOf1 word
  lines2 <- pick $ vectorOf (length alines) word
  -- 0-based indices at which B's line is altered (sorted, deduplicated).
  diffs <- pick $ fmap (nub.sort) $ listOf1 (choose (0,length alines-1))
  let blines = [ if elem i diffs then s1++s2 else s1 | (i,s1,s2) <- zip3 [0..] alines lines2]
      ac = unlines alines
      bc = unlines blines
      -- Expected report: one "<"/">" pair per differing index, ascending.
      should = concatMap (\i -> ["< "++alines!!i,"> "++alines!!i++lines2!!i]) diffs
  path1 <- run $ tmpSpit "compareFilesA.in" ac
  path2 <- run $ tmpSpit "compareFilesB.in" bc
  (outp,()) <- runc' $ compareFiles path1 path2
  let ls = lines outp
  stop_ $ counterexample ("compareFiles\nFile A:\n"++ac++"File B:\n"++bc) $
    conjoin [counterexample "number of lines printed" $ length ls === 2*length diffs,
             counterexample "lines printed" $ ls === should]
ex19_interact_terminates = monadicIO $ do
let f :: (String,String) -> (Bool,String,String)
f (s,_) = (False,s,s)
w <- pick $ word
(text,ret) <- runc w $ interact' f ""
stop_ $ conjoin [counterexample "tulostus" $ text === w,
counterexample "palautus" $ ret === w]
ex19_interact_loop = monadicIO $ do
is <- pick $ listOf1 (arbitrary :: Gen Int)
let f :: (String,[Int]) -> (Bool,String,[Int])
f ("END",lis) = (False,"END\n", lis)
f (x,lis) = (True, "PICK\n", read x : lis)
eret = reverse $ 0:is
etext = unlines $ replicate (length is) "PICK" ++ ["END"]
(text,ret) <- runc (unlines $ map show is ++ ["END"]) $ interact' f [0]
stop_ $ conjoin [counterexample "printing" $ text === etext,
counterexample "return value" $ ret === eret]
|
14172226bbcb3bb7b7938c171c58c251549e5931e3117401aa611aeef50bb61b | mirleft/ocaml-tls | http_client.ml |
open Lwt
open Ex_common
(* Perform a single HTTPS "GET /" against [hostname]:[port] over TLS and dump
   the raw response to stdout.  [ca] selects the trust anchor, [fp] a pinned
   certificate fingerprint; both are forwarded to [auth]. *)
let http_client ?ca ?fp hostname port =
  let port = int_of_string port in
  auth ?ca ?fp () >>= fun authenticator ->
  let config = Tls.Config.client ~authenticator () in
  Tls_lwt.connect_ext config (hostname, port) >>= fun (input, output) ->
  let request =
    "GET / HTTP/1.1\r\nHost: " ^ hostname ^ "\r\nConnection: close\r\n\r\n"
  in
  Lwt_io.write output request >>= fun () ->
  Lwt_io.read input >>= Lwt_io.print >>= fun () ->
  Lwt_io.printf "++ done.\n%!"
(* Entry point.  Usage, dispatched on the argument count:
     prog host                       — port defaults to 443
     prog host port                  — system trust
     prog host port trustfile       — custom CA file/dir
     prog host port FP fingerprint  — pinned certificate fingerprint
   TLS alerts/failures are reported to stdout and then re-raised so the
   process still terminates with the failure. *)
let () =
  try
    match Sys.argv with
    | [| _ ; host ; port ; "FP" ; fp |] -> Lwt_main.run (http_client host port ~fp)
    | [| _ ; host ; port ; trust |] -> Lwt_main.run (http_client host port ~ca:trust)
    | [| _ ; host ; port |] -> Lwt_main.run (http_client host port)
    | [| _ ; host |] -> Lwt_main.run (http_client host "443")
    | args -> Printf.eprintf "%s <host> <port>\n%!" args.(0)
  with
  | Tls_lwt.Tls_alert alert as exn ->
    (* Alert received from the peer: log it, keep the exception. *)
    print_alert "remote end" alert ; raise exn
  | Tls_lwt.Tls_failure fail as exn ->
    (* Failure detected locally during the handshake/session. *)
    print_fail "our end" fail ; raise exn
| null | https://raw.githubusercontent.com/mirleft/ocaml-tls/3b7736f61c684bb11170e444126fea7df1ec7d69/lwt/examples/http_client.ml | ocaml |
open Lwt
open Ex_common
(* Perform a single HTTPS "GET /" against [hostname]:[port] over TLS and dump
   the raw response to stdout.  [ca] selects the trust anchor, [fp] a pinned
   certificate fingerprint; both are forwarded to [auth]. *)
let http_client ?ca ?fp hostname port =
  let port = int_of_string port in
  auth ?ca ?fp () >>= fun authenticator ->
  let config = Tls.Config.client ~authenticator () in
  Tls_lwt.connect_ext config (hostname, port) >>= fun (input, output) ->
  let request =
    "GET / HTTP/1.1\r\nHost: " ^ hostname ^ "\r\nConnection: close\r\n\r\n"
  in
  Lwt_io.write output request >>= fun () ->
  Lwt_io.read input >>= Lwt_io.print >>= fun () ->
  Lwt_io.printf "++ done.\n%!"
(* Entry point.  Usage, dispatched on the argument count:
     prog host                       — port defaults to 443
     prog host port                  — system trust
     prog host port trustfile       — custom CA file/dir
     prog host port FP fingerprint  — pinned certificate fingerprint
   TLS alerts/failures are reported to stdout and then re-raised so the
   process still terminates with the failure. *)
let () =
  try
    match Sys.argv with
    | [| _ ; host ; port ; "FP" ; fp |] -> Lwt_main.run (http_client host port ~fp)
    | [| _ ; host ; port ; trust |] -> Lwt_main.run (http_client host port ~ca:trust)
    | [| _ ; host ; port |] -> Lwt_main.run (http_client host port)
    | [| _ ; host |] -> Lwt_main.run (http_client host "443")
    | args -> Printf.eprintf "%s <host> <port>\n%!" args.(0)
  with
  | Tls_lwt.Tls_alert alert as exn ->
    (* Alert received from the peer: log it, keep the exception. *)
    print_alert "remote end" alert ; raise exn
  | Tls_lwt.Tls_failure fail as exn ->
    (* Failure detected locally during the handshake/session. *)
    print_fail "our end" fail ; raise exn
| |
dbea9adcf5e807c26910fef774a95bb8a564908b0aa25b6ef9a0dbe9b5e83ccc | jedimahdi/soltan | Api.hs | module Hokm.Api.Network.Server.Api
( Effects
, server
) where
import qualified Data.UUID as UUID
import qualified Database.PostgreSQL.Simple as Database
import Hokm.Api.Data.User
import qualified Hokm.Api.Effect.Database.User as Database ( UserL )
import qualified Hokm.Api.Effect.Database.User as Database.User
import Hokm.Api.Effect.GamesState ( GamesStateL )
import Hokm.Api.Effect.Hub ( HubL )
import Hokm.Api.Effect.Logger ( LoggerL )
import Hokm.Api.Effect.Random ( RandomL )
import Hokm.Api.Effect.Scrypt ( ScryptL )
import Hokm.Api.Network.Anatomy.Api
import qualified Hokm.Api.Network.Server.Api.Authentication as Authentication
import qualified Hokm.Api.Network.Server.Api.Game as Game
import qualified Hokm.Api.Network.Server.Api.User as User
import qualified Hokm.Api.Network.Server.Api.Users as Users
import qualified Hokm.Api.Servant.Response as Response
import Polysemy ( Embed, Members, Sem )
import Polysemy.Error ( Error )
import Servant.API.Generic
import Servant.Server ( ServerError )
import Servant.Server.Generic ( AsServerT, genericServerT )
-- | Polysemy effect row that every /api handler below may use; the
-- application boundary must interpret all of these.
type Effects = '[HubL, ScryptL, Database.UserL, Error ServerError, RandomL, GamesStateL, LoggerL]

-- | Wire each field of the 'Routes' record to the handler exported by its
-- module, producing the servant server for the whole /api tree.
server :: Members Effects r => ToServant Routes (AsServerT (Sem r))
server = genericServerT Routes { authentication = Authentication.server
                               , users = Users.server
                               , user = User.server
                               , game = Game.server
                               }
| null | https://raw.githubusercontent.com/jedimahdi/soltan/50a97213ec83c81f5ec79898e2855d0c5e78f06c/backend/hokm-api/src/Hokm/Api/Network/Server/Api.hs | haskell | module Hokm.Api.Network.Server.Api
( Effects
, server
) where
import qualified Data.UUID as UUID
import qualified Database.PostgreSQL.Simple as Database
import Hokm.Api.Data.User
import qualified Hokm.Api.Effect.Database.User as Database ( UserL )
import qualified Hokm.Api.Effect.Database.User as Database.User
import Hokm.Api.Effect.GamesState ( GamesStateL )
import Hokm.Api.Effect.Hub ( HubL )
import Hokm.Api.Effect.Logger ( LoggerL )
import Hokm.Api.Effect.Random ( RandomL )
import Hokm.Api.Effect.Scrypt ( ScryptL )
import Hokm.Api.Network.Anatomy.Api
import qualified Hokm.Api.Network.Server.Api.Authentication as Authentication
import qualified Hokm.Api.Network.Server.Api.Game as Game
import qualified Hokm.Api.Network.Server.Api.User as User
import qualified Hokm.Api.Network.Server.Api.Users as Users
import qualified Hokm.Api.Servant.Response as Response
import Polysemy ( Embed, Members, Sem )
import Polysemy.Error ( Error )
import Servant.API.Generic
import Servant.Server ( ServerError )
import Servant.Server.Generic ( AsServerT, genericServerT )
type Effects = '[HubL, ScryptL, Database.UserL, Error ServerError, RandomL, GamesStateL, LoggerL]
-- | The servant server for the whole /api tree: each field of the 'Routes'
-- record is wired to the handler exported by its module.
server :: Members Effects r => ToServant Routes (AsServerT (Sem r))
server = genericServerT routes
  where
    routes = Routes
      { authentication = Authentication.server
      , users          = Users.server
      , user           = User.server
      , game           = Game.server
      }
| |
d6d59510b14982b26f3da2894d59fcc0fa9abadceb0a680c809c2be7145ea8e0 | aadafinance/aada-finance | Test.hs | # LANGUAGE NumericUnderscores #
# LANGUAGE TypeApplications #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
# LANGUAGE UndecidableInstances #
# OPTIONS_GHC -Wno - incomplete - uni - patterns #
module Spec.Test where
import Data.Either
import Prelude
import Test.Tasty
import Plutus.V1.Ledger.Api
import Request
import DebtRequest
import qualified Collateral
import qualified Interest
import qualified AadaNft
import qualified OracleNft
import Plutus.Test.Model
import Ledger.Address (PaymentPubKeyHash(..))
import Ledger (validatorHash, scriptCurrencySymbol, interval)
import Ledger.Value as Value
import PlutusTx
import qualified PlutusTx.Builtins.Internal as INT
import Collateral (CollateralDatum (loanDuration))
import Control.Monad.State.Strict
import Helpers.TestValidator
import qualified Data.ByteString.UTF8 as BSC
import Plutus.V1.Ledger.Ada (adaValueOf)
mainTests :: BchConfig -> TestTree
mainTests cfg =
testGroup
"Main tests"
[
testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Debt request" debtRequestTest
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Happy path" happyPath
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower cancels loan test" borrowerCancelsLoan
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns full interest when loan return time has passed" returnFullLoan
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns less than it should then full time has passed" (mustFail returnNotEnoughInterest)
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns loan when half the time passed returning less than full interest" returnPartialLoan
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns loan when half the time passed returning less than full interest with same currency" (mustFail returnPartialLoanSameCs)
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns less interest than it should because of forged mintDate" (mustFail returnPartialLoanForgedMintDate)
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns less interest than it should" (mustFail returnPartialLoanLessThanItShoudInterestRepayed)
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "test loan return expiration date. Loan request not-expired" provideLoanOnTime
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "test loan return expiration date. Loan request expired" (mustFail provideLoanNotOnTime)
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "liquidate borrower" liquidateBorrower
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds'') cfg "Lender dos borrower" (mustFail lenderDosBorrower)
, testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds'' <> lenderInitialFunds) cfg "Borrower dos lender" (mustFail borrowerDosLender)
]
mintOracleNftTests :: BchConfig -> TestTree
mintOracleNftTests cfg =
testGroup
"Mint oracle nft tests"
[
testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft" mintOracleNft
, testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail2)
, testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail3)
, testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail4)
, testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail4)
, testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail5)
, testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail6)
, testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail7)
]
testSize :: BchConfig -> TestTree
testSize cfg =
testGroup
"tests to check transaction sizes"
[
-- testLimits (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Happy path" id happyPath -- (happyPath >> logError "show stats")
, testLimits ( adaValue 10_000_000 < > borrowerInitialFunds < > lenderInitialFunds ) cfg " Borrower liquidates " i d liquidateBorrower -- ( liquidateBorrower > > logError " show stats " )
, testLimits ( adaValue 10_000_000 < > borrowerInitialFunds < > lenderInitialFunds '' ) cfg " Lender dos borrower " i d ( lenderDosBorrower > > logError " show stats " )
, testLimits ( adaValue 10_000_000 < > borrowerInitialFunds '' < > lenderInitialFunds ) cfg " Borrower dos lender " i d ( borrowerDosLender > > logError " show stats " )
]
type RepayInterval = POSIXTime
type RequestExpirationDate = POSIXTime
type LendDate = POSIXTime
type BorrowerTokenName = TokenName
type LenderTokenName = TokenName
type BorrowersAddressPkh = PubKeyHash
type LiquidationNftCs = CurrencySymbol
TODO move to utils section later
-- | A 'Value' holding the given amount of the native Ada asset.
adaValue :: Integer -> Value
adaValue = singleton adaSymbol adaToken
setupUsers :: Run [PubKeyHash]
setupUsers = sequenceA [newUser borrowerInitialFunds, newUser lenderInitialFunds]
setupUsers' :: Run [PubKeyHash]
setupUsers' = sequenceA [newUser borrowerInitialFunds', newUser lenderInitialFunds']
setupUsers'' :: Run [PubKeyHash]
setupUsers'' = sequenceA [newUser borrowerInitialFunds, newUser lenderInitialFunds'']
setupUsers''' :: Run [PubKeyHash]
setupUsers''' = sequenceA [newUser borrowerInitialFunds'', newUser lenderInitialFunds]
setupSimpleNUsers :: Int -> Run [PubKeyHash]
setupSimpleNUsers n = replicateM n $ newUser $ adaValue 1000
TODO could this be done better ?
-- | Currency symbol of a fake test coin (the first component of the asset
-- class that 'fakeCoin' builds for it).
fakeCoinCs :: FakeCoin -> CurrencySymbol
fakeCoinCs coin = case fakeCoin coin of
  AssetClass (symbol, _name) -> symbol
-- | Fake asset the borrower locks as collateral in the scenarios below.
collateralCoin :: FakeCoin
collateralCoin = FakeCoin "collateral-coin-CONY"

-- | Fake asset the lender lends out.
loanCoin :: FakeCoin
loanCoin = FakeCoin "loan-coin-CONYMONY"

-- | Fake asset the borrower pays interest in.
interestCoin :: FakeCoin
interestCoin = FakeCoin "interest-coin-MONY"
generateFakeValues :: Int -> [Value]
generateFakeValues n = fmap (`fakeValue` 1) (FakeCoin . toBuiltin . BSC.fromChar <$> take n ['a'..])
generateFakeValues' :: Int -> Value
generateFakeValues' n = mconcat $ generateFakeValues n
borrowerInitialFunds :: Value
borrowerInitialFunds = fakeValue collateralCoin 100 <> fakeValue interestCoin 50 <> adaValue 100
lenderInitialFunds :: Value
lenderInitialFunds = fakeValue loanCoin 150 <> adaValue 100
borrowerInitialFunds' :: Value
borrowerInitialFunds' = fakeValue collateralCoin 100 <> fakeValue loanCoin 50 <> adaValue 100
lenderInitialFunds' :: Value
lenderInitialFunds' = fakeValue loanCoin 100 <> adaValue 100
lenderDosAmount :: Int
this is actually the limit when tx can go in , but then ca n't go out
borrowerDosAmount :: Int
borrowerDosAmount = 36
lenderInitialFunds'' :: Value
lenderInitialFunds'' = lenderInitialFunds <> generateFakeValues' lenderDosAmount
borrowerInitialFunds'' :: Value
borrowerInitialFunds'' = borrowerInitialFunds <> generateFakeValues' borrowerDosAmount
getLenderNftCs :: CurrencySymbol
getLenderNftCs = scriptCurrencySymbol getLenderNftPolicy
getBorrowerNftCs :: CurrencySymbol
getBorrowerNftCs = scriptCurrencySymbol getBorrowerNftPolicy
getLenderNftPolicy :: MintingPolicy
getLenderNftPolicy = AadaNft.policy True
getBorrowerNftPolicy :: MintingPolicy
getBorrowerNftPolicy = AadaNft.policy False
getSc1Params :: Request.ContractInfo
getSc1Params = Request.ContractInfo {
Request.lenderNftCs = getLenderNftCs
, Request.borrowersNftCs = getBorrowerNftCs
, Request.collateralSc = Address (ScriptCredential (validatorHash $ Collateral.validator getSc2Params)) Nothing
}
getSc1Params' :: DebtRequest.ContractInfo
getSc1Params' = DebtRequest.ContractInfo {
DebtRequest.lenderNftCs = getLenderNftCs
, DebtRequest.borrowersNftCs = getBorrowerNftCs
, DebtRequest.collateralSc = Address (ScriptCredential (validatorHash $ Collateral.validator getSc2Params)) Nothing
}
getSc2Params :: Collateral.ContractInfo
getSc2Params = Collateral.ContractInfo {
Collateral.lenderNftCs = getLenderNftCs
, Collateral.borrowersNftCs = getBorrowerNftCs
, Collateral.interestSc = Address (ScriptCredential (validatorHash (Interest.validator (Interest.ContractInfo getLenderNftCs)))) Nothing
, Collateral.minInterestFeePercentage = 200000
}
getTestDatum :: RepayInterval -> BorrowerTokenName -> LiquidationNftCs -> BorrowersAddressPkh -> RequestExpirationDate -> LenderTokenName -> LendDate -> Maybe StakingCredential -> RequestDatum
getTestDatum returnt bNftTn liqNft pkh expiration ltn t staking = RequestDatum
{ borrowersNftTn = bNftTn
, borrowersAddress = Address (PubKeyCredential pkh) staking -- (Just . StakingHash . PubKeyCredential . PubKeyHash $ "ff")
, loan = assetClass (fakeCoinCs loanCoin) "loan-coin-CONYMONY"
, loanAmnt = 150
, interest = assetClass (fakeCoinCs interestCoin) "interest-coin-MONY"
, interestAmnt = 50
, collateral = assetClass (fakeCoinCs collateralCoin) "collateral-coin-CONY"
, collateralAmnt = 100 -- amount of collateral
, loanDuration = returnt
, liquidateNft = liqNft
Colalteral factor used for liquidation
, liquidationCommission = 150 -- How much % borrower will pay for lender when liquidated (before time passes)
, requestExpiration = expiration
, lenderNftTn = ltn
, lendDate = t
}
getTestDatum' :: RepayInterval -> BorrowerTokenName -> LiquidationNftCs -> BorrowersAddressPkh -> RequestExpirationDate -> LenderTokenName -> LendDate -> Maybe StakingCredential -> DebtRequestDatum
getTestDatum' returnt bNftTn liqNft pkh expiration ltn t staking = DebtRequestDatum
{ borrowersNftTn = bNftTn
, borrowersAddress = Address (PubKeyCredential pkh) staking -- (Just . StakingHash . PubKeyCredential . PubKeyHash $ "ff")
, loan = assetClass (fakeCoinCs loanCoin) "loan-coin-CONYMONY"
, loanAmnt = 150
, interest = assetClass (fakeCoinCs interestCoin) "interest-coin-MONY"
, interestAmnt = 50
, collateral = assetClass (fakeCoinCs collateralCoin) "collateral-coin-CONY"
, collateralAmnt = 100 -- amount of collateral
, loanDuration = returnt
, liquidateNft = liqNft
Colalteral factor used for liquidation
, liquidationCommission = 150 -- How much % borrower will pay for lender when liquidated (before time passes)
, requestExpiration = expiration
, lenderNftTn = ltn
, lendDate = t
}
getTestDatum2 :: RepayInterval -> BorrowerTokenName -> LiquidationNftCs -> BorrowersAddressPkh -> RequestExpirationDate -> LenderTokenName -> LendDate -> Maybe StakingCredential -> RequestDatum
getTestDatum2 returnt bNftTn liqNft pkh expiration ltn t staking = RequestDatum
{ borrowersNftTn = bNftTn
, borrowersAddress = Address (PubKeyCredential pkh) staking -- (Just . StakingHash . PubKeyCredential . PubKeyHash $ "ff")
, loan = assetClass (fakeCoinCs loanCoin) "loan-coin-CONYMONY"
, loanAmnt = 100
, interest = assetClass (fakeCoinCs loanCoin) "loan-coin-CONYMONY"
, interestAmnt = 50
, collateral = assetClass (fakeCoinCs collateralCoin) "collateral-coin-CONY"
, collateralAmnt = 100 -- amount of collateral
, loanDuration = returnt
, liquidateNft = liqNft
Colalteral factor used for liquidation
, liquidationCommission = 150 -- How much % borrower will pay for lender when liquidated (before time passes)
, requestExpiration = expiration
, lenderNftTn = ltn
, lendDate = t
}
-- | Build the collateral-script datum for an accepted loan request: copy the
-- terms over from the request datum and stamp in the freshly minted lender
-- NFT name and the actual lend time.
getCollatDatumFromRequestDat :: RequestDatum -> TokenName -> POSIXTime -> Collateral.CollateralDatum
getCollatDatumFromRequestDat RequestDatum{..} newTn newMint = Collateral.CollateralDatum
  { Collateral.borrowersNftTn = borrowersNftTn
  , Collateral.borrowersAddress = borrowersAddress
  , Collateral.loan = loan
  , Collateral.loanAmnt = loanAmnt
  , Collateral.interest = interest
  , Collateral.interestAmnt = interestAmnt
  , Collateral.collateral = collateral
  -- Fix: carry the amounts over from the request instead of hard-coding
  -- 100/150.  All current fixtures use exactly those values, so existing
  -- tests behave identically, but requests with other terms now translate
  -- correctly instead of being silently rewritten.
  , Collateral.collateralAmnt = collateralAmnt
  , Collateral.loanDuration = loanDuration
  , Collateral.liquidateNft = liquidateNft
  -- Commission (in the protocol's percentage units) paid to the lender when
  -- the position is liquidated before the deadline passes.
  , Collateral.liquidationCommission = liquidationCommission
  , Collateral.requestExpiration = requestExpiration
  , Collateral.lenderNftTn = newTn
  , Collateral.lendDate = newMint
  }
-- | Build the collateral-script datum for an accepted debt request.  Unlike
-- 'getCollatDatumFromRequestDat', here the lender NFT name comes from the
-- original datum and the freshly minted name becomes the *borrower* NFT.
getCollatDatumFromDebtRequestDat :: DebtRequestDatum -> TokenName -> POSIXTime -> Collateral.CollateralDatum
getCollatDatumFromDebtRequestDat DebtRequestDatum{..} newTn newMint = Collateral.CollateralDatum
  { Collateral.borrowersNftTn = newTn
  , Collateral.borrowersAddress = borrowersAddress
  , Collateral.loan = loan
  , Collateral.loanAmnt = loanAmnt
  , Collateral.interest = interest
  , Collateral.interestAmnt = interestAmnt
  , Collateral.collateral = collateral
  -- Fix: carry the amounts over from the request instead of hard-coding
  -- 100/150.  All current fixtures use exactly those values, so existing
  -- tests behave identically, but requests with other terms now translate
  -- correctly instead of being silently rewritten.
  , Collateral.collateralAmnt = collateralAmnt
  , Collateral.loanDuration = loanDuration
  , Collateral.liquidateNft = liquidateNft
  -- Commission (in the protocol's percentage units) paid to the lender when
  -- the position is liquidated before the deadline passes.
  , Collateral.liquidationCommission = liquidationCommission
  , Collateral.requestExpiration = requestExpiration
  , Collateral.lenderNftTn = lenderNftTn
  , Collateral.lendDate = newMint
  }
-- | Token name of an AADA NFT, derived from the UTxO consumed at mint time:
-- sha2_256 over the output index byte prepended to the transaction id.
getAadaTokenName :: TxOutRef -> TokenName
getAadaTokenName ref = TokenName (INT.sha2_256 digestInput)
  where
    digestInput = INT.consByteString (txOutRefIdx ref) (getTxId (txOutRefId ref))
createLockFundsTx :: RepayInterval -> BorrowersAddressPkh -> TxOutRef -> UserSpend -> RequestExpirationDate -> LendDate -> LiquidationNftCs -> Tx
createLockFundsTx t pkh oref usp expiration mintDate oracle =
mconcat
[ userSpend usp
, payToScript
(requestTypedValidator getSc1Params)
(getTestDatum t (getAadaTokenName oref) oracle pkh expiration "" mintDate Nothing)
(fakeValue collateralCoin 100 <> adaValue 2)
]
getCancelRequestTx :: PubKeyHash -> Value -> RequestDatum -> TxOutRef -> TokenName -> Tx
getCancelRequestTx pkh val dat lockRef lenderTn =
mconcat
[ spendScript (requestTypedValidator getSc1Params) lockRef lenderTn dat
, payToPubKey pkh val
]
getTxIn :: UserSpend -> RequestDatum -> TxOutRef -> TokenName -> Tx
getTxIn usp dat scriptTxOut lenderTn =
mconcat
[ spendScript (requestTypedValidator getSc1Params) scriptTxOut lenderTn dat
, userSpend usp
]
getOracleNftTn :: TokenName
getOracleNftTn = TokenName "ff"
-- | Value of @n@ lender NFTs under currency symbol @cs@; the token name is
-- derived from the minting UTxO via 'getAadaTokenName'.
getLNftVal :: Integer -> CurrencySymbol -> TxOutRef -> Value
getLNftVal n cs utxo = Value.singleton cs (getAadaTokenName utxo) n

-- | Same construction for the borrower NFT policy; callers pass @(-1)@ in
-- burn transactions.
getBNftVal :: Integer -> CurrencySymbol -> TxOutRef -> Value
getBNftVal n cs utxo = Value.singleton cs (getAadaTokenName utxo) n
getMintBorrowerNftTx :: PubKeyHash -> TxOutRef -> Tx
getMintBorrowerNftTx pkh oref = addMintRedeemer getBorrowerNftPolicy oref $
mconcat
[ mintValue (AadaNft.policy False) (getBNftVal 1 cs oref)
, payToPubKey pkh (adaValue 1 <> getBNftVal 1 cs oref)
]
where
cs = scriptCurrencySymbol getBorrowerNftPolicy
-- getCancelRequestTx :: PubKeyHash -> Value -> RequestDatum -> TxOutRef -> Tx
getCancelRequestTx lockRef =
-- mconcat
-- [ spendScript (requestTypedValidator getSc1Params) lockRef 0 dat
, payToPubKey pkh val
-- ]
getTxOutLend :: PubKeyHash -> PubKeyHash -> Collateral.CollateralDatum -> TxOutRef -> Value -> Tx
getTxOutLend borrower lender dat utxo valToScript = addMintRedeemer getLenderNftPolicy utxo $
mconcat
[ mintValue getLenderNftPolicy (getLNftVal 1 getLenderNftCs utxo)
, payToScript
(Collateral.collateralTypedValidator getSc2Params)
dat
(fakeValue collateralCoin 100 <> adaValue 2 <> valToScript)
, payToPubKey borrower (fakeValue loanCoin 150 <> adaValue 2)
, payToPubKey lender (adaValue 2 <> getLNftVal 1 getLenderNftCs utxo)
]
getTxOutReturn :: Integer -> PubKeyHash -> TokenName -> Value -> TxOutRef -> Tx
getTxOutReturn interest borrower dat valToInt oref = addMintRedeemer getBorrowerNftPolicy oref $
mconcat
[ mintValue getBorrowerNftPolicy (getBNftVal (-1) getBorrowerNftCs oref)
, payToScript
(Interest.typedValidator (Interest.ContractInfo getLenderNftCs))
dat
(fakeValue loanCoin 150 <> fakeValue interestCoin interest <> adaValue 2 <> valToInt)
, payToPubKey borrower (fakeValue collateralCoin 100 <> adaValue 3)
]
getTxOutReturn' :: Integer -> PubKeyHash -> TokenName -> Value -> TxOutRef -> Tx
getTxOutReturn' interest borrower dat valToInt oref = addMintRedeemer getBorrowerNftPolicy oref $
mconcat
[ mintValue getBorrowerNftPolicy (getBNftVal (-1) getBorrowerNftCs oref)
, payToScript
(Interest.typedValidator (Interest.ContractInfo getLenderNftCs))
dat
(fakeValue loanCoin 150 <> fakeValue interestCoin interest <> adaValue 2 <> valToInt)
, payToPubKey borrower (fakeValue collateralCoin 100 <> adaValue 1)
]
getTxInFromCollateral :: [UserSpend] -> Collateral.CollateralDatum -> Integer -> TxOutRef -> Tx
getTxInFromCollateral usps dat rdm scriptTxOut =
mconcat
(spendScript (Collateral.collateralTypedValidator getSc2Params) scriptTxOut rdm dat : fmap userSpend usps)
getBurnBorrowerNftTx :: PubKeyHash -> TxOutRef -> UserSpend -> Tx
getBurnBorrowerNftTx pkh oref usp = addMintRedeemer getBorrowerNftPolicy oref $
mconcat
[ mintValue getBorrowerNftPolicy (getBNftVal (-1) getBorrowerNftCs oref)
, payToPubKey pkh (adaValue 1)
, userSpend usp
]
-- | Scenario: a borrower locks collateral with a loan request, then cancels
-- the request, burning the borrower NFT and recovering the collateral.
-- Returns True iff the cancel transaction is accepted.
--
-- Fix: the request UTxO was previously extracted with a partial
-- @let [(lockRef, _)] = utxos@ (and the borrower with @head users@); an
-- unexpected chain state now fails the property instead of crashing the
-- test runner with a pattern-match error.
borrowerCancelsLoan :: Run Bool
borrowerCancelsLoan = do
  users <- setupUsers
  case users of
    [] -> pure False
    (u1 : _) -> do
      let valToPay = fakeValue collateralCoin 100 <> adaValue 3
      sp <- spend u1 valToPay
      let oref = getHeadRef sp
          borrowerNftRef = oref
          tx = createLockFundsTx 0 u1 oref sp 0 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx u1 oref
      submitTx u1 tx
      utxos <- utxoAt $ requestAddress getSc1Params
      case utxos of
        [(lockRef, _)] -> do
          lockDat <- datumAt @RequestDatum lockRef
          case lockDat of
            Just dat -> do
              let valFromSc1 = fakeValue collateralCoin 100 <> adaValue 2
                  valFromUsr = adaValue 1 <> getBNftVal 1 getBorrowerNftCs borrowerNftRef
              spBurn <- spend u1 valFromUsr
              cancelTx <- signTx u1 $ getCancelRequestTx u1 valFromSc1 dat lockRef (getAadaTokenName lockRef) <> getBurnBorrowerNftTx u1 borrowerNftRef spBurn
              isRight <$> sendTx cancelTx
            Nothing -> pure False
        _ -> pure False
-- Create Loan Request tx
-- >>>>>>>>>>>>>>>> Tx 1 >>>>>>>>>>>>>>>>
--                                        ┌────────┐  n collateral + 2 ADA   ┌─────┐
--                                        │        ├────────────────────────▶│ SC1 │
--        n Collateral + 2 ADA + datum    │        │        datum            └─────┘
--  Borrower ────────────────────────────▶│        │
--                                        │        │  Borrower NFT + 1 ADA
--                                        │   Tx   ├────────────────────────▶ Borrower
--        1 ADA (for mint)                │        │
--  Borrower ────────────────────────────▶│        │
--                                        └────────┘
-- >>>>>>>>>>>>>>>> >>>>>>>>>>>>>>>>
--
-- Provide Loan tx
-- >>>>>>>>>>>>>>>> Tx 2 >>>>>>>>>>>>>>>>
--                              ┌────────┐  n collateral + Lender NFT + Time NFT  ┌─────┐
--                              │        ├───────────────────────────────────────▶│ SC2 │
--        n Loan + 2 ADA        │        │                  datum                 └─────┘
--  Lender ────────────────────▶│        │
--                              │        │  Lender NFT + 2 ADA
--        2 ADA (for mint)      │   Tx   ├───────────────────────────────────────▶ Lender
--  Lender ────────────────────▶│        │
--                              │        │
--  ┌─────┐  n Collateral + 2 ADA        │  Loan + 2 ADA
--  │ SC1 ├────────────────────▶│        ├───────────────────────────────────────▶ Borrower
--  └─────┘        datum        └────────┘
-- >>>>>>>>>>>>>>>> >>>>>>>>>>>>>>>>
--
-- bchUtxos :: !(Map TxOutRef TxOut)
-- | Scenario: the borrower repays the full loan plus the full 50-unit
-- interest after the repay deadline; the repayment must still be accepted.
--
-- Fix: the line binding @utxos@ at the request address had been corrupted
-- into its own type-signature comment; it is restored below (the very next
-- line pattern-matches on @utxos@, and every sibling test uses the same
-- @utxoAt $ requestAddress getSc1Params@ call).
returnFullLoan :: Run Bool
returnFullLoan = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params  -- utxoAt :: HasAddress addr => addr -> Run [(TxOutRef, TxOut)]
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      -- lender provides the loan
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      -- loan return phase
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin 50 <>
                    adaValue 1
      wait 2000
      intPayDate <- currentTime
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
                getTxOutReturn 50 borrower intDat (adaValueOf 0) borrowerNftRef
      logInfo $ "int pay date time: " ++ show intPayDate
      tx2 <- validateIn (interval 6000 intPayDate) tx2
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Negative scenario: the borrower tries to repay with only 25 interest
-- units when the full amount is due.  Run under 'mustFail' in the test list —
-- the repayment tx is expected to be rejected by the collateral validator.
returnNotEnoughInterest :: Run Bool
returnNotEnoughInterest = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      -- lender provides the loan
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      -- loan return phase: only 25 interest offered (insufficient)
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin 25 <>
                    adaValue 1
      intPayDate <- currentTime
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
                getTxOutReturn 25 borrower intDat (adaValueOf 0) borrowerNftRef
      tx2 <- validateIn (interval 6000 intPayDate) tx2
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Scenario: with a 20000-slot repay interval, the borrower repays roughly
-- halfway through and pays only 25 interest units (partial interest), which
-- is expected to be accepted because interest is pro-rated over time.
returnPartialLoan :: Run Bool
returnPartialLoan = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
      borrowerNftRef = oref
      repayint = 20000
      tx = createLockFundsTx repayint borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      -- lender provides the loan
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      realCurTime <- currentTime
      logInfo $ "current time1: " ++ show realCurTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      -- loan return phase: partial interest (25) is offered
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin 25 <>
                    adaValue 1
      wait 2000
      intPayDate <- currentTime
      logInfo $ "intPayDate: " ++ show intPayDate
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
                getTxOutReturn 25 borrower intDat (adaValueOf 0) borrowerNftRef
      tx2 <- validateIn (interval 6000 (intPayDate + 2000)) tx2
      wait 2000
      time <- currentTime
      logInfo $ "time before repaying: " ++ show time
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Lock 100 collateral plus 2 ada at the request script using the
-- 'getTestDatum2' datum variant (loan and interest share a currency symbol).
createLockFundsTx2 :: POSIXTime -> PubKeyHash -> TxOutRef -> UserSpend -> POSIXTime -> POSIXTime -> Tx
createLockFundsTx2 repayInt borrowerPkh nftRef usp expiration mintDate =
  userSpend usp <> lockAtRequestSc
  where
    oracleCs = scriptCurrencySymbol (OracleNft.policy "ff" "ff" "ff" "ff")
    requestDat = getTestDatum2 repayInt (getAadaTokenName nftRef) oracleCs borrowerPkh expiration "" mintDate Nothing
    lockAtRequestSc =
      payToScript
        (requestTypedValidator getSc1Params)
        requestDat
        (fakeValue collateralCoin 100 <> adaValue 2)
-- | Lend tx for the same-currency scenario: mint the lender NFT, move the
-- collateral into the collateral script with the converted datum, pay the
-- 100-unit loan to the borrower, and send the NFT plus 2 ada to the lender.
getTxOutLend2 :: PubKeyHash -> PubKeyHash -> Collateral.CollateralDatum -> TxOutRef -> Tx
getTxOutLend2 borrowerPkh lenderPkh collatDat nftRef =
  addMintRedeemer getLenderNftPolicy nftRef (mintLenderNft <> toCollateralSc <> toBorrower <> toLender)
  where
    lenderNft = getLNftVal 1 getLenderNftCs nftRef
    mintLenderNft = mintValue getLenderNftPolicy lenderNft
    toCollateralSc =
      payToScript
        (Collateral.collateralTypedValidator getSc2Params)
        collatDat
        (fakeValue collateralCoin 100 <> adaValue 2)
    toBorrower = payToPubKey borrowerPkh (fakeValue loanCoin 100 <> adaValue 2)
    toLender = payToPubKey lenderPkh (adaValue 2 <> lenderNft)
-- | Repayment tx for the same-currency scenario: burn the borrower NFT, pay
-- 125 loan units into the interest script, and return the collateral (plus
-- 3 ada) to the borrower.  The mint redeemer here is a plain unit Integer
-- redeemer (unlike 'getTxOutReturn', which passes the TxOutRef).
getTxOutReturn2 :: PubKeyHash -> TokenName -> TxOutRef -> Tx
getTxOutReturn2 borrowerPkh interestTn nftRef =
  addMintRedeemer getBorrowerNftPolicy unitRdm (burnBorrowerNft <> toInterestSc <> toBorrower)
  where
    unitRdm = Redeemer (PlutusTx.toBuiltinData (0 :: Integer))
    burnBorrowerNft = mintValue getBorrowerNftPolicy (getBNftVal (-1) getBorrowerNftCs nftRef)
    toInterestSc =
      payToScript
        (Interest.typedValidator (Interest.ContractInfo getLenderNftCs))
        interestTn
        (fakeValue loanCoin 125 <> adaValue 2)
    toBorrower = payToPubKey borrowerPkh (fakeValue collateralCoin 100 <> adaValue 3)
-- | Negative scenario (run under 'mustFail'): loan and interest use the same
-- currency symbol and the borrower offers a partial repayment (125 of the
-- same coin) after most of the repay interval has elapsed.
returnPartialLoanSameCs :: Run Bool
returnPartialLoanSameCs = do
  users <- setupUsers'
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let repayint = 20000
  let tx = createLockFundsTx2 repayint borrower oref sp 100000 0 <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      -- lender provides the loan
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 100 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend2 borrower lender convertedDat lockRef
      logInfo $ "current time1: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      -- loan return phase
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 125 <>
                    adaValue 2
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      logInfo $ "mint date: " <> show mintTime
      wait 16000
      intPayDate <- currentTime
      logInfo $ "pay date: " <> show intPayDate
      let intDat = Collateral.lenderNftTn convertedDat
          tx2 = getTxInFromCollateral [sp1, sp2] convertedDat 0 lockRef <>
                getTxOutReturn2 borrower intDat borrowerNftRef
      tx2 <- validateIn (interval 24000 intPayDate) tx2
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Negative scenario (run under 'mustFail'): the borrower tries to underpay
-- interest (only 5 units) by submitting the repayment in an unbounded
-- validity interval ('from 6000'), effectively forging the repayment date.
returnPartialLoanForgedMintDate :: Run Bool
returnPartialLoanForgedMintDate = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let repayint = 20000
  let tx = createLockFundsTx repayint borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      -- lender provides loan
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time1: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      -- loan return phase (redeemer 2, open-ended validity range)
      let interestAmount = 5
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin interestAmount <>
                    adaValue 1
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 2 lockRef <>
                getTxOutReturn interestAmount borrower intDat (adaValueOf 0) borrowerNftRef
      tx2 <- validateIn (from 6000) tx2
      wait 15000
      time <- currentTime
      logInfo $ "time before repaying: " ++ show time
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Negative scenario: after most of the 20000-slot repay interval has
-- elapsed the borrower offers only 25 interest units — less than the
-- pro-rated amount due at that point.
--
-- Fix: the line binding @utxos@ at the request address had been corrupted
-- into its own type-signature comment; restored below (the next line
-- pattern-matches on @utxos@ and all sibling tests use the same call).
returnPartialLoanLessThanItShoudInterestRepayed :: Run Bool
returnPartialLoanLessThanItShoudInterestRepayed = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let repayint = 20000
  let tx = createLockFundsTx repayint borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params  -- utxoAt :: HasAddress addr => addr -> Run [(TxOutRef, TxOut)]
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      -- lender provides loan
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "repay interval: " ++ show repayint
      logInfo $ "loan provided and timenft minted time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      -- loan return phase: insufficient pro-rated interest
      let interestAmount = 25
      logInfo $ "Interest amount paid: " ++ show interestAmount
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin interestAmount <>
                    adaValue 1
      wait 15000
      intPayDate <- currentTime
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
                getTxOutReturn interestAmount borrower intDat (adaValueOf 0) borrowerNftRef
      tx2 <- validateIn (interval 6000 intPayDate) tx2
      time <- currentTime
      logInfo $ "time before repaying: " ++ show time
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | A Value holding @amount@ oracle NFT tokens under the given policy symbol.
getOracleNftVal :: CurrencySymbol -> Integer -> Value
getOracleNftVal curSym amount = Value.singleton curSym getOracleNftTn amount
-- | Mint @amount@ oracle NFTs (policy keyed to the three oracle signers) and
-- lock them together with 2 ada at the test validator.
getMintOracleNftTx :: Integer -> PubKeyHash -> PubKeyHash -> PubKeyHash -> UserSpend -> Tx
getMintOracleNftTx amount signer1 signer2 signer3 usp =
  addMintRedeemer oraclePolicy unitRdm (mintNft <> lockAtTestSc <> userSpend usp)
  where
    oraclePolicy = OracleNft.policy getOracleNftTn signer1 signer2 signer3
    oracleCs = scriptCurrencySymbol oraclePolicy
    nftVal = getOracleNftVal oracleCs amount
    unitRdm = Redeemer (PlutusTx.toBuiltinData (0 :: Integer))
    mintNft = mintValue oraclePolicy nftVal
    lockAtTestSc = payToScript Helpers.TestValidator.typedValidator 0 (adaValue 2 <> nftVal)
-- | Positive case: the oracle NFT mints when all three oracle users sign.
mintOracleNft :: Run ()
mintOracleNft = do
  signers <- setupSimpleNUsers 3
  let [payer, signer2, signer3] = signers
  usp <- spend payer (adaValue 2)
  let unsigned = getMintOracleNftTx 1 payer signer2 signer3 usp
  -- collect all three required signatures before submitting
  fullySigned <- signTx payer unsigned >>= signTx signer2 >>= signTx signer3
  submitTx payer fullySigned
-- | Oracle NFT mint must fail when u2's signature is missing.
-- Fix: the line had been corrupted to @tx < - signTx u2 tx@ (invalid
-- Haskell); per the ShouldFail intent it is the deliberately-omitted
-- signature, restored as a comment.
mintOracleNftShouldFail2 :: Run ()
mintOracleNftShouldFail2 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  tx <- signTx u1 tx
  -- tx <- signTx u2 tx   -- deliberately omitted signature
  tx <- signTx u3 tx
  submitTx u1 tx
-- | Oracle NFT mint must fail when u3's signature is missing.
-- Fix: restored the corrupted @tx < - signTx u3 tx@ line as the
-- deliberately-omitted signature comment.
mintOracleNftShouldFail3 :: Run ()
mintOracleNftShouldFail3 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  tx <- signTx u1 tx
  tx <- signTx u2 tx
  -- tx <- signTx u3 tx   -- deliberately omitted signature
  submitTx u1 tx
-- | Oracle NFT mint must fail when only u3 signs.
-- Fix: restored the two corrupted @tx < - signTx …@ lines as the
-- deliberately-omitted signature comments.
mintOracleNftShouldFail4 :: Run ()
mintOracleNftShouldFail4 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  -- tx <- signTx u1 tx   -- deliberately omitted signature
  -- tx <- signTx u2 tx   -- deliberately omitted signature
  tx <- signTx u3 tx
  submitTx u1 tx
-- | Oracle NFT mint must fail when only u1 signs.
-- Fix: restored the two corrupted @tx < - signTx …@ lines as the
-- deliberately-omitted signature comments.
mintOracleNftShouldFail5 :: Run ()
mintOracleNftShouldFail5 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  tx <- signTx u1 tx
  -- tx <- signTx u2 tx   -- deliberately omitted signature
  -- tx <- signTx u3 tx   -- deliberately omitted signature
  submitTx u1 tx
-- | Oracle NFT mint must fail when only u2 signs.
-- Fix: restored the two corrupted @tx < - signTx …@ lines as the
-- deliberately-omitted signature comments.
mintOracleNftShouldFail6 :: Run ()
mintOracleNftShouldFail6 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  -- tx <- signTx u1 tx   -- deliberately omitted signature
  tx <- signTx u2 tx
  -- tx <- signTx u3 tx   -- deliberately omitted signature
  submitTx u1 tx
-- | Oracle NFT mint must fail when nobody signs.
-- Fix: restored the three corrupted @tx < - signTx …@ lines as the
-- deliberately-omitted signature comments; @tx@ stays the unsigned let-bound tx.
mintOracleNftShouldFail7 :: Run ()
mintOracleNftShouldFail7 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  -- tx <- signTx u1 tx   -- deliberately omitted signature
  -- tx <- signTx u2 tx   -- deliberately omitted signature
  -- tx <- signTx u3 tx   -- deliberately omitted signature
  submitTx u1 tx
-- | Scenario: a lender funds a request before its 100000 expiration date;
-- the lend tx must be accepted.
--
-- Fix: the line binding @utxos@ at the request address had been corrupted
-- into its own type-signature comment; restored below.
provideLoanOnTime :: Run Bool
provideLoanOnTime = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params  -- utxoAt :: HasAddress addr => addr -> Run [(TxOutRef, TxOut)]
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      pure True
    Nothing -> pure False
-- | Negative scenario: the request expires at time 0, so funding it in the
-- 2000-6000 validity window is past expiration (run under mustFail upstream).
--
-- Fix: the line binding @utxos@ at the request address had been corrupted
-- into its own type-signature comment; restored below.
provideLoanNotOnTime :: Run Bool
provideLoanNotOnTime = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let tx = createLockFundsTx 0 borrower oref sp 0 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params  -- utxoAt :: HasAddress addr => addr -> Run [(TxOutRef, TxOut)]
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      pure True
    Nothing -> pure False
-- | Spend the interest-script UTxO (redeemer 0, datum = lender NFT token
-- name) together with the lender's own spend.
getTxInFromInterestSc :: UserSpend -> TxOutRef -> TokenName -> Tx
getTxInFromInterestSc lenderSp interestRef interestTn =
  spendScript (Interest.typedValidator (Interest.ContractInfo getLenderNftCs)) interestRef 0 interestTn
    <> userSpend lenderSp
-- | Burn the lender NFT and pay the lender the principal (150 loan units),
-- the collected interest and 4 ada.
getTxOutFromInterestSc :: Integer -> PubKeyHash -> TxOutRef -> Tx
getTxOutFromInterestSc interestAmt lenderPkh nftRef =
  addMintRedeemer getLenderNftPolicy nftRef (burnLenderNft <> payoutToLender)
  where
    burnLenderNft = mintValue getLenderNftPolicy (getLNftVal (-1) getLenderNftCs nftRef)
    payoutToLender = payToPubKey lenderPkh (fakeValue loanCoin 150 <> fakeValue interestCoin interestAmt <> adaValue 4)
-- | Full happy-path lifecycle: borrower creates the request, lender funds it,
-- borrower repays principal plus full 50-unit interest, and finally the
-- lender redeems loan + interest from the interest script by burning the
-- lender NFT.  Returns True only when every phase succeeds.
happyPath :: Run Bool
happyPath = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let lockRef = fst . head $ utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      -- lender provides the loan
      curTime <- currentTime
      let mintTime = POSIXTime 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "ref: " ++ show lenderNftRef
      logInfo $ "hash: " ++ show (getAadaTokenName lenderNftRef)
      logInfo $ "mint time: " ++ show mintTime
      logInfo $ "curTime time: " ++ show curTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      -- loan return phase
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin 50 <>
                    adaValue 1
      wait 2000
      intPayDate <- currentTime
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
                getTxOutReturn 50 borrower intDat (adaValueOf 0) borrowerNftRef
      logInfo $ "int pay date time: " ++ show intPayDate
      tx2 <- validateIn (interval 5000 intPayDate) tx2
      submitTx lender tx2
      -- retrieve loan and interest phase
      utxos <- utxoAt (Interest.interestAddress (Interest.ContractInfo getLenderNftCs))
      let lenderPay = adaValue 2 <> getLNftVal 1 getLenderNftCs lenderNftRef
      sp <- spend lender lenderPay
      case utxos of
        [(lockRef, _)] -> do
          let tx = getTxInFromInterestSc sp lockRef intDat <>
                   getTxOutFromInterestSc 50 lender lenderNftRef
          submitTx lender tx
          pure True
        _ -> pure False
    Nothing -> pure False
-- | Inputs for the liquidation tx: spend the collateral-script UTxO plus the
-- lender's two spends (ada and the lender NFT).
getTxInFromCollateraLiq :: UserSpend -> UserSpend -> Collateral.CollateralDatum -> Integer -> TxOutRef -> Tx
getTxInFromCollateraLiq lenderSpA lenderSpB collatDat redeemer collatRef =
  spendScript (Collateral.collateralTypedValidator getSc2Params) collatRef redeemer collatDat
    <> userSpend lenderSpA
    <> userSpend lenderSpB
-- | Mint @n@ oracle NFTs for the liquidation scenario and lock them (plus
-- 2 ada) at the test validator.  Unlike 'getMintOracleNftTx' this attaches
-- no mint redeemer — the caller (see liquidateBorrower) supplies it.
--
-- Fix: removed the unused local binding @valh = validatorHash …@.
getMintOracleNftTxLiq :: Integer -> PubKeyHash -> PubKeyHash -> PubKeyHash -> Tx
getMintOracleNftTxLiq n pkh1 pkh2 pkh3 =
  mconcat
    [ mintValue mp (getOracleNftVal cs n)
    , payToScript Helpers.TestValidator.typedValidator
        0
        (adaValue 2 <> getOracleNftVal cs n)
    ]
  where
    mp = OracleNft.policy getOracleNftTn pkh1 pkh2 pkh3
    cs = scriptCurrencySymbol mp
-- | Liquidation payout: burn the lender NFT and send the seized collateral
-- (plus 2 ada) to the lender.  The burn redeemer is attached by the caller.
getTxOutLiquidate :: PubKeyHash -> TxOutRef -> Tx
getTxOutLiquidate lenderPkh nftRef =
  burnLenderNft <> collateralToLender
  where
    burnLenderNft = mintValue getLenderNftPolicy (getLNftVal (-1) getLenderNftCs nftRef)
    collateralToLender = payToPubKey lenderPkh (fakeValue collateralCoin 100 <> adaValue 2)
-- | Scenario: the loan is liquidated — the three oracle users sign the
-- oracle-NFT mint and the lender seizes the collateral.
--
-- Fixes for corrupted lines: the block bound @liquidate@ but then signed and
-- submitted an unbound @tx@, while @omp@/@ordm@ were bound and never used.
-- Restored @tx = addMintRedeemer omp ordm liquidate@ (attaching the oracle
-- mint redeemer the liquidation tx needs) — NOTE(review): confirm against
-- upstream.  Two garbled debug @logInfo@ lines are restored as comments.
liquidateBorrower :: Run Bool
liquidateBorrower = do
  -- setup
  logInfo "setup"
  users1 <- setupSimpleNUsers 3
  users2 <- setupUsers
  let borrower = head users2
      lender = last users2
  let [oracle1, oracle2, oracle3] = users1
  -- create loan request phase
  logInfo "create loan request"
  let valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let omp = OracleNft.policy getOracleNftTn oracle1 oracle2 oracle3
      ordm = Redeemer (PlutusTx.toBuiltinData (0 :: Integer))
  let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol omp) <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  -- provide loan phase
  logInfo "provide loan phase"
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 12000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time: " ++ show mintTime
      realCurTime <- currentTime
      logInfo $ "real current time: " <> show realCurTime
      tx <- validateIn (interval 7000 11000) tx
      submitTx lender tx
      -- loan liquidate phase
      logInfo "liquidate phase"
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      lenderSpend1 <- spend lender (adaValue 2)
      lenderSpend2 <- spend lender (getLNftVal 1 getLenderNftCs lenderNftRef)
      let liquidate = getTxInFromCollateraLiq lenderSpend1 lenderSpend2 convertedDat 0 lockRef <>
                      getMintOracleNftTxLiq 1 oracle1 oracle2 oracle3 <>
                      getTxOutLiquidate lender lenderNftRef
      let tx = addMintRedeemer omp ordm liquidate
      wait 2000
      time <- currentTime
      logInfo $ "current time: " ++ show time
      -- logInfo $ "debug: " <> show tx
      tx <- signTx oracle1 tx
      tx <- signTx oracle2 tx
      tx <- signTx oracle3 tx
      tx <- validateIn (interval 9000 99999) tx
      -- logInfo $ "debug liquidate: " <> show tx
      submitTx lender tx
      pure True
    Nothing -> pure False
-- | DoS scenario: the lender stuffs the collateral-script output with many
-- junk token values ('generateFakeValues'' lenderDosAmount), testing whether
-- value bloat can block the borrower later.
lenderDosBorrower :: Run Bool
lenderDosBorrower = do
  users <- setupUsers''
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4 <> generateFakeValues' lenderDosAmount
      sp <- spend lender valForLenderToSpend
      -- the junk values ride along into the collateral-script output
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (generateFakeValues' lenderDosAmount)
      logInfo $ "current time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      pure True
    Nothing -> pure False
-- | DoS scenario mirroring 'lenderDosBorrower': the borrower pads the
-- repayment to the interest script with many junk token values
-- ('generateFakeValues'' borrowerDosAmount).
borrowerDosLender :: Run Bool
borrowerDosLender = do
  users <- setupUsers'''
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      -- lender provides the loan
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      -- loan return phase: junk values added to the interest payment
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin 50 <>
                    adaValue 1 <>
                    generateFakeValues' borrowerDosAmount
          -- adaValue 1
      wait 2000
      intPayDate <- currentTime
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
                getTxOutReturn 50 borrower intDat (generateFakeValues' borrowerDosAmount) borrowerNftRef
      logInfo $ "int pay date time: " ++ show intPayDate
      tx2 <- validateIn (interval 6000 intPayDate) tx2
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Lock 150 loan units plus 2 ada at the debt-request script — the lender
-- offers funds up front and waits for a borrower to take the loan.
createDebtRequestTx :: RepayInterval -> BorrowersAddressPkh -> TxOutRef -> UserSpend -> RequestExpirationDate -> LendDate -> LiquidationNftCs -> Tx
createDebtRequestTx repayInt borrowerPkh nftRef usp expiration mintDate oracleCs =
  userSpend usp <> lockAtDebtRequestSc
  where
    debtDat = getTestDatum' repayInt "" oracleCs borrowerPkh expiration (getAadaTokenName nftRef) mintDate Nothing
    lockAtDebtRequestSc =
      payToScript
        (debtRequestTypedValidator getSc1Params')
        debtDat
        (fakeValue loanCoin 150 <> adaValue 2)
-- | Mint one lender NFT for @oref@ and send it (with 1 ada) to the lender.
-- NOTE(review): the mint uses @AadaNft.policy True@ while the redeemer and
-- currency symbol use @getLenderNftPolicy@ — presumably the same policy
-- (lender = True); confirm they cannot diverge.
getMintLenderNftTx :: PubKeyHash -> TxOutRef -> Tx
getMintLenderNftTx pkh oref = addMintRedeemer getLenderNftPolicy oref $
  mconcat
    [ mintValue (AadaNft.policy True) (getLNftVal 1 cs oref)
    , payToPubKey pkh (adaValue 1 <> getLNftVal 1 cs oref)
    ]
  where
    cs = scriptCurrencySymbol getLenderNftPolicy
-- | Spend the debt-request UTxO with the given borrower redeemer, together
-- with the borrower's own spend.
getDebtRequestTxIn :: UserSpend -> DebtRequestDatum -> TxOutRef -> DebtRequestRedeemer -> Tx
getDebtRequestTxIn usp debtDat debtRef borrowerRdm =
  spendScript (debtRequestTypedValidator getSc1Params') debtRef borrowerRdm debtDat
    <> userSpend usp
-- | Borrower side of taking a pre-funded debt request: mint the borrower
-- NFT, lock the collateral (plus any extra value) at the collateral script,
-- and send the 150-unit loan plus the NFT to the borrower.
getTxOutBorrow :: PubKeyHash -> Collateral.CollateralDatum -> TxOutRef -> Value -> Tx
getTxOutBorrow borrowerPkh collatDat nftRef extraVal =
  addMintRedeemer getBorrowerNftPolicy nftRef (mintBorrowerNft <> toCollateralSc <> toBorrower)
  where
    borrowerNft = getBNftVal 1 getBorrowerNftCs nftRef
    mintBorrowerNft = mintValue getBorrowerNftPolicy borrowerNft
    toCollateralSc =
      payToScript
        (Collateral.collateralTypedValidator getSc2Params)
        collatDat
        (fakeValue collateralCoin 100 <> adaValue 2 <> extraVal)
    toBorrower = payToPubKey borrowerPkh (fakeValue loanCoin 150 <> adaValue 2 <> borrowerNft)
-- | Debt-request flow: the LENDER creates the request (locking the loan and
-- minting the lender NFT), then the borrower takes the loan by locking
-- collateral and minting the borrower NFT with the TakeLoan redeemer.
debtRequestTest :: Run Bool
debtRequestTest = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue loanCoin 150 <> adaValue 3
  sp <- spend lender valToPay
  let lenderNftRef = getHeadRef sp
  let tx = createDebtRequestTx 0 borrower lenderNftRef sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff")
           <> getMintLenderNftTx lender lenderNftRef
  submitTx lender tx
  utxos <- utxoAt $ debtRequestAddress getSc1Params'
  let lockRef = fst . head $ utxos
  let borrowerNftRef = lockRef
  lockDat <- datumAt @DebtRequestDatum lockRef
  case lockDat of
    Just dat -> do
      let convertedDat = getCollatDatumFromDebtRequestDat dat (getAadaTokenName borrowerNftRef) 2000
          valForBorrowerToSpend = fakeValue collateralCoin 100 <> adaValue 2
      sp <- spend borrower valForBorrowerToSpend
      -- borrower takes the loan: spend the request, lock collateral
      let tx = getDebtRequestTxIn sp dat lockRef (TakeLoan (getAadaTokenName borrowerNftRef))
               <> getTxOutBorrow borrower convertedDat lockRef (adaValueOf 0)
      tx <- validateIn (interval 2000 6000) tx
      submitTx borrower tx
      pure True
    Nothing -> pure False
testLimits (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Happy path" id happyPath -- (happyPath >> logError "show stats")
( liquidateBorrower > > logError " show stats " )
(Just . StakingHash . PubKeyCredential . PubKeyHash $ "ff")
amount of collateral
How much % borrower will pay for lender when liquidated (before time passes)
(Just . StakingHash . PubKeyCredential . PubKeyHash $ "ff")
amount of collateral
How much % borrower will pay for lender when liquidated (before time passes)
(Just . StakingHash . PubKeyCredential . PubKeyHash $ "ff")
amount of collateral
How much % borrower will pay for lender when liquidated (before time passes)
amount of collateral
amount of collateral
getCancelRequestTx :: PubKeyHash -> Value -> RequestDatum -> TxOutRef -> Tx
mconcat
[ spendScript (requestTypedValidator getSc1Params) lockRef 0 dat
]
>>>>>>>>>>>>>>>> Tx 1 >>>>>>>>>>>>>>>>
│ ├─────────────────────────────▶│SC1│
Borrower ──────────────────────────▶│ │
│ │
Borrower ──────────────────────────▶│ │
│ │
>>>>>>>>>>>>>>>> >>>>>>>>>>>>>>>>
Provide Loan tx
>>>>>>>>>>>>>>>> Tx 2 >>>>>>>>>>>>>>>>
│ ├───────────────────────────────────────▶│SC2│
Lender ─────────────────────▶│ │
Lender ─────────────────────▶│ │
│ │
>>>>>>>>>>>>>>>> >>>>>>>>>>>>>>>>
loan return phase
loan return phase
loan return phase
loan return phase
lender provides loan
loan return phase
lender provides loan
loan return phase
loan return phase
retrieve loan and interest phase
setup
create loan request phase
provide loan phase
loan liquidate phase
loan return phase
adaValue 1 | # LANGUAGE NumericUnderscores #
# LANGUAGE TypeApplications #
# LANGUAGE RecordWildCards #
# LANGUAGE UndecidableInstances #
# OPTIONS_GHC -Wno - incomplete - uni - patterns #
module Spec.Test where
import Data.Either
import Prelude
import Test.Tasty
import Plutus.V1.Ledger.Api
import Request
import DebtRequest
import qualified Collateral
import qualified Interest
import qualified AadaNft
import qualified OracleNft
import Plutus.Test.Model
import Ledger.Address (PaymentPubKeyHash(..))
import Ledger (validatorHash, scriptCurrencySymbol, interval)
import Ledger.Value as Value
import PlutusTx
import qualified PlutusTx.Builtins.Internal as INT
import Collateral (CollateralDatum (loanDuration))
import Control.Monad.State.Strict
import Helpers.TestValidator
import qualified Data.ByteString.UTF8 as BSC
import Plutus.V1.Ledger.Ada (adaValueOf)
-- | End-to-end scenario tests for the lending protocol.  Every entry runs on
-- a fresh mock chain seeded with 10M ada plus the borrower/lender fixtures;
-- 'mustFail' wraps the scenarios the on-chain validators must reject.
mainTests :: BchConfig -> TestTree
mainTests cfg =
  testGroup
    "Main tests"
    [ testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Debt request" debtRequestTest
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Happy path" happyPath
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower cancels loan test" borrowerCancelsLoan
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns full interest when loan return time has passed" returnFullLoan
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns less than it should then full time has passed" (mustFail returnNotEnoughInterest)
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns loan when half the time passed returning less than full interest" returnPartialLoan
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns loan when half the time passed returning less than full interest with same currency" (mustFail returnPartialLoanSameCs)
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns less interest than it should because of forged mintDate" (mustFail returnPartialLoanForgedMintDate)
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Borrower returns less interest than it should" (mustFail returnPartialLoanLessThanItShoudInterestRepayed)
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "test loan return expiration date. Loan request not-expired" provideLoanOnTime
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "test loan return expiration date. Loan request expired" (mustFail provideLoanNotOnTime)
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "liquidate borrower" liquidateBorrower
      -- DoS scenarios pad one party's wallet with junk tokens (see *InitialFunds'')
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds'') cfg "Lender dos borrower" (mustFail lenderDosBorrower)
    , testNoErrors (adaValue 10_000_000 <> borrowerInitialFunds'' <> lenderInitialFunds) cfg "Borrower dos lender" (mustFail borrowerDosLender)
    ]
-- | Oracle-NFT minting policy tests: minting succeeds only with all three
-- required signatures; each failing case omits at least one signer.
-- NOTE(review): 'mintOracleNftShouldFail4' is listed twice — one of the two
-- entries is probably meant to reference a different scenario; confirm.
mintOracleNftTests :: BchConfig -> TestTree
mintOracleNftTests cfg =
  testGroup
    "Mint oracle nft tests"
    [ testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft" mintOracleNft
    , testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail2)
    , testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail3)
    , testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail4)
    , testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail4)
    , testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail5)
    , testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail6)
    , testNoErrors (adaValue 10_000_000) cfg "test mint oracle nft without one signature" (mustFail mintOracleNftShouldFail7)
    ]
-- | Transaction size / execution-limit checks.
-- NOTE(review): in this copy of the file the list entries had lost their
-- comment markers (and the first, active entry was garbled); restored below
-- from the surrounding evidence — verify against the canonical source.
testSize :: BchConfig -> TestTree
testSize cfg =
  testGroup
    "tests to check transaction sizes"
    [ testLimits (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds) cfg "Happy path" id happyPath -- (happyPath >> logError "show stats")
    -- , testLimits (adaValue 10_000_000 <> borrowerInitialFunds <> lenderInitialFunds'') cfg "Lender dos borrower" id (lenderDosBorrower >> logError "show stats")
    -- , testLimits (adaValue 10_000_000 <> borrowerInitialFunds'' <> lenderInitialFunds) cfg "Borrower dos lender" id (borrowerDosLender >> logError "show stats")
    ]
-- Readability aliases used throughout the test helpers; they carry no extra
-- type safety (plain synonyms), only documentation value.
type RepayInterval = POSIXTime          -- how long the borrower has to repay
type RequestExpirationDate = POSIXTime  -- deadline for a lender to take the request
type LendDate = POSIXTime               -- moment the loan was provided
type BorrowerTokenName = TokenName
type LenderTokenName = TokenName
type BorrowersAddressPkh = PubKeyHash
type LiquidationNftCs = CurrencySymbol
-- TODO: move to utils section later
-- | Build a 'Value' containing the given amount of ada (lovelace units).
adaValue :: Integer -> Value
adaValue amt = singleton adaSymbol adaToken amt
-- | Create the standard borrower/lender pair (in that order).
setupUsers :: Run [PubKeyHash]
setupUsers = mapM newUser [borrowerInitialFunds, lenderInitialFunds]

-- | Variant where the borrower also holds loan-coin (same-currency scenario).
setupUsers' :: Run [PubKeyHash]
setupUsers' = mapM newUser [borrowerInitialFunds', lenderInitialFunds']

-- | Variant with a junk-token-padded lender wallet (lender DoS scenario).
setupUsers'' :: Run [PubKeyHash]
setupUsers'' = mapM newUser [borrowerInitialFunds, lenderInitialFunds'']

-- | Variant with a junk-token-padded borrower wallet (borrower DoS scenario).
setupUsers''' :: Run [PubKeyHash]
setupUsers''' = mapM newUser [borrowerInitialFunds'', lenderInitialFunds]
-- | Create @count@ users, each funded with 1000 ada and nothing else.
setupSimpleNUsers :: Int -> Run [PubKeyHash]
setupSimpleNUsers count = replicateM count (newUser (adaValue 1000))
-- TODO: could this be done better?
-- | Extract the currency symbol from a test coin's asset class.
fakeCoinCs :: FakeCoin -> CurrencySymbol
fakeCoinCs fc = sym
  where
    AssetClass (sym, _) = fakeCoin fc
-- | Fake asset posted as collateral in the test scenarios.
collateralCoin :: FakeCoin
collateralCoin = FakeCoin "collateral-coin-CONY"

-- | Fake asset lent out by the lender.
loanCoin :: FakeCoin
loanCoin = FakeCoin "loan-coin-CONYMONY"

-- | Fake asset the borrower pays interest in.
interestCoin :: FakeCoin
interestCoin = FakeCoin "interest-coin-MONY"
-- | Produce @n@ distinct single-unit junk token values, one per token name
-- drawn from the letters 'a', 'b', 'c', ...  Used to bloat wallets in the
-- DoS scenarios.
generateFakeValues :: Int -> [Value]
generateFakeValues n =
  [ fakeValue (FakeCoin (toBuiltin (BSC.fromChar c))) 1 | c <- take n ['a' ..] ]
-- | Same as 'generateFakeValues' but folded into a single combined 'Value'.
generateFakeValues' :: Int -> Value
generateFakeValues' = mconcat . generateFakeValues
-- | Standard borrower wallet: collateral to lock, interest to pay, plus ada.
borrowerInitialFunds :: Value
borrowerInitialFunds =
  mconcat [fakeValue collateralCoin 100, fakeValue interestCoin 50, adaValue 100]

-- | Standard lender wallet: enough loan-coin to fund one 150-unit loan.
lenderInitialFunds :: Value
lenderInitialFunds = mconcat [fakeValue loanCoin 150, adaValue 100]

-- | Borrower wallet for the same-currency scenario (holds loan-coin instead
-- of interest-coin).
borrowerInitialFunds' :: Value
borrowerInitialFunds' =
  mconcat [fakeValue collateralCoin 100, fakeValue loanCoin 50, adaValue 100]

-- | Lender wallet for the same-currency scenario.
lenderInitialFunds' :: Value
lenderInitialFunds' = mconcat [fakeValue loanCoin 100, adaValue 100]
-- | Number of distinct junk tokens used to pad the lender's wallet in the
-- lender-DoS scenario.
-- NOTE(review): the value binding for 'lenderDosAmount' is missing from this
-- copy and the stray line below looks like a comment that lost its "--"
-- marker; restore the numeric binding from the canonical source.
lenderDosAmount :: Int
this is actually the limit when tx can go in , but then ca n't go out
-- | Number of distinct junk tokens used to pad the borrower's wallet in the
-- borrower-DoS scenario.
borrowerDosAmount :: Int
borrowerDosAmount = 36

-- | Lender fixture padded with junk tokens (lender-DoS scenario).
lenderInitialFunds'' :: Value
lenderInitialFunds'' = lenderInitialFunds <> generateFakeValues' lenderDosAmount

-- | Borrower fixture padded with junk tokens (borrower-DoS scenario).
borrowerInitialFunds'' :: Value
borrowerInitialFunds'' = borrowerInitialFunds <> generateFakeValues' borrowerDosAmount
-- | Currency symbol of the lender-side AADA NFT policy.
getLenderNftCs :: CurrencySymbol
getLenderNftCs = scriptCurrencySymbol getLenderNftPolicy

-- | Currency symbol of the borrower-side AADA NFT policy.
getBorrowerNftCs :: CurrencySymbol
getBorrowerNftCs = scriptCurrencySymbol getBorrowerNftPolicy

-- The Bool argument to 'AadaNft.policy' selects the variant:
-- True = lender NFT, False = borrower NFT.
getLenderNftPolicy :: MintingPolicy
getLenderNftPolicy = AadaNft.policy True

getBorrowerNftPolicy :: MintingPolicy
getBorrowerNftPolicy = AadaNft.policy False
-- | Parameters for the request validator (SC1): both NFT currency symbols
-- plus the address of the collateral script requests are forwarded to.
getSc1Params :: Request.ContractInfo
getSc1Params = Request.ContractInfo {
    Request.lenderNftCs = getLenderNftCs
  , Request.borrowersNftCs = getBorrowerNftCs
  , Request.collateralSc = Address (ScriptCredential (validatorHash $ Collateral.validator getSc2Params)) Nothing
  }

-- | Same wiring for the debt-request validator variant.
getSc1Params' :: DebtRequest.ContractInfo
getSc1Params' = DebtRequest.ContractInfo {
    DebtRequest.lenderNftCs = getLenderNftCs
  , DebtRequest.borrowersNftCs = getBorrowerNftCs
  , DebtRequest.collateralSc = Address (ScriptCredential (validatorHash $ Collateral.validator getSc2Params)) Nothing
  }

-- | Parameters for the collateral validator (SC2), including the interest
-- script address repayments flow into.
getSc2Params :: Collateral.ContractInfo
getSc2Params = Collateral.ContractInfo {
    Collateral.lenderNftCs = getLenderNftCs
  , Collateral.borrowersNftCs = getBorrowerNftCs
  , Collateral.interestSc = Address (ScriptCredential (validatorHash (Interest.validator (Interest.ContractInfo getLenderNftCs)))) Nothing
  , Collateral.minInterestFeePercentage = 200000
  }
-- | Build the standard test 'RequestDatum': 150 loan-coin requested against
-- 100 collateral-coin with 50 interest-coin owed.
-- NOTE(review): the 'pkh' and 'staking' arguments are not referenced in the
-- record below — a borrowers-address field appears to have been lost from
-- this copy of the file; confirm against the canonical source.  The bare
-- "Colalteral factor" lines had lost their "--" markers and are restored as
-- comments here.
getTestDatum :: RepayInterval -> BorrowerTokenName -> LiquidationNftCs -> BorrowersAddressPkh -> RequestExpirationDate -> LenderTokenName -> LendDate -> Maybe StakingCredential -> RequestDatum
getTestDatum returnt bNftTn liqNft pkh expiration ltn t staking = RequestDatum
  { borrowersNftTn = bNftTn
  , loan = assetClass (fakeCoinCs loanCoin) "loan-coin-CONYMONY"
  , loanAmnt = 150
  , interest = assetClass (fakeCoinCs interestCoin) "interest-coin-MONY"
  , interestAmnt = 50
  , collateral = assetClass (fakeCoinCs collateralCoin) "collateral-coin-CONY"
  , loanDuration = returnt
  , liquidateNft = liqNft
  -- Colalteral factor used for liquidation
  , requestExpiration = expiration
  , lenderNftTn = ltn
  , lendDate = t
  }

-- | Same shape as 'getTestDatum' but for the debt-request flow.
getTestDatum' :: RepayInterval -> BorrowerTokenName -> LiquidationNftCs -> BorrowersAddressPkh -> RequestExpirationDate -> LenderTokenName -> LendDate -> Maybe StakingCredential -> DebtRequestDatum
getTestDatum' returnt bNftTn liqNft pkh expiration ltn t staking = DebtRequestDatum
  { borrowersNftTn = bNftTn
  , loan = assetClass (fakeCoinCs loanCoin) "loan-coin-CONYMONY"
  , loanAmnt = 150
  , interest = assetClass (fakeCoinCs interestCoin) "interest-coin-MONY"
  , interestAmnt = 50
  , collateral = assetClass (fakeCoinCs collateralCoin) "collateral-coin-CONY"
  , loanDuration = returnt
  , liquidateNft = liqNft
  -- Colalteral factor used for liquidation
  , requestExpiration = expiration
  , lenderNftTn = ltn
  , lendDate = t
  }

-- | Variant where loan and interest share the same currency (100 loan-coin
-- borrowed, 50 loan-coin interest) — used by the same-currency scenario.
getTestDatum2 :: RepayInterval -> BorrowerTokenName -> LiquidationNftCs -> BorrowersAddressPkh -> RequestExpirationDate -> LenderTokenName -> LendDate -> Maybe StakingCredential -> RequestDatum
getTestDatum2 returnt bNftTn liqNft pkh expiration ltn t staking = RequestDatum
  { borrowersNftTn = bNftTn
  , loan = assetClass (fakeCoinCs loanCoin) "loan-coin-CONYMONY"
  , loanAmnt = 100
  , interest = assetClass (fakeCoinCs loanCoin) "loan-coin-CONYMONY"
  , interestAmnt = 50
  , collateral = assetClass (fakeCoinCs collateralCoin) "collateral-coin-CONY"
  , loanDuration = returnt
  , liquidateNft = liqNft
  -- Colalteral factor used for liquidation
  , requestExpiration = expiration
  , lenderNftTn = ltn
  , lendDate = t
  }
-- | Convert a taken 'RequestDatum' into the 'Collateral.CollateralDatum'
-- locked at SC2: the lender NFT name is stamped with @newTn@ and the lend
-- date with @newMint@; all other fields carry over.  (The bare
-- "Colalteral factor" lines had lost their "--" markers; restored as
-- comments.)
getCollatDatumFromRequestDat :: RequestDatum -> TokenName -> POSIXTime -> Collateral.CollateralDatum
getCollatDatumFromRequestDat rqDat@RequestDatum{..} newTn newMint = Collateral.CollateralDatum
  { Collateral.borrowersNftTn = borrowersNftTn
  , Collateral.borrowersAddress = borrowersAddress
  , Collateral.loan = loan
  , Collateral.loanAmnt = loanAmnt
  , Collateral.interest = interest
  , Collateral.interestAmnt = interestAmnt
  , Collateral.collateral = collateral
  , Collateral.loanDuration = loanDuration
  , Collateral.liquidateNft = liquidateNft
  -- Colalteral factor used for liquidation
  , Collateral.liquidationCommission = 150
  , Collateral.requestExpiration = requestExpiration
  , Collateral.lenderNftTn = newTn
  , Collateral.lendDate = newMint
  }

-- | Debt-request flow: here the borrower joins last, so it is the borrower
-- NFT name that is stamped with @newTn@ while the lender NFT name carries
-- over from the datum.
getCollatDatumFromDebtRequestDat :: DebtRequestDatum -> TokenName -> POSIXTime -> Collateral.CollateralDatum
getCollatDatumFromDebtRequestDat rqDat@DebtRequestDatum{..} newTn newMint = Collateral.CollateralDatum
  { Collateral.borrowersNftTn = newTn
  , Collateral.borrowersAddress = borrowersAddress
  , Collateral.loan = loan
  , Collateral.loanAmnt = loanAmnt
  , Collateral.interest = interest
  , Collateral.interestAmnt = interestAmnt
  , Collateral.collateral = collateral
  , Collateral.loanDuration = loanDuration
  , Collateral.liquidateNft = liquidateNft
  -- Colalteral factor used for liquidation
  , Collateral.liquidationCommission = 150
  , Collateral.requestExpiration = requestExpiration
  , Collateral.lenderNftTn = lenderNftTn
  , Collateral.lendDate = newMint
  }
-- | Derive the unique AADA NFT token name from a UTxO reference:
-- sha2_256 of the output index prepended to the tx id bytes.
getAadaTokenName :: TxOutRef -> TokenName
getAadaTokenName utxo = TokenName (INT.sha2_256 seed)
  where
    seed = INT.consByteString (txOutRefIdx utxo) (getTxId (txOutRefId utxo))
-- | Lock 100 collateral-coin (+2 ada) at the request script under the
-- standard test datum, funded from the user's spend.
createLockFundsTx :: RepayInterval -> BorrowersAddressPkh -> TxOutRef -> UserSpend -> RequestExpirationDate -> LendDate -> LiquidationNftCs -> Tx
createLockFundsTx t pkh oref usp expiration mintDate oracle =
  userSpend usp <> requestOutput
  where
    requestOutput =
      payToScript
        (requestTypedValidator getSc1Params)
        (getTestDatum t (getAadaTokenName oref) oracle pkh expiration "" mintDate Nothing)
        (fakeValue collateralCoin 100 <> adaValue 2)
-- | Cancellation path: spend the locked request UTxO (redeemer is the
-- lender token name) and pay its value back to @pkh@.
getCancelRequestTx :: PubKeyHash -> Value -> RequestDatum -> TxOutRef -> TokenName -> Tx
getCancelRequestTx pkh val dat lockRef lenderTn =
  spendScript (requestTypedValidator getSc1Params) lockRef lenderTn dat
    <> payToPubKey pkh val
-- | Input side of the lend transaction: consume the request UTxO together
-- with the lender's own funds.
getTxIn :: UserSpend -> RequestDatum -> TxOutRef -> TokenName -> Tx
getTxIn usp dat scriptTxOut lenderTn =
  spendScript (requestTypedValidator getSc1Params) scriptTxOut lenderTn dat
    <> userSpend usp
-- | Fixed token name used for the oracle NFT throughout these tests.
getOracleNftTn :: TokenName
getOracleNftTn = TokenName "ff"
-- | Value of @n@ lender NFTs under symbol @cs@, named after @utxo@.
getLNftVal :: Integer -> CurrencySymbol -> TxOutRef -> Value
getLNftVal amt cs utxo = Value.singleton cs (getAadaTokenName utxo) amt

-- | Borrower NFT value; the construction is identical to the lender one,
-- only the symbol supplied by callers differs.
getBNftVal :: Integer -> CurrencySymbol -> TxOutRef -> Value
getBNftVal = getLNftVal
-- | Mint one borrower NFT (name derived from @oref@) and hand it to the
-- borrower together with 1 ada; the consumed UTxO ref is the mint redeemer.
getMintBorrowerNftTx :: PubKeyHash -> TxOutRef -> Tx
getMintBorrowerNftTx pkh oref =
  addMintRedeemer getBorrowerNftPolicy oref (mintPart <> payPart)
  where
    nft = getBNftVal 1 (scriptCurrencySymbol getBorrowerNftPolicy) oref
    mintPart = mintValue getBorrowerNftPolicy nft
    payPart = payToPubKey pkh (adaValue 1 <> nft)
-- getCancelRequestTx lockRef =
--   , payToPubKey pkh val
-- | Output side of loan provision: mint the lender NFT (named after the
-- consumed request UTxO @utxo@), lock the collateral (+ @valToScript@
-- extras) at the collateral script, pay the 150-unit loan to the borrower
-- and the NFT to the lender.
getTxOutLend :: PubKeyHash -> PubKeyHash -> Collateral.CollateralDatum -> TxOutRef -> Value -> Tx
getTxOutLend borrower lender dat utxo valToScript = addMintRedeemer getLenderNftPolicy utxo $
  mconcat
    [ mintValue getLenderNftPolicy (getLNftVal 1 getLenderNftCs utxo)
    , payToScript
        (Collateral.collateralTypedValidator getSc2Params)
        dat
        (fakeValue collateralCoin 100 <> adaValue 2 <> valToScript)
    , payToPubKey borrower (fakeValue loanCoin 150 <> adaValue 2)
    , payToPubKey lender (adaValue 2 <> getLNftVal 1 getLenderNftCs utxo)
    ]

-- | Output side of repayment: burn the borrower NFT, pay the loan plus
-- @interest@ interest-coin (and @valToInt@ extras) to the interest script,
-- and return the collateral to the borrower (with 3 ada change).
getTxOutReturn :: Integer -> PubKeyHash -> TokenName -> Value -> TxOutRef -> Tx
getTxOutReturn interest borrower dat valToInt oref = addMintRedeemer getBorrowerNftPolicy oref $
  mconcat
    [ mintValue getBorrowerNftPolicy (getBNftVal (-1) getBorrowerNftCs oref)
    , payToScript
        (Interest.typedValidator (Interest.ContractInfo getLenderNftCs))
        dat
        (fakeValue loanCoin 150 <> fakeValue interestCoin interest <> adaValue 2 <> valToInt)
    , payToPubKey borrower (fakeValue collateralCoin 100 <> adaValue 3)
    ]

-- | Same as 'getTxOutReturn' but the borrower only takes 1 ada change.
getTxOutReturn' :: Integer -> PubKeyHash -> TokenName -> Value -> TxOutRef -> Tx
getTxOutReturn' interest borrower dat valToInt oref = addMintRedeemer getBorrowerNftPolicy oref $
  mconcat
    [ mintValue getBorrowerNftPolicy (getBNftVal (-1) getBorrowerNftCs oref)
    , payToScript
        (Interest.typedValidator (Interest.ContractInfo getLenderNftCs))
        dat
        (fakeValue loanCoin 150 <> fakeValue interestCoin interest <> adaValue 2 <> valToInt)
    , payToPubKey borrower (fakeValue collateralCoin 100 <> adaValue 1)
    ]
-- | Consume the collateral UTxO (with integer redeemer @rdm@) together with
-- any number of user spends.
getTxInFromCollateral :: [UserSpend] -> Collateral.CollateralDatum -> Integer -> TxOutRef -> Tx
getTxInFromCollateral usps dat rdm scriptTxOut =
  spendCollateral <> mconcat (map userSpend usps)
  where
    spendCollateral =
      spendScript (Collateral.collateralTypedValidator getSc2Params) scriptTxOut rdm dat
-- | Burn the borrower NFT identified by @oref@ and return 1 ada to the user.
getBurnBorrowerNftTx :: PubKeyHash -> TxOutRef -> UserSpend -> Tx
getBurnBorrowerNftTx pkh oref usp =
  addMintRedeemer getBorrowerNftPolicy oref burnTx
  where
    burnTx =
      mintValue getBorrowerNftPolicy (getBNftVal (-1) getBorrowerNftCs oref)
        <> payToPubKey pkh (adaValue 1)
        <> userSpend usp
-- | Borrower locks a request, then cancels it: spends the request UTxO back
-- to themselves while burning the borrower NFT.  Returns False only if the
-- request datum cannot be read back.
borrowerCancelsLoan :: Run Bool
borrowerCancelsLoan = do
  users <- setupUsers
  let u1 = head users
      valToPay = fakeValue collateralCoin 100 <> adaValue 3
  sp <- spend u1 valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let tx = createLockFundsTx 0 u1 oref sp 0 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx u1 oref
  submitTx u1 tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let valFromSc1 = fakeValue collateralCoin 100 <> adaValue 2
          valFromUsr = adaValue 1 <> getBNftVal 1 getBorrowerNftCs borrowerNftRef
      sp <- spend u1 valFromUsr
      tx <- signTx u1 $ getCancelRequestTx u1 valFromSc1 dat lockRef (getAadaTokenName lockRef) <> getBurnBorrowerNftTx u1 borrowerNftRef sp
      isRight <$> sendTx tx
    Nothing -> pure False
-- Create Loan Request tx
--   ┌────┐ n collateral + 2 ADA ┌───┐
--   n Collateral + 2 ADA │ │ datum ─┘
--   datum │ │ NFT + 1 ADA
--   │ Tx ├────────────────────────────
--   1 ADA (for mint) │ │
--   ────┘
--   ┌────┐ n collateral + Lenders NFT + Time Nft ┌───┐
--   n Loan + 2 ADA │ │ datum ─┘
--   │ │ Lenders NFT + 2 ADA
--   2 ADA (for mint) │ Tx ├──────────────────────────────────────▶ Lender
--   ┌───┐ n Collateral + 2 ADA │ │ Loan + 2 ADA
--   │ SC1 ├─────────────────────▶ │ ├──────────────────────────────────────
--   ───┘ datum ────┘
-- bchUtxos :: !(Map TxOutRef TxOut)
-- | Full repayment after the loan term: lend at t=7000, wait, then repay the
-- full 50-unit interest.
-- NOTE(review): this copy of the function had lost the
-- @utxos <- utxoAt ...@ binding (the line was garbled together with a
-- comment); restored below to match the identical step in
-- 'returnNotEnoughInterest' / 'returnPartialLoan'.
returnFullLoan :: Run Bool
returnFullLoan = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  -- restored binding (see NOTE above)
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin 50 <>
                    adaValue 1
      wait 2000
      intPayDate <- currentTime
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
                getTxOutReturn 50 borrower intDat (adaValueOf 0) borrowerNftRef
      logInfo $ "int pay date time: " ++ show intPayDate
      tx2 <- validateIn (interval 6000 intPayDate) tx2
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Repay after the full loan term but bring only 25 of the 50 interest
-- units owed; the collateral validator must reject (run under 'mustFail').
returnNotEnoughInterest :: Run Bool
returnNotEnoughInterest = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      -- only half of the owed interest (25 of 50) is gathered here
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin 25 <>
                    adaValue 1
      intPayDate <- currentTime
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
                getTxOutReturn 25 borrower intDat (adaValueOf 0) borrowerNftRef
      tx2 <- validateIn (interval 6000 intPayDate) tx2
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Repay halfway through a 20s loan term with half the interest (25 of 50);
-- time-proportional partial interest should be accepted.
returnPartialLoan :: Run Bool
returnPartialLoan = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
      borrowerNftRef = oref
      repayint = 20000
      tx = createLockFundsTx repayint borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      realCurTime <- currentTime
      logInfo $ "current time1: " ++ show realCurTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin 25 <>
                    adaValue 1
      wait 2000
      intPayDate <- currentTime
      logInfo $ "intPayDate: " ++ show intPayDate
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
                getTxOutReturn 25 borrower intDat (adaValueOf 0) borrowerNftRef
      tx2 <- validateIn (interval 6000 (intPayDate + 2000)) tx2
      wait 2000
      time <- currentTime
      logInfo $ "time before repaying: " ++ show time
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Like 'createLockFundsTx' but builds the same-currency datum variant
-- ('getTestDatum2') with a fixed "ff" oracle policy.
createLockFundsTx2 :: POSIXTime -> PubKeyHash -> TxOutRef -> UserSpend -> POSIXTime -> POSIXTime -> Tx
createLockFundsTx2 t pkh oref usp expiration mintDate =
  userSpend usp <> requestOutput
  where
    requestOutput =
      payToScript
        (requestTypedValidator getSc1Params)
        (getTestDatum2 t (getAadaTokenName oref) (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") pkh expiration "" mintDate Nothing)
        (fakeValue collateralCoin 100 <> adaValue 2)
-- | Lend output for the same-currency scenario: 100 loan-coin paid out, no
-- extra value locked beyond collateral + 2 ada.
getTxOutLend2 :: PubKeyHash -> PubKeyHash -> Collateral.CollateralDatum -> TxOutRef -> Tx
getTxOutLend2 borrower lender dat utxo = addMintRedeemer getLenderNftPolicy utxo $
  mconcat
    [ mintValue getLenderNftPolicy (getLNftVal 1 getLenderNftCs utxo)
    , payToScript
        (Collateral.collateralTypedValidator getSc2Params)
        dat
        (fakeValue collateralCoin 100 <> adaValue 2)
    , payToPubKey borrower (fakeValue loanCoin 100 <> adaValue 2)
    , payToPubKey lender (adaValue 2 <> getLNftVal 1 getLenderNftCs utxo)
    ]
-- | Repayment output for the same-currency scenario: burn the borrower NFT
-- and pay 125 loan-coin (loan + partial interest) to the interest script.
-- NOTE(review): 'addMintRedeemer' is given @rdm@ (integer 0) here, whereas
-- the sibling 'getTxOutReturn' passes the NFT's @oref@ — confirm which
-- redeemer the borrower NFT policy actually expects.
getTxOutReturn2 :: PubKeyHash -> TokenName -> TxOutRef -> Tx
getTxOutReturn2 borrower dat oref = addMintRedeemer getBorrowerNftPolicy rdm $
  mconcat
    [ mintValue getBorrowerNftPolicy (getBNftVal (-1) getBorrowerNftCs oref)
    , payToScript
        (Interest.typedValidator (Interest.ContractInfo getLenderNftCs))
        dat
        (fakeValue loanCoin 125 <> adaValue 2)
    , payToPubKey borrower (fakeValue collateralCoin 100 <> adaValue 3)
    ]
  where
    rdm = Redeemer (PlutusTx.toBuiltinData (0 :: Integer))
-- | Partial repayment where loan and interest share one currency; run under
-- 'mustFail' — the validator must not accept the 125-unit repayment here.
returnPartialLoanSameCs :: Run Bool
returnPartialLoanSameCs = do
  users <- setupUsers'
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let repayint = 20000
  let tx = createLockFundsTx2 repayint borrower oref sp 100000 0 <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 100 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend2 borrower lender convertedDat lockRef
      logInfo $ "current time1: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 125 <>
                    adaValue 2
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      logInfo $ "mint date: " <> show mintTime
      wait 16000
      intPayDate <- currentTime
      logInfo $ "pay date: " <> show intPayDate
      let intDat = Collateral.lenderNftTn convertedDat
          tx2 = getTxInFromCollateral [sp1, sp2] convertedDat 0 lockRef <>
                getTxOutReturn2 borrower intDat borrowerNftRef
      tx2 <- validateIn (interval 24000 intPayDate) tx2
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Attempt to underpay interest (5 units) by exploiting a forged mint date:
-- the repay tx uses redeemer 2 and an open-ended validity range ('from').
-- Run under 'mustFail'.
returnPartialLoanForgedMintDate :: Run Bool
returnPartialLoanForgedMintDate = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let repayint = 20000
  let tx = createLockFundsTx repayint borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time1: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      let interestAmount = 5
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin interestAmount <>
                    adaValue 1
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 2 lockRef <>
                getTxOutReturn interestAmount borrower intDat (adaValueOf 0) borrowerNftRef
      tx2 <- validateIn (from 6000) tx2
      wait 15000
      time <- currentTime
      logInfo $ "time before repaying: " ++ show time
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Repay after 15s of a 20s loan term but with only half the interest —
-- less than the elapsed-time share — so the validator must reject (run
-- under 'mustFail').
-- NOTE(review): this copy had lost the @utxos <- utxoAt ...@ binding (the
-- line was garbled together with a comment); restored below to match the
-- identical step in 'returnNotEnoughInterest' / 'returnPartialLoan'.
returnPartialLoanLessThanItShoudInterestRepayed :: Run Bool
returnPartialLoanLessThanItShoudInterestRepayed = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let borrowerNftRef = oref
  let repayint = 20000
  let tx = createLockFundsTx repayint borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  -- restored binding (see NOTE above)
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "repay interval: " ++ show repayint
      logInfo $ "loan provided and timenft minted time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      let interestAmount = 25
      logInfo $ "Interest amount paid: " ++ show interestAmount
      let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
                    adaValue 1
          valTmp2 = fakeValue loanCoin 150 <>
                    adaValue 1
          valTmp3 = fakeValue interestCoin interestAmount <>
                    adaValue 1
      wait 15000
      intPayDate <- currentTime
      sp1 <- spend borrower valTmp1
      sp2 <- spend borrower valTmp2
      sp3 <- spend borrower valTmp3
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      let intDat = Collateral.lenderNftTn convertedDat
      let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
                getTxOutReturn interestAmount borrower intDat (adaValueOf 0) borrowerNftRef
      tx2 <- validateIn (interval 6000 intPayDate) tx2
      time <- currentTime
      logInfo $ "time before repaying: " ++ show time
      submitTx lender tx2
      pure True
    Nothing -> pure False
-- | Value of @amt@ oracle NFTs under symbol @cs@ with the fixed test name.
getOracleNftVal :: CurrencySymbol -> Integer -> Value
getOracleNftVal cs amt = Value.singleton cs getOracleNftTn amt
-- | Build a tx minting @n@ oracle NFTs whose policy requires all three given
-- keys to sign; the minted token (+2 ada) is parked at the test validator.
getMintOracleNftTx :: Integer -> PubKeyHash -> PubKeyHash -> PubKeyHash -> UserSpend -> Tx
getMintOracleNftTx n pkh1 pkh2 pkh3 usp = addMintRedeemer mp rdm $
  mconcat
    [ mintValue mp (getOracleNftVal cs n)
    , payToScript Helpers.TestValidator.typedValidator
        0
        (adaValue 2 <> getOracleNftVal cs n)
    , userSpend usp
    ]
  where
    mp = OracleNft.policy getOracleNftTn pkh1 pkh2 pkh3
    cs = scriptCurrencySymbol mp
    rdm = Redeemer (PlutusTx.toBuiltinData (0 :: Integer))
-- | Mint the oracle NFT with all three required signatures; must succeed.
mintOracleNft :: Run ()
mintOracleNft = do
  [u1, u2, u3] <- setupSimpleNUsers 3
  spendable <- spend u1 (adaValue 2)
  let unsigned = getMintOracleNftTx 1 u1 u2 u3 spendable
  signed1 <- signTx u1 unsigned
  signed2 <- signTx u2 signed1
  signed3 <- signTx u3 signed2
  submitTx u1 signed3
-- The following scenarios each omit a different subset of the three required
-- signatures, so the oracle NFT mint must fail (each is run under
-- 'mustFail').  NOTE(review): the deliberately-omitted @signTx@ lines had
-- lost their "--" markers in this copy; restored as comments below.

-- | Missing u2's signature.
mintOracleNftShouldFail2 :: Run ()
mintOracleNftShouldFail2 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  tx <- signTx u1 tx
  -- tx <- signTx u2 tx
  tx <- signTx u3 tx
  submitTx u1 tx

-- | Missing u3's signature.
mintOracleNftShouldFail3 :: Run ()
mintOracleNftShouldFail3 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  tx <- signTx u1 tx
  tx <- signTx u2 tx
  -- tx <- signTx u3 tx
  submitTx u1 tx

-- | Only u3 signs.
mintOracleNftShouldFail4 :: Run ()
mintOracleNftShouldFail4 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  -- tx <- signTx u1 tx
  -- tx <- signTx u2 tx
  tx <- signTx u3 tx
  submitTx u1 tx

-- | Only u1 signs.
mintOracleNftShouldFail5 :: Run ()
mintOracleNftShouldFail5 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  tx <- signTx u1 tx
  -- tx <- signTx u2 tx
  -- tx <- signTx u3 tx
  submitTx u1 tx

-- | Only u2 signs.
mintOracleNftShouldFail6 :: Run ()
mintOracleNftShouldFail6 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  -- tx <- signTx u1 tx
  tx <- signTx u2 tx
  -- tx <- signTx u3 tx
  submitTx u1 tx

-- | Nobody signs.
mintOracleNftShouldFail7 :: Run ()
mintOracleNftShouldFail7 = do
  users <- setupSimpleNUsers 3
  let [u1, u2, u3] = users
  sp1 <- spend u1 (adaValue 2)
  let tx = getMintOracleNftTx 1 u1 u2 u3 sp1
  -- tx <- signTx u1 tx
  -- tx <- signTx u2 tx
  -- tx <- signTx u3 tx
  submitTx u1 tx
-- | Lender takes a request before its expiration (100000); must succeed.
-- NOTE(review): this copy had lost the @utxos <- utxoAt ...@ binding (the
-- line was garbled together with a comment); restored below to match the
-- identical step in the sibling scenarios.
provideLoanOnTime :: Run Bool
provideLoanOnTime = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  -- restored binding (see NOTE above)
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      pure True
    Nothing -> pure False
-- | Lender tries to fund the loan too late: the request expiration is 0
-- while the tx validity interval is [2000, 6000], so the lending step is
-- expected to be rejected by the script.  Returns False if the request
-- datum cannot be read back.
provideLoanNotOnTime :: Run Bool
provideLoanNotOnTime = do
  users <- setupUsers
  let borrower = head users
      lender = last users
      valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  -- Expiration is 0 here (vs 100000 in 'provideLoanOnTime').
  let tx = createLockFundsTx 0 borrower oref sp 0 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  -- NOTE(review): the original line here was corrupted (only a mangled
  -- "utxoAt :: ..." comment survived); reconstructed from the identical
  -- step in 'happyPath' -- confirm.
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 7000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time: " ++ show mintTime
      tx <- validateIn (interval 2000 6000) tx
      submitTx lender tx
      pure True
    Nothing -> pure False
-- | Consume the interest-script UTxO 'interestRef' (redeemer 0, datum
-- 'tokenDat') together with the lender's own spendable inputs.
getTxInFromInterestSc :: UserSpend -> TxOutRef -> TokenName -> Tx
getTxInFromInterestSc lenderSpend interestRef tokenDat =
  spendScript (Interest.typedValidator (Interest.ContractInfo getLenderNftCs)) interestRef 0 tokenDat
    <> userSpend lenderSpend
-- | Pay the repaid loan (150), the given interest amount and 4 ada to the
-- lender while burning the lender NFT referenced by 'utxo'; the UTxO
-- reference is attached as the mint redeemer.
getTxOutFromInterestSc :: Integer -> PubKeyHash -> TxOutRef -> Tx
getTxOutFromInterestSc interest lender utxo =
  addMintRedeemer getLenderNftPolicy utxo $
    burnLenderNft <> payLender
  where
    -- Burn (negative mint) exactly one lender NFT for this UTxO.
    burnLenderNft = mintValue getLenderNftPolicy (getLNftVal (-1) getLenderNftCs utxo)
    payLender = payToPubKey lender (fakeValue loanCoin 150 <> fakeValue interestCoin interest <> adaValue 4)
-- | End-to-end "happy path": the borrower locks collateral and mints a
-- borrower NFT; the lender funds the loan within the allowed window; the
-- borrower repays loan + 50 interest, reclaiming the collateral; finally the
-- lender redeems loan + interest from the interest script, burning the
-- lender NFT.  Returns False when an expected UTxO/datum shape is missing.
-- NOTE: do-block indentation was lost in this dump; code lines are kept
-- verbatim.
happyPath :: Run Bool
happyPath = do
users <- setupUsers
let borrower = head users
lender = last users
valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
sp <- spend borrower valToPay
let oref = getHeadRef sp
let borrowerNftRef = oref
-- Phase 1: borrower creates the loan request and mints the borrower NFT.
let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
submitTx borrower tx
utxos <- utxoAt $ requestAddress getSc1Params
let lockRef = fst . head $ utxos
let lenderNftRef = lockRef
lockDat <- datumAt @RequestDatum lockRef
case lockDat of
Just dat -> do
curTime <- currentTime
let mintTime = POSIXTime 7000
-- Phase 2: lender provides the loan inside validity interval [2000, 6000].
let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
sp <- spend lender valForLenderToSpend
let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
logInfo $ "ref: " ++ show lenderNftRef
logInfo $ "hash: " ++ show (getAadaTokenName lenderNftRef)
logInfo $ "mint time: " ++ show mintTime
logInfo $ "curTime time: " ++ show curTime
tx <- validateIn (interval 2000 6000) tx
submitTx lender tx
-- Phase 3: borrower repays loan + 50 interest and burns the borrower NFT.
let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
adaValue 1
valTmp2 = fakeValue loanCoin 150 <>
adaValue 1
valTmp3 = fakeValue interestCoin 50 <>
adaValue 1
wait 2000
intPayDate <- currentTime
sp1 <- spend borrower valTmp1
sp2 <- spend borrower valTmp2
sp3 <- spend borrower valTmp3
utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
let [(lockRef, _)] = utxos
let intDat = Collateral.lenderNftTn convertedDat
let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
getTxOutReturn 50 borrower intDat (adaValueOf 0) borrowerNftRef
logInfo $ "int pay date time: " ++ show intPayDate
tx2 <- validateIn (interval 5000 intPayDate) tx2
submitTx lender tx2
-- Phase 4: lender collects loan + interest, burning the lender NFT.
utxos <- utxoAt (Interest.interestAddress (Interest.ContractInfo getLenderNftCs))
let lenderPay = adaValue 2 <> getLNftVal 1 getLenderNftCs lenderNftRef
sp <- spend lender lenderPay
case utxos of
[(lockRef, _)] -> do
let tx = getTxInFromInterestSc sp lockRef intDat <>
getTxOutFromInterestSc 50 lender lenderNftRef
submitTx lender tx
pure True
_ -> pure False
Nothing -> pure False
-- | Spend the collateral-script UTxO 'collatRef' (redeemer 'rdm', datum
-- 'collatDat') together with two separate lender inputs.
getTxInFromCollateraLiq :: UserSpend -> UserSpend -> Collateral.CollateralDatum -> Integer -> TxOutRef -> Tx
getTxInFromCollateraLiq adaSpend nftSpend collatDat rdm collatRef =
  spendScript (Collateral.collateralTypedValidator getSc2Params) collatRef rdm collatDat
    <> userSpend adaSpend
    <> userSpend nftSpend
-- | Build the oracle-NFT mint for the liquidation scenario: mint 'n' oracle
-- tokens under a policy keyed by the three oracle signers, and lock them
-- (with 2 ada) at the test validator with datum 0.
getMintOracleNftTxLiq :: Integer -> PubKeyHash -> PubKeyHash -> PubKeyHash -> Tx
getMintOracleNftTxLiq n pkh1 pkh2 pkh3 =
  mconcat
    [ mintValue mp (getOracleNftVal cs n)
    , payToScript Helpers.TestValidator.typedValidator
        0
        (adaValue 2 <> getOracleNftVal cs n)
    ]
  where
    -- The original also bound 'valh = validatorHash
    -- Helpers.TestValidator.validator', which was never used; removed.
    mp = OracleNft.policy getOracleNftTn pkh1 pkh2 pkh3
    cs = scriptCurrencySymbol mp
-- | Pay the liquidated collateral (100) plus 2 ada to the lender while
-- burning (negative mint) the lender NFT referenced by 'utxo'.
-- NOTE(review): unlike 'getTxOutFromInterestSc' this does not wrap the
-- result in 'addMintRedeemer'; the caller ('liquidateBorrower') appears to
-- attach redeemers itself -- confirm.
getTxOutLiquidate :: PubKeyHash -> TxOutRef -> Tx
getTxOutLiquidate lender utxo =
mconcat
[ mintValue getLenderNftPolicy (getLNftVal (-1) getLenderNftCs utxo)
, payToPubKey lender (fakeValue collateralCoin 100 <> adaValue 2)
]
-- | Liquidation scenario: the borrower takes a loan, and instead of being
-- repaid the lender liquidates the collateral with the cooperation of the
-- three oracle signers (who co-sign the oracle-NFT mint).
-- NOTE(review): two spots of this function were corrupted in the dump and
-- are reconstructed below; both reconstructions are marked inline.
liquidateBorrower :: Run Bool
liquidateBorrower = do
  logInfo "setup"
  users1 <- setupSimpleNUsers 3
  users2 <- setupUsers
  let borrower = head users2
      lender = last users2
  let [oracle1, oracle2, oracle3] = users1
  logInfo "create loan request"
  let valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
  sp <- spend borrower valToPay
  let oref = getHeadRef sp
  let valh = validatorHash Helpers.TestValidator.validator -- NOTE(review): appears unused here
      omp = OracleNft.policy getOracleNftTn oracle1 oracle2 oracle3
      ordm = Redeemer (PlutusTx.toBuiltinData (0 :: Integer))
  let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol omp) <> getMintBorrowerNftTx borrower oref
  submitTx borrower tx
  logInfo "provide loan phase"
  utxos <- utxoAt $ requestAddress getSc1Params
  let [(lockRef, _)] = utxos
  let lenderNftRef = lockRef
  lockDat <- datumAt @RequestDatum lockRef
  case lockDat of
    Just dat -> do
      let mintTime = 12000
      let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
          valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
      sp <- spend lender valForLenderToSpend
      let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
      logInfo $ "current time: " ++ show mintTime
      realCurTime <- currentTime
      logInfo $ "real current time: " <> show realCurTime
      tx <- validateIn (interval 7000 11000) tx
      submitTx lender tx
      logInfo "liquidate phase"
      utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
      let [(lockRef, _)] = utxos
      lenderSpend1 <- spend lender (adaValue 2)
      lenderSpend2 <- spend lender (getLNftVal 1 getLenderNftCs lenderNftRef)
      let liquidate = getTxInFromCollateraLiq lenderSpend1 lenderSpend2 convertedDat 0 lockRef <>
                      getMintOracleNftTxLiq 1 oracle1 oracle2 oracle3 <>
                      getTxOutLiquidate lender lenderNftRef
      -- NOTE(review): the next line was corrupted in the original ("1 .");
      -- it is reconstructed as attaching the oracle-mint redeemer 'ordm'
      -- (otherwise unused, and 'tx' below is otherwise unbound) to the
      -- liquidation transaction -- confirm against repository history.
      let tx = addMintRedeemer omp ordm liquidate
      wait 2000
      time <- currentTime
      logInfo $ "current time: " ++ show time
      -- logInfo $ "debug: " <> show tx
      tx <- signTx oracle1 tx
      tx <- signTx oracle2 tx
      tx <- signTx oracle3 tx
      tx <- validateIn (interval 9000 99999) tx
      -- logInfo $ "debug liquidate: " <> show tx
      submitTx lender tx
      pure True
    Nothing -> pure False
-- | Denial-of-service attempt by the lender: the lending transaction pads
-- the collateral-script output with 'lenderDosAmount' junk token values
-- ('generateFakeValues''), trying to make the locked UTxO too large for the
-- borrower to ever spend back.  Mirrors 'provideLoanOnTime' otherwise.
-- NOTE: do-block indentation was lost in this dump; code lines are kept
-- verbatim.
lenderDosBorrower :: Run Bool
lenderDosBorrower = do
users <- setupUsers''
let borrower = head users
lender = last users
valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
sp <- spend borrower valToPay
let oref = getHeadRef sp
let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
submitTx borrower tx
utxos <- utxoAt $ requestAddress getSc1Params
let [(lockRef, _)] = utxos
let lenderNftRef = lockRef
lockDat <- datumAt @RequestDatum lockRef
case lockDat of
Just dat -> do
let mintTime = 7000
let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4 <> generateFakeValues' lenderDosAmount
sp <- spend lender valForLenderToSpend
-- The junk values are forwarded into the collateral-script output here.
let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (generateFakeValues' lenderDosAmount)
logInfo $ "current time: " ++ show mintTime
tx <- validateIn (interval 2000 6000) tx
submitTx lender tx
pure True
Nothing -> pure False
-- | Denial-of-service attempt by the borrower: the repayment transaction
-- pads the interest-script output with 'borrowerDosAmount' junk token
-- values ('generateFakeValues''), trying to make the UTxO too large for the
-- lender to redeem.  Mirrors the first three phases of 'happyPath'
-- otherwise.  NOTE: do-block indentation was lost in this dump; code lines
-- are kept verbatim.
borrowerDosLender :: Run Bool
borrowerDosLender = do
users <- setupUsers'''
let borrower = head users
lender = last users
valToPay = fakeValue collateralCoin 100 <> adaValue 2 <> adaValue 1
sp <- spend borrower valToPay
let oref = getHeadRef sp
let borrowerNftRef = oref
let tx = createLockFundsTx 0 borrower oref sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff") <> getMintBorrowerNftTx borrower oref
submitTx borrower tx
utxos <- utxoAt $ requestAddress getSc1Params
let [(lockRef, _)] = utxos
let lenderNftRef = lockRef
lockDat <- datumAt @RequestDatum lockRef
case lockDat of
Just dat -> do
let mintTime = 7000
let convertedDat = getCollatDatumFromRequestDat dat (getAadaTokenName lenderNftRef) mintTime
valForLenderToSpend = fakeValue loanCoin 150 <> adaValue 4
sp <- spend lender valForLenderToSpend
let tx = getTxIn sp dat lockRef (getAadaTokenName lenderNftRef) <> getTxOutLend borrower lender convertedDat lockRef (adaValueOf 0)
logInfo $ "current time: " ++ show mintTime
tx <- validateIn (interval 2000 6000) tx
submitTx lender tx
-- Repayment inputs; valTmp3 carries the junk token padding.
let valTmp1 = getBNftVal 1 getBorrowerNftCs borrowerNftRef <>
adaValue 1
valTmp2 = fakeValue loanCoin 150 <>
adaValue 1
valTmp3 = fakeValue interestCoin 50 <>
adaValue 1 <>
generateFakeValues' borrowerDosAmount
wait 2000
intPayDate <- currentTime
sp1 <- spend borrower valTmp1
sp2 <- spend borrower valTmp2
sp3 <- spend borrower valTmp3
utxos <- utxoAt $ Collateral.collateralAddress getSc2Params
let [(lockRef, _)] = utxos
let intDat = Collateral.lenderNftTn convertedDat
-- The junk values are forwarded into the interest-script output here.
let tx2 = getTxInFromCollateral [sp1, sp2, sp3] convertedDat 0 lockRef <>
getTxOutReturn 50 borrower intDat (generateFakeValues' borrowerDosAmount) borrowerNftRef
logInfo $ "int pay date time: " ++ show intPayDate
tx2 <- validateIn (interval 6000 intPayDate) tx2
submitTx lender tx2
pure True
Nothing -> pure False
-- | Lock a lender's loan offer (150 loan coins + 2 ada) at the
-- debt-request script, recording repay interval, borrower, expiration,
-- lend date and liquidation-oracle currency symbol in the datum.
createDebtRequestTx :: RepayInterval -> BorrowersAddressPkh -> TxOutRef -> UserSpend -> RequestExpirationDate -> LendDate -> LiquidationNftCs -> Tx
createDebtRequestTx t pkh oref usp expiration mintDate oracle =
  userSpend usp <> lockedOffer
  where
    -- The offer output carried by the debt-request validator.
    lockedOffer =
      payToScript
        (debtRequestTypedValidator getSc1Params')
        (getTestDatum' t "" oracle pkh expiration (getAadaTokenName oref) mintDate Nothing)
        (fakeValue loanCoin 150 <> adaValue 2)
-- | Mint one lender NFT for 'oref' and pay it (plus 1 ada) to 'pkh'; the
-- UTxO reference is attached as the mint redeemer.
getMintLenderNftTx :: PubKeyHash -> TxOutRef -> Tx
getMintLenderNftTx pkh oref = addMintRedeemer getLenderNftPolicy oref $
  mconcat
    [ -- Consistency fix: the original spelled the policy out as
      -- 'AadaNft.policy True' here while deriving 'cs' from
      -- 'getLenderNftPolicy' below; the minted value's currency symbol must
      -- match the minting policy, so use 'getLenderNftPolicy' in both places.
      mintValue getLenderNftPolicy (getLNftVal 1 cs oref)
    , payToPubKey pkh (adaValue 1 <> getLNftVal 1 cs oref)
    ]
  where
    cs = scriptCurrencySymbol getLenderNftPolicy
-- | Consume the debt-request UTxO 'reqRef' with the borrower's redeemer
-- 'borrowerRdm' and datum 'reqDat', plus the borrower's own inputs.
getDebtRequestTxIn :: UserSpend -> DebtRequestDatum -> TxOutRef -> DebtRequestRedeemer -> Tx
getDebtRequestTxIn borrowerSpend reqDat reqRef borrowerRdm =
  spendScript (debtRequestTypedValidator getSc1Params') reqRef borrowerRdm reqDat
    <> userSpend borrowerSpend
-- | Outputs of the borrower taking an offered loan: mint the borrower NFT
-- (redeemer = 'utxo'), lock collateral (100 + 2 ada + 'valToScript') at the
-- collateral script with datum 'dat', and pay the loan (150 + 2 ada) plus
-- the borrower NFT to the borrower.
getTxOutBorrow :: PubKeyHash -> Collateral.CollateralDatum -> TxOutRef -> Value -> Tx
getTxOutBorrow borrower dat utxo valToScript =
  addMintRedeemer getBorrowerNftPolicy utxo tx
  where
    borrowerNft = getBNftVal 1 getBorrowerNftCs utxo
    tx =
      mintValue getBorrowerNftPolicy borrowerNft
        <> payToScript
             (Collateral.collateralTypedValidator getSc2Params)
             dat
             (fakeValue collateralCoin 100 <> adaValue 2 <> valToScript)
        <> payToPubKey borrower (fakeValue loanCoin 150 <> adaValue 2 <> borrowerNft)
-- | Reverse-direction flow: the *lender* first locks a loan offer at the
-- debt-request script (minting a lender NFT), then the *borrower* accepts
-- it with 'TakeLoan', locking collateral and minting a borrower NFT.
-- Returns False when the offer datum cannot be read back.
-- NOTE: do-block indentation was lost in this dump; code lines are kept
-- verbatim.
debtRequestTest :: Run Bool
debtRequestTest = do
users <- setupUsers
let borrower = head users
lender = last users
valToPay = fakeValue loanCoin 150 <> adaValue 3
sp <- spend lender valToPay
let lenderNftRef = getHeadRef sp
-- Phase 1: lender publishes the offer and mints the lender NFT.
let tx = createDebtRequestTx 0 borrower lenderNftRef sp 100000 0 (scriptCurrencySymbol $ OracleNft.policy "ff" "ff" "ff" "ff")
<> getMintLenderNftTx lender lenderNftRef
submitTx lender tx
utxos <- utxoAt $ debtRequestAddress getSc1Params'
let lockRef = fst . head $ utxos
let borrowerNftRef = lockRef
lockDat <- datumAt @DebtRequestDatum lockRef
case lockDat of
Just dat -> do
-- Phase 2: borrower accepts within validity interval [2000, 6000].
let convertedDat = getCollatDatumFromDebtRequestDat dat (getAadaTokenName borrowerNftRef) 2000
valForBorrowerToSpend = fakeValue collateralCoin 100 <> adaValue 2
sp <- spend borrower valForBorrowerToSpend
let tx = getDebtRequestTxIn sp dat lockRef (TakeLoan (getAadaTokenName borrowerNftRef))
<> getTxOutBorrow borrower convertedDat lockRef (adaValueOf 0)
tx <- validateIn (interval 2000 6000) tx
submitTx borrower tx
pure True
Nothing -> pure False
fd6830c7586a366fcb1ce34eb342d939a2981b9770395717ddc6dc2231391435 | ds26gte/tex2page | plt-tex2page.rkt | last change : 2009 - 04 - 09
(scmxlate-insert
";tex2page
( c ) , 1997 - 2002
(module tex2page-aux mzscheme
(require mzlib/process)
(require mzlib/date)
(require scheme/private/more-scheme)
( require ( lib \"process.ss\ " ) )
;(require (lib \"date.ss\"))
(provide (all-defined-except ))
")
(scmxlate-uncall
require)
(scmxlate-ignore
;get-arg1
main
)
(scmxlate-rename
;(tex2page tex2page-aux)
)
(scmxlate-include "plt-common-tex2page.scm")
(scmxlate-postamble)
(scmxlate-insert
(string-append
")"
"
"))
(scmxlate-postprocess
(define save-file
(lambda (f1 f2 . replace?)
(let ((replace? (and (pair? replace?) (car replace?))))
(when (and (file-exists? f2) replace?)
(delete-file f2))
(when (and (file-exists? f1)
(not (file-exists? f2)))
(copy-file f1 f2)))))
;(save-file "t2p4plt.tex" "t2p4plt.tex.orig")
; (call-with-output-file "t2p4plt.tex"
; (lambda (o)
( o " % Delete this file for ~
; non-PLT-specific document~%~%~
\\let\\inpltdist t~% " ) )
; 'replace)
(printf "Generated files are tex2page.ss and tex2page-aux.ss.~%~
Put them in PLTHOME/collects/tex2page.~%")
(save-file "my-tex2page" "tex2page-aux.ss" #t)
(call-with-output-file "tex2page.ss"
(lambda (o)
(pretty-print
`(module tex2page mzscheme
;(require (lib "etc.ss"))
(require mzlib/etc)
(provide tex2page)
;
(define tex2page
(lambda (f)
(parameterize ((current-namespace (make-namespace)))
(namespace-require `(file
,(path->string
(build-path (this-expression-source-directory)
"tex2page-aux.ss"))))
((namespace-variable-value 'tex2page) f)))))
o))
#:exists
'replace)
)
| null | https://raw.githubusercontent.com/ds26gte/tex2page/fb664716e062fe56e2963367d14e4732bdc93813/dialects/plt-tex2page.rkt | racket | tex2page
(require (lib \"date.ss\"))
get-arg1
(tex2page tex2page-aux)
(save-file "t2p4plt.tex" "t2p4plt.tex.orig")
(call-with-output-file "t2p4plt.tex"
(lambda (o)
non-PLT-specific document~%~%~
'replace)
(require (lib "etc.ss"))
| last change : 2009 - 04 - 09
(scmxlate-insert
( c ) , 1997 - 2002
(module tex2page-aux mzscheme
(require mzlib/process)
(require mzlib/date)
(require scheme/private/more-scheme)
( require ( lib \"process.ss\ " ) )
(provide (all-defined-except ))
")
(scmxlate-uncall
require)
(scmxlate-ignore
main
)
(scmxlate-rename
)
(scmxlate-include "plt-common-tex2page.scm")
(scmxlate-postamble)
(scmxlate-insert
(string-append
")"
"
"))
(scmxlate-postprocess
(define save-file
(lambda (f1 f2 . replace?)
(let ((replace? (and (pair? replace?) (car replace?))))
(when (and (file-exists? f2) replace?)
(delete-file f2))
(when (and (file-exists? f1)
(not (file-exists? f2)))
(copy-file f1 f2)))))
( o " % Delete this file for ~
\\let\\inpltdist t~% " ) )
(printf "Generated files are tex2page.ss and tex2page-aux.ss.~%~
Put them in PLTHOME/collects/tex2page.~%")
(save-file "my-tex2page" "tex2page-aux.ss" #t)
(call-with-output-file "tex2page.ss"
(lambda (o)
(pretty-print
`(module tex2page mzscheme
(require mzlib/etc)
(provide tex2page)
(define tex2page
(lambda (f)
(parameterize ((current-namespace (make-namespace)))
(namespace-require `(file
,(path->string
(build-path (this-expression-source-directory)
"tex2page-aux.ss"))))
((namespace-variable-value 'tex2page) f)))))
o))
#:exists
'replace)
)
|
ec626126bbef104b3c8584c4ebf353389d36daf58f7665f107c309e051bc56f9 | facebookarchive/pfff | meta_ast_css.ml |
(* generated by ocamltarzan with: camlp4o -o /tmp/yyy.ml -I pa/ pa_type_conv.cmo pa_vof.cmo pr_o.cmo /tmp/xxx.ml *)
let rec vof_selector (v1, v2) =
let v1 = vof_simplesel v1
and v2 =
Ocaml.vof_list
(fun (v1, v2) ->
let v1 = vof_combinator v1
and v2 = vof_simplesel v2
in Ocaml.VTuple [ v1; v2 ])
v2
in Ocaml.VTuple [ v1; v2 ]
and vof_simplesel =
function
| Explicit ((v1, v2)) ->
let v1 = vof_element v1
and v2 = Ocaml.vof_list vof_qualifier v2
in Ocaml.VSum (("Explicit", [ v1; v2 ]))
| Generic ((v1, v2)) ->
let v1 = vof_qualifier v1
and v2 = Ocaml.vof_list vof_qualifier v2
in Ocaml.VSum (("Generic", [ v1; v2 ]))
and vof_combinator =
function
| Descendant -> Ocaml.VSum (("Descendant", []))
| GeneralSibling -> Ocaml.VSum (("GeneralSibling", []))
| AdjacentSibling -> Ocaml.VSum (("AdjacentSibling", []))
| Child -> Ocaml.VSum (("Child", []))
and vof_element =
function
| Tag v1 -> let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Tag", [ v1 ]))
| Universal -> Ocaml.VSum (("Universal", []))
and vof_qualifier =
function
| Id v1 -> let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Id", [ v1 ]))
| Class v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Class", [ v1 ]))
| Attr ((v1, v2)) ->
let v1 = Ocaml.vof_string v1
and v2 = vof_attr v2
in Ocaml.VSum (("Attr", [ v1; v2 ]))
| PseudoClass v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("PseudoClass", [ v1 ]))
| PseudoElement v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("PseudoElement", [ v1 ]))
| SelFunc ((v1, v2)) ->
let v1 = Ocaml.vof_string v1
and v2 = vof_functiont v2
in Ocaml.VSum (("SelFunc", [ v1; v2 ]))
and vof_functiont =
function
| Qualified v1 ->
let v1 = Ocaml.vof_list vof_qualifier v1
in Ocaml.VSum (("Qualified", [ v1 ]))
| Nth v1 -> let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Nth", [ v1 ]))
and vof_attr =
function
| AttrExists -> Ocaml.VSum (("AttrExists", []))
| AttrEquals v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrEquals", [ v1 ]))
| AttrIncludes v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrIncludes", [ v1 ]))
| AttrDashmatch v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrDashmatch", [ v1 ]))
| AttrPrefix v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrPrefix", [ v1 ]))
| AttrSuffix v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrSuffix", [ v1 ]))
| AttrSubstring v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrSubstring", [ v1 ]))
let rec vof_declaration (v1, v2, v3) =
let v1 = vof_property v1
and v2 = vof_expression v2
and v3 = vof_important v3
in Ocaml.VTuple [ v1; v2; v3 ]
and vof_property v = Ocaml.vof_string v
and vof_important v = Ocaml.vof_bool v
and vof_expression v = Ocaml.vof_list vof_sentence v
and vof_sentence v = Ocaml.vof_list vof_term v
and vof_term =
function
| Calc v1 -> let v1 = vof_calc v1 in Ocaml.VSum (("Calc", [ v1 ]))
| String v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("String", [ v1 ]))
| Ident v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Ident", [ v1 ]))
| Uri v1 -> let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Uri", [ v1 ]))
| Hash v1 -> let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Hash", [ v1 ]))
| TermFunc ((v1, v2)) ->
let v1 = Ocaml.vof_string v1
and v2 = vof_expression v2
in Ocaml.VSum (("TermFunc", [ v1; v2 ]))
| Slash -> Ocaml.VSum (("Slash", []))
and vof_calc =
function
| Varref v1 -> let v1 = vof_variable v1 in Ocaml.VSum (("Varref", [ v1 ]))
| Quantity v1 ->
let v1 = vof_quantity v1 in Ocaml.VSum (("Quantity", [ v1 ]))
| Sum ((v1, v2)) ->
let v1 = vof_calc v1
and v2 = vof_calc v2
in Ocaml.VSum (("Sum", [ v1; v2 ]))
| Sub ((v1, v2)) ->
let v1 = vof_calc v1
and v2 = vof_calc v2
in Ocaml.VSum (("Sub", [ v1; v2 ]))
| Mul ((v1, v2)) ->
let v1 = vof_calc v1
and v2 = vof_calc v2
in Ocaml.VSum (("Mul", [ v1; v2 ]))
| Div ((v1, v2)) ->
let v1 = vof_calc v1
and v2 = vof_calc v2
in Ocaml.VSum (("Div", [ v1; v2 ]))
and vof_quantity (v1, v2) =
let v1 = Ocaml.vof_float v1
and v2 = Ocaml.vof_option Ocaml.vof_string v2
in Ocaml.VTuple [ v1; v2 ]
and vof_variable v = Ocaml.vof_string v
(* Convert one CSS rule (its selector list and declaration list) to the
   generic [Ocaml.v] representation.  NOTE: this file is generated by
   ocamltarzan (see header); hand edits may be overwritten. *)
let vof_rule (v1, v2) =
let v1 = Ocaml.vof_list vof_selector v1
and v2 = Ocaml.vof_list vof_declaration v2
in Ocaml.VTuple [ v1; v2 ]
(* A stylesheet is converted as a list of rules. *)
let vof_stylesheet v = Ocaml.vof_list vof_rule v
| null | https://raw.githubusercontent.com/facebookarchive/pfff/ec21095ab7d445559576513a63314e794378c367/lang_css/parsing/meta_ast_css.ml | ocaml |
generated by ocamltarzan with : camlp4o -o /tmp / yyy.ml -I pa/ pa_type_conv.cmo pa_vof.cmo pr_o.cmo /tmp / xxx.ml
open Ast_css
let rec vof_selector (v1, v2) =
let v1 = vof_simplesel v1
and v2 =
Ocaml.vof_list
(fun (v1, v2) ->
let v1 = vof_combinator v1
and v2 = vof_simplesel v2
in Ocaml.VTuple [ v1; v2 ])
v2
in Ocaml.VTuple [ v1; v2 ]
and vof_simplesel =
function
| Explicit ((v1, v2)) ->
let v1 = vof_element v1
and v2 = Ocaml.vof_list vof_qualifier v2
in Ocaml.VSum (("Explicit", [ v1; v2 ]))
| Generic ((v1, v2)) ->
let v1 = vof_qualifier v1
and v2 = Ocaml.vof_list vof_qualifier v2
in Ocaml.VSum (("Generic", [ v1; v2 ]))
and vof_combinator =
function
| Descendant -> Ocaml.VSum (("Descendant", []))
| GeneralSibling -> Ocaml.VSum (("GeneralSibling", []))
| AdjacentSibling -> Ocaml.VSum (("AdjacentSibling", []))
| Child -> Ocaml.VSum (("Child", []))
and vof_element =
function
| Tag v1 -> let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Tag", [ v1 ]))
| Universal -> Ocaml.VSum (("Universal", []))
and vof_qualifier =
function
| Id v1 -> let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Id", [ v1 ]))
| Class v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Class", [ v1 ]))
| Attr ((v1, v2)) ->
let v1 = Ocaml.vof_string v1
and v2 = vof_attr v2
in Ocaml.VSum (("Attr", [ v1; v2 ]))
| PseudoClass v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("PseudoClass", [ v1 ]))
| PseudoElement v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("PseudoElement", [ v1 ]))
| SelFunc ((v1, v2)) ->
let v1 = Ocaml.vof_string v1
and v2 = vof_functiont v2
in Ocaml.VSum (("SelFunc", [ v1; v2 ]))
and vof_functiont =
function
| Qualified v1 ->
let v1 = Ocaml.vof_list vof_qualifier v1
in Ocaml.VSum (("Qualified", [ v1 ]))
| Nth v1 -> let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Nth", [ v1 ]))
and vof_attr =
function
| AttrExists -> Ocaml.VSum (("AttrExists", []))
| AttrEquals v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrEquals", [ v1 ]))
| AttrIncludes v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrIncludes", [ v1 ]))
| AttrDashmatch v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrDashmatch", [ v1 ]))
| AttrPrefix v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrPrefix", [ v1 ]))
| AttrSuffix v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrSuffix", [ v1 ]))
| AttrSubstring v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("AttrSubstring", [ v1 ]))
let rec vof_declaration (v1, v2, v3) =
let v1 = vof_property v1
and v2 = vof_expression v2
and v3 = vof_important v3
in Ocaml.VTuple [ v1; v2; v3 ]
and vof_property v = Ocaml.vof_string v
and vof_important v = Ocaml.vof_bool v
and vof_expression v = Ocaml.vof_list vof_sentence v
and vof_sentence v = Ocaml.vof_list vof_term v
and vof_term =
function
| Calc v1 -> let v1 = vof_calc v1 in Ocaml.VSum (("Calc", [ v1 ]))
| String v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("String", [ v1 ]))
| Ident v1 ->
let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Ident", [ v1 ]))
| Uri v1 -> let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Uri", [ v1 ]))
| Hash v1 -> let v1 = Ocaml.vof_string v1 in Ocaml.VSum (("Hash", [ v1 ]))
| TermFunc ((v1, v2)) ->
let v1 = Ocaml.vof_string v1
and v2 = vof_expression v2
in Ocaml.VSum (("TermFunc", [ v1; v2 ]))
| Slash -> Ocaml.VSum (("Slash", []))
and vof_calc =
function
| Varref v1 -> let v1 = vof_variable v1 in Ocaml.VSum (("Varref", [ v1 ]))
| Quantity v1 ->
let v1 = vof_quantity v1 in Ocaml.VSum (("Quantity", [ v1 ]))
| Sum ((v1, v2)) ->
let v1 = vof_calc v1
and v2 = vof_calc v2
in Ocaml.VSum (("Sum", [ v1; v2 ]))
| Sub ((v1, v2)) ->
let v1 = vof_calc v1
and v2 = vof_calc v2
in Ocaml.VSum (("Sub", [ v1; v2 ]))
| Mul ((v1, v2)) ->
let v1 = vof_calc v1
and v2 = vof_calc v2
in Ocaml.VSum (("Mul", [ v1; v2 ]))
| Div ((v1, v2)) ->
let v1 = vof_calc v1
and v2 = vof_calc v2
in Ocaml.VSum (("Div", [ v1; v2 ]))
and vof_quantity (v1, v2) =
let v1 = Ocaml.vof_float v1
and v2 = Ocaml.vof_option Ocaml.vof_string v2
in Ocaml.VTuple [ v1; v2 ]
and vof_variable v = Ocaml.vof_string v
let vof_rule (v1, v2) =
let v1 = Ocaml.vof_list vof_selector v1
and v2 = Ocaml.vof_list vof_declaration v2
in Ocaml.VTuple [ v1; v2 ]
let vof_stylesheet v = Ocaml.vof_list vof_rule v
| |
a79f38a9ab0a29e3e563a7f910a5b50003ccc2673782c873fe1faf56135aebd8 | ANSSI-FR/xsvgen | message_debug.ml | (***********************************************************************)
(* *)
(* XML Schema Validator Generator *)
(* *)
(*                            (SafeRiver)                              *)
(*                                                                     *)
(*            Copyright 2012, ANSSI and SafeRiver.                     *)
(* *)
(***********************************************************************)
(* $Id: message_debug.ml 1704 2012-06-04 16:49:33Z maarek $ *)
(** {1 Message handling library, debug version} *)
(** Information *)
type info =
| INFO_file_XML_valid of File_in.t
| INFO_file_XML_invalid
| INFO_file_XML_notwf
| INFO_validating_element of Stringdata.t
| INFO_testing_element of Stringdata.t
| INFO_valid_element of Stringdata.t
| INFO_invalid_element of Stringdata.t
| INFO_fetching_td of int
| INFO_fetching_ed of int
| INFO_fetching_ad of int
| INFO_fetching_agd of int
| INFO_fetching_mgd of int
| INFO_document_valid
| INFO_pattern of Stringdata.t
| INFO_XSD_primary_file of File_in.t
| INFO_XSD_secondary_file of File_in.t
| INFO_validator_configuration of Xml.configuration
(* Process exit code used when the input document is valid. *)
let valid_exit_code = 10
(* Fallback exit code for errors with no more specific mapping in
   [error_exit_code]. *)
let default_error_exit_code = 13
let error_exit_code = function
(* 11: invalid but well-formed *)
| Error.RE_block_unknown _
| Error.RE_syntax _
| Error.RE_UTF8_invalid
| Error.RE_LIM_charprop_not_implemented
| Error.XSDT_skip_process_contents_risky
| Error.XSDT_version_not_supported
| Error.XSDT_version_1_0_not_supported
| Error.XSDT_version_missing
| Error.XSDT_qname_namespace_missing
| Error.XSDT_qname_not_well_formed
| Error.XSDT_mixed_values_contradictory
| Error.XSDT_min_max_inconsistent
| Error.XSDT_constructions_mutually_exclusive _
| Error.XSDT_extra_xsd_attribute_not_recognised
| Error.XSDT_extra_xsd_element_in _
| Error.XSDT_LIM_schema_location_required
| Error.XSDT_multiple_name_for_type_definition
| Error.XSDT_constructions_not_allowed _
| Error.XSDT_constructions_expected _
| Error.XSDT_occurrence_value_invalid
| Error.XSDT_LIM_id_idref_unicity_reference_not_implemented
| Error.XSDT_default_value_not_verified
| Error.XSDT_fixed_value_not_verified
| Error.XSV_input_file_not_provided
| Error.XSV_multiple_input_files_not_allowed
| Error.XSV_command_line_incorrect
| Error.XSV_fail
| Error.XSV_uri_not_matching
(* 11: invalid but well-formed *)
-> 11
(* 12: not well-formed *)
| Error.UTF8_invalid
| Error.UTF8_overlong
| Error.UTF8_first_byte_invalid
| Error.UTF8_range_split_invalid
| Error.Unicode_code_point_out_of_range
| Error.XMLL_expected _
| Error.XMLL_token_not_finished
| Error.XMLL_standalone_not_recognised
| Error.XMLL_version_missing
| Error.XMLL_version_not_supported
| Error.XMLL_empty_input
| Error.XMLL_not_well_formed _
| Error.XMLL_char_reference_not_well_formed
| Error.XMLP_opening_tag_missing
| Error.XMLP_content_after_root_not_allowed
| Error.XMLP_data_before_root_not_allowed
| Error.XMLP_closing_tag_missing
| Error.XMLP_tree_not_recognized
| Error.XMLP_opening_closing_tags_not_matching
| Error.XMLL_XMLP_attribute_definition_not_unique
| Error.XMLP_attribute_required _
| Error.XMLP_LIM_attribute_ignored _
| Error.XMLP_LIM_attribute_not_implemented _
| Error.XMLP_element_expected _
| Error.XMLP_LIM_element_ignored _
| Error.XMLP_LIM_element_not_implemented _
| Error.XMLP_LIM_xml_id_unicity_not_implemented
| Error.XMLP_xml_space_value_invalid
| Error.XMLNS_namespace_uri_reserved
| Error.XMLNS_namespace_prefix_reserved
| Error.XMLNS_XMLP_xml_attribute_not_recognised
| Error.XMLNS_namespace_prefix_not_declared
| Error.XMLNS_prefixed_declaration_value_empty
| Error.XMLNS_XSDT_uri_not_well_formed
| Error.XMLL_pi_target_not_allowed
| Error.XMLL_entity_reference_unknown
| Error.XSDL_multiple_definition _
| Error.XSDL_missing_definition _
| Error.XSDL_missing_definition_s
| Error.XSDL_recursive_definitions
| Error.XSDL_non_deterministic_grammar
| Error.XSDL_facet_not_applicable
| Error.XSDL_counter_not_applicable
| Error.XSDL_order_not_applicable
| Error.XSDL_XSV_LIM_notation_not_implemented
| Error.XSDL_XSV_LIM_attribute_group_wildcard_not_implemented
| Error.XSDL_LIM_order_not_implemented
| Error.XSDL_simple_type_derivation_invalid
| Error.XSDL_LIM_facet_not_implemented
| Error.XSDL_whitespace_values_not_combinable
| Error.XSDL_extension_of_anytype_risky
| Error.XSDL_complex_type_extension_invalid
| Error.XSDL_complex_type_restriction_invalid
| Error.XSDL_all_model_not_well_formed
| Error.XSDL_XSV_LIM_more_than_one_wildcard_in_all_model
| Error.XSDL_LIM_determinism_check_not_implemented
| Error.GEN_file_not_referenced
| Error.GEN_referenced_file_missing
| Error.GEN_import_effective_uris_mismatch
| Error.GEN_imports_mismatch
| Error.GEN_primary_uri_import_not_allowed
| Error.GEN_XSDT_primary_target_uri_missing
| Error.GEN_debug_mode
(* 12: not well-formed *)
-> 12
(* restrictions -> 13 *)
| Error.XMLP_SEC_comment
| Error.XMLP_SEC_pi
| Error.XMLP_SEC_cdata
| Error.XMLP_SEC_doctype
| Error.XMLL_encoding_missing
| Error.XMLL_encoding_not_supported
| Error.XMLL_SEC_ascii_char_reference_not_allowed
| Error.XSDT_SEC_appinfo_not_allowed
| Error.XSDL_SEC_no_root_element_declared
| Error.XSV_SEC_xsi_attribute_not_allowed
| Error.XSV_SEC_xsi_schemaLocation_ignored
| Error.XSV_SEC_xsi_attribute_ignored
(* restrictions -> 13 *)
-> default_error_exit_code
(* limitations -> 13 *)
| Error.LIM_nc_disallowed
(* limitations -> 13 *)
-> default_error_exit_code
(* 13: error *)
| Error.EXE _
| Error.A_function _
| Error.A_stringdata_comparison_output
| Error.A_empty_parser_stack
| Error.A_double_log_setting
| Error.A_empty_members_list
| Error.A_local_global_function_types
| Error.ML_max_string_reached
| Error.ML_max_int_reached
| Error.SD_subtraction_sources
| Error.SD_subtraction_limits
| Error.SD_subtraction_positions
| Error.SD_structure
| Error.SD_building_regexp_failed
| Error.SEC_length_limit_reached
| Error.SEC_depth_limit_reached
| Error.SEC_contents_limit_reached
| Error.SEC_attributes_limit_reached
| Error.F_wrong_input_file
| Error.F_wrong_input_extension
| Error.F_wrong_printing_file_extension
| Error.F_wrong_output_file_extension
| Error.F_wrong_output_basename
| Error.F_GEN_primary_file_not_provided
| Error.IO_open_out_file_fail _
| Error.IO_open_in_file_fail _
| Error.IO_input
| Error.IO_lib_dir_missing
| Error.IO_output_dir
| Error.XSV_uri_table_corrupted _
| Error.XSV_type_table_corrupted _
| Error.XSV_element_table_corrupted _
| Error.XSV_attribute_table_corrupted _
| Error.XSV_attribute_group_table_corrupted _
| Error.XSV_model_group_table_corrupted _
| Error.XSV_debug_mode
(* 13: error *)
-> default_error_exit_code
(* Exception carrying a structured error value. *)
exception E of Error.t
(* Severity/category of an emitted message. *)
type mtype =
| Error
| Warning
| Info
| Debug
| Todo
(* Destination formatter for log output (stderr by default), plus a guard
   recording whether it has already been set via [set_log]. *)
let log_formatter = ref Format.err_formatter
let log_formatter_setup = ref false
(** Register [formatter] as the log destination. May be called at most
    once; a second call raises [Error.E Error.A_double_log_setting]. *)
let set_log formatter =
  match !log_formatter_setup with
  | true ->
      raise (Error.E Error.A_double_log_setting)
  | false ->
      log_formatter := formatter;
      log_formatter_setup := true
(** {2 Printing library for debug} *)
module Pr_debug = struct

  (* Debug rendering of the XML declaration fields. *)
  let string_of_version = function
    | Xml.Version_1_0 -> "1.0"
    | Xml.Version_1_1 -> "1.1"

  let string_of_encoding = function
    | None -> "-"
    | Some Xml.Encoding_UTF8 -> "UTF-8"

  let string_of_standalone = function
    | None -> "-"
    | Some true -> "yes"
    | Some false -> "no"

  (* Debug view of a string datum: location description then contents. *)
  let string_of_sd sd =
    Format.sprintf "%s \"%s\""
      (Stringdata.string_debug sd)
      (Stringdata.to_string sd)

  let string_of_prefix_option f = function
    | None -> "_"
    | Some s -> f s

  (* Render an attribute map as concatenated "prefix:local value" items. *)
  let string_of_attrs m_attrs =
    Xml.M_unexp.fold
      (fun {Xml.prefix = p; Xml.local_name = l} value s ->
        (string_of_prefix_option string_of_sd p) ^
        ":" ^
        (string_of_sd l) ^
        " " ^
        (string_of_sd value) ^
        s)
      m_attrs
      ""

  (* One-line debug dump of an XML lexeme. *)
  let string_of_lexeme = function
    | Xml.Tag ({Xml.prefix = _; Xml.local_name = l},_) ->
        Format.sprintf "TAG: %s" (string_of_sd l)
    | Xml.Gat {Xml.prefix = _; Xml.local_name = l} ->
        Format.sprintf "GAT: %s" (string_of_sd l)
    | Xml.Taggat ({Xml.prefix = _; Xml.local_name = l},_) ->
        Format.sprintf "TAGGAT: %s" (string_of_sd l)
    | Xml.Data (t,b) ->
        Format.sprintf "DATA %s: %s"
          (if b then "t" else "f")
          (string_of_sd t)
    | Xml.Doctype t ->
        Format.sprintf "DOCTYPE: %s" (string_of_sd t)
    | Xml.PI (t1,t2) ->
        Format.sprintf "PI: %s - %s"
          (string_of_sd t1)
          (string_of_sd t2)

  let string_of_declaration = function
    | { Xml.version = v;
        Xml.encoding = e;
        Xml.standalone = s} ->
        Format.sprintf "Xmldecl %s %s %s"
          (string_of_version v)
          (string_of_encoding e)
          (string_of_standalone s)

  (* Debug names for the numeric identifiers of XSD components. *)
  let td_id = function
      Lxsd.Id_td i -> "td_id " ^ (string_of_int i)
  let ed_id = function
      Lxsd.Id_ed i -> "ed_id " ^ (string_of_int i)
  let mgd_id = function
      Lxsd.Id_mgd i -> "mgd_id " ^ (string_of_int i)

  (* Print the elements of [l] with [func], separated by [sep]. *)
  let rec fp_list f (func, l, sep) =
    match l with
    | [] -> ()
    | [h] ->
        Format.fprintf f "%a"
          func h
    | h :: t ->
        Format.fprintf f "%a%s@,%a"
          func h
          sep
          fp_list (func, t, sep)

  (* Print one byte range, then its continuation bytes via [bfun]. *)
  let fp_range f = function
    | (bfun,Utf8.R(c1,c2,b)) ->
        Format.fprintf f "[%02X-%02X]%a"
          (Char.code c1)
          (Char.code c2)
          bfun b

  let fp_b1 f = function
    | Utf8.B1 ->
        Format.fprintf f ""
  let fp_b2 f = function
    | Utf8.B2 l ->
        Format.fprintf f "@[<v>%a@]"
          fp_list ((fun f r -> fp_range f (fp_b1,r)),l,"")
  let fp_b3 f = function
    | Utf8.B3 l ->
        Format.fprintf f "@[<v>%a@]"
          fp_list ((fun f r -> fp_range f (fp_b2,r)),l,"")
  let fp_b4 f = function
    | Utf8.B4 l ->
        Format.fprintf f "@[<v>%a@]"
          fp_list ((fun f r -> fp_range f (fp_b3,r)),l,"")

  (* Print the four byte-length layers of a UTF-8 range set. *)
  let fp_utf8_range f = function
    | { Utf8.byte1 = l1;
        Utf8.byte2 = l2;
        Utf8.byte3 = l3;
        Utf8.byte4 = l4 } ->
        Format.fprintf f "@[<v>%s @[<v>%a@]@,%s @[<v>%a@]@,%s @[<v>%a@]@,%s @[<v>%a@]@]@."
          "1"
          fp_list ((fun f r -> fp_range f (fp_b1,r)),l1,"")
          "2"
          fp_list ((fun f r -> fp_range f (fp_b2,r)),l2,"")
          "3"
          fp_list ((fun f r -> fp_range f (fp_b3,r)),l3,"")
          "4"
          fp_list ((fun f r -> fp_range f (fp_b4,r)),l4,"")

  let string_of_range = function
    | Utf8.R(c1,c2,_) ->
        Format.sprintf "[\\%i-\\%i]"
          (Char.code c1)
          (Char.code c2)
  let string_of_ranges l =
    List.fold_left
      (fun s r -> s ^ (string_of_range r))
      ""
      l
  let string_of_utf8_range : Utf8.range -> string = function
    | { Utf8.byte1 = l1;
        Utf8.byte2 = l2;
        Utf8.byte3 = l3;
        Utf8.byte4 = l4 } ->
        "1 " ^ string_of_ranges l1 ^ "\n" ^
        "2 " ^ string_of_ranges l2 ^ "\n" ^
        "3 " ^ string_of_ranges l3 ^ "\n" ^
        "4 " ^ string_of_ranges l4 ^ "\n"

  (* Escape every byte of [s] as a backslashed 3-digit decimal code
     (4 output chars per input byte, checked against the system limit). *)
  let string_code_of_string s =
    let len = String.length s in
    let rlen = len * 4 in
    if rlen > Sys.max_string_length
    then raise (Error.E Error.ML_max_string_reached);
    let rec pr_char rs i =
      if i >= len
      then rs
      else
        let n =
          "\\" ^ (Format.sprintf "%03i" (Char.code s.[i]))
        in
        pr_char (rs ^ n) (i + 1)
    in
    pr_char "" 0

  (* Keep ASCII bytes as-is and render other bytes as two hex digits. *)
  let string_hex_of_string s =
    let len = String.length s in
    let rec calc_len i ri =
      if i >= len
      then ri
      else if Char.code s.[i] <= 0x7F
      then calc_len (i + 1) (ri + 1)
      else calc_len (i + 1) (ri + 2)
    in
    let rlen = calc_len 0 0 in
    if rlen > Sys.max_string_length
    then raise (Error.E Error.ML_max_string_reached);
    let rec pr_char rs i =
      if i >= len
      then rs
      else
        let n =
          if Char.code s.[i] <= 0x7F
          then
            (* Fix: was [String.make 0 s.[i]], which yields "" and silently
               dropped kept characters; [calc_len] counts 1 byte here. *)
            String.make 1 s.[i]
          else
            Format.sprintf "%02X" (Char.code s.[i])
        in
        pr_char (rs ^ n) (i + 1)
    in
    pr_char "" 0

  (* Keep printable ASCII as-is and render other bytes as two hex digits. *)
  let hex_string_of_string s =
    let len = String.length s in
    let rec calc_len i ri =
      if i >= len
      then ri
      else if s.[i] >= '\032' && s.[i] <= '\126'
      then calc_len (i + 1) (ri + 1)
      else calc_len (i + 1) (ri + 2)
    in
    let rlen = calc_len 0 0 in
    if rlen > Sys.max_string_length
    then raise (Error.E Error.ML_max_string_reached);
    let rec pr_char rs i =
      if i >= len
      then rs
      else
        let n =
          if s.[i] >= '\032' && s.[i] <= '\126'
          then
            (* Fix: was [String.make 0 s.[i]] (empty string), inconsistent
               with the 1 byte counted by [calc_len] for this case. *)
            String.make 1 s.[i]
          else
            Format.sprintf "%02X" (Char.code s.[i])
        in
        pr_char (rs ^ n) (i + 1)
    in
    pr_char "" 0
end
module Pr_error = struct
(** {2 Printing library for errors} *)
(** Name of an XML Well-formedness constraint. *)
let string_of_wfc = function
  | Error.WFC_uniqattspec -> "Unique Att Spec"
  | Error.WFC_GIMatch -> "Element Type Match"
  | Error.WFC_Legalchar -> "Legal Character"
(** Name of an XML Namespaces constraint. *)
let string_of_nsc = function
  | Error.NSC_xmlReserved -> "Reserved Prefixes and Namespace Names"
  | Error.NSC_NSDeclared -> "Prefix Declared"
  | Error.NSC_NoPrefixUndecl -> "No Prefix Undeclaring"
  | Error.NSC_AttrsUnique -> "Attributes Unique"
(** XSD name of a primitive datatype. *)
let primitive = function
  | Lxsd.PT_string -> "string"
  | Lxsd.PT_boolean -> "boolean"
  | Lxsd.PT_decimal -> "decimal"
  | Lxsd.PT_float -> "float"
  | Lxsd.PT_double -> "double"
  | Lxsd.PT_duration -> "duration"
  | Lxsd.PT_dateTime -> "dateTime"
  | Lxsd.PT_time -> "time"
  | Lxsd.PT_date -> "date"
  | Lxsd.PT_gYearMonth -> "gYearMonth"
  | Lxsd.PT_gYear -> "gYear"
  | Lxsd.PT_gMonthDay -> "gMonthDay"
  | Lxsd.PT_gDay -> "gDay"
  | Lxsd.PT_gMonth -> "gMonth"
  | Lxsd.PT_hexBinary -> "hexBinary"
  | Lxsd.PT_base64Binary -> "base64Binary"
  | Lxsd.PT_anyURI -> "anyURI"
  | Lxsd.PT_QName -> "QName"
  | Lxsd.PT_NOTATION -> "NOTATION"
(** XSD name of a non-primitive (derived) datatype. *)
let non_primitive = function
  | Lxsd.NPT_normalizedString -> "normalizedString"
  | Lxsd.NPT_token -> "token"
  | Lxsd.NPT_language -> "language"
  | Lxsd.NPT_IDREFS -> "IDREFS"
  | Lxsd.NPT_ENTITIES -> "ENTITIES"
  | Lxsd.NPT_NMTOKEN -> "NMTOKEN"
  | Lxsd.NPT_NMTOKENS -> "NMTOKENS"
  | Lxsd.NPT_Name -> "Name"
  | Lxsd.NPT_NCName -> "NCName"
  | Lxsd.NPT_ID -> "ID"
  | Lxsd.NPT_IDREF -> "IDREF"
  | Lxsd.NPT_ENTITY -> "ENTITY"
  | Lxsd.NPT_integer -> "integer"
  | Lxsd.NPT_nonPositiveInteger -> "nonPositiveInteger"
  | Lxsd.NPT_negativeInteger -> "negativeInteger"
  | Lxsd.NPT_long -> "long"
  | Lxsd.NPT_int -> "int"
  | Lxsd.NPT_short -> "short"
  | Lxsd.NPT_byte -> "byte"
  | Lxsd.NPT_nonNegativeInteger -> "nonNegativeInteger"
  | Lxsd.NPT_unsignedLong -> "unsignedLong"
  | Lxsd.NPT_unsignedInt -> "unsignedInt"
  | Lxsd.NPT_unsignedShort -> "unsignedShort"
  | Lxsd.NPT_unsignedByte -> "unsignedByte"
  | Lxsd.NPT_positiveInteger -> "positiveInteger"
  | Lxsd.NPT_yearMonthDuration -> "yearMonthDuration"
  | Lxsd.NPT_dayTimeDuration -> "dayTimeDuration"
  | Lxsd.NPT_dateTimeStamp -> "dateTimeStamp"
(** Optional primitive type rendered as " 'name'", or "" when absent. *)
let primitive_opt = function
  | Some p -> " '" ^ primitive p ^ "'"
  | None -> ""
(** Optional non-primitive type rendered as " 'name'", or "" when absent. *)
let non_primitive_opt = function
  | Some p -> " '" ^ non_primitive p ^ "'"
  | None -> ""
(** Human-readable name of a complex-/simple-type validation rule (CVC). *)
let string_of_cvc = function
  | Lxsd.CVC_accept -> "Element Sequence Accepted (Particle)"
  | Lxsd.CVC_accept_1 -> "Element Sequence Accepted (Particle) /1"
  | Lxsd.CVC_accept_2 -> "Element Sequence Accepted (Particle) /2"
  | Lxsd.CVC_accept_3 -> "Element Sequence Accepted (Particle) /3"
  | Lxsd.CVC_assess_attr -> "Schema-Validity Assessment (Attribute)"
  | Lxsd.CVC_assess_elt -> "Schema-Validity Assessment (Element)"
  | Lxsd.CVC_assess_elt_1 -> "Schema-Validity Assessment (Element) /1"
  | Lxsd.CVC_assess_elt_2 -> "Schema-Validity Assessment (Element) /2"
  | Lxsd.CVC_assess_elt_3 -> "Schema-Validity Assessment (Element) /3"
  | Lxsd.CVC_attribute -> "Attribute Locally Valid"
  | Lxsd.CVC_au -> "Attribute Locally Valid (Use)"
  | Lxsd.CVC_complex_content -> "complex content"
  | Lxsd.CVC_complex_content_1 -> "complex content /1"
  | Lxsd.CVC_complex_type -> "complex type"
  | Lxsd.CVC_complex_type_1_1 -> "complex type /1.1"
  | Lxsd.CVC_complex_type_1_2 -> "complex type /1.2"
  | Lxsd.CVC_complex_type_1_3 -> "complex type /1.3"
  | Lxsd.CVC_complex_type_1_4 -> "complex type /1.4"
  | Lxsd.CVC_complex_type_2 -> "complex type /2"
  | Lxsd.CVC_complex_type_3 -> "complex type /3"
  | Lxsd.CVC_complex_type_4 -> "complex type /4"
  | Lxsd.CVC_datatype_valid -> "datatype valid"
  | Lxsd.CVC_datatype_valid_1 -> "datatype valid /1"
  | Lxsd.CVC_datatype_valid_2_1 -> "datatype valid /2.1"
  | Lxsd.CVC_datatype_valid_2_2 -> "datatype valid /2.2"
  | Lxsd.CVC_datatype_valid_2_3 -> "datatype valid /2.3"
  | Lxsd.CVC_elt -> "Element Locally Valid (Element)"
  | Lxsd.CVC_elt_5_2_1 -> "Element Locally Valid (Element) /5.2.1"
  | Lxsd.CVC_elt_5_2_2 -> "Element Locally Valid (Element) /5.2.2"
  | Lxsd.CVC_enumeration_valid -> "enumeration valid"
  | Lxsd.CVC_facet_valid -> "Facet Valid"
  | Lxsd.CVC_length_valid -> "length valid"
  | Lxsd.CVC_maxLength_valid -> "maxLength valid"
  | Lxsd.CVC_minLength_valid -> "minLength valid"
  | Lxsd.CVC_minInclusive_valid -> "minInclusive valid"
  | Lxsd.CVC_maxInclusive_valid -> "maxInclusive valid"
  | Lxsd.CVC_minExclusive_valid -> "minExclusive valid"
  | Lxsd.CVC_maxExclusive_valid -> "maxExclusive valid"
  | Lxsd.CVC_model_group -> "model group"
  | Lxsd.CVC_model_group_all -> "model group /all"
  | Lxsd.CVC_model_group_choice -> "model group /choice"
  | Lxsd.CVC_model_group_sequence -> "model group /sequence"
  | Lxsd.CVC_particle -> "Element Sequence Valid"
  | Lxsd.CVC_pattern_valid -> "pattern valid"
  | Lxsd.CVC_simple_type_p opt_p -> "string valid" ^ (primitive_opt opt_p)
  | Lxsd.CVC_simple_type_np opt_p -> "string valid" ^ (non_primitive_opt opt_p)
  | Lxsd.CVC_type -> "Element Locally Valid (Type)"
  | Lxsd.CVC_type_3_1 -> "Element Locally Valid (Type) /3.1"
  | Lxsd.CVC_type_3_1_1 -> "Element Locally Valid (Type) /3.1.1"
  | Lxsd.CVC_type_3_1_2 -> "Element Locally Valid (Type) /3.1.2"
  | Lxsd.CVC_type_3_2 -> "Element Locally Valid (Type) /3.2"
  | Lxsd.CVC_wildcard -> "Item Valid (Wildcard)"
  | Lxsd.CVC_wildcard_1 -> "Item Valid (Wildcard) /1"
  | Lxsd.CVC_wildcard_name -> "Wildcard allows Expanded Name"
  | Lxsd.CVC_wildcard_name_1 -> "Wildcard allows Expanded Name /1"
  | Lxsd.CVC_wildcard_namespace -> "Wildcard allows Namespace Name"
  | Lxsd.CVC_wildcard_namespace_1 -> "Wildcard allows Namespace Name /1"
  | Lxsd.CVC_wildcard_namespace_2 -> "Wildcard allows Namespace Name /2"
  | Lxsd.CVC_wildcard_namespace_3 -> "Wildcard allows Namespace Name /3"
  | Lxsd.Skip_wildcard_valid -> "skip wildcard valid"
  | Lxsd.Strict_wildcard_valid -> "strict wildcard valid"
  | Lxsd.Lax_wildcard_valid -> "lax wildcard valid"
  | Lxsd.AnyType_valid -> "anyType valid"
  | Lxsd.Schema_root_valid -> "schema root valid"
(** Name of a Schema "Constraint on Schemas" (COS) rule. *)
let string_of_cos = function
  | Lxsd.COS_applicable_facets -> "Applicable Facets"
  | Lxsd.COS_st_restricts_2_1 -> "Derivation Valid (Restriction, Simple) /2.1"
(** Name of a Schema Representation Constraint (SRC). *)
let string_of_src = function
  | Lxsd.SRC_ct_5 -> "Complex Type Definition Representation OK /5"
  | Lxsd.SRC_import_1_1 -> "Import Constraints and Semantics /1.1"
  | Lxsd.SRC_import_3_1 -> "Import Constraints and Semantics /3.1"
(** Name of a Schema Component Constraint (SCC). *)
let string_of_scc = function
  | Lxsd.SCC_whiteSpace_valid_restriction -> "whiteSpace valid restriction"
  | Lxsd.SCC_length_valid_restriction -> "length valid restriction"
  | Lxsd.SCC_maxminExInclusive_valid_restriction -> "max/minEx/Inclusive valid restriction"
  | Lxsd.SCC_ct_extends -> "Derivation Valid (Extension)"
  | Lxsd.SCC_derivation_ok_restriction -> "Derivation Valid (Restriction, Complex)"
(** Return the string corresponding to an XML's Well-formedness constraint *)
let xml_wfc wfc =
  Format.sprintf " [WFC %s]" (string_of_wfc wfc)
(** Return the string corresponding to a namespace constraint *)
let ns_c nsc =
  Format.sprintf " [NSC %s]" (string_of_nsc nsc)
(** Return the string corresponding to a validation rule *)
let cvc r =
  Format.sprintf " [CVC %s]" (string_of_cvc r)
(** Return the string corresponding to a Schema component constraint *)
let cos r =
  Format.sprintf " [COS %s]" (string_of_cos r)
(** Return the string corresponding to a Schema representation constraint *)
let src r =
  Format.sprintf " [SRC %s]" (string_of_src r)
(** Return the string corresponding to a Schema Component Constraint *)
let scc r =
  Format.sprintf " [SCC %s]" (string_of_scc r)
(* Suffix marking a security-constraint diagnostic.
   NOTE(review): [String.copy] presumably returns a fresh string for
   legacy mutable-string safety (pre-4.06 OCaml) — confirm before
   modernising. *)
let sec () =
String.copy " (security constraint)"
(* Suffix marking a known-limitation diagnostic; see note on [sec]
   regarding [String.copy]. *)
let lim () =
String.copy " (limitation)"
(** Kind of XSD definition, as used in "multiple/missing … definition". *)
let string_of_xsdl_def = function
  | Error.XSDLDEF_type -> "type"
  | Error.XSDLDEF_attribute -> "attribute"
  | Error.XSDLDEF_element -> "element"
  | Error.XSDLDEF_attribute_group -> "attribute group"
  | Error.XSDLDEF_model_group -> "model group"
(** Human-readable message for every [Error.t] value.
    Fixes: "unexprected" typo in the [A_function] message and the
    doubled hyphen in the UTF-8 first-byte message. *)
let string_of_error = function
  | Error.LIM_nc_disallowed -> "XSD construction not implemented" ^ lim ()
  | Error.SEC_attributes_limit_reached -> "maximum attributes limit reached" ^ sec ()
  | Error.SEC_contents_limit_reached -> "maximum contents limit reached" ^ sec ()
  | Error.SEC_depth_limit_reached -> "maximum depth limit reached" ^ sec ()
  | Error.SEC_length_limit_reached -> "maximum length limit reached" ^ sec ()
  | Error.F_wrong_output_file_extension -> "wrong output file extension"
  | Error.F_wrong_output_basename -> "wrong output name"
  | Error.F_wrong_printing_file_extension -> "wrong printing file extension"
  | Error.F_wrong_input_extension -> "wrong input extension"
  | Error.F_wrong_input_file -> "wrong input file"
  | Error.F_GEN_primary_file_not_provided -> "XSD primary file not provided"
  | Error.IO_input -> "fail to read input"
  | Error.IO_open_in_file_fail s -> "fail to read file " ^ s
  | Error.IO_open_out_file_fail s -> "fail to write file " ^ s
  | Error.IO_lib_dir_missing -> "validator library directory missing"
  | Error.IO_output_dir -> "fail to prepare the output directory"
  | Error.SD_subtraction_positions -> "subtraction positions error"
  | Error.SD_subtraction_limits -> "subtraction limits error"
  | Error.SD_subtraction_sources -> "subtraction sources error"
  | Error.SD_structure -> "string data error"
  | Error.SD_building_regexp_failed -> "building regular expression failed"
  | Error.UTF8_invalid -> "invalid UTF-8 encoding"
  | Error.UTF8_overlong -> "overlong UTF-8 encoding"
  | Error.UTF8_first_byte_invalid ->
      (* message fix: was "4--bytes" *)
      "first byte for 1-, 2-, 3-, 4-bytes UTF-8 encoding invalid"
  | Error.UTF8_range_split_invalid -> "UTF-8 range split invalid"
  | Error.Unicode_code_point_out_of_range -> "Unicode code point out of range"
  | Error.XMLL_encoding_missing -> "XML encoding is missing, using UTF-8"
  | Error.XMLL_encoding_not_supported -> "encoding not supported"
  | Error.XMLL_token_not_finished -> "XML token not finished"
  | Error.XMLL_standalone_not_recognised -> "'standalone' value not recognised"
  | Error.XMLL_version_missing -> "version missing in XML declaration"
  | Error.XMLL_version_not_supported -> "XML version not supported"
  | Error.XMLL_pi_target_not_allowed -> "processing instructions target not allowed"
  | Error.XMLL_empty_input -> "empty input"
  | Error.XMLL_expected s -> "XML syntax error, " ^ s ^ " expected"
  | Error.XMLL_not_well_formed s -> "XML syntax error, " ^ s ^ " not well formed"
  | Error.XMLL_entity_reference_unknown -> "entity reference unknown"
  | Error.XMLL_char_reference_not_well_formed ->
      "char reference not well formed" ^ (xml_wfc Error.WFC_Legalchar)
  | Error.XMLL_SEC_ascii_char_reference_not_allowed ->
      "ASCII char reference not allowed" ^ sec ()
  | Error.XMLP_opening_tag_missing -> "opening tag missing"
  | Error.XMLP_SEC_doctype -> "DocType in file" ^ sec ()
  | Error.XMLP_SEC_cdata -> "CDATA in file" ^ sec ()
  | Error.XMLP_SEC_pi -> "PI in file" ^ sec ()
  | Error.XMLP_SEC_comment -> "comment in file" ^ sec ()
  | Error.XMLP_content_after_root_not_allowed -> "Invalid data/elements after root element"
  | Error.XMLP_data_before_root_not_allowed -> "Unauthorised data before the root node"
  | Error.XMLP_closing_tag_missing -> "Closing tag missing"
  | Error.XMLP_tree_not_recognized -> "Unrecognized tree in XML parser"
  | Error.XMLL_XMLP_attribute_definition_not_unique ->
      "Attribute not unique" ^ (xml_wfc Error.WFC_uniqattspec) ^ (ns_c Error.NSC_AttrsUnique)
  | Error.XMLP_opening_closing_tags_not_matching ->
      "opening closing tags not matching" ^ (xml_wfc Error.WFC_GIMatch)
  | Error.XMLP_attribute_required s -> "attribute '" ^ s ^ "' required"
  | Error.XMLP_LIM_attribute_ignored s -> "attribute '" ^ s ^ "' ignored" ^ lim ()
  | Error.XMLP_LIM_attribute_not_implemented s -> "attribute '" ^ s ^ "' not implemented" ^ lim ()
  | Error.XMLP_element_expected s -> "element '" ^ s ^ "' expected"
  | Error.XMLP_LIM_element_ignored s -> "element '" ^ s ^ "' ignored" ^ lim ()
  | Error.XMLP_LIM_element_not_implemented s -> "element '" ^ s ^ "' not implemented" ^ lim ()
  | Error.XMLP_LIM_xml_id_unicity_not_implemented -> "xml:id unicity not implemented" ^ lim ()
  | Error.XMLP_xml_space_value_invalid -> "xml:space value invalid"
  | Error.XMLNS_namespace_uri_reserved ->
      "namespace URI reserved" ^ (ns_c Error.NSC_xmlReserved)
  | Error.XMLNS_namespace_prefix_reserved ->
      "namespace prefix reserved" ^ (ns_c Error.NSC_xmlReserved)
  | Error.XMLNS_prefixed_declaration_value_empty ->
      "empty value for a prefixed namespace declaration not allowed" ^ (ns_c Error.NSC_NoPrefixUndecl)
  | Error.XMLNS_XMLP_xml_attribute_not_recognised -> "XML attribute not recognised"
  | Error.XMLNS_namespace_prefix_not_declared ->
      "namespace prefix not declared" ^ (ns_c Error.NSC_NSDeclared)
  | Error.XMLNS_XSDT_uri_not_well_formed -> "URI not well formed"
  | Error.XSDT_SEC_appinfo_not_allowed -> "element 'appinfo' not allowed" ^ sec ()
  | Error.XSDT_skip_process_contents_risky -> "XSD skip process contents is risky"
  | Error.XSDT_version_missing -> "XSD version is missing, using XSD version 1.1"
  | Error.XSDT_version_1_0_not_supported ->
      "XSD version 1.0 not supported, using XSD version 1.1 instead"
  | Error.XSDT_version_not_supported ->
      "XSD version not supported, using XSD version 1.1 instead"
  | Error.XSDT_qname_not_well_formed -> "QName not well formed"
  | Error.XSDT_qname_namespace_missing -> "QName's namespace missing"
  | Error.XSDT_mixed_values_contradictory ->
      "mixed values contradictory" ^ (src Lxsd.SRC_ct_5)
  | Error.XSDT_min_max_inconsistent -> "min max values inconsistent"
  | Error.XSDT_constructions_mutually_exclusive s -> s ^ " mutually exclusive"
  | Error.XSDT_extra_xsd_attribute_not_recognised -> "extra XSD attribute not recognised"
  | Error.XSDT_extra_xsd_element_in s -> "extra XSD element in " ^ s ^ " not recognised"
  | Error.XSDT_LIM_schema_location_required -> "schemaLocation required" ^ lim ()
  | Error.XSDT_multiple_name_for_type_definition -> "multiple name for a type definition"
  | Error.XSDT_constructions_not_allowed s -> s ^ " not allowed"
  | Error.XSDT_constructions_expected s -> s ^ " expected"
  | Error.XSDT_occurrence_value_invalid -> "occurrence value invalid"
  | Error.XSDT_LIM_id_idref_unicity_reference_not_implemented ->
      "ID unicity and IDREF, IDREFS references existence not implemented" ^ lim ()
  | Error.XSDT_default_value_not_verified -> "default value not verified"
  | Error.XSDT_fixed_value_not_verified -> "fixed value not verified"
  | Error.RE_syntax s -> "regular expression syntax error '" ^ s ^ "'"
  | Error.RE_block_unknown s -> "unknown Unicode block '" ^ s ^ "'"
  | Error.RE_UTF8_invalid -> "invalid UTF-8 encoding in regular expression"
  | Error.RE_LIM_charprop_not_implemented -> "charProp not implemented" ^ lim ()
  | Error.XSDL_multiple_definition xsdl_def ->
      "multiple " ^ string_of_xsdl_def xsdl_def ^ " definition"
  | Error.XSDL_missing_definition xsdl_def ->
      "missing " ^ string_of_xsdl_def xsdl_def ^ " definition"
  | Error.XSDL_missing_definition_s -> "missing definition(s) in XSD grammar"
  | Error.XSDL_recursive_definitions -> "recursive definitions in XSD grammar"
  | Error.XSDL_non_deterministic_grammar -> "non deterministic XSD grammar"
  | Error.XSDL_facet_not_applicable ->
      "facet not applicable" ^ (cos Lxsd.COS_applicable_facets)
  | Error.XSDL_counter_not_applicable ->
      "length counter not applicable" ^ (scc Lxsd.SCC_length_valid_restriction)
  | Error.XSDL_order_not_applicable ->
      "order not applicable" ^ (scc Lxsd.SCC_maxminExInclusive_valid_restriction)
  | Error.XSDL_XSV_LIM_notation_not_implemented -> "notation not implemented" ^ lim ()
  | Error.XSDL_XSV_LIM_attribute_group_wildcard_not_implemented ->
      "attribute group wildcard not implemented" ^ lim ()
  | Error.XSDL_LIM_order_not_implemented -> "order not implemented" ^ lim ()
  | Error.XSDL_simple_type_derivation_invalid ->
      "simple type derivation invalid" ^ (cos Lxsd.COS_st_restricts_2_1)
  | Error.XSDL_LIM_facet_not_implemented -> "facet not implemented" ^ lim ()
  | Error.XSDL_whitespace_values_not_combinable ->
      "whitespace values not combinable" ^ (scc Lxsd.SCC_whiteSpace_valid_restriction)
  | Error.XSDL_extension_of_anytype_risky -> "extending anyType is risky"
  | Error.XSDL_complex_type_extension_invalid ->
      "complex type extension" ^ (scc Lxsd.SCC_ct_extends)
  | Error.XSDL_complex_type_restriction_invalid ->
      "complex type restriction" ^ (scc Lxsd.SCC_derivation_ok_restriction)
  | Error.XSDL_SEC_no_root_element_declared -> "no root element declared" ^ sec ()
  | Error.XSDL_all_model_not_well_formed -> "all-model group not well formed"
  | Error.XSDL_XSV_LIM_more_than_one_wildcard_in_all_model ->
      "more than one wildcard in all-model" ^ lim ()
  | Error.XSDL_LIM_determinism_check_not_implemented ->
      "determinism check not implemented" ^ lim ()
  | Error.GEN_referenced_file_missing -> "referenced XSD's file missing"
  | Error.GEN_file_not_referenced -> "XSD file not referenced"
  | Error.GEN_import_effective_uris_mismatch ->
      "imported and effective target URI do not match" ^ (src Lxsd.SRC_import_3_1)
  | Error.GEN_imports_mismatch -> "importations do not match"
  | Error.GEN_primary_uri_import_not_allowed ->
      "importation of primary URI not allowed" ^ (src Lxsd.SRC_import_1_1)
  | Error.GEN_XSDT_primary_target_uri_missing -> "primary target URI missing"
  | Error.GEN_debug_mode -> "generating debug mode validator"
  | Error.XSV_input_file_not_provided -> "input file not provided"
  | Error.XSV_multiple_input_files_not_allowed -> "multiple input files not allowed"
  | Error.XSV_command_line_incorrect -> "command line incorrect"
  | Error.XSV_fail -> "invalid document"
  | Error.XSV_uri_not_matching -> "invalid document (URI not matching)"
  | Error.XSV_uri_table_corrupted i ->
      "validation table corrupted (URI " ^ string_of_int i ^ " missing)"
  | Error.XSV_type_table_corrupted i ->
      "validation table corrupted (type " ^ string_of_int i ^ " missing)"
  | Error.XSV_element_table_corrupted i ->
      "validation table corrupted (element " ^ string_of_int i ^ " missing)"
  | Error.XSV_attribute_table_corrupted i ->
      "validation table corrupted (attribute " ^ string_of_int i ^ " missing)"
  | Error.XSV_attribute_group_table_corrupted i ->
      "validation table corrupted (attribute group " ^ string_of_int i ^ " missing)"
  | Error.XSV_model_group_table_corrupted i ->
      "validation table corrupted (model group " ^ string_of_int i ^ " missing)"
  | Error.XSV_SEC_xsi_attribute_not_allowed ->
      "XML Schema Instance attribute not allowed" ^ sec ()
  | Error.XSV_SEC_xsi_schemaLocation_ignored ->
      "XML Schema Instance's schemaLocation attribute ignored" ^ sec ()
  | Error.XSV_SEC_xsi_attribute_ignored ->
      "XML Schema Instance attribute ignored" ^ sec ()
  | Error.XSV_debug_mode -> "validator in debug mode"
  (* Internal assertion-style errors share one generic message. *)
  | Error.A_empty_parser_stack
  | Error.A_stringdata_comparison_output
  | Error.A_double_log_setting
  | Error.A_empty_members_list
  | Error.A_local_global_function_types ->
      "unexpected error"
  | Error.A_function qfun ->
      (* typo fix: was "unexprected" *)
      "unexpected error (" ^ qfun ^ ")"
  | Error.ML_max_string_reached
  | Error.ML_max_int_reached -> "system limit reached"
  | Error.EXE (s,(Invalid_argument s')) ->
      "execution error, " ^ s ^ ", invalid argument " ^ s'
  | Error.EXE (s,_e) ->
      "execution error, " ^ s ^ ", unexpected exception"
(** [%a]-compatible printer for [Error.t]. *)
let fprintf_error f e =
  Format.fprintf f "%s" (string_of_error e)
(** Render an optional limit; [None] means no bound. *)
let string_of_max = function
  | Some i -> string_of_int i
  | None -> "unbounded"
(** Render an error-status flag: [true] rejects, [false] ignores. *)
let string_of_status flag =
  if flag then "rejected" else "ignored"
(** Render the handling policy for XML Schema Instance attributes. *)
let string_of_xsi_status = function
  | Xml.XSI_reject_all -> "rejected"
  | Xml.XSI_ignore_schemaLocation_only -> "rejected (except schemaLocation)"
  | Xml.XSI_ignore_all -> "ignored"
(** Human-readable text for an informative [info] message.
    Fix: the configuration line was missing the space after "DocType"
    (it printed e.g. "DocTyperejected", unlike the sibling fields). *)
let string_of_info = function
  | INFO_file_XML_valid in_name ->
      "file " ^ File_in.to_string in_name ^ " XML valid"
  | INFO_file_XML_invalid -> "file XML invalid"
  | INFO_file_XML_notwf -> "file XML not well-formed"
  | INFO_validating_element sd ->
      "validating element '" ^ Stringdata.to_string sd ^ "'"
  | INFO_testing_element sd ->
      "testing element '" ^ Stringdata.to_string sd ^ "'"
  | INFO_valid_element sd ->
      "valid element '" ^ Stringdata.to_string sd ^ "'"
  | INFO_invalid_element sd ->
      "invalid element '" ^ Stringdata.to_string sd ^ "'"
  | INFO_fetching_td i -> "fetching type definition " ^ string_of_int i
  | INFO_fetching_ed i -> "fetching element definition " ^ string_of_int i
  | INFO_fetching_ad i -> "fetching attribute definition " ^ string_of_int i
  | INFO_fetching_agd i -> "fetching attribute group definition " ^ string_of_int i
  | INFO_fetching_mgd i -> "fetching model group definition " ^ string_of_int i
  | INFO_document_valid -> "document valid"
  | INFO_pattern re ->
      "regular expression in pattern " ^ (Stringdata.to_code_escaped_string re)
  | INFO_XSD_primary_file in_name ->
      "primary XSD file " ^ File_in.to_string in_name
  | INFO_XSD_secondary_file in_name ->
      "secondary XSD file " ^ File_in.to_string in_name
  | INFO_validator_configuration
      { Xml.max_attributes = m_a;
        Xml.max_contents = m_c;
        Xml.max_depth = m_d;
        Xml.max_length = m_l;
        Xml.error_status =
          { Xml.doctype = dt;
            Xml.cdata = cd;
            Xml.pi = pi;
            Xml.comment = ct;
            Xml.ascii_ref = ar;
            Xml.xsi = xsi_status } } ->
      "configuration (" ^
      "max-attributes " ^ string_of_max m_a ^ ", " ^
      "max-contents " ^ string_of_max m_c ^ ", " ^
      "max-depth " ^ string_of_max m_d ^ ", " ^
      "max-length " ^ string_of_max m_l ^ ", " ^
      "DocType " ^ string_of_status dt ^ ", " ^
      "CData " ^ string_of_status cd ^ ", " ^
      "PI " ^ string_of_status pi ^ ", " ^
      "comment " ^ string_of_status ct ^ ", " ^
      "ASCII references " ^ string_of_status ar ^ ", " ^
      "XSI " ^ string_of_xsi_status xsi_status ^ ")"
(** [%a]-compatible printer for [info] messages. *)
let fprintf_info f i =
  Format.fprintf f "%s" (string_of_info i)
end
(* Print one log line "[severity] message : location" on [!log_formatter].
   [descr_fun] is a Format %a-printer applied to [descr]; the location
   suffix is omitted when [loc] is empty. *)
let pr_fun mtype loc descr_fun descr =
Format.fprintf !log_formatter "[%s] %a%s@."
(match mtype with
| Error -> "error"
| Warning -> "warning"
| Info -> "info"
| Debug -> "DEBUG"
| Todo -> "TODO")
descr_fun descr
(if loc="" then loc else " : " ^ loc)
(* Log [err] at Error severity with location [loc], then raise [E err]. *)
let error_fun loc err =
pr_fun Error loc Pr_error.fprintf_error err;
raise (E err)
(* Log and raise [err] without location information. *)
let error err =
error_fun "" err
(* Log and raise [err], locating it at string datum [s]. *)
let error_at s err =
error_fun
(Stringdata.string_loc s)
err
(* Log [err] at Warning severity with location [loc]; does not raise. *)
let warning_fun loc err =
pr_fun Warning loc Pr_error.fprintf_error err
(* Warning without location information. *)
let warning err =
warning_fun "" err
(* Warning located at string datum [s]. *)
let warning_at s err =
warning_fun
(Stringdata.string_loc s)
err
(* Log [message] at Info severity with location [loc]. *)
let info_fun loc message =
pr_fun Info loc Pr_error.fprintf_info message
(* Info message without location information. *)
let info message =
info_fun "" message
(* Info message located at string datum [s]. *)
let info_at s message =
info_fun (Stringdata.string_loc s) message
(* Warn that validation rule [cvc] failed (no location). *)
let warning_cvc cvc =
pr_fun Warning ""
(fun f -> Format.fprintf f "%s")
("failed validation rule" ^ Pr_error.cvc cvc)
(* Warn that validation rule [cvc] failed at string datum [s]. *)
let warning_cvc_at s cvc =
pr_fun Warning (Stringdata.string_loc s)
(fun f -> Format.fprintf f "%s")
("failed validation rule" ^ Pr_error.cvc cvc)
(* Trace that validation rule [cvc] was applied (no location). *)
let info_cvc cvc =
pr_fun Info ""
(fun f -> Format.fprintf f "%s")
("validation rule" ^ Pr_error.cvc cvc)
(* Trace that validation rule [cvc] was applied at string datum [s]. *)
let info_cvc_at s cvc =
pr_fun Info (Stringdata.string_loc s)
(fun f -> Format.fprintf f "%s")
("validation rule" ^ Pr_error.cvc cvc)
| null | https://raw.githubusercontent.com/ANSSI-FR/xsvgen/3c2b5e43e7adcb856a3a2aa01bfc039bf3c6459b/lib/message_debug.ml | ocaml | *********************************************************************
(*********************************************************************)
(*                XML Schema Validator Generator                     *)
(*                Manuel Maarek (SafeRiver)                          *)
(*                                                                   *)
(*   Copyright 2012, ANSSI and SafeRiver.                            *)
(*********************************************************************)
(* $Id: message_debug.ml 1704 2012-06-04 16:49:33Z maarek $ *)
(** {1 Message handling library, debug version} *)
(* Informative (non-error) log messages emitted by the generator and
   by generated validators. *)
type info =
| INFO_file_XML_valid of File_in.t
| INFO_file_XML_invalid
| INFO_file_XML_notwf
| INFO_validating_element of Stringdata.t
| INFO_testing_element of Stringdata.t
| INFO_valid_element of Stringdata.t
| INFO_invalid_element of Stringdata.t
| INFO_fetching_td of int
| INFO_fetching_ed of int
| INFO_fetching_ad of int
| INFO_fetching_agd of int
| INFO_fetching_mgd of int
| INFO_document_valid
| INFO_pattern of Stringdata.t
| INFO_XSD_primary_file of File_in.t
| INFO_XSD_secondary_file of File_in.t
| INFO_validator_configuration of Xml.configuration
(* Process exit code for a successfully validated document. *)
let valid_exit_code = 10
(* Fallback exit code used by [error_exit_code] for restrictions,
   limitations and internal errors. *)
let default_error_exit_code = 13
(** Map an error to the validator's process exit code:
    11 = document invalid but well-formed, 12 = not well-formed,
    [default_error_exit_code] (13) = security restriction, limitation
    or internal error. (Restores the comment delimiters lost in this
    region, which left bare text inside the match.) *)
let error_exit_code = function
  (* 11: invalid but well-formed *)
  | Error.RE_block_unknown _ | Error.RE_syntax _
  | Error.RE_UTF8_invalid | Error.RE_LIM_charprop_not_implemented
  | Error.XSDT_skip_process_contents_risky
  | Error.XSDT_version_not_supported
  | Error.XSDT_version_1_0_not_supported
  | Error.XSDT_version_missing
  | Error.XSDT_qname_namespace_missing
  | Error.XSDT_qname_not_well_formed
  | Error.XSDT_mixed_values_contradictory
  | Error.XSDT_min_max_inconsistent
  | Error.XSDT_constructions_mutually_exclusive _
  | Error.XSDT_extra_xsd_attribute_not_recognised
  | Error.XSDT_extra_xsd_element_in _
  | Error.XSDT_LIM_schema_location_required
  | Error.XSDT_multiple_name_for_type_definition
  | Error.XSDT_constructions_not_allowed _
  | Error.XSDT_constructions_expected _
  | Error.XSDT_occurrence_value_invalid
  | Error.XSDT_LIM_id_idref_unicity_reference_not_implemented
  | Error.XSDT_default_value_not_verified
  | Error.XSDT_fixed_value_not_verified
  | Error.XSV_input_file_not_provided
  | Error.XSV_multiple_input_files_not_allowed
  | Error.XSV_command_line_incorrect
  | Error.XSV_fail
  | Error.XSV_uri_not_matching
    -> 11
  (* 12: not well-formed *)
  | Error.UTF8_invalid | Error.UTF8_overlong
  | Error.UTF8_first_byte_invalid | Error.UTF8_range_split_invalid
  | Error.Unicode_code_point_out_of_range
  | Error.XMLL_expected _
  | Error.XMLL_token_not_finished
  | Error.XMLL_standalone_not_recognised
  | Error.XMLL_version_missing
  | Error.XMLL_version_not_supported
  | Error.XMLL_empty_input
  | Error.XMLL_not_well_formed _
  | Error.XMLL_char_reference_not_well_formed
  | Error.XMLP_opening_tag_missing
  | Error.XMLP_content_after_root_not_allowed
  | Error.XMLP_data_before_root_not_allowed
  | Error.XMLP_closing_tag_missing
  | Error.XMLP_tree_not_recognized
  | Error.XMLP_opening_closing_tags_not_matching
  | Error.XMLL_XMLP_attribute_definition_not_unique
  | Error.XMLP_attribute_required _
  | Error.XMLP_LIM_attribute_ignored _
  | Error.XMLP_LIM_attribute_not_implemented _
  | Error.XMLP_element_expected _
  | Error.XMLP_LIM_element_ignored _
  | Error.XMLP_LIM_element_not_implemented _
  | Error.XMLP_LIM_xml_id_unicity_not_implemented
  | Error.XMLP_xml_space_value_invalid
  | Error.XMLNS_namespace_uri_reserved
  | Error.XMLNS_namespace_prefix_reserved
  | Error.XMLNS_XMLP_xml_attribute_not_recognised
  | Error.XMLNS_namespace_prefix_not_declared
  | Error.XMLNS_prefixed_declaration_value_empty
  | Error.XMLNS_XSDT_uri_not_well_formed
  | Error.XMLL_pi_target_not_allowed
  | Error.XMLL_entity_reference_unknown
  | Error.XSDL_multiple_definition _
  | Error.XSDL_missing_definition _
  | Error.XSDL_missing_definition_s
  | Error.XSDL_recursive_definitions
  | Error.XSDL_non_deterministic_grammar
  | Error.XSDL_facet_not_applicable
  | Error.XSDL_counter_not_applicable
  | Error.XSDL_order_not_applicable
  | Error.XSDL_XSV_LIM_notation_not_implemented
  | Error.XSDL_XSV_LIM_attribute_group_wildcard_not_implemented
  | Error.XSDL_LIM_order_not_implemented
  | Error.XSDL_simple_type_derivation_invalid
  | Error.XSDL_LIM_facet_not_implemented
  | Error.XSDL_whitespace_values_not_combinable
  | Error.XSDL_extension_of_anytype_risky
  | Error.XSDL_complex_type_extension_invalid
  | Error.XSDL_complex_type_restriction_invalid
  | Error.XSDL_all_model_not_well_formed
  | Error.XSDL_XSV_LIM_more_than_one_wildcard_in_all_model
  | Error.XSDL_LIM_determinism_check_not_implemented
  | Error.GEN_file_not_referenced
  | Error.GEN_referenced_file_missing
  | Error.GEN_import_effective_uris_mismatch
  | Error.GEN_imports_mismatch
  | Error.GEN_primary_uri_import_not_allowed
  | Error.GEN_XSDT_primary_target_uri_missing
  | Error.GEN_debug_mode
    -> 12
  (* restrictions -> 13 *)
  | Error.XMLP_SEC_comment
  | Error.XMLP_SEC_pi
  | Error.XMLP_SEC_cdata
  | Error.XMLP_SEC_doctype
  | Error.XMLL_encoding_missing
  | Error.XMLL_encoding_not_supported
  | Error.XMLL_SEC_ascii_char_reference_not_allowed
  | Error.XSDT_SEC_appinfo_not_allowed
  | Error.XSDL_SEC_no_root_element_declared
  | Error.XSV_SEC_xsi_attribute_not_allowed
  | Error.XSV_SEC_xsi_schemaLocation_ignored
  | Error.XSV_SEC_xsi_attribute_ignored
    -> default_error_exit_code
  (* limitations -> 13 *)
  | Error.LIM_nc_disallowed
    -> default_error_exit_code
  (* 13: internal / system error *)
  | Error.EXE _
  | Error.A_function _
  | Error.A_stringdata_comparison_output
  | Error.A_empty_parser_stack
  | Error.A_double_log_setting
  | Error.A_empty_members_list
  | Error.A_local_global_function_types
  | Error.ML_max_string_reached
  | Error.ML_max_int_reached
  | Error.SD_subtraction_sources
  | Error.SD_subtraction_limits
  | Error.SD_subtraction_positions
  | Error.SD_structure
  | Error.SD_building_regexp_failed
  | Error.SEC_length_limit_reached
  | Error.SEC_depth_limit_reached
  | Error.SEC_contents_limit_reached
  | Error.SEC_attributes_limit_reached
  | Error.F_wrong_input_file
  | Error.F_wrong_input_extension
  | Error.F_wrong_printing_file_extension
  | Error.F_wrong_output_file_extension
  | Error.F_wrong_output_basename
  | Error.F_GEN_primary_file_not_provided
  | Error.IO_open_out_file_fail _
  | Error.IO_open_in_file_fail _
  | Error.IO_input
  | Error.IO_lib_dir_missing
  | Error.IO_output_dir
  | Error.XSV_uri_table_corrupted _
  | Error.XSV_type_table_corrupted _
  | Error.XSV_element_table_corrupted _
  | Error.XSV_attribute_table_corrupted _
  | Error.XSV_attribute_group_table_corrupted _
  | Error.XSV_model_group_table_corrupted _
  | Error.XSV_debug_mode
    -> default_error_exit_code
(* Exception carrying a structured diagnostic from the [Error] module. *)
exception E of Error.t
(* Severity class of a log message. *)
type mtype =
| Error
| Warning
| Info
| Debug
| Todo
let log_formatter = ref Format.err_formatter
let log_formatter_setup = ref false
let set_log formatter =
if !log_formatter_setup
then
raise (Error.E Error.A_double_log_setting)
else
begin
log_formatter := formatter;
log_formatter_setup := true;
end
* { 2 Printing library for debug }
module Pr_debug = struct
let string_of_version = function
| Xml.Version_1_0 -> "1.0"
| Xml.Version_1_1 -> "1.1"
let string_of_encoding = function
| None -> "-"
| Some Xml.Encoding_UTF8 -> "UTF-8"
let string_of_standalone = function
| None -> "-"
| Some true -> "yes"
| Some false -> "no"
let string_of_sd sd =
Format.sprintf "%s \"%s\""
(Stringdata.string_debug sd)
(Stringdata.to_string sd)
let string_of_prefix_option f = function
| None -> "_"
| Some s -> f s
let string_of_attrs m_attrs =
Xml.M_unexp.fold
(fun {Xml.prefix =p; Xml.local_name = l} value s ->
(string_of_prefix_option string_of_sd p) ^
":" ^
(string_of_sd l) ^
" " ^
(string_of_sd value) ^
s)
m_attrs
""
let string_of_lexeme = function
| Xml.Tag ({Xml.prefix =_; Xml.local_name = l},_) ->
Format.sprintf "TAG: %s"
(string_of_sd l)
| Xml.Gat {Xml.prefix =_; Xml.local_name = l} ->
Format.sprintf "GAT: %s"
(string_of_sd l)
| Xml.Taggat ({Xml.prefix =_; Xml.local_name = l},_) ->
Format.sprintf "TAGGAT: %s"
(string_of_sd l)
| Xml.Data (t,b) ->
Format.sprintf "DATA %s: %s"
(if b then "t" else "f")
(string_of_sd t)
| Xml.Doctype t ->
Format.sprintf
"DOCTYPE: %s"
(string_of_sd t)
| Xml.PI (t1,t2) ->
Format.sprintf "PI: %s - %s"
(string_of_sd t1)
(string_of_sd t2)
let string_of_declaration = function
| { Xml.version = v;
Xml.encoding = e;
Xml.standalone = s} ->
Format.sprintf "Xmldecl %s %s %s"
(string_of_version v)
(string_of_encoding e)
(string_of_standalone s)
let td_id = function
Lxsd.Id_td i -> "td_id " ^ (string_of_int i)
let ed_id = function
Lxsd.Id_ed i -> "ed_id " ^ (string_of_int i)
let mgd_id = function
Lxsd.Id_mgd i -> "mgd_id " ^ (string_of_int i)
let rec fp_list f (func, l, sep) =
match l with
| [] -> ()
| [h] ->
Format.fprintf f "%a"
func h
| h :: t ->
Format.fprintf f "%a%s@,%a"
func h
sep
fp_list (func, t, sep)
let fp_range f = function
| (bfun,Utf8.R(c1,c2,b)) ->
Format.fprintf f "[%02X-%02X]%a"
(Char.code c1)
(Char.code c2)
bfun b
let fp_b1 f = function
| Utf8.B1 ->
Format.fprintf f ""
let fp_b2 f = function
| Utf8.B2 l ->
Format.fprintf f "@[<v>%a@]"
fp_list ((fun f r -> fp_range f (fp_b1,r)),l,"")
let fp_b3 f = function
| Utf8.B3 l ->
Format.fprintf f "@[<v>%a@]"
fp_list ((fun f r -> fp_range f (fp_b2,r)),l,"")
let fp_b4 f = function
| Utf8.B4 l ->
Format.fprintf f "@[<v>%a@]"
fp_list ((fun f r -> fp_range f (fp_b3,r)),l,"")
let fp_utf8_range f = function
| { Utf8.byte1 = l1;
Utf8.byte2 = l2;
Utf8.byte3 = l3;
Utf8.byte4 = l4 } ->
Format.fprintf f "@[<v>%s @[<v>%a@]@,%s @[<v>%a@]@,%s @[<v>%a@]@,%s @[<v>%a@]@]@."
"1"
fp_list ((fun f r -> fp_range f (fp_b1,r)),l1,"")
"2"
fp_list ((fun f r -> fp_range f (fp_b2,r)),l2,"")
"3"
fp_list ((fun f r -> fp_range f (fp_b3,r)),l3,"")
"4"
fp_list ((fun f r -> fp_range f (fp_b4,r)),l4,"")
let string_of_range = function
| Utf8.R(c1,c2,_) ->
Format.sprintf "[\\%i-\\%i]"
(Char.code c1)
(Char.code c2)
let string_of_ranges l =
List.fold_left
(fun s r -> s ^ (string_of_range r))
""
l
let string_of_utf8_range : Utf8.range -> string = function
| { Utf8.byte1 = l1;
Utf8.byte2 = l2;
Utf8.byte3 = l3;
Utf8.byte4 = l4 } ->
"1 " ^ string_of_ranges l1 ^ "\n" ^
"2 " ^ string_of_ranges l2 ^ "\n" ^
"3 " ^ string_of_ranges l3 ^ "\n" ^
"4 " ^ string_of_ranges l4 ^ "\n"
let string_code_of_string s =
let len = String.length s in
let rlen = len * 4 in
if rlen > Sys.max_string_length
then raise (Error.E Error.ML_max_string_reached);
let rec pr_char rs i =
if i >= len
then rs
else
let n =
"\\" ^ (Format.sprintf "%03i" (Char.code s.[i]))
in
pr_char (rs ^ n) (i + 1)
in
pr_char "" 0
let string_hex_of_string s =
let len = String.length s in
let rec calc_len i ri =
if i >= len
then ri
else if Char.code s.[i] <= 0x7F
then calc_len (i + 1) (ri + 1)
else calc_len (i + 1) (ri + 2)
in
let rlen = calc_len 0 0 in
if rlen > Sys.max_string_length
then raise (Error.E Error.ML_max_string_reached);
let rec pr_char rs i =
if i >= len
then rs
else
let n =
if Char.code s.[i] <= 0x7F
then
String.make 0 s.[i]
else
Format.sprintf "%02X" (Char.code s.[i])
in
pr_char (rs ^ n) (i + 1)
in
pr_char "" 0
let hex_string_of_string s =
let len = String.length s in
let rec calc_len i ri =
if i >= len
then ri
else if s.[i] >= '\032' && s.[i] <= '\126'
then calc_len (i + 1) (ri + 1)
else calc_len (i + 1) (ri + 2)
in
let rlen = calc_len 0 0 in
if rlen > Sys.max_string_length
then raise (Error.E Error.ML_max_string_reached);
let rec pr_char rs i =
if i >= len
then rs
else
let n =
if s.[i] >= '\032' && s.[i] <= '\126'
then
String.make 0 s.[i]
else
Format.sprintf "%02X" (Char.code s.[i])
in
pr_char (rs ^ n) (i + 1)
in
pr_char "" 0
end
module Pr_error = struct
* { 2 Printing library for errors }
let string_of_wfc = function
| Error.WFC_uniqattspec ->
"Unique Att Spec"
| Error.WFC_GIMatch ->
"Element Type Match"
| Error.WFC_Legalchar ->
"Legal Character"
let string_of_nsc = function
| Error.NSC_xmlReserved ->
"Reserved Prefixes and Namespace Names"
| Error.NSC_NSDeclared ->
"Prefix Declared"
| Error.NSC_NoPrefixUndecl ->
"No Prefix Undeclaring"
| Error.NSC_AttrsUnique ->
"Attributes Unique"
let primitive = function
| Lxsd.PT_string -> "string"
| Lxsd.PT_boolean -> "boolean"
| Lxsd.PT_decimal -> "decimal"
| Lxsd.PT_float -> "float"
| Lxsd.PT_double -> "double"
| Lxsd.PT_duration -> "duration"
| Lxsd.PT_dateTime -> "dateTime"
| Lxsd.PT_time -> "time"
| Lxsd.PT_date -> "date"
| Lxsd.PT_gYearMonth -> "gYearMonth"
| Lxsd.PT_gYear -> "gYear"
| Lxsd.PT_gMonthDay -> "gMonthDay"
| Lxsd.PT_gDay -> "gDay"
| Lxsd.PT_gMonth -> "gMonth"
| Lxsd.PT_hexBinary -> "hexBinary"
| Lxsd.PT_base64Binary -> "base64Binary"
| Lxsd.PT_anyURI -> "anyURI"
| Lxsd.PT_QName -> "QName"
| Lxsd.PT_NOTATION -> "NOTATION"
let non_primitive = function
| Lxsd.NPT_normalizedString -> "normalizedString"
| Lxsd.NPT_token -> "token"
| Lxsd.NPT_language -> "language"
| Lxsd.NPT_IDREFS -> "IDREFS"
| Lxsd.NPT_ENTITIES -> "ENTITIES"
| Lxsd.NPT_NMTOKEN -> "NMTOKEN"
| Lxsd.NPT_NMTOKENS -> "NMTOKENS"
| Lxsd.NPT_Name -> "Name"
| Lxsd.NPT_NCName -> "NCName"
| Lxsd.NPT_ID -> "ID"
| Lxsd.NPT_IDREF -> "IDREF"
| Lxsd.NPT_ENTITY -> "ENTITY"
| Lxsd.NPT_integer -> "integer"
| Lxsd.NPT_nonPositiveInteger -> "nonPositiveInteger"
| Lxsd.NPT_negativeInteger -> "negativeInteger"
| Lxsd.NPT_long -> "long"
| Lxsd.NPT_int -> "int"
| Lxsd.NPT_short -> "short"
| Lxsd.NPT_byte -> "byte"
| Lxsd.NPT_nonNegativeInteger -> "nonNegativeInteger"
| Lxsd.NPT_unsignedLong -> "unsignedLong"
| Lxsd.NPT_unsignedInt -> "unsignedInt"
| Lxsd.NPT_unsignedShort -> "unsignedShort"
| Lxsd.NPT_unsignedByte -> "unsignedByte"
| Lxsd.NPT_positiveInteger -> "positiveInteger"
| Lxsd.NPT_yearMonthDuration -> "yearMonthDuration"
| Lxsd.NPT_dayTimeDuration -> "dayTimeDuration"
| Lxsd.NPT_dateTimeStamp -> "dateTimeStamp"
let primitive_opt = function
| None -> ""
| Some p -> " '" ^ primitive p ^ "'"
let non_primitive_opt = function
| None -> ""
| Some p -> " '" ^ non_primitive p ^ "'"
let string_of_cvc = function
| Lxsd.CVC_accept ->
"Element Sequence Accepted (Particle)"
| Lxsd.CVC_accept_1 ->
"Element Sequence Accepted (Particle) /1"
| Lxsd.CVC_accept_2 ->
"Element Sequence Accepted (Particle) /2"
| Lxsd.CVC_accept_3 ->
"Element Sequence Accepted (Particle) /3"
| Lxsd.CVC_assess_attr ->
"Schema-Validity Assessment (Attribute)"
| Lxsd.CVC_assess_elt ->
"Schema-Validity Assessment (Element)"
| Lxsd.CVC_assess_elt_1 ->
"Schema-Validity Assessment (Element) /1"
| Lxsd.CVC_assess_elt_2 ->
"Schema-Validity Assessment (Element) /2"
| Lxsd.CVC_assess_elt_3 ->
"Schema-Validity Assessment (Element) /3"
| Lxsd.CVC_attribute ->
"Attribute Locally Valid"
| Lxsd.CVC_au ->
"Attribute Locally Valid (Use)"
| Lxsd.CVC_complex_content ->
"complex content"
| Lxsd.CVC_complex_content_1 ->
"complex content /1"
| Lxsd.CVC_complex_type ->
"complex type"
| Lxsd.CVC_complex_type_1_1 ->
"complex type /1.1"
| Lxsd.CVC_complex_type_1_2 ->
"complex type /1.2"
| Lxsd.CVC_complex_type_1_3 ->
"complex type /1.3"
| Lxsd.CVC_complex_type_1_4 ->
"complex type /1.4"
| Lxsd.CVC_complex_type_2 ->
"complex type /2"
| Lxsd.CVC_complex_type_3 ->
"complex type /3"
| Lxsd.CVC_complex_type_4 ->
"complex type /4"
| Lxsd.CVC_datatype_valid ->
"datatype valid"
| Lxsd.CVC_datatype_valid_1 ->
"datatype valid /1"
| Lxsd.CVC_datatype_valid_2_1 ->
"datatype valid /2.1"
| Lxsd.CVC_datatype_valid_2_2 ->
"datatype valid /2.2"
| Lxsd.CVC_datatype_valid_2_3 ->
"datatype valid /2.3"
| Lxsd.CVC_elt ->
"Element Locally Valid (Element)"
| Lxsd.CVC_elt_5_2_1 ->
"Element Locally Valid (Element) /5.2.1"
| Lxsd.CVC_elt_5_2_2 ->
"Element Locally Valid (Element) /5.2.2"
| Lxsd.CVC_enumeration_valid ->
"enumeration valid"
| Lxsd.CVC_facet_valid ->
"Facet Valid"
| Lxsd.CVC_length_valid ->
"length valid"
| Lxsd.CVC_maxLength_valid ->
"maxLength valid"
| Lxsd.CVC_minLength_valid ->
"minLength valid"
| Lxsd.CVC_minInclusive_valid ->
"minInclusive valid"
| Lxsd.CVC_maxInclusive_valid ->
"maxInclusive valid"
| Lxsd.CVC_minExclusive_valid ->
"minExclusive valid"
| Lxsd.CVC_maxExclusive_valid ->
"maxExclusive valid"
| Lxsd.CVC_model_group ->
"model group"
| Lxsd.CVC_model_group_all ->
"model group /all"
| Lxsd.CVC_model_group_choice ->
"model group /choice"
| Lxsd.CVC_model_group_sequence ->
"model group /sequence"
| Lxsd.CVC_particle ->
"Element Sequence Valid"
| Lxsd.CVC_pattern_valid ->
"pattern valid"
| Lxsd.CVC_simple_type_p opt_p ->
"string valid" ^ (primitive_opt opt_p)
| Lxsd.CVC_simple_type_np opt_p ->
"string valid" ^ (non_primitive_opt opt_p)
| Lxsd.CVC_type ->
"Element Locally Valid (Type)"
| Lxsd.CVC_type_3_1 ->
"Element Locally Valid (Type) /3.1"
| Lxsd.CVC_type_3_1_1 ->
"Element Locally Valid (Type) /3.1.1"
| Lxsd.CVC_type_3_1_2 ->
"Element Locally Valid (Type) /3.1.2"
| Lxsd.CVC_type_3_2 ->
"Element Locally Valid (Type) /3.2"
| Lxsd.CVC_wildcard ->
"Item Valid (Wildcard)"
| Lxsd.CVC_wildcard_1 ->
"Item Valid (Wildcard) /1"
| Lxsd.CVC_wildcard_name ->
"Wildcard allows Expanded Name"
| Lxsd.CVC_wildcard_name_1 ->
"Wildcard allows Expanded Name /1"
| Lxsd.CVC_wildcard_namespace ->
"Wildcard allows Namespace Name"
| Lxsd.CVC_wildcard_namespace_1 ->
"Wildcard allows Namespace Name /1"
| Lxsd.CVC_wildcard_namespace_2 ->
"Wildcard allows Namespace Name /2"
| Lxsd.CVC_wildcard_namespace_3 ->
"Wildcard allows Namespace Name /3"
| Lxsd.Skip_wildcard_valid ->
"skip wildcard valid"
| Lxsd.Strict_wildcard_valid ->
"strict wildcard valid"
| Lxsd.Lax_wildcard_valid ->
"lax wildcard valid"
| Lxsd.AnyType_valid ->
"anyType valid"
| Lxsd.Schema_root_valid ->
"schema root valid"
let string_of_cos = function
| Lxsd.COS_applicable_facets ->
"Applicable Facets"
| Lxsd.COS_st_restricts_2_1 ->
"Derivation Valid (Restriction, Simple) /2.1"
let string_of_src = function
| Lxsd.SRC_ct_5 ->
"Complex Type Definition Representation OK /5"
| Lxsd.SRC_import_1_1 ->
"Import Constraints and Semantics /1.1"
| Lxsd.SRC_import_3_1 ->
"Import Constraints and Semantics /3.1"
let string_of_scc = function
| Lxsd.SCC_whiteSpace_valid_restriction ->
"whiteSpace valid restriction"
| Lxsd.SCC_length_valid_restriction ->
"length valid restriction"
| Lxsd.SCC_maxminExInclusive_valid_restriction ->
"max/minEx/Inclusive valid restriction"
| Lxsd.SCC_ct_extends ->
"Derivation Valid (Extension)"
| Lxsd.SCC_derivation_ok_restriction ->
"Derivation Valid (Restriction, Complex)"
* Return the string corresponding to an XML 's Well - formedness constraint
let xml_wfc wfc =
" [WFC " ^ (string_of_wfc wfc) ^ "]"
let ns_c nsc =
" [NSC " ^ (string_of_nsc nsc) ^ "]"
let cvc r =
" [CVC " ^ (string_of_cvc r) ^ "]"
let cos r =
" [COS " ^ (string_of_cos r) ^ "]"
let src r =
" [SRC " ^ (string_of_src r) ^ "]"
let scc r =
" [SCC " ^ (string_of_scc r) ^ "]"
let sec () =
String.copy " (security constraint)"
let lim () =
String.copy " (limitation)"
let string_of_xsdl_def = function
| Error.XSDLDEF_type ->
"type"
| Error.XSDLDEF_attribute ->
"attribute"
| Error.XSDLDEF_element ->
"element"
| Error.XSDLDEF_attribute_group ->
"attribute group"
| Error.XSDLDEF_model_group ->
"model group"
let string_of_error = function
| Error.LIM_nc_disallowed ->
"XSD construction not implemented" ^ lim ()
| Error.SEC_attributes_limit_reached ->
"maximum attributes limit reached" ^ sec ()
| Error.SEC_contents_limit_reached ->
"maximum contents limit reached" ^ sec ()
| Error.SEC_depth_limit_reached ->
"maximum depth limit reached" ^ sec ()
| Error.SEC_length_limit_reached ->
"maximum length limit reached" ^ sec ()
| Error.F_wrong_output_file_extension ->
"wrong output file extension"
| Error.F_wrong_output_basename ->
"wrong output name"
| Error.F_wrong_printing_file_extension ->
"wrong printing file extension"
| Error.F_wrong_input_extension ->
"wrong input extension"
| Error.F_wrong_input_file ->
"wrong input file"
| Error.F_GEN_primary_file_not_provided ->
"XSD primary file not provided"
| Error.IO_input ->
"fail to read input"
| Error.IO_open_in_file_fail s ->
"fail to read file " ^ s
| Error.IO_open_out_file_fail s ->
"fail to write file " ^ s
| Error.IO_lib_dir_missing ->
"validator library directory missing"
| Error.IO_output_dir ->
"fail to prepare the output directory"
| Error.SD_subtraction_positions ->
"subtraction positions error"
| Error.SD_subtraction_limits ->
"subtraction limits error"
| Error.SD_subtraction_sources ->
"subtraction sources error"
| Error.SD_structure ->
"string data error"
| Error.SD_building_regexp_failed ->
"building regular expression failed"
| Error.UTF8_invalid ->
"invalid UTF-8 encoding"
| Error.UTF8_overlong ->
"overlong UTF-8 encoding"
| Error.UTF8_first_byte_invalid ->
"first byte for 1-, 2-, 3-, 4--bytes UTF-8 encoding invalid"
| Error.UTF8_range_split_invalid ->
"UTF-8 range split invalid"
| Error.Unicode_code_point_out_of_range ->
"Unicode code point out of range"
| Error.XMLL_encoding_missing ->
"XML encoding is missing, using UTF-8"
| Error.XMLL_encoding_not_supported ->
"encoding not supported"
| Error.XMLL_token_not_finished ->
"XML token not finished"
| Error.XMLL_standalone_not_recognised ->
"'standalone' value not recognised"
| Error.XMLL_version_missing ->
"version missing in XML declaration"
| Error.XMLL_version_not_supported ->
"XML version not supported"
| Error.XMLL_pi_target_not_allowed ->
"processing instructions target not allowed"
| Error.XMLL_empty_input ->
"empty input"
| Error.XMLL_expected s ->
"XML syntax error, " ^ s ^ " expected"
| Error.XMLL_not_well_formed s ->
"XML syntax error, " ^ s ^ " not well formed"
| Error.XMLL_entity_reference_unknown ->
"entity reference unknown"
| Error.XMLL_char_reference_not_well_formed ->
"char reference not well formed" ^ (xml_wfc Error.WFC_Legalchar)
| Error.XMLL_SEC_ascii_char_reference_not_allowed ->
"ASCII char reference not allowed" ^ sec ()
| Error.XMLP_opening_tag_missing ->
"opening tag missing"
| Error.XMLP_SEC_doctype ->
"DocType in file" ^ sec ()
| Error.XMLP_SEC_cdata ->
"CDATA in file" ^ sec ()
| Error.XMLP_SEC_pi ->
"PI in file" ^ sec ()
| Error.XMLP_SEC_comment ->
"comment in file" ^ sec ()
| Error.XMLP_content_after_root_not_allowed ->
"Invalid data/elements after root element"
| Error.XMLP_data_before_root_not_allowed ->
"Unauthorised data before the root node"
| Error.XMLP_closing_tag_missing ->
"Closing tag missing"
| Error.XMLP_tree_not_recognized ->
"Unrecognized tree in XML parser"
| Error.XMLL_XMLP_attribute_definition_not_unique ->
"Attribute not unique" ^ (xml_wfc Error.WFC_uniqattspec) ^ (ns_c Error.NSC_AttrsUnique)
| Error.XMLP_opening_closing_tags_not_matching ->
"opening closing tags not matching" ^ (xml_wfc Error.WFC_GIMatch)
| Error.XMLP_attribute_required s ->
"attribute '" ^ s ^ "' required"
| Error.XMLP_LIM_attribute_ignored s ->
"attribute '" ^ s ^ "' ignored" ^ lim ()
| Error.XMLP_LIM_attribute_not_implemented s ->
"attribute '" ^ s ^ "' not implemented" ^ lim ()
| Error.XMLP_element_expected s ->
"element '" ^ s ^ "' expected"
| Error.XMLP_LIM_element_ignored s ->
"element '" ^ s ^ "' ignored" ^ lim ()
| Error.XMLP_LIM_element_not_implemented s ->
"element '" ^ s ^ "' not implemented" ^ lim ()
| Error.XMLP_LIM_xml_id_unicity_not_implemented ->
"xml:id unicity not implemented" ^ lim ()
| Error.XMLP_xml_space_value_invalid ->
"xml:space value invalid"
| Error.XMLNS_namespace_uri_reserved ->
"namespace URI reserved" ^ (ns_c Error.NSC_xmlReserved)
| Error.XMLNS_namespace_prefix_reserved ->
"namespace prefix reserved" ^ (ns_c Error.NSC_xmlReserved)
| Error.XMLNS_prefixed_declaration_value_empty ->
"empty value for a prefixed namespace declaration not allowed" ^ (ns_c Error.NSC_NoPrefixUndecl)
| Error.XMLNS_XMLP_xml_attribute_not_recognised ->
"XML attribute not recognised"
| Error.XMLNS_namespace_prefix_not_declared ->
"namespace prefix not declared" ^ (ns_c Error.NSC_NSDeclared)
| Error.XMLNS_XSDT_uri_not_well_formed ->
"URI not well formed"
| Error.XSDT_SEC_appinfo_not_allowed ->
"element 'appinfo' not allowed" ^ sec ()
| Error.XSDT_skip_process_contents_risky ->
"XSD skip process contents is risky"
| Error.XSDT_version_missing ->
"XSD version is missing, using XSD version 1.1"
| Error.XSDT_version_1_0_not_supported ->
"XSD version 1.0 not supported, using XSD version 1.1 instead"
| Error.XSDT_version_not_supported ->
"XSD version not supported, using XSD version 1.1 instead"
| Error.XSDT_qname_not_well_formed ->
"QName not well formed"
| Error.XSDT_qname_namespace_missing ->
"QName's namespace missing"
| Error.XSDT_mixed_values_contradictory ->
"mixed values contradictory" ^ (src Lxsd.SRC_ct_5)
| Error.XSDT_min_max_inconsistent ->
"min max values inconsistent"
| Error.XSDT_constructions_mutually_exclusive s ->
s ^ " mutually exclusive"
| Error.XSDT_extra_xsd_attribute_not_recognised ->
"extra XSD attribute not recognised"
| Error.XSDT_extra_xsd_element_in s ->
"extra XSD element in " ^ s ^ " not recognised"
| Error.XSDT_LIM_schema_location_required ->
"schemaLocation required" ^ lim ()
| Error.XSDT_multiple_name_for_type_definition ->
"multiple name for a type definition"
| Error.XSDT_constructions_not_allowed s ->
s ^ " not allowed"
| Error.XSDT_constructions_expected s ->
s ^ " expected"
| Error.XSDT_occurrence_value_invalid ->
"occurrence value invalid"
| Error.XSDT_LIM_id_idref_unicity_reference_not_implemented ->
"ID unicity and IDREF, IDREFS references existence not implemented" ^ lim ()
| Error.XSDT_default_value_not_verified ->
"default value not verified"
| Error.XSDT_fixed_value_not_verified ->
"fixed value not verified"
| Error.RE_syntax s ->
"regular expression syntax error '" ^ s ^ "'"
| Error.RE_block_unknown s ->
"unknown Unicode block '" ^ s ^ "'"
| Error.RE_UTF8_invalid ->
"invalid UTF-8 encoding in regular expression"
| Error.RE_LIM_charprop_not_implemented ->
"charProp not implemented" ^ lim ()
| Error.XSDL_multiple_definition xsdl_def ->
"multiple " ^ string_of_xsdl_def xsdl_def ^ " definition"
| Error.XSDL_missing_definition xsdl_def ->
"missing " ^ string_of_xsdl_def xsdl_def ^ " definition"
| Error.XSDL_missing_definition_s ->
"missing definition(s) in XSD grammar"
| Error.XSDL_recursive_definitions ->
"recursive definitions in XSD grammar"
| Error.XSDL_non_deterministic_grammar ->
"non deterministic XSD grammar"
| Error.XSDL_facet_not_applicable ->
"facet not applicable" ^ (cos Lxsd.COS_applicable_facets)
| Error.XSDL_counter_not_applicable ->
"length counter not applicable" ^ (scc Lxsd.SCC_length_valid_restriction)
| Error.XSDL_order_not_applicable ->
"order not applicable" ^ (scc Lxsd.SCC_maxminExInclusive_valid_restriction)
| Error.XSDL_XSV_LIM_notation_not_implemented ->
"notation not implemented" ^ lim ()
| Error.XSDL_XSV_LIM_attribute_group_wildcard_not_implemented ->
"attribute group wildcard not implemented" ^ lim ()
| Error.XSDL_LIM_order_not_implemented ->
"order not implemented" ^ lim ()
| Error.XSDL_simple_type_derivation_invalid ->
"simple type derivation invalid" ^ (cos Lxsd.COS_st_restricts_2_1)
| Error.XSDL_LIM_facet_not_implemented ->
"facet not implemented" ^ lim ()
| Error.XSDL_whitespace_values_not_combinable ->
"whitespace values not combinable" ^ (scc Lxsd.SCC_whiteSpace_valid_restriction)
| Error.XSDL_extension_of_anytype_risky ->
"extending anyType is risky"
| Error.XSDL_complex_type_extension_invalid ->
"complex type extension" ^ (scc Lxsd.SCC_ct_extends)
| Error.XSDL_complex_type_restriction_invalid ->
"complex type restriction" ^ (scc Lxsd.SCC_derivation_ok_restriction)
| Error.XSDL_SEC_no_root_element_declared ->
"no root element declared" ^ sec ()
| Error.XSDL_all_model_not_well_formed ->
"all-model group not well formed"
| Error.XSDL_XSV_LIM_more_than_one_wildcard_in_all_model ->
"more than one wildcard in all-model" ^ lim ()
| Error.XSDL_LIM_determinism_check_not_implemented ->
"determinism check not implemented" ^ lim ()
| Error.GEN_referenced_file_missing ->
"referenced XSD's file missing"
| Error.GEN_file_not_referenced ->
"XSD file not referenced"
| Error.GEN_import_effective_uris_mismatch ->
"imported and effective target URI do not match" ^ (src Lxsd.SRC_import_3_1)
| Error.GEN_imports_mismatch ->
"importations do not match"
| Error.GEN_primary_uri_import_not_allowed ->
"importation of primary URI not allowed" ^ (src Lxsd.SRC_import_1_1)
| Error.GEN_XSDT_primary_target_uri_missing ->
"primary target URI missing"
| Error.GEN_debug_mode ->
"generating debug mode validator"
| Error.XSV_input_file_not_provided ->
"input file not provided"
| Error.XSV_multiple_input_files_not_allowed ->
"multiple input files not allowed"
| Error.XSV_command_line_incorrect ->
"command line incorrect"
| Error.XSV_fail ->
"invalid document"
| Error.XSV_uri_not_matching ->
"invalid document (URI not matching)"
| Error.XSV_uri_table_corrupted i ->
"validation table corrupted (URI " ^
string_of_int i ^ " missing)"
| Error.XSV_type_table_corrupted i ->
"validation table corrupted (type " ^
string_of_int i ^ " missing)"
| Error.XSV_element_table_corrupted i ->
"validation table corrupted (element " ^
string_of_int i ^ " missing)"
| Error.XSV_attribute_table_corrupted i ->
"validation table corrupted (attribute " ^
string_of_int i ^ " missing)"
| Error.XSV_attribute_group_table_corrupted i ->
"validation table corrupted (attribute group " ^
string_of_int i ^ " missing)"
| Error.XSV_model_group_table_corrupted i ->
"validation table corrupted (model group " ^
string_of_int i ^ " missing)"
| Error.XSV_SEC_xsi_attribute_not_allowed ->
"XML Schema Instance attribute not allowed" ^ sec ()
| Error.XSV_SEC_xsi_schemaLocation_ignored ->
"XML Schema Instance's schemaLocation attribute ignored" ^ sec ()
| Error.XSV_SEC_xsi_attribute_ignored ->
"XML Schema Instance attribute ignored" ^ sec ()
| Error.XSV_debug_mode ->
"validator in debug mode"
| Error.A_empty_parser_stack
| Error.A_stringdata_comparison_output
| Error.A_double_log_setting
| Error.A_empty_members_list
| Error.A_local_global_function_types
->
"unexpected error"
| Error.A_function qfun ->
"unexprected error (" ^ qfun ^ ")"
| Error.ML_max_string_reached
| Error.ML_max_int_reached ->
"system limit reached"
| Error.EXE (s,(Invalid_argument s')) ->
"execution error, " ^ s ^ ", invalid argument " ^ s'
| Error.EXE (s,_e) ->
"execution error, " ^ s ^ ", unexpected exception"
let fprintf_error f e =
Format.fprintf f "%s"
(string_of_error e)
let string_of_max= function
| None -> "unbounded"
| Some i -> string_of_int i
let string_of_status = function
| true -> "rejected"
| false -> "ignored"
let string_of_xsi_status = function
| Xml.XSI_reject_all ->
"rejected"
| Xml.XSI_ignore_schemaLocation_only ->
"rejected (except schemaLocation)"
| Xml.XSI_ignore_all ->
"ignored"
let string_of_info = function
| INFO_file_XML_valid in_name ->
"file " ^ File_in.to_string in_name ^ " XML valid"
| INFO_file_XML_invalid ->
"file XML invalid"
| INFO_file_XML_notwf ->
"file XML not well-formed"
| INFO_validating_element sd ->
"validating element '" ^ Stringdata.to_string sd ^ "'"
| INFO_testing_element sd ->
"testing element '" ^ Stringdata.to_string sd ^ "'"
| INFO_valid_element sd ->
"valid element '" ^ Stringdata.to_string sd ^ "'"
| INFO_invalid_element sd ->
"invalid element '" ^ Stringdata.to_string sd ^ "'"
| INFO_fetching_td i ->
"fetching type definition " ^ string_of_int i
| INFO_fetching_ed i ->
"fetching element definition " ^ string_of_int i
| INFO_fetching_ad i ->
"fetching attribute definition " ^ string_of_int i
| INFO_fetching_agd i ->
"fetching attribute group definition " ^ string_of_int i
| INFO_fetching_mgd i ->
"fetching model group definition " ^ string_of_int i
| INFO_document_valid ->
"document valid"
| INFO_pattern re ->
"regular expression in pattern " ^
(Stringdata.to_code_escaped_string re)
| INFO_XSD_primary_file in_name ->
"primary XSD file " ^ File_in.to_string in_name
| INFO_XSD_secondary_file in_name ->
"secondary XSD file " ^ File_in.to_string in_name
| INFO_validator_configuration
{ Xml.max_attributes = m_a;
Xml.max_contents = m_c;
Xml.max_depth = m_d;
Xml.max_length = m_l;
Xml.error_status =
{ Xml.doctype = dt;
Xml.cdata = cd;
Xml.pi = pi;
Xml.comment = ct;
Xml.ascii_ref = ar;
Xml.xsi = xsi_status } } ->
"configuration (" ^
"max-attributes " ^ string_of_max m_a ^ ", " ^
"max-contents " ^ string_of_max m_c ^ ", " ^
"max-depth " ^ string_of_max m_d ^ ", " ^
"max-length " ^ string_of_max m_l ^ ", " ^
"DocType" ^ string_of_status dt ^ ", " ^
"CData " ^ string_of_status cd ^ ", " ^
"PI " ^ string_of_status pi ^ ", " ^
"comment " ^ string_of_status ct ^ ", " ^
"ASCII references " ^ string_of_status ar ^ ", " ^
"XSI " ^ string_of_xsi_status xsi_status ^ ")"
let fprintf_info f i =
Format.fprintf f "%s"
(string_of_info i)
end
let pr_fun mtype loc descr_fun descr =
Format.fprintf !log_formatter "[%s] %a%s@."
(match mtype with
| Error -> "error"
| Warning -> "warning"
| Info -> "info"
| Debug -> "DEBUG"
| Todo -> "TODO")
descr_fun descr
(if loc="" then loc else " : " ^ loc)
let error_fun loc err =
pr_fun Error loc Pr_error.fprintf_error err;
raise (E err)
let error err =
error_fun "" err
let error_at s err =
error_fun
(Stringdata.string_loc s)
err
let warning_fun loc err =
pr_fun Warning loc Pr_error.fprintf_error err
let warning err =
warning_fun "" err
let warning_at s err =
warning_fun
(Stringdata.string_loc s)
err
let info_fun loc message =
pr_fun Info loc Pr_error.fprintf_info message
let info message =
info_fun "" message
let info_at s message =
info_fun (Stringdata.string_loc s) message
let warning_cvc cvc =
pr_fun Warning ""
(fun f -> Format.fprintf f "%s")
("failed validation rule" ^ Pr_error.cvc cvc)
let warning_cvc_at s cvc =
pr_fun Warning (Stringdata.string_loc s)
(fun f -> Format.fprintf f "%s")
("failed validation rule" ^ Pr_error.cvc cvc)
let info_cvc cvc =
pr_fun Info ""
(fun f -> Format.fprintf f "%s")
("validation rule" ^ Pr_error.cvc cvc)
let info_cvc_at s cvc =
pr_fun Info (Stringdata.string_loc s)
(fun f -> Format.fprintf f "%s")
("validation rule" ^ Pr_error.cvc cvc)
|
5bb38fef8eedb4221b5986fc786a1cfcb43c1a1c3b3ae06554338d02722859cb | Relph1119/sicp-solutions-manual | p1-22-continue-primes.scm | (load "src/examples/ch01/p33-prime.scm")
(load "src/practices/ch01/p1-22-next-odd.scm")
(define (continue-primes n count)
(cond ((= count 0) (display "are primes."))
((prime? n)
(display n)
(cond ((> count 1) (display ","))
((= count 1) (display " ")))
(continue-primes (next-odd n) (- count 1)))
(else (continue-primes (next-odd n) count)))) | null | https://raw.githubusercontent.com/Relph1119/sicp-solutions-manual/f2ff309a6c898376209c198030c70d6adfac1fc1/src/practices/ch01/p1-22-continue-primes.scm | scheme | (load "src/examples/ch01/p33-prime.scm")
(load "src/practices/ch01/p1-22-next-odd.scm")
(define (continue-primes n count)
(cond ((= count 0) (display "are primes."))
((prime? n)
(display n)
(cond ((> count 1) (display ","))
((= count 1) (display " ")))
(continue-primes (next-odd n) (- count 1)))
(else (continue-primes (next-odd n) count)))) | |
dd0d067403381501243dbaf0eba072c25268593a491c9a36f13bb52e4373b3bc | conal/lambda-ccc | TransCode.hs | # LANGUAGE CPP #
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
# LANGUAGE ViewPatterns , PatternGuards #
# LANGUAGE FlexibleContexts , ConstraintKinds #
# LANGUAGE TupleSections #
{-# LANGUAGE Rank2Types #-}
# LANGUAGE LambdaCase #
# OPTIONS_GHC -Wall #
# OPTIONS_GHC -fno - warn - unused - imports #
{ - # OPTIONS_GHC -fno - warn - unused - binds # - } -- TEMP
----------------------------------------------------------------------
-- |
-- Module : LambdaCCC.TransCode
Copyright : ( c ) 2014 Tabula , Inc.
--
-- Maintainer :
-- Stability : experimental
--
Transform a Core program to use only standard types
----------------------------------------------------------------------
module LambdaCCC.TransCode where
-- TODO: explicit exports
import Prelude hiding (id,(.),(>>))
import qualified Prelude
import Control.Category (id,(.),(>>>))
import Control.Arrow (arr)
import Control.Monad (unless,(<=<))
import Data.Functor ((<$),(<$>))
import Control.Applicative (pure,(<*>),liftA2)
import Data.Monoid (mempty)
import Data.List (intercalate,isPrefixOf)
import qualified Data.Set as S
GHC
import PrelNames (eitherTyConName)
import HERMIT.Core (CoreDef(..))
import HERMIT.Dictionary hiding (externals)
import HERMIT.External (External,ExternalName,external,(.+),CmdTag(Loop))
import HERMIT.GHC
import HERMIT.Kure
import HERMIT.Monad (saveDef,newIdH,Label)
import HERMIT.Plugin (hermitPlugin,phase,interactive)
import HERMIT.Extras hiding (findTyConT)
import qualified HERMIT.Extras as Ex
import TypeEncode . Plugin ( findCon )
import LambdaCCC.Misc ((<~))
import qualified LambdaCCC.Monomorphize as Mono
{--------------------------------------------------------------------
Encoding
--------------------------------------------------------------------}
class Enc a where enc :: a -> TransformH x a
instance (Enc a, Enc b) => Enc (a,b) where
enc (a,b) = (,) <$> enc a <*> enc b
instance Enc a => Enc [a] where enc = mapM enc
instance Enc CoreExpr where
enc e@(Lit _) = return e
enc (Var v) = Var <$> enc v -- Revisit for non-local vars
enc (App u v) = App <$> enc u <*> enc v
enc (Lam x e) = Lam <$> enc x <*> enc e
enc (Let b e) = Let <$> enc b <*> enc e
enc (Case e _w _ty [(_,dropTvars -> [v],rhs)]) =
-- TODO: Check whether _w is in rhs
-- TODO: Maybe drop this special case.
return $ Let (NonRec v e) rhs
enc (Case e w ty [alt]) =
Case <$> enc e
<*> enc w
<*> enc ty
<*> ((:[]) <$> encAlt alt)
enc (Case _ _ _ _) = error "enc: Case: not a single alternative"
enc (Cast e _co) = enc e -- Experiment
enc (Tick t e) = Tick t <$> enc e
enc (Type t) = Type <$> enc t
enc (Coercion _co) = error "enc: Coercion -- ??"
encAlt :: CoreAlt -> TransformH x CoreAlt
encAlt (_,dropTvars -> vs,e) =
(DataAlt (tupleCon BoxedTuple (length vs)),vs,) <$> enc e
-- Drop type variables including coercions
dropTvars :: Unop [Var]
dropTvars = filter (not . isTyVar)
instance Enc Id where
enc v | isId v = newIdT (uqVarName v) . enc (varType v)
| otherwise = return v
instance Enc Type where
enc (TyConApp tc tys) | isDistribTC tc = TyConApp tc <$> enc tys
enc (FunTy a b) = FunTy <$> enc a <*> enc b
enc t = observeR "enc: unhandled type" $* t
isDistribTC :: TyCon -> Bool
isDistribTC tc =
any ($ tc) [isTupleTyCon,isFunTyCon] && tyConArity tc == 2
|| tc == unitTyCon
instance Enc CoreBind where
enc (NonRec v e) = NonRec <$> enc v <*> enc e
enc (Rec ws) = Rec <$> enc ws
encode :: Enc a => RewriteH a
encode = id >>= enc
{--------------------------------------------------------------------
Plugin
--------------------------------------------------------------------}
plugin :: Plugin
plugin = hermitPlugin (phase 0 . interactive externals)
where
externals =
[ externC "encodeBind" (encode :: RewriteH CoreBind) "..."
]
++ Mono.externals
| null | https://raw.githubusercontent.com/conal/lambda-ccc/141a713456d447d27dbe440fa27a9372cd44dc7f/src/LambdaCCC/Unused/TransCode.hs | haskell | # LANGUAGE TypeSynonymInstances, FlexibleInstances #
# LANGUAGE Rank2Types #
TEMP
--------------------------------------------------------------------
|
Module : LambdaCCC.TransCode
Maintainer :
Stability : experimental
--------------------------------------------------------------------
TODO: explicit exports
-------------------------------------------------------------------
Encoding
-------------------------------------------------------------------
Revisit for non-local vars
TODO: Check whether _w is in rhs
TODO: Maybe drop this special case.
Experiment
Drop type variables including coercions
-------------------------------------------------------------------
Plugin
------------------------------------------------------------------- | # LANGUAGE CPP #
# LANGUAGE ViewPatterns , PatternGuards #
# LANGUAGE FlexibleContexts , ConstraintKinds #
# LANGUAGE TupleSections #
# LANGUAGE LambdaCase #
# OPTIONS_GHC -Wall #
# OPTIONS_GHC -fno - warn - unused - imports #
Copyright : ( c ) 2014 Tabula , Inc.
Transform a Core program to use only standard types
module LambdaCCC.TransCode where
import Prelude hiding (id,(.),(>>))
import qualified Prelude
import Control.Category (id,(.),(>>>))
import Control.Arrow (arr)
import Control.Monad (unless,(<=<))
import Data.Functor ((<$),(<$>))
import Control.Applicative (pure,(<*>),liftA2)
import Data.Monoid (mempty)
import Data.List (intercalate,isPrefixOf)
import qualified Data.Set as S
GHC
import PrelNames (eitherTyConName)
import HERMIT.Core (CoreDef(..))
import HERMIT.Dictionary hiding (externals)
import HERMIT.External (External,ExternalName,external,(.+),CmdTag(Loop))
import HERMIT.GHC
import HERMIT.Kure
import HERMIT.Monad (saveDef,newIdH,Label)
import HERMIT.Plugin (hermitPlugin,phase,interactive)
import HERMIT.Extras hiding (findTyConT)
import qualified HERMIT.Extras as Ex
import TypeEncode . Plugin ( findCon )
import LambdaCCC.Misc ((<~))
import qualified LambdaCCC.Monomorphize as Mono
class Enc a where enc :: a -> TransformH x a
instance (Enc a, Enc b) => Enc (a,b) where
enc (a,b) = (,) <$> enc a <*> enc b
instance Enc a => Enc [a] where enc = mapM enc
instance Enc CoreExpr where
enc e@(Lit _) = return e
enc (App u v) = App <$> enc u <*> enc v
enc (Lam x e) = Lam <$> enc x <*> enc e
enc (Let b e) = Let <$> enc b <*> enc e
enc (Case e _w _ty [(_,dropTvars -> [v],rhs)]) =
return $ Let (NonRec v e) rhs
enc (Case e w ty [alt]) =
Case <$> enc e
<*> enc w
<*> enc ty
<*> ((:[]) <$> encAlt alt)
enc (Case _ _ _ _) = error "enc: Case: not a single alternative"
enc (Tick t e) = Tick t <$> enc e
enc (Type t) = Type <$> enc t
enc (Coercion _co) = error "enc: Coercion -- ??"
encAlt :: CoreAlt -> TransformH x CoreAlt
encAlt (_,dropTvars -> vs,e) =
(DataAlt (tupleCon BoxedTuple (length vs)),vs,) <$> enc e
dropTvars :: Unop [Var]
dropTvars = filter (not . isTyVar)
instance Enc Id where
enc v | isId v = newIdT (uqVarName v) . enc (varType v)
| otherwise = return v
instance Enc Type where
enc (TyConApp tc tys) | isDistribTC tc = TyConApp tc <$> enc tys
enc (FunTy a b) = FunTy <$> enc a <*> enc b
enc t = observeR "enc: unhandled type" $* t
isDistribTC :: TyCon -> Bool
isDistribTC tc =
any ($ tc) [isTupleTyCon,isFunTyCon] && tyConArity tc == 2
|| tc == unitTyCon
instance Enc CoreBind where
enc (NonRec v e) = NonRec <$> enc v <*> enc e
enc (Rec ws) = Rec <$> enc ws
encode :: Enc a => RewriteH a
encode = id >>= enc
plugin :: Plugin
plugin = hermitPlugin (phase 0 . interactive externals)
where
externals =
[ externC "encodeBind" (encode :: RewriteH CoreBind) "..."
]
++ Mono.externals
|
deae91ba9929d421aca39e3670cc13d0222ccf50f8745ba67dcffe94e745fb60 | project-oak/hafnium-verification | CType_decl.mli |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module CProcname : sig
val from_decl :
?tenv:Tenv.t
-> ?block_return_type:Clang_ast_t.qual_type
-> ?outer_proc:Procname.t
-> Clang_ast_t.decl
-> Procname.t
* Given , return its procname . This function should be used for all procedures present in
original AST
original AST *)
val from_decl_for_linters : Clang_ast_t.decl -> Procname.t
* This is used for bug hashing for linters . In ObjC the method names contain the parameter
names , thus if people add new parameters , any bug about the method will be considered
different which means reporting on unchanged code . So , in the ObjC method case , we create the
method name only based on the first part of the name without the parameters
names, thus if people add new parameters, any bug about the method will be considered
different which means reporting on unchanged code. So, in the ObjC method case, we create the
method name only based on the first part of the name without the parameters *)
(** WARNING: functions from this module should not be used if full decl is available in AST *)
module NoAstDecl : sig
val c_function_of_string : Tenv.t -> string -> Procname.t
val cpp_method_of_string : Tenv.t -> Typ.Name.t -> string -> Procname.t
val objc_method_of_string_kind : Typ.Name.t -> string -> Procname.ObjC_Cpp.kind -> Procname.t
end
end
(** Processes types and record declarations by adding them to the tenv *)
val get_record_typename : ?tenv:Tenv.t -> Clang_ast_t.decl -> Typ.Name.t
val add_types_from_decl_to_tenv : Tenv.t -> Clang_ast_t.decl -> Typ.desc
val add_predefined_types : Tenv.t -> unit
(** Add the predefined types objc_class which is a struct, and Class, which is a pointer to
objc_class. *)
val qual_type_to_sil_type : Tenv.t -> Clang_ast_t.qual_type -> Typ.t
val class_from_pointer_type : Tenv.t -> Clang_ast_t.qual_type -> Typ.Name.t
val get_type_from_expr_info : Clang_ast_t.expr_info -> Tenv.t -> Typ.t
val method_signature_of_decl :
Tenv.t
-> Clang_ast_t.decl
-> ?block_return_type:Clang_ast_t.qual_type
-> Procname.t
-> CMethodSignature.t
val method_signature_body_of_decl :
Tenv.t
-> Clang_ast_t.decl
-> ?block_return_type:Clang_ast_t.qual_type
-> Procname.t
-> CMethodSignature.t
* Clang_ast_t.stmt option
* [> `CXXConstructorInit of Clang_ast_t.cxx_ctor_initializer] list
val should_add_return_param : Typ.typ -> is_objc_method:bool -> bool
val type_of_captured_var :
Tenv.t -> is_block_inside_objc_class_method:bool -> Clang_ast_t.decl_ref -> Typ.typ option
| null | https://raw.githubusercontent.com/project-oak/hafnium-verification/6071eff162148e4d25a0fedaea003addac242ace/experiments/ownership-inference/infer/infer/src/clang/CType_decl.mli | ocaml | * WARNING: functions from this module should not be used if full decl is available in AST
* Processes types and record declarations by adding them to the tenv
* Add the predefined types objc_class which is a struct, and Class, which is a pointer to
objc_class. |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module CProcname : sig
val from_decl :
?tenv:Tenv.t
-> ?block_return_type:Clang_ast_t.qual_type
-> ?outer_proc:Procname.t
-> Clang_ast_t.decl
-> Procname.t
* Given , return its procname . This function should be used for all procedures present in
original AST
original AST *)
val from_decl_for_linters : Clang_ast_t.decl -> Procname.t
* This is used for bug hashing for linters . In ObjC the method names contain the parameter
names , thus if people add new parameters , any bug about the method will be considered
different which means reporting on unchanged code . So , in the ObjC method case , we create the
method name only based on the first part of the name without the parameters
names, thus if people add new parameters, any bug about the method will be considered
different which means reporting on unchanged code. So, in the ObjC method case, we create the
method name only based on the first part of the name without the parameters *)
module NoAstDecl : sig
val c_function_of_string : Tenv.t -> string -> Procname.t
val cpp_method_of_string : Tenv.t -> Typ.Name.t -> string -> Procname.t
val objc_method_of_string_kind : Typ.Name.t -> string -> Procname.ObjC_Cpp.kind -> Procname.t
end
end
val get_record_typename : ?tenv:Tenv.t -> Clang_ast_t.decl -> Typ.Name.t
val add_types_from_decl_to_tenv : Tenv.t -> Clang_ast_t.decl -> Typ.desc
val add_predefined_types : Tenv.t -> unit
val qual_type_to_sil_type : Tenv.t -> Clang_ast_t.qual_type -> Typ.t
val class_from_pointer_type : Tenv.t -> Clang_ast_t.qual_type -> Typ.Name.t
val get_type_from_expr_info : Clang_ast_t.expr_info -> Tenv.t -> Typ.t
val method_signature_of_decl :
Tenv.t
-> Clang_ast_t.decl
-> ?block_return_type:Clang_ast_t.qual_type
-> Procname.t
-> CMethodSignature.t
val method_signature_body_of_decl :
Tenv.t
-> Clang_ast_t.decl
-> ?block_return_type:Clang_ast_t.qual_type
-> Procname.t
-> CMethodSignature.t
* Clang_ast_t.stmt option
* [> `CXXConstructorInit of Clang_ast_t.cxx_ctor_initializer] list
val should_add_return_param : Typ.typ -> is_objc_method:bool -> bool
val type_of_captured_var :
Tenv.t -> is_block_inside_objc_class_method:bool -> Clang_ast_t.decl_ref -> Typ.typ option
|
7a8fa65499478b535b7715dfb640e936c42a28e4691a9b034825cc934d892a92 | lingnand/VIMonad | Warp.hs | -----------------------------------------------------------------------------
-- |
Module : XMonad . Actions . Warp
-- Copyright : (c)
-- License : BSD3-style (see LICENSE)
--
-- Maintainer :
-- Stability : unstable
-- Portability : unportable
--
-- Warp the pointer to a given window or screen.
--
-----------------------------------------------------------------------------
module XMonad.Actions.Warp (
-- * Usage
-- $usage
banish,
banishScreen,
Corner(..),
warpToScreen,
warpToWindow
) where
import Data.List
import XMonad
import XMonad.StackSet as W
$ usage
You can use this module with the following in your @~\/.xmonad\/xmonad.hs@ :
> import XMonad . Actions . Warp
then add appropriate keybindings to warp the pointer ; for example :
> , ( ( modm , xK_z ) , warpToWindow ( 1%2 ) ( 1%2 ) ) -- @@ Move pointer to currently focused window
>
> -- mod - ctrl-{w , e , r } @@ Move mouse pointer to screen 1 , 2 , or 3
>
> [ ( ( modm .| . controlMask , key ) , warpToScreen sc ( 1%2 ) ( 1%2 ) )
> | ( key , sc ) < - zip [ xK_w , , xK_r ] [ 0 .. ] ]
Note that warping to a particular screen may change the focus .
You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
> import XMonad.Actions.Warp
then add appropriate keybindings to warp the pointer; for example:
> , ((modm, xK_z ), warpToWindow (1%2) (1%2)) -- @@ Move pointer to currently focused window
>
>-- mod-ctrl-{w,e,r} @@ Move mouse pointer to screen 1, 2, or 3
>
> [((modm .|. controlMask, key), warpToScreen sc (1%2) (1%2))
> | (key, sc) <- zip [xK_w, xK_e, xK_r] [0..]]
Note that warping to a particular screen may change the focus.
-}
-- For detailed instructions on editing your key bindings, see
" XMonad . Doc . Extending#Editing_key_bindings " .
data Corner = UpperLeft | UpperRight | LowerLeft | LowerRight
| Move the mouse cursor to a corner of the focused window . Useful for
uncluttering things .
Internally , this uses numerical parameters . We parametrize on the ' Corner '
type so the user need not see the violence inherent in
the system .
' warpToScreen ' and ' warpToWindow ' can be used in a variety of
ways . Suppose you wanted to emulate 's \'banish\ ' command ,
which moves the mouse pointer to a corner ? warpToWindow can do that !
uncluttering things.
Internally, this uses numerical parameters. We parametrize on the 'Corner'
type so the user need not see the violence inherent in
the system.
'warpToScreen' and 'warpToWindow' can be used in a variety of
ways. Suppose you wanted to emulate Ratpoison's \'banish\' command,
which moves the mouse pointer to a corner? warpToWindow can do that! -}
banish :: Corner -> X ()
banish direction = case direction of
LowerRight -> warpToWindow 1 1
LowerLeft -> warpToWindow 0 1
UpperLeft -> warpToWindow 0 0
UpperRight -> warpToWindow 1 0
{- | Same as 'banish' but moves the mouse to the corner of the
currently focused screen -}
banishScreen :: Corner -> X ()
banishScreen direction = case direction of
LowerRight -> warpToCurrentScreen 1 1
LowerLeft -> warpToCurrentScreen 0 1
UpperLeft -> warpToCurrentScreen 0 0
UpperRight -> warpToCurrentScreen 1 0
where
warpToCurrentScreen h v =
do ws <- gets windowset
warpToScreen (W.screen $ current ws) h v
windows (const ws)
fraction :: (Integral a, Integral b) => Rational -> a -> b
fraction f x = floor (f * fromIntegral x)
warp :: Window -> Position -> Position -> X ()
warp w x y = withDisplay $ \d -> io $ warpPointer d none w 0 0 0 0 x y
-- | Warp the pointer to a given position relative to the currently
focused window . Top left = ( 0,0 ) , bottom right = ( 1,1 ) .
warpToWindow :: Rational -> Rational -> X ()
warpToWindow h v =
withDisplay $ \d ->
withFocused $ \w -> do
wa <- io $ getWindowAttributes d w
warp w (fraction h (wa_width wa)) (fraction v (wa_height wa))
| Warp the pointer to the given position ( top left = ( 0,0 ) , bottom
right = ( 1,1 ) ) on the given screen .
warpToScreen :: ScreenId -> Rational -> Rational -> X ()
warpToScreen n h v = do
root <- asks theRoot
(StackSet {current = x, visible = xs}) <- gets windowset
whenJust (fmap (screenRect . W.screenDetail) . find ((n==) . W.screen) $ x : xs)
$ \r ->
warp root (rect_x r + fraction h (rect_width r))
(rect_y r + fraction v (rect_height r))
| null | https://raw.githubusercontent.com/lingnand/VIMonad/048e419fc4ef57a5235dbaeef8890faf6956b574/XMonadContrib/XMonad/Actions/Warp.hs | haskell | ---------------------------------------------------------------------------
|
Copyright : (c)
License : BSD3-style (see LICENSE)
Maintainer :
Stability : unstable
Portability : unportable
Warp the pointer to a given window or screen.
---------------------------------------------------------------------------
* Usage
$usage
@@ Move pointer to currently focused window
mod - ctrl-{w , e , r } @@ Move mouse pointer to screen 1 , 2 , or 3
@@ Move pointer to currently focused window
mod-ctrl-{w,e,r} @@ Move mouse pointer to screen 1, 2, or 3
For detailed instructions on editing your key bindings, see
| Same as 'banish' but moves the mouse to the corner of the
currently focused screen
| Warp the pointer to a given position relative to the currently | Module : XMonad . Actions . Warp
module XMonad.Actions.Warp (
banish,
banishScreen,
Corner(..),
warpToScreen,
warpToWindow
) where
import Data.List
import XMonad
import XMonad.StackSet as W
$ usage
You can use this module with the following in your @~\/.xmonad\/xmonad.hs@ :
> import XMonad . Actions . Warp
then add appropriate keybindings to warp the pointer ; for example :
>
>
> [ ( ( modm .| . controlMask , key ) , warpToScreen sc ( 1%2 ) ( 1%2 ) )
> | ( key , sc ) < - zip [ xK_w , , xK_r ] [ 0 .. ] ]
Note that warping to a particular screen may change the focus .
You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
> import XMonad.Actions.Warp
then add appropriate keybindings to warp the pointer; for example:
>
>
> [((modm .|. controlMask, key), warpToScreen sc (1%2) (1%2))
> | (key, sc) <- zip [xK_w, xK_e, xK_r] [0..]]
Note that warping to a particular screen may change the focus.
-}
" XMonad . Doc . Extending#Editing_key_bindings " .
data Corner = UpperLeft | UpperRight | LowerLeft | LowerRight
| Move the mouse cursor to a corner of the focused window . Useful for
uncluttering things .
Internally , this uses numerical parameters . We parametrize on the ' Corner '
type so the user need not see the violence inherent in
the system .
' warpToScreen ' and ' warpToWindow ' can be used in a variety of
ways . Suppose you wanted to emulate 's \'banish\ ' command ,
which moves the mouse pointer to a corner ? warpToWindow can do that !
uncluttering things.
Internally, this uses numerical parameters. We parametrize on the 'Corner'
type so the user need not see the violence inherent in
the system.
'warpToScreen' and 'warpToWindow' can be used in a variety of
ways. Suppose you wanted to emulate Ratpoison's \'banish\' command,
which moves the mouse pointer to a corner? warpToWindow can do that! -}
banish :: Corner -> X ()
banish direction = case direction of
LowerRight -> warpToWindow 1 1
LowerLeft -> warpToWindow 0 1
UpperLeft -> warpToWindow 0 0
UpperRight -> warpToWindow 1 0
banishScreen :: Corner -> X ()
banishScreen direction = case direction of
LowerRight -> warpToCurrentScreen 1 1
LowerLeft -> warpToCurrentScreen 0 1
UpperLeft -> warpToCurrentScreen 0 0
UpperRight -> warpToCurrentScreen 1 0
where
warpToCurrentScreen h v =
do ws <- gets windowset
warpToScreen (W.screen $ current ws) h v
windows (const ws)
fraction :: (Integral a, Integral b) => Rational -> a -> b
fraction f x = floor (f * fromIntegral x)
warp :: Window -> Position -> Position -> X ()
warp w x y = withDisplay $ \d -> io $ warpPointer d none w 0 0 0 0 x y
focused window . Top left = ( 0,0 ) , bottom right = ( 1,1 ) .
warpToWindow :: Rational -> Rational -> X ()
warpToWindow h v =
withDisplay $ \d ->
withFocused $ \w -> do
wa <- io $ getWindowAttributes d w
warp w (fraction h (wa_width wa)) (fraction v (wa_height wa))
| Warp the pointer to the given position ( top left = ( 0,0 ) , bottom
right = ( 1,1 ) ) on the given screen .
warpToScreen :: ScreenId -> Rational -> Rational -> X ()
warpToScreen n h v = do
root <- asks theRoot
(StackSet {current = x, visible = xs}) <- gets windowset
whenJust (fmap (screenRect . W.screenDetail) . find ((n==) . W.screen) $ x : xs)
$ \r ->
warp root (rect_x r + fraction h (rect_width r))
(rect_y r + fraction v (rect_height r))
|
fa442cb73136cd964eb9d2847d61fa208fe89681dd6dc69902e1db00df201043 | jaspervdj/firefly | Input.hs | --------------------------------------------------------------------------------
# LANGUAGE ForeignFunctionInterface #
module Firefly.Input
( Key
, MouseButton
, setGrabInput
, isGrabInput
, flushInput
, sendQuit
, hasReceivedQuit
, isKeyDown
, isKeyPressed
, isKeyReleased
, isMouseButtonDown
, isMouseButtonPressed
, isMouseButtonReleased
, getMousePosition
) where
--------------------------------------------------------------------------------
import Foreign.C.Types (CInt (..))
--------------------------------------------------------------------------------
import Firefly.Internal
import Firefly.Input.Internal
--------------------------------------------------------------------------------
foreign import ccall unsafe "ff_setGrabInput" ff_setGrabInput
:: CInt -> IO ()
foreign import ccall unsafe "ff_isGrabInput" ff_isGrabInput
:: IO CInt
foreign import ccall unsafe "ff_flushInput" ff_flushInput
:: IO ()
foreign import ccall unsafe "ff_sendQuit" ff_sendQuit
:: IO ()
foreign import ccall unsafe "ff_hasReceivedQuit" ff_hasReceivedQuit
:: IO CInt
foreign import ccall unsafe "ff_isKeyDown" ff_isKeyDown
:: CInt -> IO CInt
foreign import ccall unsafe "ff_isKeyPressed" ff_isKeyPressed
:: CInt -> IO CInt
foreign import ccall unsafe "ff_isKeyReleased" ff_isKeyReleased
:: CInt -> IO CInt
foreign import ccall unsafe "ff_isMouseButtonDown" ff_isMouseButtonDown
:: CInt -> IO CInt
foreign import ccall unsafe "ff_isMouseButtonPressed" ff_isMouseButtonPressed
:: CInt -> IO CInt
foreign import ccall unsafe "ff_isMouseButtonReleased" ff_isMouseButtonReleased
:: CInt -> IO CInt
foreign import ccall unsafe "ff_getMouseX" ff_getMouseX
:: IO CInt
foreign import ccall unsafe "ff_getMouseY" ff_getMouseY
:: IO CInt
--------------------------------------------------------------------------------
setGrabInput :: Bool -> IO ()
setGrabInput = ff_setGrabInput . fromBool
# INLINE setGrabInput #
--------------------------------------------------------------------------------
isGrabInput :: IO Bool
isGrabInput = fmap toBool ff_isGrabInput
# INLINE isGrabInput #
--------------------------------------------------------------------------------
-- | Process all pending events. You should call this each step of your main
-- game loop, just before processing the input using the other functions in this
-- module.
flushInput :: IO ()
flushInput = ff_flushInput
# INLINE flushInput #
--------------------------------------------------------------------------------
-- | Send a quit signal. This does *not* automatically kill the program, you
-- should manually check 'hasReceivedQuit' in your game loop.
sendQuit :: IO ()
sendQuit = ff_sendQuit
# INLINE sendQuit #
--------------------------------------------------------------------------------
-- | Did the user close the window?
hasReceivedQuit :: IO Bool
hasReceivedQuit = fmap toBool ff_hasReceivedQuit
# INLINE hasReceivedQuit #
--------------------------------------------------------------------------------
-- | Is the key being held down?
isKeyDown :: Key -> IO Bool
isKeyDown (Key code) = fmap toBool (ff_isKeyDown code)
# INLINE isKeyDown #
--------------------------------------------------------------------------------
isKeyPressed :: Key -> IO Bool
isKeyPressed (Key code) = fmap toBool (ff_isKeyPressed code)
{-# INLINE isKeyPressed #-}
--------------------------------------------------------------------------------
isKeyReleased :: Key -> IO Bool
isKeyReleased (Key code) = fmap toBool (ff_isKeyReleased code)
{-# INLINE isKeyReleased #-}
--------------------------------------------------------------------------------
-- | Is the mouse button being held down?
isMouseButtonDown :: MouseButton -> IO Bool
isMouseButtonDown (MouseButton code) =
fmap toBool (ff_isMouseButtonDown code)
# INLINE isMouseButtonDown #
--------------------------------------------------------------------------------
isMouseButtonPressed :: MouseButton -> IO Bool
isMouseButtonPressed (MouseButton code) =
fmap toBool (ff_isMouseButtonPressed code)
{-# INLINE isMouseButtonPressed #-}
--------------------------------------------------------------------------------
isMouseButtonReleased :: MouseButton -> IO Bool
isMouseButtonReleased (MouseButton code) =
fmap toBool (ff_isMouseButtonReleased code)
# INLINE isMouseButtonReleased #
--------------------------------------------------------------------------------
-- | Obtain the current mouse position
getMousePosition :: IO (Int, Int)
getMousePosition = do
x <- ff_getMouseX
y <- ff_getMouseY
return (fromIntegral x, fromIntegral y)
# INLINE getMousePosition #
| null | https://raw.githubusercontent.com/jaspervdj/firefly/71e1f5f11293272bedc26444446553a24ee318ad/src/Firefly/Input.hs | haskell | ------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Process all pending events. You should call this each step of your main
game loop, just before processing the input using the other functions in this
module.
------------------------------------------------------------------------------
| Send a quit signal. This does *not* automatically kill the program, you
should manually check 'hasReceivedQuit' in your game loop.
------------------------------------------------------------------------------
| Did the user close the window?
------------------------------------------------------------------------------
| Is the key being held down?
------------------------------------------------------------------------------
# INLINE isKeyPressed #
------------------------------------------------------------------------------
# INLINE isKeyReleased #
------------------------------------------------------------------------------
| Is the mouse button being held down?
------------------------------------------------------------------------------
# INLINE isMouseButtonPressed #
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Obtain the current mouse position | # LANGUAGE ForeignFunctionInterface #
module Firefly.Input
( Key
, MouseButton
, setGrabInput
, isGrabInput
, flushInput
, sendQuit
, hasReceivedQuit
, isKeyDown
, isKeyPressed
, isKeyReleased
, isMouseButtonDown
, isMouseButtonPressed
, isMouseButtonReleased
, getMousePosition
) where
import Foreign.C.Types (CInt (..))
import Firefly.Internal
import Firefly.Input.Internal
foreign import ccall unsafe "ff_setGrabInput" ff_setGrabInput
:: CInt -> IO ()
foreign import ccall unsafe "ff_isGrabInput" ff_isGrabInput
:: IO CInt
foreign import ccall unsafe "ff_flushInput" ff_flushInput
:: IO ()
foreign import ccall unsafe "ff_sendQuit" ff_sendQuit
:: IO ()
foreign import ccall unsafe "ff_hasReceivedQuit" ff_hasReceivedQuit
:: IO CInt
foreign import ccall unsafe "ff_isKeyDown" ff_isKeyDown
:: CInt -> IO CInt
foreign import ccall unsafe "ff_isKeyPressed" ff_isKeyPressed
:: CInt -> IO CInt
foreign import ccall unsafe "ff_isKeyReleased" ff_isKeyReleased
:: CInt -> IO CInt
foreign import ccall unsafe "ff_isMouseButtonDown" ff_isMouseButtonDown
:: CInt -> IO CInt
foreign import ccall unsafe "ff_isMouseButtonPressed" ff_isMouseButtonPressed
:: CInt -> IO CInt
foreign import ccall unsafe "ff_isMouseButtonReleased" ff_isMouseButtonReleased
:: CInt -> IO CInt
foreign import ccall unsafe "ff_getMouseX" ff_getMouseX
:: IO CInt
foreign import ccall unsafe "ff_getMouseY" ff_getMouseY
:: IO CInt
setGrabInput :: Bool -> IO ()
setGrabInput = ff_setGrabInput . fromBool
# INLINE setGrabInput #
isGrabInput :: IO Bool
isGrabInput = fmap toBool ff_isGrabInput
# INLINE isGrabInput #
flushInput :: IO ()
flushInput = ff_flushInput
# INLINE flushInput #
sendQuit :: IO ()
sendQuit = ff_sendQuit
# INLINE sendQuit #
hasReceivedQuit :: IO Bool
hasReceivedQuit = fmap toBool ff_hasReceivedQuit
# INLINE hasReceivedQuit #
isKeyDown :: Key -> IO Bool
isKeyDown (Key code) = fmap toBool (ff_isKeyDown code)
# INLINE isKeyDown #
isKeyPressed :: Key -> IO Bool
isKeyPressed (Key code) = fmap toBool (ff_isKeyPressed code)
isKeyReleased :: Key -> IO Bool
isKeyReleased (Key code) = fmap toBool (ff_isKeyReleased code)
isMouseButtonDown :: MouseButton -> IO Bool
isMouseButtonDown (MouseButton code) =
fmap toBool (ff_isMouseButtonDown code)
# INLINE isMouseButtonDown #
isMouseButtonPressed :: MouseButton -> IO Bool
isMouseButtonPressed (MouseButton code) =
fmap toBool (ff_isMouseButtonPressed code)
isMouseButtonReleased :: MouseButton -> IO Bool
isMouseButtonReleased (MouseButton code) =
fmap toBool (ff_isMouseButtonReleased code)
# INLINE isMouseButtonReleased #
getMousePosition :: IO (Int, Int)
getMousePosition = do
x <- ff_getMouseX
y <- ff_getMouseY
return (fromIntegral x, fromIntegral y)
# INLINE getMousePosition #
|
6bca262b99c2c0d53719112f6eeeb76a3afe9c96cc02acb8afa61fd6f1abc08c | clash-lang/clash-compiler | Annotations.hs | |
Copyright : ( C ) 2021 - 2022 , QBayLogic B.V. ,
2022 , Google Inc. ,
License : BSD2 ( see the file LICENSE )
Maintainer : QBayLogic B.V. < >
Copyright : (C) 2021-2022, QBayLogic B.V.,
2022 , Google Inc.,
License : BSD2 (see the file LICENSE)
Maintainer : QBayLogic B.V. <>
-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
module Clash.Cores.Xilinx.Floating.Annotations
( veriBinaryPrim
, vhdlBinaryPrim
, vhdlFromUPrim
, veriFromUPrim
, vhdlFromSPrim
, veriFromSPrim
, vhdlComparePrim
, veriComparePrim
) where
import Prelude
import Data.String.Interpolate (__i)
import Language.Haskell.TH.Syntax (Name)
import Clash.Annotations.Primitive (Primitive(..), HDL(..))
import Clash.Cores.Xilinx.Floating.BlackBoxes
-- | The 'InlineYamlPrimitive' annotation for a binary floating point function
-- in VHDL.
--
-- Note: the BlackBox template includes @~DEVNULL[~LIT[3]]@, which ensures the
-- template function (the @tclTFName@ argument) gets a fully evaluated
-- @Config@ even though the argument itself is discarded from the HDL output.
vhdlBinaryPrim
  :: Name
  -- ^ Name of the Haskell primitive this black box implements
  -> Name
  -- ^ Name of the Tcl template function that instantiates the Xilinx IP
  -> String
  -- ^ Operation name, used for labels and comments in the generated VHDL
  -> Primitive
vhdlBinaryPrim primName tclTFName funcName = InlineYamlPrimitive [VHDL] [__i|
  BlackBox:
    name: #{primName}
    type: |-
      #{primName}
         :: ( KnownDomain dom      -- ARG[0]
            , KnownNat d           -- ARG[1]
            , HasCallStack         -- ARG[2]
            )
         => Config                 -- ARG[3]
         -> Clock dom              -- ARG[4]
         -> Enable dom             -- ARG[5]
         -> DSignal dom n Float    -- x, ARG[6]
         -> DSignal dom n Float    -- y, ARG[7]
         -> DSignal dom (n + d) Float
    kind: Declaration
    template: |-
      -- #{funcName} begin
      ~DEVNULL[~LIT[3]]~GENSYM[#{funcName}][0] : block
        COMPONENT ~INCLUDENAME[0]
          PORT (
            aclk : IN STD_LOGIC;
            ~IF~ISACTIVEENABLE[5]~THEN aclken : IN STD_LOGIC;
            ~ELSE~FI s_axis_a_tvalid : IN STD_LOGIC;
            s_axis_a_tdata : IN STD_LOGIC_VECTOR(31 DOWNTO 0);
            s_axis_b_tvalid : IN STD_LOGIC;
            s_axis_b_tdata : IN STD_LOGIC_VECTOR(31 DOWNTO 0);
            m_axis_result_tvalid : OUT STD_LOGIC;
            m_axis_result_tdata : OUT STD_LOGIC_VECTOR(31 DOWNTO 0)
          );
        END COMPONENT;
        ~IF~ISACTIVEENABLE[5]~THEN signal ~GENSYM[clken_std][2]: std_logic;
      begin
        ~SYM[2] <= '1' when (~ARG[5]) else '0';
      ~ELSEbegin
      ~FI ~GENSYM[#{funcName}][1] : ~INCLUDENAME[0]
        PORT MAP (
          aclk => ~ARG[4],
          ~IF~ISACTIVEENABLE[5]~THEN aclken => ~SYM[2],
          ~ELSE~FI s_axis_a_tvalid => '1',
          s_axis_a_tdata => ~ARG[6],
          s_axis_b_tvalid => '1',
          s_axis_b_tdata => ~ARG[7],
          m_axis_result_tvalid => open,
          m_axis_result_tdata => ~RESULT
        );
      end block;
      -- #{funcName} end
  includes:
    - extension: clash.tcl
      name: floating_point
      format: Haskell
      templateFunction: #{tclTFName}
|]
-- | The 'InlineYamlPrimitive' annotation for a binary floating point function
-- in (System)Verilog.
--
-- Note: the BlackBox template includes @~DEVNULL[~LIT[3]]@, which ensures the
-- template function (the @tclTFName@ argument) gets a fully evaluated
-- @Config@ even though the argument itself is discarded from the HDL output.
veriBinaryPrim
  :: Name
  -- ^ Name of the Haskell primitive this black box implements
  -> Name
  -- ^ Name of the Tcl template function that instantiates the Xilinx IP
  -> String
  -- ^ Operation name, used for the generated instance label
  -> Primitive
veriBinaryPrim primName tclTFName funcName =
  InlineYamlPrimitive [Verilog, SystemVerilog] [__i|
    BlackBox:
      name: #{primName}
      type: |-
        #{primName}
           :: ( KnownDomain dom      -- ARG[0]
              , KnownNat d           -- ARG[1]
              , HasCallStack         -- ARG[2]
              )
           => Config                 -- ARG[3]
           -> Clock dom              -- ARG[4]
           -> Enable dom             -- ARG[5]
           -> DSignal dom n Float    -- x, ARG[6]
           -> DSignal dom n Float    -- y, ARG[7]
           -> DSignal dom (n + d) Float
      kind: Declaration
      template: |-
        ~DEVNULL[~LIT[3]]~INCLUDENAME[0] ~GENSYM[#{funcName}][0] (
          .aclk(~ARG[4]),
          ~IF~ISACTIVEENABLE[5]~THEN .aclken(~ARG[5]),
          ~ELSE~FI .s_axis_a_tvalid(1'b1),
          .s_axis_a_tdata(~ARG[6]),
          .s_axis_b_tvalid(1'b1),
          .s_axis_b_tdata(~ARG[7]),
          .m_axis_result_tvalid(),
          .m_axis_result_tdata(~RESULT)
        );
    includes:
      - extension: clash.tcl
        name: floating_point
        format: Haskell
        templateFunction: #{tclTFName}
  |]
-- | The 'InlineYamlPrimitive' annotation for the @Unsigned@-to-'Float'
-- conversion primitive in VHDL. Uses 'fromUTclTF' as the Tcl template
-- function that instantiates the Xilinx IP.
vhdlFromUPrim
  :: Name
  -- ^ Name of the Haskell primitive this black box implements
  -> String
  -- ^ Operation name, used for labels and comments in the generated VHDL
  -> Primitive
vhdlFromUPrim primName funcName =
  let tfName = 'fromUTclTF
      -- Black box argument indices (~ARG[..]) and generated symbol indices
      -- (~GENSYM/~SYM[..]) used in the template below. Note that argument
      -- and symbol indices live in separate namespaces.
      clockArg, enableArg, inputArg, blockSym, inpSlvSym, compSym,
        clkEnStdSym :: Int
      clockArg = 3
      enableArg = 4
      inputArg = 5
      blockSym = 0
      inpSlvSym = 1
      compSym = 2
      clkEnStdSym = 3  -- symbol index of the clken_std signal
  in InlineYamlPrimitive [VHDL] [__i|
    BlackBox:
      name: #{primName}
      type: |-
        #{primName}
          :: ( KnownDomain dom              -- ARG[0]
             , KnownNat d                   -- ARG[1]
             , HasCallStack                 -- ARG[2]
             )
          => Clock dom                      -- clockArg, ARG[3]
          -> Enable dom                     -- enableArg, ARG[4]
          -> DSignal dom n (Unsigned ..)    -- inputArg, ARG[5]
          -> DSignal dom (n + d) Float
      kind: Declaration
      template: |-
        -- #{funcName} begin
        ~GENSYM[#{funcName}][#{blockSym}] : block
          component ~INCLUDENAME[0]
            port (
              aclk : in std_logic;
              ~IF~ISACTIVEENABLE[#{enableArg}]~THEN aclken : in std_logic;
              ~ELSE~FI s_axis_a_tvalid : in std_logic;
              s_axis_a_tdata : in std_logic_vector(~SIZE[~TYP[#{inputArg}]]-1 downto 0);
              m_axis_result_tvalid : out std_logic;
              m_axis_result_tdata : out std_logic_vector(31 downto 0)
            );
          end component;
          signal ~GENSYM[inp_slv][#{inpSlvSym}]: std_logic_vector(~SIZE[~TYP[#{inputArg}]]-1 downto 0);
          ~IF~ISACTIVEENABLE[#{enableArg}]~THEN signal ~GENSYM[clken_std][#{clkEnStdSym}]: std_logic;
        begin
          ~SYM[#{clkEnStdSym}] <= '1' when (~ARG[#{enableArg}]) else '0';
        ~ELSEbegin
        ~FI ~SYM[#{inpSlvSym}] <= ~TOBV[~ARG[#{inputArg}]][~TYP[#{inputArg}]];
        ~GENSYM[#{funcName}][#{compSym}] : ~INCLUDENAME[0]
          port map (
            aclk => ~ARG[#{clockArg}],
            ~IF~ISACTIVEENABLE[#{enableArg}]~THEN aclken => ~SYM[#{clkEnStdSym}],
            ~ELSE~FI s_axis_a_tvalid => '1',
            s_axis_a_tdata => ~SYM[#{inpSlvSym}],
            m_axis_result_tvalid => open,
            m_axis_result_tdata => ~RESULT
          );
        end block;
        -- #{funcName} end
    includes:
      - extension: clash.tcl
        name: floating_point
        format: Haskell
        templateFunction: #{tfName}
  |]
-- | The 'InlineYamlPrimitive' annotation for the @Unsigned@-to-'Float'
-- conversion primitive in (System)Verilog. Uses 'fromUTclTF' as the Tcl
-- template function that instantiates the Xilinx IP.
veriFromUPrim
  :: Name
  -- ^ Name of the Haskell primitive this black box implements
  -> String
  -- ^ Operation name, used for labels and comments in the generated Verilog
  -> Primitive
veriFromUPrim primName funcName =
  let tfName = 'fromUTclTF
      -- Black box argument indices (~ARG[..]) and the generated symbol index
      -- (~GENSYM[..]) used in the template below.
      clockArg, enableArg, inputArg, instSym :: Int
      clockArg = 3
      enableArg = 4
      inputArg = 5
      instSym = 0
  in InlineYamlPrimitive [Verilog, SystemVerilog] [__i|
    BlackBox:
      name: #{primName}
      type: |-
        #{primName}
          :: ( KnownDomain dom              -- ARG[0]
             , KnownNat d                   -- ARG[1]
             , HasCallStack                 -- ARG[2]
             )
          => Clock dom                      -- clockArg, ARG[3]
          -> Enable dom                     -- enableArg, ARG[4]
          -> DSignal dom n (Unsigned ..)    -- inputArg, ARG[5]
          -> DSignal dom (n + d) Float
      kind: Declaration
      template: |-
        // #{funcName} begin
        ~INCLUDENAME[0] ~GENSYM[#{funcName}][#{instSym}] (
          .aclk(~ARG[#{clockArg}]),
          ~IF~ISACTIVEENABLE[#{enableArg}]~THEN .aclken(~ARG[#{enableArg}]),
          ~ELSE~FI .s_axis_a_tvalid(1'b1),
          .s_axis_a_tdata(~ARG[#{inputArg}]),
          .m_axis_result_tvalid(),
          .m_axis_result_tdata(~RESULT)
        );
        // #{funcName} end
    includes:
      - extension: clash.tcl
        name: floating_point
        format: Haskell
        templateFunction: #{tfName}
  |]
-- | The primitive annotation for the conversion of a @Signed@ number to
-- 'Float', in VHDL.
--
-- The template declares a component for the Xilinx Floating-Point IP core
-- (generated by the @floating_point@ tcl template function, see the
-- @includes@ section), converts the input to @std_logic_vector@ and, when an
-- active enable is present, converts the enable to @std_logic@ before wiring
-- everything to the IP core's AXI-Stream ports.
vhdlFromSPrim
  :: Name
  -- ^ Name of the Haskell primitive this annotation is attached to
  -> String
  -- ^ Base name used for the generated block\/instance labels in the HDL
  -> Primitive
vhdlFromSPrim primName funcName =
  let tfName = 'fromSTclTF
      -- Positions of the primitive's term-level arguments; ARG[0..2] are the
      -- constraint arguments, see the type rendered in the blackbox below.
      clockArg, enableArg, inputArg, blockSym, inpSlvSym, compSym,
        clkEnStdSym :: Int
      clockArg = 3
      enableArg = 4
      inputArg = 5
      -- ~GENSYM indices for the generated VHDL identifiers.
      blockSym = 0
      inpSlvSym = 1
      compSym = 2
      -- NOTE(fix): the aclken association in the port map below refers to the
      -- clken_std conversion signal as ~SYM[#{clkEnStdSym}]. It previously
      -- referred to ~SYM[#{clockArg}], which only rendered the correct
      -- identifier because clockArg and clkEnStdSym happen to both equal 3;
      -- renumbering either constant would have silently broken the template.
      clkEnStdSym = 3
  in InlineYamlPrimitive [VHDL] [__i|
    BlackBox:
      name: #{primName}
      type: |-
        #{primName}
        :: ( KnownDomain dom -- ARG[0]
           , KnownNat d -- ARG[1]
           , HasCallStack -- ARG[2]
           )
        => Clock dom -- clockArg, ARG[3]
        -> Enable dom -- enableArg, ARG[4]
        -> DSignal dom n (Signed ..) -- inputArg , ARG[5]
        -> DSignal dom (n + d) Float
      kind: Declaration
      template: |-
        -- #{funcName} begin
        ~GENSYM[#{funcName}][#{blockSym}] : block
          component ~INCLUDENAME[0]
            port (
              aclk : in std_logic;
              ~IF~ISACTIVEENABLE[#{enableArg}]~THEN aclken : in std_logic;
              ~ELSE~FI s_axis_a_tvalid : in std_logic;
              s_axis_a_tdata : in std_logic_vector(~SIZE[~TYP[#{inputArg}]]-1 downto 0);
              m_axis_result_tvalid : out std_logic;
              m_axis_result_tdata : out std_logic_vector(31 downto 0)
            );
          end component;
          signal ~GENSYM[inp_slv][#{inpSlvSym}]: std_logic_vector(~SIZE[~TYP[#{inputArg}]]-1 downto 0);
          ~IF~ISACTIVEENABLE[#{enableArg}]~THEN signal ~GENSYM[clken_std][#{clkEnStdSym}]: std_logic;
        begin
          ~SYM[#{clkEnStdSym}] <= '1' when (~ARG[#{enableArg}]) else '0';
        ~ELSEbegin
        ~FI ~SYM[#{inpSlvSym}] <= ~TOBV[~ARG[#{inputArg}]][~TYP[#{inputArg}]];
          ~GENSYM[#{funcName}][#{compSym}] : ~INCLUDENAME[0]
            port map (
              aclk => ~ARG[#{clockArg}],
              ~IF~ISACTIVEENABLE[#{enableArg}]~THEN aclken => ~SYM[#{clkEnStdSym}],
              ~ELSE~FI s_axis_a_tvalid => '1',
              s_axis_a_tdata => ~SYM[#{inpSlvSym}],
              m_axis_result_tvalid => open,
              m_axis_result_tdata => ~RESULT
            );
        end block;
        -- #{funcName} end
      includes:
        - extension: clash.tcl
          name: floating_point
          format: Haskell
          templateFunction: #{tfName}
  |]
-- | The primitive annotation for the conversion of a @Signed@ number to
-- 'Float', in Verilog and SystemVerilog.
--
-- The template instantiates the Xilinx Floating-Point IP core (generated by
-- the @floating_point@ tcl template function, see the @includes@ section) and
-- wires the clock, enable and input arguments to its AXI-Stream ports.
veriFromSPrim
  :: Name
  -- ^ Name of the Haskell primitive this annotation is attached to
  -> String
  -- ^ Base name used for the generated instance label in the HDL
  -> Primitive
veriFromSPrim primName funcName =
  let tfName = 'fromSTclTF
      -- Positions of the primitive's term-level arguments; ARG[0..2] are the
      -- constraint arguments, see the type rendered in the blackbox below.
      clockArg, enableArg, inputArg, instSym :: Int
      clockArg = 3
      enableArg = 4
      inputArg = 5
      -- ~GENSYM index used for the IP instance label.
      instSym = 0
  in InlineYamlPrimitive [Verilog, SystemVerilog] [__i|
    BlackBox:
      name: #{primName}
      type: |-
        #{primName}
        :: ( KnownDomain dom -- ARG[0]
           , KnownNat d -- ARG[1]
           , HasCallStack -- ARG[2]
           )
        => Clock dom -- clockArg, ARG[3]
        -> Enable dom -- enableArg, ARG[4]
        -> DSignal dom n (Signed ..) -- inputArg , ARG[5]
        -> DSignal dom (n + d) Float
      kind: Declaration
      template: |-
        // #{funcName} begin
        ~INCLUDENAME[0] ~GENSYM[#{funcName}][#{instSym}] (
          .aclk(~ARG[#{clockArg}]),
          ~IF~ISACTIVEENABLE[#{enableArg}]~THEN .aclken(~ARG[#{enableArg}]),
          ~ELSE~FI .s_axis_a_tvalid(1'b1),
          .s_axis_a_tdata(~ARG[#{inputArg}]),
          .m_axis_result_tvalid(),
          .m_axis_result_tdata(~RESULT)
        );
        // #{funcName} end
      includes:
        - extension: clash.tcl
          name: floating_point
          format: Haskell
          templateFunction: #{tfName}
  |]
-- | The InlinePrimitive annotation for Xilinx's compare floating point
-- primitive, in VHDL.
vhdlComparePrim
  :: Name
  -- ^ Name of the Haskell primitive this annotation is attached to
  -> Name
  -- ^ Name of the tcl template function that generates the IP core
  -> String
  -- ^ Base name used for the generated block\/instance labels in the HDL
  -> Primitive
vhdlComparePrim primName tclTFName funcName = InlineYamlPrimitive [VHDL] [__i|
  BlackBox:
    name: #{primName}
    kind: Declaration
    template: |-
      -- #{funcName} begin
      ~GENSYM[#{funcName}][#{blockSym}] : block
        COMPONENT ~INCLUDENAME[0]
          PORT (
            aclk : IN STD_LOGIC;
            ~IF~ISACTIVEENABLE[#{enable}]~THEN aclken : IN STD_LOGIC;
            ~ELSE~FI s_axis_a_tvalid : IN STD_LOGIC;
            s_axis_a_tdata : IN STD_LOGIC_VECTOR(31 DOWNTO 0);
            s_axis_b_tvalid : IN STD_LOGIC;
            s_axis_b_tdata : IN STD_LOGIC_VECTOR(31 DOWNTO 0);
            m_axis_result_tvalid : OUT STD_LOGIC;
            m_axis_result_tdata : OUT STD_LOGIC_VECTOR(7 DOWNTO 0)
          );
        END COMPONENT;
        ~IF~ISACTIVEENABLE[#{enable}]~THEN signal ~GENSYM[clken_std][#{clkEnStdSym}]: std_logic;~ELSE~FI
        signal ~GENSYM[ip_result][#{ipResultSym}] : std_logic_vector(7 downto 0);
      begin
        ~IF~ISACTIVEENABLE[#{enable}]~THEN~SYM[#{clkEnStdSym}] <= '1' when (~ARG[#{enable}]) else '0';~ELSE~FI
        ~RESULT <= ~SYM[#{ipResultSym}](3 downto 0);
        ~GENSYM[#{funcName}][#{compSym}] : ~INCLUDENAME[0]
          PORT MAP (
            aclk => ~ARG[#{clock}],
            ~IF~ISACTIVEENABLE[#{enable}]~THEN aclken => ~SYM[#{clkEnStdSym}],
            ~ELSE~FI s_axis_a_tvalid => '1',
            s_axis_a_tdata => ~ARG[#{x}],
            s_axis_b_tvalid => '1',
            s_axis_b_tdata => ~ARG[#{y}],
            m_axis_result_tvalid => open,
            m_axis_result_tdata => ~SYM[#{ipResultSym}]
          );
      end block;
      -- #{funcName} end
    includes:
      - extension: clash.tcl
        name: floating_point
        format: Haskell
        templateFunction: #{tclTFName}
|]
 where
  -- Positions of the primitive's term-level arguments. The three constraint
  -- arguments are unused here (underscored) but kept in the pattern so the
  -- numbering lines up with the ~ARG[..] indices used in the template. Note
  -- that only the low 4 bits of the IP core's 8-bit result feed ~RESULT.
  clock, enable, x, y :: Int
  ( _knownDomain :: Int
    , _knownNat :: Int
    , _hasCallStack :: Int
    , clock
    , enable
    , x
    , y
    ) = (0,1,2,3,4,5,6)
  -- ~GENSYM indices for the generated VHDL identifiers.
  blockSym, compSym, clkEnStdSym, ipResultSym :: Int
  ( blockSym
    , compSym
    , clkEnStdSym
    , ipResultSym
    ) = (0,1,2,3)
-- | The InlinePrimitive annotation for Xilinx's compare floating point
-- primitive, in Verilog.
veriComparePrim
  :: Name
  -- ^ Name of the Haskell primitive this annotation is attached to
  -> Name
  -- ^ Name of the tcl template function that generates the IP core
  -> String
  -- ^ Base name used for the generated instance label in the HDL
  -> Primitive
veriComparePrim primName tclTFName funcName =
  InlineYamlPrimitive [Verilog, SystemVerilog] [__i|
    BlackBox:
      name: #{primName}
      kind: Declaration
      template: |-
        // #{funcName} begin
        ~INCLUDENAME[0] ~GENSYM[#{funcName}][#{compSym}] (
          .aclk(~ARG[#{clock}]),
          ~IF~ISACTIVEENABLE[#{enable}]~THEN .aclken(~ARG[#{enable}]),
          ~ELSE~FI .s_axis_a_tvalid(1'b1),
          .s_axis_a_tdata(~ARG[#{x}]),
          .s_axis_b_tvalid(1'b1),
          .s_axis_b_tdata(~ARG[#{y}]),
          .m_axis_result_tvalid(),
          .m_axis_result_tdata(~RESULT)
        );
        // #{funcName} end
      includes:
        - extension: clash.tcl
          name: floating_point
          format: Haskell
          templateFunction: #{tclTFName}
  |]
 where
  -- Positions of the primitive's term-level arguments. The three constraint
  -- arguments are unused here (underscored) but kept in the pattern so the
  -- numbering lines up with the ~ARG[..] indices used in the template.
  clock, enable, x, y :: Int
  ( _knownDomain :: Int
    , _knownNat :: Int
    , _hasCallStack :: Int
    , clock
    , enable
    , x
    , y
    ) = (0,1,2,3,4,5,6)
  -- ~GENSYM index for the generated instance label.
  compSym = 0 :: Int
| null | https://raw.githubusercontent.com/clash-lang/clash-compiler/ca0449ba58bed122bd511cb75959de2b0cea29a3/clash-cores/src/Clash/Cores/Xilinx/Floating/Annotations.hs | haskell | template function (tclTFName argument) gets a fully evaluated Config.
ARG[0]
ARG[1]
ARG[3]
ARG[4]
ARG[5]
x , ARG[6]
y , ARG[7]
#{funcName} begin
#{funcName} end
template function (tclTFName argument) gets a fully evaluated Config.
ARG[0]
ARG[1]
ARG[3]
ARG[4]
ARG[5]
x , ARG[6]
y , ARG[7]
ARG[0]
ARG[1]
ARG[2]
clockArg, ARG[3]
enableArg, ARG[4]
inputArg , ARG[5]
#{funcName} begin
#{funcName} end
ARG[0]
ARG[1]
ARG[2]
clockArg, ARG[3]
enableArg, ARG[4]
inputArg , ARG[5]
ARG[0]
ARG[1]
ARG[2]
clockArg, ARG[3]
enableArg, ARG[4]
inputArg , ARG[5]
#{funcName} begin
#{funcName} end
ARG[0]
ARG[1]
ARG[2]
clockArg, ARG[3]
enableArg, ARG[4]
inputArg , ARG[5]
#{funcName} begin
#{funcName} end | |
Copyright : ( C ) 2021 - 2022 , QBayLogic B.V. ,
2022 , Google Inc. ,
License : BSD2 ( see the file LICENSE )
Maintainer : QBayLogic B.V. < >
Copyright : (C) 2021-2022, QBayLogic B.V.,
2022 , Google Inc.,
License : BSD2 (see the file LICENSE)
Maintainer : QBayLogic B.V. <>
-}
# LANGUAGE QuasiQuotes #
# LANGUAGE TemplateHaskell #
module Clash.Cores.Xilinx.Floating.Annotations
( veriBinaryPrim
, vhdlBinaryPrim
, vhdlFromUPrim
, veriFromUPrim
, vhdlFromSPrim
, veriFromSPrim
, vhdlComparePrim
, veriComparePrim
) where
import Prelude
import Data.String.Interpolate (__i)
import Language.Haskell.TH.Syntax (Name)
import Clash.Annotations.Primitive (Primitive(..), HDL(..))
import Clash.Cores.Xilinx.Floating.BlackBoxes
| The InlinePrimitive annotation for a binary function in .
Note : The BlackBox template includes ~DEVNULL[~LIT[3 ] ] which will ensure the
vhdlBinaryPrim
:: Name
-> Name
-> String
-> Primitive
vhdlBinaryPrim primName tclTFName funcName = InlineYamlPrimitive [VHDL] [__i|
BlackBox:
name: #{primName}
type: |-
#{primName}
ARG[2 ]
)
-> DSignal dom (n + d) Float
kind: Declaration
template: |-
~DEVNULL[~LIT[3]]~GENSYM[#{funcName}][0] : block
COMPONENT ~INCLUDENAME[0]
PORT (
aclk : IN STD_LOGIC;
~IF~ISACTIVEENABLE[5]~THEN aclken : IN STD_LOGIC;
~ELSE~FI s_axis_a_tvalid : IN STD_LOGIC;
s_axis_a_tdata : IN STD_LOGIC_VECTOR(31 DOWNTO 0);
s_axis_b_tvalid : IN STD_LOGIC;
s_axis_b_tdata : IN STD_LOGIC_VECTOR(31 DOWNTO 0);
m_axis_result_tvalid : OUT STD_LOGIC;
m_axis_result_tdata : OUT STD_LOGIC_VECTOR(31 DOWNTO 0)
);
END COMPONENT;
~IF~ISACTIVEENABLE[5]~THEN signal ~GENSYM[clken_std][2]: std_logic;
begin
~SYM[2] <= '1' when (~ARG[5]) else '0';
~ELSEbegin
~FI ~GENSYM[#{funcName}][1] : ~INCLUDENAME[0]
PORT MAP (
aclk => ~ARG[4],
~IF~ISACTIVEENABLE[5]~THEN aclken => ~SYM[2],
~ELSE~FI s_axis_a_tvalid => '1',
s_axis_a_tdata => ~ARG[6],
s_axis_b_tvalid => '1',
s_axis_b_tdata => ~ARG[7],
m_axis_result_tvalid => open,
m_axis_result_tdata => ~RESULT
);
end block;
includes:
- extension: clash.tcl
name: floating_point
format: Haskell
templateFunction: #{tclTFName}
|]
| The InlinePrimitive annotation for a binary function in Verilog .
Note : The BlackBox template includes ~DEVNULL[~LIT[3 ] ] which will ensure the
veriBinaryPrim
:: Name
-> Name
-> String
-> Primitive
veriBinaryPrim primName tclTFName funcName =
InlineYamlPrimitive [Verilog, SystemVerilog] [__i|
BlackBox:
name: #{primName}
type: |-
#{primName}
ARG[2 ]
)
-> DSignal dom (n + d) Float
kind: Declaration
template: |-
~DEVNULL[~LIT[3]]~INCLUDENAME[0] ~GENSYM[#{funcName}][0] (
.aclk(~ARG[4]),
~IF~ISACTIVEENABLE[5]~THEN .aclken(~ARG[5]),
~ELSE~FI .s_axis_a_tvalid(1'b1),
.s_axis_a_tdata(~ARG[6]),
.s_axis_b_tvalid(1'b1),
.s_axis_b_tdata(~ARG[7]),
.m_axis_result_tvalid(),
.m_axis_result_tdata(~RESULT)
);
includes:
- extension: clash.tcl
name: floating_point
format: Haskell
templateFunction: #{tclTFName}
|]
vhdlFromUPrim
:: Name
-> String
-> Primitive
vhdlFromUPrim primName funcName =
let tfName = 'fromUTclTF
clockArg, enableArg, inputArg, blockSym, inpSlvSym, compSym,
clkEnStdSym :: Int
clockArg = 3
enableArg = 4
inputArg = 5
blockSym = 0
inpSlvSym = 1
compSym = 2
clkEnStdSym = 3
in InlineYamlPrimitive [VHDL] [__i|
BlackBox:
name: #{primName}
type: |-
#{primName}
)
-> DSignal dom (n + d) Float
kind: Declaration
template: |-
~GENSYM[#{funcName}][#{blockSym}] : block
component ~INCLUDENAME[0]
port (
aclk : in std_logic;
~IF~ISACTIVEENABLE[#{enableArg}]~THEN aclken : in std_logic;
~ELSE~FI s_axis_a_tvalid : in std_logic;
s_axis_a_tdata : in std_logic_vector(~SIZE[~TYP[#{inputArg}]]-1 downto 0);
m_axis_result_tvalid : out std_logic;
m_axis_result_tdata : out std_logic_vector(31 downto 0)
);
end component;
signal ~GENSYM[inp_slv][#{inpSlvSym}]: std_logic_vector(~SIZE[~TYP[#{inputArg}]]-1 downto 0);
~IF~ISACTIVEENABLE[#{enableArg}]~THEN signal ~GENSYM[clken_std][#{clkEnStdSym}]: std_logic;
begin
~SYM[#{clkEnStdSym}] <= '1' when (~ARG[#{enableArg}]) else '0';
~ELSEbegin
~FI ~SYM[#{inpSlvSym}] <= ~TOBV[~ARG[#{inputArg}]][~TYP[#{inputArg}]];
~GENSYM[#{funcName}][#{compSym}] : ~INCLUDENAME[0]
port map (
aclk => ~ARG[#{clockArg}],
~IF~ISACTIVEENABLE[#{enableArg}]~THEN aclken => ~SYM[#{clockArg}],
~ELSE~FI s_axis_a_tvalid => '1',
s_axis_a_tdata => ~SYM[#{inpSlvSym}],
m_axis_result_tvalid => open,
m_axis_result_tdata => ~RESULT
);
end block;
includes:
- extension: clash.tcl
name: floating_point
format: Haskell
templateFunction: #{tfName}
|]
veriFromUPrim
:: Name
-> String
-> Primitive
veriFromUPrim primName funcName =
let tfName = 'fromUTclTF
clockArg, enableArg, inputArg, instSym :: Int
clockArg = 3
enableArg = 4
inputArg = 5
instSym = 0
in InlineYamlPrimitive [Verilog, SystemVerilog] [__i|
BlackBox:
name: #{primName}
type: |-
#{primName}
)
-> DSignal dom (n + d) Float
kind: Declaration
template: |-
// #{funcName} begin
~INCLUDENAME[0] ~GENSYM[#{funcName}][#{instSym}] (
.aclk(~ARG[#{clockArg}]),
~IF~ISACTIVEENABLE[#{enableArg}]~THEN .aclken(~ARG[#{enableArg}]),
~ELSE~FI .s_axis_a_tvalid(1'b1),
.s_axis_a_tdata(~ARG[#{inputArg}]),
.m_axis_result_tvalid(),
.m_axis_result_tdata(~RESULT)
);
// #{funcName} end
includes:
- extension: clash.tcl
name: floating_point
format: Haskell
templateFunction: #{tfName}
|]
vhdlFromSPrim
:: Name
-> String
-> Primitive
vhdlFromSPrim primName funcName =
let tfName = 'fromSTclTF
clockArg, enableArg, inputArg, blockSym, inpSlvSym, compSym,
clkEnStdSym :: Int
clockArg = 3
enableArg = 4
inputArg = 5
blockSym = 0
inpSlvSym = 1
compSym = 2
clkEnStdSym = 3
in InlineYamlPrimitive [VHDL] [__i|
BlackBox:
name: #{primName}
type: |-
#{primName}
)
-> DSignal dom (n + d) Float
kind: Declaration
template: |-
~GENSYM[#{funcName}][#{blockSym}] : block
component ~INCLUDENAME[0]
port (
aclk : in std_logic;
~IF~ISACTIVEENABLE[#{enableArg}]~THEN aclken : in std_logic;
~ELSE~FI s_axis_a_tvalid : in std_logic;
s_axis_a_tdata : in std_logic_vector(~SIZE[~TYP[#{inputArg}]]-1 downto 0);
m_axis_result_tvalid : out std_logic;
m_axis_result_tdata : out std_logic_vector(31 downto 0)
);
end component;
signal ~GENSYM[inp_slv][#{inpSlvSym}]: std_logic_vector(~SIZE[~TYP[#{inputArg}]]-1 downto 0);
~IF~ISACTIVEENABLE[#{enableArg}]~THEN signal ~GENSYM[clken_std][#{clkEnStdSym}]: std_logic;
begin
~SYM[#{clkEnStdSym}] <= '1' when (~ARG[#{enableArg}]) else '0';
~ELSEbegin
~FI ~SYM[#{inpSlvSym}] <= ~TOBV[~ARG[#{inputArg}]][~TYP[#{inputArg}]];
~GENSYM[#{funcName}][#{compSym}] : ~INCLUDENAME[0]
port map (
aclk => ~ARG[#{clockArg}],
~IF~ISACTIVEENABLE[#{enableArg}]~THEN aclken => ~SYM[#{clockArg}],
~ELSE~FI s_axis_a_tvalid => '1',
s_axis_a_tdata => ~SYM[#{inpSlvSym}],
m_axis_result_tvalid => open,
m_axis_result_tdata => ~RESULT
);
end block;
includes:
- extension: clash.tcl
name: floating_point
format: Haskell
templateFunction: #{tfName}
|]
veriFromSPrim
:: Name
-> String
-> Primitive
veriFromSPrim primName funcName =
let tfName = 'fromSTclTF
clockArg, enableArg, inputArg, instSym :: Int
clockArg = 3
enableArg = 4
inputArg = 5
instSym = 0
in InlineYamlPrimitive [Verilog, SystemVerilog] [__i|
BlackBox:
name: #{primName}
type: |-
#{primName}
)
-> DSignal dom (n + d) Float
kind: Declaration
template: |-
// #{funcName} begin
~INCLUDENAME[0] ~GENSYM[#{funcName}][#{instSym}] (
.aclk(~ARG[#{clockArg}]),
~IF~ISACTIVEENABLE[#{enableArg}]~THEN .aclken(~ARG[#{enableArg}]),
~ELSE~FI .s_axis_a_tvalid(1'b1),
.s_axis_a_tdata(~ARG[#{inputArg}]),
.m_axis_result_tvalid(),
.m_axis_result_tdata(~RESULT)
);
// #{funcName} end
includes:
- extension: clash.tcl
name: floating_point
format: Haskell
templateFunction: #{tfName}
|]
| The InlinePrimitive annotation for Xilinx 's compare floating point
primitive , in .
vhdlComparePrim
:: Name
-> Name
-> String
-> Primitive
vhdlComparePrim primName tclTFName funcName = InlineYamlPrimitive [VHDL] [__i|
BlackBox:
name: #{primName}
kind: Declaration
template: |-
~GENSYM[#{funcName}][#{blockSym}] : block
COMPONENT ~INCLUDENAME[0]
PORT (
aclk : IN STD_LOGIC;
~IF~ISACTIVEENABLE[#{enable}]~THEN aclken : IN STD_LOGIC;
~ELSE~FI s_axis_a_tvalid : IN STD_LOGIC;
s_axis_a_tdata : IN STD_LOGIC_VECTOR(31 DOWNTO 0);
s_axis_b_tvalid : IN STD_LOGIC;
s_axis_b_tdata : IN STD_LOGIC_VECTOR(31 DOWNTO 0);
m_axis_result_tvalid : OUT STD_LOGIC;
m_axis_result_tdata : OUT STD_LOGIC_VECTOR(7 DOWNTO 0)
);
END COMPONENT;
~IF~ISACTIVEENABLE[#{enable}]~THEN signal ~GENSYM[clken_std][#{clkEnStdSym}]: std_logic;~ELSE~FI
signal ~GENSYM[ip_result][#{ipResultSym}] : std_logic_vector(7 downto 0);
begin
~IF~ISACTIVEENABLE[#{enable}]~THEN~SYM[#{clkEnStdSym}] <= '1' when (~ARG[#{enable}]) else '0';~ELSE~FI
~RESULT <= ~SYM[#{ipResultSym}](3 downto 0);
~GENSYM[#{funcName}][#{compSym}] : ~INCLUDENAME[0]
PORT MAP (
aclk => ~ARG[#{clock}],
~IF~ISACTIVEENABLE[#{enable}]~THEN aclken => ~SYM[#{clkEnStdSym}],
~ELSE~FI s_axis_a_tvalid => '1',
s_axis_a_tdata => ~ARG[#{x}],
s_axis_b_tvalid => '1',
s_axis_b_tdata => ~ARG[#{y}],
m_axis_result_tvalid => open,
m_axis_result_tdata => ~SYM[#{ipResultSym}]
);
end block;
includes:
- extension: clash.tcl
name: floating_point
format: Haskell
templateFunction: #{tclTFName}
|]
where
clock, enable, x, y :: Int
( _knownDomain :: Int
, _knownNat :: Int
, _hasCallStack :: Int
, clock
, enable
, x
, y
) = (0,1,2,3,4,5,6)
blockSym, compSym, clkEnStdSym, ipResultSym :: Int
( blockSym
, compSym
, clkEnStdSym
, ipResultSym
) = (0,1,2,3)
| The InlinePrimitive annotation for Xilinx 's compare floating point
primitive , in Verilog .
veriComparePrim
:: Name
-> Name
-> String
-> Primitive
veriComparePrim primName tclTFName funcName =
InlineYamlPrimitive [Verilog, SystemVerilog] [__i|
BlackBox:
name: #{primName}
kind: Declaration
template: |-
// #{funcName} begin
~INCLUDENAME[0] ~GENSYM[#{funcName}][#{compSym}] (
.aclk(~ARG[#{clock}]),
~IF~ISACTIVEENABLE[#{enable}]~THEN .aclken(~ARG[#{enable}]),
~ELSE~FI .s_axis_a_tvalid(1'b1),
.s_axis_a_tdata(~ARG[#{x}]),
.s_axis_b_tvalid(1'b1),
.s_axis_b_tdata(~ARG[#{y}]),
.m_axis_result_tvalid(),
.m_axis_result_tdata(~RESULT)
);
// #{funcName} end
includes:
- extension: clash.tcl
name: floating_point
format: Haskell
templateFunction: #{tclTFName}
|]
where
clock, enable, x, y :: Int
( _knownDomain :: Int
, _knownNat :: Int
, _hasCallStack :: Int
, clock
, enable
, x
, y
) = (0,1,2,3,4,5,6)
compSym = 0 :: Int
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.