_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
ffe66b4476593327ea4ea226c80fdbdfb28b47a3b493d7ef095916eba870b56a | futurice/haskell-mega-repo | Library.hs | # LANGUAGE DataKinds #
# LANGUAGE InstanceSigs #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Futurice.App.Library.Types.Library where
import Data.Aeson
import Data.Aeson.Types (toJSONKeyText)
import Data.Swagger
import Database.PostgreSQL.Simple.FromField
import Database.PostgreSQL.Simple.ToField
import qualified Data.ByteString.Char8 as C
import qualified Data.Text as T
import Futurice.Generics
import Futurice.Office
import Futurice.Prelude
import Prelude ()
data Library
= OfficeLibrary Office
| Elibrary
| UnknownLibrary
deriving (Eq, Show, Ord, Typeable, Generic)
data LibraryOrAll = AllLibraries
| JustLibrary Library
deriving (Eq)
deriveGeneric ''Library
allLibraries :: [Library]
allLibraries = (OfficeLibrary <$> [minBound .. maxBound]) <> [Elibrary] <> [UnknownLibrary]
libraryToText :: Library -> Text
libraryToText (OfficeLibrary office) = officeToText office
libraryToText Elibrary = "Elibrary"
libraryToText UnknownLibrary = "Unknown"
libraryFromText :: Text -> Library
libraryFromText library = case officeFromText library of
Just office -> OfficeLibrary office
Nothing | library == "Elibrary" -> Elibrary
| otherwise -> UnknownLibrary
instance FromField Library where
fromField _ mdata = return library
where library =
let officeText = T.pack <$> C.unpack <$> mdata >>= officeFromText
in case officeText of
Just office -> OfficeLibrary office
Nothing -> case mdata of
Just "Elibrary" -> Elibrary
_ -> UnknownLibrary
instance ToField Library where
toField = toField . libraryToText
instance ToSchema Library where
declareNamedSchema _ = do
return $ NamedSchema (Just "Library") $ mempty
& type_ .~ Just SwaggerString
instance ToJSON Library where
toJSON = toJSON . libraryToText
instance ToJSONKey Library where
toJSONKey = toJSONKeyText libraryToText
instance FromJSON Library where
parseJSON = withText "library" (pure . libraryFromText)
instance ToHtml Library where
toHtml = toHtmlRaw
toHtmlRaw = toHtmlRaw . libraryToText
instance ToHttpApiData LibraryOrAll where
toUrlPiece AllLibraries = "all"
toUrlPiece (JustLibrary lib) = libraryToText lib
instance FromHttpApiData LibraryOrAll where
parseUrlPiece "all" = Right AllLibraries
parseUrlPiece lib = Right $ JustLibrary $ libraryFromText lib
| null | https://raw.githubusercontent.com/futurice/haskell-mega-repo/2647723f12f5435e2edc373f6738386a9668f603/library-app/src/Futurice/App/Library/Types/Library.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators # | # LANGUAGE DataKinds #
# LANGUAGE InstanceSigs #
# LANGUAGE TemplateHaskell #
module Futurice.App.Library.Types.Library where
import Data.Aeson
import Data.Aeson.Types (toJSONKeyText)
import Data.Swagger
import Database.PostgreSQL.Simple.FromField
import Database.PostgreSQL.Simple.ToField
import qualified Data.ByteString.Char8 as C
import qualified Data.Text as T
import Futurice.Generics
import Futurice.Office
import Futurice.Prelude
import Prelude ()
data Library
= OfficeLibrary Office
| Elibrary
| UnknownLibrary
deriving (Eq, Show, Ord, Typeable, Generic)
data LibraryOrAll = AllLibraries
| JustLibrary Library
deriving (Eq)
deriveGeneric ''Library
allLibraries :: [Library]
allLibraries = (OfficeLibrary <$> [minBound .. maxBound]) <> [Elibrary] <> [UnknownLibrary]
libraryToText :: Library -> Text
libraryToText (OfficeLibrary office) = officeToText office
libraryToText Elibrary = "Elibrary"
libraryToText UnknownLibrary = "Unknown"
libraryFromText :: Text -> Library
libraryFromText library = case officeFromText library of
Just office -> OfficeLibrary office
Nothing | library == "Elibrary" -> Elibrary
| otherwise -> UnknownLibrary
instance FromField Library where
fromField _ mdata = return library
where library =
let officeText = T.pack <$> C.unpack <$> mdata >>= officeFromText
in case officeText of
Just office -> OfficeLibrary office
Nothing -> case mdata of
Just "Elibrary" -> Elibrary
_ -> UnknownLibrary
instance ToField Library where
toField = toField . libraryToText
instance ToSchema Library where
declareNamedSchema _ = do
return $ NamedSchema (Just "Library") $ mempty
& type_ .~ Just SwaggerString
instance ToJSON Library where
toJSON = toJSON . libraryToText
instance ToJSONKey Library where
toJSONKey = toJSONKeyText libraryToText
instance FromJSON Library where
parseJSON = withText "library" (pure . libraryFromText)
instance ToHtml Library where
toHtml = toHtmlRaw
toHtmlRaw = toHtmlRaw . libraryToText
instance ToHttpApiData LibraryOrAll where
toUrlPiece AllLibraries = "all"
toUrlPiece (JustLibrary lib) = libraryToText lib
instance FromHttpApiData LibraryOrAll where
parseUrlPiece "all" = Right AllLibraries
parseUrlPiece lib = Right $ JustLibrary $ libraryFromText lib
|
09450a678662a2551c4e54026e1cf4a39cf84b6fbd487118f6a66f5ce4b24b17 | uwplse/synapse | superopt-test.rkt | #lang s-exp rosette
(require
"../opsyn/metasketches/imetasketch.rkt" "../opsyn/metasketches/superoptimization.rkt"
"../opsyn/metasketches/cost.rkt"
"../opsyn/engine/metasketch.rkt" "../opsyn/engine/eval.rkt" "../opsyn/engine/util.rkt"
"../opsyn/bv/lang.rkt"
"util.rkt"
rackunit "test-runner.rkt")
(current-bitwidth 4)
A metasketch with 1 input , 1 instruction , and programs of size up to 3 .
(define M0
(superopt∑ #:arity 1
#:maxlength 3
#:instructions (list bvadd)
#:post (lambda (p inputs)
(assert (= (interpret p inputs) (* 3 (car inputs)))))
#:cost-model sample-cost-model))
A metasketch with 2 inputs ( second of which is fixed to a constant ) ,
3 instructions , and programs of unbounded size .
(define M1
(superopt∑ #:arity 2
#:maxlength +inf.0
#:instructions (list bv bvadd bvmul)
#:pre (lambda (inputs)
(assert (= 3 (second inputs))))
#:post (lambda (p inputs)
(assert (= (interpret p inputs) (- (first inputs) (second inputs)))))
#:cost-model sample-cost-model))
Tests the interface of M0 .
(define (test0)
(test-case "M0 interface"
(check equal? (length (inputs M0)) 1)
(check equal? (set-count (sketches M0)) 3)
(check equal? (set-count (sketches M0 +inf.0)) 3)
(check equal? (set-count (sketches M0 400)) 3)
(check equal? (set-count (sketches M0 -100)) 0)
(check equal? (set-count (sketches M0 0)) 0)
(check equal? (set-count (sketches M0 1)) 0)
(check equal? (set-count (sketches M0 2)) 1)
(check equal? (set-count (sketches M0 3)) 2)
(check equal? (set-count (sketches M0 4)) 3)
(for ([S (sketches M0)])
(check equal? (pre S) null)
(define P (programs S))
(match-define (list i) (isketch-index S))
(check equal? (length (program-instructions P)) i)
(for ([inst (program-instructions P)])
(check-true (bvadd? inst)))
(check equal? (cost M0 P) i)
(check-false (null? (post S P))))
(match-define (list S1 S2 S3) (set->list (sketches M0)))
(check equal? (min-bitwidth M0 S1) 6)
(check equal? (min-bitwidth M0 S2) 6)
(check equal? (min-bitwidth M0 S3) 6)))
; Tests the correctness of M0.
(define (test1)
(test-case "M0 correctness"
(match-define (list S1 S2 S3) (set->list (sketches M0)))
(check-true (unsat? (synth M0 S1)))
(check-true (sat? (synth M0 S2)))
(check-true (sat? (synth M0 S3)))
(check-true (sat? (synth2 M0 S2)))
there is no solution with > 2 instructions
Tests the interface of M1 .
(define (test2)
(test-case "M1 interface"
(check equal? (length (inputs M1)) 2)
(check equal? (set-count (sketches M1)) +inf.0)
(check equal? (set-count (sketches M1 +inf.0)) +inf.0)
(check equal? (set-count (sketches M1 -100)) 0)
(check equal? (set-count (sketches M1 0)) 0)
(for ([i (in-range 1 10)])
(check equal? (set-count (sketches M1 i)) (sub1 i)))
(for ([S (sketches M1 5)])
(check-false (null? (pre S)))
(define P (programs S))
(match-define (list i) (isketch-index S))
(check equal? (length (program-instructions P)) i)
(check-true (term? (cost M1 P)))
(check-false (null? (post S P))))
(match-define (list S1 S2 S3 S4) (set->list (sketches M1 5)))
(check equal? (min-bitwidth M1 S1) 6)
(check equal? (min-bitwidth M1 S2) 6)
(check equal? (min-bitwidth M1 S3) 6)
(check equal? (min-bitwidth M1 S4) 6)))
Tests the correctness of M1 .
(define (test3)
(test-case "M1 correctness"
(match-define (list S1 S2 S3 S4) (set->list (sketches M1 5)))
(check-true (unsat? (synth M1 S1)))
(check-true (sat? (synth M1 S2)))
(check-true (sat? (synth M1 S3)))
(check-true (sat? (synth M1 S4)))
(check-true (sat? (synth2 M1 S2)))
(check-true (sat? (synth2 M1 S3)))
(check-true (sat? (synth2 M1 S4)))))
(define/provide-test-suite superopt-tests
(test0)
(test1)
(test2)
(test3)
)
(run-tests-quiet superopt-tests)
| null | https://raw.githubusercontent.com/uwplse/synapse/10f605f8f1fff6dade90607f516550b961a10169/test/superopt-test.rkt | racket | Tests the correctness of M0. | #lang s-exp rosette
(require
"../opsyn/metasketches/imetasketch.rkt" "../opsyn/metasketches/superoptimization.rkt"
"../opsyn/metasketches/cost.rkt"
"../opsyn/engine/metasketch.rkt" "../opsyn/engine/eval.rkt" "../opsyn/engine/util.rkt"
"../opsyn/bv/lang.rkt"
"util.rkt"
rackunit "test-runner.rkt")
(current-bitwidth 4)
A metasketch with 1 input , 1 instruction , and programs of size up to 3 .
(define M0
(superopt∑ #:arity 1
#:maxlength 3
#:instructions (list bvadd)
#:post (lambda (p inputs)
(assert (= (interpret p inputs) (* 3 (car inputs)))))
#:cost-model sample-cost-model))
A metasketch with 2 inputs ( second of which is fixed to a constant ) ,
3 instructions , and programs of unbounded size .
(define M1
(superopt∑ #:arity 2
#:maxlength +inf.0
#:instructions (list bv bvadd bvmul)
#:pre (lambda (inputs)
(assert (= 3 (second inputs))))
#:post (lambda (p inputs)
(assert (= (interpret p inputs) (- (first inputs) (second inputs)))))
#:cost-model sample-cost-model))
Tests the interface of M0 .
(define (test0)
(test-case "M0 interface"
(check equal? (length (inputs M0)) 1)
(check equal? (set-count (sketches M0)) 3)
(check equal? (set-count (sketches M0 +inf.0)) 3)
(check equal? (set-count (sketches M0 400)) 3)
(check equal? (set-count (sketches M0 -100)) 0)
(check equal? (set-count (sketches M0 0)) 0)
(check equal? (set-count (sketches M0 1)) 0)
(check equal? (set-count (sketches M0 2)) 1)
(check equal? (set-count (sketches M0 3)) 2)
(check equal? (set-count (sketches M0 4)) 3)
(for ([S (sketches M0)])
(check equal? (pre S) null)
(define P (programs S))
(match-define (list i) (isketch-index S))
(check equal? (length (program-instructions P)) i)
(for ([inst (program-instructions P)])
(check-true (bvadd? inst)))
(check equal? (cost M0 P) i)
(check-false (null? (post S P))))
(match-define (list S1 S2 S3) (set->list (sketches M0)))
(check equal? (min-bitwidth M0 S1) 6)
(check equal? (min-bitwidth M0 S2) 6)
(check equal? (min-bitwidth M0 S3) 6)))
(define (test1)
(test-case "M0 correctness"
(match-define (list S1 S2 S3) (set->list (sketches M0)))
(check-true (unsat? (synth M0 S1)))
(check-true (sat? (synth M0 S2)))
(check-true (sat? (synth M0 S3)))
(check-true (sat? (synth2 M0 S2)))
there is no solution with > 2 instructions
Tests the interface of M1 .
(define (test2)
(test-case "M1 interface"
(check equal? (length (inputs M1)) 2)
(check equal? (set-count (sketches M1)) +inf.0)
(check equal? (set-count (sketches M1 +inf.0)) +inf.0)
(check equal? (set-count (sketches M1 -100)) 0)
(check equal? (set-count (sketches M1 0)) 0)
(for ([i (in-range 1 10)])
(check equal? (set-count (sketches M1 i)) (sub1 i)))
(for ([S (sketches M1 5)])
(check-false (null? (pre S)))
(define P (programs S))
(match-define (list i) (isketch-index S))
(check equal? (length (program-instructions P)) i)
(check-true (term? (cost M1 P)))
(check-false (null? (post S P))))
(match-define (list S1 S2 S3 S4) (set->list (sketches M1 5)))
(check equal? (min-bitwidth M1 S1) 6)
(check equal? (min-bitwidth M1 S2) 6)
(check equal? (min-bitwidth M1 S3) 6)
(check equal? (min-bitwidth M1 S4) 6)))
Tests the correctness of M1 .
(define (test3)
(test-case "M1 correctness"
(match-define (list S1 S2 S3 S4) (set->list (sketches M1 5)))
(check-true (unsat? (synth M1 S1)))
(check-true (sat? (synth M1 S2)))
(check-true (sat? (synth M1 S3)))
(check-true (sat? (synth M1 S4)))
(check-true (sat? (synth2 M1 S2)))
(check-true (sat? (synth2 M1 S3)))
(check-true (sat? (synth2 M1 S4)))))
(define/provide-test-suite superopt-tests
(test0)
(test1)
(test2)
(test3)
)
(run-tests-quiet superopt-tests)
|
484b3396272e091bd6371e3555a3a62ddfa00dcc2cd6c3c64766a9cac82dcafa | gilbertw1/clipbot | weather.clj | (ns clipbot.plugins.weather
(:require [clipbot.plugin :as plugin]
[clj-http.client :as http]))
(def fing-weather-url "/")
(defn match [s rx]
(->> s (re-seq rx) first second))
(defn get-weather [place]
(let [html (-> fing-weather-url (http/get {:query-params {:where place} :headers {:User-Agent "Mozilla/5.0"}}) :body)
temp (or (match html #"<span class=\"temperature\" tempf=\"\d*\">(.*?)<") "")
remark (or (match html #"<p class=\"remark\">(.*)<") "remark not found")
flavor (or (match html #"<p class=\"flavor\">(.*)<") "flavor not found")]
(when temp
(str temp " degrees -- " remark " *" flavor "*"))))
(plugin/register-plugin
{:id "weather"
:regex #"\$weather\s+(.*)"
:function (fn [responder user msg]
(-> msg (match #"\$weather\s+(.*)") get-weather responder))}) | null | https://raw.githubusercontent.com/gilbertw1/clipbot/ef9516ceee58e860493f1f9cb6cb0bc2169c3d7c/resources/plugins/weather.clj | clojure | (ns clipbot.plugins.weather
(:require [clipbot.plugin :as plugin]
[clj-http.client :as http]))
(def fing-weather-url "/")
(defn match [s rx]
(->> s (re-seq rx) first second))
(defn get-weather [place]
(let [html (-> fing-weather-url (http/get {:query-params {:where place} :headers {:User-Agent "Mozilla/5.0"}}) :body)
temp (or (match html #"<span class=\"temperature\" tempf=\"\d*\">(.*?)<") "")
remark (or (match html #"<p class=\"remark\">(.*)<") "remark not found")
flavor (or (match html #"<p class=\"flavor\">(.*)<") "flavor not found")]
(when temp
(str temp " degrees -- " remark " *" flavor "*"))))
(plugin/register-plugin
{:id "weather"
:regex #"\$weather\s+(.*)"
:function (fn [responder user msg]
(-> msg (match #"\$weather\s+(.*)") get-weather responder))}) | |
1a330428f70179a4ff761be13ad503cf135ec20c09cdd99996b3ac826d53eaaa | Chaddai/CurveProject | CurveGenerator.hs | # LANGUAGE TemplateHaskell , ViewPatterns , FlexibleInstances #
module Math.CurveGenerator
(CurveInput(..)
, CurveOptions(..)
, GridOptions(..)
, AxisOptions(..)
, TangentsOptions(..)
, CGConfig(..)
, PointInfo(..)
, Curve(..)
, saveConfig
, loadConfig
, drawingTangent
, createCurve) where
import Diagrams.Prelude
import Data.Maybe
import Data.SafeCopy
import Data.Default()
import Data.ByteString (ByteString)
import Data.Serialize
import Lens.Simple
-- All major input types and the configuration that holds them all
data CurveInput = CurvePointsAndT { looseness :: Double, pointsWithT :: [(P2 Double, Maybe Double, Bool)] }
-- | CurveFunction { func :: String, wantTangents :: [Double] }
instance Default CurveInput where
def = CurvePointsAndT { looseness=0.4, pointsWithT=[] }
instance SafeCopy (P2 Double) where
putCopy (coords -> x :& y) = contain $ safePut x >> safePut y
getCopy = contain $ curry p2 <$> safeGet <*> safeGet
deriveSafeCopy 1 'base ''CurveInput
makeLensesBy (\n -> Just (n ++ "L")) ''CurveInput
data CurveOptions = CurveOpts { curveColor :: String, curveStyle :: String }
instance Default CurveOptions where
def = CurveOpts { curveColor="black", curveStyle="solid" } -- Allowed curveStyle : solid, dashed, dotted
deriveSafeCopy 1 'base ''CurveOptions
makeLensesBy (\n -> Just (n ++ "L")) ''CurveOptions
data GridOptions = GridOpts { dxMajor, dyMajor, dxMinor, dyMinor :: Double, majorGrid, minorGrid :: Bool }
instance Default GridOptions where
def = GridOpts 1 1 0.2 0.2 True False
deriveSafeCopy 1 'base ''GridOptions
makeLensesBy (\n -> Just (n ++ "L")) ''GridOptions
data AxisOptions = AxisOpts { xMin, xMax, yMin, yMax, xOrig, yOrig, xTicks, yTicks :: Double }
instance Default AxisOptions where
def = AxisOpts { xMin= -8, xMax=8, yMin= -6, yMax=6, xTicks=1, yTicks=1, xOrig=0, yOrig=0 }
deriveSafeCopy 1 'base ''AxisOptions
makeLensesBy (\n -> Just (n ++ "L")) ''AxisOptions
data TangentsOptions = TanOpts { tangentLen :: Double, tangentColor :: String, tangentStyle :: String }
instance Default TangentsOptions where
def = TanOpts { tangentLen = 2, tangentColor = "black", tangentStyle = "solid" }
deriveSafeCopy 1 'base ''TangentsOptions
makeLensesBy (\n -> Just (n ++ "L")) ''TangentsOptions
data CGConfig = CGConfig { curveInputs :: [(CurveInput, CurveOptions)]
, gridOptions :: GridOptions
, axisOptions :: AxisOptions
, tangentsOptions :: TangentsOptions
, comments :: Bool
}
instance Default CGConfig where
def = CGConfig (replicate 10 (def,def)) def def def False
deriveSafeCopy 1 'base ''CGConfig
makeLensesBy (\n -> Just (n ++ "L")) ''CGConfig
saveConfig :: CGConfig -> ByteString
saveConfig = runPut . safePut
loadConfig :: ByteString -> Either String CGConfig
loadConfig = runGet safeGet
data PointInfo = Through { piPoint :: P2 Double, tangent :: V2 Double, drawTangent :: Bool } | Control { piPoint :: P2 Double }
data Curve = BezierJoints [PointInfo]
| SymbolFunction String
drawingTangent :: PointInfo -> Bool
drawingTangent (Through _ _ True) = True
drawingTangent _ = False
createCurve :: CurveInput -> Curve
createCurve (CurvePointsAndT e params@((lextr,t, b):(p2,_,_):ps)) = BezierJoints $ Through lextr dv b : Control clextr : go params
where
dv = computeDVector (centralSymAbout lextr p2) lextr p2 t
clextr = lextr .+^ (e *^ dv)
go [(pbl,_,_),(rextr,t, b)] = [Control crextr, Through rextr dv b]
where
dv = computeDVector pbl rextr (centralSymAbout rextr pbl) t
crextr = rextr .-^ (e *^ dv)
go ((p1,_,_):ps@((p2,t,b):(p3,_,_):_)) = Control lcp : Through p2 dv b : Control rcp : go ps
where
dv = computeDVector p1 p2 p3 t
lcp = p2 .-^ (e *^ dv)
rcp = p2 .+^ (e *^ dv)
createCurve _ = BezierJoints []
computeDVector :: P2 Double -> P2 Double -> P2 Double -> Maybe Double -> V2 Double
computeDVector (coords -> lx :& ly) (coords -> mx :& my) (coords -> rx :& ry) givenT
| (ly - my)*(ry - my) > 0 && isNothing givenT = x ^& 0
| otherwise = x ^& y
where
t = fromMaybe ((ly-ry)/(lx-rx)) givenT
x = min (mx - lx) (rx - mx)
y = t*x
centralSymAbout c = rotateAround c (1/2 @@ turn)
| null | https://raw.githubusercontent.com/Chaddai/CurveProject/f1b22f29ce7939123469ded14ef68756f93ad714/high-school-plotting/src/Math/CurveGenerator.hs | haskell | All major input types and the configuration that holds them all
| CurveFunction { func :: String, wantTangents :: [Double] }
Allowed curveStyle : solid, dashed, dotted | # LANGUAGE TemplateHaskell , ViewPatterns , FlexibleInstances #
module Math.CurveGenerator
(CurveInput(..)
, CurveOptions(..)
, GridOptions(..)
, AxisOptions(..)
, TangentsOptions(..)
, CGConfig(..)
, PointInfo(..)
, Curve(..)
, saveConfig
, loadConfig
, drawingTangent
, createCurve) where
import Diagrams.Prelude
import Data.Maybe
import Data.SafeCopy
import Data.Default()
import Data.ByteString (ByteString)
import Data.Serialize
import Lens.Simple
data CurveInput = CurvePointsAndT { looseness :: Double, pointsWithT :: [(P2 Double, Maybe Double, Bool)] }
instance Default CurveInput where
def = CurvePointsAndT { looseness=0.4, pointsWithT=[] }
instance SafeCopy (P2 Double) where
putCopy (coords -> x :& y) = contain $ safePut x >> safePut y
getCopy = contain $ curry p2 <$> safeGet <*> safeGet
deriveSafeCopy 1 'base ''CurveInput
makeLensesBy (\n -> Just (n ++ "L")) ''CurveInput
data CurveOptions = CurveOpts { curveColor :: String, curveStyle :: String }
instance Default CurveOptions where
deriveSafeCopy 1 'base ''CurveOptions
makeLensesBy (\n -> Just (n ++ "L")) ''CurveOptions
data GridOptions = GridOpts { dxMajor, dyMajor, dxMinor, dyMinor :: Double, majorGrid, minorGrid :: Bool }
instance Default GridOptions where
def = GridOpts 1 1 0.2 0.2 True False
deriveSafeCopy 1 'base ''GridOptions
makeLensesBy (\n -> Just (n ++ "L")) ''GridOptions
data AxisOptions = AxisOpts { xMin, xMax, yMin, yMax, xOrig, yOrig, xTicks, yTicks :: Double }
instance Default AxisOptions where
def = AxisOpts { xMin= -8, xMax=8, yMin= -6, yMax=6, xTicks=1, yTicks=1, xOrig=0, yOrig=0 }
deriveSafeCopy 1 'base ''AxisOptions
makeLensesBy (\n -> Just (n ++ "L")) ''AxisOptions
data TangentsOptions = TanOpts { tangentLen :: Double, tangentColor :: String, tangentStyle :: String }
instance Default TangentsOptions where
def = TanOpts { tangentLen = 2, tangentColor = "black", tangentStyle = "solid" }
deriveSafeCopy 1 'base ''TangentsOptions
makeLensesBy (\n -> Just (n ++ "L")) ''TangentsOptions
data CGConfig = CGConfig { curveInputs :: [(CurveInput, CurveOptions)]
, gridOptions :: GridOptions
, axisOptions :: AxisOptions
, tangentsOptions :: TangentsOptions
, comments :: Bool
}
instance Default CGConfig where
def = CGConfig (replicate 10 (def,def)) def def def False
deriveSafeCopy 1 'base ''CGConfig
makeLensesBy (\n -> Just (n ++ "L")) ''CGConfig
saveConfig :: CGConfig -> ByteString
saveConfig = runPut . safePut
loadConfig :: ByteString -> Either String CGConfig
loadConfig = runGet safeGet
data PointInfo = Through { piPoint :: P2 Double, tangent :: V2 Double, drawTangent :: Bool } | Control { piPoint :: P2 Double }
data Curve = BezierJoints [PointInfo]
| SymbolFunction String
drawingTangent :: PointInfo -> Bool
drawingTangent (Through _ _ True) = True
drawingTangent _ = False
createCurve :: CurveInput -> Curve
createCurve (CurvePointsAndT e params@((lextr,t, b):(p2,_,_):ps)) = BezierJoints $ Through lextr dv b : Control clextr : go params
where
dv = computeDVector (centralSymAbout lextr p2) lextr p2 t
clextr = lextr .+^ (e *^ dv)
go [(pbl,_,_),(rextr,t, b)] = [Control crextr, Through rextr dv b]
where
dv = computeDVector pbl rextr (centralSymAbout rextr pbl) t
crextr = rextr .-^ (e *^ dv)
go ((p1,_,_):ps@((p2,t,b):(p3,_,_):_)) = Control lcp : Through p2 dv b : Control rcp : go ps
where
dv = computeDVector p1 p2 p3 t
lcp = p2 .-^ (e *^ dv)
rcp = p2 .+^ (e *^ dv)
createCurve _ = BezierJoints []
computeDVector :: P2 Double -> P2 Double -> P2 Double -> Maybe Double -> V2 Double
computeDVector (coords -> lx :& ly) (coords -> mx :& my) (coords -> rx :& ry) givenT
| (ly - my)*(ry - my) > 0 && isNothing givenT = x ^& 0
| otherwise = x ^& y
where
t = fromMaybe ((ly-ry)/(lx-rx)) givenT
x = min (mx - lx) (rx - mx)
y = t*x
centralSymAbout c = rotateAround c (1/2 @@ turn)
|
6117a49f9ca19c91bf95f2191c9ed472a5c4f47152f8901f6095e3ba5bd2680f | aveltras/sessionula | Map.hs | # LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
module Sessionula.Backend.Map where
import Data.IORef (IORef, atomicModifyIORef', newIORef,
readIORef)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import Data.Time (UTCTime)
import Sessionula
newtype MapStorage
= MapStorage { unMapStorage :: IORef (Map Token (Session, UTCTime, UTCTime)) }
mapStorage :: IO MapStorage
mapStorage = newIORef Map.empty >>= pure . MapStorage
instance Storage MapStorage where
type Encoding MapStorage = Session
toEncoding = id
fromEncoding = Just . id
fetchSession (MapStorage ioRef) token = Map.lookup token <$> readIORef ioRef
persistSession (MapStorage ioRef) mOldToken token session currentTime = do
atomicModifyIORef' ioRef $ \sessions -> (, ()) $
Map.insertWith f token (session, currentTime, currentTime) $
case mOldToken of
Nothing -> sessions
Just t -> Map.delete t sessions
where
f (newSession, _, accessedAt) (_, issuedAt, _) = (newSession, issuedAt, accessedAt)
gcSessions (MapStorage ioRef) limitTimeStamp = do
expiredTokens <- Map.foldrWithKey f Set.empty <$> readIORef ioRef
atomicModifyIORef' ioRef $ \sessions -> (Map.withoutKeys sessions expiredTokens, ())
where
f token (_, issuedAt, _) acc =
if issuedAt < limitTimeStamp
then Set.insert token acc
else acc
| null | https://raw.githubusercontent.com/aveltras/sessionula/9b25861f93f2c673d56447eb4292eb4ca1677d53/sessionula/src/Sessionula/Backend/Map.hs | haskell | # LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
module Sessionula.Backend.Map where
import Data.IORef (IORef, atomicModifyIORef', newIORef,
readIORef)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import Data.Time (UTCTime)
import Sessionula
newtype MapStorage
= MapStorage { unMapStorage :: IORef (Map Token (Session, UTCTime, UTCTime)) }
mapStorage :: IO MapStorage
mapStorage = newIORef Map.empty >>= pure . MapStorage
instance Storage MapStorage where
type Encoding MapStorage = Session
toEncoding = id
fromEncoding = Just . id
fetchSession (MapStorage ioRef) token = Map.lookup token <$> readIORef ioRef
persistSession (MapStorage ioRef) mOldToken token session currentTime = do
atomicModifyIORef' ioRef $ \sessions -> (, ()) $
Map.insertWith f token (session, currentTime, currentTime) $
case mOldToken of
Nothing -> sessions
Just t -> Map.delete t sessions
where
f (newSession, _, accessedAt) (_, issuedAt, _) = (newSession, issuedAt, accessedAt)
gcSessions (MapStorage ioRef) limitTimeStamp = do
expiredTokens <- Map.foldrWithKey f Set.empty <$> readIORef ioRef
atomicModifyIORef' ioRef $ \sessions -> (Map.withoutKeys sessions expiredTokens, ())
where
f token (_, issuedAt, _) acc =
if issuedAt < limitTimeStamp
then Set.insert token acc
else acc
| |
7f2b473097e0c2a4498d22d30741a3308a5829bab7835438b06e9176ae2af6fe | fccm/glMLite | quaternions.ml | (** Quaternions *)
(* OpenGL:Tutorials:Using Quaternions to represent rotation *)
(* from: :Tutorials:Using_Quaternions_to_represent_rotation *)
Content is available under GNU Free Documentation License 1.2
(* *)
History log of the document , and authors :
26 - 05 - 2006 11:39 Tannin ( Initial post )
26 - 05 - 2006 ( typo )
25 - 07 - 2006 19:05 ( Just what is a quaternion ? )
25 - 07 - 2006 19:06 81.174.37.6 ( Just what is a quaternion ? )
30 - 07 - 2006 06:38 203.206.100.123 ( How to convert to / from quaternions - fix formatting )
30 - 07 - 2006 11:40 203.206.100.123 ( add category tags )
03 - 10 - 2006 21:10 144.30.108.40 ( Some basic quaternion operations )
05 - 10 - 2006 08:21 84.56.180.65
30 - 10 - 2007 22:41 158.130.58.47 ( Just what is a quaternion ? )
30 - 10 - 2007 22:42 158.130.58.47 ( Why use quaternions )
26 - 11 - 2007 19:08 151.188.17.247 ( Some basic quaternion operations )
27 - 11 - 2007 ( Reverted edit of 151.188.17.247 , changed back to last version by 158.130.58.47 )
13 - 01 - 2008 ( Just what is a quaternion ? )
13 - 01 - 2008 ( Why use quaternions )
13 - 01 - 2008 ( Why use quaternions )
13 - 01 - 2008 10:29 ( Added section " Why quaternions are neat " )
13 - 01 - 2008 18:28 ( Some basic quaternion operations )
13 - 01 - 2008 18:48 ( How to convert to / from quaternions )
26 - 01 - 2008 06:51 142.167.170.72 ( Rotating vectors )
15 - 01 - 2009 15:04 83.237.221.197 ( access to ' y ' variable was ambigous )
06 - 08 - 2009 23:10 Geveno ( Normalizing a quaternion )
06 - 08 - 2009 23:39 Liam ( Bug fix )
15 - 12 - 2009 01:01 Helge ( Error in vector part : You should add the cross - product , not subtract ( found the correct answer on Wikipedia ) )
26-05-2006 11:39 Tannin (Initial post)
26-05-2006 11:42 Tannin (typo)
25-07-2006 19:05 81.174.37.6 (Just what is a quaternion?)
25-07-2006 19:06 81.174.37.6 (Just what is a quaternion?)
30-07-2006 06:38 203.206.100.123 (How to convert to/from quaternions - fix formatting)
30-07-2006 11:40 203.206.100.123 (add category tags)
03-10-2006 21:10 144.30.108.40 (Some basic quaternion operations)
05-10-2006 08:21 84.56.180.65
30-10-2007 22:41 158.130.58.47 (Just what is a quaternion?)
30-10-2007 22:42 158.130.58.47 (Why use quaternions)
26-11-2007 19:08 151.188.17.247 (Some basic quaternion operations)
27-11-2007 10:25 Codehead (Reverted edit of 151.188.17.247, changed back to last version by 158.130.58.47)
13-01-2008 09:42 Ronguida (Just what is a quaternion?)
13-01-2008 09:48 Ronguida (Why use quaternions)
13-01-2008 09:51 Ronguida (Why use quaternions)
13-01-2008 10:29 Ronguida (Added section "Why quaternions are neat")
13-01-2008 18:28 Ronguida (Some basic quaternion operations)
13-01-2008 18:48 Ronguida (How to convert to/from quaternions)
26-01-2008 06:51 142.167.170.72 (Rotating vectors)
15-01-2009 15:04 83.237.221.197 (access to 'y' variable was ambigous)
06-08-2009 23:10 Geveno (Normalizing a quaternion)
06-08-2009 23:39 Liam (Bug fix)
15-12-2009 01:01 Helge (Error in vector part: You should add the cross-product, not subtract (found the correct answer on Wikipedia))
*)
(* there are only float calculations in this module *)
external ( + ) : float -> float -> float = "%addfloat"
external ( - ) : float -> float -> float = "%subfloat"
external ( * ) : float -> float -> float = "%mulfloat"
external ( / ) : float -> float -> float = "%divfloat"
= = = = Foreword and warning = = = =
Quaternions are all the rage these days for ( 3D ) computer games , so this wiki
would n't be complete without an explanation about them . Unfortunately , I 'm not
exactly a quaternion - specialist , so there might be errors here . I hope someone
with more knowledge on the topic will review this article . Although this article
is in the OpenGL - section , the background information is of course true for Direct3D
too . As far as I know , D3D also has some convenience functions for Quaternions .
= = = = Just what is a quaternion ? = = = =
A quaternion is an element of a 4 dimensional vector - space . It 's defined as
w + xi + yj + zk where i , j and k are imaginary numbers . Alternatively , a
quaternion is what you get when you add a scalar and a 3d vector .
The math behind quaternions is only slightly harder than the math behind vectors ,
but I 'm going to spare you ( for the moment ) .
Sounds scary ? Ok , so now you know never to ask that question again ...
Fortunately for you , we will only work with a subset of quaternions : unit
quaternions . Those are quaternions with length 1 . Every unit quaternion
represents a 3D rotation , and every 3D rotation has two unit quaternion
representations . Unit quaternions are a way to compactly represent 3D rotations
while avoiding singularities or discontinuities ( e.g. gimbal lock ) .
+ -------------------------------------+---------------------------------------------+
| Rotation | Quaternions |
+ -------------------------------------+---------------------------------------------+
| Identity ( no rotation ) | 1 , -1 |
| 180 degrees about x - axis | i , -i |
| 180 degrees about y - axis | j , -j |
| 180 degrees about z - axis | k , |
| angle θ , axis ( unit vector ) \vec{n } | \pm[\cos(\theta/2 ) + \vec{n}\sin(\theta/2 ) ] |
+ -------------------------------------+---------------------------------------------+
= = = = Why use quaternions = = = =
Quaternions have some advantages over other representations of rotations .
- Quaternions do n't suffer from gimbal lock , unlike Euler angles .
- They can be represented as 4 numbers , in contrast to the 9 numbers of
a rotations matrix .
- The conversion to and from axis / angle representation is trivial .
- Smooth interpolation between two quaternions is easy ( in contrast to
axis / angle or rotation matrices ) .
- After a lot of calculations on quaternions and matrices , rounding errors
accumulate , so you have to normalize quaternions and orthogonalize a rotation
matrix , but normalizing a quaternion is a lot less troublesome than
orthogonalizing a matrix .
- Similar to rotation matrices , you can just multiply 2 quaternions together
to receive a quaternion that represents both rotations .
The only disadvantages of quaternions are :
- They are hard to visualize .
- You have to convert them to get a human - readable representation ( Euler angles )
or something OpenGL can understand ( Matrix ) .
- Smooth interpolation between quaternions is complicated by the fact that each
3D rotation has two representations .
= = = = Why quaternions are neat = = = =
Quaternions are neat because the unit quaternions are a double - cover for the set
of 3D rotations .
To understand what that means , consider a Mobius strip . To make one , start with
a strip of paper and bring the ends together . Give one of the ends a half - twist ,
and then attach it to the other end . The resulting surface is a Mobius strip .
The Mobius strip has a very important feature : it 's non - orientable . When I want
to build and render an object , like a sphere or a cylinder , I need to make sure
all my vertex normals point the same way ( e.g. outward ) . If I try to render a
Mobius strip , I 'll get stuck because I wo n't be able to assign consistent
outward - pointing vector normals . That 's what it means to be non - orientable .
The solution to this problem is to simply duplicate each vertex . One copy gets
an " outward " pointing normal , and the other copy gets an " inward " pointing
normal . When I render the Mobius strip , I 'll have to draw each polygon twice ,
once facing " outward " , and again facing " inward " . Mathematicians would call
this a double - cover .
The set of 3D rotations can be thought of as a 3D surface sitting in 4D
hyper - space . Like the Mobius strip , this surface is non - orientable . As a
result , if I try to represent a 3D rotation using three numbers ( e.g. Euler
angles ) , I 'll get stuck with either a singularity or a discontinuity .
Just like with the Mobius strip , I can solve the problem with a double - cover .
I simply duplicate each possible 3D rotation ; one copy gets a " positive "
quaternion , and the other copy gets a " negative " quaternion . The resulting
representation is equivalent to a 3 - sphere in 4D hyper - space , it 's orientable ,
and it completely avoids the problem of singularities and discontinuities .
= = = = Some basic quaternion operations = = = =
Here are some methods you will regularly need to work with quaternions .
==== Foreword and warning ====
Quaternions are all the rage these days for (3D) computer games, so this wiki
wouldn't be complete without an explanation about them. Unfortunately, I'm not
exactly a quaternion-specialist, so there might be errors here. I hope someone
with more knowledge on the topic will review this article. Although this article
is in the OpenGL-section, the background information is of course true for Direct3D
too. As far as I know, D3D also has some convenience functions for Quaternions.
==== Just what is a quaternion? ====
A quaternion is an element of a 4 dimensional vector-space. It's defined as
w + xi + yj + zk where i, j and k are imaginary numbers. Alternatively, a
quaternion is what you get when you add a scalar and a 3d vector.
The math behind quaternions is only slightly harder than the math behind vectors,
but I'm going to spare you (for the moment).
Sounds scary? Ok, so now you know never to ask that question again...
Fortunately for you, we will only work with a subset of quaternions: unit
quaternions. Those are quaternions with length 1. Every unit quaternion
represents a 3D rotation, and every 3D rotation has two unit quaternion
representations. Unit quaternions are a way to compactly represent 3D rotations
while avoiding singularities or discontinuities (e.g. gimbal lock).
+-------------------------------------+---------------------------------------------+
| Rotation | Quaternions |
+-------------------------------------+---------------------------------------------+
| Identity (no rotation) | 1, -1 |
| 180 degrees about x-axis | i, -i |
| 180 degrees about y-axis | j, -j |
| 180 degrees about z-axis | k, -k |
| angle θ, axis (unit vector) \vec{n} | \pm[\cos(\theta/2) + \vec{n}\sin(\theta/2)] |
+-------------------------------------+---------------------------------------------+
==== Why use quaternions ====
Quaternions have some advantages over other representations of rotations.
- Quaternions don't suffer from gimbal lock, unlike Euler angles.
- They can be represented as 4 numbers, in contrast to the 9 numbers of
a rotations matrix.
- The conversion to and from axis/angle representation is trivial.
- Smooth interpolation between two quaternions is easy (in contrast to
axis/angle or rotation matrices).
- After a lot of calculations on quaternions and matrices, rounding errors
accumulate, so you have to normalize quaternions and orthogonalize a rotation
matrix, but normalizing a quaternion is a lot less troublesome than
orthogonalizing a matrix.
- Similar to rotation matrices, you can just multiply 2 quaternions together
to receive a quaternion that represents both rotations.
The only disadvantages of quaternions are:
- They are hard to visualize.
- You have to convert them to get a human-readable representation (Euler angles)
or something OpenGL can understand (Matrix).
- Smooth interpolation between quaternions is complicated by the fact that each
3D rotation has two representations.
==== Why quaternions are neat ====
Quaternions are neat because the unit quaternions are a double-cover for the set
of 3D rotations.
To understand what that means, consider a Mobius strip. To make one, start with
a strip of paper and bring the ends together. Give one of the ends a half-twist,
and then attach it to the other end. The resulting surface is a Mobius strip.
The Mobius strip has a very important feature: it's non-orientable. When I want
to build and render an object, like a sphere or a cylinder, I need to make sure
all my vertex normals point the same way (e.g. outward). If I try to render a
Mobius strip, I'll get stuck because I won't be able to assign consistent
outward-pointing vector normals. That's what it means to be non-orientable.
The solution to this problem is to simply duplicate each vertex. One copy gets
an "outward" pointing normal, and the other copy gets an "inward" pointing
normal. When I render the Mobius strip, I'll have to draw each polygon twice,
once facing "outward", and again facing "inward". Mathematicians would call
this a double-cover.
The set of 3D rotations can be thought of as a 3D surface sitting in 4D
hyper-space. Like the Mobius strip, this surface is non-orientable. As a
result, if I try to represent a 3D rotation using three numbers (e.g. Euler
angles), I'll get stuck with either a singularity or a discontinuity.
Just like with the Mobius strip, I can solve the problem with a double-cover.
I simply duplicate each possible 3D rotation; one copy gets a "positive"
quaternion, and the other copy gets a "negative" quaternion. The resulting
representation is equivalent to a 3-sphere in 4D hyper-space, it's orientable,
and it completely avoids the problem of singularities and discontinuities.
==== Some basic quaternion operations ====
Here are some methods you will regularly need to work with quaternions.
*)
(* A quaternion w + xi + yj + zk, stored as four mutable floats.
   (qx, qy, qz) is the vector (imaginary) part, qw the scalar part.
   Unit quaternions (length 1) represent 3D rotations. *)
type quaternion =
  { mutable qx:float;
    mutable qy:float;
    mutable qz:float;
    mutable qw:float; }
(* Build a quaternion from a vector part [(x, y, z)] and a scalar part [w]. *)
let quaternion (x, y, z) w =
  { qx = x;
    qy = y;
    qz = z;
    qw = w }
(* The identity rotation: zero vector part, scalar part 1. *)
let identity_quaternion () =
  { qx = 0.0; qy = 0.0; qz = 0.0; qw = 1.0 }
* { 3 Normalizing a quaternion }
normalising a quaternion works similar to a vector . This method will not do anything
if the quaternion is close enough to being unit - length . define TOLERANCE as something
small like 0.00001 to get accurate results
if the quaternion is close enough to being unit-length. define TOLERANCE as something
small like 0.00001 to get accurate results *)
(* Normalise [quat] to unit length, in place.  Does nothing when the
   squared magnitude is already within [tolerance] of 1 (so repeated
   calls on an almost-unit quaternion are cheap) or when it is exactly
   0 (to avoid dividing by zero). *)
let normalise quat =
  let tolerance = 0.00001 in
  let mag2 =
    quat.qw *. quat.qw +. quat.qx *. quat.qx
    +. quat.qy *. quat.qy +. quat.qz *. quat.qz
  in
  if mag2 <> 0.0 && abs_float (mag2 -. 1.0) > tolerance then begin
    let mag = sqrt mag2 in
    quat.qx <- quat.qx /. mag;
    quat.qy <- quat.qy /. mag;
    quat.qz <- quat.qz /. mag;
    quat.qw <- quat.qw /. mag;
  end
* { 3 The complex conjugate of a quaternion }
(* We need to get the inverse of a quaternion to properly apply a
quaternion-rotation to a vector.
* The conjugate of a quaternion is the same as the inverse, as long as the
quaternion is unit-length *)
(* Complex conjugate: negate the vector part, keep the scalar part.
   For a unit quaternion this is the same as the inverse rotation,
   which is what we need to apply a quaternion-rotation to a vector. *)
let getConjugate quat =
  { qx = -. quat.qx
  ; qy = -. quat.qy
  ; qz = -. quat.qz
  ; qw = quat.qw
  }
* { 3 Multiplying quaternions }
To multiply two quaternions , write each one as the sum of a scalar and a vector .
The product of
q_1 = w_1 + \vec{v_1} and q_2 = w_2 + \vec{v_2} is q = w + \vec{v}
where
w = w_1 w_2 - \vec{v_1} \cdot \vec{v_2}
\vec{v} = w_1 \vec{v_2} + w_2 \vec{v_1} + \vec{v_1} \times \vec{v_2}
To multiply two quaternions, write each one as the sum of a scalar and a vector.
The product of
q_1 = w_1 + \vec{v_1} and q_2 = w_2 + \vec{v_2} is q = w + \vec{v}
where
w = w_1 w_2 - \vec{v_1} \cdot \vec{v_2}
\vec{v} = w_1 \vec{v_2} + w_2 \vec{v_1} + \vec{v_1} \times \vec{v_2}
*)
(* Hamilton product of two quaternions; multiplying [q1] with [rq]
   applies the rotation [rq] to [q1].  NOT commutative: the order of
   the arguments matters. *)
let mult_quaternion q1 rq =
  let x1 = q1.qx and y1 = q1.qy and z1 = q1.qz and w1 = q1.qw
  and x2 = rq.qx and y2 = rq.qy and z2 = rq.qz and w2 = rq.qw in
  { qx = w1 *. x2 +. x1 *. w2 +. y1 *. z2 -. z1 *. y2;
    qy = w1 *. y2 +. y1 *. w2 +. z1 *. x2 -. x1 *. z2;
    qz = w1 *. z2 +. z1 *. w2 +. x1 *. y2 -. y1 *. x2;
    qw = w1 *. w2 -. x1 *. x2 -. y1 *. y2 -. z1 *. z2;
  }
Please note : Quaternion - multiplication is NOT commutative . Thus q1 * q2 is
not the same as q2 * q1 . This is pretty obvious actually : As I explained ,
quaternions represent rotations and multiplying them " concatenates " the
rotations . Now take you hand and hold it parallel to the floor so your hand
points away from you . Rotate it 90 ° around the x - axis so it is pointing upward .
Now rotate it 90 ° clockwise around its local y - axis ( the one coming out of the
back of your hand ) . Your hand should now be pointing to your right , with you
looking at the back of your hand . Now invert the rotations : Rotate your hand
around the y - axis so its facing right with the back of the hand facing upwards .
Now rotate around the x axis and your hand is pointing up , back of hand facing
your left . See , the order in which you apply rotations matters . Ok , ok , you
probably knew that ...
Please note: Quaternion-multiplication is NOT commutative. Thus q1 * q2 is
not the same as q2 * q1. This is pretty obvious actually: As I explained,
quaternions represent rotations and multiplying them "concatenates" the
rotations. Now take you hand and hold it parallel to the floor so your hand
points away from you. Rotate it 90° around the x-axis so it is pointing upward.
Now rotate it 90° clockwise around its local y-axis (the one coming out of the
back of your hand). Your hand should now be pointing to your right, with you
looking at the back of your hand. Now invert the rotations: Rotate your hand
around the y-axis so its facing right with the back of the hand facing upwards.
Now rotate around the x axis and your hand is pointing up, back of hand facing
your left. See, the order in which you apply rotations matters. Ok, ok, you
probably knew that...
*)
* { 3 Rotating vectors }
(*
To apply a quaternion-rotation to a vector, you need to multiply the vector by
the quaternion and its conjugate.
\vec{v}' = q\; \vec{v}\; \overline{q}
*)
(* Scale the float 3-vector [(x, y, z)] to unit length.
   The zero vector has no direction, so it is returned unchanged
   instead of producing NaNs from the 0/0 division the naive
   formula would perform. *)
let normalise_vector (x, y, z) =
  let len = sqrt (x *. x +. y *. y +. z *. z) in
  if len = 0.0 then (x, y, z)
  else (x /. len, y /. len, z /. len)
(* Rotate a vector by quaternion [q]: computes  q * v * conj(q).
   Note: the input vector is normalised first, so the result is the
   rotated *direction* (always unit length), not the rotated vector
   with its original magnitude. *)
let mult_quaternion_vector q vec =
  let nx, ny, nz = normalise_vector vec in
  let pure_quat = { qx = nx; qy = ny; qz = nz; qw = 0.0 } in
  let rotated = mult_quaternion q (mult_quaternion pure_quat (getConjugate q)) in
  (rotated.qx, rotated.qy, rotated.qz)
;;
(*
==== How to convert to/from quaternions ====
In the following, I will present the methods necessary to convert all kind of
rotation-representations to and from quaternions. I'll not show how to derive
them because, well, who cares? (oh, and because I don't know how)
*)
* { 3 Quaternion from axis - angle }
To rotate through an angle θ , about the axis ( unit vector ) \vec{v } , use :
q = \cos(\theta/2 ) + \vec{v}\sin(\theta/2 )
To rotate through an angle θ, about the axis (unit vector) \vec{v}, use:
q = \cos(\theta/2) + \vec{v}\sin(\theta/2)
*)
(* Quaternion for a rotation of [angle] radians about the axis [vec]:
   q = cos(angle/2) + axis * sin(angle/2).
   The axis is normalised first, so [vec] need not be unit length. *)
let quaternion_of_axis vec angle =
  let half = angle *. 0.5 in
  let ax, ay, az = normalise_vector vec in
  let s = sin half in
  { qx = ax *. s;
    qy = ay *. s;
    qz = az *. s;
    qw = cos half;
  }
;;
* { 3 Quaternion from Euler angles }
(* Degrees-to-radians conversion factor: pi / 180. *)
let piover180 = 3.14159265358979312 /. 180.0
(* Quaternion from Euler angles, given in DEGREES.  Conceptually this
   builds one quaternion per axis (pitch about x, yaw about y, roll
   about z) and multiplies them together; the closed form below is the
   same computation written out directly. *)
let quaternion_of_euler ~pitch ~yaw ~roll =
  (* Half-angles in radians, as required by the quaternion formulas. *)
  let half_p = pitch *. piover180 /. 2.0
  and half_y = yaw *. piover180 /. 2.0
  and half_r = roll *. piover180 /. 2.0 in
  let sp = sin half_p
  and sy = sin half_y
  and sr = sin half_r
  and cp = cos half_p
  and cy = cos half_y
  and cr = cos half_r in
  let q =
    { qx = sr *. cp *. cy -. cr *. sp *. sy;
      qy = cr *. sp *. cy +. sr *. cp *. sy;
      qz = cr *. cp *. sy -. sr *. sp *. cy;
      qw = cr *. cp *. cy +. sr *. sp *. sy;
    }
  in
  (* Guard against accumulated rounding error before returning. *)
  normalise q;
  q
;;
* { 3 Quaternion to Matrix }
(* Convert [quat] to a 4x4 rotation matrix, laid out in COLUMN-MAJOR
   order as OpenGL expects.  The simple formula below assumes [quat]
   is unit length (it would be considerably more involved otherwise).
   Float operators are written explicitly ( *. etc.) for clarity; in
   this file (+), (-), ( * ) are shadowed with the same float
   primitives, so the result is identical. *)
let matrix_of_quaternion quat =
  let x = quat.qx and y = quat.qy and z = quat.qz and w = quat.qw in
  (* Precompute the products used more than once. *)
  let x2 = x *. x and y2 = y *. y and z2 = z *. z
  and xy = x *. y and xz = x *. z and yz = y *. z
  and wx = w *. x and wy = w *. y and wz = w *. z in
  [|
    1.0 -. 2.0 *. (y2 +. z2);  2.0 *. (xy -. wz);         2.0 *. (xz +. wy);         0.0;
    2.0 *. (xy +. wz);         1.0 -. 2.0 *. (x2 +. z2);  2.0 *. (yz -. wx);         0.0;
    2.0 *. (xz -. wy);         2.0 *. (yz +. wx);         1.0 -. 2.0 *. (x2 +. y2);  0.0;
    0.0;                       0.0;                       0.0;                       1.0;
  |]
;;
* { 3 Quaternion to axis - angle }
Given a quaternion q = w + \vec{v } , the ( non - normalized ) rotation axis is simply
\vec{v } , provided that an axis exists . For very small rotations , \vec{v } gets
close to the zero vector , so when we compute the normalized rotation axis , the
calculation may blow up . In particular , the identity rotation has \vec{v } = 0 ,
so the rotation axis is undefined .
To find the angle of rotation , note that w = cos(θ / 2 ) and \|v\| = \sin(\theta/2 ) .
Given a quaternion q = w + \vec{v}, the (non-normalized) rotation axis is simply
\vec{v}, provided that an axis exists. For very small rotations, \vec{v} gets
close to the zero vector, so when we compute the normalized rotation axis, the
calculation may blow up. In particular, the identity rotation has \vec{v} = 0,
so the rotation axis is undefined.
To find the angle of rotation, note that w = cos(θ / 2) and \|v\| = \sin(\theta/2).
*)
(* Convert a unit quaternion to ((axis_x, axis_y, axis_z), angle),
   angle in radians:  w = cos(angle/2), |(x,y,z)| = sin(angle/2).
   For the identity rotation the vector part is (0,0,0) and the axis
   is undefined; the original code divided by zero there and returned
   NaNs, so we return an arbitrary but safe x-axis instead (the angle
   is 0 in that case, so any axis is correct). *)
let axisAngle_of_quaternion quat =
  let x = quat.qx
  and y = quat.qy
  and z = quat.qz
  and w = quat.qw in
  let scale = sqrt (x *. x +. y *. y +. z *. z) in
  let axis =
    if scale = 0.0
    then (1.0, 0.0, 0.0)  (* undefined axis: pick x *)
    else (x /. scale, y /. scale, z /. scale)
  and angle = (acos w) *. 2.0 in
  (axis, angle)
;;
= = = = Example = = = =
Ok , with the above Quaternion class , It 's very simple to create a camera class
that has one such Quaternion to represent its orientation :
==== Example ====
Ok, with the above Quaternion class, It's very simple to create a camera class
that has one such Quaternion to represent its orientation:
*)
(* Component-wise sum of two float 3-vectors. *)
let vectors_add (x1, y1, z1) (x2, y2, z2) =
  (x1 +. x2, y1 +. y2, z1 +. z2)
(* First-person camera: a world position plus an orientation
   quaternion.  xrot/yrot and xmov/ymov/zmov hold the per-axis
   rotation/movement currently requested by the player; [tick] applies
   them scaled by elapsed time and the rot/move speed factors.
   NOTE(review): the bare ( * ) and (-) below are the float operators
   this file shadows at the top -- confirm the externals are in scope. *)
class camera =
  object (s)
    (* --- state --- *)
    val mutable pos = (0.0, 0.0, 0.0)              (* world-space position *)
    val mutable rotation = identity_quaternion ()  (* orientation quaternion *)
    val mutable xrot = 0.0       (* requested pitch rate (local x-axis) *)
    val mutable yrot = 0.0       (* requested yaw rate (global y-axis) *)
    val mutable xmov = 0.0       (* requested strafe rate (local x-axis) *)
    val mutable ymov = 0.0       (* requested vertical rate (world y-axis) *)
    val mutable zmov = 0.0       (* requested forward/back rate (local z-axis) *)
    val mutable rotspeed = 0.0   (* rotation speed factor *)
    val mutable movespeed = 0.0  (* movement speed factor *)
    (* --- accessors --- *)
    method get_pos = pos
    method get_rotation = rotation
    method set_pos v = pos <- v
    method set_rotation r = rotation <- r
    method set_xrot v = xrot <- v
    method set_yrot v = yrot <- v
    method set_xmov v = xmov <- v
    method set_ymov v = ymov <- v
    method set_zmov v = zmov <- v
    method set_rotspeed v = rotspeed <- v
    method set_movespeed v = movespeed <- v
    (* Strafe along the camera's local x-axis. *)
    method movex xmmod =
      let vec = mult_quaternion_vector rotation (xmmod, 0.0, 0.0) in
      pos <- vectors_add pos vec;
    (* Move along the world y-axis (deliberately not rotated). *)
    method movey ymmod =
      let x, y, z = pos in
      let new_pos = (x, y - ymmod, z) in
      pos <- new_pos;
    (* Move along the camera's local z-axis (negative z is forward). *)
    method movez zmmod =
      let vec = mult_quaternion_vector rotation (0.0, 0.0, -. zmmod) in
      pos <- vectors_add pos vec;
    (* Pitch: rotate about the LOCAL x-axis (look up/down).
       NOTE(review): nrot is built directly with qw = angle, not
       cos(angle/2) as quaternion_of_axis would -- presumably relies
       on normalisation elsewhere; confirm. *)
    method rotatex xrmod =
      let nrot = { qx = 1.0; qy = 0.0; qz = 0.0; qw = xrmod * piover180 } in
      rotation <- mult_quaternion rotation nrot; (* TODO: check if the order is right *)
    (* Yaw: rotate about the GLOBAL y-axis (look left/right); note the
       reversed multiplication order compared to rotatex. *)
    method rotatey yrmod =
      let nrot = quaternion (0.0, 1.0, 0.0) (yrmod * piover180) in
      rotation <- mult_quaternion nrot rotation; (* TODO: check if the order is right *)
    (* Advance the camera by [seconds], applying any pending
       rotation/movement requests scaled by the speed factors. *)
    method tick seconds =
      if (xrot <> 0.0) then s#rotatex(xrot * seconds * rotspeed);
      if (yrot <> 0.0) then s#rotatey(yrot * seconds * rotspeed);
      if (xmov <> 0.0) then s#movex(xmov * seconds * movespeed);
      if (ymov <> 0.0) then s#movey(ymov * seconds * movespeed);
      if (zmov <> 0.0) then s#movez(zmov * seconds * movespeed);
  end
;;
In this code , xrot , yrot , xmov , ymov and zmov are floats representing how fast
the player wants to rotate / move around / on this axis . " seconds " is the time
passed since the last call to tick . rotspeed and movespeed represent how fast
the camera can rotate or move . piover180 is defined as pi/180 , so multiplying
with it converts from degrees to radians .
You might be wondering why in rotatex we multiply " rotation * nrot " and in
rotatey " nrot * rotation " . As I said , multiplication is not commutative . The
first rotates the existing quaternion around x ( looking up and down ) , the second
rotates an upward - quaternion around the existing rotation . This way , we look
left / right around the global y - axis , while rotation up / down is around the local
x - axis . This is the behaviour you have in a 3D shooter . Try to change the order
of rotations to see what happens .
In this code, xrot, yrot, xmov, ymov and zmov are floats representing how fast
the player wants to rotate/move around/on this axis. "seconds" is the time
passed since the last call to tick. rotspeed and movespeed represent how fast
the camera can rotate or move. piover180 is defined as pi/180, so multiplying
with it converts from degrees to radians.
You might be wondering why in rotatex we multiply "rotation * nrot" and in
rotatey "nrot * rotation". As I said, multiplication is not commutative. The
first rotates the existing quaternion around x (looking up and down), the second
rotates an upward-quaternion around the existing rotation. This way, we look
left/right around the global y-axis, while rotation up/down is around the local
x-axis. This is the behaviour you have in a 3D shooter. Try to change the order
of rotations to see what happens.
*)
| null | https://raw.githubusercontent.com/fccm/glMLite/c52cd806909581e49d9b660195576c8a932f6d33/toolbox/quaternions/quaternions.ml | ocaml | * Quaternions
OpenGL:Tutorials:Using Quaternions to represent rotation
from: :Tutorials:Using_Quaternions_to_represent_rotation
there are only float calculations in this module
Don't normalize if we don't have to
We need to get the inverse of a quaternion to properly apply a
quaternion-rotation to a vector.
* The conjugate of a quaternion is the same as the inverse, as long as the
quaternion is unit-length
To apply a quaternion-rotation to a vector, you need to multiply the vector by
the quaternion and its conjugate.
\vec{v}' = q\; \vec{v}\; \overline{q}
Multiplying a quaternion q with a vector v applies the q-rotation to v
==== How to convert to/from quaternions ====
In the following, I will present the methods necessary to convert all kind of
rotation-representations to and from quaternions. I'll not show how to derive
them because, well, who cares? (oh, and because I don't know how)
Convert to Matrix
TODO: check if the order is right
TODO: check if the order is right | Content is available under GNU Free Documentation License 1.2
History log of the document , and authors :
26 - 05 - 2006 11:39 Tannin ( Initial post )
26 - 05 - 2006 ( typo )
25 - 07 - 2006 19:05 ( Just what is a quaternion ? )
25 - 07 - 2006 19:06 81.174.37.6 ( Just what is a quaternion ? )
30 - 07 - 2006 06:38 203.206.100.123 ( How to convert to / from quaternions - fix formatting )
30 - 07 - 2006 11:40 203.206.100.123 ( add category tags )
03 - 10 - 2006 21:10 144.30.108.40 ( Some basic quaternion operations )
05 - 10 - 2006 08:21 84.56.180.65
30 - 10 - 2007 22:41 158.130.58.47 ( Just what is a quaternion ? )
30 - 10 - 2007 22:42 158.130.58.47 ( Why use quaternions )
26 - 11 - 2007 19:08 151.188.17.247 ( Some basic quaternion operations )
27 - 11 - 2007 ( Reverted edit of 151.188.17.247 , changed back to last version by 158.130.58.47 )
13 - 01 - 2008 ( Just what is a quaternion ? )
13 - 01 - 2008 ( Why use quaternions )
13 - 01 - 2008 ( Why use quaternions )
13 - 01 - 2008 10:29 ( Added section " Why quaternions are neat " )
13 - 01 - 2008 18:28 ( Some basic quaternion operations )
13 - 01 - 2008 18:48 ( How to convert to / from quaternions )
26 - 01 - 2008 06:51 142.167.170.72 ( Rotating vectors )
15 - 01 - 2009 15:04 83.237.221.197 ( access to ' y ' variable was ambigous )
06 - 08 - 2009 23:10 Geveno ( Normalizing a quaternion )
06 - 08 - 2009 23:39 Liam ( Bug fix )
15 - 12 - 2009 01:01 Helge ( Error in vector part : You should add the cross - product , not subtract ( found the correct answer on Wikipedia ) )
26-05-2006 11:39 Tannin (Initial post)
26-05-2006 11:42 Tannin (typo)
25-07-2006 19:05 81.174.37.6 (Just what is a quaternion?)
25-07-2006 19:06 81.174.37.6 (Just what is a quaternion?)
30-07-2006 06:38 203.206.100.123 (How to convert to/from quaternions - fix formatting)
30-07-2006 11:40 203.206.100.123 (add category tags)
03-10-2006 21:10 144.30.108.40 (Some basic quaternion operations)
05-10-2006 08:21 84.56.180.65
30-10-2007 22:41 158.130.58.47 (Just what is a quaternion?)
30-10-2007 22:42 158.130.58.47 (Why use quaternions)
26-11-2007 19:08 151.188.17.247 (Some basic quaternion operations)
27-11-2007 10:25 Codehead (Reverted edit of 151.188.17.247, changed back to last version by 158.130.58.47)
13-01-2008 09:42 Ronguida (Just what is a quaternion?)
13-01-2008 09:48 Ronguida (Why use quaternions)
13-01-2008 09:51 Ronguida (Why use quaternions)
13-01-2008 10:29 Ronguida (Added section "Why quaternions are neat")
13-01-2008 18:28 Ronguida (Some basic quaternion operations)
13-01-2008 18:48 Ronguida (How to convert to/from quaternions)
26-01-2008 06:51 142.167.170.72 (Rotating vectors)
15-01-2009 15:04 83.237.221.197 (access to 'y' variable was ambigous)
06-08-2009 23:10 Geveno (Normalizing a quaternion)
06-08-2009 23:39 Liam (Bug fix)
15-12-2009 01:01 Helge (Error in vector part: You should add the cross-product, not subtract (found the correct answer on Wikipedia))
*)
(* Shadow the integer arithmetic operators with their float primitives,
   so the code in this file can write e.g. [a * b] directly on floats. *)
external ( + ) : float -> float -> float = "%addfloat"
external ( - ) : float -> float -> float = "%subfloat"
external ( * ) : float -> float -> float = "%mulfloat"
external ( / ) : float -> float -> float = "%divfloat"
= = = = Foreword and warning = = = =
Quaternions are all the rage these days for ( 3D ) computer games , so this wiki
would n't be complete without an explanation about them . Unfortunately , I 'm not
exactly a quaternion - specialist , so there might be errors here . I hope someone
with more knowledge on the topic will review this article . Although this article
is in the OpenGL - section , the background information is of course true for Direct3D
too . As far as I know , D3D also has some convenience functions for Quaternions .
= = = = Just what is a quaternion ? = = = =
A quaternion is an element of a 4 dimensional vector - space . It 's defined as
w + xi + yj + zk where i , j and k are imaginary numbers . Alternatively , a
quaternion is what you get when you add a scalar and a 3d vector .
The math behind quaternions is only slightly harder than the math behind vectors ,
but I 'm going to spare you ( for the moment ) .
Sounds scary ? Ok , so now you know never to ask that question again ...
Fortunately for you , we will only work with a subset of quaternions : unit
quaternions . Those are quaternions with length 1 . Every unit quaternion
represents a 3D rotation , and every 3D rotation has two unit quaternion
representations . Unit quaternions are a way to compactly represent 3D rotations
while avoiding singularities or discontinuities ( e.g. gimbal lock ) .
+ -------------------------------------+---------------------------------------------+
| Rotation | Quaternions |
+ -------------------------------------+---------------------------------------------+
| Identity ( no rotation ) | 1 , -1 |
| 180 degrees about x - axis | i , -i |
| 180 degrees about y - axis | j , -j |
| 180 degrees about z - axis | k , -k |
| angle θ , axis ( unit vector ) \vec{n } | \pm[\cos(\theta/2 ) + \vec{n}\sin(\theta/2 ) ] |
+ -------------------------------------+---------------------------------------------+
= = = = Why use quaternions = = = =
Quaternions have some advantages over other representations of rotations .
- Quaternions do n't suffer from gimbal lock , unlike Euler angles .
- They can be represented as 4 numbers , in contrast to the 9 numbers of
a rotations matrix .
- The conversion to and from axis / angle representation is trivial .
- Smooth interpolation between two quaternions is easy ( in contrast to
axis / angle or rotation matrices ) .
- After a lot of calculations on quaternions and matrices , rounding errors
accumulate , so you have to normalize quaternions and orthogonalize a rotation
matrix , but normalizing a quaternion is a lot less troublesome than
orthogonalizing a matrix .
- Similar to rotation matrices , you can just multiply 2 quaternions together
to receive a quaternion that represents both rotations .
The only disadvantages of quaternions are :
- They are hard to visualize .
- You have to convert them to get a human - readable representation ( Euler angles )
or something OpenGL can understand ( Matrix ) .
- Smooth interpolation between quaternions is complicated by the fact that each
3D rotation has two representations .
= = = = Why quaternions are neat = = = =
Quaternions are neat because the unit quaternions are a double - cover for the set
of 3D rotations .
To understand what that means , consider a Mobius strip . To make one , start with
a strip of paper and bring the ends together . Give one of the ends a half - twist ,
and then attach it to the other end . The resulting surface is a Mobius strip .
The Mobius strip has a very important feature : it 's non - orientable . When I want
to build and render an object , like a sphere or a cylinder , I need to make sure
all my vertex normals point the same way ( e.g. outward ) . If I try to render a
Mobius strip , I 'll get stuck because I wo n't be able to assign consistent
outward - pointing vector normals . That 's what it means to be non - orientable .
The solution to this problem is to simply duplicate each vertex . One copy gets
an " outward " pointing normal , and the other copy gets an " inward " pointing
normal . When I render the Mobius strip , I 'll have to draw each polygon twice ,
once facing " outward " , and again facing " inward " . Mathematicians would call
this a double - cover .
The set of 3D rotations can be thought of as a 3D surface sitting in 4D
hyper - space . Like the Mobius strip , this surface is non - orientable . As a
result , if I try to represent a 3D rotation using three numbers ( e.g. Euler
angles ) , I 'll get stuck with either a singularity or a discontinuity .
Just like with the Mobius strip , I can solve the problem with a double - cover .
I simply duplicate each possible 3D rotation ; one copy gets a " positive "
quaternion , and the other copy gets a " negative " quaternion . The resulting
representation is equivalent to a 3 - sphere in 4D hyper - space , it 's orientable ,
and it completely avoids the problem of singularities and discontinuities .
= = = = Some basic quaternion operations = = = =
Here are some methods you will regularly need to work with quaternions .
==== Foreword and warning ====
Quaternions are all the rage these days for (3D) computer games, so this wiki
wouldn't be complete without an explanation about them. Unfortunately, I'm not
exactly a quaternion-specialist, so there might be errors here. I hope someone
with more knowledge on the topic will review this article. Although this article
is in the OpenGL-section, the background information is of course true for Direct3D
too. As far as I know, D3D also has some convenience functions for Quaternions.
==== Just what is a quaternion? ====
A quaternion is an element of a 4 dimensional vector-space. It's defined as
w + xi + yj + zk where i, j and k are imaginary numbers. Alternatively, a
quaternion is what you get when you add a scalar and a 3d vector.
The math behind quaternions is only slightly harder than the math behind vectors,
but I'm going to spare you (for the moment).
Sounds scary? Ok, so now you know never to ask that question again...
Fortunately for you, we will only work with a subset of quaternions: unit
quaternions. Those are quaternions with length 1. Every unit quaternion
represents a 3D rotation, and every 3D rotation has two unit quaternion
representations. Unit quaternions are a way to compactly represent 3D rotations
while avoiding singularities or discontinuities (e.g. gimbal lock).
+-------------------------------------+---------------------------------------------+
| Rotation | Quaternions |
+-------------------------------------+---------------------------------------------+
| Identity (no rotation) | 1, -1 |
| 180 degrees about x-axis | i, -i |
| 180 degrees about y-axis | j, -j |
| 180 degrees about z-axis | k, -k |
| angle θ, axis (unit vector) \vec{n} | \pm[\cos(\theta/2) + \vec{n}\sin(\theta/2)] |
+-------------------------------------+---------------------------------------------+
==== Why use quaternions ====
Quaternions have some advantages over other representations of rotations.
- Quaternions don't suffer from gimbal lock, unlike Euler angles.
- They can be represented as 4 numbers, in contrast to the 9 numbers of
a rotations matrix.
- The conversion to and from axis/angle representation is trivial.
- Smooth interpolation between two quaternions is easy (in contrast to
axis/angle or rotation matrices).
- After a lot of calculations on quaternions and matrices, rounding errors
accumulate, so you have to normalize quaternions and orthogonalize a rotation
matrix, but normalizing a quaternion is a lot less troublesome than
orthogonalizing a matrix.
- Similar to rotation matrices, you can just multiply 2 quaternions together
to receive a quaternion that represents both rotations.
The only disadvantages of quaternions are:
- They are hard to visualize.
- You have to convert them to get a human-readable representation (Euler angles)
or something OpenGL can understand (Matrix).
- Smooth interpolation between quaternions is complicated by the fact that each
3D rotation has two representations.
==== Why quaternions are neat ====
Quaternions are neat because the unit quaternions are a double-cover for the set
of 3D rotations.
To understand what that means, consider a Mobius strip. To make one, start with
a strip of paper and bring the ends together. Give one of the ends a half-twist,
and then attach it to the other end. The resulting surface is a Mobius strip.
The Mobius strip has a very important feature: it's non-orientable. When I want
to build and render an object, like a sphere or a cylinder, I need to make sure
all my vertex normals point the same way (e.g. outward). If I try to render a
Mobius strip, I'll get stuck because I won't be able to assign consistent
outward-pointing vector normals. That's what it means to be non-orientable.
The solution to this problem is to simply duplicate each vertex. One copy gets
an "outward" pointing normal, and the other copy gets an "inward" pointing
normal. When I render the Mobius strip, I'll have to draw each polygon twice,
once facing "outward", and again facing "inward". Mathematicians would call
this a double-cover.
The set of 3D rotations can be thought of as a 3D surface sitting in 4D
hyper-space. Like the Mobius strip, this surface is non-orientable. As a
result, if I try to represent a 3D rotation using three numbers (e.g. Euler
angles), I'll get stuck with either a singularity or a discontinuity.
Just like with the Mobius strip, I can solve the problem with a double-cover.
I simply duplicate each possible 3D rotation; one copy gets a "positive"
quaternion, and the other copy gets a "negative" quaternion. The resulting
representation is equivalent to a 3-sphere in 4D hyper-space, it's orientable,
and it completely avoids the problem of singularities and discontinuities.
==== Some basic quaternion operations ====
Here are some methods you will regularly need to work with quaternions.
*)
(* A quaternion: qx, qy, qz form the vector part, qw the scalar part.
   Fields are mutable so that operations such as [normalise] can update a
   quaternion in place. *)
type quaternion =
{ mutable qx:float;
mutable qy:float;
mutable qz:float;
mutable qw:float; }
(* Build a quaternion from a vector part and a scalar part. *)
let quaternion (vx, vy, vz) scalar =
  { qx = vx; qy = vy; qz = vz; qw = scalar }
(* The identity rotation: zero vector part, unit scalar part. *)
let identity_quaternion () =
  { qx = 0.0; qy = 0.0; qz = 0.0; qw = 1.0 }
(** {3 Normalizing a quaternion}
    Normalising a quaternion works much like normalising a vector.  This
    function does nothing if the quaternion is already close enough to
    being unit length; the tolerance is something small like 0.00001 to
    get accurate results. *)
(* Bring [quat] back to unit length, mutating it in place.  Does nothing
   when the squared magnitude is zero or already within [tolerance] of 1. *)
let normalise quat =
  let tolerance = 0.00001 in
  let mag2 =
    quat.qw *. quat.qw +. quat.qx *. quat.qx
    +. quat.qy *. quat.qy +. quat.qz *. quat.qz
  in
  if mag2 <> 0.0 && abs_float (mag2 -. 1.0) > tolerance then begin
    let mag = sqrt mag2 in
    quat.qx <- quat.qx /. mag;
    quat.qy <- quat.qy /. mag;
    quat.qz <- quat.qz /. mag;
    quat.qw <- quat.qw /. mag
  end
(** {3 The complex conjugate of a quaternion} *)
(* The conjugate: negate the vector part, keep the scalar part.  Returns a
   fresh quaternion; [quat] is not modified. *)
let getConjugate quat =
  { quat with
    qx = -. quat.qx;
    qy = -. quat.qy;
    qz = -. quat.qz }
(** {3 Multiplying quaternions}
    To multiply two quaternions, write each one as the sum of a scalar and
    a vector.  The product of q_1 = w_1 + \vec{v_1} and q_2 = w_2 + \vec{v_2}
    is q = w + \vec{v} where
      w       = w_1 w_2 - \vec{v_1} \cdot \vec{v_2}
      \vec{v} = w_1 \vec{v_2} + w_2 \vec{v_1} + \vec{v_1} \times \vec{v_2}
    Multiplying q1 with q2 applies the rotation q2 to q1. *)
(* Hamilton product of [q1] and [rq]; applies the rotation [rq] to [q1].
   NOT commutative -- see the note below. *)
let mult_quaternion q1 rq =
{
qx = q1.qw *. rq.qx +. q1.qx *. rq.qw +. q1.qy *. rq.qz -. q1.qz *. rq.qy;
qy = q1.qw *. rq.qy +. q1.qy *. rq.qw +. q1.qz *. rq.qx -. q1.qx *. rq.qz;
qz = q1.qw *. rq.qz +. q1.qz *. rq.qw +. q1.qx *. rq.qy -. q1.qy *. rq.qx;
qw = q1.qw *. rq.qw -. q1.qx *. rq.qx -. q1.qy *. rq.qy -. q1.qz *. rq.qz;
}
(* Please note: quaternion multiplication is NOT commutative, so q1 * q2 is
   not the same as q2 * q1.  This is pretty obvious actually: as explained,
   quaternions represent rotations and multiplying them "concatenates" the
   rotations.  Take your hand and hold it parallel to the floor so it
   points away from you.  Rotate it 90 degrees around the x-axis so it is
   pointing upward.  Now rotate it 90 degrees clockwise around its local
   y-axis (the one coming out of the back of your hand).  Your hand should
   now be pointing to your right, with you looking at the back of your
   hand.  Now invert the rotations: rotate your hand around the y-axis so
   it's facing right with the back of the hand facing upwards, then rotate
   around the x-axis -- your hand is pointing up, back of hand facing your
   left.  See, the order in which you apply rotations matters. *)
(** {3 Rotating vectors} *)
(* Scale a 3D vector to unit length.
   NOTE(review): a zero vector yields NaNs (division by zero length). *)
let normalise_vector (vx, vy, vz) =
  let magnitude = sqrt (vx *. vx +. vy *. vy +. vz *. vz) in
  (vx /. magnitude, vy /. magnitude, vz /. magnitude)
(* Rotate the 3D vector [vec] by quaternion [q]: normalise the vector,
   embed it as a pure quaternion (scalar part 0), then compute
   q * v * conjugate(q) and read back the vector part. *)
let mult_quaternion_vector q vec =
  let ux, uy, uz = normalise_vector vec in
  let pure_quat = { qx = ux; qy = uy; qz = uz; qw = 0.0 } in
  let rotated = mult_quaternion q (mult_quaternion pure_quat (getConjugate q)) in
  (rotated.qx, rotated.qy, rotated.qz)
;;
(** {3 Quaternion from axis-angle}
    To rotate through an angle theta about the axis (unit vector) \vec{v},
    use:  q = \cos(\theta/2) + \vec{v}\sin(\theta/2) *)
(* Convert from axis-angle. *)
(* Build the rotation of [angle] radians about [vec] (normalised first):
   q = cos(angle/2) + v * sin(angle/2). *)
let quaternion_of_axis vec angle =
  let half = angle *. 0.5 in
  let ax, ay, az = normalise_vector vec in
  let s = sin half in
  { qx = ax *. s;
    qy = ay *. s;
    qz = az *. s;
    qw = cos half }
;;
(** {3 Quaternion from Euler angles} *)
let piover180 = 3.14159265358979312 /. 180.0
(* Convert from Euler angles. *)
(* Build a quaternion from Euler angles given in degrees. *)
let quaternion_of_euler ~pitch ~yaw ~roll =
  (* Basically we create 3 quaternions -- one for pitch, one for yaw, one
     for roll -- and multiply those together; the calculation below does
     the same, just shorter.  (The original inline comment had lost its
     delimiters, which made this definition a syntax error.) *)
  let p = pitch *. piover180 /. 2.0
  and y = yaw *. piover180 /. 2.0
  and r = roll *. piover180 /. 2.0 in
  let sinp = sin p
  and siny = sin y
  and sinr = sin r
  and cosp = cos p
  and cosy = cos y
  and cosr = cos r in
  let q = {
    qx = sinr *. cosp *. cosy -. cosr *. sinp *. siny;
    qy = cosr *. sinp *. cosy +. sinr *. cosp *. siny;
    qz = cosr *. cosp *. siny -. sinr *. sinp *. cosy;
    qw = cosr *. cosp *. cosy +. sinr *. sinp *. siny;
  } in
  (* Guard against rounding drift before handing the quaternion back. *)
  normalise q;
  q
;;
(** {3 Quaternion to matrix} *)
(* Convert a quaternion to a 4x4 rotation matrix.  Fixes the original's use
   of the integer operators [*], [-], [+] on floats (OCaml requires [*.],
   [-.], [+.]), and restores the comment that had lost its delimiters. *)
let matrix_of_quaternion quat =
  let x = quat.qx
  and y = quat.qy
  and z = quat.qz
  and w = quat.qw in
  (* Pairwise products.  This calculation would be a lot more complicated
     for non-unit length quaternions. *)
  let x2 = x *. x
  and y2 = y *. y
  and z2 = z *. z
  and xy = x *. y
  and xz = x *. z
  and yz = y *. z
  and wx = w *. x
  and wy = w *. y
  and wz = w *. z in
  (* Note: the matrix is laid out in column-major format, as expected by
     OpenGL. *)
  [|
    1.0 -. 2.0 *. (y2 +. z2); 2.0 *. (xy -. wz); 2.0 *. (xz +. wy); 0.0;
    2.0 *. (xy +. wz); 1.0 -. 2.0 *. (x2 +. z2); 2.0 *. (yz -. wx); 0.0;
    2.0 *. (xz -. wy); 2.0 *. (yz +. wx); 1.0 -. 2.0 *. (x2 +. y2); 0.0;
    0.0; 0.0; 0.0; 1.0;
  |]
;;
(** {3 Quaternion to axis-angle}
    Given a quaternion q = w + \vec{v}, the (non-normalized) rotation axis
    is simply \vec{v}, provided that an axis exists.  For very small
    rotations \vec{v} gets close to the zero vector, so when we compute
    the normalized rotation axis the calculation may blow up.  In
    particular, the identity rotation has \vec{v} = 0, so its rotation
    axis is undefined.
    To find the angle of rotation, note that w = \cos(\theta/2) and
    \|v\| = \sin(\theta/2). *)
(* Convert to axis/angle. *)
(* Recover the (axis, angle) pair from a quaternion.  Fixes the original's
   integer operators on floats ([*], [/], [+] -> [*.], [/.], [+.]). *)
let axisAngle_of_quaternion quat =
  let x = quat.qx
  and y = quat.qy
  and z = quat.qz
  and w = quat.qw in
  (* Length of the vector part; zero for the identity rotation, in which
     case the axis is undefined and the divisions below produce NaNs. *)
  let scale = sqrt (x *. x +. y *. y +. z *. z) in
  let axis =
    ( x /. scale,
      y /. scale,
      z /. scale )
  and angle = (acos w) *. 2.0 in
  (axis, angle)
;;
(** {3 Example}
    With the quaternion type above it is very simple to create a camera
    class that has one such quaternion representing its orientation: *)
(* Component-wise addition of two 3D float vectors.  Fixes the original's
   integer [+] on floats (OCaml requires [+.]). *)
let vectors_add (x1, y1, z1) (x2, y2, z2) =
  (x1 +. x2, y1 +. y2, z1 +. z2)
(* A simple FPS-style camera: a position plus a quaternion orientation. *)
class camera =
  object (s)
    val mutable pos = (0.0, 0.0, 0.0)
    val mutable rotation = identity_quaternion ()
    val mutable xrot = 0.0
    val mutable yrot = 0.0
    val mutable xmov = 0.0
    val mutable ymov = 0.0
    val mutable zmov = 0.0
    val mutable rotspeed = 0.0
    val mutable movespeed = 0.0
    method get_pos = pos
    method get_rotation = rotation
    method set_pos v = pos <- v
    method set_rotation r = rotation <- r
    method set_xrot v = xrot <- v
    method set_yrot v = yrot <- v
    method set_xmov v = xmov <- v
    method set_ymov v = ymov <- v
    method set_zmov v = zmov <- v
    method set_rotspeed v = rotspeed <- v
    method set_movespeed v = movespeed <- v

    (* Strafe along the camera's local x axis. *)
    method movex xmmod =
      let vec = mult_quaternion_vector rotation (xmmod, 0.0, 0.0) in
      pos <- vectors_add pos vec

    (* Vertical movement is along the global y axis (fixed [-] -> [-.]). *)
    method movey ymmod =
      let x, y, z = pos in
      let new_pos = (x, y -. ymmod, z) in
      pos <- new_pos

    (* Move along the camera's local (negated) z axis, i.e. forward. *)
    method movez zmmod =
      let vec = mult_quaternion_vector rotation (0.0, 0.0, -. zmmod) in
      pos <- vectors_add pos vec

    (* Look up/down: apply the new rotation around the LOCAL x axis,
       i.e. "rotation * nrot".
       NOTE(review): the assignment to [rotation] was missing from the
       extracted source and has been reconstructed from the accompanying
       text; also, [nrot] is built directly rather than via
       [quaternion_of_axis] -- confirm against the original tutorial. *)
    method rotatex xrmod =
      let nrot = { qx = 1.0; qy = 0.0; qz = 0.0; qw = xrmod *. piover180 } in
      rotation <- mult_quaternion rotation nrot

    (* Look left/right: apply the new rotation around the GLOBAL y axis,
       i.e. "nrot * rotation".  Reconstructed as above. *)
    method rotatey yrmod =
      let nrot = quaternion (0.0, 1.0, 0.0) (yrmod *. piover180) in
      rotation <- mult_quaternion nrot rotation

    (* Advance the camera by [seconds] of wall time, scaled by the
       configured rotation/movement speeds (fixed [*] -> [*.]). *)
    method tick seconds =
      if xrot <> 0.0 then s#rotatex (xrot *. seconds *. rotspeed);
      if yrot <> 0.0 then s#rotatey (yrot *. seconds *. rotspeed);
      if xmov <> 0.0 then s#movex (xmov *. seconds *. movespeed);
      if ymov <> 0.0 then s#movey (ymov *. seconds *. movespeed);
      if zmov <> 0.0 then s#movez (zmov *. seconds *. movespeed)
  end
;;
(* In this code, xrot, yrot, xmov, ymov and zmov are floats representing
   how fast the player wants to rotate/move around/on each axis.
   "seconds" is the time passed since the last call to tick; rotspeed and
   movespeed represent how fast the camera can rotate or move.  piover180
   is defined as pi/180, so multiplying by it converts degrees to radians.

   You might be wondering why rotatex multiplies "rotation * nrot" while
   rotatey multiplies "nrot * rotation".  As said above, multiplication is
   not commutative: the first rotates the existing quaternion around x
   (looking up and down), the second rotates an upward quaternion around
   the existing rotation.  This way we look left/right around the global
   y-axis, while rotation up/down is around the local x-axis -- the
   behaviour you have in a 3D shooter.  Try changing the order of the
   rotations to see what happens. *)
|
d7bb098f3a0381e908dacd7927fed6233536b8b4b015a8345fe9e56cdbf5b8eb | 1Jajen1/Brokkr | Vector.hs | # LANGUAGE FunctionalDependencies #
-- | Minimal vector-space abstraction: vector addition plus scaling by a
-- value of the associated scalar type @k@.
module Util.Linear.Vector (
VectorSpace(..)
, (|-|)
) where
-- | Types forming a vector space over scalar type @k@.  The functional
-- dependency @v -> k@ means each vector type determines its scalar type.
class VectorSpace k v | v -> k where
(|+|) :: v -> v -> v
(|*|) :: k -> v -> v
-- | Subtraction, derived from addition and scaling by @(-1)@.
(|-|) :: (Num k, VectorSpace k v) => v -> v -> v
(|-|) l r = l |+| ((-1) |*| r)
| null | https://raw.githubusercontent.com/1Jajen1/Brokkr/5c128a47c7123e576b4e415048d58c2ab3f4a0aa/src/Util/Linear/Vector.hs | haskell | # LANGUAGE FunctionalDependencies #
module Util.Linear.Vector (
VectorSpace(..)
, (|-|)
) where
class VectorSpace k v | v -> k where
(|+|) :: v -> v -> v
(|*|) :: k -> v -> v
(|-|) :: (Num k, VectorSpace k v) => v -> v -> v
(|-|) l r = l |+| ((-1) |*| r)
| |
c78bf89d57bb656ae0de101c72206418febf45fd2aeeed49003d0e274169ad74 | russell/cl-git | buffer.lisp | -*- Mode : Lisp ; Syntax : COMMON - LISP ; Base : 10 -*-
;; cl-git is a Common Lisp interface to git repositories.
Copyright ( C ) 2011 - 2022 < >
;;
;; This program is free software: you can redistribute it and/or
;; modify it under the terms of the GNU Lesser General Public License
as published by the Free Software Foundation , either version 3 of
the License , or ( at your option ) any later version .
;;
;; This program is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;; Lesser General Public License for more details.
;;
You should have received a copy of the GNU Lesser General Public
;; License along with this program. If not, see
;; </>.
(in-package #:cl-git)
;; CFFI foreign-type wrapper (a GIT-POINTER subtype) parsed with the
;; %BUFFER simple parser; its actual foreign representation is a raw
;; pointer.
(define-foreign-type buffer (git-pointer)
nil
(:actual-type :pointer)
(:simple-parser %buffer))
;; Foreign function binding taking a pointer to a git_buf struct and
;; returning nothing; presumably wraps libgit2's git_buf_free --
;; verify CFFI's name translation against the loaded library.
(defcfun %git-buf-free
:void
(buffer (:pointer (:struct git-buf))))
| null | https://raw.githubusercontent.com/russell/cl-git/db84343e6b756b26fc64877583b41e887bd74602/src/buffer.lisp | lisp | Syntax : COMMON - LISP ; Base : 10 -*-
cl-git is a Common Lisp interface to git repositories.
This program is free software: you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public License
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this program. If not, see
</>. |
Copyright ( C ) 2011 - 2022 < >
as published by the Free Software Foundation , either version 3 of
the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
(in-package #:cl-git)
(define-foreign-type buffer (git-pointer)
nil
(:actual-type :pointer)
(:simple-parser %buffer))
(defcfun %git-buf-free
:void
(buffer (:pointer (:struct git-buf))))
|
6a7c6cb9426c07c5c68a7915f1562fa00fba80d1fd43118c9d053fdd659fcdaf | clash-lang/clash-compiler | KnownCase.hs | # OPTIONS_GHC -Wno - overlapping - patterns #
{-# LANGUAGE OverloadedStrings #-}
-- Case alternatives should be selected by the partial evalautor when they
-- can be statically determined. This means we need to show that each type of
-- pattern can be correctly identified (Data, Literal, Default). In these tests
-- we give top entities which
--
-- * syntactically start with a case when translated to core
-- * have a statically known scrutinee, so an alternative should be selected
--
-- and check that the partial evaluator selects the correct branch (which
-- will be a list of the first 10 Catalan numbers).
--
module KnownCase where
import Control.Monad (unless)
import Clash.Prelude
import Clash.Backend
import Clash.Core.PartialEval
import Clash.Core.Name
import Clash.Core.Subst
import Clash.Core.Term
import Clash.Core.TyCon
import Clash.Core.Var
import Clash.Core.VarEnv
import Clash.Driver.Types
import Clash.GHC.PartialEval
import Test.Tasty.Clash
import Test.Tasty.Clash.CoreTest
-- | The expected result: the first ten Catalan numbers.  Every top entity
-- below must partially evaluate to exactly this list.
-- (Restores the pragma's @{-# ... #-}@ delimiters lost in extraction.)
{-# INLINE matchedAlt #-}
matchedAlt :: [Integer]
matchedAlt = [1, 1, 2, 5, 14, 42, 132, 429, 1430, 4862]
-- | Scrutinee matched by a Data pattern ('Just'): the partial evaluator
-- should select the 'Just' alternative, yielding 'matchedAlt'.
-- (Restores the NOINLINE and ANN pragma delimiters lost in extraction.)
{-# NOINLINE caseOfData #-}
{-# ANN caseOfData (Synthesize
  { t_name = "caseOfData"
  , t_inputs = []
  , t_output = PortName "res"
  }) #-}
caseOfData :: [Integer]
caseOfData = maybe [] (const matchedAlt) (Just 0)
-- | Scrutinee matched by integer Literal patterns: the @3@ alternative
-- should be selected, yielding 'matchedAlt'.
-- (Restores the NOINLINE and ANN pragma delimiters lost in extraction.)
{-# NOINLINE caseOfLit #-}
{-# ANN caseOfLit (Synthesize
  { t_name = "caseOfLit"
  , t_inputs = []
  , t_output = PortName "res"
  }) #-}
caseOfLit :: [Integer]
caseOfLit =
  case (3 :: Integer) of
    2 -> [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
    3 -> matchedAlt
    _ -> []
-- | Scrutinee matched only by the Default pattern: the wildcard
-- alternative should be selected, yielding 'matchedAlt'.
-- (Restores the NOINLINE and ANN pragma delimiters lost in extraction.)
{-# NOINLINE caseOfDefault #-}
{-# ANN caseOfDefault (Synthesize
  { t_name = "caseOfDefault"
  , t_inputs = []
  , t_output = PortName "res"
  }) #-}
caseOfDefault :: [Integer]
caseOfDefault =
  case 'X' of
    _ -> matchedAlt
-- | Path to this module's own source file, re-compiled by 'mainCommon'.
testPath :: FilePath
testPath = "tests/shouldwork/PartialEvaluation/KnownCase.hs"
-- | Compile this module to Core for the given HDL target, look up the
-- three case-selection entities, and check each one was reduced to
-- 'matchedAlt'.
mainCommon
:: (Backend (TargetToState target))
=> SBuildTarget target
-> IO ()
mainCommon hdl = do
entities <- runToCoreStage hdl id testPath
-- the expected term and the three entities under test
alt <- findBinding "KnownCase.matchedAlt" entities
just <- findBinding "KnownCase.caseOfData" entities
lit <- findBinding "KnownCase.caseOfLit" entities
def <- findBinding "KnownCase.caseOfDefault" entities
-- alpha-equivalence (not syntactic equality) is the success criterion
unless (aeqTerm just alt) $
error ("Not alpha equivalent: " <> show just <> "\n\n" <> show alt)
unless (aeqTerm lit alt) $
error ("Not alpha equivalent: " <> show lit <> "\n\n" <> show alt)
unless (aeqTerm def alt) $
error ("Not alpha equivalent: " <> show def <> "\n\n" <> show alt)
-- | Run the check for the VHDL target.
mainVHDL :: IO ()
mainVHDL = mainCommon SVHDL
-- | Run the check for the Verilog target.
mainVerilog :: IO ()
mainVerilog = mainCommon SVerilog
-- | Run the check for the SystemVerilog target.
mainSystemVerilog :: IO ()
mainSystemVerilog = mainCommon SSystemVerilog
| null | https://raw.githubusercontent.com/clash-lang/clash-compiler/8e461a910f2f37c900705a0847a9b533bce4d2ea/tests/shouldwork/PartialEvaluation/KnownCase.hs | haskell | # LANGUAGE OverloadedStrings #
Case alternatives should be selected by the partial evalautor when they
can be statically determined. This means we need to show that each type of
pattern can be correctly identified (Data, Literal, Default). In these tests
we give top entities which
* syntactically start with a case when translated to core
* have a statically known scrutinee, so an alternative should be selected
and check that the partial evaluator selects the correct branch (which
| # OPTIONS_GHC -Wno - overlapping - patterns #
will is a list of the first 10 Catalan numbers ) .
module KnownCase where
import Control.Monad (unless)
import Clash.Prelude
import Clash.Backend
import Clash.Core.PartialEval
import Clash.Core.Name
import Clash.Core.Subst
import Clash.Core.Term
import Clash.Core.TyCon
import Clash.Core.Var
import Clash.Core.VarEnv
import Clash.Driver.Types
import Clash.GHC.PartialEval
import Test.Tasty.Clash
import Test.Tasty.Clash.CoreTest
# INLINE matchedAlt #
matchedAlt :: [Integer]
matchedAlt = [1, 1, 2, 5, 14, 42, 132, 429, 1430, 4862]
# NOINLINE caseOfData #
# ANN caseOfData ( Synthesize
{ t_name = " caseOfData "
, t_inputs = [ ]
, t_output = PortName " res "
} )
#
{ t_name = "caseOfData"
, t_inputs = []
, t_output = PortName "res"
})
#-}
caseOfData :: [Integer]
caseOfData = maybe [] (const matchedAlt) (Just 0)
# NOINLINE caseOfLit #
# ANN caseOfLit ( Synthesize
{ t_name = " caseOfLit "
, t_inputs = [ ]
, t_output = PortName " res "
} )
#
{ t_name = "caseOfLit"
, t_inputs = []
, t_output = PortName "res"
})
#-}
caseOfLit :: [Integer]
caseOfLit =
case (3 :: Integer) of
2 -> [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
3 -> matchedAlt
_ -> []
# NOINLINE caseOfDefault #
# ANN caseOfDefault ( Synthesize
{ t_name = " caseOfDefault "
, t_inputs = [ ]
, t_output = PortName " res "
} )
#
{ t_name = "caseOfDefault"
, t_inputs = []
, t_output = PortName "res"
})
#-}
caseOfDefault :: [Integer]
caseOfDefault =
case 'X' of
_ -> matchedAlt
testPath :: FilePath
testPath = "tests/shouldwork/PartialEvaluation/KnownCase.hs"
mainCommon
:: (Backend (TargetToState target))
=> SBuildTarget target
-> IO ()
mainCommon hdl = do
entities <- runToCoreStage hdl id testPath
alt <- findBinding "KnownCase.matchedAlt" entities
just <- findBinding "KnownCase.caseOfData" entities
lit <- findBinding "KnownCase.caseOfLit" entities
def <- findBinding "KnownCase.caseOfDefault" entities
unless (aeqTerm just alt) $
error ("Not alpha equivalent: " <> show just <> "\n\n" <> show alt)
unless (aeqTerm lit alt) $
error ("Not alpha equivalent: " <> show lit <> "\n\n" <> show alt)
unless (aeqTerm def alt) $
error ("Not alpha equivalent: " <> show def <> "\n\n" <> show alt)
mainVHDL :: IO ()
mainVHDL = mainCommon SVHDL
mainVerilog :: IO ()
mainVerilog = mainCommon SVerilog
mainSystemVerilog :: IO ()
mainSystemVerilog = mainCommon SSystemVerilog
|
ad0036c93520a4f9ad0f3ed10afbaf152889cd7d27a09c5901f9f17601a32c51 | haskell/statistics | Kendall.hs | # LANGUAGE BangPatterns , FlexibleContexts #
-- |
-- Module : Statistics.Correlation.Kendall
--
-- Fast O(NlogN) implementation of
< 's tau > .
--
This module implements 's tau form b which allows ties in the data .
This is the same formula used by other statistical packages , e.g. , R , matlab .
--
-- > \tau = \frac{n_c - n_d}{\sqrt{(n_0 - n_1)(n_0 - n_2)}}
--
where n_0 = n(n-1)\/2 , n_1 = number of pairs tied for the first quantify ,
n_2 = number of pairs tied for the second quantify ,
n_c = number of concordant pairs$ , n_d = number of discordant pairs .
module Statistics.Correlation.Kendall
( kendall
-- * References
-- $references
) where
import Control.Monad.ST (ST, runST)
import Data.Bits (shiftR)
import Data.Function (on)
import Data.STRef
import qualified Data.Vector.Algorithms.Intro as I
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Generic.Mutable as GM
-- | /O(nlogn)/ Compute the Kendall's tau from a vector of paired data.
-- Return NaN when number of pairs <= 1.
-- (Restores the Haddock markers and INLINE pragma delimiters lost in
-- extraction.)
kendall :: (Ord a, Ord b, G.Vector v (a, b)) => v (a, b) -> Double
kendall xy'
  | G.length xy' <= 1 = 0/0   -- tau is undefined for fewer than 2 pairs
  | otherwise = runST $ do
      xy <- G.thaw xy'
      let n = GM.length xy
      n_dRef <- newSTRef 0
      -- sort by the first component, then count ties on x and on (x,y)
      I.sort xy
      tieX <- numOfTiesBy ((==) `on` fst) xy
      tieXY <- numOfTiesBy (==) xy
      -- a counting merge sort on the second component yields the number
      -- of inversions, i.e. the number of discordant pairs
      tmp <- GM.new n
      mergeSort (compare `on` snd) xy tmp n_dRef
      tieY <- numOfTiesBy ((==) `on` snd) xy
      n_d <- readSTRef n_dRef
      let n_0 = (fromIntegral n * (fromIntegral n-1)) `shiftR` 1 :: Integer
          n_c = n_0 - n_d - tieX - tieY + tieXY
      return $ fromIntegral (n_c - n_d) /
               (sqrt.fromIntegral) ((n_0 - tieX) * (n_0 - tieY))
{-# INLINE kendall #-}
-- calculate number of tied pairs in a sorted vector
-- (Restores the INLINE pragma delimiters lost in extraction.)
numOfTiesBy :: GM.MVector v a
            => (a -> a -> Bool) -> v s a -> ST s Integer
numOfTiesBy f xs = do count <- newSTRef (0::Integer)
                      loop count (1::Int) (0::Int)
                      readSTRef count
  where
    n = GM.length xs
    -- acc is the length of the current run of elements equal under f
    loop c !acc !i | i >= n - 1 = modifySTRef' c (+ g acc)
                   | otherwise = do
                       x1 <- GM.unsafeRead xs i
                       x2 <- GM.unsafeRead xs (i+1)
                       if f x1 x2
                         then loop c (acc+1) (i+1)
                         else modifySTRef' c (+ g acc) >> loop c 1 (i+1)
    -- a run of k equal elements contributes k*(k-1)/2 tied pairs
    g x = fromIntegral ((x * (x - 1)) `shiftR` 1)
{-# INLINE numOfTiesBy #-}
-- Implementation of Knight's merge sort (adapted from vector-algorithm).
-- This function is used to count the number of discordant pairs.
-- (Restores the INLINE pragma delimiters lost in extraction.)
mergeSort :: GM.MVector v e
          => (e -> e -> Ordering)
          -> v s e
          -> v s e
          -> STRef s Integer
          -> ST s ()
mergeSort cmp src buf count = loop 0 (GM.length src - 1)
  where
    loop l u
      | u == l = return ()
      | u - l == 1 = do
          eL <- GM.unsafeRead src l
          eU <- GM.unsafeRead src u
          case cmp eL eU of
            GT -> do GM.unsafeWrite src l eU
                     GM.unsafeWrite src u eL
                     modifySTRef' count (+1)  -- one inversion
            _  -> return ()
      | otherwise = do
          let mid = (u + l) `shiftR` 1
          loop l mid
          loop mid u
          merge cmp (GM.unsafeSlice l (u-l+1) src) buf (mid - l) count
{-# INLINE mergeSort #-}
-- Merge two sorted halves of @src@ (split at @mid@) using @buf@ as
-- scratch space, adding to @count@ the number of inversions encountered.
-- (Restores the INLINE pragma delimiters lost in extraction.)
merge :: GM.MVector v e
      => (e -> e -> Ordering)
      -> v s e
      -> v s e
      -> Int
      -> STRef s Integer
      -> ST s ()
merge cmp src buf mid count = do GM.unsafeCopy tmp lower
                                 eTmp <- GM.unsafeRead tmp 0
                                 eUpp <- GM.unsafeRead upper 0
                                 loop tmp 0 eTmp upper 0 eUpp 0
  where
    lower = GM.unsafeSlice 0 mid src
    upper = GM.unsafeSlice mid (GM.length src - mid) src
    tmp = GM.unsafeSlice 0 mid buf
    wroteHigh low iLow eLow high iHigh iIns
      | iHigh >= GM.length high =
          GM.unsafeCopy (GM.unsafeSlice iIns (GM.length low - iLow) src)
                        (GM.unsafeSlice iLow (GM.length low - iLow) low)
      | otherwise = do eHigh <- GM.unsafeRead high iHigh
                       loop low iLow eLow high iHigh eHigh iIns
    wroteLow low iLow high iHigh eHigh iIns
      | iLow >= GM.length low = return ()
      | otherwise = do eLow <- GM.unsafeRead low iLow
                       loop low iLow eLow high iHigh eHigh iIns
    loop !low !iLow !eLow !high !iHigh !eHigh !iIns = case cmp eHigh eLow of
      LT -> do GM.unsafeWrite src iIns eHigh
               -- every element left in the lower half is inverted w.r.t.
               -- eHigh, so they all count as discordant pairs
               modifySTRef' count (+ fromIntegral (GM.length low - iLow))
               wroteHigh low iLow eLow high (iHigh+1) (iIns+1)
      _  -> do GM.unsafeWrite src iIns eLow
               wroteLow low (iLow+1) high iHigh eHigh (iIns+1)
{-# INLINE merge #-}
-- $references
--
-- * Knight, W. R. (1966) A computer method for calculating Kendall's Tau
--   with ungrouped data. /Journal of the American Statistical Association/,
--   Vol. 61, No. 314, Part 1, pp. 436-439.
--
| null | https://raw.githubusercontent.com/haskell/statistics/d018625f33e8b01911674bffdfcf541415cc3455/Statistics/Correlation/Kendall.hs | haskell | |
Module : Statistics.Correlation.Kendall
Fast O(NlogN) implementation of
> \tau = \frac{n_c - n_d}{\sqrt{(n_0 - n_1)(n_0 - n_2)}}
* References
$references
calculate number of tied pairs in a sorted vector
Implementation of Knight's merge sort (adapted from vector-algorithm). This
function is used to count the number of discordant pairs.
$references
| # LANGUAGE BangPatterns , FlexibleContexts #
< 's tau > .
This module implements 's tau form b which allows ties in the data .
This is the same formula used by other statistical packages , e.g. , R , matlab .
where n_0 = n(n-1)\/2 , n_1 = number of pairs tied for the first quantify ,
n_2 = number of pairs tied for the second quantify ,
n_c = number of concordant pairs$ , n_d = number of discordant pairs .
module Statistics.Correlation.Kendall
( kendall
) where
import Control.Monad.ST (ST, runST)
import Data.Bits (shiftR)
import Data.Function (on)
import Data.STRef
import qualified Data.Vector.Algorithms.Intro as I
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Generic.Mutable as GM
| /O(nlogn)/ Compute the Kendall 's tau from a vector of paired data .
Return NaN when number of pairs < = 1 .
kendall :: (Ord a, Ord b, G.Vector v (a, b)) => v (a, b) -> Double
kendall xy'
| G.length xy' <= 1 = 0/0
| otherwise = runST $ do
xy <- G.thaw xy'
let n = GM.length xy
n_dRef <- newSTRef 0
I.sort xy
tieX <- numOfTiesBy ((==) `on` fst) xy
tieXY <- numOfTiesBy (==) xy
tmp <- GM.new n
mergeSort (compare `on` snd) xy tmp n_dRef
tieY <- numOfTiesBy ((==) `on` snd) xy
n_d <- readSTRef n_dRef
let n_0 = (fromIntegral n * (fromIntegral n-1)) `shiftR` 1 :: Integer
n_c = n_0 - n_d - tieX - tieY + tieXY
return $ fromIntegral (n_c - n_d) /
(sqrt.fromIntegral) ((n_0 - tieX) * (n_0 - tieY))
# INLINE kendall #
numOfTiesBy :: GM.MVector v a
=> (a -> a -> Bool) -> v s a -> ST s Integer
numOfTiesBy f xs = do count <- newSTRef (0::Integer)
loop count (1::Int) (0::Int)
readSTRef count
where
n = GM.length xs
loop c !acc !i | i >= n - 1 = modifySTRef' c (+ g acc)
| otherwise = do
x1 <- GM.unsafeRead xs i
x2 <- GM.unsafeRead xs (i+1)
if f x1 x2
then loop c (acc+1) (i+1)
else modifySTRef' c (+ g acc) >> loop c 1 (i+1)
g x = fromIntegral ((x * (x - 1)) `shiftR` 1)
# INLINE numOfTiesBy #
mergeSort :: GM.MVector v e
=> (e -> e -> Ordering)
-> v s e
-> v s e
-> STRef s Integer
-> ST s ()
mergeSort cmp src buf count = loop 0 (GM.length src - 1)
where
loop l u
| u == l = return ()
| u - l == 1 = do
eL <- GM.unsafeRead src l
eU <- GM.unsafeRead src u
case cmp eL eU of
GT -> do GM.unsafeWrite src l eU
GM.unsafeWrite src u eL
modifySTRef' count (+1)
_ -> return ()
| otherwise = do
let mid = (u + l) `shiftR` 1
loop l mid
loop mid u
merge cmp (GM.unsafeSlice l (u-l+1) src) buf (mid - l) count
# INLINE mergeSort #
merge :: GM.MVector v e
=> (e -> e -> Ordering)
-> v s e
-> v s e
-> Int
-> STRef s Integer
-> ST s ()
merge cmp src buf mid count = do GM.unsafeCopy tmp lower
eTmp <- GM.unsafeRead tmp 0
eUpp <- GM.unsafeRead upper 0
loop tmp 0 eTmp upper 0 eUpp 0
where
lower = GM.unsafeSlice 0 mid src
upper = GM.unsafeSlice mid (GM.length src - mid) src
tmp = GM.unsafeSlice 0 mid buf
wroteHigh low iLow eLow high iHigh iIns
| iHigh >= GM.length high =
GM.unsafeCopy (GM.unsafeSlice iIns (GM.length low - iLow) src)
(GM.unsafeSlice iLow (GM.length low - iLow) low)
| otherwise = do eHigh <- GM.unsafeRead high iHigh
loop low iLow eLow high iHigh eHigh iIns
wroteLow low iLow high iHigh eHigh iIns
| iLow >= GM.length low = return ()
| otherwise = do eLow <- GM.unsafeRead low iLow
loop low iLow eLow high iHigh eHigh iIns
loop !low !iLow !eLow !high !iHigh !eHigh !iIns = case cmp eHigh eLow of
LT -> do GM.unsafeWrite src iIns eHigh
modifySTRef' count (+ fromIntegral (GM.length low - iLow))
wroteHigh low iLow eLow high (iHigh+1) (iIns+1)
_ -> do GM.unsafeWrite src iIns eLow
wroteLow low (iLow+1) high iHigh eHigh (iIns+1)
# INLINE merge #
* . ( 1966 ) A computer method for calculating 's Tau
with ungrouped data . /Journal of the American Statistical Association/ ,
Vol . 61 , No . 314 , Part 1 , pp . 436 - 439 . < >
|
6fe5ecfe5238b30c5345fc006c2e83e83c0d59d28c9bf965ac6e17f63c8e0ee2 | lambdacube3d/lambdacube-gl | HelloEmbedded.hs | # LANGUAGE PackageImports , LambdaCase , OverloadedStrings #
import "GLFW-b" Graphics.UI.GLFW as GLFW
import qualified Data.Map as Map
import qualified Data.Vector as V
import LambdaCube.GL as LambdaCubeGL -- renderer
import LambdaCube.GL.Mesh as LambdaCubeGL
import Codec.Picture as Juicy
import LambdaCube.Compiler as LambdaCube -- compiler
----------------------------------------------------
-- See: -started
----------------------------------------------------
-- | Compile the @hello.lc@ pipeline, set up a GLFW window, storage,
-- geometry and texture, then render until Escape is pressed.
-- Fixes: three inline comments had lost their @--@ markers (syntax
-- errors), and the render loop was attached with an invalid @where@
-- inside the do-block; it is now a local @let@ with identical behaviour.
main :: IO ()
main = do
  -- compile hello.lc to graphics pipeline description
  pipelineDesc <- LambdaCube.compileMain ["."] OpenGL33 "hello.lc" >>= \case
    Left err -> fail $ "compile error:\n" ++ ppShow err
    Right pd -> return pd

  win <- initWindow "LambdaCube 3D DSL Hello World" 640 640

  -- setup render data
  let inputSchema = makeSchema $ do
        defObjectArray "objects" Triangles $ do
          "position" @: Attribute_V2F
          "uv" @: Attribute_V2F
        defUniforms $ do
          "time" @: Float
          "diffuseTexture" @: FTexture2D

  storage <- LambdaCubeGL.allocStorage inputSchema

  -- upload geometry to GPU and add to pipeline input
  LambdaCubeGL.uploadMeshToGPU triangleA >>= LambdaCubeGL.addMeshToObjectArray storage "objects" []
  LambdaCubeGL.uploadMeshToGPU triangleB >>= LambdaCubeGL.addMeshToObjectArray storage "objects" []

  -- load image and upload texture
  Right img <- Juicy.readImage "logo.png"
  textureData <- LambdaCubeGL.uploadTexture2DToGPU img

  -- allocate GL pipeline
  renderer <- LambdaCubeGL.allocRenderer pipelineDesc

  let keyIsPressed k = fmap (==KeyState'Pressed) $ GLFW.getKey win k
      loop = do
        -- update graphics input
        GLFW.getWindowSize win >>= \(w,h) -> LambdaCubeGL.setScreenSize storage (fromIntegral w) (fromIntegral h)
        LambdaCubeGL.updateUniforms storage $ do
          "diffuseTexture" @= return textureData
          "time" @= do
            Just t <- GLFW.getTime
            return (realToFrac t :: Float)
        -- render
        LambdaCubeGL.renderFrame renderer
        GLFW.swapBuffers win
        GLFW.pollEvents
        escape <- keyIsPressed Key'Escape
        if escape then return () else loop

  LambdaCubeGL.setStorage renderer storage >>= \case -- check schema compatibility
    Just err -> putStrLn err
    Nothing -> loop

  LambdaCubeGL.disposeRenderer renderer
  LambdaCubeGL.disposeStorage storage
  GLFW.destroyWindow win
  GLFW.terminate
-- geometry data: triangles
-- | One half of the [-1,1]x[-1,1] square, with matching UV coordinates.
triangleA :: LambdaCubeGL.Mesh
triangleA = Mesh
{ mAttributes = Map.fromList
[ ("position", A_V2F $ V.fromList [V2 1 1, V2 1 (-1), V2 (-1) (-1)])
, ("uv", A_V2F $ V.fromList [V2 1 1, V2 0 1, V2 0 0])
]
, mPrimitive = P_Triangles
}
-- | The other half of the [-1,1]x[-1,1] square; together with 'triangleA'
-- it covers the whole square.
triangleB :: LambdaCubeGL.Mesh
triangleB = Mesh
{ mAttributes = Map.fromList
[ ("position", A_V2F $ V.fromList [V2 1 1, V2 (-1) (-1), V2 (-1) 1])
, ("uv", A_V2F $ V.fromList [V2 1 1, V2 0 0, V2 1 0])
]
, mPrimitive = P_Triangles
}
-- | Initialise GLFW and create an OpenGL 3.3 core-profile,
-- forward-compatible window with the given title and size, making its
-- context current.
initWindow :: String -> Int -> Int -> IO Window
initWindow title width height = do
GLFW.init
GLFW.defaultWindowHints
mapM_ GLFW.windowHint
[ WindowHint'ContextVersionMajor 3
, WindowHint'ContextVersionMinor 3
, WindowHint'OpenGLProfile OpenGLProfile'Core
, WindowHint'OpenGLForwardCompat True
]
Just win <- GLFW.createWindow width height title Nothing Nothing
GLFW.makeContextCurrent $ Just win
return win
| null | https://raw.githubusercontent.com/lambdacube3d/lambdacube-gl/bcd2c642d1282646d0022fc0da1d2ccaa7b40ab4/examples/HelloEmbedded.hs | haskell | renderer
compiler
--------------------------------------------------
See: -started
--------------------------------------------------
setup render data
load image and upload texture
check schema compatibility
update graphics input
render
geometry data: triangles | # LANGUAGE PackageImports , LambdaCase , OverloadedStrings #
import "GLFW-b" Graphics.UI.GLFW as GLFW
import qualified Data.Map as Map
import qualified Data.Vector as V
import LambdaCube.GL.Mesh as LambdaCubeGL
import Codec.Picture as Juicy
main :: IO ()
main = do
compile hello.lc to graphics pipeline description
pipelineDesc <- LambdaCube.compileMain ["."] OpenGL33 "hello.lc" >>= \case
Left err -> fail $ "compile error:\n" ++ ppShow err
Right pd -> return pd
win <- initWindow "LambdaCube 3D DSL Hello World" 640 640
let inputSchema = makeSchema $ do
defObjectArray "objects" Triangles $ do
"position" @: Attribute_V2F
"uv" @: Attribute_V2F
defUniforms $ do
"time" @: Float
"diffuseTexture" @: FTexture2D
storage <- LambdaCubeGL.allocStorage inputSchema
upload geometry to GPU and add to pipeline input
LambdaCubeGL.uploadMeshToGPU triangleA >>= LambdaCubeGL.addMeshToObjectArray storage "objects" []
LambdaCubeGL.uploadMeshToGPU triangleB >>= LambdaCubeGL.addMeshToObjectArray storage "objects" []
Right img <- Juicy.readImage "logo.png"
textureData <- LambdaCubeGL.uploadTexture2DToGPU img
allocate GL pipeline
renderer <- LambdaCubeGL.allocRenderer pipelineDesc
Just err -> putStrLn err
Nothing -> loop
where loop = do
GLFW.getWindowSize win >>= \(w,h) -> LambdaCubeGL.setScreenSize storage (fromIntegral w) (fromIntegral h)
LambdaCubeGL.updateUniforms storage $ do
"diffuseTexture" @= return textureData
"time" @= do
Just t <- GLFW.getTime
return (realToFrac t :: Float)
LambdaCubeGL.renderFrame renderer
GLFW.swapBuffers win
GLFW.pollEvents
let keyIsPressed k = fmap (==KeyState'Pressed) $ GLFW.getKey win k
escape <- keyIsPressed Key'Escape
if escape then return () else loop
LambdaCubeGL.disposeRenderer renderer
LambdaCubeGL.disposeStorage storage
GLFW.destroyWindow win
GLFW.terminate
triangleA :: LambdaCubeGL.Mesh
triangleA = Mesh
{ mAttributes = Map.fromList
[ ("position", A_V2F $ V.fromList [V2 1 1, V2 1 (-1), V2 (-1) (-1)])
, ("uv", A_V2F $ V.fromList [V2 1 1, V2 0 1, V2 0 0])
]
, mPrimitive = P_Triangles
}
triangleB :: LambdaCubeGL.Mesh
triangleB = Mesh
{ mAttributes = Map.fromList
[ ("position", A_V2F $ V.fromList [V2 1 1, V2 (-1) (-1), V2 (-1) 1])
, ("uv", A_V2F $ V.fromList [V2 1 1, V2 0 0, V2 1 0])
]
, mPrimitive = P_Triangles
}
initWindow :: String -> Int -> Int -> IO Window
initWindow title width height = do
GLFW.init
GLFW.defaultWindowHints
mapM_ GLFW.windowHint
[ WindowHint'ContextVersionMajor 3
, WindowHint'ContextVersionMinor 3
, WindowHint'OpenGLProfile OpenGLProfile'Core
, WindowHint'OpenGLForwardCompat True
]
Just win <- GLFW.createWindow width height title Nothing Nothing
GLFW.makeContextCurrent $ Just win
return win
|
298a75025fbf30e3ca04562e6d3e3ab3fbfd50a57673d00ccf89463e4f9f08aa | gldubc/cast-machine | syntax.ml | (** Writing a functor for type/epxressions **)
open Primitives
open Types
open Types.Print
module type Gradual_Type = sig
type var = Types.var
type b = Types.b
type t = Types.t (* static type *)
type tau = Types.t (* dynamic type *)
type subst = Types.subst
val subst_single : t -> var * t -> t
val ceil : tau -> t
val floor : tau -> t
val result : tau -> tau -> tau
end
module CDuce_Gradual_Types : Gradual_Type = struct
type t = Types.t
type var = Types.var
type subst = Types.subst
type b = Types.b
type tau = t
let subst_single t s = CD.Types.Subst.apply (CD.Types.Subst.from_list [s]) t
let result tapp targ = app tapp targ
(* ceil and floor functions *)
warning : these two functions erase type variables with Any ...
let rec ceil t =
if CD.Types.Arrow.is_empty (CD.Types.Arrow.get t) then
subst_single t (v_dyn, any)
else
let (dom,arr) = get t in
let arr' = List.map (fun l ->
List.map (fun (d,r) -> (floor d, ceil r)) l) arr
in teg (dom, arr')
and floor t =
if CD.Types.Arrow.is_empty (CD.Types.Arrow.get t) then
subst_single t (v_dyn, empty)
else
let (dom,arr) = get t in
let arr' = List.map (fun l ->
List.map (fun (d,r) -> (ceil d, floor r)) l) arr
in teg (dom, arr')
end
module type Cast_Expr = sig
include Gradual_Type
type p (* blame labels *)
type alpha_vector
type t_vector
type castkind
type e =
| Var of var
| Cst of b
| FCst of (b -> b)
| Pair of e * e
| Let of var * e * e
| LetP of (var * var) * e * e
| Letrec of var * e * e
| Mu of tau * var * var * e
| App of e * e
| Apply of int * tau * var * e * var list * e list
| Cast of e * castkind
| Succ of e | Pred of e | Fst of e | Snd of e
| Mult of e * e | Plus of e * e | Minus of e * e | Mod of e * e | Div of e * e
| Ifz of e * e * e
| Eq of e * e
| Set of e * e * e | Make of e | Get of e * e
| Seq of e * e
(* | Unit *)
type prog =
| Expr of e
| Eol
end
module Make_Expr (Init_Type : Gradual_Type) : (Cast_Expr
with type castkind := Init_Type.tau * Init_Type.tau) =
struct
include Init_Type
type alpha_vector = Init_Type.var list
type t_vector = Init_Type.t list
type p = (* blame label *)
[ | `Simple of int
| `Pos of int * int
| `Neg of int * int ]
type castkind = tau * tau
type e =
| Var of var
| Cst of b
| FCst of (b -> b)
| Pair of e * e
| Let of var * e * e
| LetP of (var * var) * e * e
| Letrec of var * e * e
| Mu of tau * var * var * e
| App of e * e
| Apply of int * tau * var * e * var list * e list
| Cast of e * castkind
| Succ of e | Pred of e | Fst of e | Snd of e
| Mult of e * e | Plus of e * e | Minus of e * e | Mod of e * e | Div of e * e
| Ifz of e * e * e
| Eq of e * e
| Set of e * e * e | Make of e | Get of e * e
| Seq of e * e
(* | Unit *)
(* | TwoCast of e * tau * tau *)
(* for now no product, let and type abstraction *)
(* | `Prd of e * e *)
(* | `Pi1 of e *)
(* | `Pi2 of e *)
(* | `TMu of al _,pha_vector * e *)
(* | `TApp of e * t_vector *)
type prog =
| Expr of e
| Eol
end
module Eager = struct
include Make_Expr(CDuce_Gradual_Types)
module Print = struct
let pprint_p = function
| `Simple n -> string_of_int n
| `Neg (n, m) | `Pos (n, m) -> Printf.sprintf "(%s, %s)" (string_of_int n) (string_of_int m)
let pprint_alpha_vector =
fun av -> "[" ^ (String.concat " ; " av) ^ "]"
let pprint_t_vector = fun tv ->
let stv = List.map show_typ tv in
String.concat " ; " stv
let sprintf = Printf.sprintf
let show_e : e -> string = function
| Var _ -> "Var"
| Cst _ -> "Cst"
| Pair _ -> "Pair"
| Mod _ -> "Mod"
| Mu _ -> "Mu"
| Eq _ -> "Eq"
| Ifz _ -> "Ifz"
| Let _ -> "Let"
| App _ -> "App"
| Letrec _ -> "Letrec"
| LetP (_, _, _) -> "LetP"
|Apply (_, _, _, _, _, _)
|Cast (_, _)
|Succ _ -> "Succ"
|Pred _ -> "Pred"
| _->"no implem"
let rec pprint_e : e -> string = fun e ->
let rec aux offset = function
(* | Unit -> "()" *)
| Var var -> pp_var var
| Cst b -> pp_b b
| Pair (e1, e2) ->
Printf.sprintf "pair (%s, %s)" (pprint_e e1) (pprint_e e2)
| Mod (e1, e2) ->
Printf.sprintf "(%s mod %s)" (pprint_e e1) (pprint_e e2)
| Fst e ->
sprintf "fst %s" (pprint_e e)
| Snd e ->
sprintf "snd %s" (pprint_e e)
| Mu (tau, _, var, e) ->
Printf.sprintf "λ [%s] %s . %s"
(show_tau tau) (pp_var var) (pprint_e e)
| Eq (e1, e2) ->
Printf.sprintf "%s = %s" (pprint_e e1) (pprint_e e2)
| Ifz (cond, e1, e2) ->
Printf.sprintf "%sif %s then %s%selse %s"
(if offset = "" then "\n\t" else offset)
(pprint_e cond) (aux offset e1)
(if offset = "" then "\n\t" else "\n" ^ offset) (aux offset e2)
| Letrec (x,e1,e2) ->
Printf.sprintf "let rec %s = %s in\n%s"
(pp_var x) (pprint_e e1) (aux "\t" e2)
| Let (x, e1, e2) ->
Printf.sprintf "let %s = %s in\n%s"
(pp_var x) (pprint_e e1) (aux "\t" e2)
| App (e1, e2) ->
let s_format : _ format =
(match e2 with
| Mu _
| Cast _ -> "(%s) (%s)"
| _ -> "(%s) %s") in
Printf.sprintf s_format (pprint_e e1) (pprint_e e2)
| Cast (e, (tau1, tau2)) ->
let s_format : _ format =
(match e with
| Mu _ -> "(%s) 〈%s, %s〉" (* careful: influences the variant type *)
| Cast _ -> "%s〈%s, %s〉" (* careful: influences the variant type *)
| _ -> "%s 〈%s, %s〉") in
Printf.sprintf s_format (pprint_e e) (show_tau tau1) (show_tau tau2)
| Succ (e) ->
Printf.sprintf "succ %s" (pprint_e e)
| Pred (e) ->
Printf.sprintf "pred %s" (pprint_e e)
| LetP ((x,y),e1,e2) ->
Printf.sprintf "letP (%s,%s) = %s in\n%s"
(pp_var x) (pp_var y) (pprint_e e1) (aux "\t" e2)
| Mult (e1, e2) ->
Printf.sprintf "%s * %s" (pprint_e e1) (pprint_e e2)
| Plus (e1, e2) ->
Printf.sprintf "%s + %s" (pprint_e e1) (pprint_e e2)
| Minus (e1, e2) ->
Printf.sprintf "%s - %s" (pprint_e e1) (pprint_e e2)
| Make (e) ->
Printf.sprintf "Array.make %s" (pprint_e e)
| Set (e1,e2,e3) ->
Printf.sprintf "Array.set %s %s %s" (pprint_e e1) (pprint_e e2)
(pprint_e e3)
| Get (a,i) ->
Printf.sprintf "Array.get %s %s" (pprint_e a) (pprint_e i)
| Seq (e1, e2) ->
Printf.sprintf "%s ; %s" (pprint_e e1) (pprint_e e2)
| _->"not implem"
in aux "" e
| ` Prd ( e1 , e2 ) - >
Printf.sprintf " ( % s , % s ) " ( pprint_e e1 ) ( pprint_e e2 )
| ` Pi1 e - >
Printf.sprintf " π_1 % s " ( pprint_e e )
| ` Pi2 e - >
Printf.sprintf " π_1 % s " ( pprint_e e )
| ` Let ( var , e1 , e2 ) - >
Printf.sprintf " let % s = % s in % s " ( pp_var var ) ( pprint_e e1 ) ( pprint_e e2 )
| ` TMu ( av , _ , e ) - >
Printf.sprintf " Λ % s . % s " ( pprint_alpha_vector av ) ( pprint_e e )
| ` TApp ( e , tv ) - >
Printf.sprintf " ( % s ) [ % s ] " ( pprint_e e ) ( pprint_t_vector tv )
Printf.sprintf "(%s, %s)" (pprint_e e1) (pprint_e e2)
| `Pi1 e ->
Printf.sprintf "π_1 %s" (pprint_e e)
| `Pi2 e ->
Printf.sprintf "π_1 %s" (pprint_e e)
| `Let (var, e1, e2) ->
Printf.sprintf "let %s = %s in %s" (pp_var var) (pprint_e e1) (pprint_e e2)
| `TMu (av, _,e) ->
Printf.sprintf "Λ %s . %s" (pprint_alpha_vector av) (pprint_e e)
| `TApp (e, tv) ->
Printf.sprintf "(%s) [%s]" (pprint_e e) (pprint_t_vector tv) *)
let print_e = function e -> print_string (pprint_e e)
let print_t = fun t -> print_string (show_typ t)
end
end
module Symbolic =
struct
include CDuce_Gradual_Types
type alpha_vector = var list
type t_vector = t list
type p = (* blame label *)
[ | `Simple of int
| `Pos of int * int
| `Neg of int * int ]
(* hi *)
type sigma = Id of tau
| Cast of tau
| Comp of sigma * sigma
| App of tau * sigma
| Dom of sigma
type castkind = sigma
type e =
| Var of var
| Cst of b
| Let of var * e * e
| LetP of (var * var) * e * e
| Mu of tau * var * var * e
| App of e * e
| Cast of e * castkind
| Succ of e | Pred of e
let comp s1 s2 = Comp (s1, s2)
module Print = struct
let pprint_p = function
| `Simple n -> string_of_int n
| `Neg (n, m) | `Pos (n, m) -> Printf.sprintf "(%s, %s)" (string_of_int n) (string_of_int m)
let pprint_alpha_vector =
fun av -> "[" ^ (String.concat " ; " av) ^ "]"
let pprint_t_vector = fun tv ->
let stv = List.map show_typ tv in
String.concat " ; " stv
let rec (pprint_e : e -> string) = function
| Var var -> pp_var var
| Cst b -> pp_b b
| Mu (tau, _, var, e) ->
Printf.sprintf "(λ %s . %s) : %s" (pp_var var) (pprint_e e) (show_tau tau)
| Let (x, e1, e2) ->
Printf.sprintf "let %s = %s in %s"
(pp_var x) (pprint_e e1) (pprint_e e2)
| App (e1, e2) ->
Printf.sprintf "(%s) %s" (pprint_e e1) (pprint_e e2)
| Cast (e, (Cast t | Id t)) ->
let s_format : _ format =
(match e with
| Mu _ -> "(%s) 〈%s〉"
| Cast _ -> "%s〈%s〉"
| _ -> "%s 〈%s〉") in
Printf.sprintf s_format (pprint_e e) (show_tau t)
| Cast (e, _) ->
let s_format : _ format =
(match e with
| Mu _ -> "(%s) 〈sigma〉"
| Cast _ -> "%s〈sigma〉"
| _ -> "%s 〈sigma〉") in
Printf.sprintf s_format (pprint_e e)
| Succ (e) ->
Printf.sprintf "succ %s" (pprint_e e)
| Pred (e) ->
Printf.sprintf "pred %s" (pprint_e e)
| _->"not implem"
| ` Prd ( e1 , e2 ) - >
Printf.sprintf " ( % s , % s ) " ( pprint_e e1 ) ( pprint_e e2 )
| ` Pi1 e - >
Printf.sprintf " π_1 % s " ( pprint_e e )
| ` Pi2 e - >
Printf.sprintf " π_1 % s " ( pprint_e e )
| ` Let ( var , e1 , e2 ) - >
Printf.sprintf " let % s = % s in % s " ( pp_var var ) ( pprint_e e1 ) ( pprint_e e2 )
| ` TMu ( av , _ , e ) - >
Printf.sprintf " Λ % s . % s " ( pprint_alpha_vector av ) ( pprint_e e )
| ` TApp ( e , tv ) - >
Printf.sprintf " ( % s ) [ % s ] " ( pprint_e e ) ( pprint_t_vector tv )
Printf.sprintf "(%s, %s)" (pprint_e e1) (pprint_e e2)
| `Pi1 e ->
Printf.sprintf "π_1 %s" (pprint_e e)
| `Pi2 e ->
Printf.sprintf "π_1 %s" (pprint_e e)
| `Let (var, e1, e2) ->
Printf.sprintf "let %s = %s in %s" (pp_var var) (pprint_e e1) (pprint_e e2)
| `TMu (av, _,e) ->
Printf.sprintf "Λ %s . %s" (pprint_alpha_vector av) (pprint_e e)
| `TApp (e, tv) ->
Printf.sprintf "(%s) [%s]" (pprint_e e) (pprint_t_vector tv) *)
let print_e = function e -> print_string (pprint_e e)
let print_t = fun t -> print_string (show_typ t)
end
end
A naive implem of 's types and expressions from " Space - efficient [ ... ] " notes
(* module SE_Types = struct
(* type var = string *)
type var = string
type t =
[ | `TVar of var
| `Int
| `Bool
| `Arr of t * t
| `Or of t * t
| `And of t * t
| `Neg of t
| `Empty ]
type b = [ `I of int | `B of bool ]
type tau =
[ | `Dyn
| `TVar of var
| `Int
| `Bool
| `Arr of tau * tau
| `Or of tau * tau
| `And of tau * tau
| `Neg of tau
| `Empty ]
end *)
module POPL19_Types = struct
type var = string
type b = [ ` I of int | ` B of bool ]
type t = [
| ` TVar of var
| ` Int
| ` Bool
| ` Prod of t * t
| ` Arr of t * t ]
type tau = [
| ` Dyn
| ` TVar of var
| ` Int
| ` Bool
| ` Prod of tau * tau
| ` Arr of tau * tau ]
end
type var = string
type b = [ `I of int | `B of bool ]
type t = [
| `TVar of var
| `Int
| `Bool
| `Prod of t * t
| `Arr of t * t ]
type tau = [
| `Dyn
| `TVar of var
| `Int
| `Bool
| `Prod of tau * tau
| `Arr of tau * tau ]
end *)
module Make_POPL19 ( Init_Type : Dynamic_Type ) : Cast_Expr = struct
( * * POPL19 types and cast expressions
(** POPL19 types and cast expressions *)
include Init_Type
type alpha_vector = Init_Type.var list
type t_vector = Init_Type.t list
type p = (* blame label *)
[ | `Simple of int
| `Pos of int * int
| `Neg of int * int ]
type e =
[ | `Var of var
| `Cst of b
| `Mu of ta _,u * tau * var * e
| `App of e * e
| `Prd of e * e
| `Pi1 of e
| `Pi2 of e
| `Let of var * e * e
| `TMu of al _,pha_vector * e
| `TApp of e * t_vector
| `Cast of e * tau * p * tau ]
(* type v = int *)
(* type env = int *)
let create ( ) = 0
end *)
module POPL19 = ( Make_Cast_Language(POPL19_Types))(Make_POPL19 )
module SE = ( Make_Cast_Language(SE_Types))(Make_SE )
module SE = (Make_Cast_Language(SE_Types))(Make_SE) *)
module type Cast_Language = sig
include Cast_Expr
( * * DONE : find a way to define the signature of pprint_e and print_e
that is compatible with the open variant used to define the function
Here I found a way by adding a single constructor , but I would have like
to use an " empty open variant " [ < ] of which every open variant would be
a supertype , and therefore by contravariance every function
[ variant - > string ] would be a subtype of this type .
On the other hand , now it becomes impossible to use pprint_e in practice ,
because if I use for example on a list
[ ` Var " x " , ` Cst ( ` I 2 ) ]
which has type
[ > ` Var of string | ` Cst of b ]
then this type is not a subtype of [ < ` Var of string ]
include Cast_Expr
(** DONE : find a way to define the signature of pprint_e and print_e
that is compatible with the open variant used to define the function
Here I found a way by adding a single constructor, but I would have like
to use an "empty open variant" [< ] of which every open variant would be
a supertype, and therefore by contravariance every function
[variant -> string] would be a subtype of this type.
On the other hand, now it becomes impossible to use pprint_e in practice,
because if I use for example on a list
[ `Var "x", `Cst (`I 2) ]
which has type
[> `Var of string | `Cst of b ]
then this type is not a subtype of [< `Var of string ] *)
(** Conclusion: switched from variants to constructors, and
defined print_e elsewhere after importing this module. *)
end
module type Make_Cast_Expr = (Dynamic_Type -> Cast_Expr) *) | null | https://raw.githubusercontent.com/gldubc/cast-machine/34d79c324cd0a9aff52865ead19e74126b96daaa/src/syntax.ml | ocaml | * Writing a functor for type/epxressions *
static type
dynamic type
ceil and floor functions
blame labels
| Unit
blame label
| Unit
| TwoCast of e * tau * tau
for now no product, let and type abstraction
| `Prd of e * e
| `Pi1 of e
| `Pi2 of e
| `TMu of al _,pha_vector * e
| `TApp of e * t_vector
| Unit -> "()"
careful: influences the variant type
careful: influences the variant type
blame label
hi
module SE_Types = struct
(* type var = string
* POPL19 types and cast expressions
blame label
type v = int
type env = int
* DONE : find a way to define the signature of pprint_e and print_e
that is compatible with the open variant used to define the function
Here I found a way by adding a single constructor, but I would have like
to use an "empty open variant" [< ] of which every open variant would be
a supertype, and therefore by contravariance every function
[variant -> string] would be a subtype of this type.
On the other hand, now it becomes impossible to use pprint_e in practice,
because if I use for example on a list
[ `Var "x", `Cst (`I 2) ]
which has type
[> `Var of string | `Cst of b ]
then this type is not a subtype of [< `Var of string ]
* Conclusion: switched from variants to constructors, and
defined print_e elsewhere after importing this module. | open Primitives
open Types
open Types.Print
module type Gradual_Type = sig
type var = Types.var
type b = Types.b
type subst = Types.subst
val subst_single : t -> var * t -> t
val ceil : tau -> t
val floor : tau -> t
val result : tau -> tau -> tau
end
module CDuce_Gradual_Types : Gradual_Type = struct
type t = Types.t
type var = Types.var
type subst = Types.subst
type b = Types.b
type tau = t
let subst_single t s = CD.Types.Subst.apply (CD.Types.Subst.from_list [s]) t
let result tapp targ = app tapp targ
warning : these two functions erase type variables with Any ...
let rec ceil t =
if CD.Types.Arrow.is_empty (CD.Types.Arrow.get t) then
subst_single t (v_dyn, any)
else
let (dom,arr) = get t in
let arr' = List.map (fun l ->
List.map (fun (d,r) -> (floor d, ceil r)) l) arr
in teg (dom, arr')
and floor t =
if CD.Types.Arrow.is_empty (CD.Types.Arrow.get t) then
subst_single t (v_dyn, empty)
else
let (dom,arr) = get t in
let arr' = List.map (fun l ->
List.map (fun (d,r) -> (ceil d, floor r)) l) arr
in teg (dom, arr')
end
module type Cast_Expr = sig
include Gradual_Type
type alpha_vector
type t_vector
type castkind
type e =
| Var of var
| Cst of b
| FCst of (b -> b)
| Pair of e * e
| Let of var * e * e
| LetP of (var * var) * e * e
| Letrec of var * e * e
| Mu of tau * var * var * e
| App of e * e
| Apply of int * tau * var * e * var list * e list
| Cast of e * castkind
| Succ of e | Pred of e | Fst of e | Snd of e
| Mult of e * e | Plus of e * e | Minus of e * e | Mod of e * e | Div of e * e
| Ifz of e * e * e
| Eq of e * e
| Set of e * e * e | Make of e | Get of e * e
| Seq of e * e
type prog =
| Expr of e
| Eol
end
module Make_Expr (Init_Type : Gradual_Type) : (Cast_Expr
with type castkind := Init_Type.tau * Init_Type.tau) =
struct
include Init_Type
type alpha_vector = Init_Type.var list
type t_vector = Init_Type.t list
[ | `Simple of int
| `Pos of int * int
| `Neg of int * int ]
type castkind = tau * tau
type e =
| Var of var
| Cst of b
| FCst of (b -> b)
| Pair of e * e
| Let of var * e * e
| LetP of (var * var) * e * e
| Letrec of var * e * e
| Mu of tau * var * var * e
| App of e * e
| Apply of int * tau * var * e * var list * e list
| Cast of e * castkind
| Succ of e | Pred of e | Fst of e | Snd of e
| Mult of e * e | Plus of e * e | Minus of e * e | Mod of e * e | Div of e * e
| Ifz of e * e * e
| Eq of e * e
| Set of e * e * e | Make of e | Get of e * e
| Seq of e * e
type prog =
| Expr of e
| Eol
end
module Eager = struct
include Make_Expr(CDuce_Gradual_Types)
module Print = struct
let pprint_p = function
| `Simple n -> string_of_int n
| `Neg (n, m) | `Pos (n, m) -> Printf.sprintf "(%s, %s)" (string_of_int n) (string_of_int m)
let pprint_alpha_vector =
fun av -> "[" ^ (String.concat " ; " av) ^ "]"
let pprint_t_vector = fun tv ->
let stv = List.map show_typ tv in
String.concat " ; " stv
let sprintf = Printf.sprintf
let show_e : e -> string = function
| Var _ -> "Var"
| Cst _ -> "Cst"
| Pair _ -> "Pair"
| Mod _ -> "Mod"
| Mu _ -> "Mu"
| Eq _ -> "Eq"
| Ifz _ -> "Ifz"
| Let _ -> "Let"
| App _ -> "App"
| Letrec _ -> "Letrec"
| LetP (_, _, _) -> "LetP"
|Apply (_, _, _, _, _, _)
|Cast (_, _)
|Succ _ -> "Succ"
|Pred _ -> "Pred"
| _->"no implem"
let rec pprint_e : e -> string = fun e ->
let rec aux offset = function
| Var var -> pp_var var
| Cst b -> pp_b b
| Pair (e1, e2) ->
Printf.sprintf "pair (%s, %s)" (pprint_e e1) (pprint_e e2)
| Mod (e1, e2) ->
Printf.sprintf "(%s mod %s)" (pprint_e e1) (pprint_e e2)
| Fst e ->
sprintf "fst %s" (pprint_e e)
| Snd e ->
sprintf "snd %s" (pprint_e e)
| Mu (tau, _, var, e) ->
Printf.sprintf "λ [%s] %s . %s"
(show_tau tau) (pp_var var) (pprint_e e)
| Eq (e1, e2) ->
Printf.sprintf "%s = %s" (pprint_e e1) (pprint_e e2)
| Ifz (cond, e1, e2) ->
Printf.sprintf "%sif %s then %s%selse %s"
(if offset = "" then "\n\t" else offset)
(pprint_e cond) (aux offset e1)
(if offset = "" then "\n\t" else "\n" ^ offset) (aux offset e2)
| Letrec (x,e1,e2) ->
Printf.sprintf "let rec %s = %s in\n%s"
(pp_var x) (pprint_e e1) (aux "\t" e2)
| Let (x, e1, e2) ->
Printf.sprintf "let %s = %s in\n%s"
(pp_var x) (pprint_e e1) (aux "\t" e2)
| App (e1, e2) ->
let s_format : _ format =
(match e2 with
| Mu _
| Cast _ -> "(%s) (%s)"
| _ -> "(%s) %s") in
Printf.sprintf s_format (pprint_e e1) (pprint_e e2)
| Cast (e, (tau1, tau2)) ->
let s_format : _ format =
(match e with
| _ -> "%s 〈%s, %s〉") in
Printf.sprintf s_format (pprint_e e) (show_tau tau1) (show_tau tau2)
| Succ (e) ->
Printf.sprintf "succ %s" (pprint_e e)
| Pred (e) ->
Printf.sprintf "pred %s" (pprint_e e)
| LetP ((x,y),e1,e2) ->
Printf.sprintf "letP (%s,%s) = %s in\n%s"
(pp_var x) (pp_var y) (pprint_e e1) (aux "\t" e2)
| Mult (e1, e2) ->
Printf.sprintf "%s * %s" (pprint_e e1) (pprint_e e2)
| Plus (e1, e2) ->
Printf.sprintf "%s + %s" (pprint_e e1) (pprint_e e2)
| Minus (e1, e2) ->
Printf.sprintf "%s - %s" (pprint_e e1) (pprint_e e2)
| Make (e) ->
Printf.sprintf "Array.make %s" (pprint_e e)
| Set (e1,e2,e3) ->
Printf.sprintf "Array.set %s %s %s" (pprint_e e1) (pprint_e e2)
(pprint_e e3)
| Get (a,i) ->
Printf.sprintf "Array.get %s %s" (pprint_e a) (pprint_e i)
| Seq (e1, e2) ->
Printf.sprintf "%s ; %s" (pprint_e e1) (pprint_e e2)
| _->"not implem"
in aux "" e
| ` Prd ( e1 , e2 ) - >
Printf.sprintf " ( % s , % s ) " ( pprint_e e1 ) ( pprint_e e2 )
| ` Pi1 e - >
Printf.sprintf " π_1 % s " ( pprint_e e )
| ` Pi2 e - >
Printf.sprintf " π_1 % s " ( pprint_e e )
| ` Let ( var , e1 , e2 ) - >
Printf.sprintf " let % s = % s in % s " ( pp_var var ) ( pprint_e e1 ) ( pprint_e e2 )
| ` TMu ( av , _ , e ) - >
Printf.sprintf " Λ % s . % s " ( pprint_alpha_vector av ) ( pprint_e e )
| ` TApp ( e , tv ) - >
Printf.sprintf " ( % s ) [ % s ] " ( pprint_e e ) ( pprint_t_vector tv )
Printf.sprintf "(%s, %s)" (pprint_e e1) (pprint_e e2)
| `Pi1 e ->
Printf.sprintf "π_1 %s" (pprint_e e)
| `Pi2 e ->
Printf.sprintf "π_1 %s" (pprint_e e)
| `Let (var, e1, e2) ->
Printf.sprintf "let %s = %s in %s" (pp_var var) (pprint_e e1) (pprint_e e2)
| `TMu (av, _,e) ->
Printf.sprintf "Λ %s . %s" (pprint_alpha_vector av) (pprint_e e)
| `TApp (e, tv) ->
Printf.sprintf "(%s) [%s]" (pprint_e e) (pprint_t_vector tv) *)
let print_e = function e -> print_string (pprint_e e)
let print_t = fun t -> print_string (show_typ t)
end
end
module Symbolic =
struct
include CDuce_Gradual_Types
type alpha_vector = var list
type t_vector = t list
[ | `Simple of int
| `Pos of int * int
| `Neg of int * int ]
type sigma = Id of tau
| Cast of tau
| Comp of sigma * sigma
| App of tau * sigma
| Dom of sigma
type castkind = sigma
type e =
| Var of var
| Cst of b
| Let of var * e * e
| LetP of (var * var) * e * e
| Mu of tau * var * var * e
| App of e * e
| Cast of e * castkind
| Succ of e | Pred of e
let comp s1 s2 = Comp (s1, s2)
module Print = struct
let pprint_p = function
| `Simple n -> string_of_int n
| `Neg (n, m) | `Pos (n, m) -> Printf.sprintf "(%s, %s)" (string_of_int n) (string_of_int m)
let pprint_alpha_vector =
fun av -> "[" ^ (String.concat " ; " av) ^ "]"
let pprint_t_vector = fun tv ->
let stv = List.map show_typ tv in
String.concat " ; " stv
let rec (pprint_e : e -> string) = function
| Var var -> pp_var var
| Cst b -> pp_b b
| Mu (tau, _, var, e) ->
Printf.sprintf "(λ %s . %s) : %s" (pp_var var) (pprint_e e) (show_tau tau)
| Let (x, e1, e2) ->
Printf.sprintf "let %s = %s in %s"
(pp_var x) (pprint_e e1) (pprint_e e2)
| App (e1, e2) ->
Printf.sprintf "(%s) %s" (pprint_e e1) (pprint_e e2)
| Cast (e, (Cast t | Id t)) ->
let s_format : _ format =
(match e with
| Mu _ -> "(%s) 〈%s〉"
| Cast _ -> "%s〈%s〉"
| _ -> "%s 〈%s〉") in
Printf.sprintf s_format (pprint_e e) (show_tau t)
| Cast (e, _) ->
let s_format : _ format =
(match e with
| Mu _ -> "(%s) 〈sigma〉"
| Cast _ -> "%s〈sigma〉"
| _ -> "%s 〈sigma〉") in
Printf.sprintf s_format (pprint_e e)
| Succ (e) ->
Printf.sprintf "succ %s" (pprint_e e)
| Pred (e) ->
Printf.sprintf "pred %s" (pprint_e e)
| _->"not implem"
| ` Prd ( e1 , e2 ) - >
Printf.sprintf " ( % s , % s ) " ( pprint_e e1 ) ( pprint_e e2 )
| ` Pi1 e - >
Printf.sprintf " π_1 % s " ( pprint_e e )
| ` Pi2 e - >
Printf.sprintf " π_1 % s " ( pprint_e e )
| ` Let ( var , e1 , e2 ) - >
Printf.sprintf " let % s = % s in % s " ( pp_var var ) ( pprint_e e1 ) ( pprint_e e2 )
| ` TMu ( av , _ , e ) - >
Printf.sprintf " Λ % s . % s " ( pprint_alpha_vector av ) ( pprint_e e )
| ` TApp ( e , tv ) - >
Printf.sprintf " ( % s ) [ % s ] " ( pprint_e e ) ( pprint_t_vector tv )
Printf.sprintf "(%s, %s)" (pprint_e e1) (pprint_e e2)
| `Pi1 e ->
Printf.sprintf "π_1 %s" (pprint_e e)
| `Pi2 e ->
Printf.sprintf "π_1 %s" (pprint_e e)
| `Let (var, e1, e2) ->
Printf.sprintf "let %s = %s in %s" (pp_var var) (pprint_e e1) (pprint_e e2)
| `TMu (av, _,e) ->
Printf.sprintf "Λ %s . %s" (pprint_alpha_vector av) (pprint_e e)
| `TApp (e, tv) ->
Printf.sprintf "(%s) [%s]" (pprint_e e) (pprint_t_vector tv) *)
let print_e = function e -> print_string (pprint_e e)
let print_t = fun t -> print_string (show_typ t)
end
end
A naive implem of 's types and expressions from " Space - efficient [ ... ] " notes
type var = string
type t =
[ | `TVar of var
| `Int
| `Bool
| `Arr of t * t
| `Or of t * t
| `And of t * t
| `Neg of t
| `Empty ]
type b = [ `I of int | `B of bool ]
type tau =
[ | `Dyn
| `TVar of var
| `Int
| `Bool
| `Arr of tau * tau
| `Or of tau * tau
| `And of tau * tau
| `Neg of tau
| `Empty ]
end *)
module POPL19_Types = struct
type var = string
type b = [ ` I of int | ` B of bool ]
type t = [
| ` TVar of var
| ` Int
| ` Bool
| ` Prod of t * t
| ` Arr of t * t ]
type tau = [
| ` Dyn
| ` TVar of var
| ` Int
| ` Bool
| ` Prod of tau * tau
| ` Arr of tau * tau ]
end
type var = string
type b = [ `I of int | `B of bool ]
type t = [
| `TVar of var
| `Int
| `Bool
| `Prod of t * t
| `Arr of t * t ]
type tau = [
| `Dyn
| `TVar of var
| `Int
| `Bool
| `Prod of tau * tau
| `Arr of tau * tau ]
end *)
module Make_POPL19 ( Init_Type : Dynamic_Type ) : Cast_Expr = struct
( * * POPL19 types and cast expressions
include Init_Type
type alpha_vector = Init_Type.var list
type t_vector = Init_Type.t list
[ | `Simple of int
| `Pos of int * int
| `Neg of int * int ]
type e =
[ | `Var of var
| `Cst of b
| `Mu of ta _,u * tau * var * e
| `App of e * e
| `Prd of e * e
| `Pi1 of e
| `Pi2 of e
| `Let of var * e * e
| `TMu of al _,pha_vector * e
| `TApp of e * t_vector
| `Cast of e * tau * p * tau ]
let create ( ) = 0
end *)
module POPL19 = ( Make_Cast_Language(POPL19_Types))(Make_POPL19 )
module SE = ( Make_Cast_Language(SE_Types))(Make_SE )
module SE = (Make_Cast_Language(SE_Types))(Make_SE) *)
module type Cast_Language = sig
include Cast_Expr
( * * DONE : find a way to define the signature of pprint_e and print_e
that is compatible with the open variant used to define the function
Here I found a way by adding a single constructor , but I would have like
to use an " empty open variant " [ < ] of which every open variant would be
a supertype , and therefore by contravariance every function
[ variant - > string ] would be a subtype of this type .
On the other hand , now it becomes impossible to use pprint_e in practice ,
because if I use for example on a list
[ ` Var " x " , ` Cst ( ` I 2 ) ]
which has type
[ > ` Var of string | ` Cst of b ]
then this type is not a subtype of [ < ` Var of string ]
include Cast_Expr
end
module type Make_Cast_Expr = (Dynamic_Type -> Cast_Expr) *) |
7f62af2bf0d10af17e144191a2cd393f31981c290a22fb6a0602a978e929b149 | S8A/htdp-exercises | ex451.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex451) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(define-struct table [length array])
; A Table is a structure:
; (make-table N [N -> Number])
(define table1 (make-table 3 (lambda (i) i)))
(define table2
(make-table 1
(lambda (i)
(if (= i 0)
pi
(error "table2 is not defined for i =!= 0")))))
(define table3 (make-table 8 (lambda (x) (- x 3))))
; Table N -> Number
; looks up the ith value in array of t
(define (table-ref t i)
((table-array t) i))
(define epsilon 0.000000001)
; Table -> N
; finds the smallest index for a root of the table t
; assumes that the table is monotonically increasing
(define (find-linear t)
(local ((define len (table-length t))
(define (find-linear-helper i)
(cond
[(= i len) (error 'find-linear "no root found")]
[else
(if (<= (abs (table-ref t i)) epsilon)
i
(find-linear-helper (add1 i)))])))
(find-linear-helper 0)))
(check-expect (find-linear table1) 0)
(check-error (find-linear table2))
(check-expect (find-linear table3) 3)
; Table -> N
; finds the smallest index for a root of the table t
; assumes that the table is monotonically increasing
; assume (<= (table-ref t left) 0 (table-ref t right))
generative roughly divides the table in half , the root is in one of
; the halves
; termination at some point the interval will be reduced to
a length of 1 , at which point the result is one of the
; interval's boundaries
(define (find-binary t)
(local ((define len (table-length t))
(define (find-binary-helper left right fleft fright)
(cond
[(= (- right left) 1)
(if (<= (abs fleft) (abs fright)) left right)]
[else
(local ((define mid (quotient (+ left right) 2))
(define fmid (table-ref t mid)))
(cond
[(<= fleft 0 fmid)
(find-binary-helper left mid fleft fmid)]
[(<= fmid 0 fright)
(find-binary-helper mid right fmid fright)]))])))
(find-binary-helper 0 (sub1 len)
(table-ref t 0) (table-ref t (sub1 len)))))
(check-within (find-binary table3) 3 epsilon)
| null | https://raw.githubusercontent.com/S8A/htdp-exercises/578e49834a9513f29ef81b7589b28081c5e0b69f/ex451.rkt | racket | about the language level of this file in a form that our tools can easily process.
A Table is a structure:
(make-table N [N -> Number])
Table N -> Number
looks up the ith value in array of t
Table -> N
finds the smallest index for a root of the table t
assumes that the table is monotonically increasing
Table -> N
finds the smallest index for a root of the table t
assumes that the table is monotonically increasing
assume (<= (table-ref t left) 0 (table-ref t right))
the halves
termination at some point the interval will be reduced to
interval's boundaries | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex451) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(define-struct table [length array])
(define table1 (make-table 3 (lambda (i) i)))
(define table2
(make-table 1
(lambda (i)
(if (= i 0)
pi
(error "table2 is not defined for i =!= 0")))))
(define table3 (make-table 8 (lambda (x) (- x 3))))
(define (table-ref t i)
((table-array t) i))
(define epsilon 0.000000001)
(define (find-linear t)
(local ((define len (table-length t))
(define (find-linear-helper i)
(cond
[(= i len) (error 'find-linear "no root found")]
[else
(if (<= (abs (table-ref t i)) epsilon)
i
(find-linear-helper (add1 i)))])))
(find-linear-helper 0)))
(check-expect (find-linear table1) 0)
(check-error (find-linear table2))
(check-expect (find-linear table3) 3)
generative roughly divides the table in half , the root is in one of
a length of 1 , at which point the result is one of the
(define (find-binary t)
(local ((define len (table-length t))
(define (find-binary-helper left right fleft fright)
(cond
[(= (- right left) 1)
(if (<= (abs fleft) (abs fright)) left right)]
[else
(local ((define mid (quotient (+ left right) 2))
(define fmid (table-ref t mid)))
(cond
[(<= fleft 0 fmid)
(find-binary-helper left mid fleft fmid)]
[(<= fmid 0 fright)
(find-binary-helper mid right fmid fright)]))])))
(find-binary-helper 0 (sub1 len)
(table-ref t 0) (table-ref t (sub1 len)))))
(check-within (find-binary table3) 3 epsilon)
|
1e36bf703aad9b8183803a18e9a283a828e780210026f8dc2d126846c3498e40 | Trundle/idris-go | CodegenGo.hs | {-# LANGUAGE OverloadedStrings #-}
module IRTS.CodegenGo (codegenGo) where
import Control.Monad.Trans.State.Strict (State, evalState, gets)
import Data.Char (isAlphaNum, ord)
import Data.Int (Int64)
import qualified Data.Map.Strict as M
import Data.Maybe (fromMaybe, mapMaybe)
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Formatting (int, sformat, stext, string,
(%))
import System.IO (IOMode (..), withFile)
import System.Process (CreateProcess (..),
StdStream (..),
createProcess, proc,
waitForProcess)
import Idris.Core.TT hiding (V, arity)
import IRTS.CodegenCommon
import IRTS.Lang (FDesc (..), FType (..),
LVar (..), PrimFn (..))
import IRTS.Simplified
data Line = Line (Maybe Var) [Var] T.Text
deriving (Show)
data Var = RVal | V Int
deriving (Show, Eq, Ord)
newtype CGState = CGState { requiresTrampoline :: Name -> Bool
}
type CG a = State CGState a
createCgState :: (Name -> Bool) -> CGState
createCgState trampolineLookup = CGState { requiresTrampoline = trampolineLookup }
goPreamble :: [T.Text] -> T.Text
goPreamble imports = T.unlines $
[ "// THIS FILE IS AUTOGENERATED! DO NOT EDIT"
, ""
, "package main"
, ""
, "import ("
, " \"flag\""
, " \"log\""
, " \"math/big\""
, " \"os\""
, " \"strconv\""
, " \"unicode/utf8\""
, " \"unsafe\""
, " \"runtime\""
, " \"runtime/pprof\""
, ")"
, ""
] ++ map ("import " `T.append`) imports ++
[ ""
, "func BigIntFromString(s string) *big.Int {"
, " value, _ := big.NewInt(0).SetString(s, 10)"
, " return value"
, "}"
, ""
, "type Con0 struct {"
, " tag int"
, "}"
, ""
, "type Con1 struct {"
, " tag int"
, " _0 unsafe.Pointer"
, "}"
, ""
, "type Con2 struct {"
, " tag int"
, " _0, _1 unsafe.Pointer"
, "}"
, ""
, "type Con3 struct {"
, " tag int"
, " _0, _1, _2 unsafe.Pointer"
, "}"
, ""
, "type Con4 struct {"
, " tag int"
, " _0, _1, _2, _3 unsafe.Pointer"
, "}"
, ""
, "type Con5 struct {"
, " tag int"
, " _0, _1, _2, _3, _4 unsafe.Pointer"
, "}"
, ""
, "type Con6 struct {"
, " tag int"
, " _0, _1, _2, _3, _4, _5 unsafe.Pointer"
, "}"
, ""
, "var nullCons [256]Con0"
, ""
, "func GetTag(con unsafe.Pointer) int {"
, " return (*Con0)(con).tag"
, "}"
, ""
, "func MkCon0(tag int) unsafe.Pointer {"
, " return unsafe.Pointer(&Con0{tag})"
, "}"
, ""
, "func MkCon1(tag int, _0 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con1{tag, _0})"
, "}"
, ""
, "func MkCon2(tag int, _0, _1 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con2{tag, _0, _1})"
, "}"
, ""
, "func MkCon3(tag int, _0, _1, _2 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con3{tag, _0, _1, _2})"
, "}"
, ""
, "func MkCon4(tag int, _0, _1, _2, _3 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con4{tag, _0, _1, _2, _3})"
, "}"
, ""
, "func MkCon5(tag int, _0, _1, _2, _3, _4 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con5{tag, _0, _1, _2, _3, _4})"
, "}"
, ""
, "func MkCon6(tag int, _0, _1, _2, _3, _4, _5 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con6{tag, _0, _1, _2, _3, _4, _5})"
, "}"
, ""
, "func MkIntFromBool(value bool) unsafe.Pointer {"
, " if value {"
, " return intOne"
, " } else {"
, " return intZero"
, " }"
, "}"
, ""
, "func MkInt(value int64) unsafe.Pointer {"
, " var retVal *int64 = new(int64)"
, " *retVal = value"
, " return unsafe.Pointer(retVal)"
, "}"
, ""
, "func MkRune(value rune) unsafe.Pointer {"
, " var retVal *rune = new(rune)"
, " *retVal = value"
, " return unsafe.Pointer(retVal)"
, "}"
, ""
, "func MkString(value string) unsafe.Pointer {"
, " var retVal *string = new(string)"
, " *retVal = value"
, " return unsafe.Pointer(retVal)"
, "}"
, ""
, "func RuneAtIndex(s string, index int) rune {"
, " if index == 0 {"
, " chr, _ := utf8.DecodeRuneInString(s)"
, " return chr"
, " } else {"
, " i := 0"
, " for _, chr := range s {"
, " if i == index {"
, " return chr"
, " }"
, " i++"
, " }"
, " }"
, "panic(\"Illegal index: \" + string(index))"
, "}"
, ""
, "func StrTail(s string) string {"
, " _, offset := utf8.DecodeRuneInString(s)"
, " return s[offset:]"
, "}"
, ""
, "func WriteStr(str unsafe.Pointer) unsafe.Pointer {"
, " _, err := os.Stdout.WriteString(*(*string)(str))"
, " if (err != nil) {"
, " return intZero"
, " } else {"
, " return intMinusOne"
, " }"
, "}"
, ""
, "func Go(action unsafe.Pointer) {"
, " var th Thunk"
, " go Trampoline(MkThunk2(&th, APPLY0, action, nil))"
, "}"
, ""
, "func MkMaybe(value unsafe.Pointer, present bool) unsafe.Pointer {"
, " if present {"
, " return MkCon1(1, value)"
, " } else {"
, " return unsafe.Pointer(&nullCons[0])"
, " }"
, "}"
, ""
, "type Thunk0 func(*Thunk) unsafe.Pointer"
, "type Thunk1 func(*Thunk, unsafe.Pointer) unsafe.Pointer"
, "type Thunk2 func(*Thunk, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, "type Thunk3 func(*Thunk, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, "type Thunk4 func(*Thunk, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, "type Thunk5 func(*Thunk, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, "type Thunk6 func(*Thunk, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, "type Thunk7 func(*Thunk, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, ""
, "type Thunk struct {"
, " arity int8"
, " f0 Thunk0"
, " f1 Thunk1"
, " f2 Thunk2"
, " f3 Thunk3"
, " f4 Thunk4"
, " f5 Thunk5"
, " f6 Thunk6"
, " f7 Thunk7"
, " _0, _1, _2, _3, _4, _5, _6 unsafe.Pointer"
, "}"
, ""
, "func (t *Thunk) Run() unsafe.Pointer {"
, " switch t.arity {"
, " case 0:"
, " return t.f0(t)"
, " case 1:"
, " return t.f1(t, t._0)"
, " case 2:"
, " return t.f2(t, t._0, t._1)"
, " case 3:"
, " return t.f3(t, t._0, t._1, t._2)"
, " case 4:"
, " return t.f4(t, t._0, t._1, t._2, t._3)"
, " case 5:"
, " return t.f5(t, t._0, t._1, t._2, t._3, t._4,)"
, " case 6:"
, " return t.f6(t, t._0, t._1, t._2, t._3, t._4, t._5)"
, " case 7:"
, " return t.f7(t, t._0, t._1, t._2, t._3, t._4, t._5, t._6)"
, " }"
, " panic(\"Invalid arity: \" + string(t.arity))"
, "}"
, ""
, "func MkThunk0(th *Thunk, f Thunk0) *Thunk {"
, " th.arity = 0"
, " th.f0 = f"
, " return th"
, "}"
, ""
, "func MkThunk1(th *Thunk, f Thunk1, _0 unsafe.Pointer) *Thunk {"
, " th.arity = 1"
, " th.f1 = f"
, " th._0 = _0"
, " return th"
, "}"
, ""
, "func MkThunk2(th *Thunk, f Thunk2, _0, _1 unsafe.Pointer) *Thunk {"
, " th.arity = 2"
, " th.f2 = f"
, " th._0 = _0"
, " th._1 = _1"
, " return th"
, "}"
, ""
, "func MkThunk3(th *Thunk, f Thunk3, _0, _1, _2 unsafe.Pointer) *Thunk {"
, " th.arity = 3"
, " th.f3 = f"
, " th._0 = _0"
, " th._1 = _1"
, " th._2 = _2"
, " return th"
, "}"
, ""
, "func MkThunk4(th *Thunk, f Thunk4, _0, _1, _2, _3 unsafe.Pointer) *Thunk {"
, " th.arity = 4"
, " th.f4 = f"
, " th._0 = _0"
, " th._1 = _1"
, " th._2 = _2"
, " th._3 = _3"
, " return th"
, "}"
, ""
, "func MkThunk5(th *Thunk, f Thunk5, _0, _1, _2, _3, _4 unsafe.Pointer) *Thunk {"
, " th.arity = 5"
, " th.f5 = f"
, " th._0 = _0"
, " th._1 = _1"
, " th._2 = _2"
, " th._3 = _3"
, " th._4 = _4"
, " return th"
, "}"
, ""
, "func MkThunk6(th *Thunk, f Thunk6, _0, _1, _2, _3, _4, _5 unsafe.Pointer) *Thunk {"
, " th.arity = 6"
, " th.f6 = f"
, " th._0 = _0"
, " th._1 = _1"
, " th._2 = _2"
, " th._3 = _3"
, " th._4 = _4"
, " th._5 = _5"
, " return th"
, "}"
, ""
, "func MkThunk7(th *Thunk, f Thunk7, _0, _1, _2, _3, _4, _5, _6 unsafe.Pointer) *Thunk {"
, " th.arity = 7"
, " th.f7 = f"
, " th._0 = _0"
, " th._1 = _1"
, " th._2 = _2"
, " th._3 = _3"
, " th._4 = _4"
, " th._5 = _5"
, " th._6 = _6"
, " return th"
, "}"
, ""
, "func Trampoline(th *Thunk) unsafe.Pointer {"
, " var result unsafe.Pointer"
, " for th.arity >= 0 {"
, " result = th.Run()"
, " }"
, " return result"
, "}"
, ""
, "func initNullCons() {"
, " for i := 0; i < 256; i++ {"
, " nullCons[i] = Con0{i}"
, " }"
, "}"
, ""
, "var bigZero *big.Int = big.NewInt(0)"
, "var bigOne *big.Int = big.NewInt(1)"
, "var intMinusOne unsafe.Pointer = MkInt(-1)"
, "var intZero unsafe.Pointer = MkInt(0)"
, "var intOne unsafe.Pointer = MkInt(1)"
, ""
-- This solely exists so the strconv import is used even if the program
-- doesn't use the LIntStr primitive.
, "func __useStrconvImport() string {"
, " return strconv.Itoa(-42)"
, "}"
, ""
]
mangleName :: Name -> T.Text
mangleName name = T.concat $ map mangleChar (showCG name)
where
mangleChar x
| isAlphaNum x = T.singleton x
| otherwise = sformat ("_" % int % "_") (ord x)
nameToGo :: Name -> T.Text
nameToGo (MN i n) | T.all (\x -> isAlphaNum x || x == '_') n =
n `T.append` T.pack (show i)
nameToGo n = mangleName n
lVarToGo :: LVar -> T.Text
lVarToGo (Loc i) = sformat ("_" % int) i
lVarToGo (Glob n) = nameToGo n
lVarToVar :: LVar -> Var
lVarToVar (Loc i) = V i
lVarToVar v = error $ "LVar not convertible to var: " ++ show v
varToGo :: Var -> T.Text
varToGo RVal = "__rval"
varToGo (V i) = sformat ("_" % int) i
assign :: Var -> T.Text -> T.Text
assign RVal x = "__thunk.arity = -1; " `T.append` varToGo RVal `T.append` " = " `T.append` x
assign var x = varToGo var `T.append` " = " `T.append` x
exprToGo :: Name -> Var -> SExp -> CG [Line]
exprToGo f var SNothing = return . return $ Line (Just var) [] (assign var "nil")
exprToGo _ var (SConst i@BI{})
| i == BI 0 = return [ Line (Just var) [] (assign var "unsafe.Pointer(bigZero)") ]
| i == BI 1 = return [ Line (Just var) [] (assign var "unsafe.Pointer(bigOne)") ]
| otherwise = return
[ Line (Just var) [] (assign var (sformat ("unsafe.Pointer(" % stext % ")") (constToGo i))) ]
exprToGo f var (SConst c@Ch{}) = return . return $ mkVal var c (sformat ("MkRune(" % stext % ")"))
exprToGo _ var (SConst i@I{})
| i == I (-1) = return . return $ Line (Just var) [] (assign var "intMinusOne")
| i == I 0 = return . return $ Line (Just var) [] (assign var "intZero")
| i == I 1 = return . return $ Line (Just var) [] (assign var "intOne")
| otherwise = return . return $ mkVal var i (sformat ("MkInt(" % stext % ")"))
exprToGo f var (SConst s@Str{}) = return . return $ mkVal var s (sformat ("MkString(" % stext % ")"))
exprToGo _ (V i) (SV (Loc j))
| i == j = return []
exprToGo _ var (SV (Loc i)) = return [ Line (Just var) [V i] (assign var (lVarToGo (Loc i))) ]
exprToGo f var (SLet (Loc i) e sc) = do
a <- exprToGo f (V i) e
b <- exprToGo f var sc
return $ a ++ b
exprToGo f var (SApp True name vs)
-- self call, simply goto to the entry again
| f == name = return $
[ Line (Just (V i)) [ V a ] (sformat ("_" % int % " = _" % int) i a) | (i, Loc a) <- zip [0..] vs, i /= a ] ++
[ Line Nothing [ ] "goto entry" ]
exprToGo f RVal (SApp True name vs) = do
trampolined <- fmap ($ name) (gets requiresTrampoline)
let args = T.intercalate ", " ("__thunk" : map lVarToGo vs)
code = if trampolined
then mkThunk name vs
else assign RVal (nameToGo name `T.append` "(" `T.append` args `T.append` ")")
return [ Line (Just RVal) [ V i | (Loc i) <- vs ] code ]
exprToGo _ var (SApp True _ _) = error $ "Tail-recursive call, but should be assigned to " ++ show var
exprToGo _ var (SApp False name vs) = do
-- Not a tail call, but we might call a function that needs to be trampolined
trampolined <- fmap ($ name) (gets requiresTrampoline)
let code = if trampolined
then assign var (sformat ("Trampoline(" % stext % ")") (mkThunk name vs))
else assign var (sformat (stext % "(" % stext % ")") (nameToGo name) args)
return [ Line (Just var) [ V i | (Loc i) <- vs ] code ]
where
args = T.intercalate ", " ("__thunk" : map lVarToGo vs)
exprToGo f var (SCase up (Loc l) alts)
| isBigIntConst alts = constBigIntCase f var (V l) (dedupDefaults alts)
| isConst alts = constCase f var (V l) alts
| otherwise = conCase f var (V l) alts
where
isBigIntConst (SConstCase (BI _) _ : _) = True
isBigIntConst _ = False
isConst [] = False
isConst (SConstCase _ _ : _) = True
isConst (SConCase{} : _) = False
isConst (_ : _) = False
dedupDefaults (d@SDefaultCase{} : [SDefaultCase{}]) = [d]
dedupDefaults (x : xs) = x : dedupDefaults xs
dedupDefaults [] = []
exprToGo f var (SChkCase (Loc l) alts) = conCase f var (V l) alts
exprToGo f var (SCon _ tag name args) = return . return $
Line (Just var) [ V i | (Loc i) <- args] (comment `T.append` assign var mkCon)
where
comment = "// " `T.append` (T.pack . show) name `T.append` "\n"
mkCon
| tag < 256 && null args = sformat ("unsafe.Pointer(&nullCons[" % int % "])") tag
| otherwise =
let argsCode = case args of
[] -> T.empty
_ -> ", " `T.append` T.intercalate ", " (map lVarToGo args)
in sformat ("MkCon" % int % "(" % int % stext % ")") (length args) tag argsCode
exprToGo f var (SOp prim args) = return . return $ primToGo var prim args
exprToGo f var (SForeign ty (FApp callType callTypeArgs) args) =
let call = toCall callType callTypeArgs
in return . return $ Line Nothing [] (retVal (fDescToGoType ty) call)
where
convertedArgs = [ toArg (fDescToGoType t) (lVarToGo l) | (t, l) <- args]
toCall ct [ FStr fname ]
| ct == sUN "Function" = T.pack fname `T.append` "(" `T.append` T.intercalate ", " convertedArgs `T.append` ")"
toCall ct [ FStr _, _, FStr methodName ]
| ct == sUN "Method" =
let obj : args = convertedArgs in
sformat (stext % "." % string % "(" % stext % ")")
obj methodName (T.intercalate ", " args)
toCall ct a = error $ show ct ++ " " ++ show a
toArg (GoInterface name) x = sformat ("(*(*" % string % ")(" % stext % "))") name x
toArg GoByte x = "byte(*(*rune)(" `T.append` x `T.append` "))"
toArg GoString x = "*(*string)(" `T.append` x `T.append` ")"
toArg GoAny x = x
toArg f _ = error $ "Not implemented yet: toArg " ++ show f
ptrFromRef x = "unsafe.Pointer(&" `T.append` x `T.append` ")"
toPtr (GoInterface _) x = ptrFromRef x
toPtr GoInt x = ptrFromRef x
toPtr GoString x = ptrFromRef x
toPtr (GoNilable valueType) x =
sformat ("MkMaybe(" % stext % ", " % stext % " != nil)" )
(toPtr valueType x) x
retRef ty x =
sformat ("{ __tmp := " % stext % "\n " % stext % " = " % stext % " }")
x (varToGo var) (toPtr ty "__tmp")
retVal GoUnit x = x
retVal GoString x = retRef GoString x
retVal (i@GoInterface{}) x = retRef i x
retVal (n@GoNilable{}) x = retRef n x
retVal (GoMultiVal varTypes) x =
XXX assumes exactly two vars
sformat ("{ " % stext % " := " % stext % "\n " % stext % " = MkCon" % int % "(0, " % stext % ") }")
(T.intercalate ", " [ sformat ("__tmp" % int) i | i <- [1..length varTypes]])
x
(varToGo var)
(length varTypes)
(T.intercalate ", " [ toPtr varTy (sformat ("__tmp" % int) i) | (i, varTy) <- zip [1 :: Int ..] varTypes ])
retVal (GoPtr _) x = sformat (stext % " = unsafe.Pointer(" % stext % ")") (varToGo var) x
retVal t _ = error $ "Not implemented yet: retVal " ++ show t
exprToGo _ _ expr = error $ "Not implemented yet: " ++ show expr
data GoType = GoByte
| GoInt
| GoString
| GoNilable GoType
| GoInterface String
| GoUnit
| GoMultiVal [GoType]
| GoPtr GoType
| GoAny
deriving (Show)
fDescToGoType :: FDesc -> GoType
fDescToGoType (FCon c)
| c == sUN "Go_Byte" = GoByte
| c == sUN "Go_Int" = GoInt
| c == sUN "Go_Str" = GoString
| c == sUN "Go_Unit" = GoUnit
fDescToGoType (FApp c [ FStr name ])
| c == sUN "Go_Interface" = GoInterface name
fDescToGoType (FApp c [ _ ])
| c == sUN "Go_Any" = GoAny
fDescToGoType (FApp c [ _, ty ])
| c == sUN "Go_Nilable" = GoNilable (fDescToGoType ty)
fDescToGoType (FApp c [ _, _, FApp c2 [ _, _, a, b ] ])
| c == sUN "Go_MultiVal" && c2 == sUN "MkPair" = GoMultiVal [ fDescToGoType a, fDescToGoType b ]
fDescToGoType (FApp c [ _, ty ])
| c == sUN "Go_Ptr" = GoPtr (fDescToGoType ty)
fDescToGoType f = error $ "Not implemented yet: fDescToGoType " ++ show f
toFunType :: FDesc -> FType
toFunType (FApp c [ _, _ ])
| c == sUN "Go_FnBase" = FFunction
| c == sUN "Go_FnIO" = FFunctionIO
toFunType desc = error $ "Not implemented yet: toFunType " ++ show desc
mkThunk :: Name -> [LVar] -> T.Text
mkThunk f [] =
sformat ("MkThunk0(__thunk, " % stext % ")") (nameToGo f)
mkThunk f args =
sformat ("MkThunk" % int % "(__thunk, " % stext % ", " % stext % ")")
(length args) (nameToGo f) (T.intercalate "," (map lVarToGo args))
mkVal :: Var -> Const -> (T.Text -> T.Text) -> Line
mkVal var c factory =
Line (Just var) [] (assign var (factory (constToGo c)))
constToGo :: Const -> T.Text
constToGo (BI i)
| i == 0 = "bigZero"
| i == 1 = "bigOne"
| i < toInteger (maxBound :: Int64) && i > toInteger (minBound :: Int64) =
"big.NewInt(" `T.append` T.pack (show i) `T.append` ")"
| otherwise =
"BigIntFromString(\"" `T.append` T.pack (show i) `T.append` "\")"
constToGo (Ch '\DEL') = "'\\x7F'"
constToGo (Ch '\SO') = "'\\x0e'"
constToGo (Str s) = T.pack (show s)
constToGo constVal = T.pack (show constVal)
Special case for big . Ints , as we need to compare with there
constBigIntCase :: Name -> Var -> Var -> [SAlt] -> CG [Line]
constBigIntCase f var v alts = do
cases <- traverse case_ alts
return $
[ Line Nothing [] "switch {" ] ++ concat cases ++ [ Line Nothing [] "}" ]
where
valueCmp other = sformat ("(*big.Int)(" % stext % ").Cmp(" % stext % ") == 0") (varToGo v) (constToGo other)
case_ (SConstCase constVal expr) = do
code <- exprToGo f var expr
return $ Line Nothing [v] (sformat ("case " % stext % ":") (valueCmp constVal)) : code
case_ (SDefaultCase expr) = do
code <- exprToGo f var expr
return $ Line Nothing [] "default:" : code
case_ c = error $ "Unexpected big int case: " ++ show c
constCase :: Name -> Var -> Var -> [SAlt] -> CG [Line]
constCase f var v alts = do
cases <- traverse case_ alts
return $ [ Line Nothing [v] (T.concat [ "switch " , castValue alts , " {" ])
] ++ concat cases ++ [ Line Nothing [] "}" ]
where
castValue (SConstCase (Ch _) _ : _) = "*(*rune)(" `T.append` varToGo v `T.append` ")"
castValue (SConstCase (I _) _ : _) = "*(*int64)(" `T.append` varToGo v `T.append` ")"
castValue (SConstCase constVal _ : _) = error $ "Not implemented: cast for " ++ show constVal
castValue _ = error "First alt not a SConstCase!"
case_ (SDefaultCase expr) = do
code <- exprToGo f var expr
return $ Line Nothing [] "default:" : code
case_ (SConstCase constVal expr) = do
code <- exprToGo f var expr
return $
Line Nothing [] (T.concat [ "case " , constToGo constVal , ":" ]) : code
case_ c = error $ "Unexpected const case: " ++ show c
conCase :: Name -> Var -> Var -> [SAlt] -> CG [Line]
conCase f var v [ SDefaultCase expr ] = exprToGo f var expr
conCase f var v alts = do
cases <- traverse case_ alts
return $ [ Line Nothing [v] (T.concat [ "switch GetTag(" , varToGo v , ") {" ])
] ++ concat cases ++ [ Line Nothing [] "}" ]
where
project left i =
Line (Just left) [v]
(assign left (sformat ("(*Con" % int % ")(" % stext % ")._" % int) (i+1) (varToGo v) i))
case_ (SConCase base tag name args expr) = do
let locals = [base .. base + length args - 1]
projections = [ project (V i) (i - base) | i <- locals ]
code <- exprToGo f var expr
return $ [ Line Nothing [] (sformat ("case " % int % ":\n // Projection of " % stext) tag (nameToGo name))
] ++ projections ++ code
case_ (SDefaultCase expr) = do
code <- exprToGo f var expr
return $ Line Nothing [] "default:" : code
case_ c = error $ "Unexpected con case: " ++ show c
primToGo :: Var -> PrimFn -> [LVar] -> Line
primToGo var (LChInt ITNative) [ch] =
let code = "MkInt(int64(*(*rune)(" `T.append` lVarToGo ch `T.append` ")))"
in Line (Just var) [ lVarToVar ch ] (assign var code)
primToGo var (LEq (ATInt ITChar)) [left, right] =
let code = T.concat [ "MkIntFromBool(*(*rune)("
, lVarToGo left
, ") == *(*rune)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var (LEq (ATInt ITNative)) [left, right] =
let code = T.concat [ "MkIntFromBool(*(*int64)("
, lVarToGo left
, ") == *(*int64)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var (LEq (ATInt ITBig)) [left, right] =
let code = T.concat [ "MkIntFromBool((*big.Int)("
, lVarToGo left
, ").Cmp((*big.Int)("
, lVarToGo right
, ")) == 0)"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var (LSLt (ATInt ITChar)) [left, right] =
let code = T.concat [ "MkIntFromBool(*(*rune)("
, lVarToGo left
, ") < *(*rune)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var (LSLt (ATInt ITNative)) [left, right] =
let code = T.concat [ varToGo var
, " = MkIntFromBool(*(*int64)("
, lVarToGo left
, ") < *(*int64)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] code
primToGo var (LSLt (ATInt ITBig)) [left, right] =
let code = T.concat [ "MkIntFromBool((*big.Int)("
, lVarToGo left
, ").Cmp((*big.Int)("
, lVarToGo right
, ")) < 0)"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var (LMinus (ATInt ITNative)) [left, right] = nativeIntBinOp var left right "-"
primToGo var (LMinus (ATInt ITBig)) [left, right] = bigIntBigOp var left right "Sub"
primToGo var (LPlus (ATInt ITNative)) [left, right] = nativeIntBinOp var left right "+"
primToGo var (LPlus (ATInt ITBig)) [left, right] = bigIntBigOp var left right "Add"
primToGo var (LSExt ITNative ITBig) [i] =
let code = "unsafe.Pointer(big.NewInt(*(*int64)(" `T.append` lVarToGo i `T.append` ")))"
in Line (Just var) [ lVarToVar i ] (assign var code)
primToGo var (LIntStr ITBig) [i] =
let code = "MkString((*big.Int)(" `T.append` lVarToGo i `T.append` ").String())"
in Line (Just var) [ lVarToVar i ] (assign var code)
primToGo var (LIntStr ITNative) [i] =
let code = "MkString(strconv.FormatInt(*(*int64)(" `T.append` lVarToGo i `T.append` "), 10))"
in Line (Just var) [ lVarToVar i ] (assign var code)
primToGo var LStrEq [left, right] =
let code = T.concat [ "MkIntFromBool(*(*string)("
, lVarToGo left
, ") == *(*string)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var LStrCons [c, s] =
let code = T.concat [ "MkString(string(*(*rune)("
, lVarToGo c
, ")) + *(*string)("
, lVarToGo s
, "))"
]
in Line (Just var) [ lVarToVar c, lVarToVar s ] (assign var code)
primToGo var LStrHead [s] =
let code = "MkRune(RuneAtIndex(*(*string)(" `T.append` lVarToGo s `T.append` "), 0))"
in Line (Just var) [ lVarToVar s ] (assign var code)
primToGo var LStrTail [s] =
let code = "MkString(StrTail(*(*string)(" `T.append` lVarToGo s `T.append` ")))"
in Line (Just var) [ lVarToVar s ] (assign var code)
primToGo var LStrConcat [left, right] =
let code = T.concat [ "MkString(*(*string)("
, lVarToGo left
, ") + *(*string)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var LWriteStr [world, s] =
let code = "WriteStr(" `T.append` lVarToGo s `T.append` ")"
in Line (Just var) [ lVarToVar world, lVarToVar s ] (assign var code)
primToGo var (LTimes (ATInt ITNative)) [left, right] = nativeIntBinOp var left right "*"
primToGo var (LTimes (ATInt ITBig)) [left, right] = bigIntBigOp var left right "Mul"
primToGo _ fn _ = Line Nothing [] (sformat ("panic(\"Unimplemented PrimFn: " % string % "\")") (show fn))
bigIntBigOp :: Var -> LVar -> LVar -> T.Text -> Line
bigIntBigOp var left right op =
let code = T.concat [ "unsafe.Pointer(new(big.Int)."
, op
, "((*big.Int)("
, lVarToGo left
, "), (*big.Int)("
, lVarToGo right
, ")))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
nativeIntBinOp :: Var -> LVar -> LVar -> T.Text -> Line
nativeIntBinOp var left right op =
let code = T.concat [ "MkInt(*(*int64)("
, lVarToGo left
, ") "
, op
, " *(*int64)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
data TailCall = Self
| Other
deriving (Eq, Show)
containsTailCall :: Name -> SExp -> [TailCall]
containsTailCall self (SApp True n _) = if self == n
then [ Self ]
else [ Other ]
containsTailCall self (SLet _ a b) = containsTailCall self a ++ containsTailCall self b
containsTailCall self (SUpdate _ e) = containsTailCall self e
containsTailCall self (SCase _ _ alts) = concatMap (altContainsTailCall self) alts
containsTailCall self (SChkCase _ alts) = concatMap (altContainsTailCall self) alts
containsTailCall _ _ = []
altContainsTailCall :: Name -> SAlt -> [TailCall]
altContainsTailCall self (SConCase _ _ _ _ e) = containsTailCall self e
altContainsTailCall self (SConstCase _ e) = containsTailCall self e
altContainsTailCall self (SDefaultCase e) = containsTailCall self e
extractUsedVars :: [Line] -> S.Set Var
extractUsedVars lines = S.fromList (concat [used | Line _ used _ <- lines])
filterUnusedLines :: [Line] -> [Line]
filterUnusedLines lines =
let usedVars = extractUsedVars lines
requiredLines = mapMaybe (required usedVars) lines
in if length lines /= length requiredLines
-- the filtered lines might have made some other lines obsolete, filter again
then filterUnusedLines requiredLines
else lines
where
required _ l@(Line Nothing _ _) = Just l
required _ l@(Line (Just RVal) _ _) = Just l
required usedVars l@(Line (Just v) _ _) =
if S.member v usedVars
then Just l
else Nothing
funToGo :: (Name, SDecl, [TailCall]) -> CG T.Text
funToGo (name, SFun _ args locs expr, tailCalls) = do
bodyLines <- filterUnusedLines <$> exprToGo name RVal expr
let usedVars = extractUsedVars bodyLines
pure . T.concat $
[ "// "
, T.pack $ show name
, "\nfunc "
, nameToGo name
, "("
, "__thunk *Thunk" `T.append` if (not . null) args then ", " else T.empty
, T.intercalate ", " [ sformat ("_" % int % " unsafe.Pointer") i | i <- [0..length args-1]]
, ") unsafe.Pointer {\n var __rval unsafe.Pointer\n"
, reserve usedVars locs
, tailCallEntry
, T.unlines [ line | Line _ _ line <- bodyLines ]
, "return __rval\n}\n\n"
]
where
tailCallEntry = if Self `elem` tailCalls
then "entry:"
else T.empty
loc usedVars i =
let i' = length args + i in
if S.member (V i') usedVars
then Just $ sformat ("_" % int) i'
else Nothing
reserve usedVars locs = case mapMaybe (loc usedVars) [0..locs] of
[] -> T.empty
usedLocs -> " var " `T.append` T.intercalate ", " usedLocs `T.append` " unsafe.Pointer\n"
genMain :: T.Text
genMain = T.unlines
[ "var cpuprofile = flag.String(\"cpuprofile\", \"\", \"write cpu profile `file`\")"
, "var memprofile = flag.String(\"memprofile\", \"\", \"write memory profile to `file`\")"
, ""
, "func main() {"
, " flag.Parse()"
, " initNullCons()"
, " if *cpuprofile != \"\" {"
, " f, err := os.Create(*cpuprofile)"
, " if err != nil {"
, " log.Fatal(\"Could not create CPU profile: \", err)"
, " }"
, " if err := pprof.StartCPUProfile(f); err != nil {"
, " log.Fatal(\"Could not start CPU profile: \", err)"
, " }"
, " defer pprof.StopCPUProfile()"
, " }"
, " var thunk Thunk"
, " runMain0(&thunk)"
, " if *memprofile != \"\" {"
, " f, err := os.Create(*memprofile)"
, " if err != nil {"
, " log.Fatal(\"Could not create memory profile: \", err)"
, " }"
, " runtime.GC()"
, " if err := pprof.WriteHeapProfile(f); err != nil {"
, " log.Fatal(\"Could not write memory profile: \", err)"
, " }"
, " f.Close()"
, " }"
, "}"
]
codegenGo :: CodeGenerator
codegenGo ci = do
let funs = [ (name, fun, containsTailCall name expr)
| (name, fun@(SFun _ _ _ expr)) <- simpleDecls ci
]
needsTrampolineByName = M.fromList [ (name, Other `elem` tailCalls)
| (name, _, tailCalls) <- funs
]
trampolineLookup = fromMaybe False . (`M.lookup` needsTrampolineByName)
funCodes = evalState (traverse funToGo funs) (createCgState trampolineLookup)
code = T.concat [ goPreamble (map (T.pack . show) (includes ci))
, T.concat funCodes
, genMain
]
withFile (outputFile ci) WriteMode $ \hOut -> do
(Just hIn, _, _, p) <-
createProcess (proc "gofmt" [ "-s" ]){ std_in = CreatePipe, std_out = UseHandle hOut }
TIO.hPutStr hIn code
_ <- waitForProcess p
return ()
| null | https://raw.githubusercontent.com/Trundle/idris-go/9510b7b41b7fec03ca8d71f7f6b160e259da829c/src/IRTS/CodegenGo.hs | haskell | # LANGUAGE OverloadedStrings #
This solely exists so the strconv import is used even if the program
doesn't use the LIntStr primitive.
self call, simply goto to the entry again
Not a tail call, but we might call a function that needs to be trampolined
the filtered lines might have made some other lines obsolete, filter again |
module IRTS.CodegenGo (codegenGo) where
import Control.Monad.Trans.State.Strict (State, evalState, gets)
import Data.Char (isAlphaNum, ord)
import Data.Int (Int64)
import qualified Data.Map.Strict as M
import Data.Maybe (fromMaybe, mapMaybe)
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Formatting (int, sformat, stext, string,
(%))
import System.IO (IOMode (..), withFile)
import System.Process (CreateProcess (..),
StdStream (..),
createProcess, proc,
waitForProcess)
import Idris.Core.TT hiding (V, arity)
import IRTS.CodegenCommon
import IRTS.Lang (FDesc (..), FType (..),
LVar (..), PrimFn (..))
import IRTS.Simplified
data Line = Line (Maybe Var) [Var] T.Text
deriving (Show)
data Var = RVal | V Int
deriving (Show, Eq, Ord)
newtype CGState = CGState { requiresTrampoline :: Name -> Bool
}
type CG a = State CGState a
createCgState :: (Name -> Bool) -> CGState
createCgState trampolineLookup = CGState { requiresTrampoline = trampolineLookup }
goPreamble :: [T.Text] -> T.Text
goPreamble imports = T.unlines $
[ "// THIS FILE IS AUTOGENERATED! DO NOT EDIT"
, ""
, "package main"
, ""
, "import ("
, " \"flag\""
, " \"log\""
, " \"math/big\""
, " \"os\""
, " \"strconv\""
, " \"unicode/utf8\""
, " \"unsafe\""
, " \"runtime\""
, " \"runtime/pprof\""
, ")"
, ""
] ++ map ("import " `T.append`) imports ++
[ ""
, "func BigIntFromString(s string) *big.Int {"
, " value, _ := big.NewInt(0).SetString(s, 10)"
, " return value"
, "}"
, ""
, "type Con0 struct {"
, " tag int"
, "}"
, ""
, "type Con1 struct {"
, " tag int"
, " _0 unsafe.Pointer"
, "}"
, ""
, "type Con2 struct {"
, " tag int"
, " _0, _1 unsafe.Pointer"
, "}"
, ""
, "type Con3 struct {"
, " tag int"
, " _0, _1, _2 unsafe.Pointer"
, "}"
, ""
, "type Con4 struct {"
, " tag int"
, " _0, _1, _2, _3 unsafe.Pointer"
, "}"
, ""
, "type Con5 struct {"
, " tag int"
, " _0, _1, _2, _3, _4 unsafe.Pointer"
, "}"
, ""
, "type Con6 struct {"
, " tag int"
, " _0, _1, _2, _3, _4, _5 unsafe.Pointer"
, "}"
, ""
, "var nullCons [256]Con0"
, ""
, "func GetTag(con unsafe.Pointer) int {"
, " return (*Con0)(con).tag"
, "}"
, ""
, "func MkCon0(tag int) unsafe.Pointer {"
, " return unsafe.Pointer(&Con0{tag})"
, "}"
, ""
, "func MkCon1(tag int, _0 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con1{tag, _0})"
, "}"
, ""
, "func MkCon2(tag int, _0, _1 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con2{tag, _0, _1})"
, "}"
, ""
, "func MkCon3(tag int, _0, _1, _2 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con3{tag, _0, _1, _2})"
, "}"
, ""
, "func MkCon4(tag int, _0, _1, _2, _3 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con4{tag, _0, _1, _2, _3})"
, "}"
, ""
, "func MkCon5(tag int, _0, _1, _2, _3, _4 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con5{tag, _0, _1, _2, _3, _4})"
, "}"
, ""
, "func MkCon6(tag int, _0, _1, _2, _3, _4, _5 unsafe.Pointer) unsafe.Pointer {"
, " return unsafe.Pointer(&Con6{tag, _0, _1, _2, _3, _4, _5})"
, "}"
, ""
, "func MkIntFromBool(value bool) unsafe.Pointer {"
, " if value {"
, " return intOne"
, " } else {"
, " return intZero"
, " }"
, "}"
, ""
, "func MkInt(value int64) unsafe.Pointer {"
, " var retVal *int64 = new(int64)"
, " *retVal = value"
, " return unsafe.Pointer(retVal)"
, "}"
, ""
, "func MkRune(value rune) unsafe.Pointer {"
, " var retVal *rune = new(rune)"
, " *retVal = value"
, " return unsafe.Pointer(retVal)"
, "}"
, ""
, "func MkString(value string) unsafe.Pointer {"
, " var retVal *string = new(string)"
, " *retVal = value"
, " return unsafe.Pointer(retVal)"
, "}"
, ""
, "func RuneAtIndex(s string, index int) rune {"
, " if index == 0 {"
, " chr, _ := utf8.DecodeRuneInString(s)"
, " return chr"
, " } else {"
, " i := 0"
, " for _, chr := range s {"
, " if i == index {"
, " return chr"
, " }"
, " i++"
, " }"
, " }"
, "panic(\"Illegal index: \" + string(index))"
, "}"
, ""
, "func StrTail(s string) string {"
, " _, offset := utf8.DecodeRuneInString(s)"
, " return s[offset:]"
, "}"
, ""
, "func WriteStr(str unsafe.Pointer) unsafe.Pointer {"
, " _, err := os.Stdout.WriteString(*(*string)(str))"
, " if (err != nil) {"
, " return intZero"
, " } else {"
, " return intMinusOne"
, " }"
, "}"
, ""
, "func Go(action unsafe.Pointer) {"
, " var th Thunk"
, " go Trampoline(MkThunk2(&th, APPLY0, action, nil))"
, "}"
, ""
, "func MkMaybe(value unsafe.Pointer, present bool) unsafe.Pointer {"
, " if present {"
, " return MkCon1(1, value)"
, " } else {"
, " return unsafe.Pointer(&nullCons[0])"
, " }"
, "}"
, ""
, "type Thunk0 func(*Thunk) unsafe.Pointer"
, "type Thunk1 func(*Thunk, unsafe.Pointer) unsafe.Pointer"
, "type Thunk2 func(*Thunk, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, "type Thunk3 func(*Thunk, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, "type Thunk4 func(*Thunk, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, "type Thunk5 func(*Thunk, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, "type Thunk6 func(*Thunk, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, "type Thunk7 func(*Thunk, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer, unsafe.Pointer) unsafe.Pointer"
, ""
, "type Thunk struct {"
, " arity int8"
, " f0 Thunk0"
, " f1 Thunk1"
, " f2 Thunk2"
, " f3 Thunk3"
, " f4 Thunk4"
, " f5 Thunk5"
, " f6 Thunk6"
, " f7 Thunk7"
, " _0, _1, _2, _3, _4, _5, _6 unsafe.Pointer"
, "}"
, ""
, "func (t *Thunk) Run() unsafe.Pointer {"
, " switch t.arity {"
, " case 0:"
, " return t.f0(t)"
, " case 1:"
, " return t.f1(t, t._0)"
, " case 2:"
, " return t.f2(t, t._0, t._1)"
, " case 3:"
, " return t.f3(t, t._0, t._1, t._2)"
, " case 4:"
, " return t.f4(t, t._0, t._1, t._2, t._3)"
, " case 5:"
, " return t.f5(t, t._0, t._1, t._2, t._3, t._4,)"
, " case 6:"
, " return t.f6(t, t._0, t._1, t._2, t._3, t._4, t._5)"
, " case 7:"
, " return t.f7(t, t._0, t._1, t._2, t._3, t._4, t._5, t._6)"
, " }"
, " panic(\"Invalid arity: \" + string(t.arity))"
, "}"
, ""
, "func MkThunk0(th *Thunk, f Thunk0) *Thunk {"
, " th.arity = 0"
, " th.f0 = f"
, " return th"
, "}"
, ""
, "func MkThunk1(th *Thunk, f Thunk1, _0 unsafe.Pointer) *Thunk {"
, " th.arity = 1"
, " th.f1 = f"
, " th._0 = _0"
, " return th"
, "}"
, ""
, "func MkThunk2(th *Thunk, f Thunk2, _0, _1 unsafe.Pointer) *Thunk {"
, " th.arity = 2"
, " th.f2 = f"
, " th._0 = _0"
, " th._1 = _1"
, " return th"
, "}"
, ""
, "func MkThunk3(th *Thunk, f Thunk3, _0, _1, _2 unsafe.Pointer) *Thunk {"
, " th.arity = 3"
, " th.f3 = f"
, " th._0 = _0"
, " th._1 = _1"
, " th._2 = _2"
, " return th"
, "}"
, ""
, "func MkThunk4(th *Thunk, f Thunk4, _0, _1, _2, _3 unsafe.Pointer) *Thunk {"
, " th.arity = 4"
, " th.f4 = f"
, " th._0 = _0"
, " th._1 = _1"
, " th._2 = _2"
, " th._3 = _3"
, " return th"
, "}"
, ""
, "func MkThunk5(th *Thunk, f Thunk5, _0, _1, _2, _3, _4 unsafe.Pointer) *Thunk {"
, " th.arity = 5"
, " th.f5 = f"
, " th._0 = _0"
, " th._1 = _1"
, " th._2 = _2"
, " th._3 = _3"
, " th._4 = _4"
, " return th"
, "}"
, ""
, "func MkThunk6(th *Thunk, f Thunk6, _0, _1, _2, _3, _4, _5 unsafe.Pointer) *Thunk {"
, " th.arity = 6"
, " th.f6 = f"
, " th._0 = _0"
, " th._1 = _1"
, " th._2 = _2"
, " th._3 = _3"
, " th._4 = _4"
, " th._5 = _5"
, " return th"
, "}"
, ""
, "func MkThunk7(th *Thunk, f Thunk7, _0, _1, _2, _3, _4, _5, _6 unsafe.Pointer) *Thunk {"
, " th.arity = 7"
, " th.f7 = f"
, " th._0 = _0"
, " th._1 = _1"
, " th._2 = _2"
, " th._3 = _3"
, " th._4 = _4"
, " th._5 = _5"
, " th._6 = _6"
, " return th"
, "}"
, ""
, "func Trampoline(th *Thunk) unsafe.Pointer {"
, " var result unsafe.Pointer"
, " for th.arity >= 0 {"
, " result = th.Run()"
, " }"
, " return result"
, "}"
, ""
, "func initNullCons() {"
, " for i := 0; i < 256; i++ {"
, " nullCons[i] = Con0{i}"
, " }"
, "}"
, ""
, "var bigZero *big.Int = big.NewInt(0)"
, "var bigOne *big.Int = big.NewInt(1)"
, "var intMinusOne unsafe.Pointer = MkInt(-1)"
, "var intZero unsafe.Pointer = MkInt(0)"
, "var intOne unsafe.Pointer = MkInt(1)"
, ""
, "func __useStrconvImport() string {"
, " return strconv.Itoa(-42)"
, "}"
, ""
]
mangleName :: Name -> T.Text
mangleName name = T.concat $ map mangleChar (showCG name)
where
mangleChar x
| isAlphaNum x = T.singleton x
| otherwise = sformat ("_" % int % "_") (ord x)
nameToGo :: Name -> T.Text
nameToGo (MN i n) | T.all (\x -> isAlphaNum x || x == '_') n =
n `T.append` T.pack (show i)
nameToGo n = mangleName n
lVarToGo :: LVar -> T.Text
lVarToGo (Loc i) = sformat ("_" % int) i
lVarToGo (Glob n) = nameToGo n
lVarToVar :: LVar -> Var
lVarToVar (Loc i) = V i
lVarToVar v = error $ "LVar not convertible to var: " ++ show v
varToGo :: Var -> T.Text
varToGo RVal = "__rval"
varToGo (V i) = sformat ("_" % int) i
assign :: Var -> T.Text -> T.Text
assign RVal x = "__thunk.arity = -1; " `T.append` varToGo RVal `T.append` " = " `T.append` x
assign var x = varToGo var `T.append` " = " `T.append` x
exprToGo :: Name -> Var -> SExp -> CG [Line]
exprToGo f var SNothing = return . return $ Line (Just var) [] (assign var "nil")
exprToGo _ var (SConst i@BI{})
| i == BI 0 = return [ Line (Just var) [] (assign var "unsafe.Pointer(bigZero)") ]
| i == BI 1 = return [ Line (Just var) [] (assign var "unsafe.Pointer(bigOne)") ]
| otherwise = return
[ Line (Just var) [] (assign var (sformat ("unsafe.Pointer(" % stext % ")") (constToGo i))) ]
exprToGo f var (SConst c@Ch{}) = return . return $ mkVal var c (sformat ("MkRune(" % stext % ")"))
exprToGo _ var (SConst i@I{})
| i == I (-1) = return . return $ Line (Just var) [] (assign var "intMinusOne")
| i == I 0 = return . return $ Line (Just var) [] (assign var "intZero")
| i == I 1 = return . return $ Line (Just var) [] (assign var "intOne")
| otherwise = return . return $ mkVal var i (sformat ("MkInt(" % stext % ")"))
exprToGo f var (SConst s@Str{}) = return . return $ mkVal var s (sformat ("MkString(" % stext % ")"))
exprToGo _ (V i) (SV (Loc j))
| i == j = return []
exprToGo _ var (SV (Loc i)) = return [ Line (Just var) [V i] (assign var (lVarToGo (Loc i))) ]
exprToGo f var (SLet (Loc i) e sc) = do
a <- exprToGo f (V i) e
b <- exprToGo f var sc
return $ a ++ b
exprToGo f var (SApp True name vs)
| f == name = return $
[ Line (Just (V i)) [ V a ] (sformat ("_" % int % " = _" % int) i a) | (i, Loc a) <- zip [0..] vs, i /= a ] ++
[ Line Nothing [ ] "goto entry" ]
exprToGo f RVal (SApp True name vs) = do
trampolined <- fmap ($ name) (gets requiresTrampoline)
let args = T.intercalate ", " ("__thunk" : map lVarToGo vs)
code = if trampolined
then mkThunk name vs
else assign RVal (nameToGo name `T.append` "(" `T.append` args `T.append` ")")
return [ Line (Just RVal) [ V i | (Loc i) <- vs ] code ]
exprToGo _ var (SApp True _ _) = error $ "Tail-recursive call, but should be assigned to " ++ show var
exprToGo _ var (SApp False name vs) = do
trampolined <- fmap ($ name) (gets requiresTrampoline)
let code = if trampolined
then assign var (sformat ("Trampoline(" % stext % ")") (mkThunk name vs))
else assign var (sformat (stext % "(" % stext % ")") (nameToGo name) args)
return [ Line (Just var) [ V i | (Loc i) <- vs ] code ]
where
args = T.intercalate ", " ("__thunk" : map lVarToGo vs)
exprToGo f var (SCase up (Loc l) alts)
| isBigIntConst alts = constBigIntCase f var (V l) (dedupDefaults alts)
| isConst alts = constCase f var (V l) alts
| otherwise = conCase f var (V l) alts
where
isBigIntConst (SConstCase (BI _) _ : _) = True
isBigIntConst _ = False
isConst [] = False
isConst (SConstCase _ _ : _) = True
isConst (SConCase{} : _) = False
isConst (_ : _) = False
dedupDefaults (d@SDefaultCase{} : [SDefaultCase{}]) = [d]
dedupDefaults (x : xs) = x : dedupDefaults xs
dedupDefaults [] = []
exprToGo f var (SChkCase (Loc l) alts) = conCase f var (V l) alts
exprToGo f var (SCon _ tag name args) = return . return $
Line (Just var) [ V i | (Loc i) <- args] (comment `T.append` assign var mkCon)
where
comment = "// " `T.append` (T.pack . show) name `T.append` "\n"
mkCon
| tag < 256 && null args = sformat ("unsafe.Pointer(&nullCons[" % int % "])") tag
| otherwise =
let argsCode = case args of
[] -> T.empty
_ -> ", " `T.append` T.intercalate ", " (map lVarToGo args)
in sformat ("MkCon" % int % "(" % int % stext % ")") (length args) tag argsCode
exprToGo f var (SOp prim args) = return . return $ primToGo var prim args
exprToGo f var (SForeign ty (FApp callType callTypeArgs) args) =
let call = toCall callType callTypeArgs
in return . return $ Line Nothing [] (retVal (fDescToGoType ty) call)
where
convertedArgs = [ toArg (fDescToGoType t) (lVarToGo l) | (t, l) <- args]
toCall ct [ FStr fname ]
| ct == sUN "Function" = T.pack fname `T.append` "(" `T.append` T.intercalate ", " convertedArgs `T.append` ")"
toCall ct [ FStr _, _, FStr methodName ]
| ct == sUN "Method" =
let obj : args = convertedArgs in
sformat (stext % "." % string % "(" % stext % ")")
obj methodName (T.intercalate ", " args)
toCall ct a = error $ show ct ++ " " ++ show a
toArg (GoInterface name) x = sformat ("(*(*" % string % ")(" % stext % "))") name x
toArg GoByte x = "byte(*(*rune)(" `T.append` x `T.append` "))"
toArg GoString x = "*(*string)(" `T.append` x `T.append` ")"
toArg GoAny x = x
toArg f _ = error $ "Not implemented yet: toArg " ++ show f
ptrFromRef x = "unsafe.Pointer(&" `T.append` x `T.append` ")"
toPtr (GoInterface _) x = ptrFromRef x
toPtr GoInt x = ptrFromRef x
toPtr GoString x = ptrFromRef x
toPtr (GoNilable valueType) x =
sformat ("MkMaybe(" % stext % ", " % stext % " != nil)" )
(toPtr valueType x) x
retRef ty x =
sformat ("{ __tmp := " % stext % "\n " % stext % " = " % stext % " }")
x (varToGo var) (toPtr ty "__tmp")
retVal GoUnit x = x
retVal GoString x = retRef GoString x
retVal (i@GoInterface{}) x = retRef i x
retVal (n@GoNilable{}) x = retRef n x
retVal (GoMultiVal varTypes) x =
XXX assumes exactly two vars
sformat ("{ " % stext % " := " % stext % "\n " % stext % " = MkCon" % int % "(0, " % stext % ") }")
(T.intercalate ", " [ sformat ("__tmp" % int) i | i <- [1..length varTypes]])
x
(varToGo var)
(length varTypes)
(T.intercalate ", " [ toPtr varTy (sformat ("__tmp" % int) i) | (i, varTy) <- zip [1 :: Int ..] varTypes ])
retVal (GoPtr _) x = sformat (stext % " = unsafe.Pointer(" % stext % ")") (varToGo var) x
retVal t _ = error $ "Not implemented yet: retVal " ++ show t
exprToGo _ _ expr = error $ "Not implemented yet: " ++ show expr
data GoType = GoByte
| GoInt
| GoString
| GoNilable GoType
| GoInterface String
| GoUnit
| GoMultiVal [GoType]
| GoPtr GoType
| GoAny
deriving (Show)
fDescToGoType :: FDesc -> GoType
fDescToGoType (FCon c)
| c == sUN "Go_Byte" = GoByte
| c == sUN "Go_Int" = GoInt
| c == sUN "Go_Str" = GoString
| c == sUN "Go_Unit" = GoUnit
fDescToGoType (FApp c [ FStr name ])
| c == sUN "Go_Interface" = GoInterface name
fDescToGoType (FApp c [ _ ])
| c == sUN "Go_Any" = GoAny
fDescToGoType (FApp c [ _, ty ])
| c == sUN "Go_Nilable" = GoNilable (fDescToGoType ty)
fDescToGoType (FApp c [ _, _, FApp c2 [ _, _, a, b ] ])
| c == sUN "Go_MultiVal" && c2 == sUN "MkPair" = GoMultiVal [ fDescToGoType a, fDescToGoType b ]
fDescToGoType (FApp c [ _, ty ])
| c == sUN "Go_Ptr" = GoPtr (fDescToGoType ty)
fDescToGoType f = error $ "Not implemented yet: fDescToGoType " ++ show f
toFunType :: FDesc -> FType
toFunType (FApp c [ _, _ ])
| c == sUN "Go_FnBase" = FFunction
| c == sUN "Go_FnIO" = FFunctionIO
toFunType desc = error $ "Not implemented yet: toFunType " ++ show desc
mkThunk :: Name -> [LVar] -> T.Text
mkThunk f [] =
sformat ("MkThunk0(__thunk, " % stext % ")") (nameToGo f)
mkThunk f args =
sformat ("MkThunk" % int % "(__thunk, " % stext % ", " % stext % ")")
(length args) (nameToGo f) (T.intercalate "," (map lVarToGo args))
mkVal :: Var -> Const -> (T.Text -> T.Text) -> Line
mkVal var c factory =
Line (Just var) [] (assign var (factory (constToGo c)))
constToGo :: Const -> T.Text
constToGo (BI i)
| i == 0 = "bigZero"
| i == 1 = "bigOne"
| i < toInteger (maxBound :: Int64) && i > toInteger (minBound :: Int64) =
"big.NewInt(" `T.append` T.pack (show i) `T.append` ")"
| otherwise =
"BigIntFromString(\"" `T.append` T.pack (show i) `T.append` "\")"
constToGo (Ch '\DEL') = "'\\x7F'"
constToGo (Ch '\SO') = "'\\x0e'"
constToGo (Str s) = T.pack (show s)
constToGo constVal = T.pack (show constVal)
Special case for big . Ints , as we need to compare with there
constBigIntCase :: Name -> Var -> Var -> [SAlt] -> CG [Line]
constBigIntCase f var v alts = do
cases <- traverse case_ alts
return $
[ Line Nothing [] "switch {" ] ++ concat cases ++ [ Line Nothing [] "}" ]
where
valueCmp other = sformat ("(*big.Int)(" % stext % ").Cmp(" % stext % ") == 0") (varToGo v) (constToGo other)
case_ (SConstCase constVal expr) = do
code <- exprToGo f var expr
return $ Line Nothing [v] (sformat ("case " % stext % ":") (valueCmp constVal)) : code
case_ (SDefaultCase expr) = do
code <- exprToGo f var expr
return $ Line Nothing [] "default:" : code
case_ c = error $ "Unexpected big int case: " ++ show c
constCase :: Name -> Var -> Var -> [SAlt] -> CG [Line]
constCase f var v alts = do
cases <- traverse case_ alts
return $ [ Line Nothing [v] (T.concat [ "switch " , castValue alts , " {" ])
] ++ concat cases ++ [ Line Nothing [] "}" ]
where
castValue (SConstCase (Ch _) _ : _) = "*(*rune)(" `T.append` varToGo v `T.append` ")"
castValue (SConstCase (I _) _ : _) = "*(*int64)(" `T.append` varToGo v `T.append` ")"
castValue (SConstCase constVal _ : _) = error $ "Not implemented: cast for " ++ show constVal
castValue _ = error "First alt not a SConstCase!"
case_ (SDefaultCase expr) = do
code <- exprToGo f var expr
return $ Line Nothing [] "default:" : code
case_ (SConstCase constVal expr) = do
code <- exprToGo f var expr
return $
Line Nothing [] (T.concat [ "case " , constToGo constVal , ":" ]) : code
case_ c = error $ "Unexpected const case: " ++ show c
conCase :: Name -> Var -> Var -> [SAlt] -> CG [Line]
conCase f var v [ SDefaultCase expr ] = exprToGo f var expr
conCase f var v alts = do
cases <- traverse case_ alts
return $ [ Line Nothing [v] (T.concat [ "switch GetTag(" , varToGo v , ") {" ])
] ++ concat cases ++ [ Line Nothing [] "}" ]
where
project left i =
Line (Just left) [v]
(assign left (sformat ("(*Con" % int % ")(" % stext % ")._" % int) (i+1) (varToGo v) i))
case_ (SConCase base tag name args expr) = do
let locals = [base .. base + length args - 1]
projections = [ project (V i) (i - base) | i <- locals ]
code <- exprToGo f var expr
return $ [ Line Nothing [] (sformat ("case " % int % ":\n // Projection of " % stext) tag (nameToGo name))
] ++ projections ++ code
case_ (SDefaultCase expr) = do
code <- exprToGo f var expr
return $ Line Nothing [] "default:" : code
case_ c = error $ "Unexpected con case: " ++ show c
primToGo :: Var -> PrimFn -> [LVar] -> Line
primToGo var (LChInt ITNative) [ch] =
let code = "MkInt(int64(*(*rune)(" `T.append` lVarToGo ch `T.append` ")))"
in Line (Just var) [ lVarToVar ch ] (assign var code)
primToGo var (LEq (ATInt ITChar)) [left, right] =
let code = T.concat [ "MkIntFromBool(*(*rune)("
, lVarToGo left
, ") == *(*rune)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var (LEq (ATInt ITNative)) [left, right] =
let code = T.concat [ "MkIntFromBool(*(*int64)("
, lVarToGo left
, ") == *(*int64)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var (LEq (ATInt ITBig)) [left, right] =
let code = T.concat [ "MkIntFromBool((*big.Int)("
, lVarToGo left
, ").Cmp((*big.Int)("
, lVarToGo right
, ")) == 0)"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var (LSLt (ATInt ITChar)) [left, right] =
let code = T.concat [ "MkIntFromBool(*(*rune)("
, lVarToGo left
, ") < *(*rune)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var (LSLt (ATInt ITNative)) [left, right] =
let code = T.concat [ varToGo var
, " = MkIntFromBool(*(*int64)("
, lVarToGo left
, ") < *(*int64)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] code
primToGo var (LSLt (ATInt ITBig)) [left, right] =
let code = T.concat [ "MkIntFromBool((*big.Int)("
, lVarToGo left
, ").Cmp((*big.Int)("
, lVarToGo right
, ")) < 0)"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var (LMinus (ATInt ITNative)) [left, right] = nativeIntBinOp var left right "-"
primToGo var (LMinus (ATInt ITBig)) [left, right] = bigIntBigOp var left right "Sub"
primToGo var (LPlus (ATInt ITNative)) [left, right] = nativeIntBinOp var left right "+"
primToGo var (LPlus (ATInt ITBig)) [left, right] = bigIntBigOp var left right "Add"
primToGo var (LSExt ITNative ITBig) [i] =
let code = "unsafe.Pointer(big.NewInt(*(*int64)(" `T.append` lVarToGo i `T.append` ")))"
in Line (Just var) [ lVarToVar i ] (assign var code)
primToGo var (LIntStr ITBig) [i] =
let code = "MkString((*big.Int)(" `T.append` lVarToGo i `T.append` ").String())"
in Line (Just var) [ lVarToVar i ] (assign var code)
primToGo var (LIntStr ITNative) [i] =
let code = "MkString(strconv.FormatInt(*(*int64)(" `T.append` lVarToGo i `T.append` "), 10))"
in Line (Just var) [ lVarToVar i ] (assign var code)
primToGo var LStrEq [left, right] =
let code = T.concat [ "MkIntFromBool(*(*string)("
, lVarToGo left
, ") == *(*string)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var LStrCons [c, s] =
let code = T.concat [ "MkString(string(*(*rune)("
, lVarToGo c
, ")) + *(*string)("
, lVarToGo s
, "))"
]
in Line (Just var) [ lVarToVar c, lVarToVar s ] (assign var code)
primToGo var LStrHead [s] =
let code = "MkRune(RuneAtIndex(*(*string)(" `T.append` lVarToGo s `T.append` "), 0))"
in Line (Just var) [ lVarToVar s ] (assign var code)
primToGo var LStrTail [s] =
let code = "MkString(StrTail(*(*string)(" `T.append` lVarToGo s `T.append` ")))"
in Line (Just var) [ lVarToVar s ] (assign var code)
primToGo var LStrConcat [left, right] =
let code = T.concat [ "MkString(*(*string)("
, lVarToGo left
, ") + *(*string)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
primToGo var LWriteStr [world, s] =
let code = "WriteStr(" `T.append` lVarToGo s `T.append` ")"
in Line (Just var) [ lVarToVar world, lVarToVar s ] (assign var code)
primToGo var (LTimes (ATInt ITNative)) [left, right] = nativeIntBinOp var left right "*"
primToGo var (LTimes (ATInt ITBig)) [left, right] = bigIntBigOp var left right "Mul"
primToGo _ fn _ = Line Nothing [] (sformat ("panic(\"Unimplemented PrimFn: " % string % "\")") (show fn))
bigIntBigOp :: Var -> LVar -> LVar -> T.Text -> Line
bigIntBigOp var left right op =
let code = T.concat [ "unsafe.Pointer(new(big.Int)."
, op
, "((*big.Int)("
, lVarToGo left
, "), (*big.Int)("
, lVarToGo right
, ")))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
nativeIntBinOp :: Var -> LVar -> LVar -> T.Text -> Line
nativeIntBinOp var left right op =
let code = T.concat [ "MkInt(*(*int64)("
, lVarToGo left
, ") "
, op
, " *(*int64)("
, lVarToGo right
, "))"
]
in Line (Just var) [ lVarToVar left, lVarToVar right ] (assign var code)
data TailCall = Self
| Other
deriving (Eq, Show)
containsTailCall :: Name -> SExp -> [TailCall]
containsTailCall self (SApp True n _) = if self == n
then [ Self ]
else [ Other ]
containsTailCall self (SLet _ a b) = containsTailCall self a ++ containsTailCall self b
containsTailCall self (SUpdate _ e) = containsTailCall self e
containsTailCall self (SCase _ _ alts) = concatMap (altContainsTailCall self) alts
containsTailCall self (SChkCase _ alts) = concatMap (altContainsTailCall self) alts
containsTailCall _ _ = []
altContainsTailCall :: Name -> SAlt -> [TailCall]
altContainsTailCall self (SConCase _ _ _ _ e) = containsTailCall self e
altContainsTailCall self (SConstCase _ e) = containsTailCall self e
altContainsTailCall self (SDefaultCase e) = containsTailCall self e
extractUsedVars :: [Line] -> S.Set Var
extractUsedVars lines = S.fromList (concat [used | Line _ used _ <- lines])
filterUnusedLines :: [Line] -> [Line]
filterUnusedLines lines =
let usedVars = extractUsedVars lines
requiredLines = mapMaybe (required usedVars) lines
in if length lines /= length requiredLines
then filterUnusedLines requiredLines
else lines
where
required _ l@(Line Nothing _ _) = Just l
required _ l@(Line (Just RVal) _ _) = Just l
required usedVars l@(Line (Just v) _ _) =
if S.member v usedVars
then Just l
else Nothing
funToGo :: (Name, SDecl, [TailCall]) -> CG T.Text
funToGo (name, SFun _ args locs expr, tailCalls) = do
bodyLines <- filterUnusedLines <$> exprToGo name RVal expr
let usedVars = extractUsedVars bodyLines
pure . T.concat $
[ "// "
, T.pack $ show name
, "\nfunc "
, nameToGo name
, "("
, "__thunk *Thunk" `T.append` if (not . null) args then ", " else T.empty
, T.intercalate ", " [ sformat ("_" % int % " unsafe.Pointer") i | i <- [0..length args-1]]
, ") unsafe.Pointer {\n var __rval unsafe.Pointer\n"
, reserve usedVars locs
, tailCallEntry
, T.unlines [ line | Line _ _ line <- bodyLines ]
, "return __rval\n}\n\n"
]
where
tailCallEntry = if Self `elem` tailCalls
then "entry:"
else T.empty
loc usedVars i =
let i' = length args + i in
if S.member (V i') usedVars
then Just $ sformat ("_" % int) i'
else Nothing
reserve usedVars locs = case mapMaybe (loc usedVars) [0..locs] of
[] -> T.empty
usedLocs -> " var " `T.append` T.intercalate ", " usedLocs `T.append` " unsafe.Pointer\n"
genMain :: T.Text
genMain = T.unlines
[ "var cpuprofile = flag.String(\"cpuprofile\", \"\", \"write cpu profile `file`\")"
, "var memprofile = flag.String(\"memprofile\", \"\", \"write memory profile to `file`\")"
, ""
, "func main() {"
, " flag.Parse()"
, " initNullCons()"
, " if *cpuprofile != \"\" {"
, " f, err := os.Create(*cpuprofile)"
, " if err != nil {"
, " log.Fatal(\"Could not create CPU profile: \", err)"
, " }"
, " if err := pprof.StartCPUProfile(f); err != nil {"
, " log.Fatal(\"Could not start CPU profile: \", err)"
, " }"
, " defer pprof.StopCPUProfile()"
, " }"
, " var thunk Thunk"
, " runMain0(&thunk)"
, " if *memprofile != \"\" {"
, " f, err := os.Create(*memprofile)"
, " if err != nil {"
, " log.Fatal(\"Could not create memory profile: \", err)"
, " }"
, " runtime.GC()"
, " if err := pprof.WriteHeapProfile(f); err != nil {"
, " log.Fatal(\"Could not write memory profile: \", err)"
, " }"
, " f.Close()"
, " }"
, "}"
]
codegenGo :: CodeGenerator
codegenGo ci = do
let funs = [ (name, fun, containsTailCall name expr)
| (name, fun@(SFun _ _ _ expr)) <- simpleDecls ci
]
needsTrampolineByName = M.fromList [ (name, Other `elem` tailCalls)
| (name, _, tailCalls) <- funs
]
trampolineLookup = fromMaybe False . (`M.lookup` needsTrampolineByName)
funCodes = evalState (traverse funToGo funs) (createCgState trampolineLookup)
code = T.concat [ goPreamble (map (T.pack . show) (includes ci))
, T.concat funCodes
, genMain
]
withFile (outputFile ci) WriteMode $ \hOut -> do
(Just hIn, _, _, p) <-
createProcess (proc "gofmt" [ "-s" ]){ std_in = CreatePipe, std_out = UseHandle hOut }
TIO.hPutStr hIn code
_ <- waitForProcess p
return ()
|
18490c62368992ceeff31ff0883f04e683abf1907c5c4e8b8e87d68f047e799d | chrix75/clj-javafx | project.clj | (defproject clj-javafx "0.1.0-SNAPSHOT"
:description "A Clojure wrapper for JavaFX 2.x"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.4.0"]
[local.oracle/javafxrt "2.2.3"]]
:aot [clj-javafx.application clj-javafx.component]
:test-selectors {:default (complement :integration)
:integration :integration
:all (constantly true)})
| null | https://raw.githubusercontent.com/chrix75/clj-javafx/8d7e9f7ac1c06caa5b8e2ce43b568e7b5581a896/project.clj | clojure | (defproject clj-javafx "0.1.0-SNAPSHOT"
:description "A Clojure wrapper for JavaFX 2.x"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.4.0"]
[local.oracle/javafxrt "2.2.3"]]
:aot [clj-javafx.application clj-javafx.component]
:test-selectors {:default (complement :integration)
:integration :integration
:all (constantly true)})
| |
adbb52bbf7c6d38cca6fc8c7d3558b5f6c012ff9c7862187263cabf00017bafa | facebook/pyre-check | dependencyGraphTest.ml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open Core
open OUnit2
open Analysis
open Ast
open Interprocedural
open Statement
open Test
let setup ?(update_environment_with = []) ~context ~handle source =
let project =
let external_sources =
List.map update_environment_with ~f:(fun { handle; source } -> handle, source)
in
ScratchProject.setup ~context ~external_sources [handle, source]
in
let { ScratchProject.BuiltTypeEnvironment.sources; type_environment; _ } =
ScratchProject.build_type_environment project
in
let source =
List.find_exn sources ~f:(fun { Source.module_path; _ } ->
String.equal (ModulePath.relative module_path) handle)
in
source, type_environment, ScratchProject.configuration_of project
let create_call_graph ?(update_environment_with = []) ~context source_text =
let source, environment, configuration =
setup ~update_environment_with ~context ~handle:"test.py" source_text
in
let static_analysis_configuration = Configuration.StaticAnalysis.create configuration () in
let override_graph =
OverrideGraph.Heap.from_source ~environment ~include_unit_tests:true ~source
|> OverrideGraph.SharedMemory.from_heap
in
let () =
let errors = TypeEnvironment.ReadOnly.get_errors environment !&"test" in
if not (List.is_empty errors) then
Format.asprintf
"Type errors in %s\n%a"
source_text
(Format.pp_print_list TypeCheck.Error.pp)
errors
|> failwith
in
let callables =
FetchCallables.from_source
~configuration
~resolution:(TypeEnvironment.ReadOnly.global_resolution environment)
~include_unit_tests:true
~source
|> FetchCallables.get_non_stub_callables
in
let fold call_graph callable =
let callees =
CallGraph.call_graph_of_callable
~static_analysis_configuration
~environment
~override_graph
~attribute_targets:(Target.HashSet.create ())
~callable
|> CallGraph.DefineCallGraph.all_targets
in
CallGraph.WholeProgramCallGraph.add_or_exn call_graph ~callable ~callees
in
List.fold ~init:CallGraph.WholeProgramCallGraph.empty ~f:fold callables
let create_callable = function
| `Function name -> !&name |> Target.create_function
| `Method name -> !&name |> Target.create_method
| `Override name -> !&name |> Target.create_override
let compare_dependency_graph call_graph ~expected =
let expected =
let map_callee_callers (callee, callers) =
( create_callable callee,
List.map callers ~f:create_callable |> List.sort ~compare:Target.compare )
in
List.map expected ~f:map_callee_callers
in
let call_graph =
List.map call_graph ~f:(fun (callee, callers) ->
callee, List.sort callers ~compare:Target.compare)
in
let printer call_graph =
Sexp.to_string [%message (call_graph : (Target.t * Target.t list) list)]
in
assert_equal ~printer expected call_graph
let assert_call_graph ?update_environment_with ~context source ~expected =
let graph =
create_call_graph ?update_environment_with ~context source
|> DependencyGraph.Reversed.from_call_graph
|> DependencyGraph.Reversed.to_target_graph
|> TargetGraph.to_alist
in
compare_dependency_graph graph ~expected
let assert_reverse_call_graph ~context source ~expected =
let graph =
create_call_graph ~context source
|> DependencyGraph.Reversed.from_call_graph
|> DependencyGraph.Reversed.reverse
|> DependencyGraph.to_target_graph
|> TargetGraph.to_alist
in
compare_dependency_graph graph ~expected
(* End-to-end construction tests: type-check each Python snippet, build its
   call graph, reverse it, and compare against the expected
   callee -> callers association list.
   Fix: the "We only dereference overrides by one level" remark below had lost
   its (* ... *) delimiters, leaving bare words that do not parse as OCaml;
   restored as a proper comment. *)
let test_construction context =
  let assert_call_graph = assert_call_graph ~context in
  assert_call_graph
    {|
    class Foo:
      def __init__(self):
        pass
      def bar(self):
        return 10
      def qux(self):
        return self.bar()
  |}
    ~expected:
      [
        `Function "test.$toplevel", [];
        `Method "test.Foo.$class_toplevel", [];
        `Method "test.Foo.__init__", [];
        `Method "test.Foo.bar", [];
        `Method "test.Foo.qux", [`Method "test.Foo.bar"];
      ];
  assert_call_graph
    {|
    class Foo:
      def __init__(self):
        pass
      def bar(self):
        return self.qux()
      def qux(self):
        return self.bar()
  |}
    ~expected:
      [
        `Function "test.$toplevel", [];
        `Method "test.Foo.$class_toplevel", [];
        `Method "test.Foo.__init__", [];
        `Method "test.Foo.bar", [`Method "test.Foo.qux"];
        `Method "test.Foo.qux", [`Method "test.Foo.bar"];
      ];
  assert_call_graph
    {|
    class A:
      def __init__(self) -> None:
        pass
    class B:
      def __init__(self) -> None:
        a = A()
  |}
    ~expected:
      [
        `Function "test.$toplevel", [];
        `Method "test.A.$class_toplevel", [];
        `Method "test.A.__init__", [];
        `Method "test.B.$class_toplevel", [];
        `Method "test.B.__init__", [`Method "test.A.__init__"; `Method "object.__new__"];
      ];
  assert_call_graph
    ~update_environment_with:
      [{ handle = "foobar.pyi"; source = {|
            def bar(x: str) -> str: ...
          |} }]
    {|
     def foo():
       foobar.bar("foo")
    |}
    ~expected:[`Function "test.$toplevel", []; `Function "test.foo", [`Function "foobar.bar"]];
  assert_call_graph
    ~update_environment_with:
      [{ handle = "bar/baz/qux.pyi"; source = {|
            def derp() -> str: ...
          |} }]
    {|
     from bar.baz import qux
     def foo():
       qux.derp()
    |}
    ~expected:[`Function "test.$toplevel", []; `Function "test.foo", [`Function "bar.baz.qux.derp"]];
  assert_call_graph
    {|
    class Base:
      def foo(self) -> None: ...
    class C(Base):
      pass
    def call_foo(c: C) -> None:
      c.foo()
  |}
    ~expected:
      [
        `Function "test.$toplevel", [];
        `Function "test.call_foo", [`Method "test.Base.foo"];
        `Method "test.Base.$class_toplevel", [];
        `Method "test.C.$class_toplevel", [];
      ];
  assert_call_graph
    {|
    class Base:
      def foo(self) -> None: ...
    class C(Base):
      pass
    class D(C):
      def foo(self) -> None: ...
    class E(C):
      pass
    def call_foo(c: C) -> None:
      c.foo()
  |}
    ~expected:
      [
        `Function "test.$toplevel", [];
        `Function "test.call_foo", [`Method "test.Base.foo"; `Method "test.D.foo"];
        `Method "test.Base.$class_toplevel", [];
        `Method "test.C.$class_toplevel", [];
        `Method "test.D.$class_toplevel", [];
        `Method "test.E.$class_toplevel", [];
      ];
  (* Ensure that we don't include UnrelatedToC.foo here. *)
  assert_call_graph
    {|
    class Base:
      def foo(self) -> None: ...
    class C(Base):
      pass
    class D(C):
      def foo(self) -> None: ...
    class UnrelatedToC(Base):
      def foo(self) -> None: ...
    def call_foo(c: C) -> None:
      c.foo()
  |}
    ~expected:
      [
        `Function "test.$toplevel", [];
        `Function "test.call_foo", [`Method "test.Base.foo"; `Method "test.D.foo"];
        `Method "test.Base.$class_toplevel", [];
        `Method "test.C.$class_toplevel", [];
        `Method "test.D.$class_toplevel", [];
        `Method "test.UnrelatedToC.$class_toplevel", [];
      ];
  (* We only dereference overrides by one level. *)
  assert_call_graph
    {|
    class Base:
      def foo(self) -> None: ...
    class C(Base):
      pass
    class Child(C):
      def foo(self) -> None: ...
    class Grandchild(Child):
      def foo(self) -> None: ...
    def call_foo(c: C) -> None:
      c.foo()
  |}
    ~expected:
      [
        `Function "test.$toplevel", [];
        `Function "test.call_foo", [`Override "test.Child.foo"; `Method "test.Base.foo"];
        `Method "test.Base.$class_toplevel", [];
        `Method "test.C.$class_toplevel", [];
        `Method "test.Child.$class_toplevel", [];
        `Method "test.Grandchild.$class_toplevel", [];
      ];
  assert_call_graph
    {|
    class C:
      def foo(self) -> int: ...
    class D(C):
      def bar(self) -> int: ...
    class E(D):
      def foo(self) -> int: ...
    def calls_c(c: C) -> None:
      c.foo()
    def calls_d(d: D) -> None:
      d.foo()
    def calls_e(e: E) -> None:
      e.foo()
  |}
    ~expected:
      [
        `Function "test.$toplevel", [];
        `Function "test.calls_c", [`Override "test.C.foo"];
        `Function "test.calls_d", [`Method "test.E.foo"; `Method "test.C.foo"];
        `Function "test.calls_e", [`Method "test.E.foo"];
        `Method "test.C.$class_toplevel", [];
        `Method "test.D.$class_toplevel", [];
        `Method "test.E.$class_toplevel", [];
      ];
  assert_call_graph
    {|
    class C(str):
      def format(self, *args) -> C: ...
    def format_str() -> None:
      "string literal {}".format("foo")
  |}
    (* If we didn't weaken literals, the call would be a method("str.format") instead of override
       here. *)
    ~expected:
      [
        `Function "test.$toplevel", [];
        `Function "test.format_str", [`Override "str.format"];
        `Method "test.C.$class_toplevel", [];
      ];
  assert_call_graph
    {|
    def foo() -> None:
      def bar() -> None:
        "ASD".format("ASD").lower()
      bar()
  |}
    ~expected:
      [
        `Function "$local_test?foo$bar", [`Method "str.lower"; `Override "str.format"];
        `Function "test.$toplevel", [];
        `Function "test.foo", [`Function "$local_test?foo$bar"];
      ];
  assert_call_graph
    {|
    from typing import Generic, TypeVar
    T = TypeVar("T")
    class C(Generic[T]):
      def method(self) -> int: ...
    class D(C[int]):
      def method(self) -> int: ...
    def calls_C_str(c: C[str]) -> None:
      c.method()
    def calls_C_int(c: C[int]) -> None:
      c.method()
  |}
    ~expected:
      [
        `Function "test.$toplevel", [];
        `Function "test.calls_C_int", [`Override "test.C.method"];
        `Function "test.calls_C_str", [`Override "test.C.method"];
        `Method "test.C.$class_toplevel", [];
        `Method "test.D.$class_toplevel", [];
      ]
(* Same scenarios as [test_construction], but the dependency graph is reversed
   back into caller -> callees form; note the expected output only lists
   entries that carry at least one edge. *)
let test_construction_reverse context =
  assert_reverse_call_graph
    ~context
    {|
    class Foo:
      def __init__(self):
        pass
      def bar(self):
        return 10
      def qux(self):
        return self.bar()
  |}
    ~expected:[`Method "test.Foo.bar", [`Method "test.Foo.qux"]];
  assert_reverse_call_graph
    ~context
    {|
    class Foo:
      def __init__(self):
        pass
      def baz(self):
        return self.bar()
      def qux(self):
        return self.bar()
      def bar(self):
        return self.qux()
  |}
    ~expected:
      [
        `Method "test.Foo.bar", [`Method "test.Foo.qux"; `Method "test.Foo.baz"];
        `Method "test.Foo.qux", [`Method "test.Foo.bar"];
      ]
(* Check that, inside the third define of [source], selected call expressions
   resolve to the expected named callable types under the local annotations
   produced by the type checker.  Each [expected] entry is
   (node_id, statement_index, expression_text, expected_callable_name). *)
let test_type_collection context =
  let assert_type_collection source ~handle ~expected =
    let source, environment =
      let project = ScratchProject.setup ~context [handle, source] in
      let { ScratchProject.BuiltTypeEnvironment.type_environment = environment; _ } =
        ScratchProject.build_type_environment project
      in
      let source =
        AstEnvironment.ReadOnly.get_processed_source
          (TypeEnvironment.ReadOnly.ast_environment environment)
          (Reference.create (String.chop_suffix_exn handle ~suffix:".py"))
        |> fun option -> Option.value_exn option
      in
      source, environment
    in
    let defines =
      Preprocessing.defines ~include_toplevels:true source
      |> List.map ~f:(fun { Node.value; _ } -> value)
    in
    (* The define under test is always the third one found in the module. *)
    let { Define.signature = { name; _ }; body = statements; _ } = List.nth_exn defines 2 in
    let lookup =
      TypeEnvironment.ReadOnly.get_local_annotations environment name
      |> fun value -> Option.value_exn value
    in
    let test_expect (node_id, statement_index, test_expression, expected_type) =
      let statement_key = [%hash: int * int] (node_id, statement_index) in
      (* Use the annotations holding *before* the statement executes. *)
      let annotation_store =
        LocalAnnotationMap.ReadOnly.get_precondition lookup ~statement_key
        |> fun value -> Option.value_exn value
      in
      let global_resolution = TypeEnvironment.ReadOnly.global_resolution environment in
      let resolution =
        TypeCheck.resolution global_resolution ~annotation_store (module TypeCheck.DummyContext)
      in
      let statement = List.nth_exn statements statement_index in
      Visit.collect_calls_and_names statement
      |> List.filter ~f:Expression.has_identifier_base
      |> List.hd_exn
      |> fun expression ->
      (* Only assert when the collected expression is the one we were asked
         about; otherwise the expectation is silently skipped. *)
      if String.equal (Expression.show expression) test_expression then
        match Resolution.resolve_expression_to_type resolution expression with
        | Type.Callable { Type.Callable.kind = Type.Callable.Named callable_type; _ } ->
            assert_equal expected_type (Reference.show callable_type)
        | _ -> assert false
    in
    List.iter expected ~f:test_expect
  in
  assert_type_collection
    {|
    class A:
      def foo(self) -> int:
        return 1
    class B:
      def foo(self) -> int:
        return 2
    class X:
      def caller(self):
        a = A()
        a.foo()
        a = B()
        a.foo()
  |}
    ~handle:"test1.py"
    ~expected:
      [4, 1, "$local_0$a.foo.(...)", "test1.A.foo"; 4, 3, "$local_0$a.foo.(...)", "test1.B.foo"];
  assert_type_collection
    {|
    class A:
      def foo(self) -> int:
        return 1
    class B:
      def foo(self) -> A:
        return A()
    class X:
      def caller(self):
        a = B().foo().foo()
  |}
    ~handle:"test2.py"
    ~expected:[4, 0, "$local_0$a.foo.(...).foo.(...)", "test2.A.foo"]
(* Tests for [DependencyGraph.Reversed.prune]: starting from the
   project-owned callables, only transitively reachable callables (plus the
   override targets linking them) must be kept.
   Fix: the "calls go away ..." remark below had lost its (* ... *)
   delimiters during extraction, leaving bare words that do not parse as
   OCaml; restored as a proper comment. *)
let test_prune_callables _ =
  let assert_pruned
      ~callgraph
      ~overrides
      ~project_callables
      ~expected_callables
      ~expected_dependencies
    =
    (* Names prefixed with "O|" denote override targets; all others are
       plain method targets. *)
    let create name =
      if String.is_prefix ~prefix:"O|" (Reference.show name) then
        Target.create_override (String.drop_prefix (Reference.show name) 2 |> Reference.create)
      else
        Target.create_method name
    in
    let callgraph =
      List.map callgraph ~f:(fun (key, values) ->
          ( Target.create_method (Reference.create key),
            List.map values ~f:(fun value -> create (Reference.create value)) ))
      |> CallGraph.WholeProgramCallGraph.of_alist_exn
    in
    let overrides =
      List.map overrides ~f:(fun (key, values) ->
          ( Target.create_method (Reference.create key),
            List.map values ~f:(fun value -> Reference.create value) ))
      |> OverrideGraph.Heap.of_alist_exn
    in
    let project_callables =
      List.map ~f:(fun name -> name |> Reference.create |> Target.create_method) project_callables
    in
    let overrides = DependencyGraph.Reversed.from_overrides overrides in
    let dependencies =
      DependencyGraph.Reversed.from_call_graph callgraph
      |> DependencyGraph.Reversed.disjoint_union overrides
    in
    let {
      DependencyGraph.Reversed.reverse_dependency_graph = actual_dependencies;
      callables_kept = actual_callables;
    }
      =
      DependencyGraph.Reversed.prune dependencies ~callables_to_analyze:project_callables
    in
    let actual_dependencies = DependencyGraph.Reversed.to_target_graph actual_dependencies in
    (* First check the set of surviving callables, then the pruned graph. *)
    assert_equal
      ~cmp:(List.equal Target.equal)
      ~printer:(List.to_string ~f:Target.show_pretty)
      (List.map expected_callables ~f:(fun callable -> create (Reference.create callable)))
      actual_callables;
    assert_equal
      ~cmp:
        (List.equal (fun (left_key, left_values) (right_key, right_values) ->
             Target.equal left_key right_key && List.equal Target.equal left_values right_values))
      ~printer:(fun graph ->
        graph
        |> List.map ~f:(fun (key, values) ->
               Format.asprintf
                 "%a -> %s"
                 Target.pp_pretty
                 key
                 (List.to_string values ~f:Target.show_pretty))
        |> String.concat ~sep:"\n")
      (List.map expected_dependencies ~f:(fun (key, values) ->
           ( create (Reference.create key),
             List.map values ~f:(fun value -> create (Reference.create value)) )))
      (Target.Map.Tree.to_alist actual_dependencies)
  in
  (* Basic case. *)
  assert_pruned
    ~callgraph:
      ["a.foo", ["external.bar"]; "external.bar", []; "external.test.test_bar", ["external.bar"]]
    ~overrides:[]
    ~project_callables:["a.foo"]
    ~expected_callables:["a.foo"; "external.bar"]
    ~expected_dependencies:["a.foo", ["external.bar"]; "external.bar", []];
  (* Transitive case. *)
  assert_pruned
    ~callgraph:
      [
        "a.foo", ["external.bar"];
        "external.bar", ["external.baz"];
        "external.baz", [];
        "external.test.test_baz", ["external.baz"];
        "external.test.test_bar", ["external.bar"];
      ]
    ~overrides:[]
    ~project_callables:["a.foo"]
    ~expected_callables:["a.foo"; "external.bar"; "external.baz"]
    ~expected_dependencies:
      ["a.foo", ["external.bar"]; "external.bar", ["external.baz"]; "external.baz", []];
  (* Basic override. *)
  assert_pruned
    ~callgraph:
      [
        "a.foo", ["external.bar"];
        "external.bar", ["O|external.C.m"];
        "external.C.m", [];
        "external.D.m", ["external.called_by_override"];
        "external.called_by_override", [];
        "external.unrelated", [];
      ]
    ~overrides:["external.C.m", ["external.D"]; "external.D.m", []]
    ~project_callables:["a.foo"]
    ~expected_callables:
      [
        "a.foo";
        "external.bar";
        "O|external.C.m";
        "external.C.m";
        "O|external.D.m";
        "external.D.m";
        "external.called_by_override";
      ]
    ~expected_dependencies:
      [
        "a.foo", ["external.bar"];
        "external.bar", ["O|external.C.m"];
        "external.called_by_override", [];
        "external.C.m", [];
        "external.D.m", ["external.called_by_override"];
        "O|external.C.m", ["O|external.D.m"; "external.C.m"];
        "O|external.D.m", ["external.D.m"];
      ];
  (* The calls go away if we don't have the override between C and D. *)
  assert_pruned
    ~callgraph:
      [
        "a.foo", ["external.bar"];
        "external.bar", ["external.C.m"];
        "external.C.m", [];
        "external.D.m", ["external.called_by_override"];
        "external.called_by_override", [];
        "external.unrelated", [];
      ]
    ~overrides:[]
    ~project_callables:["a.foo"]
    ~expected_callables:["a.foo"; "external.bar"; "external.C.m"]
    ~expected_dependencies:
      ["a.foo", ["external.bar"]; "external.bar", ["external.C.m"]; "external.C.m", []];
  (* Transitive overrides. *)
  assert_pruned
    ~callgraph:
      [
        "a.foo", ["O|external.C.m"];
        "external.C.m", [];
        "external.D.m", [];
        "external.E.m", ["external.called_by_override"];
        "external.called_by_override", [];
        "external.unrelated", [];
      ]
    ~overrides:["external.C.m", ["external.D"]; "external.D.m", ["external.E"]]
    ~project_callables:["a.foo"]
    ~expected_callables:
      [
        "a.foo";
        "O|external.C.m";
        "external.C.m";
        "O|external.D.m";
        "external.D.m";
        "O|external.E.m";
        "external.E.m";
        "external.called_by_override";
      ]
    ~expected_dependencies:
      [
        "a.foo", ["O|external.C.m"];
        "external.called_by_override", [];
        "external.C.m", [];
        "external.D.m", [];
        "external.E.m", ["external.called_by_override"];
        "O|external.C.m", ["O|external.D.m"; "external.C.m"];
        "O|external.D.m", ["O|external.E.m"; "external.D.m"];
        "O|external.E.m", ["external.E.m"];
      ];
  (* Strongly connected components are handled fine. *)
  assert_pruned
    ~callgraph:
      [
        "a.foo", ["external.a"];
        "external.a", ["external.b"];
        "external.b", ["external.c"];
        "external.c", ["external.a"];
        "external.d", ["external.e"];
        "external.e", ["external.f"];
        "external.f", ["external.d"];
      ]
    ~overrides:[]
    ~project_callables:["a.foo"]
    ~expected_callables:["a.foo"; "external.a"; "external.b"; "external.c"]
    ~expected_dependencies:
      [
        "a.foo", ["external.a"];
        "external.a", ["external.b"];
        "external.b", ["external.c"];
        "external.c", ["external.a"];
      ];
  ()
(* Test-suite entry point: registers the four test cases above under the
   "callGraph" suite and runs them. *)
let () =
  Scheduler.Daemon.check_entry_point ();
  "callGraph"
  >::: [
         "type_collection" >:: test_type_collection;
         "build" >:: test_construction;
         "build_reverse" >:: test_construction_reverse;
         "prune_callables" >:: test_prune_callables;
       ]
  |> Test.run
| null | https://raw.githubusercontent.com/facebook/pyre-check/536e4a2c008726d0ce5589eaef681166923b6183/source/interprocedural/test/dependencyGraphTest.ml | ocaml | Ensure that we don't include UnrelatedToC.foo here.
If we didn't weaken literals, the call would be a method("str.format") instead of override
here.
Basic case.
Transitive case.
Basic override.
Transitive overrides.
Strongly connected components are handled fine. |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open Core
open OUnit2
open Analysis
open Ast
open Interprocedural
open Statement
open Test
let setup ?(update_environment_with = []) ~context ~handle source =
let project =
let external_sources =
List.map update_environment_with ~f:(fun { handle; source } -> handle, source)
in
ScratchProject.setup ~context ~external_sources [handle, source]
in
let { ScratchProject.BuiltTypeEnvironment.sources; type_environment; _ } =
ScratchProject.build_type_environment project
in
let source =
List.find_exn sources ~f:(fun { Source.module_path; _ } ->
String.equal (ModulePath.relative module_path) handle)
in
source, type_environment, ScratchProject.configuration_of project
let create_call_graph ?(update_environment_with = []) ~context source_text =
let source, environment, configuration =
setup ~update_environment_with ~context ~handle:"test.py" source_text
in
let static_analysis_configuration = Configuration.StaticAnalysis.create configuration () in
let override_graph =
OverrideGraph.Heap.from_source ~environment ~include_unit_tests:true ~source
|> OverrideGraph.SharedMemory.from_heap
in
let () =
let errors = TypeEnvironment.ReadOnly.get_errors environment !&"test" in
if not (List.is_empty errors) then
Format.asprintf
"Type errors in %s\n%a"
source_text
(Format.pp_print_list TypeCheck.Error.pp)
errors
|> failwith
in
let callables =
FetchCallables.from_source
~configuration
~resolution:(TypeEnvironment.ReadOnly.global_resolution environment)
~include_unit_tests:true
~source
|> FetchCallables.get_non_stub_callables
in
let fold call_graph callable =
let callees =
CallGraph.call_graph_of_callable
~static_analysis_configuration
~environment
~override_graph
~attribute_targets:(Target.HashSet.create ())
~callable
|> CallGraph.DefineCallGraph.all_targets
in
CallGraph.WholeProgramCallGraph.add_or_exn call_graph ~callable ~callees
in
List.fold ~init:CallGraph.WholeProgramCallGraph.empty ~f:fold callables
let create_callable = function
| `Function name -> !&name |> Target.create_function
| `Method name -> !&name |> Target.create_method
| `Override name -> !&name |> Target.create_override
let compare_dependency_graph call_graph ~expected =
let expected =
let map_callee_callers (callee, callers) =
( create_callable callee,
List.map callers ~f:create_callable |> List.sort ~compare:Target.compare )
in
List.map expected ~f:map_callee_callers
in
let call_graph =
List.map call_graph ~f:(fun (callee, callers) ->
callee, List.sort callers ~compare:Target.compare)
in
let printer call_graph =
Sexp.to_string [%message (call_graph : (Target.t * Target.t list) list)]
in
assert_equal ~printer expected call_graph
let assert_call_graph ?update_environment_with ~context source ~expected =
let graph =
create_call_graph ?update_environment_with ~context source
|> DependencyGraph.Reversed.from_call_graph
|> DependencyGraph.Reversed.to_target_graph
|> TargetGraph.to_alist
in
compare_dependency_graph graph ~expected
let assert_reverse_call_graph ~context source ~expected =
let graph =
create_call_graph ~context source
|> DependencyGraph.Reversed.from_call_graph
|> DependencyGraph.Reversed.reverse
|> DependencyGraph.to_target_graph
|> TargetGraph.to_alist
in
compare_dependency_graph graph ~expected
let test_construction context =
let assert_call_graph = assert_call_graph ~context in
assert_call_graph
{|
class Foo:
def __init__(self):
pass
def bar(self):
return 10
def qux(self):
return self.bar()
|}
~expected:
[
`Function "test.$toplevel", [];
`Method "test.Foo.$class_toplevel", [];
`Method "test.Foo.__init__", [];
`Method "test.Foo.bar", [];
`Method "test.Foo.qux", [`Method "test.Foo.bar"];
];
assert_call_graph
{|
class Foo:
def __init__(self):
pass
def bar(self):
return self.qux()
def qux(self):
return self.bar()
|}
~expected:
[
`Function "test.$toplevel", [];
`Method "test.Foo.$class_toplevel", [];
`Method "test.Foo.__init__", [];
`Method "test.Foo.bar", [`Method "test.Foo.qux"];
`Method "test.Foo.qux", [`Method "test.Foo.bar"];
];
assert_call_graph
{|
class A:
def __init__(self) -> None:
pass
class B:
def __init__(self) -> None:
a = A()
|}
~expected:
[
`Function "test.$toplevel", [];
`Method "test.A.$class_toplevel", [];
`Method "test.A.__init__", [];
`Method "test.B.$class_toplevel", [];
`Method "test.B.__init__", [`Method "test.A.__init__"; `Method "object.__new__"];
];
assert_call_graph
~update_environment_with:
[{ handle = "foobar.pyi"; source = {|
def bar(x: str) -> str: ...
|} }]
{|
def foo():
foobar.bar("foo")
|}
~expected:[`Function "test.$toplevel", []; `Function "test.foo", [`Function "foobar.bar"]];
assert_call_graph
~update_environment_with:
[{ handle = "bar/baz/qux.pyi"; source = {|
def derp() -> str: ...
|} }]
{|
from bar.baz import qux
def foo():
qux.derp()
|}
~expected:[`Function "test.$toplevel", []; `Function "test.foo", [`Function "bar.baz.qux.derp"]];
assert_call_graph
{|
class Base:
def foo(self) -> None: ...
class C(Base):
pass
def call_foo(c: C) -> None:
c.foo()
|}
~expected:
[
`Function "test.$toplevel", [];
`Function "test.call_foo", [`Method "test.Base.foo"];
`Method "test.Base.$class_toplevel", [];
`Method "test.C.$class_toplevel", [];
];
assert_call_graph
{|
class Base:
def foo(self) -> None: ...
class C(Base):
pass
class D(C):
def foo(self) -> None: ...
class E(C):
pass
def call_foo(c: C) -> None:
c.foo()
|}
~expected:
[
`Function "test.$toplevel", [];
`Function "test.call_foo", [`Method "test.Base.foo"; `Method "test.D.foo"];
`Method "test.Base.$class_toplevel", [];
`Method "test.C.$class_toplevel", [];
`Method "test.D.$class_toplevel", [];
`Method "test.E.$class_toplevel", [];
];
assert_call_graph
{|
class Base:
def foo(self) -> None: ...
class C(Base):
pass
class D(C):
def foo(self) -> None: ...
class UnrelatedToC(Base):
def foo(self) -> None: ...
def call_foo(c: C) -> None:
c.foo()
|}
~expected:
[
`Function "test.$toplevel", [];
`Function "test.call_foo", [`Method "test.Base.foo"; `Method "test.D.foo"];
`Method "test.Base.$class_toplevel", [];
`Method "test.C.$class_toplevel", [];
`Method "test.D.$class_toplevel", [];
`Method "test.UnrelatedToC.$class_toplevel", [];
];
We only dereference overrides by one level .
assert_call_graph
{|
class Base:
def foo(self) -> None: ...
class C(Base):
pass
class Child(C):
def foo(self) -> None: ...
class Grandchild(Child):
def foo(self) -> None: ...
def call_foo(c: C) -> None:
c.foo()
|}
~expected:
[
`Function "test.$toplevel", [];
`Function "test.call_foo", [`Override "test.Child.foo"; `Method "test.Base.foo"];
`Method "test.Base.$class_toplevel", [];
`Method "test.C.$class_toplevel", [];
`Method "test.Child.$class_toplevel", [];
`Method "test.Grandchild.$class_toplevel", [];
];
assert_call_graph
{|
class C:
def foo(self) -> int: ...
class D(C):
def bar(self) -> int: ...
class E(D):
def foo(self) -> int: ...
def calls_c(c: C) -> None:
c.foo()
def calls_d(d: D) -> None:
d.foo()
def calls_e(e: E) -> None:
e.foo()
|}
~expected:
[
`Function "test.$toplevel", [];
`Function "test.calls_c", [`Override "test.C.foo"];
`Function "test.calls_d", [`Method "test.E.foo"; `Method "test.C.foo"];
`Function "test.calls_e", [`Method "test.E.foo"];
`Method "test.C.$class_toplevel", [];
`Method "test.D.$class_toplevel", [];
`Method "test.E.$class_toplevel", [];
];
assert_call_graph
{|
class C(str):
def format(self, *args) -> C: ...
def format_str() -> None:
"string literal {}".format("foo")
|}
~expected:
[
`Function "test.$toplevel", [];
`Function "test.format_str", [`Override "str.format"];
`Method "test.C.$class_toplevel", [];
];
assert_call_graph
{|
def foo() -> None:
def bar() -> None:
"ASD".format("ASD").lower()
bar()
|}
~expected:
[
`Function "$local_test?foo$bar", [`Method "str.lower"; `Override "str.format"];
`Function "test.$toplevel", [];
`Function "test.foo", [`Function "$local_test?foo$bar"];
];
assert_call_graph
{|
from typing import Generic, TypeVar
T = TypeVar("T")
class C(Generic[T]):
def method(self) -> int: ...
class D(C[int]):
def method(self) -> int: ...
def calls_C_str(c: C[str]) -> None:
c.method()
def calls_C_int(c: C[int]) -> None:
c.method()
|}
~expected:
[
`Function "test.$toplevel", [];
`Function "test.calls_C_int", [`Override "test.C.method"];
`Function "test.calls_C_str", [`Override "test.C.method"];
`Method "test.C.$class_toplevel", [];
`Method "test.D.$class_toplevel", [];
]
let test_construction_reverse context =
assert_reverse_call_graph
~context
{|
class Foo:
def __init__(self):
pass
def bar(self):
return 10
def qux(self):
return self.bar()
|}
~expected:[`Method "test.Foo.bar", [`Method "test.Foo.qux"]];
assert_reverse_call_graph
~context
{|
class Foo:
def __init__(self):
pass
def baz(self):
return self.bar()
def qux(self):
return self.bar()
def bar(self):
return self.qux()
|}
~expected:
[
`Method "test.Foo.bar", [`Method "test.Foo.qux"; `Method "test.Foo.baz"];
`Method "test.Foo.qux", [`Method "test.Foo.bar"];
]
let test_type_collection context =
let assert_type_collection source ~handle ~expected =
let source, environment =
let project = ScratchProject.setup ~context [handle, source] in
let { ScratchProject.BuiltTypeEnvironment.type_environment = environment; _ } =
ScratchProject.build_type_environment project
in
let source =
AstEnvironment.ReadOnly.get_processed_source
(TypeEnvironment.ReadOnly.ast_environment environment)
(Reference.create (String.chop_suffix_exn handle ~suffix:".py"))
|> fun option -> Option.value_exn option
in
source, environment
in
let defines =
Preprocessing.defines ~include_toplevels:true source
|> List.map ~f:(fun { Node.value; _ } -> value)
in
let { Define.signature = { name; _ }; body = statements; _ } = List.nth_exn defines 2 in
let lookup =
TypeEnvironment.ReadOnly.get_local_annotations environment name
|> fun value -> Option.value_exn value
in
let test_expect (node_id, statement_index, test_expression, expected_type) =
let statement_key = [%hash: int * int] (node_id, statement_index) in
let annotation_store =
LocalAnnotationMap.ReadOnly.get_precondition lookup ~statement_key
|> fun value -> Option.value_exn value
in
let global_resolution = TypeEnvironment.ReadOnly.global_resolution environment in
let resolution =
TypeCheck.resolution global_resolution ~annotation_store (module TypeCheck.DummyContext)
in
let statement = List.nth_exn statements statement_index in
Visit.collect_calls_and_names statement
|> List.filter ~f:Expression.has_identifier_base
|> List.hd_exn
|> fun expression ->
if String.equal (Expression.show expression) test_expression then
match Resolution.resolve_expression_to_type resolution expression with
| Type.Callable { Type.Callable.kind = Type.Callable.Named callable_type; _ } ->
assert_equal expected_type (Reference.show callable_type)
| _ -> assert false
in
List.iter expected ~f:test_expect
in
assert_type_collection
{|
class A:
def foo(self) -> int:
return 1
class B:
def foo(self) -> int:
return 2
class X:
def caller(self):
a = A()
a.foo()
a = B()
a.foo()
|}
~handle:"test1.py"
~expected:
[4, 1, "$local_0$a.foo.(...)", "test1.A.foo"; 4, 3, "$local_0$a.foo.(...)", "test1.B.foo"];
assert_type_collection
{|
class A:
def foo(self) -> int:
return 1
class B:
def foo(self) -> A:
return A()
class X:
def caller(self):
a = B().foo().foo()
|}
~handle:"test2.py"
~expected:[4, 0, "$local_0$a.foo.(...).foo.(...)", "test2.A.foo"]
let test_prune_callables _ =
let assert_pruned
~callgraph
~overrides
~project_callables
~expected_callables
~expected_dependencies
=
let create name =
if String.is_prefix ~prefix:"O|" (Reference.show name) then
Target.create_override (String.drop_prefix (Reference.show name) 2 |> Reference.create)
else
Target.create_method name
in
let callgraph =
List.map callgraph ~f:(fun (key, values) ->
( Target.create_method (Reference.create key),
List.map values ~f:(fun value -> create (Reference.create value)) ))
|> CallGraph.WholeProgramCallGraph.of_alist_exn
in
let overrides =
List.map overrides ~f:(fun (key, values) ->
( Target.create_method (Reference.create key),
List.map values ~f:(fun value -> Reference.create value) ))
|> OverrideGraph.Heap.of_alist_exn
in
let project_callables =
List.map ~f:(fun name -> name |> Reference.create |> Target.create_method) project_callables
in
let overrides = DependencyGraph.Reversed.from_overrides overrides in
let dependencies =
DependencyGraph.Reversed.from_call_graph callgraph
|> DependencyGraph.Reversed.disjoint_union overrides
in
let {
DependencyGraph.Reversed.reverse_dependency_graph = actual_dependencies;
callables_kept = actual_callables;
}
=
DependencyGraph.Reversed.prune dependencies ~callables_to_analyze:project_callables
in
let actual_dependencies = DependencyGraph.Reversed.to_target_graph actual_dependencies in
assert_equal
~cmp:(List.equal Target.equal)
~printer:(List.to_string ~f:Target.show_pretty)
(List.map expected_callables ~f:(fun callable -> create (Reference.create callable)))
actual_callables;
assert_equal
~cmp:
(List.equal (fun (left_key, left_values) (right_key, right_values) ->
Target.equal left_key right_key && List.equal Target.equal left_values right_values))
~printer:(fun graph ->
graph
|> List.map ~f:(fun (key, values) ->
Format.asprintf
"%a -> %s"
Target.pp_pretty
key
(List.to_string values ~f:Target.show_pretty))
|> String.concat ~sep:"\n")
(List.map expected_dependencies ~f:(fun (key, values) ->
( create (Reference.create key),
List.map values ~f:(fun value -> create (Reference.create value)) )))
(Target.Map.Tree.to_alist actual_dependencies)
in
assert_pruned
~callgraph:
["a.foo", ["external.bar"]; "external.bar", []; "external.test.test_bar", ["external.bar"]]
~overrides:[]
~project_callables:["a.foo"]
~expected_callables:["a.foo"; "external.bar"]
~expected_dependencies:["a.foo", ["external.bar"]; "external.bar", []];
assert_pruned
~callgraph:
[
"a.foo", ["external.bar"];
"external.bar", ["external.baz"];
"external.baz", [];
"external.test.test_baz", ["external.baz"];
"external.test.test_bar", ["external.bar"];
]
~overrides:[]
~project_callables:["a.foo"]
~expected_callables:["a.foo"; "external.bar"; "external.baz"]
~expected_dependencies:
["a.foo", ["external.bar"]; "external.bar", ["external.baz"]; "external.baz", []];
assert_pruned
~callgraph:
[
"a.foo", ["external.bar"];
"external.bar", ["O|external.C.m"];
"external.C.m", [];
"external.D.m", ["external.called_by_override"];
"external.called_by_override", [];
"external.unrelated", [];
]
~overrides:["external.C.m", ["external.D"]; "external.D.m", []]
~project_callables:["a.foo"]
~expected_callables:
[
"a.foo";
"external.bar";
"O|external.C.m";
"external.C.m";
"O|external.D.m";
"external.D.m";
"external.called_by_override";
]
~expected_dependencies:
[
"a.foo", ["external.bar"];
"external.bar", ["O|external.C.m"];
"external.called_by_override", [];
"external.C.m", [];
"external.D.m", ["external.called_by_override"];
"O|external.C.m", ["O|external.D.m"; "external.C.m"];
"O|external.D.m", ["external.D.m"];
];
The calls go away if we do n't have the override between C and D.
assert_pruned
~callgraph:
[
"a.foo", ["external.bar"];
"external.bar", ["external.C.m"];
"external.C.m", [];
"external.D.m", ["external.called_by_override"];
"external.called_by_override", [];
"external.unrelated", [];
]
~overrides:[]
~project_callables:["a.foo"]
~expected_callables:["a.foo"; "external.bar"; "external.C.m"]
~expected_dependencies:
["a.foo", ["external.bar"]; "external.bar", ["external.C.m"]; "external.C.m", []];
assert_pruned
~callgraph:
[
"a.foo", ["O|external.C.m"];
"external.C.m", [];
"external.D.m", [];
"external.E.m", ["external.called_by_override"];
"external.called_by_override", [];
"external.unrelated", [];
]
~overrides:["external.C.m", ["external.D"]; "external.D.m", ["external.E"]]
~project_callables:["a.foo"]
~expected_callables:
[
"a.foo";
"O|external.C.m";
"external.C.m";
"O|external.D.m";
"external.D.m";
"O|external.E.m";
"external.E.m";
"external.called_by_override";
]
~expected_dependencies:
[
"a.foo", ["O|external.C.m"];
"external.called_by_override", [];
"external.C.m", [];
"external.D.m", [];
"external.E.m", ["external.called_by_override"];
"O|external.C.m", ["O|external.D.m"; "external.C.m"];
"O|external.D.m", ["O|external.E.m"; "external.D.m"];
"O|external.E.m", ["external.E.m"];
];
assert_pruned
~callgraph:
[
"a.foo", ["external.a"];
"external.a", ["external.b"];
"external.b", ["external.c"];
"external.c", ["external.a"];
"external.d", ["external.e"];
"external.e", ["external.f"];
"external.f", ["external.d"];
]
~overrides:[]
~project_callables:["a.foo"]
~expected_callables:["a.foo"; "external.a"; "external.b"; "external.c"]
~expected_dependencies:
[
"a.foo", ["external.a"];
"external.a", ["external.b"];
"external.b", ["external.c"];
"external.c", ["external.a"];
];
()
let () =
Scheduler.Daemon.check_entry_point ();
"callGraph"
>::: [
"type_collection" >:: test_type_collection;
"build" >:: test_construction;
"build_reverse" >:: test_construction_reverse;
"prune_callables" >:: test_prune_callables;
]
|> Test.run
|
0df4022e5a80bdc6b7889ed6ee1a1a11a01b186a58c55e3fdbd1fb1a07ca8f0c | DanielG/cabal-helper | Parsec.hs | cabal - helper : Simple interface to Cabal 's configuration state
Copyright ( C ) 2018 < >
--
SPDX - License - Identifier : Apache-2.0
--
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- -2.0
# LANGUAGE CPP #
module CabalHelper.Compiletime.Compat.Parsec
( absorbParsecFailure
, eitherParsec
) where
#if MIN_VERSION_Cabal(2,5,0)
import Distribution.Parsec
#else
import qualified Distribution.Compat.ReadP as Dist
import Distribution.Text
#endif
-- | Extract the successfully parsed value, aborting with 'error' (tagged with
-- the given context string) when parsing failed.
absorbParsecFailure :: String -> Either String a -> a
absorbParsecFailure ctx =
    either (\err -> error $ "Error parsing in '"++ctx++"': " ++ err) id
#if !MIN_VERSION_Cabal(2,5,0)
-- | Compatibility shim for Cabal < 2.5: parse a value via the
-- 'Distribution.Text' ReadP parser, succeeding only when exactly one parse
-- consumes the entire input; otherwise return the (shown) input as the error.
eitherParsec :: Text t => String -> Either String t
eitherParsec i =
    case filter ((=="") . snd) $ Dist.readP_to_S parse i of
      (a,""):[] -> Right a
      _ -> Left $ show i
#endif
| null | https://raw.githubusercontent.com/DanielG/cabal-helper/0e9df088226d80669dd0882ed743bca871dce61c/src/CabalHelper/Compiletime/Compat/Parsec.hs | haskell |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0 | cabal - helper : Simple interface to Cabal 's configuration state
Copyright ( C ) 2018 < >
SPDX - License - Identifier : Apache-2.0
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
# LANGUAGE CPP #
module CabalHelper.Compiletime.Compat.Parsec
( absorbParsecFailure
, eitherParsec
) where
#if MIN_VERSION_Cabal(2,5,0)
import Distribution.Parsec
#else
import qualified Distribution.Compat.ReadP as Dist
import Distribution.Text
#endif
absorbParsecFailure :: String -> Either String a -> a
absorbParsecFailure _ (Right x) = x
absorbParsecFailure ctx (Left err) =
error $ "Error parsing in '"++ctx++"': " ++ err
#if !MIN_VERSION_Cabal(2,5,0)
eitherParsec :: Text t => String -> Either String t
eitherParsec i =
case filter ((=="") . snd) $ Dist.readP_to_S parse i of
(a,""):[] -> Right a
_ -> Left $ show i
#endif
|
f20291496da580ee2e7db9a547c2b5256b5d42c4981b97e9a71c6b0613fda2b2 | LightTable/LightTable | navigate.cljs | (ns lt.objs.sidebar.navigate
"Provide sidebar for finding and opening files"
(:require [lt.object :as object]
[lt.objs.workspace :as workspace]
[lt.objs.context :as ctx]
[lt.objs.sidebar.command :as cmd]
[lt.objs.files :as files]
[lt.objs.notifos :as notifos]
[lt.objs.keyboard :as keyboard]
[lt.objs.opener :as opener]
[lt.objs.sidebar :as sidebar]
[lt.util.dom :as dom]
[lt.objs.thread]
[lt.util.load :as load]
[singultus.core :as crate]
[singultus.binding :refer [bound subatom]])
(:require-macros [lt.macros :refer [behavior defui background]]))
(defn file-filters [f]
(re-seq files/ignore-pattern f))
(def populate-bg (background (fn [obj-id {:keys [lim pattern ws]}]
(let [fs (js/require "fs")
fpath (js/require "path")
walkdir (js/require (str js/ltpath "/core/lighttable/background/walkdir2.js"))
grab-files (fn [all-files folder]
(let [root-length (inc (count (.dirname fpath folder)))
walked (walkdir folder (js-obj "filter" (js/RegExp. pattern)
"limit" lim))]
(.concat all-files (.map (.-paths walked)
#(js-obj "full" %
"rel" (subs % root-length))))))
all-files (.reduce (to-array (:folders ws)) grab-files (array))
other-files (.map (to-array (:files ws)) #(js-obj "full" % "rel" (.basename fpath %)))
final (.concat all-files other-files)]
(js/_send obj-id :workspace-files final)
))))
(declare sidebar-navigate)
(behavior ::workspace-files
:triggers #{:workspace-files}
:reaction (fn [this files]
(object/merge! this {:files (js->clj files :keywordize-keys true)})
(object/raise (:filter-list @this) :refresh!)
))
(behavior ::populate-on-ws-update
:triggers #{:updated :refresh}
:debounce 150
:reaction (fn [ws]
(populate-bg sidebar-navigate {:lim (dec (:file-limit @sidebar-navigate))
:pattern (.-source files/ignore-pattern)
:ws (workspace/serialize @ws)})))
(behavior ::watched.create
:triggers #{:watched.create}
:reaction (fn [ws path]
(when-not (file-filters (files/basename path))
(let [ws-parent (files/parent (first (filter #(= 0 (.indexOf path %)) (:folders @ws))))
rel-length (inc (count ws-parent))]
(object/update! sidebar-navigate [:files] conj {:full path :rel (subs path rel-length)})
(object/raise (:filter-list @sidebar-navigate) :refresh!)))))
(behavior ::watched.delete
:triggers #{:watched.delete}
:reaction (fn [ws path]
;;TODO: this is terribly inefficient
(object/update! sidebar-navigate [:files] #(remove (fn [x] (= 0 (.indexOf (:full x) path))) %))
(object/raise (:filter-list @sidebar-navigate) :refresh!)))
(behavior ::focus!
:triggers #{:focus!}
:reaction (fn [this]
(object/raise (:filter-list @this) :focus!)
))
(behavior ::focus-on-show
:triggers #{:show}
:reaction (fn [this]
(object/raise this :focus!)))
(behavior ::open-on-select
:triggers #{:select}
:reaction (fn [this cur]
(object/raise opener/opener :open! (:full cur))))
(behavior ::escape!
:triggers #{:escape!}
:reaction (fn [this]
(cmd/exec! :escape-navigate)
(cmd/exec! :focus-last-editor)))
(behavior ::pop-transient-on-select
:triggers #{:selected}
:reaction (fn [this]
(object/raise sidebar/rightbar :close!)))
(behavior ::set-file-limit
:triggers #{:object.instant}
:type :user
:desc "Navigate: set maximum number of indexed files"
:params [{:label "Number"
:example 8000}]
:reaction (fn [this n]
(object/merge! this {:file-limit n})))
(object/object* ::sidebar.navigate
:tags #{:navigator}
:label "navigate"
:order -3
:selected 0
:files []
:file-limit 8000
:search ""
:init (fn [this]
(let [list (cmd/filter-list {:key :rel
:transform #(str "<h2>" (files/basename %) "</h2><p>" %3 "</p>")
:items (subatom this :files)
:placeholder "file"})]
(object/add-tags list [:navigate.selector])
(object/merge! this {:filter-list list})
[:div.navigate
(object/->content list)
]
)))
(def sidebar-navigate (object/create ::sidebar.navigate))
(sidebar/add-item sidebar/rightbar sidebar-navigate)
(cmd/command {:command :navigate-workspace
:desc "Navigate: open navigate"
:exec (fn []
(object/raise sidebar/rightbar :toggle sidebar-navigate {:transient? false})
)})
(cmd/command {:command :navigate-workspace-transient
:desc "Navigate: open navigate transient"
:hidden true
:exec (fn []
(object/raise sidebar/rightbar :toggle sidebar-navigate {:transient? true})
)})
(cmd/command {:command :escape-navigate
:desc "Navigate: exit navigate"
:hidden true
:exec (fn []
(cmd/exec! :close-sidebar)
(cmd/exec! :focus-last-editor))})
| null | https://raw.githubusercontent.com/LightTable/LightTable/57f861ae5b33d21ef8c7d064dd026a2b1a98fa87/src/lt/objs/sidebar/navigate.cljs | clojure | TODO: this is terribly inefficient | (ns lt.objs.sidebar.navigate
"Provide sidebar for finding and opening files"
(:require [lt.object :as object]
[lt.objs.workspace :as workspace]
[lt.objs.context :as ctx]
[lt.objs.sidebar.command :as cmd]
[lt.objs.files :as files]
[lt.objs.notifos :as notifos]
[lt.objs.keyboard :as keyboard]
[lt.objs.opener :as opener]
[lt.objs.sidebar :as sidebar]
[lt.util.dom :as dom]
[lt.objs.thread]
[lt.util.load :as load]
[singultus.core :as crate]
[singultus.binding :refer [bound subatom]])
(:require-macros [lt.macros :refer [behavior defui background]]))
(defn file-filters [f]
(re-seq files/ignore-pattern f))
(def populate-bg (background (fn [obj-id {:keys [lim pattern ws]}]
(let [fs (js/require "fs")
fpath (js/require "path")
walkdir (js/require (str js/ltpath "/core/lighttable/background/walkdir2.js"))
grab-files (fn [all-files folder]
(let [root-length (inc (count (.dirname fpath folder)))
walked (walkdir folder (js-obj "filter" (js/RegExp. pattern)
"limit" lim))]
(.concat all-files (.map (.-paths walked)
#(js-obj "full" %
"rel" (subs % root-length))))))
all-files (.reduce (to-array (:folders ws)) grab-files (array))
other-files (.map (to-array (:files ws)) #(js-obj "full" % "rel" (.basename fpath %)))
final (.concat all-files other-files)]
(js/_send obj-id :workspace-files final)
))))
(declare sidebar-navigate)
(behavior ::workspace-files
:triggers #{:workspace-files}
:reaction (fn [this files]
(object/merge! this {:files (js->clj files :keywordize-keys true)})
(object/raise (:filter-list @this) :refresh!)
))
(behavior ::populate-on-ws-update
:triggers #{:updated :refresh}
:debounce 150
:reaction (fn [ws]
(populate-bg sidebar-navigate {:lim (dec (:file-limit @sidebar-navigate))
:pattern (.-source files/ignore-pattern)
:ws (workspace/serialize @ws)})))
(behavior ::watched.create
:triggers #{:watched.create}
:reaction (fn [ws path]
(when-not (file-filters (files/basename path))
(let [ws-parent (files/parent (first (filter #(= 0 (.indexOf path %)) (:folders @ws))))
rel-length (inc (count ws-parent))]
(object/update! sidebar-navigate [:files] conj {:full path :rel (subs path rel-length)})
(object/raise (:filter-list @sidebar-navigate) :refresh!)))))
(behavior ::watched.delete
:triggers #{:watched.delete}
:reaction (fn [ws path]
(object/update! sidebar-navigate [:files] #(remove (fn [x] (= 0 (.indexOf (:full x) path))) %))
(object/raise (:filter-list @sidebar-navigate) :refresh!)))
(behavior ::focus!
:triggers #{:focus!}
:reaction (fn [this]
(object/raise (:filter-list @this) :focus!)
))
(behavior ::focus-on-show
:triggers #{:show}
:reaction (fn [this]
(object/raise this :focus!)))
(behavior ::open-on-select
:triggers #{:select}
:reaction (fn [this cur]
(object/raise opener/opener :open! (:full cur))))
(behavior ::escape!
:triggers #{:escape!}
:reaction (fn [this]
(cmd/exec! :escape-navigate)
(cmd/exec! :focus-last-editor)))
(behavior ::pop-transient-on-select
:triggers #{:selected}
:reaction (fn [this]
(object/raise sidebar/rightbar :close!)))
(behavior ::set-file-limit
:triggers #{:object.instant}
:type :user
:desc "Navigate: set maximum number of indexed files"
:params [{:label "Number"
:example 8000}]
:reaction (fn [this n]
(object/merge! this {:file-limit n})))
(object/object* ::sidebar.navigate
:tags #{:navigator}
:label "navigate"
:order -3
:selected 0
:files []
:file-limit 8000
:search ""
:init (fn [this]
(let [list (cmd/filter-list {:key :rel
:transform #(str "<h2>" (files/basename %) "</h2><p>" %3 "</p>")
:items (subatom this :files)
:placeholder "file"})]
(object/add-tags list [:navigate.selector])
(object/merge! this {:filter-list list})
[:div.navigate
(object/->content list)
]
)))
(def sidebar-navigate (object/create ::sidebar.navigate))
(sidebar/add-item sidebar/rightbar sidebar-navigate)
(cmd/command {:command :navigate-workspace
:desc "Navigate: open navigate"
:exec (fn []
(object/raise sidebar/rightbar :toggle sidebar-navigate {:transient? false})
)})
(cmd/command {:command :navigate-workspace-transient
:desc "Navigate: open navigate transient"
:hidden true
:exec (fn []
(object/raise sidebar/rightbar :toggle sidebar-navigate {:transient? true})
)})
(cmd/command {:command :escape-navigate
:desc "Navigate: exit navigate"
:hidden true
:exec (fn []
(cmd/exec! :close-sidebar)
(cmd/exec! :focus-last-editor))})
|
398945556887d3115fb300b9cf3c2aa17676fc33aee5da958db29c73d008a196 | dvcrn/markright | codemirror.cljs | (ns markright.components.codemirror
(:require [om.next :as om :refer-macros [defui]]
[om.dom :as dom :include-macros true]
[goog.dom :as gdom]))
(defonce local-state (atom {}))
(defn fill-codemirror [_]
(let [cm (aget (. js/document (getElementsByClassName "CodeMirror")) 0)
cmg (aget (. js/document (getElementsByClassName "CodeMirror-gutters")) 0)
h (.-innerHeight js/window)]
(.setAttribute cm "style" (str "height:" h "px;"))
(.setAttribute cmg "style" (str "height:" h "px;"))))
(defui CodemirrorComponent
Object
(render [this]
(let [{:keys [app/force-overwrite app/text]} (om/props this)]
;; Ignore overwriting if force-overwrite is not true
;; This is because the cursor would jump if we overwrite
;; the entire thing with every keypress. Not good, no no
(if force-overwrite
(do
(.setValue (.getDoc (@local-state :codemirror)) text)
((@local-state :overwrite-callback)))))
(dom/div #js {:id "codemirror-target"}))
(componentDidMount [this]
(let [codemirror
(js/CodeMirror (gdom/getElement "codemirror-target")
#js {:matchBrackets true
:mode "spell-checker"
:backdrop "gfm"
:autoCloseBrackets true
:lineWrapping true
:lineNumbers true
})]
(swap! local-state assoc :codemirror codemirror)
(let [{:keys [app/text text-callback overwrite-callback]} (om/props this)]
(swap! local-state assoc :overwrite-callback overwrite-callback)
(.setValue (.getDoc codemirror) text)
(.on codemirror "change"
#(text-callback (.getValue codemirror)))))
(.addEventListener js/window "resize" fill-codemirror)
(fill-codemirror nil))
(componentWillUnmount [this]
(.removeEventListener js/window "resize" fill-codemirror)))
(def codemirror (om/factory CodemirrorComponent))
| null | https://raw.githubusercontent.com/dvcrn/markright/531a47524474d5c2945148e33d397d0c70171cb5/src/cljs/markright/components/codemirror.cljs | clojure | Ignore overwriting if force-overwrite is not true
This is because the cursor would jump if we overwrite
the entire thing with every keypress. Not good, no no | (ns markright.components.codemirror
(:require [om.next :as om :refer-macros [defui]]
[om.dom :as dom :include-macros true]
[goog.dom :as gdom]))
(defonce local-state (atom {}))
(defn fill-codemirror [_]
(let [cm (aget (. js/document (getElementsByClassName "CodeMirror")) 0)
cmg (aget (. js/document (getElementsByClassName "CodeMirror-gutters")) 0)
h (.-innerHeight js/window)]
(.setAttribute cm "style" (str "height:" h "px;"))
(.setAttribute cmg "style" (str "height:" h "px;"))))
(defui CodemirrorComponent
Object
(render [this]
(let [{:keys [app/force-overwrite app/text]} (om/props this)]
(if force-overwrite
(do
(.setValue (.getDoc (@local-state :codemirror)) text)
((@local-state :overwrite-callback)))))
(dom/div #js {:id "codemirror-target"}))
(componentDidMount [this]
(let [codemirror
(js/CodeMirror (gdom/getElement "codemirror-target")
#js {:matchBrackets true
:mode "spell-checker"
:backdrop "gfm"
:autoCloseBrackets true
:lineWrapping true
:lineNumbers true
})]
(swap! local-state assoc :codemirror codemirror)
(let [{:keys [app/text text-callback overwrite-callback]} (om/props this)]
(swap! local-state assoc :overwrite-callback overwrite-callback)
(.setValue (.getDoc codemirror) text)
(.on codemirror "change"
#(text-callback (.getValue codemirror)))))
(.addEventListener js/window "resize" fill-codemirror)
(fill-codemirror nil))
(componentWillUnmount [this]
(.removeEventListener js/window "resize" fill-codemirror)))
(def codemirror (om/factory CodemirrorComponent))
|
796b6b7effa1f2d5eec3ec594f34c1740010d8291fee548c8e2dd25e4e246f2d | alexandergunnarson/quantum | logic.cljc | (ns
^{:doc "Logic-related functions. fn-not, fn-and, splice-or,
ifn, whenf1, rcomp, fn->, condpc, and the like. Extremely useful
and used everywhere in the quantum library."
:attribution "alexandergunnarson"}
quantum.core.logic
(:refer-clojure :exclude
[= and not or ifs
if-let when-let])
(:require
[clojure.core :as core]
[quantum.untyped.core.fn :as ufn
:refer [fn1 fn-> fn->> fn']]
[quantum.untyped.core.form.evaluate
:refer [case-env]]
[quantum.untyped.core.logic :as u]
[quantum.untyped.core.vars :as var
:refer [defalias defaliases]])
#?(:cljs
(:require-macros
[quantum.core.logic :as self
:refer [fn-not]])))
; TODO: ; cond-not, for :pre
Java ` switch ` is implemented using an array and then points to the code .
; Java String `switch` is implemented using a map8
; not 1 0 ; complement
and 0 0 0 1 ; conjunction
nand 1 1 1 0 ; stroke
or 0 1 1 1 ; disjunction
nor 1 0 0 0 ; 's arrow
; xor 0 1 1 0
xnor 1 0 0 1
implies ? 1 1 0 1
(defalias u/default)
;; ===== Logical operators ===== ;;
(defaliases u
= ref=
not
#?@(:clj
[and nand
or nor
xor xnor
implies?]))
;; ===== Function-logical operators ===== ;;
#?(:clj
(defaliases u
fn= fn-not=
fn-not
fn-and fn-nand
fn-or fn-nor
fn-xor fn-xnor
fn-implies?))
;___________________________________________________________________________________________________________________________________
;==================================================={ BOOLEANS + CONDITIONALS }=====================================================
;==================================================={ }=====================================================
; difference = (and a (not b))
; TODO maybe eliminate `volatile!`?
#?(:clj
(defmacro some-but-not-more-than-n
"`some-but-not-more-than-n` where `n`=1 is equivalent to
`(and (or ...) (not (and ...)))`. However, it performs
one O(n) check rather than two."
[n & args]
(assert (integer? n) {:n n})
`(let [and?# (volatile! true)]
(and (or ~@(take n args) (vreset! and?# false) ~@(drop n args))
(or (not @and?#) (not (and ~@(drop n args))))))))
#?(:clj (defmacro exactly-1 [& args] `(some-but-not-more-than-n 1 ~@args)))
TODO ` exactly - n `
(def falsey? (some-fn false? nil?))
(def truthy? (fn-not falsey?))
(defn splice-or [obj compare-fn & coll]
(some #_seq-or (partial compare-fn obj) coll))
(defn splice-and [obj compare-fn & coll]
(every? #_seq-and (partial compare-fn obj) coll))
(defn bool
{:todo ["Deprecate or incorporate"]}
[v]
(cond
(= v 0) false
(= v 1) true
:else
(throw (#?(:clj IllegalArgumentException.
:cljs js/Error.)
(str "Value not booleanizable: " v)))))
#?(:clj
(defmacro cond*
"`cond` meets `case`.
Like `case`, takes test pairs with an optional trailing (unpaired) clause.
If all preds are compile-time constants, transforms into `case`.
Otherwise behaves more like `cond`, evaluating each test in order:
If a pred matches,
Returns the corresponding expression
Else
If there is a trailing (unpaired) clause,
That clause is returned, like the last arg to `case`.
Else
Throws an error that no clause matches, like `case`."
[& args]
(throw (ex-info "TODO" nil))))
;; ===== `cond(f|c|p)` ===== ;;
#?(:clj (defaliases u ifs condf condf1 condf& condfc is? condpc))
;; ===== `if(n|c|p)` ===== ;;
#?(:clj
(defaliases u
ifn ifn-> ifn->> ifn1
ifc ifc-> ifc->> ifc1
ifp ifp-> ifp->> ifp1))
;; ===== `when(f|c|p)` ===== ;;
#?(:clj
(defaliases u
whenf whenf-> whenf->> whenf1
whenc whenc-> whenc->> whenc1
whenp whenp-> whenp->> whenp1))
= = = = = = = = CONDITIONAL LET BINDINGS = = = = = = = =
#?(:clj
(defmacro if-let-base
{:attribution "alexandergunnarson"}
([cond-sym bindings then]
`(if-let-base ~cond-sym ~bindings ~then nil))
([cond-sym [bnd expr & more] then else]
`(let [temp# ~expr ~bnd temp#]
(~cond-sym temp#
~(if (seq more)
`(if-let-base ~cond-sym [~@more] ~then ~else)
then)
~else)))))
#?(:clj
(defmacro if-let
"Like `if-let`, but multiple bindings can be used."
[& xs] `(if-let-base if ~@xs)))
#?(:clj
(defmacro if-not-let
"if : if-let :: if-not : if-not-let. All conditions must be false."
[& xs] `(if-let-base if-not ~@xs)))
#?(:clj
(defmacro when-let-base
{:attribution "alexandergunnarson"}
[cond-sym [bnd expr & more] & body]
`(let [temp# ~expr ~bnd temp#]
(~cond-sym temp#
~(if (seq more)
`(when-let-base ~cond-sym [~@more] ~@body)
`(do ~@body))))))
#?(:clj
(defmacro when-let
"Like `when-let`, but multiple bindings can be used."
[& xs] `(if-let-base when ~@xs)))
#?(:clj
(defmacro when-not-let
"when : when-let :: when-not : when-not-let. All conditions must be false."
[& xs] `(when-let-base when-not ~@xs)))
#?(:clj
(defmacro cond-let
"Transforms into a series of nested `if-let` statements."
{:attribution "alexandergunnarson"}
([] nil) ; no else
([else] else)
([bindings then & more]
`(if-let ~bindings
~then
(cond-let ~@more)))))
;; ===== `coll-(or|and)` ===== ;;
#?(:clj (defaliases u coll-or coll-and))
| null | https://raw.githubusercontent.com/alexandergunnarson/quantum/0c655af439734709566110949f9f2f482e468509/src/quantum/core/logic.cljc | clojure | TODO: ; cond-not, for :pre
Java String `switch` is implemented using a map8
not 1 0 ; complement
conjunction
stroke
disjunction
's arrow
xor 0 1 1 0
===== Logical operators ===== ;;
===== Function-logical operators ===== ;;
___________________________________________________________________________________________________________________________________
==================================================={ BOOLEANS + CONDITIONALS }=====================================================
==================================================={ }=====================================================
difference = (and a (not b))
TODO maybe eliminate `volatile!`?
===== `cond(f|c|p)` ===== ;;
===== `if(n|c|p)` ===== ;;
===== `when(f|c|p)` ===== ;;
no else
===== `coll-(or|and)` ===== ;; | (ns
^{:doc "Logic-related functions. fn-not, fn-and, splice-or,
ifn, whenf1, rcomp, fn->, condpc, and the like. Extremely useful
and used everywhere in the quantum library."
:attribution "alexandergunnarson"}
quantum.core.logic
(:refer-clojure :exclude
[= and not or ifs
if-let when-let])
(:require
[clojure.core :as core]
[quantum.untyped.core.fn :as ufn
:refer [fn1 fn-> fn->> fn']]
[quantum.untyped.core.form.evaluate
:refer [case-env]]
[quantum.untyped.core.logic :as u]
[quantum.untyped.core.vars :as var
:refer [defalias defaliases]])
#?(:cljs
(:require-macros
[quantum.core.logic :as self
:refer [fn-not]])))
Java ` switch ` is implemented using an array and then points to the code .
xnor 1 0 0 1
implies ? 1 1 0 1
(defalias u/default)
(defaliases u
= ref=
not
#?@(:clj
[and nand
or nor
xor xnor
implies?]))
#?(:clj
(defaliases u
fn= fn-not=
fn-not
fn-and fn-nand
fn-or fn-nor
fn-xor fn-xnor
fn-implies?))
#?(:clj
(defmacro some-but-not-more-than-n
"`some-but-not-more-than-n` where `n`=1 is equivalent to
`(and (or ...) (not (and ...)))`. However, it performs
one O(n) check rather than two."
[n & args]
(assert (integer? n) {:n n})
`(let [and?# (volatile! true)]
(and (or ~@(take n args) (vreset! and?# false) ~@(drop n args))
(or (not @and?#) (not (and ~@(drop n args))))))))
#?(:clj (defmacro exactly-1 [& args] `(some-but-not-more-than-n 1 ~@args)))
TODO ` exactly - n `
(def falsey? (some-fn false? nil?))
(def truthy? (fn-not falsey?))
(defn splice-or [obj compare-fn & coll]
(some #_seq-or (partial compare-fn obj) coll))
(defn splice-and [obj compare-fn & coll]
(every? #_seq-and (partial compare-fn obj) coll))
(defn bool
{:todo ["Deprecate or incorporate"]}
[v]
(cond
(= v 0) false
(= v 1) true
:else
(throw (#?(:clj IllegalArgumentException.
:cljs js/Error.)
(str "Value not booleanizable: " v)))))
#?(:clj
(defmacro cond*
"`cond` meets `case`.
Like `case`, takes test pairs with an optional trailing (unpaired) clause.
If all preds are compile-time constants, transforms into `case`.
Otherwise behaves more like `cond`, evaluating each test in order:
If a pred matches,
Returns the corresponding expression
Else
If there is a trailing (unpaired) clause,
That clause is returned, like the last arg to `case`.
Else
Throws an error that no clause matches, like `case`."
[& args]
(throw (ex-info "TODO" nil))))
#?(:clj (defaliases u ifs condf condf1 condf& condfc is? condpc))
#?(:clj
(defaliases u
ifn ifn-> ifn->> ifn1
ifc ifc-> ifc->> ifc1
ifp ifp-> ifp->> ifp1))
#?(:clj
(defaliases u
whenf whenf-> whenf->> whenf1
whenc whenc-> whenc->> whenc1
whenp whenp-> whenp->> whenp1))
= = = = = = = = CONDITIONAL LET BINDINGS = = = = = = = =
#?(:clj
(defmacro if-let-base
{:attribution "alexandergunnarson"}
([cond-sym bindings then]
`(if-let-base ~cond-sym ~bindings ~then nil))
([cond-sym [bnd expr & more] then else]
`(let [temp# ~expr ~bnd temp#]
(~cond-sym temp#
~(if (seq more)
`(if-let-base ~cond-sym [~@more] ~then ~else)
then)
~else)))))
#?(:clj
(defmacro if-let
"Like `if-let`, but multiple bindings can be used."
[& xs] `(if-let-base if ~@xs)))
#?(:clj
(defmacro if-not-let
"if : if-let :: if-not : if-not-let. All conditions must be false."
[& xs] `(if-let-base if-not ~@xs)))
#?(:clj
(defmacro when-let-base
{:attribution "alexandergunnarson"}
[cond-sym [bnd expr & more] & body]
`(let [temp# ~expr ~bnd temp#]
(~cond-sym temp#
~(if (seq more)
`(when-let-base ~cond-sym [~@more] ~@body)
`(do ~@body))))))
#?(:clj
(defmacro when-let
"Like `when-let`, but multiple bindings can be used."
[& xs] `(if-let-base when ~@xs)))
#?(:clj
(defmacro when-not-let
"when : when-let :: when-not : when-not-let. All conditions must be false."
[& xs] `(when-let-base when-not ~@xs)))
#?(:clj
(defmacro cond-let
"Transforms into a series of nested `if-let` statements."
{:attribution "alexandergunnarson"}
([else] else)
([bindings then & more]
`(if-let ~bindings
~then
(cond-let ~@more)))))
#?(:clj (defaliases u coll-or coll-and))
|
299356617851cb7156b883bb5ae0f4ee8cd022d43e2bbe078287b73cbe9d5b68 | squirrel-prover/squirrel-prover | lowEquivSequent.mli | * Equivalence sequents ,
or more accurately global sequents whose conclusion
is a global meta - formula .
or more accurately global sequents whose conclusion
is a global meta-formula. *)
module SE = SystemExpr
(*------------------------------------------------------------------*)
include LowSequent.S with
type hyp_form = Equiv.global_form and
type conc_form = Equiv.global_form
(*------------------------------------------------------------------*)
* { 2 Creation of global sequents }
(** Initialize a sequent with the given components.
At most one hypothesis can be given, which will be named "H":
this is intended to ease simple cases like observational
equivalence goals.
For more general cases, the global meta-formula used as conclusion
can include implications. *)
val init :
env:Env.t->
hint_db:Hint.hint_db ->
?hyp:Equiv.form ->
Equiv.form ->
t
(** Special pretty-printer for initial sequents.
It does not display hypotheses, which might be misleading. *)
val pp_init : Format.formatter -> t -> unit
(*------------------------------------------------------------------*)
* { 2 Misc }
val get_system_pair : t -> SE.pair
val get_system_pair_projs : t -> Term.proj * Term.proj
(*------------------------------------------------------------------*)
* { 2 Utilities for equivalence sequents }
Equivalence sequents are global sequents whose conclusion
is an equivalence atom .
Equivalence sequents are global sequents whose conclusion
is an equivalence atom. *)
val set_equiv_goal : Equiv.equiv -> t -> t
* Get one of the projections of the biframe ,
as a list of terms where diff operators have been fully
eliminated .
@return [ None ] if the conclusion is not an equivalence atom .
as a list of terms where diff operators have been fully
eliminated.
@return [None] if the conclusion is not an equivalence atom. *)
val get_frame : Term.proj -> t -> Equiv.equiv option
val goal_is_equiv : t -> bool
val goal_as_equiv : t -> Equiv.equiv
(*------------------------------------------------------------------*)
* { 2 Trace sequents and reachability goals }
(** Change sequent goal to some reachability atom. *)
val set_reach_goal : Term.term -> t -> t
* Convert a global sequent whose conclusion is a reachability
atom to a trace sequent .
@raise if sequent conclusion is not well - formed .
atom to a trace sequent.
@raise Tactics.soft_failure if sequent conclusion is not well-formed. *)
val to_trace_sequent : t -> LowTraceSequent.t
(*------------------------------------------------------------------*)
* { 2 Automated reasoning }
val query_happens : precise:bool -> t -> Term.term -> bool
| null | https://raw.githubusercontent.com/squirrel-prover/squirrel-prover/3cf8b728424cabfb39cf41eba52d53f02a86f358/src/lowEquivSequent.mli | ocaml | ------------------------------------------------------------------
------------------------------------------------------------------
* Initialize a sequent with the given components.
At most one hypothesis can be given, which will be named "H":
this is intended to ease simple cases like observational
equivalence goals.
For more general cases, the global meta-formula used as conclusion
can include implications.
* Special pretty-printer for initial sequents.
It does not display hypotheses, which might be misleading.
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
* Change sequent goal to some reachability atom.
------------------------------------------------------------------ | * Equivalence sequents ,
or more accurately global sequents whose conclusion
is a global meta - formula .
or more accurately global sequents whose conclusion
is a global meta-formula. *)
module SE = SystemExpr
include LowSequent.S with
type hyp_form = Equiv.global_form and
type conc_form = Equiv.global_form
* { 2 Creation of global sequents }
val init :
env:Env.t->
hint_db:Hint.hint_db ->
?hyp:Equiv.form ->
Equiv.form ->
t
val pp_init : Format.formatter -> t -> unit
* { 2 Misc }
val get_system_pair : t -> SE.pair
val get_system_pair_projs : t -> Term.proj * Term.proj
* { 2 Utilities for equivalence sequents }
Equivalence sequents are global sequents whose conclusion
is an equivalence atom .
Equivalence sequents are global sequents whose conclusion
is an equivalence atom. *)
val set_equiv_goal : Equiv.equiv -> t -> t
* Get one of the projections of the biframe ,
as a list of terms where diff operators have been fully
eliminated .
@return [ None ] if the conclusion is not an equivalence atom .
as a list of terms where diff operators have been fully
eliminated.
@return [None] if the conclusion is not an equivalence atom. *)
val get_frame : Term.proj -> t -> Equiv.equiv option
val goal_is_equiv : t -> bool
val goal_as_equiv : t -> Equiv.equiv
* { 2 Trace sequents and reachability goals }
val set_reach_goal : Term.term -> t -> t
* Convert a global sequent whose conclusion is a reachability
atom to a trace sequent .
@raise if sequent conclusion is not well - formed .
atom to a trace sequent.
@raise Tactics.soft_failure if sequent conclusion is not well-formed. *)
val to_trace_sequent : t -> LowTraceSequent.t
* { 2 Automated reasoning }
val query_happens : precise:bool -> t -> Term.term -> bool
|
e7cb3a59715ee63b483ea82ca19f706c554a618be7386eea629586458f136bdc | TrustInSoft/tis-interpreter | rformat.mli | Modified by TrustInSoft
(**************************************************************************)
(* *)
This file is part of WP plug - in of Frama - C.
(* *)
Copyright ( C ) 2007 - 2015
CEA ( Commissariat a l'energie atomique et aux energies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
open Format
val epsilon : float
val get_time : float array -> float -> int
(** [get_time T t] returns [k] such that [T[k-1] <= t <= T[k]],
[T] is extended with [T[-1]=0] and [T[N]=+oo]. *)
val pp_time : formatter -> float -> unit
* Pretty print time in hour , minutes , seconds , or milliseconds , as appropriate
val pp_time_range : float array -> formatter -> float -> unit
type command =
| CMD of string
| ARG of string * string
| TEXT
val command : string -> command
val pretty : (formatter -> string -> string -> unit) -> formatter -> string -> unit
| null | https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/plugins/wp/rformat.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* [get_time T t] returns [k] such that [T[k-1] <= t <= T[k]],
[T] is extended with [T[-1]=0] and [T[N]=+oo]. | Modified by TrustInSoft
This file is part of WP plug - in of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat a l'energie atomique et aux energies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
open Format
val epsilon : float
val get_time : float array -> float -> int
val pp_time : formatter -> float -> unit
* Pretty print time in hour , minutes , seconds , or milliseconds , as appropriate
val pp_time_range : float array -> formatter -> float -> unit
type command =
| CMD of string
| ARG of string * string
| TEXT
val command : string -> command
val pretty : (formatter -> string -> string -> unit) -> formatter -> string -> unit
|
e2319baab09ec16d0847ef299a5ec34a2cd1e195bc48577ad4bca589865dadc7 | binsec/binsec | loader_pe.ml | (**************************************************************************)
This file is part of BINSEC .
(* *)
Copyright ( C ) 2016 - 2022
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
open Loader_buf
open Loader_types
(** [read_magic t] checks the PE signature of the buffer behind cursor [t]:
    the "MZ" DOS magic at offset 0, then the "PE\0\0" marker at the file
    offset stored in the e_lfanew field (offset 0x3c).  The code relies on
    left-to-right, short-circuiting evaluation of [(&&)]: every [Read.u8]
    advances the cursor, and later seeks/reads only happen when the earlier
    checks succeeded.  On success the cursor ends just after "PE\0\0". *)
let read_magic t =
  (* The DOS header is 0x40 bytes long; anything shorter cannot be PE. *)
  (not (dim t.buffer < 0x40))
  && Read.u8 t = Char.code 'M'
  && Read.u8 t = Char.code 'Z'
  &&
  (seek t 0x3c;
   (* e_lfanew: file offset of the PE header proper. *)
   seek t (Read.u32 t);
   (* Bounds check before reading the 4-byte "PE\0\0" marker. *)
   (not (t.position + 4 > dim t.buffer))
   && Read.u8 t = Char.code 'P'
   && Read.u8 t = Char.code 'E'
   && Read.u8 t = 0x0
   && Read.u8 t = 0x0)
(** [check_magic buffer] tells whether [buffer] carries a valid PE
    signature.  Never raises. *)
let check_magic buffer = read_magic (cursor Machine.LittleEndian buffer)
(** Build a little-endian cursor over [buffer] after validating the PE
    signature; the cursor is left just past the "PE\0\0" marker.
    Fails through [invalid_format] when the signature is missing. *)
let init_cursor buffer =
  let t = cursor Machine.LittleEndian buffer in
  match read_magic t with
  | true -> t
  | false -> invalid_format "No PE magic number"
(* File header *)
(** COFF file header (IMAGE_FILE_HEADER), 20 bytes. *)
type file_header = {
  machine : u16;  (** machine tag, decoded by [arch] *)
  number_of_sections : u16;
  time_date_stamp : u32;
  pointer_to_symbol_table : u32;  (** file offset of the COFF symbol table *)
  number_of_symbols : u32;
  size_of_optional_header : u16;
  characteristics : u16;
}
(** Translate a COFF machine tag into a [Machine.t].  Tags for
    architectures not handled by BINSEC fall back to [Machine.unknown]
    (see the reference list in the comment below). *)
let arch = function
  | 0x014c -> Machine.x86
  | 0x01c0 -> Machine.armv7 Machine.LittleEndian
  (* Recognized but unsupported machine tags, kept for reference:
       0x01c2  ARM Thumb            0x01c4  ARM Thumb-2
       0x01f0  PowerPC              0x01f1  PowerPC (with FPU)
       0x0200  IA64
       0x0166 / 0x0169 / 0x0266 / 0x0366 / 0x0466  MIPS variants
       0xaa64  ARM64 *)
  | 0x8664 -> Machine.amd64
  | _ -> Machine.unknown
(** Read the 20-byte COFF file header at the current cursor position.
    Field order below matches the on-disk layout; each [Read.*] advances
    the cursor. *)
let read_file_header t =
  ensure t 20 "File header truncated";
  let machine = Read.u16 t in
  let number_of_sections = Read.u16 t in
  let time_date_stamp = Read.u32 t in
  let pointer_to_symbol_table = Read.u32 t in
  let number_of_symbols = Read.u32 t in
  let size_of_optional_header = Read.u16 t in
  let characteristics = Read.u16 t in
  {
    machine;
    number_of_sections;
    time_date_stamp;
    pointer_to_symbol_table;
    number_of_symbols;
    size_of_optional_header;
    characteristics;
  }
(* Optional header *)
(** Standard (COFF) fields of the optional header.  [magic] is 0x10b for
    PE32 and 0x20b for PE32+; [base_of_data] only exists in PE32. *)
type standard_fields = {
  magic : u16;
  size_of_code : u32;
  size_of_initialized_data : u32;
  size_of_uninitialized_data : u32;
  address_of_entry_point : u32;  (** RVA; rebase with [rebase] *)
  base_of_code : u32;
  base_of_data : u32 option;  (** [None] for PE32+ images *)
}

(** Windows-specific fields of the optional header.  Fields that are
    64-bit in PE32+ ([image_base], stack/heap sizes) are stored as [u64]
    and zero-extended when parsed from a PE32 image. *)
type windows_fields = {
  image_base : u64;
  section_alignement : u32;
  file_alignement : u32;
  size_of_image : u32;
  size_of_headers : u32;
  checksum : u32;
  subsystem : u16;
  dll_characteristics : u16;
  size_of_stack_reserve : u64;
  size_of_stack_commit : u64;
  size_of_heap_reserve : u64;
  size_of_heap_commit : u64;
  number_of_rva_and_sizes : u32;
}

(** One data-directory entry: an RVA and a byte size. *)
type data_directory = { virtual_address : u32; size : u32 }

(** The 14 meaningful data-directory entries (the architecture and
    reserved slots are checked to be zero at parse time and dropped). *)
type data_directories = {
  export_directory : data_directory;
  import_directory : data_directory;
  resource_directory : data_directory;
  exception_directory : data_directory;
  security_directory : data_directory;
  basereloc_directory : data_directory;
  debug_directory : data_directory;
  globalptr_directory : data_directory;
  tls_directory : data_directory;
  load_config_directory : data_directory;
  bound_import_directory : data_directory;
  iat_directory : data_directory;
  delay_import_directory : data_directory;
  clr_header_directory : data_directory;
}

(** Full optional header: standard + windows fields + data directories. *)
type optional_header = {
  standard_fields : standard_fields;
  windows_fields : windows_fields;
  data_directories : data_directories;
}

(* A parsed PE program: file header plus optional header. *)
type program = file_header * optional_header
let read_standard_fields32 t magic =
ensure t 26 "Standard fields truncated";
let _major_linker_version = Read.u8 t in
let _minor_linker_version = Read.u8 t in
let size_of_code = Read.u32 t in
let size_of_initialized_data = Read.u32 t in
let size_of_uninitialized_data = Read.u32 t in
let address_of_entry_point = Read.u32 t in
let base_of_code = Read.u32 t in
let base_of_data = Some (Read.u32 t) in
{
magic;
size_of_code;
size_of_initialized_data;
size_of_uninitialized_data;
address_of_entry_point;
base_of_code;
base_of_data;
}
let read_standard_fields64 t magic =
ensure t 22 "Standard fields truncated";
let _major_linker_version = Read.u8 t in
let _minor_linker_version = Read.u8 t in
let size_of_code = Read.u32 t in
let size_of_initialized_data = Read.u32 t in
let size_of_uninitialized_data = Read.u32 t in
let address_of_entry_point = Read.u32 t in
let base_of_code = Read.u32 t in
let base_of_data = None in
{
magic;
size_of_code;
size_of_initialized_data;
size_of_uninitialized_data;
address_of_entry_point;
base_of_code;
base_of_data;
}
(** Read the standard fields, dispatching on the optional-header magic:
    0x10b = PE32, 0x20b = PE32+.  Anything else is rejected. *)
let read_standard_fields t =
  ensure t 2 "PE magic number truncated";
  match Read.u16 t with
  | 0x10b as magic -> read_standard_fields32 t magic
  | 0x20b as magic -> read_standard_fields64 t magic
  | _ -> invalid_format "Invalid PE image file"
(** Read the windows-specific fields in their PE32 layout (68 bytes).
    Version fields are consumed to keep the cursor in sync but dropped;
    the Win32-version and loader-flags words must be zero per the spec. *)
let read_windows_fields32 t =
  ensure t 68 "Windows fields truncated";
  let image_base = Read.u32 t in
  let section_alignement = Read.u32 t in
  let file_alignement = Read.u32 t in
  let _major_os_version = Read.u16 t in
  let _minor_os_version = Read.u16 t in
  let _major_image_version = Read.u16 t in
  let _minor_image_version = Read.u16 t in
  let _major_subsystem_version = Read.u16 t in
  let _minor_subsystem_version = Read.u16 t in
  if not (Read.u32 t = 0) then invalid_format "Invalid Win32 version value";
  let size_of_image = Read.u32 t in
  let size_of_headers = Read.u32 t in
  let checksum = Read.u32 t in
  let subsystem = Read.u16 t in
  let dll_characteristics = Read.u16 t in
  (* 32-bit on disk; widened into the shared u64 record fields. *)
  let size_of_stack_reserve = Read.u32 t in
  let size_of_stack_commit = Read.u32 t in
  let size_of_heap_reserve = Read.u32 t in
  let size_of_heap_commit = Read.u32 t in
  if not (Read.u32 t = 0) then invalid_format "Invalid loader flags";
  let number_of_rva_and_sizes = Read.u32 t in
  {
    image_base;
    section_alignement;
    file_alignement;
    size_of_image;
    size_of_headers;
    number_of_rva_and_sizes;
    checksum;
    subsystem;
    dll_characteristics;
    size_of_stack_reserve;
    size_of_stack_commit;
    size_of_heap_reserve;
    size_of_heap_commit;
  }
(** Read the windows-specific fields in their PE32+ layout (88 bytes:
    image base and stack/heap sizes are 64-bit).  Version fields are
    consumed but dropped; the Win32-version and loader-flags words must
    be zero per the spec. *)
let read_windows_fields64 t =
  ensure t 88 "Windows fields truncated";
  let image_base = Read.u64 t in
  let section_alignement = Read.u32 t in
  let file_alignement = Read.u32 t in
  let _major_os_version = Read.u16 t in
  let _minor_os_version = Read.u16 t in
  let _major_image_version = Read.u16 t in
  let _minor_image_version = Read.u16 t in
  let _major_subsystem_version = Read.u16 t in
  let _minor_subsystem_version = Read.u16 t in
  if not (Read.u32 t = 0) then invalid_format "Invalid Win32 version value";
  let size_of_image = Read.u32 t in
  let size_of_headers = Read.u32 t in
  let checksum = Read.u32 t in
  let subsystem = Read.u16 t in
  let dll_characteristics = Read.u16 t in
  let size_of_stack_reserve = Read.u64 t in
  let size_of_stack_commit = Read.u64 t in
  let size_of_heap_reserve = Read.u64 t in
  let size_of_heap_commit = Read.u64 t in
  if not (Read.u32 t = 0) then invalid_format "Invalid loader flags";
  let number_of_rva_and_sizes = Read.u32 t in
  {
    image_base;
    section_alignement;
    file_alignement;
    size_of_image;
    size_of_headers;
    number_of_rva_and_sizes;
    checksum;
    subsystem;
    dll_characteristics;
    size_of_stack_reserve;
    size_of_stack_commit;
    size_of_heap_reserve;
    size_of_heap_commit;
  }
(** Read the windows-specific fields; their layout depends on the
    PE32/PE32+ magic recorded in [standard]. *)
let read_windows_fields standard t =
  let reader =
    match standard.magic with
    | 0x10b -> read_windows_fields32
    | 0x20b -> read_windows_fields64
    | _ -> invalid_format "Invalid PE image file"
  in
  reader t
(** Read one 8-byte data-directory entry (RVA, then size). *)
let read_data_directory t =
  ensure t 8 "Data directory truncated";
  let virtual_address = Read.u32 t in
  let size = Read.u32 t in
  { virtual_address; size }
(** Read the 16 data-directory entries of the optional header
    (16 entries x 8 bytes = 128 bytes overall).  Per the PE/COFF
    specification, the 8th entry (architecture) and the 16th (reserved)
    must be all-zero, and the global-pointer entry must have a zero
    size; those invariants are checked here. *)
let read_data_directories t =
  (* Was [ensure t 96]: the body actually consumes 128 bytes (16
     entries of 8 bytes each), so require the full amount upfront. *)
  ensure t 128 "Data directories truncated";
  let export_directory = read_data_directory t in
  let import_directory = read_data_directory t in
  let resource_directory = read_data_directory t in
  let exception_directory = read_data_directory t in
  let security_directory = read_data_directory t in
  let basereloc_directory = read_data_directory t in
  let debug_directory = read_data_directory t in
  (* Architecture slot: must be zero. *)
  if not (Read.u64 t = 0) then invalid_format "Invalid data directories";
  let globalptr_directory = read_data_directory t in
  if not (globalptr_directory.size = 0) then
    invalid_format "Invalid data directories";
  let tls_directory = read_data_directory t in
  let load_config_directory = read_data_directory t in
  let bound_import_directory = read_data_directory t in
  let iat_directory = read_data_directory t in
  let delay_import_directory = read_data_directory t in
  let clr_header_directory = read_data_directory t in
  (* Reserved slot: must be zero. *)
  if not (Read.u64 t = 0) then invalid_format "Invalid data directories";
  {
    export_directory;
    import_directory;
    resource_directory;
    exception_directory;
    security_directory;
    basereloc_directory;
    debug_directory;
    globalptr_directory;
    tls_directory;
    load_config_directory;
    bound_import_directory;
    iat_directory;
    delay_import_directory;
    clr_header_directory;
  }
(** Parse the whole optional header: standard fields, then windows
    fields (layout chosen from the magic), then the data directories.
    The three reads are cursor-order dependent. *)
let read_optional_header t =
  let standard_fields = read_standard_fields t in
  let windows_fields = read_windows_fields standard_fields t in
  let data_directories = read_data_directories t in
  { standard_fields; windows_fields; data_directories }
(** [rebase o rva] turns a relative virtual address into an absolute
    virtual address using the image base of the optional header [o]. *)
let rebase o i = i + o.windows_fields.image_base
(* Section header *)
(** Section header (IMAGE_SECTION_HEADER), 40 bytes on disk. *)
type section = {
  section_name : string;
  virtual_size : u32;  (** size once mapped (may exceed the raw size) *)
  virtual_address : u32;  (** RVA; rebase with [rebase] *)
  size_of_raw_data : u32;
  pointer_to_raw_data : u32;  (** file offset of the section contents *)
  characteristics : u32;  (** r/w/x flag bits, see [Section.has_flag] *)
}
(** Read the 8-byte section-name field and leave the cursor exactly
    8 bytes past its starting position (the name may be NUL-padded, so
    [Read.fixed_string] can stop short of the full field). *)
let read_section_name t =
  let position = t.position in
  let name = Read.fixed_string t 8 in
  seek t (position + 8);
  name
(** Read the [n]-th (0-based) 40-byte section header.  [optional] is the
    file offset of the optional header, so the section table starts at
    [optional + size_of_optional_header].  Relocation/line-number fields
    are consumed but dropped. *)
let read_section t file optional n =
  seek t (optional + file.size_of_optional_header + (n * 40));
  (* file header + optional header + nbr * section header *)
  ensure t 40 "Section header truncated";
  let section_name = read_section_name t in
  let virtual_size = Read.u32 t in
  let virtual_address = Read.u32 t in
  let size_of_raw_data = Read.u32 t in
  let pointer_to_raw_data = Read.u32 t in
  let _pointer_to_relocations = Read.u32 t in
  let _pointer_to_linenumbers = Read.u32 t in
  let _number_of_relocations = Read.u16 t in
  let _number_of_linenumbers = Read.u16 t in
  let characteristics = Read.u32 t in
  {
    section_name;
    virtual_size;
    virtual_address;
    size_of_raw_data;
    pointer_to_raw_data;
    characteristics;
  }
(** Read every section header advertised by the file header.
    [optional] is the file offset of the optional header. *)
let read_sections t file optional =
  let read i = read_section t file optional i in
  Array.init file.number_of_sections read
(* Internal early-exit marker for [find_section]. *)
exception Found of section

(** [find_section sections f] returns the first section, in array order,
    satisfying [f] — or [None]. *)
let find_section sections f =
  match Array.iter (fun s -> if f s then raise (Found s)) sections with
  | () -> None
  | exception Found s -> Some s
(** Does virtual address [addr] fall inside [section], once the
    section's RVA is rebased on the image base?  Upper bound excluded. *)
let in_section optional (section : section) addr =
  let lo = rebase optional section.virtual_address in
  let hi = lo + section.virtual_size in
  lo <= addr && addr < hi
(** [in_section] lifted to an optional section; [None] never matches. *)
let in_section_opt optional section_opt addr =
  match section_opt with
  | Some section -> in_section optional section addr
  | None -> false
(** Look a section up by its exact name (currently unused). *)
let _find_section_by_name sections name =
  find_section sections (fun s -> String.equal s.section_name name)
(** Look up the section containing virtual address [addr], if any. *)
let find_section_by_addr optional sections addr =
  let contains s = in_section optional s addr in
  find_section sections contains
(* Symbol header *)
(** COFF symbol-table entry (18 bytes on disk; only the fields BINSEC
    cares about are kept). *)
type symbol = {
  symbol_name : string;
  value : u32;
  section_number : u16;
  storage_class : u8;
  number_of_aux_symbols : u8;
}
(** Read an 8-byte COFF symbol-name field.  If its first 4 bytes are
    zero, the next 4 bytes are an offset into the string table at file
    offset [strtab] (total size [strsize]) holding a NUL-terminated long
    name; otherwise the field itself is a short (<= 8 chars) name.
    The cursor always ends exactly 8 bytes past where it started. *)
let read_symbol_name t strtab strsize =
  let position = t.position in
  let name =
    if Read.u32 t = 0 then (
      let n = Read.u32 t in
      seek t (strtab + n);
      Read.zero_string "Unterminated symbol name" t ~maxlen:(strsize - n) ())
    else (
      (* Short name: re-read the 8 bytes as a fixed string. *)
      seek t position;
      Read.fixed_string t 8)
  in
  seek t (position + 8);
  name
(** Read the [n]-th (0-based) 18-byte symbol entry.  [strtab]/[strsize]
    locate the string table for long names (see [read_symbol_name]). *)
let read_symbol t file strtab strsize n =
  seek t (file.pointer_to_symbol_table + (n * 18));
  ensure t 18 "Symbol header truncated";
  let symbol_name = read_symbol_name t strtab strsize in
  let value = Read.u32 t in
  let section_number = Read.u16 t in
  let storage_class = Read.u8 t in
  let number_of_aux_symbols = Read.u8 t in
  { symbol_name; value; section_number; storage_class; number_of_aux_symbols }
(** Read the whole COFF symbol table.  The string table sits right after
    the symbol table (18 bytes per entry); its first u32 is its own
    total size, used to bound long-name reads. *)
let read_symbols t file =
  let strtab = file.pointer_to_symbol_table + (18 * file.number_of_symbols) in
  let strsize =
    seek t strtab;
    Read.u32 t
  in
  seek t file.pointer_to_symbol_table;
  Array.init file.number_of_symbols (read_symbol t file strtab strsize)
(** Loader-generic view of a PE section.  The optional header is carried
    alongside so virtual addresses can be rebased on the image base. *)
module Section = struct
  type t = optional_header * section
  type header = section

  let name (_, s) = s.section_name
  let flag ((_, s) : t) = s.characteristics

  (* raw = file offset of contents, virt = rebased virtual address. *)
  let pos (o, (s : section)) =
    { raw = s.pointer_to_raw_data; virt = rebase o s.virtual_address }

  let size (_, s) = { raw = s.size_of_raw_data; virt = s.virtual_size }
  let header (_, s) = s

  (* Section characteristic bits: 0x80000000 writable, 0x40000000
     readable, 0x20000000 executable. *)
  let has_flag f s =
    let mask =
      match f with
      | Write -> 0x80000000
      | Read -> 0x40000000
      | Exec -> 0x20000000
    in
    flag s land mask = mask
end
(** Loader-generic view of a COFF symbol. *)
module Symbol = struct
  type t = symbol
  type header = symbol

  let name { symbol_name; _ } = symbol_name
  let value { value; _ } = value
  let header s = s
end
(* Print one symbol as "value  name" (value left-padded to 8 hex digits). *)
let pp_symbol ppf symbol =
  Format.fprintf ppf "@[<h>%-8x %s@]" (Symbol.value symbol) (Symbol.name symbol)
(* Print the symbol table; silent when there are no symbols. *)
let pp_symbols ppf symbols =
  let nsymbols = Array.length symbols in
  if nsymbols <> 0 then
    Format.fprintf ppf "@[<v 2># Symbols (%d) @ %a@]" nsymbols
      (fun ppf a ->
        Array.iter (fun sy -> Format.fprintf ppf "%a@ " pp_symbol sy) a)
      symbols
(* Print one section line: index, name, raw flags, positions, sizes and
   an "rwx"-style permission triplet. *)
let pp_section i ppf section =
  let aux fmt section (f, s) =
    Format.fprintf fmt "%s" (if Section.has_flag f section then s else "-")
  in
  let pp_flags fmt section =
    List.iter (aux fmt section)
      Loader_types.[ (Read, "r"); (Write, "w"); (Exec, "x") ]
  in
  (* raw offset / virtual address pair. *)
  let pp_imap ppf m =
    Format.fprintf ppf "@[<h>%8x %8x@]" m.Loader_types.raw m.Loader_types.virt
  in
  Format.fprintf ppf "@[<h>%2d %-20s %8x %a %a %a@]" i (Section.name section)
    (Section.flag section) pp_imap (Section.pos section) pp_imap
    (Section.size section) pp_flags section
(* Print the section table; silent when there are no sections. *)
let pp_sections ppf sections =
  let nsections = Array.length sections in
  if nsections <> 0 then
    Format.fprintf ppf "@[<v 2># Sections (%d)@ %a@]" nsections
      (fun ppf a ->
        Array.iteri (fun i sy -> Format.fprintf ppf "%a@ " (pp_section i) sy) a)
      sections
(* One-line printers for the image-header summary. *)
let pp_arch ppf arch = Format.fprintf ppf "@[Machine: %a@]" Machine.pp arch
let pp_ep ppf ep = Format.fprintf ppf "@[Entry point address: 0x%x@]" ep
(** A fully parsed PE image: headers, section headers, symbols and the
    (memory-mapped) raw bytes of the file. *)
module Img = struct
  type t = program * section array * symbol array * Loader_buf.t
  type header = program

  let arch ((f, _), _, _, _) = arch f.machine

  (* Entry point as an absolute virtual address. *)
  let entry ((_, o), _, _, _) =
    rebase o o.standard_fields.address_of_entry_point

  let sections ((_, o), s, _, _) = Array.map (fun s -> (o, s)) s

  (* Copied so callers cannot mutate the image's own array. *)
  let symbols (_, _, s, _) = Array.copy s
  let header (h, _, _, _) = h

  let cursor ?(at = 0) (_, _, _, b) =
    Loader_buf.cursor ~at Machine.LittleEndian b

  (* View (not copy) of a section's raw bytes in the underlying buffer. *)
  let content (_, _, _, b) (_, s) =
    Bigarray.Array1.sub b s.pointer_to_raw_data s.size_of_raw_data

  let pp_header ppf img =
    Format.fprintf ppf "@[<v 2># Header@ %a@ %a@]" pp_arch (arch img) pp_ep
      (entry img)

  let pp ppf img =
    Format.fprintf ppf "@[<v 0>%a@ %a@ %a@]" pp_header img pp_symbols
      (symbols img) pp_sections (sections img)
end
(** Parse [buffer] as a complete PE image.  [position] is saved right
    after the file header, i.e. the file offset of the optional header,
    which [read_sections] needs to locate the section table. *)
let load buffer =
  let t = init_cursor buffer in
  let file_header = read_file_header t in
  let position = t.position in
  let optional_header = read_optional_header t in
  let sections = read_sections t file_header position in
  let symbols = read_symbols t file_header in
  ((file_header, optional_header), sections, symbols, buffer)
(** Memory-map the whole file behind [file_descr] (read-only, [-1] size
    means "the full file") and parse it as a PE image.  The mapping
    stays valid as long as the returned image is reachable. *)
let load_file_descr file_descr =
  let buffer =
    Bigarray.(
      array1_of_genarray
        (Unix.map_file file_descr Int8_unsigned C_layout false [| -1 |]))
  in
  load buffer
(** Open, map and parse the PE file at [path].  The file descriptor is
    closed on every path (the mapping survives the close); previously it
    leaked when parsing raised. *)
let load_file path =
  let file_descr = Unix.openfile path [ Unix.O_RDONLY ] 0 in
  let img =
    try load_file_descr file_descr
    with e ->
      Unix.close file_descr;
      raise e
  in
  Unix.close file_descr;
  img
(** Byte at absolute file [offset] in the image's underlying buffer. *)
let read_offset img offset =
  let _, _, _, buffer = img in
  buffer.{offset}
(* One-entry cache memoizing the last section hit by [read_address].
   NOTE(review): this is a module-level global shared by every loaded
   image; interleaving address reads on two different images could let
   the check below accept a section cached from the other image —
   confirm callers only stream through one image at a time. *)
let cache = ref None

let find_section_by_addr_with_cache optional sections addr =
  if not (in_section_opt optional !cache addr) then
    cache := find_section_by_addr optional sections addr;
  !cache
(** Byte at virtual address [addr].  Addresses past a section's raw data
    but still inside its virtual size read as 0, mimicking the loader's
    zero-filled pages.  Raises [Invalid_argument] when [addr] belongs to
    no section. *)
let read_address ((_, o), s, _, b) addr =
  match find_section_by_addr_with_cache o s addr with
  | None ->
      let msg = Format.sprintf "Unreachable virtual address %x" addr in
      invalid_arg msg
  | Some (s : section) ->
      let offset = addr - rebase o s.virtual_address in
      if offset >= s.size_of_raw_data then 0
      else b.{offset + s.pointer_to_raw_data}
(** Reader addressed by raw file offsets. *)
module Offset = Loader_buf.Make (struct
  type t = Img.t

  let get t i = read_offset t i
  let dim (_, _, _, b) = Bigarray.Array1.dim b
end)
(** Reader addressed by virtual addresses; [dim] is unbounded because
    validity is only known per-section, checked in [read_address]. *)
module Address = Loader_buf.Make (struct
  type t = Img.t

  let get t i = read_address t i
  let dim _ = max_int
end)
| null | https://raw.githubusercontent.com/binsec/binsec/8ed9991d36451a3ae7487b966c4b38acca21a5b3/src/loader/loader_pe.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
File header
Thumb
Thumb-2
Optional header
Section header
file header + optional header + nbr * section header
Symbol header | This file is part of BINSEC .
Copyright ( C ) 2016 - 2022
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
open Loader_buf
open Loader_types
let read_magic t =
(not (dim t.buffer < 0x40))
&& Read.u8 t = Char.code 'M'
&& Read.u8 t = Char.code 'Z'
&&
(seek t 0x3c;
seek t (Read.u32 t);
(not (t.position + 4 > dim t.buffer))
&& Read.u8 t = Char.code 'P'
&& Read.u8 t = Char.code 'E'
&& Read.u8 t = 0x0
&& Read.u8 t = 0x0)
let check_magic buffer =
let t = cursor Machine.LittleEndian buffer in
read_magic t
let init_cursor buffer =
let t = cursor Machine.LittleEndian buffer in
if not (read_magic t) then invalid_format "No PE magic number";
t
type file_header = {
machine : u16;
number_of_sections : u16;
time_date_stamp : u32;
pointer_to_symbol_table : u32;
number_of_symbols : u32;
size_of_optional_header : u16;
characteristics : u16;
}
let arch = function
| 0x014c -> Machine.x86
| 0x01c0 -> Machine.armv7 Machine.LittleEndian
| 0x01c2 - > Machine.armv7
| 0x01c4 - > Machine.armv7
| 0x01f0 - > Machine . PowerPC
* | 0x01f1 - > Machine . PowerPC
* | 0x0200 - > Machine . IA64
* | 0x0166 - > Machine . MIPS
* | 0x0169 - > Machine . MIPS
* | 0x0266 - > Machine . MIPS
* | 0x0366 - > Machine . MIPS
* | 0x0466 - > Machine . MIPS
* | 0x01f1 -> Machine.PowerPC
* | 0x0200 -> Machine.IA64
* | 0x0166 -> Machine.MIPS
* | 0x0169 -> Machine.MIPS
* | 0x0266 -> Machine.MIPS
* | 0x0366 -> Machine.MIPS
* | 0x0466 -> Machine.MIPS *)
| 0x8664 -> Machine.amd64
| 0xaa64 - > Machine . ARM64
| _ -> Machine.unknown
let read_file_header t =
ensure t 20 "File header truncated";
let machine = Read.u16 t in
let number_of_sections = Read.u16 t in
let time_date_stamp = Read.u32 t in
let pointer_to_symbol_table = Read.u32 t in
let number_of_symbols = Read.u32 t in
let size_of_optional_header = Read.u16 t in
let characteristics = Read.u16 t in
{
machine;
number_of_sections;
time_date_stamp;
pointer_to_symbol_table;
number_of_symbols;
size_of_optional_header;
characteristics;
}
type standard_fields = {
magic : u16;
size_of_code : u32;
size_of_initialized_data : u32;
size_of_uninitialized_data : u32;
address_of_entry_point : u32;
base_of_code : u32;
base_of_data : u32 option;
}
type windows_fields = {
image_base : u64;
section_alignement : u32;
file_alignement : u32;
size_of_image : u32;
size_of_headers : u32;
checksum : u32;
subsystem : u16;
dll_characteristics : u16;
size_of_stack_reserve : u64;
size_of_stack_commit : u64;
size_of_heap_reserve : u64;
size_of_heap_commit : u64;
number_of_rva_and_sizes : u32;
}
type data_directory = { virtual_address : u32; size : u32 }
type data_directories = {
export_directory : data_directory;
import_directory : data_directory;
resource_directory : data_directory;
exception_directory : data_directory;
security_directory : data_directory;
basereloc_directory : data_directory;
debug_directory : data_directory;
globalptr_directory : data_directory;
tls_directory : data_directory;
load_config_directory : data_directory;
bound_import_directory : data_directory;
iat_directory : data_directory;
delay_import_directory : data_directory;
clr_header_directory : data_directory;
}
type optional_header = {
standard_fields : standard_fields;
windows_fields : windows_fields;
data_directories : data_directories;
}
type program = file_header * optional_header
let read_standard_fields32 t magic =
ensure t 26 "Standard fields truncated";
let _major_linker_version = Read.u8 t in
let _minor_linker_version = Read.u8 t in
let size_of_code = Read.u32 t in
let size_of_initialized_data = Read.u32 t in
let size_of_uninitialized_data = Read.u32 t in
let address_of_entry_point = Read.u32 t in
let base_of_code = Read.u32 t in
let base_of_data = Some (Read.u32 t) in
{
magic;
size_of_code;
size_of_initialized_data;
size_of_uninitialized_data;
address_of_entry_point;
base_of_code;
base_of_data;
}
let read_standard_fields64 t magic =
ensure t 22 "Standard fields truncated";
let _major_linker_version = Read.u8 t in
let _minor_linker_version = Read.u8 t in
let size_of_code = Read.u32 t in
let size_of_initialized_data = Read.u32 t in
let size_of_uninitialized_data = Read.u32 t in
let address_of_entry_point = Read.u32 t in
let base_of_code = Read.u32 t in
let base_of_data = None in
{
magic;
size_of_code;
size_of_initialized_data;
size_of_uninitialized_data;
address_of_entry_point;
base_of_code;
base_of_data;
}
let read_standard_fields t =
ensure t 2 "PE magic number truncated";
let magic = Read.u16 t in
match magic with
| 0x10b -> read_standard_fields32 t magic
| 0x20b -> read_standard_fields64 t magic
| _ -> invalid_format "Invalid PE image file"
let read_windows_fields32 t =
ensure t 68 "Windows fields truncated";
let image_base = Read.u32 t in
let section_alignement = Read.u32 t in
let file_alignement = Read.u32 t in
let _major_os_version = Read.u16 t in
let _minor_os_version = Read.u16 t in
let _major_image_version = Read.u16 t in
let _minor_image_version = Read.u16 t in
let _major_subsystem_version = Read.u16 t in
let _minor_subsystem_version = Read.u16 t in
if not (Read.u32 t = 0) then invalid_format "Invalid Win32 version value";
let size_of_image = Read.u32 t in
let size_of_headers = Read.u32 t in
let checksum = Read.u32 t in
let subsystem = Read.u16 t in
let dll_characteristics = Read.u16 t in
let size_of_stack_reserve = Read.u32 t in
let size_of_stack_commit = Read.u32 t in
let size_of_heap_reserve = Read.u32 t in
let size_of_heap_commit = Read.u32 t in
if not (Read.u32 t = 0) then invalid_format "Invalid loader flags";
let number_of_rva_and_sizes = Read.u32 t in
{
image_base;
section_alignement;
file_alignement;
size_of_image;
size_of_headers;
number_of_rva_and_sizes;
checksum;
subsystem;
dll_characteristics;
size_of_stack_reserve;
size_of_stack_commit;
size_of_heap_reserve;
size_of_heap_commit;
}
let read_windows_fields64 t =
ensure t 88 "Windows fields truncated";
let image_base = Read.u64 t in
let section_alignement = Read.u32 t in
let file_alignement = Read.u32 t in
let _major_os_version = Read.u16 t in
let _minor_os_version = Read.u16 t in
let _major_image_version = Read.u16 t in
let _minor_image_version = Read.u16 t in
let _major_subsystem_version = Read.u16 t in
let _minor_subsystem_version = Read.u16 t in
if not (Read.u32 t = 0) then invalid_format "Invalid Win32 version value";
let size_of_image = Read.u32 t in
let size_of_headers = Read.u32 t in
let checksum = Read.u32 t in
let subsystem = Read.u16 t in
let dll_characteristics = Read.u16 t in
let size_of_stack_reserve = Read.u64 t in
let size_of_stack_commit = Read.u64 t in
let size_of_heap_reserve = Read.u64 t in
let size_of_heap_commit = Read.u64 t in
if not (Read.u32 t = 0) then invalid_format "Invalid loader flags";
let number_of_rva_and_sizes = Read.u32 t in
{
image_base;
section_alignement;
file_alignement;
size_of_image;
size_of_headers;
number_of_rva_and_sizes;
checksum;
subsystem;
dll_characteristics;
size_of_stack_reserve;
size_of_stack_commit;
size_of_heap_reserve;
size_of_heap_commit;
}
let read_windows_fields standard t =
match standard.magic with
| 0x10b -> read_windows_fields32 t
| 0x20b -> read_windows_fields64 t
| _ -> invalid_format "Invalid PE image file"
let read_data_directory t =
ensure t 8 "Data directory truncated";
let virtual_address = Read.u32 t in
let size = Read.u32 t in
{ virtual_address; size }
let read_data_directories t =
ensure t 96 "Data directories truncated";
let export_directory = read_data_directory t in
let import_directory = read_data_directory t in
let resource_directory = read_data_directory t in
let exception_directory = read_data_directory t in
let security_directory = read_data_directory t in
let basereloc_directory = read_data_directory t in
let debug_directory = read_data_directory t in
if not (Read.u64 t = 0) then invalid_format "Invalid data directories";
let globalptr_directory = read_data_directory t in
if not (globalptr_directory.size = 0) then
invalid_format "Invalid data directories";
let tls_directory = read_data_directory t in
let load_config_directory = read_data_directory t in
let bound_import_directory = read_data_directory t in
let iat_directory = read_data_directory t in
let delay_import_directory = read_data_directory t in
let clr_header_directory = read_data_directory t in
if not (Read.u64 t = 0) then invalid_format "Invalid data directories";
{
export_directory;
import_directory;
resource_directory;
exception_directory;
security_directory;
basereloc_directory;
debug_directory;
globalptr_directory;
tls_directory;
load_config_directory;
bound_import_directory;
iat_directory;
delay_import_directory;
clr_header_directory;
}
let read_optional_header t =
let standard_fields = read_standard_fields t in
let windows_fields = read_windows_fields standard_fields t in
let data_directories = read_data_directories t in
{ standard_fields; windows_fields; data_directories }
let rebase o i = o.windows_fields.image_base + i
type section = {
section_name : string;
virtual_size : u32;
virtual_address : u32;
size_of_raw_data : u32;
pointer_to_raw_data : u32;
characteristics : u32;
}
let read_section_name t =
let position = t.position in
let name = Read.fixed_string t 8 in
seek t (position + 8);
name
let read_section t file optional n =
seek t (optional + file.size_of_optional_header + (n * 40));
ensure t 40 "Section header truncated";
let section_name = read_section_name t in
let virtual_size = Read.u32 t in
let virtual_address = Read.u32 t in
let size_of_raw_data = Read.u32 t in
let pointer_to_raw_data = Read.u32 t in
let _pointer_to_relocations = Read.u32 t in
let _pointer_to_linenumbers = Read.u32 t in
let _number_of_relocations = Read.u16 t in
let _number_of_linenumbers = Read.u16 t in
let characteristics = Read.u32 t in
{
section_name;
virtual_size;
virtual_address;
size_of_raw_data;
pointer_to_raw_data;
characteristics;
}
let read_sections t file optional =
Array.init file.number_of_sections (read_section t file optional)
exception Found of section
let find_section sections f =
try
Array.iter (fun section -> if f section then raise (Found section)) sections;
None
with Found section -> Some section
let in_section optional (section : section) addr =
addr >= rebase optional section.virtual_address
&& addr < rebase optional section.virtual_address + section.virtual_size
let in_section_opt optional section_opt addr =
match section_opt with
| None -> false
| Some section -> in_section optional section addr
let _find_section_by_name sections name =
find_section sections (fun s -> s.section_name = name)
let find_section_by_addr optional sections addr =
find_section sections (fun s -> in_section optional s addr)
type symbol = {
symbol_name : string;
value : u32;
section_number : u16;
storage_class : u8;
number_of_aux_symbols : u8;
}
let read_symbol_name t strtab strsize =
let position = t.position in
let name =
if Read.u32 t = 0 then (
let n = Read.u32 t in
seek t (strtab + n);
Read.zero_string "Unterminated symbol name" t ~maxlen:(strsize - n) ())
else (
seek t position;
Read.fixed_string t 8)
in
seek t (position + 8);
name
let read_symbol t file strtab strsize n =
seek t (file.pointer_to_symbol_table + (n * 18));
ensure t 18 "Symbol header truncated";
let symbol_name = read_symbol_name t strtab strsize in
let value = Read.u32 t in
let section_number = Read.u16 t in
let storage_class = Read.u8 t in
let number_of_aux_symbols = Read.u8 t in
{ symbol_name; value; section_number; storage_class; number_of_aux_symbols }
let read_symbols t file =
let strtab = file.pointer_to_symbol_table + (18 * file.number_of_symbols) in
let strsize =
seek t strtab;
Read.u32 t
in
seek t file.pointer_to_symbol_table;
Array.init file.number_of_symbols (read_symbol t file strtab strsize)
module Section = struct
type t = optional_header * section
type header = section
let name (_, s) = s.section_name
let flag ((_, s) : t) = s.characteristics
let pos (o, (s : section)) =
{ raw = s.pointer_to_raw_data; virt = rebase o s.virtual_address }
let size (_, s) = { raw = s.size_of_raw_data; virt = s.virtual_size }
let header (_, s) = s
let has_flag f s =
let mask =
match f with
| Write -> 0x80000000
| Read -> 0x40000000
| Exec -> 0x20000000
in
flag s land mask = mask
end
module Symbol = struct
type t = symbol
type header = symbol
let name s = s.symbol_name
let value s = s.value
let header s = s
end
let pp_symbol ppf symbol =
Format.fprintf ppf "@[<h>%-8x %s@]" (Symbol.value symbol) (Symbol.name symbol)
let pp_symbols ppf symbols =
let nsymbols = Array.length symbols in
if nsymbols <> 0 then
Format.fprintf ppf "@[<v 2># Symbols (%d) @ %a@]" nsymbols
(fun ppf a ->
Array.iter (fun sy -> Format.fprintf ppf "%a@ " pp_symbol sy) a)
symbols
let pp_section i ppf section =
let aux fmt section (f, s) =
Format.fprintf fmt "%s" (if Section.has_flag f section then s else "-")
in
let pp_flags fmt section =
List.iter (aux fmt section)
Loader_types.[ (Read, "r"); (Write, "w"); (Exec, "x") ]
in
let pp_imap ppf m =
Format.fprintf ppf "@[<h>%8x %8x@]" m.Loader_types.raw m.Loader_types.virt
in
Format.fprintf ppf "@[<h>%2d %-20s %8x %a %a %a@]" i (Section.name section)
(Section.flag section) pp_imap (Section.pos section) pp_imap
(Section.size section) pp_flags section
let pp_sections ppf sections =
let nsections = Array.length sections in
if nsections <> 0 then
Format.fprintf ppf "@[<v 2># Sections (%d)@ %a@]" nsections
(fun ppf a ->
Array.iteri (fun i sy -> Format.fprintf ppf "%a@ " (pp_section i) sy) a)
sections
let pp_arch ppf arch = Format.fprintf ppf "@[Machine: %a@]" Machine.pp arch
let pp_ep ppf ep = Format.fprintf ppf "@[Entry point address: 0x%x@]" ep
(* Accessors over a fully loaded image: (file+optional headers, sections,
   symbols, raw backing buffer). *)
module Img = struct
  type t = program * section array * symbol array * Loader_buf.t
  type header = program

  (* [arch] on the right-hand side is the outer machine->arch translation,
     not a recursive call (this [let] is not [rec]). *)
  let arch ((f, _), _, _, _) = arch f.machine

  (* Entry point, rebased against the optional header's image base. *)
  let entry ((_, o), _, _, _) =
    rebase o o.standard_fields.address_of_entry_point

  (* Each section is paired with the optional header so consumers can
     rebase its addresses. *)
  let sections ((_, o), s, _, _) = Array.map (fun s -> (o, s)) s

  (* Defensive copy: callers may mutate the result freely. *)
  let symbols (_, _, s, _) = Array.copy s
  let header (h, _, _, _) = h

  (* NOTE(review): cursor is hard-coded little-endian — presumably fine for
     PE/COFF, but confirm if big-endian targets are ever loaded. *)
  let cursor ?(at = 0) (_, _, _, b) =
    Loader_buf.cursor ~at Machine.LittleEndian b

  (* Raw bytes of a section, sliced out of the backing buffer. *)
  let content (_, _, _, b) (_, s) =
    Bigarray.Array1.sub b s.pointer_to_raw_data s.size_of_raw_data

  let pp_header ppf img =
    Format.fprintf ppf "@[<v 2># Header@ %a@ %a@]" pp_arch (arch img) pp_ep
      (entry img)

  let pp ppf img =
    Format.fprintf ppf "@[<v 0>%a@ %a@ %a@]" pp_header img pp_symbols
      (symbols img) pp_sections (sections img)
end
(* Parse a whole image from [buffer]: file header, optional header,
   section table and symbol table. *)
let load buffer =
  let t = init_cursor buffer in
  let file_header = read_file_header t in
  (* Capture the cursor position *before* the optional header is consumed:
     read_sections presumably locates the section table relative to it —
     do not reorder these statements. *)
  let position = t.position in
  let optional_header = read_optional_header t in
  let sections = read_sections t file_header position in
  let symbols = read_symbols t file_header in
  ((file_header, optional_header), sections, symbols, buffer)
(* Memory-map the whole file ([| -1 |] = map to end) read-only as bytes,
   then parse it. The mapping stays valid as long as the image is alive. *)
let load_file_descr file_descr =
  let buffer =
    Bigarray.(
      array1_of_genarray
        (Unix.map_file file_descr Int8_unsigned C_layout false [| -1 |]))
  in
  load buffer
(* Open [path] read-only, map and parse it, then close the descriptor.
   [Fun.protect] guarantees the descriptor is closed even when parsing
   raises (the original leaked the fd on exception). Closing after load
   is safe: the mmap established by [load_file_descr] survives the close. *)
let load_file path =
  let file_descr = Unix.openfile path [ Unix.O_RDONLY ] 0 in
  Fun.protect
    ~finally:(fun () -> Unix.close file_descr)
    (fun () -> load_file_descr file_descr)
let read_offset (_, _, _, b) offset = b.{offset}
(* One-entry memo of the last section hit, to speed up sequential reads.
   NOTE(review): this cache is a module-level mutable ref shared by every
   image and every thread — interleaving lookups on two different images
   could return a section from the wrong one; confirm usage is
   single-image/single-threaded. *)
let cache = ref None

let find_section_by_addr_with_cache optional sections addr =
  if not (in_section_opt optional !cache addr) then
    cache := find_section_by_addr optional sections addr;
  !cache
(* Read one byte at a *virtual* address. Raises [Invalid_argument] when the
   address falls in no section; returns 0 for addresses inside a section
   but past its raw data (e.g. zero-initialized/BSS-like tail). *)
let read_address ((_, o), s, _, b) addr =
  match find_section_by_addr_with_cache o s addr with
  | None ->
      let msg = Format.sprintf "Unreachable virtual address %x" addr in
      invalid_arg msg
  | Some (s : section) ->
      let offset = addr - rebase o s.virtual_address in
      if offset >= s.size_of_raw_data then 0
      else b.{offset + s.pointer_to_raw_data}
(* Buffer-style reader addressed by raw file offset. *)
module Offset = Loader_buf.Make (struct
  type t = Img.t

  let get t i = read_offset t i
  let dim (_, _, _, b) = Bigarray.Array1.dim b
end)
(* Buffer-style reader addressed by virtual address. [dim] is [max_int]
   because the virtual address space has no meaningful upper bound here;
   out-of-image reads fail inside [read_address] instead. *)
module Address = Loader_buf.Make (struct
  type t = Img.t

  let get t i = read_address t i
  let dim _ = max_int
end)
|
816a7015fa42058a6b13a177bb224ae583ad373db4ce655f24ecb624b48617bb | 2600hz/kazoo | ecallmgr_originate.erl | %%%-----------------------------------------------------------------------------
%%% @copyright (C) 2012-2020, 2600Hz
%%% @doc
%%% @author
%%% @author
%%%
%%% This Source Code Form is subject to the terms of the Mozilla Public
%%% License, v. 2.0. If a copy of the MPL was not distributed with this
%%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(ecallmgr_originate).
-behaviour(gen_listener).
-export([start_link/2]).
-export([handle_originate_execute/2]).
-export([handle_call_events/2]).
-export([init/1
,handle_call/3
,handle_cast/2
,handle_info/2
,handle_event/2
,terminate/2
,code_change/3
]).
-include("ecallmgr.hrl").
-define(SERVER, ?MODULE).
%% A call-id tagged with its origin: 'fs' when generated locally,
%% 'api' when the requester supplied an Outbound-Call-ID.
-type created_uuid() :: {'fs' | 'api', kz_term:ne_binary()}.

-record(state, {node :: atom()                     %% FreeSWITCH node the originate runs on
               ,server_id :: kz_term:api_binary()  %% AMQP destination for responses/errors
               ,controller_q :: kz_term:api_binary() %% controller queue of the requester
               ,originate_req = kz_json:new() :: kz_json:object() %% the validated originate request
               ,uuid :: created_uuid() | 'undefined' %% call-id chosen for the new channel
               ,action :: kz_term:api_binary()     %% FS application string to execute (e.g. &park())
               ,app :: kz_term:api_binary()        %% requested Application-Name
               ,dialstrings :: kz_term:api_binary() %% fully built `originate` API argument string
               ,queue :: kz_term:api_binary()      %% this listener's own AMQP queue
               ,control_pid :: kz_term:api_pid()   %% call-control process, if started
               ,tref :: kz_term:api_reference()    %% abandon timer (await originate_execute)
               ,fetch_id = kz_binary:rand_hex(16)  %% correlates FS fetches with this request
               }).
-type state() :: #state{}.

-define(BINDINGS, [{'self', []}]).
-define(RESPONDERS, [{{?MODULE, 'handle_originate_execute'}
                     ,[{<<"dialplan">>, <<"originate_execute">>}]
                     }
                    ,{{?MODULE, 'handle_call_events'}
                     ,[{<<"call_event">>, <<"*">>}]
                     }
                    ]).
-define(QUEUE_NAME, <<>>).
-define(QUEUE_OPTIONS, []).
-define(CONSUME_OPTIONS, []).

%% Default originate action: park the leg and wait for commands.
-define(ORIGINATE_PARK, <<"&park()">>).
-define(ORIGINATE_EAVESDROP, <<"eavesdrop">>).
%% How long to wait for originate_execute before abandoning.
-define(REPLY_TIMEOUT, 5 * ?MILLISECONDS_IN_SECOND).
%%%=============================================================================
%%% API
%%%=============================================================================
%%------------------------------------------------------------------------------
%% @doc Starts the server.
%% @end
%%------------------------------------------------------------------------------
%% @doc Start a gen_listener handling one originate request `JObj' on `Node'.
-spec start_link(atom(), kz_json:object()) -> kz_types:startlink_ret().
start_link(Node, JObj) ->
    gen_listener:start_link(?SERVER
                           ,[{'bindings', ?BINDINGS}
                            ,{'responders', ?RESPONDERS}
                            ,{'queue_name', ?QUEUE_NAME}
                            ,{'queue_options', ?QUEUE_OPTIONS}
                            ,{'consume_options', ?CONSUME_OPTIONS}
                            ]
                           ,[Node, JObj]
                           ).
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
%% @doc AMQP responder for call events of the originated leg.
%% The `andalso' trick: events for other call-ids short-circuit to 'false'
%% and fall into the catch-all clause. Only a completed bridge or a channel
%% destroy is forwarded to the listener.
-spec handle_call_events(kz_call_event:doc(), kz_term:proplist()) -> 'ok'.
handle_call_events(JObj, Props) ->
    Srv = props:get_value('server', Props),
    case props:get_value('uuid', Props) =:= kz_api:call_id(JObj)
        andalso kz_api:event_name(JObj)
    of
        <<"CHANNEL_EXECUTE_COMPLETE">> ->
            case kz_call_event:application_name(JObj) of
                <<"bridge">> ->
                    gen_listener:cast(Srv, {'bridge_execute_complete', JObj});
                _Else -> 'ok'
            end;
        <<"CHANNEL_DESTROY">> ->
            gen_listener:cast(Srv, {'channel_destroy', JObj});
        _Else -> 'ok'
    end.
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
%% @doc AMQP responder for the dialplan originate_execute message: the
%% requester has accepted our originate_ready and wants the call placed.
%% Crashes (on purpose) if the payload does not validate.
-spec handle_originate_execute(kz_json:object(), kz_term:proplist()) -> 'ok'.
handle_originate_execute(JObj, Props) ->
    'true' = kapi_dialplan:originate_execute_v(JObj),
    Srv = props:get_value('server', Props),
    UUID = props:get_value('uuid', Props),
    lager:debug("recv originate_execute for ~s", [UUID]),
    %% Requester may hand over a new reply-to queue for subsequent responses.
    _ = case kz_api:queue_id(JObj) of
            'undefined' -> 'ok';
            ServerId -> gen_listener:cast(Srv, {'update_server_id', ServerId})
        end,
    kz_cache:store_local(?ECALLMGR_UTIL_CACHE, {UUID, 'start_listener'}, 'true'),
    gen_listener:cast(Srv, {'originate_execute'}).
%%%=============================================================================
%%% gen_server callbacks
%%%=============================================================================
%%------------------------------------------------------------------------------
%% @doc Initializes the server.
%% @end
%%------------------------------------------------------------------------------
%% @doc Validate the originate request and seed the state; publishes an
%% error and stops immediately when the request is malformed.
-spec init([node() | kz_json:object()]) -> {'stop', 'normal'} | {'ok', state()}.
init([Node, JObj]) ->
    _ = kz_log:put_callid(JObj),
    ServerId = kz_api:server_id(JObj),
    ControllerQ = kz_api:queue_id(JObj),
    _ = bind_to_events(Node),
    case kapi_resource:originate_req_v(JObj) of
        'false' ->
            Error = <<"originate failed to execute as JObj did not validate">>,
            publish_error(Error, 'undefined', JObj, ServerId),
            {'stop', 'normal'};
        'true' ->
            {'ok', #state{node=Node
                         ,originate_req=JObj
                         ,server_id=ServerId
                         ,controller_q = ControllerQ
                         }}
    end.
%% @doc Subscribe (via gproc) to FreeSWITCH loopback::bowout events on
%% `Node' so we can follow call-id changes when a loopback leg bows out.
-spec bind_to_events(atom()) -> 'ok'.
bind_to_events(Node) ->
    gproc:reg({'p', 'l', ?FS_EVENT_REG_MSG(Node, <<"loopback::bowout">>)}).
%%------------------------------------------------------------------------------
%% @doc Handling call messages.
%% @end
%%------------------------------------------------------------------------------
%% @doc No synchronous API is exposed; every call is rejected.
-spec handle_call(any(), kz_term:pid_ref(), state()) -> kz_types:handle_call_ret_state(state()).
handle_call(_Request, _From, State) ->
    {'reply', {'error', 'not_implemented'}, State}.
%%------------------------------------------------------------------------------
%% @doc Handling cast messages.
%% @end
%%------------------------------------------------------------------------------
%% @doc State machine driving one originate:
%% created_queue -> get_originate_action -> build_originate_args
%% (-> create_uuid) -> originate_ready/originate_execute -> stop.
-spec handle_cast(any(), state()) -> kz_types:handle_cast_ret_state(state()).
%% Our AMQP queue exists; kick off the pipeline.
handle_cast({'gen_listener', {'created_queue', Q}}, State) ->
    lager:debug("starting originate request"),
    gen_listener:cast(self(), {'get_originate_action'}),
    {'noreply', State#state{queue=Q}};
handle_cast({'update_server_id', ServerId}, State) ->
    {'noreply', State#state{server_id=ServerId}, 'hibernate'};
handle_cast({'maybe_update_node', Node}, #state{node=Node}=State) ->
    {'noreply', State};
handle_cast({'maybe_update_node', Node}, #state{node=_OldNode}=State) ->
    lager:debug("updating node from ~s to ~s", [_OldNode, Node]),
    {'noreply', State#state{node=Node}, 'hibernate'};
%% Pick (or mint) a call-id; optionally pre-start the call-control process.
handle_cast({'create_uuid'}, #state{node=Node
                                   ,originate_req=JObj
                                   ,uuid='undefined'
                                   }=State) ->
    UUID = {_, Id} = create_uuid(JObj, Node),
    kz_log:put_callid(Id),
    lager:debug("created uuid ~p", [UUID]),
    %% 'false' when Start-Control-Process is disabled; otherwise the
    %% start_control_process/1 result.
    case kz_json:is_true(<<"Start-Control-Process">>, JObj, 'true')
        andalso start_control_process(State#state{uuid=UUID}) of
        'false' ->
            gen_listener:cast(self(), {'build_originate_args'}),
            {'noreply', State#state{uuid=UUID}, 'hibernate'};
        {'ok', #state{control_pid=Pid}=State1} ->
            lager:debug("started control proc ~p uuid ~p", [Pid, UUID]),
            maybe_send_originate_uuid(UUID, Pid, State),
            gen_listener:cast(self(), {'build_originate_args'}),
            {'noreply', State1, 'hibernate'};
        {'error', _E} ->
            lager:debug("failed to start control proc for ~p: ~p", [UUID, _E]),
            {'stop', 'normal', State}
    end;
%% Translate Application-Name into a FreeSWITCH action string; may move to
%% the node already hosting an existing leg.
handle_cast({'get_originate_action'}, #state{originate_req=JObj
                                            ,node=Node
                                            }=State) ->
    gen_listener:cast(self(), {'build_originate_args'}),
    UseNode = maybe_update_node(JObj, Node),
    ApplicationName = kz_json:get_value(<<"Application-Name">>, JObj),
    Action = get_originate_action(ApplicationName, JObj, UseNode),
    lager:debug("originate action: ~s", [Action]),
    {'noreply', State#state{action=Action
                           ,app=ApplicationName
                           ,node=UseNode
                           }
    ,'hibernate'
    };
%% Need a uuid before we can build dialstrings.
handle_cast({'build_originate_args'}, #state{uuid='undefined'}=State) ->
    gen_listener:cast(self(), {'create_uuid'}),
    {'noreply', State};
%% Park: default Originate-Immediate is 'false' -> wait for originate_execute.
handle_cast({'build_originate_args'}, #state{originate_req=JObj
                                            ,action = ?ORIGINATE_PARK
                                            ,fetch_id=FetchId
                                            ,dialstrings='undefined'
                                            }=State) ->
    case kz_json:is_true(<<"Originate-Immediate">>, JObj) of
        'true' -> gen_listener:cast(self(), {'originate_execute'});
        'false' -> gen_listener:cast(self(), {'originate_ready'})
    end,
    Endpoints = [update_endpoint(Endpoint, State)
                 || Endpoint <- kz_json:get_ne_value(<<"Endpoints">>, JObj, [])
                ],
    {'noreply', State#state{dialstrings=build_originate_args_from_endpoints(?ORIGINATE_PARK, Endpoints, JObj, FetchId)}};
%% Eavesdrop always goes through the ready/execute handshake.
handle_cast({'build_originate_args'}, #state{originate_req=JObj
                                            ,action = Action
                                            ,app = ?ORIGINATE_EAVESDROP
                                            ,fetch_id=FetchId
                                            ,dialstrings='undefined'
                                            }=State) ->
    gen_listener:cast(self(), {'originate_ready'}),
    {'noreply', State#state{dialstrings=build_originate_args(Action, State, JObj, FetchId)}};
%% Everything else: default Originate-Immediate is 'true' -> fire right away.
handle_cast({'build_originate_args'}, #state{originate_req=JObj
                                            ,action=Action
                                            ,fetch_id=FetchId
                                            ,dialstrings='undefined'
                                            }=State) ->
    case kz_json:is_true(<<"Originate-Immediate">>, JObj, 'true') of
        'true' -> gen_listener:cast(self(), {'originate_execute'});
        'false' -> gen_listener:cast(self(), {'originate_ready'})
    end,
    {'noreply', State#state{dialstrings=build_originate_args(Action, State, JObj, FetchId)}};
%% Tell the requester we are ready and arm the abandon timer.
handle_cast({'originate_ready'}, #state{node=_Node}=State) ->
    case start_control_process(State) of
        {'ok', #state{control_pid=Pid
                     ,uuid=UUID
                     ,originate_req=JObj
                     ,server_id=ServerId
                     ,queue=Q
                     }=State1} ->
            CtrlQ = gen_listener:queue_name(Pid),
            _ = publish_originate_ready(CtrlQ, UUID, JObj, Q, ServerId),
            {'noreply', State1#state{tref=start_abandon_timer()}};
        {'error', _E} ->
            lager:debug("failed to start control process: ~p", [_E]),
            {'stop', 'normal', State}
    end;
%% Cancel a pending abandon timer before executing.
handle_cast({'originate_execute'}, #state{tref=TRef}=State) when is_reference(TRef) ->
    _ = erlang:cancel_timer(TRef),
    handle_cast({'originate_execute'}, State#state{tref='undefined'});
handle_cast({'originate_execute'}, #state{dialstrings=Dialstrings
                                         ,node=Node
                                         ,originate_req=JObj
                                         ,uuid={_, UUID}
                                         ,server_id=ServerId
                                         ,control_pid=CtrlPid
                                         }=State) ->
    ControlDisabled = kz_json:is_false(<<"Start-Control-Process">>, JObj, 'false'),
    case originate_execute(Node, Dialstrings, find_originate_timeout(JObj)) of
        %% FS answered with the uuid we asked for: done.
        {'ok', UUID} when is_pid(CtrlPid) ->
            lager:debug("originate completed for: ~s with ctrl ~p", [UUID, CtrlPid]),
            _ = publish_originate_resp(ServerId, JObj, UUID),
            {'stop', 'normal', State#state{control_pid='undefined'}};
        %% FS picked a different uuid; stop our control proc unless it
        %% already controls the winning leg.
        {'ok', WinningUUID} when is_pid(CtrlPid) ->
            lager:debug("originate completed for other UUID: ~s (not ~s)", [WinningUUID, UUID]),
            _ = publish_originate_resp(ServerId, JObj, WinningUUID),
            CtrlPids = ecallmgr_call_control:control_procs(WinningUUID),
            _ = case lists:member(CtrlPid, CtrlPids) of
                    'true' -> 'ok';
                    'false' -> ecallmgr_call_control:stop(CtrlPid)
                end,
            {'stop', 'normal', State#state{control_pid='undefined'}};
        {'ok', CallId} when ControlDisabled ->
            lager:debug("originate completed for: ~s with no control pid", [CallId]),
            _ = publish_originate_resp(ServerId, JObj, CallId),
            {'stop', 'normal', State#state{control_pid='undefined'}};
        %% No control pid yet: stay alive and track the leg to completion.
        {'ok', CallId} ->
            kz_log:put_callid(CallId),
            lager:debug("originate is executing, waiting for completion"),
            erlang:monitor_node(Node, 'true'),
            bind_to_call_events(CallId),
            CtrlQ = ecallmgr_call_control:queue_name(CtrlPid),
            _ = publish_originate_started(ServerId, CallId, JObj, CtrlQ),
            {'noreply', State#state{uuid={'api', CallId}}};
        {'error', Error} ->
            lager:debug("failed to originate: ~p", [Error]),
            _ = publish_error(Error, UUID, JObj, ServerId),
            {'stop', 'normal', State}
    end;
handle_cast({'bridge_execute_complete', JObj}, #state{server_id=ServerId}=State) ->
    lager:debug("received bridge complete event, sending originate response"),
    _ = publish_originate_resp(ServerId, JObj),
    {'stop', 'normal', State};
handle_cast({'channel_destroy', JObj}, #state{server_id=ServerId}=State) ->
    lager:debug("received channel destroy event, sending originate response"),
    _ = publish_originate_resp(ServerId, JObj),
    {'stop', 'normal', State};
handle_cast({'gen_listener',{'is_consuming',_IsConsuming}}, State) ->
    {'noreply', State};
handle_cast(_Msg, State) ->
    lager:debug("unhandled cast: ~p", [_Msg]),
    {'noreply', State, 'hibernate'}.
%%------------------------------------------------------------------------------
%% @doc Handling all non call/cast messages.
%% @end
%%------------------------------------------------------------------------------
-spec handle_info(any(), state()) -> kz_types:handle_info_ret_state(state()).
%% FreeSWITCH event (e.g. loopback::bowout) delivered via gproc; may
%% switch our tracked uuid to the surviving leg.
handle_info({'event', _UUID, FSJObj}, #state{uuid=CreatedUUID}=State) ->
    {'noreply', State#state{uuid=handle_fs_event(FSJObj, CreatedUUID)}};
%% NOTE(review): binary_to_term/1 on raw socket data deserializes
%% unchecked input — presumably the peer is a trusted FS node; confirm.
handle_info({'tcp', _, Data}, State) ->
    Event = binary_to_term(Data),
    handle_info(Event, State);
handle_info({'abandon_originate'}, #state{tref='undefined'}=State) ->
    %% Cancelling a timer does not guarantee that the message has not
    %% already been delivered to the message queue.
    {'noreply', State};
%% Abandon timer fired: no originate_execute arrived in time.
handle_info({'abandon_originate'}, #state{originate_req=JObj
                                         ,uuid=UUID
                                         ,server_id=ServerId
                                         }=State) ->
    Error = <<"Failed to receive valid originate_execute in time">>,
    _ = publish_error(Error, UUID, JObj, ServerId),
    {'stop', 'normal', State};
%% The FreeSWITCH node we monitored went away.
handle_info({'nodedown', _}, #state{originate_req=JObj
                                   ,uuid=UUID
                                   ,server_id=ServerId
                                   ,node=Node
                                   }=State) ->
    erlang:monitor_node(Node, 'false'),
    Error = <<"lost connection to freeswitch node">>,
    _ = publish_error(Error, UUID, JObj, ServerId),
    {'stop', 'normal', State};
handle_info(_Info, State) ->
    lager:debug("unhandled message: ~p", [_Info]),
    {'noreply', State, 'hibernate'}.
%%------------------------------------------------------------------------------
%% @doc Allows listener to pass options to handlers.
%% @end
%%------------------------------------------------------------------------------
%% @doc Hand the responders the call-id we are tracking, unwrapping a
%% created_uuid() tuple when present ('undefined' is passed through as-is).
-spec handle_event(kz_json:object(), state()) -> gen_listener:handle_event_return().
handle_event(_JObj, #state{uuid=Tracked}) ->
    CallId = case Tracked of
                 {_Source, Id} -> Id;
                 _Other -> Tracked
             end,
    {'reply', [{'uuid', CallId}]}.
%%------------------------------------------------------------------------------
%% @doc This function is called by a `gen_server' when it is about to
%% terminate. It should be the opposite of `Module:init/1' and do any
%% necessary cleaning up. When it returns, the `gen_server' terminates
%% with Reason. The return value is ignored.
%%
%% @end
%%------------------------------------------------------------------------------
-spec terminate(any(), state()) -> 'ok'.
%% A control process we pre-started is still ours: shut it down so it is
%% not orphaned when this listener dies.
terminate(_Reason, #state{control_pid=CtrlPid}) when is_pid(CtrlPid) ->
    lager:debug("stop abandoned call control process ~p", [CtrlPid]),
    ecallmgr_call_control:stop(CtrlPid),
    lager:debug("originate termination: ~p", [_Reason]);
terminate(_Reason, _State) ->
    lager:debug("originate termination: ~p", [_Reason]).
%%------------------------------------------------------------------------------
%% @doc Convert process state when code is changed.
%% @end
%%------------------------------------------------------------------------------
%% @doc No state migration needed across code upgrades.
-spec code_change(any(), state(), any()) -> {'ok', state()}.
code_change(_OldVsn, State, _Extra) ->
    {'ok', State}.
%%%=============================================================================
%%% Internal functions
%%%=============================================================================
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
%% @doc Ask FreeSWITCH to prefetch `File' into its HTTP cache and wait for
%% the background job's result.
%% NOTE(review): the receive has no `after' clause — if the bgapi callback
%% is never invoked this blocks forever; confirm whether a timeout is wanted.
-spec cache_fax_file(kz_term:ne_binary(), node()) -> {'ok' | 'error', kz_term:ne_binary()}.
cache_fax_file(File, Node) ->
    Self = self(),
    Fun = fun(Res, Reply) ->
                  lager:debug("cache fax file result : ~p", [{Res, Reply}]),
                  Self ! {cache_fax_file, {Res, Reply}}
          end,
    {ok, JobId} = freeswitch:bgapi(Node, 'http_get', <<"{prefetch=true}", File/binary>>, Fun),
    lager:debug("waiting for cache fax file result ~s", [JobId]),
    receive
        {cache_fax_file, Reply} -> Reply
    end.
%% @doc Map the request's Application-Name to the FreeSWITCH action string
%% appended to the originate command; anything unrecognized parks the leg.
-spec get_originate_action(kz_term:ne_binary(), kz_json:object(), node()) -> kz_term:ne_binary().
get_originate_action(<<"fax">>, JObj, Node) ->
    lager:debug("got originate with action fax"),
    Data = kz_json:get_value(<<"Application-Data">>, JObj),
    %% Prefetch the document so txfax can read it from the FS cache.
    {'ok', File} = cache_fax_file(Data, Node),
    <<"&txfax(", File/binary, ")">>;
get_originate_action(<<"transfer">>, JObj, _Node) ->
    get_transfer_action(JObj, kz_json:get_value([<<"Application-Data">>, <<"Route">>], JObj));
get_originate_action(<<"bridge">>, JObj, _Node) ->
    lager:debug("got originate with action bridge"),
    CallId = kz_json:get_binary_value(<<"Existing-Call-ID">>, JObj),
    intercept_unbridged_only(CallId, JObj);
get_originate_action(<<"eavesdrop">>, JObj, _Node) ->
    lager:debug("got originate with action eavesdrop"),
    EavesdropCallId = kz_json:get_binary_value(<<"Eavesdrop-Call-ID">>, JObj),
    %% Eavesdrop must run on the node hosting the target channel.
    case ecallmgr_fs_channel:node(EavesdropCallId) of
        {'error', _} ->
            lager:debug("failed to find channel ~p in node list", [kz_json:get_value(<<"Eavesdrop-Call-ID">>, JObj)]),
            <<"error">>;
        {'ok', N} ->
            gen_listener:cast(self(), {'maybe_update_node', N}),
            get_eavesdrop_action(JObj)
    end;
get_originate_action(_, _, _) ->
    lager:debug("got originate with action park"),
    ?ORIGINATE_PARK.
%% @doc Build an inline-dialplan transfer action; "error" when no route.
%% 'm:^:' sets '^' as the statement separator for the inline dialplan.
-spec get_transfer_action(kz_json:object(), kz_term:api_binary()) -> kz_term:ne_binary().
get_transfer_action(_JObj, 'undefined') -> <<"error">>;
get_transfer_action(JObj, Route) ->
    Context = ?DEFAULT_FREESWITCH_CONTEXT,
    UnsetVars = get_unset_vars(JObj),
    list_to_binary(
      ["'m:^:", UnsetVars
      ,"transfer:", Route
      ," XML ", Context, "' inline"
      ]
     ).
%% @doc With no existing call to intercept, fall back to a plain bridge
%% action. Otherwise build an inline intercept, propagating the request's
%% Intercept-Unbridged-Only flag (defaults to true).
-spec intercept_unbridged_only(kz_term:ne_binary() | 'undefined', kz_json:object()) -> kz_term:ne_binary().
intercept_unbridged_only('undefined', JObj) ->
    get_bridge_action(JObj);
intercept_unbridged_only(ExistingCallId, JObj) ->
    Flag = case kz_json:is_true(<<"Intercept-Unbridged-Only">>, JObj, 'true') of
               'true' -> <<"true">>;
               'false' -> <<"false">>
           end,
    <<" 'set:intercept_unbridged_only=", Flag/binary
     ,",intercept:", ExistingCallId/binary, "' inline "
    >>.
%% @doc Build an inline bridge action from the request's endpoint data;
%% "error" when the channel string cannot be built.
-spec get_bridge_action(kz_json:object()) -> kz_term:ne_binary().
get_bridge_action(JObj) ->
    Data = kz_json:get_value(<<"Application-Data">>, JObj),
    case ecallmgr_util:build_channel(Data) of
        {'error', _} -> <<"error">>;
        {'ok', Channel} ->
            UnsetVars = get_unset_vars(JObj),
            list_to_binary(
              ["'m:^:", UnsetVars
              ,"bridge:", Channel, "' inline"
              ]
             )
    end.
%% @doc When the request references an Existing-Call-ID, prefer the node
%% hosting that channel; otherwise keep the current node.
-spec maybe_update_node(kz_json:object(), atom()) -> atom().
maybe_update_node(JObj, Node) ->
    case kz_json:get_binary_value(<<"Existing-Call-ID">>, JObj) of
        'undefined' -> Node;
        CallId ->
            case ecallmgr_fs_channel:node(CallId) of
                {'error', _} -> Node;
                {'ok', Node} -> Node;
                {'ok', N} -> lager:debug("updating node from ~s to ~s", [Node, N]),
                             N
            end
    end.
%% @doc Build the eavesdrop action string. A group id targets all calls
%% restricted by eavesdrop_require_group; the DTMF prefix (w2/w3) selects
%% whisper vs full mode, listen/default adds none.
-spec get_eavesdrop_action(kz_json:object()) -> kz_term:ne_binary().
get_eavesdrop_action(JObj) ->
    {CallId, Group} = case kz_json:get_value(<<"Eavesdrop-Group-ID">>, JObj) of
                          'undefined' -> {kz_json:get_binary_value(<<"Eavesdrop-Call-ID">>, JObj), <<>>};
                          ID -> {<<"all">>, <<"eavesdrop_require_group=", ID/binary, ",">>}
                      end,
    case kz_json:get_value(<<"Eavesdrop-Mode">>, JObj) of
        <<"whisper">> -> <<Group/binary, "queue_dtmf:w2@500,eavesdrop:", CallId/binary, " inline">>;
        <<"full">> -> <<Group/binary, "queue_dtmf:w3@500,eavesdrop:", CallId/binary, " inline">>;
        <<"listen">> -> <<Group/binary, "eavesdrop:", CallId/binary, " inline">>;
        'undefined' -> <<Group/binary, "eavesdrop:", CallId/binary, " inline">>
    end.
%% @doc Update each endpoint (uuid assignment, hold-media fixups) then
%% build the originate argument string; 'undefined' when no endpoints.
%% NOTE(review): the single- and multi-endpoint branches run the same
%% code today — only the log line differs.
-spec build_originate_args(kz_term:ne_binary(), state(), kz_json:object(), kz_term:ne_binary()) -> kz_term:api_binary().
build_originate_args(Action, State, JObj, FetchId) ->
    case kz_json:get_value(<<"Endpoints">>, JObj, []) of
        [] ->
            lager:warning("no endpoints defined in originate request"),
            'undefined';
        [Endpoint] ->
            lager:debug("only one endpoint, don't create per-endpoint UUIDs"),
            build_originate_args_from_endpoints(Action, [update_endpoint(Endpoint, State)], JObj, FetchId);
        Endpoints ->
            lager:debug("multiple endpoints defined, assigning uuids to each"),
            UpdatedEndpoints = [update_endpoint(Endpoint, State) || Endpoint <- Endpoints],
            build_originate_args_from_endpoints(Action, UpdatedEndpoints, JObj, FetchId)
    end.
%% @doc Assemble the final `originate' API argument:
%% {channel-vars}dialstrings action
-spec build_originate_args_from_endpoints(kz_term:ne_binary(), kz_json:objects(), kz_json:object(), kz_term:ne_binary()) ->
          kz_term:ne_binary().
build_originate_args_from_endpoints(Action, Endpoints, JObj, FetchId) ->
    lager:debug("building originate command arguments"),
    DialSeparator = ecallmgr_util:get_dial_separator(JObj, Endpoints),
    DialStrings = ecallmgr_util:build_bridge_string(Endpoints, DialSeparator),
    ChannelVars = get_channel_vars(JObj, FetchId),
    list_to_binary([ChannelVars, DialStrings, " ", Action]).
%% @doc Render the {var=val,...} channel-variable prefix for the originate
%% command, injecting Fetch-ID, this node and the interaction id as CCVs.
-spec get_channel_vars(kz_json:object(), kz_term:ne_binary()) -> iolist().
get_channel_vars(JObj, FetchId) ->
    InteractionId = kz_json:get_value([<<"Custom-Channel-Vars">>, <<?CALL_INTERACTION_ID>>], JObj, ?CALL_INTERACTION_DEFAULT),
    CCVs = [{<<"Fetch-ID">>, FetchId}
           ,{<<"Ecallmgr-Node">>, kz_term:to_binary(node())}
           ,{<<?CALL_INTERACTION_ID>>, InteractionId}
           ],
    J = kz_json:from_list_recursive([{<<"Custom-Channel-Vars">>, add_ccvs(JObj, CCVs)}]),
    ecallmgr_fs_xml:get_channel_vars(kz_json:merge(JObj, J)).
%% @doc Augment `Props' with loopback and origination-uuid CCVs derived
%% from the request. Equivalent to folding the two helpers over Props in
%% order: loopback first, then origination uuid.
-spec add_ccvs(kz_json:object(), kz_term:proplist()) -> kz_term:proplist().
add_ccvs(JObj, Props) ->
    maybe_add_origination_uuid(JObj, maybe_add_loopback(JObj, Props)).
%% @doc Prepend Origination-Call-ID when the request carries a non-empty
%% Outbound-Call-ID; otherwise return Props untouched.
-spec maybe_add_origination_uuid(kz_json:object(), kz_term:proplist()) -> kz_term:proplist().
maybe_add_origination_uuid(JObj, Props) ->
    OutboundCallId = kz_json:get_ne_binary_value(<<"Outbound-Call-ID">>, JObj),
    case OutboundCallId =:= 'undefined' of
        'true' -> Props;
        'false' -> [{<<"Origination-Call-ID">>, OutboundCallId} | Props]
    end.
%% @doc Prepend the Simplify-Loopback/Loopback-Bowout pair when the request
%% specifies Simplify-Loopback (any truthy/falsy value); untouched otherwise.
-spec maybe_add_loopback(kz_json:object(), kz_term:proplist()) -> kz_term:proplist().
maybe_add_loopback(JObj, Props) ->
    Simplify = kz_json:get_binary_boolean(<<"Simplify-Loopback">>, JObj),
    case Simplify of
        'undefined' -> Props;
        _Specified -> add_loopback(kz_term:is_true(Simplify)) ++ Props
    end.
%% @doc Both loopback CCVs always carry the same boolean, so build the
%% pair from it directly instead of duplicating the list per clause.
-spec add_loopback(boolean()) -> kz_term:proplist().
add_loopback(Simplify) when is_boolean(Simplify) ->
    [{<<"Simplify-Loopback">>, Simplify}
    ,{<<"Loopback-Bowout">>, Simplify}
    ].
%% @doc Fire the originate API on the FS node asynchronously.
%% NOTE(review): the Timeout argument is accepted but ignored here
%% (async_api does not take one) — confirm whether callers rely on it.
-spec originate_execute(atom(), kz_term:ne_binary(), pos_integer()) ->
          {'ok', kz_term:ne_binary()} |
          {'error', kz_term:ne_binary() | 'timeout' | 'crash'}.
originate_execute(Node, Dialstrings, _Timeout) ->
    lager:debug("executing originate on ~s: ~s", [Node, Dialstrings]),
    freeswitch:async_api(Node, 'originate', Dialstrings).
%% @doc Subscribe this listener to AMQP call events for `CallId'.
-spec bind_to_call_events(kz_term:ne_binary()) -> 'ok'.
bind_to_call_events(CallId) ->
    lager:debug("binding to call events for ~s", [CallId]),
    Options = [{'callid', CallId}
              ,{'restrict_to', ['events']}
              ],
    gen_listener:add_binding(self(), 'call', Options).
%% @doc Drop all 'call' bindings previously added for any call-id.
-spec unbind_from_call_events() -> 'ok'.
unbind_from_call_events() ->
    lager:debug("unbind from call events"),
    gen_listener:rm_binding(self(), 'call', []).
%% @doc Switch tracking from OldUUID to NewUUID: update the log call-id and
%% re-subscribe call events. Order matters: unbind before bind.
-spec update_uuid(kz_term:api_binary(), kz_term:ne_binary()) -> 'ok'.
update_uuid(OldUUID, NewUUID) ->
    kz_log:put_callid(NewUUID),
    lager:debug("updating call id from ~s to ~s", [OldUUID, NewUUID]),
    unbind_from_call_events(),
    bind_to_call_events(NewUUID),
    'ok'.
%% @doc Mint a fresh local ('fs'-tagged) call-id (36 hex chars).
-spec create_uuid(atom()) -> created_uuid().
create_uuid(_Node) -> {'fs', kz_binary:rand_hex(18)}.
%% @doc Use the requester-supplied Outbound-Call-ID ('api') when present,
%% otherwise mint a local one ('fs').
-spec create_uuid(kz_json:object(), atom()) -> created_uuid().
create_uuid(JObj, Node) ->
    case kz_json:get_binary_value(<<"Outbound-Call-ID">>, JObj) of
        'undefined' -> create_uuid(Node);
        CallId -> {'api', CallId}
    end.
%% @doc Per-endpoint variant: prefer the endpoint's own Outbound-Call-ID.
%% The request JObj argument is currently unused.
-spec create_uuid(kz_json:object(), kz_json:object(), atom()) -> created_uuid().
create_uuid(Endpoint, _JObj, Node) ->
    case kz_json:get_binary_value(<<"Outbound-Call-ID">>, Endpoint) of
        'undefined' -> create_uuid(Node);
        CallId -> {'api', CallId}
    end.
%% @doc Build the "unset:var^..." prefix for inline-dialplan actions:
%% every channel variable derived from the request is unset except those
%% listed in Export-Custom-Channel-Vars, plus a few FS workarounds.
%% Fix: line "Refactor (...)" had lost its comment prefix and was left as
%% a bare (non-compiling) expression; restored as a comment.
-spec get_unset_vars(kz_json:object()) -> iolist().
get_unset_vars(JObj) ->
    %% TODO: Refactor (wishes he had unit tests here for you to use)
    ExportProps = [{K, <<>>} || K <- kz_json:get_value(<<"Export-Custom-Channel-Vars">>, JObj, [])],
    %% NOTE(review): the `=/= 'undefined'` filter below is vacuously true
    %% for any successful [K, _] split — it keeps every exported var and
    %% crashes on vars without '='; preserved as-is pending tests.
    Export = [K || KV <- lists:foldr(fun ecallmgr_fs_xml:kazoo_var_to_fs_var/2
                                    ,[]
                                    ,[{<<"Custom-Channel-Vars">>, kz_json:from_list(ExportProps)}]
                                    ),
                   ([K, _] = string:tokens(binary_to_list(KV), "=")) =/= 'undefined'
             ],
    case ["unset:" ++ K
          || KV <- lists:foldr(fun ecallmgr_fs_xml:kazoo_var_to_fs_var/2, [], kz_json:to_proplist(JObj))
                 ,not lists:member(begin [K, _] = string:tokens(binary_to_list(KV), "="), K end, Export)
         ]
    of
        [] -> "";
        Unset ->
            [string:join(Unset, "^")
            ,maybe_fix_ignore_early_media(Export)
            ,maybe_fix_group_confirm(Export)
            ,maybe_fix_fs_auto_answer_bug(Export)
            ,maybe_fix_caller_id(Export, JObj)
            ,"^"
            ]
    end.
%% @doc Unset ignore_early_media unless the request explicitly exports it.
-spec maybe_fix_ignore_early_media(kz_term:strings()) -> string().
maybe_fix_ignore_early_media(Export) ->
    Exported = lists:member("ignore_early_media", Export),
    case Exported of
        'false' -> "^unset:ignore_early_media";
        'true' -> ""
    end.
%% @doc Unset the group_confirm trio unless group_confirm_key is exported.
-spec maybe_fix_group_confirm(kz_term:strings()) -> string().
maybe_fix_group_confirm(Export) ->
    Exported = lists:member("group_confirm_key", Export),
    case Exported of
        'false' -> "^unset:group_confirm_key^unset:group_confirm_cancel_timeout^unset:group_confirm_file";
        'true' -> ""
    end.
%% @doc Work around a FreeSWITCH auto-answer quirk: when sip_auto_answer is
%% not exported, strip the auto-answer-related SIP headers and force it off.
-spec maybe_fix_fs_auto_answer_bug(kz_term:strings()) -> string().
maybe_fix_fs_auto_answer_bug(Export) ->
    Exported = lists:member("sip_auto_answer", Export),
    case Exported of
        'false' ->
            "^unset:sip_h_Call-Info^unset:sip_h_Alert-Info^unset:alert_info^unset:sip_invite_params^set:sip_auto_answer=false";
        'true' -> ""
    end.
%% @doc When callee-id vars are exported, mirror the Outbound-Callee-ID
%% values into the origination caller-id vars.
%% Fix: guard on is_binary(Value) — previously a missing
%% Outbound-Callee-ID-* value ('undefined') crashed binary_to_list/1
%% whenever the corresponding var was exported.
-spec maybe_fix_caller_id(kz_term:strings(), kz_json:object()) -> string().
maybe_fix_caller_id(Export, JObj) ->
    Fix = [{lists:member("origination_callee_id_name", Export)
           ,kz_json:get_value(<<"Outbound-Callee-ID-Name">>, JObj)
           ,"origination_caller_id_name"
           }
          ,{lists:member("origination_callee_id_number", Export)
           ,kz_json:get_value(<<"Outbound-Callee-ID-Number">>, JObj)
           ,"origination_caller_id_number"
           }
          ],
    %% NOTE(review): ":" as the join separator looks odd — sibling helpers
    %% chain inline-dialplan directives with "^"; confirm before changing.
    string:join(["^set:" ++ Key ++ "=" ++ erlang:binary_to_list(Value)
                 || {IsTrue, Value, Key} <- Fix,
                    IsTrue,
                    is_binary(Value)
                ], ":").
%% @doc Publish an originate_resp error to the requester; no-op when there
%% is no reply-to queue. A created_uuid() tuple is unwrapped first.
-spec publish_error(kz_term:ne_binary(), created_uuid() | kz_term:api_binary(), kz_json:object(), kz_term:api_binary()) -> 'ok'.
publish_error(_, _, _, 'undefined') -> 'ok';
publish_error(Error, {_, UUID}, Request, ServerId) ->
    publish_error(Error, UUID, Request, ServerId);
publish_error(Error, UUID, Request, ServerId) ->
    lager:debug("originate error: ~s", [Error]),
    E = [{<<"Msg-ID">>, kz_api:msg_id(Request)}
        ,{<<"Call-ID">>, UUID}
        ,{<<"Request">>, Request}
        ,{<<"Error-Message">>, cleanup_error(Error)}
         | kz_api:default_headers(<<"error">>, <<"originate_resp">>, ?APP_NAME, ?APP_VERSION)
        ],
    kz_api:publish_error(ServerId, props:filter_undefined(E)).
%% @doc Strip FreeSWITCH's "-ERR " prefix from an error; anything else is
%% returned unchanged.
-spec cleanup_error(kz_term:ne_binary()) -> kz_term:ne_binary().
cleanup_error(Error) ->
    case Error of
        <<"-ERR ", Reason/binary>> -> Reason;
        _ -> Error
    end.
%% @doc Tell the requester we are ready to originate; it must answer with
%% originate_execute. A created_uuid() tuple is unwrapped first.
-spec publish_originate_ready(kz_term:ne_binary(), created_uuid() | kz_term:ne_binary(), kz_json:object(), kz_term:api_binary(), kz_term:api_binary()) -> 'ok'.
publish_originate_ready(CtrlQ, {_, UUID}, Request, Q, ServerId) ->
    publish_originate_ready(CtrlQ, UUID, Request, Q, ServerId);
publish_originate_ready(CtrlQ, UUID, Request, Q, ServerId) ->
    lager:debug("originate command is ready, waiting for originate_execute"),
    Props = [{<<"Msg-ID">>, kz_api:msg_id(Request, UUID)}
            ,{<<"Call-ID">>, UUID}
            ,{<<"Control-Queue">>, CtrlQ}
             | kz_api:default_headers(Q, ?APP_NAME, ?APP_VERSION)
            ],
    kapi_dialplan:publish_originate_ready(ServerId, Props).
%% @doc Publish a final originate_resp built from a call event (bridge
%% complete / channel destroy); no-op without a reply-to queue.
-spec publish_originate_resp(kz_term:api_binary(), kz_json:object()) -> 'ok'.
publish_originate_resp('undefined', _) -> 'ok';
publish_originate_resp(ServerId, JObj) ->
    Resp = kz_json:set_values([{<<"Event-Category">>, <<"resource">>}
                              ,{<<"Event-Name">>, <<"originate_resp">>}
                              ]
                             ,JObj
                             ),
    kapi_resource:publish_originate_resp(ServerId, Resp).
%% @doc Publish a SUCCESS originate_resp for `UUID'; no-op without a
%% reply-to queue.
-spec publish_originate_resp(kz_term:api_binary(), kz_json:object(), kz_term:ne_binary()) -> 'ok'.
publish_originate_resp('undefined', _JObj, _UUID) -> 'ok';
publish_originate_resp(ServerId, JObj, UUID) ->
    Resp = kz_json:set_values([{<<"Event-Category">>, <<"resource">>}
                              ,{<<"Application-Response">>, <<"SUCCESS">>}
                              ,{<<"Event-Name">>, <<"originate_resp">>}
                              ,{<<"Call-ID">>, UUID}
                              ]
                             ,JObj
                             ),
    kapi_resource:publish_originate_resp(ServerId, Resp).
%% @doc Announce that the originate is executing (call-id + control queue);
%% no-op without a reply-to queue.
-spec publish_originate_started(kz_term:api_binary(), kz_term:ne_binary(), kz_json:object(), kz_term:ne_binary()) -> 'ok'.
publish_originate_started('undefined', _, _, _) -> 'ok';
publish_originate_started(ServerId, CallId, JObj, CtrlQ) ->
    Resp = kz_json:from_list(
             [{<<"Call-ID">>, CallId}
             ,{<<"Msg-ID">>, kz_api:msg_id(JObj)}
             ,{<<"Control-Queue">>, CtrlQ}
              | kz_api:default_headers(?APP_NAME, ?APP_VERSION)
             ]),
    kapi_resource:publish_originate_started(ServerId, Resp).
%% @doc Send the chosen outbound call-id and its control queue to the
%% requester ahead of execution; no-op without a reply-to queue.
-spec publish_originate_uuid(kz_term:api_binary(), created_uuid() | kz_term:ne_binary(), kz_json:object(), kz_term:ne_binary()) -> 'ok'.
publish_originate_uuid('undefined', _, _, _) -> 'ok';
publish_originate_uuid(ServerId, UUID, JObj, CtrlQueue) ->
    Resp = props:filter_undefined(
             [{<<"Outbound-Call-ID">>, UUID}
             ,{<<"Msg-ID">>, kz_api:msg_id(JObj)}
             ,{<<"Outbound-Call-Control-Queue">>, CtrlQueue}
              | kz_api:default_headers(?APP_NAME, ?APP_VERSION)
             ]),
    lager:debug("sent originate_uuid to ~s", [ServerId]),
    kapi_resource:publish_originate_uuid(ServerId, Resp).
%% @doc Publish the originate uuid using the control process's queue as
%% the Outbound-Call-Control-Queue.
-spec maybe_send_originate_uuid(created_uuid(), pid(), state()) -> 'ok'.
maybe_send_originate_uuid({_, UUID}, Pid, #state{server_id=ServerId
                                                ,originate_req=JObj
                                                }) ->
    CtlQ = gen_listener:queue_name(Pid),
    publish_originate_uuid(ServerId, UUID, JObj, CtlQ).
%% @doc Resolve the originate timeout: the request's positive Timeout
%% (default 10s) bumped up to the largest per-endpoint Endpoint-Timeout.
-spec find_originate_timeout(kz_json:object()) -> pos_integer().
find_originate_timeout(JObj) ->
    OTimeout = case kz_json:get_integer_value(<<"Timeout">>, JObj) of
                   Timeout when is_integer(Timeout), Timeout > 0 -> Timeout;
                   _ -> 10
               end,
    find_max_endpoint_timeout(kz_json:get_list_value(<<"Endpoints">>, JObj, [])
                             ,OTimeout
                             ).
%% @doc Fold over the endpoints, keeping the largest Endpoint-Timeout seen
%% (starting from the request-level default).
-spec find_max_endpoint_timeout(kz_json:objects(), pos_integer()) -> pos_integer().
find_max_endpoint_timeout(Endpoints, Default) ->
    lists:foldl(fun(Endpoint, Max) ->
                        case kz_json:get_integer_value(<<"Endpoint-Timeout">>, Endpoint) of
                            Timeout when is_integer(Timeout), Timeout > Max -> Timeout;
                            _ -> Max
                        end
                end
               ,Default
               ,Endpoints
               ).
%% @doc Start a call-control process for the chosen uuid (idempotent: a
%% second call with a live control_pid is a no-op). On success the uuid
%% and control queue are announced to the requester and the uuid is marked
%% in the cache so the event listener starts; on failure an error is
%% published.
-spec start_control_process(state()) ->
          {'ok', state()} |
          {'error', any()}.
start_control_process(#state{originate_req=JObj
                            ,node=Node
                            ,uuid={_, Id}=UUID
                            ,controller_q=ControllerQ
                            ,server_id=ServerId
                            ,fetch_id=FetchId
                            ,control_pid='undefined'
                            }=State) ->
    Ctx = #{node => Node
           ,call_id => Id
           ,fetch_id => FetchId
           ,controller_q => ControllerQ
           ,initial_ccvs => kz_json:new()
           },
    case ecallmgr_call_sup:start_control_process(Ctx) of
        {'ok', CtrlPid} when is_pid(CtrlPid) ->
            _ = maybe_send_originate_uuid(UUID, CtrlPid, State),
            kz_cache:store_local(?ECALLMGR_UTIL_CACHE, {Id, 'start_listener'}, 'true'),
            lager:debug("started control pid ~p for uuid ~s", [CtrlPid, Id]),
            {'ok', State#state{control_pid=CtrlPid}};
        {'error', _E}=E ->
            Error = <<"failed to preemptively start a call control process">>,
            _ = publish_error(Error, UUID, JObj, ServerId),
            E
    end;
start_control_process(#state{control_pid=_Pid
                            ,uuid=_UUID
                            }=State) ->
    lager:debug("control process ~p exists for uuid ~p", [_Pid, _UUID]),
    {'ok', State}.
%% @doc Fire-and-forget variant of start_control_process for per-endpoint
%% uuids; honors Start-Control-Process and only logs the outcome.
-spec maybe_start_call_handlers(created_uuid(), state()) -> 'ok'.
maybe_start_call_handlers(UUID, #state{originate_req=JObj}=State) ->
    case kz_json:is_true(<<"Start-Control-Process">>, JObj, 'true')
        andalso start_control_process(State#state{uuid=UUID}) of
        'false' -> 'ok';
        {'ok', #state{control_pid=_Pid}} ->
            lager:debug("started control process for ~p: ~p", [UUID, _Pid]);
        {'error', _E} ->
            lager:debug("failed to start control process for ~p: ~p", [UUID, _E])
    end.
%% @doc Arm the timer that abandons the originate if no originate_execute
%% arrives within ?REPLY_TIMEOUT.
-spec start_abandon_timer() -> reference().
start_abandon_timer() ->
    erlang:send_after(?REPLY_TIMEOUT, self(), {'abandon_originate'}).
%% @doc Assign the endpoint its origination_uuid (endpoint override, else
%% freshly created), start per-endpoint call handlers when that uuid
%% differs from the request-level one, and strip/stash Hold-Media.
-spec update_endpoint(kz_json:object(), state()) -> kz_json:object().
update_endpoint(Endpoint, #state{node=Node
                                ,originate_req=JObj
                                ,uuid=GlobalUUID
                                }=State) ->
    {_, Id} = UUID =
        case kz_json:get_value(<<"Outbound-Call-ID">>, Endpoint) of
            'undefined' -> create_uuid(Endpoint, JObj, Node);
            OutboundCallId -> {'api', OutboundCallId}
        end,
    case uuid_matches(UUID, GlobalUUID) of
        'true' -> 'ok';
        'false' ->
            maybe_start_call_handlers(UUID, State#state{uuid=UUID
                                                       ,control_pid='undefined'
                                                       })
    end,
    EP = kz_json:set_values([{<<"origination_uuid">>, Id}
                            ], Endpoint),
    fix_hold_media(EP).
%% @doc Two created_uuid() values match when their call-ids are equal,
%% regardless of the 'fs'/'api' source tag; anything else never matches.
-spec uuid_matches(created_uuid(), created_uuid()) -> boolean().
uuid_matches({_SrcA, IdA}, {_SrcB, IdB}) -> IdA =:= IdB;
uuid_matches(_, _) -> 'false'.
%% @doc Remove Hold-Media from the endpoint, stashing it in the process
%% dictionary (side effect read elsewhere — presumably by the bridge-string
%% builder; confirm before relocating).
-spec fix_hold_media(kz_json:object()) -> kz_json:object().
fix_hold_media(Endpoint) ->
    put('hold_media', kz_json:get_value(<<"Hold-Media">>, Endpoint)),
    kz_json:delete_key(<<"Hold-Media">>, Endpoint).
%% @doc True when this FS event is a loopback::bowout whose resigning leg
%% is the uuid we currently track (i.e. our leg is being replaced).
-spec should_update_uuid(kz_term:api_binary(), kzd_freeswitch:data()) -> boolean().
should_update_uuid(OldUUID, FSJObj) ->
    case kzd_freeswitch:event_subclass(FSJObj, kzd_freeswitch:event_name(FSJObj)) of
        <<"loopback::bowout">> ->
            lager:debug("bowout detected with ~s, old uuid is ~s"
                       ,[kzd_freeswitch:resigning_id(FSJObj), OldUUID]
                       ),
            kzd_freeswitch:resigning_id(FSJObj) =:= OldUUID;
        _ -> 'false'
    end.
%% @doc On a loopback bowout affecting our tracked uuid, switch tracking
%% (and event bindings) to the acquiring leg's call-id; otherwise keep the
%% current value (which may be 'undefined').
-spec handle_fs_event(kzd_freeswitch:data(), 'undefined' | created_uuid()) -> 'undefined' | created_uuid().
handle_fs_event(FSJObj, Current) ->
    OldUUID = case Current of
                  {_Source, Id} -> Id;
                  'undefined' -> 'undefined'
              end,
    case should_update_uuid(OldUUID, FSJObj) of
        'false' -> Current;
        'true' ->
            NewUUID = kzd_freeswitch:acquired_id(FSJObj),
            _ = update_uuid(OldUUID, NewUUID),
            {'api', NewUUID}
    end.
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/applications/ecallmgr/src/ecallmgr_originate.erl | erlang | -----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
=============================================================================
API
=============================================================================
------------------------------------------------------------------------------
@doc Starts the server.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
=============================================================================
gen_server callbacks
=============================================================================
------------------------------------------------------------------------------
@doc Initializes the server.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Handling call messages.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Handling cast messages.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Handling all non call/cast messages.
@end
------------------------------------------------------------------------------
Cancelling a timer does not guarantee that the message has not
already been delivered to the message queue.
------------------------------------------------------------------------------
@doc Allows listener to pass options to handlers.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc This function is called by a `gen_server' when it is about to
terminate. It should be the opposite of `Module:init/1' and do any
necessary cleaning up. When it returns, the `gen_server' terminates
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Convert process state when code is changed.
@end
------------------------------------------------------------------------------
=============================================================================
=============================================================================
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------ | ( C ) 2012 - 2020 , 2600Hz
@author
@author
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(ecallmgr_originate).
-behaviour(gen_listener).
-export([start_link/2]).
-export([handle_originate_execute/2]).
-export([handle_call_events/2]).
-export([init/1
,handle_call/3
,handle_cast/2
,handle_info/2
,handle_event/2
,terminate/2
,code_change/3
]).
-include("ecallmgr.hrl").
-define(SERVER, ?MODULE).
-type created_uuid() :: {'fs' | 'api', kz_term:ne_binary()}.
-record(state, {node :: atom()
,server_id :: kz_term:api_binary()
,controller_q :: kz_term:api_binary()
,originate_req = kz_json:new() :: kz_json:object()
,uuid :: created_uuid() | 'undefined'
,action :: kz_term:api_binary()
,app :: kz_term:api_binary()
,dialstrings :: kz_term:api_binary()
,queue :: kz_term:api_binary()
,control_pid :: kz_term:api_pid()
,tref :: kz_term:api_reference()
,fetch_id = kz_binary:rand_hex(16)
}).
-type state() :: #state{}.
-define(BINDINGS, [{'self', []}]).
-define(RESPONDERS, [{{?MODULE, 'handle_originate_execute'}
,[{<<"dialplan">>, <<"originate_execute">>}]
}
,{{?MODULE, 'handle_call_events'}
,[{<<"call_event">>, <<"*">>}]
}
]).
-define(QUEUE_NAME, <<>>).
-define(QUEUE_OPTIONS, []).
-define(CONSUME_OPTIONS, []).
-define(ORIGINATE_PARK, <<"&park()">>).
-define(ORIGINATE_EAVESDROP, <<"eavesdrop">>).
-define(REPLY_TIMEOUT, 5 * ?MILLISECONDS_IN_SECOND).
-spec start_link(atom(), kz_json:object()) -> kz_types:startlink_ret().
start_link(Node, JObj) ->
gen_listener:start_link(?SERVER
,[{'bindings', ?BINDINGS}
,{'responders', ?RESPONDERS}
,{'queue_name', ?QUEUE_NAME}
,{'queue_options', ?QUEUE_OPTIONS}
,{'consume_options', ?CONSUME_OPTIONS}
]
,[Node, JObj]
).
-spec handle_call_events(kz_call_event:doc(), kz_term:proplist()) -> 'ok'.
handle_call_events(JObj, Props) ->
Srv = props:get_value('server', Props),
case props:get_value('uuid', Props) =:= kz_api:call_id(JObj)
andalso kz_api:event_name(JObj)
of
<<"CHANNEL_EXECUTE_COMPLETE">> ->
case kz_call_event:application_name(JObj) of
<<"bridge">> ->
gen_listener:cast(Srv, {'bridge_execute_complete', JObj});
_Else -> 'ok'
end;
<<"CHANNEL_DESTROY">> ->
gen_listener:cast(Srv, {'channel_destroy', JObj});
_Else -> 'ok'
end.
-spec handle_originate_execute(kz_json:object(), kz_term:proplist()) -> 'ok'.
handle_originate_execute(JObj, Props) ->
'true' = kapi_dialplan:originate_execute_v(JObj),
Srv = props:get_value('server', Props),
UUID = props:get_value('uuid', Props),
lager:debug("recv originate_execute for ~s", [UUID]),
_ = case kz_api:queue_id(JObj) of
'undefined' -> 'ok';
ServerId -> gen_listener:cast(Srv, {'update_server_id', ServerId})
end,
kz_cache:store_local(?ECALLMGR_UTIL_CACHE, {UUID, 'start_listener'}, 'true'),
gen_listener:cast(Srv, {'originate_execute'}).
-spec init([node() | kz_json:object()]) -> {'stop', 'normal'} | {'ok', state()}.
init([Node, JObj]) ->
_ = kz_log:put_callid(JObj),
ServerId = kz_api:server_id(JObj),
ControllerQ = kz_api:queue_id(JObj),
_ = bind_to_events(Node),
case kapi_resource:originate_req_v(JObj) of
'false' ->
Error = <<"originate failed to execute as JObj did not validate">>,
publish_error(Error, 'undefined', JObj, ServerId),
{'stop', 'normal'};
'true' ->
{'ok', #state{node=Node
,originate_req=JObj
,server_id=ServerId
,controller_q = ControllerQ
}}
end.
-spec bind_to_events(atom()) -> 'ok'.
bind_to_events(Node) ->
gproc:reg({'p', 'l', ?FS_EVENT_REG_MSG(Node, <<"loopback::bowout">>)}).
-spec handle_call(any(), kz_term:pid_ref(), state()) -> kz_types:handle_call_ret_state(state()).
handle_call(_Request, _From, State) ->
{'reply', {'error', 'not_implemented'}, State}.
-spec handle_cast(any(), state()) -> kz_types:handle_cast_ret_state(state()).
handle_cast({'gen_listener', {'created_queue', Q}}, State) ->
lager:debug("starting originate request"),
gen_listener:cast(self(), {'get_originate_action'}),
{'noreply', State#state{queue=Q}};
handle_cast({'update_server_id', ServerId}, State) ->
{'noreply', State#state{server_id=ServerId}, 'hibernate'};
handle_cast({'maybe_update_node', Node}, #state{node=Node}=State) ->
{'noreply', State};
handle_cast({'maybe_update_node', Node}, #state{node=_OldNode}=State) ->
lager:debug("updating node from ~s to ~s", [_OldNode, Node]),
{'noreply', State#state{node=Node}, 'hibernate'};
handle_cast({'create_uuid'}, #state{node=Node
,originate_req=JObj
,uuid='undefined'
}=State) ->
UUID = {_, Id} = create_uuid(JObj, Node),
kz_log:put_callid(Id),
lager:debug("created uuid ~p", [UUID]),
case kz_json:is_true(<<"Start-Control-Process">>, JObj, 'true')
andalso start_control_process(State#state{uuid=UUID}) of
'false' ->
gen_listener:cast(self(), {'build_originate_args'}),
{'noreply', State#state{uuid=UUID}, 'hibernate'};
{'ok', #state{control_pid=Pid}=State1} ->
lager:debug("started control proc ~p uuid ~p", [Pid, UUID]),
maybe_send_originate_uuid(UUID, Pid, State),
gen_listener:cast(self(), {'build_originate_args'}),
{'noreply', State1, 'hibernate'};
{'error', _E} ->
lager:debug("failed to start control proc for ~p: ~p", [UUID, _E]),
{'stop', 'normal', State}
end;
handle_cast({'get_originate_action'}, #state{originate_req=JObj
,node=Node
}=State) ->
gen_listener:cast(self(), {'build_originate_args'}),
UseNode = maybe_update_node(JObj, Node),
ApplicationName = kz_json:get_value(<<"Application-Name">>, JObj),
Action = get_originate_action(ApplicationName, JObj, UseNode),
lager:debug("originate action: ~s", [Action]),
{'noreply', State#state{action=Action
,app=ApplicationName
,node=UseNode
}
,'hibernate'
};
handle_cast({'build_originate_args'}, #state{uuid='undefined'}=State) ->
gen_listener:cast(self(), {'create_uuid'}),
{'noreply', State};
handle_cast({'build_originate_args'}, #state{originate_req=JObj
,action = ?ORIGINATE_PARK
,fetch_id=FetchId
,dialstrings='undefined'
}=State) ->
case kz_json:is_true(<<"Originate-Immediate">>, JObj) of
'true' -> gen_listener:cast(self(), {'originate_execute'});
'false' -> gen_listener:cast(self(), {'originate_ready'})
end,
Endpoints = [update_endpoint(Endpoint, State)
|| Endpoint <- kz_json:get_ne_value(<<"Endpoints">>, JObj, [])
],
{'noreply', State#state{dialstrings=build_originate_args_from_endpoints(?ORIGINATE_PARK, Endpoints, JObj, FetchId)}};
handle_cast({'build_originate_args'}, #state{originate_req=JObj
,action = Action
,app = ?ORIGINATE_EAVESDROP
,fetch_id=FetchId
,dialstrings='undefined'
}=State) ->
gen_listener:cast(self(), {'originate_ready'}),
{'noreply', State#state{dialstrings=build_originate_args(Action, State, JObj, FetchId)}};
handle_cast({'build_originate_args'}, #state{originate_req=JObj
,action=Action
,fetch_id=FetchId
,dialstrings='undefined'
}=State) ->
case kz_json:is_true(<<"Originate-Immediate">>, JObj, 'true') of
'true' -> gen_listener:cast(self(), {'originate_execute'});
'false' -> gen_listener:cast(self(), {'originate_ready'})
end,
{'noreply', State#state{dialstrings=build_originate_args(Action, State, JObj, FetchId)}};
handle_cast({'originate_ready'}, #state{node=_Node}=State) ->
case start_control_process(State) of
{'ok', #state{control_pid=Pid
,uuid=UUID
,originate_req=JObj
,server_id=ServerId
,queue=Q
}=State1} ->
CtrlQ = gen_listener:queue_name(Pid),
_ = publish_originate_ready(CtrlQ, UUID, JObj, Q, ServerId),
{'noreply', State1#state{tref=start_abandon_timer()}};
{'error', _E} ->
lager:debug("failed to start control process: ~p", [_E]),
{'stop', 'normal', State}
end;
handle_cast({'originate_execute'}, #state{tref=TRef}=State) when is_reference(TRef) ->
_ = erlang:cancel_timer(TRef),
handle_cast({'originate_execute'}, State#state{tref='undefined'});
handle_cast({'originate_execute'}, #state{dialstrings=Dialstrings
,node=Node
,originate_req=JObj
,uuid={_, UUID}
,server_id=ServerId
,control_pid=CtrlPid
}=State) ->
ControlDisabled = kz_json:is_false(<<"Start-Control-Process">>, JObj, 'false'),
case originate_execute(Node, Dialstrings, find_originate_timeout(JObj)) of
{'ok', UUID} when is_pid(CtrlPid) ->
lager:debug("originate completed for: ~s with ctrl ~p", [UUID, CtrlPid]),
_ = publish_originate_resp(ServerId, JObj, UUID),
{'stop', 'normal', State#state{control_pid='undefined'}};
{'ok', WinningUUID} when is_pid(CtrlPid) ->
lager:debug("originate completed for other UUID: ~s (not ~s)", [WinningUUID, UUID]),
_ = publish_originate_resp(ServerId, JObj, WinningUUID),
CtrlPids = ecallmgr_call_control:control_procs(WinningUUID),
_ = case lists:member(CtrlPid, CtrlPids) of
'true' -> 'ok';
'false' -> ecallmgr_call_control:stop(CtrlPid)
end,
{'stop', 'normal', State#state{control_pid='undefined'}};
{'ok', CallId} when ControlDisabled ->
lager:debug("originate completed for: ~s with no control pid", [CallId]),
_ = publish_originate_resp(ServerId, JObj, CallId),
{'stop', 'normal', State#state{control_pid='undefined'}};
{'ok', CallId} ->
kz_log:put_callid(CallId),
lager:debug("originate is executing, waiting for completion"),
erlang:monitor_node(Node, 'true'),
bind_to_call_events(CallId),
CtrlQ = ecallmgr_call_control:queue_name(CtrlPid),
_ = publish_originate_started(ServerId, CallId, JObj, CtrlQ),
{'noreply', State#state{uuid={'api', CallId}}};
{'error', Error} ->
lager:debug("failed to originate: ~p", [Error]),
_ = publish_error(Error, UUID, JObj, ServerId),
{'stop', 'normal', State}
end;
handle_cast({'bridge_execute_complete', JObj}, #state{server_id=ServerId}=State) ->
lager:debug("received bridge complete event, sending originate response"),
_ = publish_originate_resp(ServerId, JObj),
{'stop', 'normal', State};
handle_cast({'channel_destroy', JObj}, #state{server_id=ServerId}=State) ->
lager:debug("received channel destroy event, sending originate response"),
_ = publish_originate_resp(ServerId, JObj),
{'stop', 'normal', State};
handle_cast({'gen_listener',{'is_consuming',_IsConsuming}}, State) ->
{'noreply', State};
handle_cast(_Msg, State) ->
lager:debug("unhandled cast: ~p", [_Msg]),
{'noreply', State, 'hibernate'}.
-spec handle_info(any(), state()) -> kz_types:handle_info_ret_state(state()).
handle_info({'event', _UUID, FSJObj}, #state{uuid=CreatedUUID}=State) ->
{'noreply', State#state{uuid=handle_fs_event(FSJObj, CreatedUUID)}};
handle_info({'tcp', _, Data}, State) ->
Event = binary_to_term(Data),
handle_info(Event, State);
handle_info({'abandon_originate'}, #state{tref='undefined'}=State) ->
{'noreply', State};
handle_info({'abandon_originate'}, #state{originate_req=JObj
,uuid=UUID
,server_id=ServerId
}=State) ->
Error = <<"Failed to receive valid originate_execute in time">>,
_ = publish_error(Error, UUID, JObj, ServerId),
{'stop', 'normal', State};
handle_info({'nodedown', _}, #state{originate_req=JObj
,uuid=UUID
,server_id=ServerId
,node=Node
}=State) ->
erlang:monitor_node(Node, 'false'),
Error = <<"lost connection to freeswitch node">>,
_ = publish_error(Error, UUID, JObj, ServerId),
{'stop', 'normal', State};
handle_info(_Info, State) ->
lager:debug("unhandled message: ~p", [_Info]),
{'noreply', State, 'hibernate'}.
-spec handle_event(kz_json:object(), state()) -> gen_listener:handle_event_return().
handle_event(_JObj, #state{uuid={_, UUID}}) ->
{'reply', [{'uuid', UUID}]};
handle_event(_JObj, #state{uuid=UUID}) ->
{'reply', [{'uuid', UUID}]}.
with . The return value is ignored .
-spec terminate(any(), state()) -> 'ok'.
terminate(_Reason, #state{control_pid=CtrlPid}) when is_pid(CtrlPid) ->
lager:debug("stop abandoned call control process ~p", [CtrlPid]),
ecallmgr_call_control:stop(CtrlPid),
lager:debug("originate termination: ~p", [_Reason]);
terminate(_Reason, _State) ->
lager:debug("originate termination: ~p", [_Reason]).
-spec code_change(any(), state(), any()) -> {'ok', state()}.
code_change(_OldVsn, State, _Extra) ->
{'ok', State}.
Internal functions
-spec cache_fax_file(kz_term:ne_binary(), node()) -> {'ok' | 'error', kz_term:ne_binary()}.
cache_fax_file(File, Node) ->
Self = self(),
Fun = fun(Res, Reply) ->
lager:debug("cache fax file result : ~p", [{Res, Reply}]),
Self ! {cache_fax_file, {Res, Reply}}
end,
{ok, JobId} = freeswitch:bgapi(Node, 'http_get', <<"{prefetch=true}", File/binary>>, Fun),
lager:debug("waiting for cache fax file result ~s", [JobId]),
receive
{cache_fax_file, Reply} -> Reply
end.
-spec get_originate_action(kz_term:ne_binary(), kz_json:object(), node()) -> kz_term:ne_binary().
get_originate_action(<<"fax">>, JObj, Node) ->
lager:debug("got originate with action fax"),
Data = kz_json:get_value(<<"Application-Data">>, JObj),
{'ok', File} = cache_fax_file(Data, Node),
<<"&txfax(", File/binary, ")">>;
get_originate_action(<<"transfer">>, JObj, _Node) ->
get_transfer_action(JObj, kz_json:get_value([<<"Application-Data">>, <<"Route">>], JObj));
get_originate_action(<<"bridge">>, JObj, _Node) ->
lager:debug("got originate with action bridge"),
CallId = kz_json:get_binary_value(<<"Existing-Call-ID">>, JObj),
intercept_unbridged_only(CallId, JObj);
get_originate_action(<<"eavesdrop">>, JObj, _Node) ->
lager:debug("got originate with action eavesdrop"),
EavesdropCallId = kz_json:get_binary_value(<<"Eavesdrop-Call-ID">>, JObj),
case ecallmgr_fs_channel:node(EavesdropCallId) of
{'error', _} ->
lager:debug("failed to find channel ~p in node list", [kz_json:get_value(<<"Eavesdrop-Call-ID">>, JObj)]),
<<"error">>;
{'ok', N} ->
gen_listener:cast(self(), {'maybe_update_node', N}),
get_eavesdrop_action(JObj)
end;
get_originate_action(_, _, _) ->
lager:debug("got originate with action park"),
?ORIGINATE_PARK.
-spec get_transfer_action(kz_json:object(), kz_term:api_binary()) -> kz_term:ne_binary().
get_transfer_action(_JObj, 'undefined') -> <<"error">>;
get_transfer_action(JObj, Route) ->
Context = ?DEFAULT_FREESWITCH_CONTEXT,
UnsetVars = get_unset_vars(JObj),
list_to_binary(
["'m:^:", UnsetVars
,"transfer:", Route
," XML ", Context, "' inline"
]
).
-spec intercept_unbridged_only(kz_term:ne_binary() | 'undefined', kz_json:object()) -> kz_term:ne_binary().
intercept_unbridged_only('undefined', JObj) ->
get_bridge_action(JObj);
intercept_unbridged_only(ExistingCallId, JObj) ->
case kz_json:is_true(<<"Intercept-Unbridged-Only">>, JObj, 'true') of
'true' ->
<<" 'set:intercept_unbridged_only=true,intercept:", ExistingCallId/binary, "' inline ">>;
'false' ->
<<" 'set:intercept_unbridged_only=false,intercept:", ExistingCallId/binary, "' inline ">>
end.
-spec get_bridge_action(kz_json:object()) -> kz_term:ne_binary().
get_bridge_action(JObj) ->
Data = kz_json:get_value(<<"Application-Data">>, JObj),
case ecallmgr_util:build_channel(Data) of
{'error', _} -> <<"error">>;
{'ok', Channel} ->
UnsetVars = get_unset_vars(JObj),
list_to_binary(
["'m:^:", UnsetVars
,"bridge:", Channel, "' inline"
]
)
end.
-spec maybe_update_node(kz_json:object(), atom()) -> atom().
maybe_update_node(JObj, Node) ->
case kz_json:get_binary_value(<<"Existing-Call-ID">>, JObj) of
'undefined' -> Node;
CallId ->
case ecallmgr_fs_channel:node(CallId) of
{'error', _} -> Node;
{'ok', Node} -> Node;
{'ok', N} -> lager:debug("updating node from ~s to ~s", [Node, N]),
N
end
end.
-spec get_eavesdrop_action(kz_json:object()) -> kz_term:ne_binary().
get_eavesdrop_action(JObj) ->
{CallId, Group} = case kz_json:get_value(<<"Eavesdrop-Group-ID">>, JObj) of
'undefined' -> {kz_json:get_binary_value(<<"Eavesdrop-Call-ID">>, JObj), <<>>};
ID -> {<<"all">>, <<"eavesdrop_require_group=", ID/binary, ",">>}
end,
case kz_json:get_value(<<"Eavesdrop-Mode">>, JObj) of
<<"whisper">> -> <<Group/binary, "queue_dtmf:w2@500,eavesdrop:", CallId/binary, " inline">>;
<<"full">> -> <<Group/binary, "queue_dtmf:w3@500,eavesdrop:", CallId/binary, " inline">>;
<<"listen">> -> <<Group/binary, "eavesdrop:", CallId/binary, " inline">>;
'undefined' -> <<Group/binary, "eavesdrop:", CallId/binary, " inline">>
end.
-spec build_originate_args(kz_term:ne_binary(), state(), kz_json:object(), kz_term:ne_binary()) -> kz_term:api_binary().
build_originate_args(Action, State, JObj, FetchId) ->
case kz_json:get_value(<<"Endpoints">>, JObj, []) of
[] ->
lager:warning("no endpoints defined in originate request"),
'undefined';
[Endpoint] ->
lager:debug("only one endpoint, don't create per-endpoint UUIDs"),
build_originate_args_from_endpoints(Action, [update_endpoint(Endpoint, State)], JObj, FetchId);
Endpoints ->
lager:debug("multiple endpoints defined, assigning uuids to each"),
UpdatedEndpoints = [update_endpoint(Endpoint, State) || Endpoint <- Endpoints],
build_originate_args_from_endpoints(Action, UpdatedEndpoints, JObj, FetchId)
end.
-spec build_originate_args_from_endpoints(kz_term:ne_binary(), kz_json:objects(), kz_json:object(), kz_term:ne_binary()) ->
kz_term:ne_binary().
build_originate_args_from_endpoints(Action, Endpoints, JObj, FetchId) ->
lager:debug("building originate command arguments"),
DialSeparator = ecallmgr_util:get_dial_separator(JObj, Endpoints),
DialStrings = ecallmgr_util:build_bridge_string(Endpoints, DialSeparator),
ChannelVars = get_channel_vars(JObj, FetchId),
list_to_binary([ChannelVars, DialStrings, " ", Action]).
-spec get_channel_vars(kz_json:object(), kz_term:ne_binary()) -> iolist().
get_channel_vars(JObj, FetchId) ->
InteractionId = kz_json:get_value([<<"Custom-Channel-Vars">>, <<?CALL_INTERACTION_ID>>], JObj, ?CALL_INTERACTION_DEFAULT),
CCVs = [{<<"Fetch-ID">>, FetchId}
,{<<"Ecallmgr-Node">>, kz_term:to_binary(node())}
,{<<?CALL_INTERACTION_ID>>, InteractionId}
],
J = kz_json:from_list_recursive([{<<"Custom-Channel-Vars">>, add_ccvs(JObj, CCVs)}]),
ecallmgr_fs_xml:get_channel_vars(kz_json:merge(JObj, J)).
-spec add_ccvs(kz_json:object(), kz_term:proplist()) -> kz_term:proplist().
add_ccvs(JObj, Props) ->
Routines = [fun maybe_add_loopback/2
,fun maybe_add_origination_uuid/2
],
lists:foldl(fun(Fun, Acc) -> Fun(JObj, Acc) end, Props, Routines).
-spec maybe_add_origination_uuid(kz_json:object(), kz_term:proplist()) -> kz_term:proplist().
maybe_add_origination_uuid(JObj, Props) ->
case kz_json:get_ne_binary_value(<<"Outbound-Call-ID">>, JObj) of
'undefined' -> Props;
CallId -> [{<<"Origination-Call-ID">>, CallId} | Props]
end.
-spec maybe_add_loopback(kz_json:object(), kz_term:proplist()) -> kz_term:proplist().
maybe_add_loopback(JObj, Props) ->
case kz_json:get_binary_boolean(<<"Simplify-Loopback">>, JObj) of
'undefined' -> Props;
SimpliFly -> add_loopback(kz_term:is_true(SimpliFly)) ++ Props
end.
-spec add_loopback(boolean()) -> kz_term:proplist().
add_loopback('true') ->
[{<<"Simplify-Loopback">>, 'true'}
,{<<"Loopback-Bowout">>, 'true'}
];
add_loopback('false') ->
[{<<"Simplify-Loopback">>, 'false'}
,{<<"Loopback-Bowout">>, 'false'}
].
-spec originate_execute(atom(), kz_term:ne_binary(), pos_integer()) ->
{'ok', kz_term:ne_binary()} |
{'error', kz_term:ne_binary() | 'timeout' | 'crash'}.
originate_execute(Node, Dialstrings, _Timeout) ->
lager:debug("executing originate on ~s: ~s", [Node, Dialstrings]),
freeswitch:async_api(Node, 'originate', Dialstrings).
-spec bind_to_call_events(kz_term:ne_binary()) -> 'ok'.
bind_to_call_events(CallId) ->
lager:debug("binding to call events for ~s", [CallId]),
Options = [{'callid', CallId}
,{'restrict_to', ['events']}
],
gen_listener:add_binding(self(), 'call', Options).
-spec unbind_from_call_events() -> 'ok'.
unbind_from_call_events() ->
lager:debug("unbind from call events"),
gen_listener:rm_binding(self(), 'call', []).
-spec update_uuid(kz_term:api_binary(), kz_term:ne_binary()) -> 'ok'.
update_uuid(OldUUID, NewUUID) ->
kz_log:put_callid(NewUUID),
lager:debug("updating call id from ~s to ~s", [OldUUID, NewUUID]),
unbind_from_call_events(),
bind_to_call_events(NewUUID),
'ok'.
-spec create_uuid(atom()) -> created_uuid().
create_uuid(_Node) -> {'fs', kz_binary:rand_hex(18)}.
-spec create_uuid(kz_json:object(), atom()) -> created_uuid().
create_uuid(JObj, Node) ->
case kz_json:get_binary_value(<<"Outbound-Call-ID">>, JObj) of
'undefined' -> create_uuid(Node);
CallId -> {'api', CallId}
end.
-spec create_uuid(kz_json:object(), kz_json:object(), atom()) -> created_uuid().
create_uuid(Endpoint, _JObj, Node) ->
case kz_json:get_binary_value(<<"Outbound-Call-ID">>, Endpoint) of
'undefined' -> create_uuid(Node);
CallId -> {'api', CallId}
end.
-spec get_unset_vars(kz_json:object()) -> iolist().
get_unset_vars(JObj) ->
Refactor ( wishes he had unit tests here for you to use )
ExportProps = [{K, <<>>} || K <- kz_json:get_value(<<"Export-Custom-Channel-Vars">>, JObj, [])],
Export = [K || KV <- lists:foldr(fun ecallmgr_fs_xml:kazoo_var_to_fs_var/2
,[]
,[{<<"Custom-Channel-Vars">>, kz_json:from_list(ExportProps)}]
),
([K, _] = string:tokens(binary_to_list(KV), "=")) =/= 'undefined'
],
case ["unset:" ++ K
|| KV <- lists:foldr(fun ecallmgr_fs_xml:kazoo_var_to_fs_var/2, [], kz_json:to_proplist(JObj))
,not lists:member(begin [K, _] = string:tokens(binary_to_list(KV), "="), K end, Export)]
of
[] -> "";
Unset ->
[string:join(Unset, "^")
,maybe_fix_ignore_early_media(Export)
,maybe_fix_group_confirm(Export)
,maybe_fix_fs_auto_answer_bug(Export)
,maybe_fix_caller_id(Export, JObj)
,"^"
]
end.
-spec maybe_fix_ignore_early_media(kz_term:strings()) -> string().
maybe_fix_ignore_early_media(Export) ->
case lists:member("ignore_early_media", Export) of
'true' -> "";
'false' -> "^unset:ignore_early_media"
end.
-spec maybe_fix_group_confirm(kz_term:strings()) -> string().
maybe_fix_group_confirm(Export) ->
case lists:member("group_confirm_key", Export) of
'true' -> "";
'false' -> "^unset:group_confirm_key^unset:group_confirm_cancel_timeout^unset:group_confirm_file"
end.
-spec maybe_fix_fs_auto_answer_bug(kz_term:strings()) -> string().
maybe_fix_fs_auto_answer_bug(Export) ->
case lists:member("sip_auto_answer", Export) of
'true' -> "";
'false' ->
"^unset:sip_h_Call-Info^unset:sip_h_Alert-Info^unset:alert_info^unset:sip_invite_params^set:sip_auto_answer=false"
end.
-spec maybe_fix_caller_id(kz_term:strings(), kz_json:object()) -> string().
maybe_fix_caller_id(Export, JObj) ->
Fix = [
{lists:member("origination_callee_id_name", Export), kz_json:get_value(<<"Outbound-Callee-ID-Name">>, JObj), "origination_caller_id_name"}
,{lists:member("origination_callee_id_number", Export), kz_json:get_value(<<"Outbound-Callee-ID-Number">>, JObj), "origination_caller_id_number"}
],
string:join([ "^set:" ++ Key ++ "=" ++ erlang:binary_to_list(Value) || {IsTrue, Value, Key} <- Fix, IsTrue ], ":").
-spec publish_error(kz_term:ne_binary(), created_uuid() | kz_term:api_binary(), kz_json:object(), kz_term:api_binary()) -> 'ok'.
publish_error(_, _, _, 'undefined') -> 'ok';
publish_error(Error, {_, UUID}, Request, ServerId) ->
publish_error(Error, UUID, Request, ServerId);
publish_error(Error, UUID, Request, ServerId) ->
lager:debug("originate error: ~s", [Error]),
E = [{<<"Msg-ID">>, kz_api:msg_id(Request)}
,{<<"Call-ID">>, UUID}
,{<<"Request">>, Request}
,{<<"Error-Message">>, cleanup_error(Error)}
| kz_api:default_headers(<<"error">>, <<"originate_resp">>, ?APP_NAME, ?APP_VERSION)
],
kz_api:publish_error(ServerId, props:filter_undefined(E)).
-spec cleanup_error(kz_term:ne_binary()) -> kz_term:ne_binary().
cleanup_error(<<"-ERR ", E/binary>>) -> E;
cleanup_error(E) -> E.
-spec publish_originate_ready(kz_term:ne_binary(), created_uuid() | kz_term:ne_binary(), kz_json:object(), kz_term:api_binary(), kz_term:api_binary()) -> 'ok'.
publish_originate_ready(CtrlQ, {_, UUID}, Request, Q, ServerId) ->
publish_originate_ready(CtrlQ, UUID, Request, Q, ServerId);
publish_originate_ready(CtrlQ, UUID, Request, Q, ServerId) ->
lager:debug("originate command is ready, waiting for originate_execute"),
Props = [{<<"Msg-ID">>, kz_api:msg_id(Request, UUID)}
,{<<"Call-ID">>, UUID}
,{<<"Control-Queue">>, CtrlQ}
| kz_api:default_headers(Q, ?APP_NAME, ?APP_VERSION)
],
kapi_dialplan:publish_originate_ready(ServerId, Props).
-spec publish_originate_resp(kz_term:api_binary(), kz_json:object()) -> 'ok'.
publish_originate_resp('undefined', _) -> 'ok';
publish_originate_resp(ServerId, JObj) ->
Resp = kz_json:set_values([{<<"Event-Category">>, <<"resource">>}
,{<<"Event-Name">>, <<"originate_resp">>}
]
,JObj
),
kapi_resource:publish_originate_resp(ServerId, Resp).
-spec publish_originate_resp(kz_term:api_binary(), kz_json:object(), kz_term:ne_binary()) -> 'ok'.
publish_originate_resp('undefined', _JObj, _UUID) -> 'ok';
publish_originate_resp(ServerId, JObj, UUID) ->
Resp = kz_json:set_values([{<<"Event-Category">>, <<"resource">>}
,{<<"Application-Response">>, <<"SUCCESS">>}
,{<<"Event-Name">>, <<"originate_resp">>}
,{<<"Call-ID">>, UUID}
]
,JObj
),
kapi_resource:publish_originate_resp(ServerId, Resp).
-spec publish_originate_started(kz_term:api_binary(), kz_term:ne_binary(), kz_json:object(), kz_term:ne_binary()) -> 'ok'.
publish_originate_started('undefined', _, _, _) -> 'ok';
publish_originate_started(ServerId, CallId, JObj, CtrlQ) ->
Resp = kz_json:from_list(
[{<<"Call-ID">>, CallId}
,{<<"Msg-ID">>, kz_api:msg_id(JObj)}
,{<<"Control-Queue">>, CtrlQ}
| kz_api:default_headers(?APP_NAME, ?APP_VERSION)
]),
kapi_resource:publish_originate_started(ServerId, Resp).
-spec publish_originate_uuid(kz_term:api_binary(), created_uuid() | kz_term:ne_binary(), kz_json:object(), kz_term:ne_binary()) -> 'ok'.
publish_originate_uuid('undefined', _, _, _) -> 'ok';
publish_originate_uuid(ServerId, UUID, JObj, CtrlQueue) ->
Resp = props:filter_undefined(
[{<<"Outbound-Call-ID">>, UUID}
,{<<"Msg-ID">>, kz_api:msg_id(JObj)}
,{<<"Outbound-Call-Control-Queue">>, CtrlQueue}
| kz_api:default_headers(?APP_NAME, ?APP_VERSION)
]),
lager:debug("sent originate_uuid to ~s", [ServerId]),
kapi_resource:publish_originate_uuid(ServerId, Resp).
-spec maybe_send_originate_uuid(created_uuid(), pid(), state()) -> 'ok'.
maybe_send_originate_uuid({_, UUID}, Pid, #state{server_id=ServerId
,originate_req=JObj
}) ->
CtlQ = gen_listener:queue_name(Pid),
publish_originate_uuid(ServerId, UUID, JObj, CtlQ).
-spec find_originate_timeout(kz_json:object()) -> pos_integer().
find_originate_timeout(JObj) ->
OTimeout = case kz_json:get_integer_value(<<"Timeout">>, JObj) of
'undefined' -> 10;
LT when LT > 0 -> LT;
_ -> 10
end,
find_max_endpoint_timeout(kz_json:get_list_value(<<"Endpoints">>, JObj, [])
,OTimeout
).
-spec find_max_endpoint_timeout(kz_json:objects(), pos_integer()) -> pos_integer().
find_max_endpoint_timeout([], T) -> T;
find_max_endpoint_timeout([EP|EPs], T) ->
case kz_json:get_integer_value(<<"Endpoint-Timeout">>, EP) of
'undefined' -> find_max_endpoint_timeout(EPs, T);
Timeout when Timeout > T -> find_max_endpoint_timeout(EPs, Timeout);
_ -> find_max_endpoint_timeout(EPs, T)
end.
-spec start_control_process(state()) ->
{'ok', state()} |
{'error', any()}.
start_control_process(#state{originate_req=JObj
,node=Node
,uuid={_, Id}=UUID
,controller_q=ControllerQ
,server_id=ServerId
,fetch_id=FetchId
,control_pid='undefined'
}=State) ->
Ctx = #{node => Node
,call_id => Id
,fetch_id => FetchId
,controller_q => ControllerQ
,initial_ccvs => kz_json:new()
},
case ecallmgr_call_sup:start_control_process(Ctx) of
{'ok', CtrlPid} when is_pid(CtrlPid) ->
_ = maybe_send_originate_uuid(UUID, CtrlPid, State),
kz_cache:store_local(?ECALLMGR_UTIL_CACHE, {Id, 'start_listener'}, 'true'),
lager:debug("started control pid ~p for uuid ~s", [CtrlPid, Id]),
{'ok', State#state{control_pid=CtrlPid}};
{'error', _E}=E ->
Error = <<"failed to preemptively start a call control process">>,
_ = publish_error(Error, UUID, JObj, ServerId),
E
end;
start_control_process(#state{control_pid=_Pid
,uuid=_UUID
}=State) ->
lager:debug("control process ~p exists for uuid ~p", [_Pid, _UUID]),
{'ok', State}.
-spec maybe_start_call_handlers(created_uuid(), state()) -> 'ok'.
maybe_start_call_handlers(UUID, #state{originate_req=JObj}=State) ->
case kz_json:is_true(<<"Start-Control-Process">>, JObj, 'true')
andalso start_control_process(State#state{uuid=UUID}) of
'false' -> 'ok';
{'ok', #state{control_pid=_Pid}} ->
lager:debug("started control process for ~p: ~p", [UUID, _Pid]);
{'error', _E} ->
lager:debug("failed to start control process for ~p: ~p", [UUID, _E])
end.
-spec start_abandon_timer() -> reference().
start_abandon_timer() ->
erlang:send_after(?REPLY_TIMEOUT, self(), {'abandon_originate'}).
-spec update_endpoint(kz_json:object(), state()) -> kz_json:object().
update_endpoint(Endpoint, #state{node=Node
,originate_req=JObj
,uuid=GlobalUUID
}=State) ->
{_, Id} = UUID =
case kz_json:get_value(<<"Outbound-Call-ID">>, Endpoint) of
'undefined' -> create_uuid(Endpoint, JObj, Node);
OutboundCallId -> {'api', OutboundCallId}
end,
case uuid_matches(UUID, GlobalUUID) of
'true' -> 'ok';
'false' ->
maybe_start_call_handlers(UUID, State#state{uuid=UUID
,control_pid='undefined'
})
end,
EP = kz_json:set_values([{<<"origination_uuid">>, Id}
], Endpoint),
fix_hold_media(EP).
-spec uuid_matches(created_uuid(), created_uuid()) -> boolean().
uuid_matches({_, UUID}, {_, UUID}) -> 'true';
uuid_matches(_, _) -> 'false'.
-spec fix_hold_media(kz_json:object()) -> kz_json:object().
fix_hold_media(Endpoint) ->
put('hold_media', kz_json:get_value(<<"Hold-Media">>, Endpoint)),
kz_json:delete_key(<<"Hold-Media">>, Endpoint).
-spec should_update_uuid(kz_term:api_binary(), kzd_freeswitch:data()) -> boolean().
%% Decide whether a FreeSWITCH event obsoletes the UUID we track: only a
%% "loopback::bowout" event whose resigning leg equals OldUUID does.
should_update_uuid(OldUUID, FSJObj) ->
    case kzd_freeswitch:event_subclass(FSJObj, kzd_freeswitch:event_name(FSJObj)) of
        <<"loopback::bowout">> ->
            lager:debug("bowout detected with ~s, old uuid is ~s"
                       ,[kzd_freeswitch:resigning_id(FSJObj), OldUUID]
                       ),
            kzd_freeswitch:resigning_id(FSJObj) =:= OldUUID;
        _ -> 'false'
    end.
-spec handle_fs_event(kzd_freeswitch:data(), 'undefined' | created_uuid()) -> 'undefined' | created_uuid().
%% React to a FreeSWITCH event: when a loopback bowout resigns the UUID
%% currently tracked, adopt the acquired UUID (tagged 'api') and record
%% the change via update_uuid/2; otherwise keep the current value.
handle_fs_event(FSJObj, Current) ->
    OldUUID = case Current of
                  'undefined' -> 'undefined';
                  {_Tag, Tracked} -> Tracked
              end,
    case should_update_uuid(OldUUID, FSJObj) of
        'false' -> Current;
        'true' ->
            NewUUID = kzd_freeswitch:acquired_id(FSJObj),
            _ = update_uuid(OldUUID, NewUUID),
            {'api', NewUUID}
    end.
|
921f991ca2afae7063bf5713c618efd3d4faec5cd5a777e820eba03f4388b0c7 | travelping/ergw | 3gpp_qos.erl | -module('3gpp_qos').
-export([decode/1, encode/1]).
-include("include/3gpp.hrl").
%%====================================================================
%% API
%%====================================================================
%% Decode the 3GPP Quality of Service IE (3GPP TS 24.008, 10.5.6.5).
%% The bit-syntax pattern matches the mandatory octets 3..13; whatever
%% remains in Optional is handed to decode/4 starting at octet 14.
%% A binary that does not match the mandatory layout is returned as-is.
decode(<<_:2, DelayClass:3, ReliabilityClass:3,
         PeakThroughput:4, _:1, PrecedenceClass:3,
         _:3, MeanThroughput:5,
         TrafficClass:3, DeliveryOrder:2, DeliveryOfErroneorousSDU:3,
         MaxSDUsize:8,
         MaxBitRateUpLink:8,
         MaxBitRateDownLink:8,
         ResidualBER:4, SDUerrorRatio:4,
         TransferDelay:6, TrafficHandlingPriority:2,
         GuaranteedBitRateUpLink:8,
         GuaranteedBitRateDownLink:8,
         Optional/binary>> = IE) ->
    %% Coded fields (SDU size, bit rates, delay) are expanded to their
    %% engineering values; the extension-only fields default to 0.
    QoS = #qos{
      delay_class = DelayClass,
      reliability_class = ReliabilityClass,
      peak_throughput = PeakThroughput,
      precedence_class = PrecedenceClass,
      mean_throughput = MeanThroughput,
      traffic_class = TrafficClass,
      delivery_order = DeliveryOrder,
      delivery_of_erroneorous_sdu = DeliveryOfErroneorousSDU,
      max_sdu_size = decode_sdu_size(MaxSDUsize),
      max_bit_rate_uplink = decode_br(MaxBitRateUpLink),
      max_bit_rate_downlink = decode_br(MaxBitRateDownLink),
      residual_ber = ResidualBER,
      sdu_error_ratio = SDUerrorRatio,
      transfer_delay = decode_delay(TransferDelay),
      traffic_handling_priority = TrafficHandlingPriority,
      guaranteed_bit_rate_uplink = decode_br(GuaranteedBitRateUpLink),
      guaranteed_bit_rate_downlink = decode_br(GuaranteedBitRateDownLink),
      signaling_indication = 0,
      source_statistics_descriptor = 0
     },
    decode(IE, 14, Optional, QoS);
decode(IE) ->
    IE.
%% Encode a #qos{} record into the binary QoS IE.  An already-encoded
%% binary is passed through unchanged.  The mandatory octets are built
%% here; encode_optional/2 appends whichever extension octets the
%% record's values require.
encode(IE) when is_binary(IE) ->
    IE;
encode(#qos{
          delay_class = DelayClass,
          reliability_class = ReliabilityClass,
          peak_throughput = PeakThroughput,
          precedence_class = PrecedenceClass,
          mean_throughput = MeanThroughput,
          traffic_class = TrafficClass,
          delivery_order = DeliveryOrder,
          delivery_of_erroneorous_sdu = DeliveryOfErroneorousSDU,
          max_sdu_size = MaxSDUsize,
          max_bit_rate_uplink = MaxBitRateUpLink,
          max_bit_rate_downlink = MaxBitRateDownLink,
          residual_ber = ResidualBER,
          sdu_error_ratio = SDUerrorRatio,
          transfer_delay = TransferDelay,
          traffic_handling_priority = TrafficHandlingPriority,
          guaranteed_bit_rate_uplink = GuaranteedBitRateUpLink,
          guaranteed_bit_rate_downlink = GuaranteedBitRateDownLink
         } = QoS) ->
    %% Spare bits are written as 0; coded fields are re-compressed with
    %% the encode_* helpers (inverse of the decode side).
    IE = <<0:2, DelayClass:3, ReliabilityClass:3,
           PeakThroughput:4, 0:1, PrecedenceClass:3,
           0:3, MeanThroughput:5,
           TrafficClass:3, DeliveryOrder:2, DeliveryOfErroneorousSDU:3,
           (encode_sdu_size(MaxSDUsize)):8,
           (encode_br(MaxBitRateUpLink)):8,
           (encode_br(MaxBitRateDownLink)):8,
           ResidualBER:4, SDUerrorRatio:4,
           (encode_delay(TransferDelay)):6, TrafficHandlingPriority:2,
           (encode_br(GuaranteedBitRateUpLink)):8,
           (encode_br(GuaranteedBitRateDownLink)):8>>,
    encode_optional(IE, QoS).
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% Number of padding octets needed to round Length up to the next
%% multiple of Width; 0 when Length is already aligned.
pad_length(Width, Length) ->
    case Length rem Width of
        0 -> 0;
        Partial -> Width - Partial
    end.
%%
%% Pad a binary with zero octets up to a multiple of Width octets.
%% See: http://erlang.org/pipermail/erlang-questions/2008-December/040709.html
%%
pad_to(Width, Binary) ->
    case byte_size(Binary) rem Width of
        0 -> Binary;
        Partial -> <<Binary/binary, 0:((Width - Partial) * 8)>>
    end.
%% Maximum SDU size, 3GPP TS 24.008, Sect. 10.5.6.5, Table 10.5.156.
%% 0 means "subscribed"; codes up to 2#10010110 are in units of 10
%% octets; three dedicated codes cover 1502/1510/1520 octets; anything
%% else is reserved.
decode_sdu_size(Code) ->
    if
        Code =:= 0 -> subscribed;
        Code =< 2#10010110 -> Code * 10;
        Code =:= 2#10010111 -> 1502;
        Code =:= 2#10011000 -> 1510;
        Code =:= 2#10011001 -> 1520;
        true -> reserved
    end.
%% Inverse of decode_sdu_size/1: sizes up to 1500 octets are rounded up
%% to the next multiple of ten; unknown values map to the reserved code.
encode_sdu_size(Size) ->
    case Size of
        subscribed -> 0;
        _ when is_integer(Size), Size =< 1500 -> (Size + 9) div 10;
        1502 -> 2#10010111;
        1510 -> 2#10011000;
        1520 -> 2#10011001;
        _ -> 2#11111111
    end.
%% Bit rate in kbit/s from the one-octet code: 0 = subscribed,
%% 255 = 0 kbit/s, otherwise three linear segments selected by the two
%% most significant bits (granularity 1 / 8 / 64 kbit/s).
decode_br(Code) ->
    case Code of
        2#00000000 -> subscribed;
        2#11111111 -> 0;
        _ when Code < 2#01000000 -> Code;
        _ when Code < 2#10000000 -> 64 + (Code - 2#01000000) * 8;
        _ -> 576 + (Code - 2#10000000) * 64
    end.
%% Inverse of decode_br/1: kbit/s to the one-octet code; rates beyond
%% 8640 kbit/s (and non-integer values) map to the top non-zero code.
encode_br(subscribed) -> 2#00000000;
encode_br(0) -> 2#11111111;
encode_br(Rate) ->
    if
        Rate =< 63 -> Rate;
        Rate =< 568 -> 2#01000000 + (Rate - 64) div 8;
        Rate =< 8640 -> 2#10000000 + (Rate - 576) div 64;
        true -> 2#11111110
    end.
%% Extended bit-rate octet: a zero extension keeps the base-decoded
%% rate Prev; otherwise the extension code replaces it, covering
%% 8700 kbit/s up to 256 Mbit/s in three linear segments.
decode_ext_br(0, Prev) -> Prev;
decode_ext_br(Code, _Prev) ->
    if
        Code =< 2#01001010 -> 8600 + Code * 100;
        Code =< 2#10111010 -> 16000 + (Code - 2#01001010) * 1000;
        Code =< 2#11111010 -> 128000 + (Code - 2#10111010) * 2000;
        true -> 256000
    end.
%% Inverse of decode_ext_br/2: rates representable by the base octet
%% (=< 8640) yield extension 0; higher rates are compressed into the
%% three extension segments, saturating at the top code.
encode_ext_br(subscribed) -> 0;
encode_ext_br(Rate) ->
    if
        Rate =< 8640 -> 2#00000000;
        Rate =< 16000 -> (Rate - 8600) div 100;
        Rate =< 128000 -> 2#01001010 + (Rate - 16000) div 1000;
        Rate =< 256000 -> 2#10111010 + (Rate - 128000) div 2000;
        true -> 2#11111010
    end.
%% Second extension of the bit-rate octet: zero keeps the previously
%% decoded rate Prev; non-zero codes cover 260 Mbit/s up to 10 Gbit/s
%% in three linear segments (values in kbit/s).
decode_ext2_br(0, Prev) -> Prev;
decode_ext2_br(Code, _Prev) ->
    if
        Code =< 2#00111101 -> 256000 + Code * 4000;
        Code =< 2#10100001 -> 500000 + (Code - 2#00111101) * 10000;
        Code =< 2#11110110 -> 1500000 + (Code - 2#10100001) * 100000;
        true -> 10000000
    end.
%% Inverse of decode_ext2_br/2: rates representable without this
%% extension (=< 256000 kbit/s) yield 0; higher rates are compressed
%% into the three ext2 segments, saturating at the top code.
encode_ext2_br(subscribed) -> 0;
encode_ext2_br(Rate) ->
    if
        Rate =< 256000 -> 2#00000000;
        Rate =< 500000 -> (Rate - 256000) div 4000;
        Rate =< 1500000 -> 2#00111101 + (Rate - 500000) div 10000;
        Rate =< 10000000 -> 2#10100001 + (Rate - 1500000) div 100000;
        true -> 2#11110110
    end.
%% Transfer delay in milliseconds from the 6-bit code: 0 = subscribed;
%% three segments with 10/50/100 ms granularity; the top code is
%% reserved.
decode_delay(0) -> subscribed;
decode_delay(Code) ->
    if
        Code =< 2#001111 -> Code * 10;
        Code =< 2#011111 -> 200 + (Code - 2#010000) * 50;
        Code =< 2#111110 -> 1000 + (Code - 2#100000) * 100;
        true -> reserved
    end.
%% Inverse of decode_delay/1: milliseconds to the 6-bit code; delays
%% beyond 4000 ms (and non-integer values) map to the reserved code.
encode_delay(subscribed) -> 0;
encode_delay(Delay) ->
    if
        Delay =< 150 -> Delay div 10;
        Delay =< 950 -> 2#010000 + (Delay - 200) div 50;
        Delay =< 4000 -> 2#100000 + (Delay - 1000) div 100;
        true -> 2#111111
    end.
%% Decode the optional extension octets of the QoS IE.  Each clause
%% handles the extension starting at the given octet number and threads
%% the accumulated #qos{} record on to the next extension:
%%   14 - signalling indication / source statistics descriptor
%%   15 - downlink bit-rate extension      17 - uplink extension
%%   19 - downlink bit-rate extension 2    21 - uplink extension 2
decode(_IE, _Octet, <<>>, QoS) ->
    QoS;
decode(IE, 14, <<_:3, SignalingIndication:1, SourceStatisticsDescriptor:4, Optional/binary>>, QoS0) ->
    QoS = QoS0#qos{
            signaling_indication = SignalingIndication,
            source_statistics_descriptor = SourceStatisticsDescriptor
           },
    decode(IE, 15, Optional, QoS);
decode(IE, 15, <<MaxBitRateDownLinkExt:8, GuaranteedBitRateDownLinkExt:8, Optional/binary>>,
       #qos{max_bit_rate_downlink = MaxBitRateDownLink,
            guaranteed_bit_rate_downlink = GuaranteedBitRateDownLink} = QoS0) ->
    QoS = QoS0#qos{
            max_bit_rate_downlink = decode_ext_br(MaxBitRateDownLinkExt, MaxBitRateDownLink),
            guaranteed_bit_rate_downlink = decode_ext_br(GuaranteedBitRateDownLinkExt, GuaranteedBitRateDownLink)
           },
    decode(IE, 17, Optional, QoS);
decode(IE, 17, <<MaxBitRateUpLinkExt:8, GuaranteedBitRateUpLinkExt:8, Optional/binary>>,
       #qos{max_bit_rate_uplink = MaxBitRateUpLink,
            guaranteed_bit_rate_uplink = GuaranteedBitRateUpLink} = QoS0) ->
    QoS = QoS0#qos{
            max_bit_rate_uplink = decode_ext_br(MaxBitRateUpLinkExt, MaxBitRateUpLink),
            guaranteed_bit_rate_uplink = decode_ext_br(GuaranteedBitRateUpLinkExt, GuaranteedBitRateUpLink)
           },
    decode(IE, 19, Optional, QoS);
decode(IE, 19, <<MaxBitRateDownLinkExt:8, GuaranteedBitRateDownLinkExt:8, Optional/binary>>,
       #qos{max_bit_rate_downlink = MaxBitRateDownLink,
            guaranteed_bit_rate_downlink = GuaranteedBitRateDownLink} = QoS0) ->
    QoS = QoS0#qos{
            max_bit_rate_downlink = decode_ext2_br(MaxBitRateDownLinkExt, MaxBitRateDownLink),
            guaranteed_bit_rate_downlink = decode_ext2_br(GuaranteedBitRateDownLinkExt, GuaranteedBitRateDownLink)
           },
    decode(IE, 21, Optional, QoS);
decode(IE, 21, <<MaxBitRateUpLinkExt:8, GuaranteedBitRateUpLinkExt:8, Optional/binary>>,
       #qos{max_bit_rate_uplink = MaxBitRateUpLink,
            guaranteed_bit_rate_uplink = GuaranteedBitRateUpLink} = QoS0) ->
    QoS = QoS0#qos{
            max_bit_rate_uplink = decode_ext2_br(MaxBitRateUpLinkExt, MaxBitRateUpLink),
            guaranteed_bit_rate_uplink = decode_ext2_br(GuaranteedBitRateUpLinkExt, GuaranteedBitRateUpLink)
           },
    decode(IE, 23, Optional, QoS);
decode(_IE, _Octet, _Rest, QoS) ->
    %% decoding of optional fields failed, but
    %% 3GPP TS 29.060, Sect. 11.1.6, "Invalid IE Length" says:
    %%
    %% if the Length field value is less than the number of fixed octets
    %% defined for that IE, preceding the extended field(s), the receiver
    %% shall try to continue the procedure, if possible.
    %%
    %% so, lets continue what we have so far
    QoS.
%% Optional extension octets, in the ascending order they must appear.
-define(OPTIONAL_OCTETS, [14, 15, 17, 19, 21]).
%% Append whichever optional extension octets the QoS values require to
%% the already-encoded mandatory part IE (see encode/3 clauses).
encode_optional(IE, QoS) ->
    lists:foldl(fun(Octet, Acc) -> encode(Octet, Acc, QoS) end, IE, ?OPTIONAL_OCTETS).
%% Append the extension starting at the given octet number when the QoS
%% values actually need it; otherwise return IE unchanged.  pad_to/2
%% zero-fills any skipped earlier extension octets so each extension
%% lands at its fixed position in the IE.
encode(14, IE, #qos{signaling_indication = SignalingIndication,
                    source_statistics_descriptor = SourceStatisticsDescriptor})
  when is_integer(SignalingIndication),
       is_integer(SourceStatisticsDescriptor) ->
    <<(pad_to(11, IE))/binary, 0:3, SignalingIndication:1, SourceStatisticsDescriptor:4>>;
encode(15, IE, #qos{max_bit_rate_downlink = MaxBitRateDownLink,
                    guaranteed_bit_rate_downlink = GuaranteedBitRateDownLink})
  when (is_integer(MaxBitRateDownLink) andalso MaxBitRateDownLink > 8600) orelse
       (is_integer(GuaranteedBitRateDownLink) andalso GuaranteedBitRateDownLink > 8600) ->
    %% Downlink rates above the base-octet range need extension octets.
    <<(pad_to(12, IE))/binary, (encode_ext_br(MaxBitRateDownLink)):8,
      (encode_ext_br(GuaranteedBitRateDownLink)):8>>;
encode(17, IE, #qos{max_bit_rate_uplink = MaxBitRateUpLink,
                    guaranteed_bit_rate_uplink = GuaranteedBitRateUpLink})
  when (is_integer(MaxBitRateUpLink) andalso MaxBitRateUpLink > 8600) orelse
       (is_integer(GuaranteedBitRateUpLink) andalso GuaranteedBitRateUpLink > 8600) ->
    <<(pad_to(14, IE))/binary, (encode_ext_br(MaxBitRateUpLink)):8,
      (encode_ext_br(GuaranteedBitRateUpLink)):8>>;
encode(19, IE, #qos{max_bit_rate_downlink = MaxBitRateDownLink,
                    guaranteed_bit_rate_downlink = GuaranteedBitRateDownLink})
  when (is_integer(MaxBitRateDownLink) andalso MaxBitRateDownLink > 256000) orelse
       (is_integer(GuaranteedBitRateDownLink) andalso GuaranteedBitRateDownLink > 256000) ->
    %% Rates above the first extension's range need the ext2 octets.
    <<(pad_to(16, IE))/binary, (encode_ext2_br(MaxBitRateDownLink)):8,
      (encode_ext2_br(GuaranteedBitRateDownLink)):8>>;
encode(21, IE, #qos{max_bit_rate_uplink = MaxBitRateUpLink,
                    guaranteed_bit_rate_uplink = GuaranteedBitRateUpLink})
  when (is_integer(MaxBitRateUpLink) andalso MaxBitRateUpLink > 256000) orelse
       (is_integer(GuaranteedBitRateUpLink) andalso GuaranteedBitRateUpLink > 256000) ->
    <<(pad_to(18, IE))/binary, (encode_ext2_br(MaxBitRateUpLink)):8,
      (encode_ext2_br(GuaranteedBitRateUpLink)):8>>;
encode(_Octet, IE, _QoS) ->
    IE.
| null | https://raw.githubusercontent.com/travelping/ergw/1b6cc3ee89eea4cf9df1d7de612744f0a850dfd9/apps/ergw_core/src/3gpp_qos.erl | erlang | ====================================================================
API
====================================================================
===================================================================
===================================================================
pad binary to specific length
-> -questions/2008-December/040709.html
decoding of optional fields failed, but
if the Length field value is less than the number of fixed octets
shall try to continue the procedure, if possible.
so, lets continue what we have so far | -module('3gpp_qos').
-export([decode/1, encode/1]).
-include("include/3gpp.hrl").
decode(<<_:2, DelayClass:3, ReliabilityClass:3,
PeakThroughput:4, _:1, PrecedenceClass:3,
_:3, MeanThroughput:5,
TrafficClass:3, DeliveryOrder:2, DeliveryOfErroneorousSDU:3,
MaxSDUsize:8,
MaxBitRateUpLink:8,
MaxBitRateDownLink:8,
ResidualBER:4, SDUerrorRatio:4,
TransferDelay:6, TrafficHandlingPriority:2,
GuaranteedBitRateUpLink:8,
GuaranteedBitRateDownLink:8,
Optional/binary>> = IE) ->
QoS = #qos{
delay_class = DelayClass,
reliability_class = ReliabilityClass,
peak_throughput = PeakThroughput,
precedence_class = PrecedenceClass,
mean_throughput = MeanThroughput,
traffic_class = TrafficClass,
delivery_order = DeliveryOrder,
delivery_of_erroneorous_sdu = DeliveryOfErroneorousSDU,
max_sdu_size = decode_sdu_size(MaxSDUsize),
max_bit_rate_uplink = decode_br(MaxBitRateUpLink),
max_bit_rate_downlink = decode_br(MaxBitRateDownLink),
residual_ber = ResidualBER,
sdu_error_ratio = SDUerrorRatio,
transfer_delay = decode_delay(TransferDelay),
traffic_handling_priority = TrafficHandlingPriority,
guaranteed_bit_rate_uplink = decode_br(GuaranteedBitRateUpLink),
guaranteed_bit_rate_downlink = decode_br(GuaranteedBitRateDownLink),
signaling_indication = 0,
source_statistics_descriptor = 0
},
decode(IE, 14, Optional, QoS);
decode(IE) ->
IE.
encode(IE) when is_binary(IE) ->
IE;
encode(#qos{
delay_class = DelayClass,
reliability_class = ReliabilityClass,
peak_throughput = PeakThroughput,
precedence_class = PrecedenceClass,
mean_throughput = MeanThroughput,
traffic_class = TrafficClass,
delivery_order = DeliveryOrder,
delivery_of_erroneorous_sdu = DeliveryOfErroneorousSDU,
max_sdu_size = MaxSDUsize,
max_bit_rate_uplink = MaxBitRateUpLink,
max_bit_rate_downlink = MaxBitRateDownLink,
residual_ber = ResidualBER,
sdu_error_ratio = SDUerrorRatio,
transfer_delay = TransferDelay,
traffic_handling_priority = TrafficHandlingPriority,
guaranteed_bit_rate_uplink = GuaranteedBitRateUpLink,
guaranteed_bit_rate_downlink = GuaranteedBitRateDownLink
} = QoS) ->
IE = <<0:2, DelayClass:3, ReliabilityClass:3,
PeakThroughput:4, 0:1, PrecedenceClass:3,
0:3, MeanThroughput:5,
TrafficClass:3, DeliveryOrder:2, DeliveryOfErroneorousSDU:3,
(encode_sdu_size(MaxSDUsize)):8,
(encode_br(MaxBitRateUpLink)):8,
(encode_br(MaxBitRateDownLink)):8,
ResidualBER:4, SDUerrorRatio:4,
(encode_delay(TransferDelay)):6, TrafficHandlingPriority:2,
(encode_br(GuaranteedBitRateUpLink)):8,
(encode_br(GuaranteedBitRateDownLink)):8>>,
encode_optional(IE, QoS).
Internal functions
pad_length(Width, Length) ->
(Width - Length rem Width) rem Width.
pad_to(Width, Binary) ->
case pad_length(Width, size(Binary)) of
0 -> Binary;
N -> <<Binary/binary, 0:(N*8)>>
end.
3GPP TS 24.008 , Sect . 10.5.6.5 , Table 10.5.156
decode_sdu_size(0) -> subscribed;
decode_sdu_size(X) when X =< 2#10010110 -> X * 10;
decode_sdu_size(2#10010111) -> 1502;
decode_sdu_size(2#10011000) -> 1510;
decode_sdu_size(2#10011001) -> 1520;
decode_sdu_size(_) -> reserved.
encode_sdu_size(subscribed) -> 0;
encode_sdu_size(X) when is_integer(X), X =< 1500 -> (X + 9) div 10;
encode_sdu_size(1502) -> 2#10010111;
encode_sdu_size(1510) -> 2#10011000;
encode_sdu_size(1520) -> 2#10011001;
encode_sdu_size(_) -> 2#11111111.
decode_br(2#00000000) -> subscribed;
decode_br(2#11111111) -> 0;
decode_br(V) ->
case V bsr 6 of
0 -> V;
1 -> 64 + (V - 2#01000000) * 8;
_ -> 576 + (V - 2#10000000) * 64
end.
encode_br(subscribed) -> 2#00000000;
encode_br(0) -> 2#11111111;
encode_br(V) when V =< 63 -> V;
encode_br(V) when V =< 568 -> ((V - 64) div 8) + 2#01000000;
encode_br(V) when V =< 8640 -> ((V - 576) div 64) + 2#10000000;
encode_br(_) -> 2#11111110.
decode_ext_br(0, P) -> P;
decode_ext_br(V, _) when V =< 2#01001010 -> 8600 + V * 100;
decode_ext_br(V, _) when V =< 2#10111010 -> 16000 + (V - 2#01001010) * 1000;
decode_ext_br(V, _) when V =< 2#11111010 -> 128000 + (V - 2#10111010) * 2000;
decode_ext_br(_, _) -> 256000.
encode_ext_br(subscribed) -> 0;
encode_ext_br(V) when V =< 8640 -> 2#00000000;
encode_ext_br(V) when V =< 16000 -> ((V - 8600) div 100);
encode_ext_br(V) when V =< 128000 -> ((V - 16000) div 1000) + 2#01001010;
encode_ext_br(V) when V =< 256000 -> ((V -128000) div 2000) + 2#10111010;
encode_ext_br(_) -> 2#11111010.
decode_ext2_br(0, P) -> P;
decode_ext2_br(V, _) when V =< 2#00111101 -> 256000 + V * 4000;
decode_ext2_br(V, _) when V =< 2#10100001 -> 500000 + (V - 2#00111101) * 10000;
decode_ext2_br(V, _) when V =< 2#11110110 -> 1500000 + (V - 2#10100001) * 100000;
decode_ext2_br(_, _) -> 10000000.
encode_ext2_br(subscribed) -> 0;
encode_ext2_br(V) when V =< 256000 -> 2#00000000;
encode_ext2_br(V) when V =< 500000 -> ((V - 256000) div 4000);
encode_ext2_br(V) when V =< 1500000 -> ((V - 500000) div 10000) + 2#00111101;
encode_ext2_br(V) when V =< 10000000 -> ((V - 1500000) div 100000) + 2#10100001;
encode_ext2_br(_) -> 2#11110110.
decode_delay(0) -> subscribed;
decode_delay(V) when V =< 2#001111 -> V * 10;
decode_delay(V) when V =< 2#011111 -> 200 + (V - 2#010000) * 50;
decode_delay(V) when V =< 2#111110 -> 1000 + (V - 2#100000) * 100;
decode_delay(_) -> reserved.
encode_delay(subscribed) -> 0;
encode_delay(V) when V =< 150 -> V div 10;
encode_delay(V) when V =< 950 -> ((V - 200) div 50) + 2#010000;
encode_delay(V) when V =< 4000 -> ((V - 1000) div 100) + 2#100000;
encode_delay(_) -> 2#111111.
decode(_IE, _Octet, <<>>, QoS) ->
QoS;
decode(IE, 14, <<_:3, SignalingIndication:1, SourceStatisticsDescriptor:4, Optional/binary>>, QoS0) ->
QoS = QoS0#qos{
signaling_indication = SignalingIndication,
source_statistics_descriptor = SourceStatisticsDescriptor
},
decode(IE, 15, Optional, QoS);
decode(IE, 15, <<MaxBitRateDownLinkExt:8, GuaranteedBitRateDownLinkExt:8, Optional/binary>>,
#qos{max_bit_rate_downlink = MaxBitRateDownLink,
guaranteed_bit_rate_downlink = GuaranteedBitRateDownLink} = QoS0) ->
QoS = QoS0#qos{
max_bit_rate_downlink = decode_ext_br(MaxBitRateDownLinkExt, MaxBitRateDownLink),
guaranteed_bit_rate_downlink = decode_ext_br(GuaranteedBitRateDownLinkExt, GuaranteedBitRateDownLink)
},
decode(IE, 17, Optional, QoS);
decode(IE, 17, <<MaxBitRateUpLinkExt:8, GuaranteedBitRateUpLinkExt:8, Optional/binary>>,
#qos{max_bit_rate_uplink = MaxBitRateUpLink,
guaranteed_bit_rate_uplink = GuaranteedBitRateUpLink} = QoS0) ->
QoS = QoS0#qos{
max_bit_rate_uplink = decode_ext_br(MaxBitRateUpLinkExt, MaxBitRateUpLink),
guaranteed_bit_rate_uplink = decode_ext_br(GuaranteedBitRateUpLinkExt, GuaranteedBitRateUpLink)
},
decode(IE, 19, Optional, QoS);
decode(IE, 19, <<MaxBitRateDownLinkExt:8, GuaranteedBitRateDownLinkExt:8, Optional/binary>>,
#qos{max_bit_rate_downlink = MaxBitRateDownLink,
guaranteed_bit_rate_downlink = GuaranteedBitRateDownLink} = QoS0) ->
QoS = QoS0#qos{
max_bit_rate_downlink = decode_ext2_br(MaxBitRateDownLinkExt, MaxBitRateDownLink),
guaranteed_bit_rate_downlink = decode_ext2_br(GuaranteedBitRateDownLinkExt, GuaranteedBitRateDownLink)
},
decode(IE, 21, Optional, QoS);
decode(IE, 21, <<MaxBitRateUpLinkExt:8, GuaranteedBitRateUpLinkExt:8, Optional/binary>>,
#qos{max_bit_rate_uplink = MaxBitRateUpLink,
guaranteed_bit_rate_uplink = GuaranteedBitRateUpLink} = QoS0) ->
QoS = QoS0#qos{
max_bit_rate_uplink = decode_ext2_br(MaxBitRateUpLinkExt, MaxBitRateUpLink),
guaranteed_bit_rate_uplink = decode_ext2_br(GuaranteedBitRateUpLinkExt, GuaranteedBitRateUpLink)
},
decode(IE, 23, Optional, QoS);
decode(_IE, _Octet, _Rest, QoS) ->
TS 29.060 , Sect . 11.1.6 Invalid IE Length says :
defined for that IE , preceding the extended field(s ) , the receiver
QoS.
-define(OPTIONAL_OCTETS, [14, 15, 17, 19, 21]).
encode_optional(IE, QoS) ->
lists:foldl(fun(Octet, Acc) -> encode(Octet, Acc, QoS) end, IE, ?OPTIONAL_OCTETS).
encode(14, IE, #qos{signaling_indication = SignalingIndication,
source_statistics_descriptor = SourceStatisticsDescriptor})
when is_integer(SignalingIndication),
is_integer(SourceStatisticsDescriptor) ->
<<(pad_to(11, IE))/binary, 0:3, SignalingIndication:1, SourceStatisticsDescriptor:4>>;
encode(15, IE, #qos{max_bit_rate_downlink = MaxBitRateDownLink,
guaranteed_bit_rate_downlink = GuaranteedBitRateDownLink})
when (is_integer(MaxBitRateDownLink) andalso MaxBitRateDownLink > 8600) orelse
(is_integer(GuaranteedBitRateDownLink) andalso GuaranteedBitRateDownLink > 8600) ->
<<(pad_to(12, IE))/binary, (encode_ext_br(MaxBitRateDownLink)):8,
(encode_ext_br(GuaranteedBitRateDownLink)):8>>;
encode(17, IE, #qos{max_bit_rate_uplink = MaxBitRateUpLink,
guaranteed_bit_rate_uplink = GuaranteedBitRateUpLink})
when (is_integer(MaxBitRateUpLink) andalso MaxBitRateUpLink > 8600) orelse
(is_integer(GuaranteedBitRateUpLink) andalso GuaranteedBitRateUpLink > 8600) ->
<<(pad_to(14, IE))/binary, (encode_ext_br(MaxBitRateUpLink)):8,
(encode_ext_br(GuaranteedBitRateUpLink)):8>>;
encode(19, IE, #qos{max_bit_rate_downlink = MaxBitRateDownLink,
guaranteed_bit_rate_downlink = GuaranteedBitRateDownLink})
when (is_integer(MaxBitRateDownLink) andalso MaxBitRateDownLink > 256000) orelse
(is_integer(GuaranteedBitRateDownLink) andalso GuaranteedBitRateDownLink > 256000) ->
<<(pad_to(16, IE))/binary, (encode_ext2_br(MaxBitRateDownLink)):8,
(encode_ext2_br(GuaranteedBitRateDownLink)):8>>;
encode(21, IE, #qos{max_bit_rate_uplink = MaxBitRateUpLink,
guaranteed_bit_rate_uplink = GuaranteedBitRateUpLink})
when (is_integer(MaxBitRateUpLink) andalso MaxBitRateUpLink > 256000) orelse
(is_integer(GuaranteedBitRateUpLink) andalso GuaranteedBitRateUpLink > 256000) ->
<<(pad_to(18, IE))/binary, (encode_ext2_br(MaxBitRateUpLink)):8,
(encode_ext2_br(GuaranteedBitRateUpLink)):8>>;
encode(_Octet, IE, _QoS) ->
IE.
|
3f371df43c07c6d8a2c3114b44c3d89b43c861b34a414babfc8d3869cf525517 | erlang/corba | property_SUITE.erl | %%----------------------------------------------------------------------
%%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2000 - 2016 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%%----------------------------------------------------------------------
%% File : property_SUITE.erl
%% Description :
%%
%%----------------------------------------------------------------------
-module(property_SUITE).
%%--------------- INCLUDES -----------------------------------
-include_lib("orber/include/corba.hrl").
-include_lib("orber/src/orber_iiop.hrl").
-include_lib("cosProperty/src/cosProperty.hrl").
-include_lib("cosProperty/include/CosPropertyService.hrl").
-include_lib("common_test/include/ct.hrl").
%%--------------- DEFINES ------------------------------------
-define(default_timeout, test_server:minutes(20)).
-define(match(ExpectedRes, Expr),
fun() ->
AcTuAlReS = (catch (Expr)),
case AcTuAlReS of
ExpectedRes ->
io:format("------ CORRECT RESULT ------~n~p~n",
[AcTuAlReS]),
AcTuAlReS;
_ ->
io:format("###### ERROR ERROR ######~n~p~n",
[AcTuAlReS]),
exit(AcTuAlReS)
end
end()).
-define(match_inverse(NotExpectedRes, Expr),
fun() ->
AcTuAlReS = (catch (Expr)),
case AcTuAlReS of
NotExpectedRes ->
io:format("###### ERROR ERROR ######~n ~p~n",
[AcTuAlReS]),
exit(AcTuAlReS);
_ ->
io:format("------ CORRECT RESULT ------~n~p~n",
[AcTuAlReS]),
AcTuAlReS
end
end()).
-define(val1, #any{typecode=tk_short, value=1}).
-define(val2, #any{typecode=tk_short, value=2}).
-define(val3, #any{typecode=tk_short, value=3}).
-define(val4, #any{typecode=tk_short, value=4}).
-define(val5, #any{typecode=tk_long, value=5}).
-define(badval, #any{typecode=tk_shirt, value=5}).
-define(id1, "id1").
-define(id2, "id2").
-define(id3, "id3").
-define(id4, "id4").
-define(id5, "id5").
-define(badid, "").
%%-----------------------------------------------------------------
%% External exports
%%-----------------------------------------------------------------
%% Fixed exports
-export([all/0, suite/0,groups/0,init_per_group/2,end_per_group/2, cases/0,
init_per_suite/1, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2]).
%% Test cases
-export([create_setdef_api/1, create_set_api/1, define_with_mode_api/1,
define_api/1, names_iterator_api/1, properties_iterator_api/1,
app_test/1]).
%%-----------------------------------------------------------------
%% Func: all/1
%% Args:
%% Returns:
%%-----------------------------------------------------------------
%% Common Test suite callbacks.
suite() -> [{ct_hooks,[ts_install_cth]}].
%% All test cases of this suite (delegates to cases/0).
all() ->
    cases().
%% No test-case groups are defined.
groups() ->
    [].
init_per_group(_GroupName, Config) ->
    Config.
end_per_group(_GroupName, Config) ->
    Config.
%% The flat list of test cases run by all/0.
cases() ->
    [create_setdef_api, create_set_api,
     define_with_mode_api, define_api, names_iterator_api,
     properties_iterator_api, app_test].
%%-----------------------------------------------------------------
%% Init and cleanup functions.
%%-----------------------------------------------------------------
%% Per-testcase setup: make the generated IDL stubs reachable and arm a
%% watchdog timer that kills a hung test case.
init_per_testcase(_Case, Config) ->
    Path = code:which(?MODULE),
    code:add_pathz(filename:join(filename:dirname(Path), "idl_output")),
    Dog=test_server:timetrap(?default_timeout),
    [{watchdog, Dog}|Config].
%% Per-testcase teardown: undo the code-path change and cancel the
%% watchdog stored by init_per_testcase/2.
end_per_testcase(_Case, Config) ->
    Path = code:which(?MODULE),
    code:del_path(filename:join(filename:dirname(Path), "idl_output")),
    Dog = proplists:get_value(watchdog, Config),
    test_server:timetrap_cancel(Dog),
    ok.
%% Suite setup: add the IDL stub path, start a minimal orber, install
%% the cosProperty IFR registration and database tables, then start the
%% cosProperty application.
init_per_suite(Config) ->
    Path = code:which(?MODULE),
    code:add_pathz(filename:join(filename:dirname(Path), "idl_output")),
    orber:jump_start(),
    cosProperty:install(),
    cosProperty:install_db(),
    ?match(ok, application:start(cosProperty)),
    if
        is_list(Config) ->
            Config;
        true ->
            exit("Config not a list")
    end.
%% Suite teardown: stop the application and undo everything done in
%% init_per_suite/1, in reverse order.
end_per_suite(Config) ->
    Path = code:which(?MODULE),
    code:del_path(filename:join(filename:dirname(Path), "idl_output")),
    application:stop(cosProperty),
    cosProperty:uninstall_db(),
    cosProperty:uninstall(),
    orber:jump_stop(),
    Config.
%%-----------------------------------------------------------------
%% Tests app file
%%-----------------------------------------------------------------
%% Sanity-check the cosProperty .app file via the standard app test.
app_test(_Config) ->
    ok=test_server:app_test(cosProperty),
    ok.
%%-----------------------------------------------------------------
%% CosPropertyService_PropertySetDefFactory API tests
%%-----------------------------------------------------------------
%% Exercise the PropertySetDefFactory API: plain, constrained and
%% initial PropertySetDef creation, plus the error cases for disallowed
%% typecodes and invalid property definitions.
create_setdef_api(_Config) ->
    ValidDefs = [#'CosPropertyService_PropertyDef'
                 {property_name = ?id1,
                  property_value = ?val1,
                  property_mode = normal},
                 #'CosPropertyService_PropertyDef'
                 {property_name = ?id2,
                  property_value = ?val2,
                  property_mode = normal}],
    %% ?badid/?badval make the second definition invalid.
    InvalidDefs = [#'CosPropertyService_PropertyDef'
                   {property_name = ?id1,
                    property_value = ?val1,
                    property_mode = normal},
                   #'CosPropertyService_PropertyDef'
                   {property_name = ?badid,
                    property_value = ?badval,
                    property_mode = normal}],
    Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetDefFactory()),
    Obj1 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
                  create_propertysetdef(Fac)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    corba:dispose(Obj1),
    Obj2 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
                  create_constrained_propertysetdef(Fac, [tk_short], ValidDefs)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    corba:dispose(Obj2),
    %% Both arguments correct but 'ValidDefs' contain other TC:s than
    %% tk_null.
    ?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetDefFactory':
           create_constrained_propertysetdef(Fac, [tk_null], ValidDefs)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    ?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetDefFactory':
           create_constrained_propertysetdef(Fac, [tk_null], InvalidDefs)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    %% The allowed TC not supported (tk_noll is a deliberate typo).
    ?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetDefFactory':
           create_constrained_propertysetdef(Fac, [tk_noll], ValidDefs)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    Obj4 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
                  create_initial_propertysetdef(Fac, ValidDefs)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    corba:dispose(Obj4),
    ?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetDefFactory':
           create_initial_propertysetdef(Fac, InvalidDefs)),
    ?match(ok, cosProperty:stop_SetDefFactory(Fac)),
    ok.
%%-----------------------------------------------------------------
%% CosPropertyService_PropertySetFactory API tests
%%-----------------------------------------------------------------
%% Exercise the PropertySetFactory API: plain, constrained and initial
%% PropertySet creation, plus the error cases for disallowed typecodes
%% and invalid property lists (mirrors create_setdef_api/1).
create_set_api(_Config) ->
    Valid = [#'CosPropertyService_Property'
             {property_name = ?id1,
              property_value = ?val1},
             #'CosPropertyService_Property'
             {property_name = ?id2,
              property_value = ?val2}],
    %% ?badid/?badval make the second property invalid.
    Invalid = [#'CosPropertyService_Property'
               {property_name = ?id1,
                property_value = ?val1},
               #'CosPropertyService_Property'
               {property_name = ?badid,
                property_value = ?badval}],
    Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetFactory()),
    Obj1 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
                  create_propertyset(Fac)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    corba:dispose(Obj1),
    Obj2 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
                  create_constrained_propertyset(Fac, [tk_short], Valid)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    corba:dispose(Obj2),
    %% Both arguments correct but 'Valid' contain other TC:s than
    %% tk_null.
    ?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetFactory':
           create_constrained_propertyset(Fac, [tk_null], Valid)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    ?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetFactory':
           create_constrained_propertyset(Fac, [tk_null], Invalid)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    %% The allowed TC not supported (tk_noll is a deliberate typo).
    ?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetFactory':
           create_constrained_propertyset(Fac, [tk_noll], Valid)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    Obj4 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
                  create_initial_propertyset(Fac, Valid)),
    'CosPropertyService_PropertySetDef_impl':dump(),
    corba:dispose(Obj4),
    ?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetFactory':
           create_initial_propertyset(Fac, Invalid)),
    ?match(ok, cosProperty:stop_SetFactory(Fac)),
    ok.
%%-----------------------------------------------------------------
%% CosPropertyService_PropertySetDef API tests
%%-----------------------------------------------------------------
%% Exercise the CosPropertyService_PropertySet API for objects created
%% by each of the three factory operations: create_propertyset/1,
%% create_constrained_propertyset/3 and create_initial_propertyset/2.
define_api(_Config) ->
    ValidDefs = [#'CosPropertyService_Property'
		 {property_name = ?id1,
		  property_value = ?val1},
		 #'CosPropertyService_Property'
		 {property_name = ?id2,
		  property_value = ?val2},
		 #'CosPropertyService_Property'
		 {property_name = ?id3,
		  property_value = ?val3}],
    Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetFactory()),
    io:format("@@@@ Testing PropertySet returned by the factory operation create_propertyset/1 @@@@", []),
    Obj = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
		 create_propertyset(Fac)),
    %% Defining the same property twice must not create a duplicate.
    ?match(ok, 'CosPropertyService_PropertySet':define_property(Obj, ?id1, ?val1)),
    ?match(ok, 'CosPropertyService_PropertySet':define_property(Obj, ?id1, ?val1)),
    ?match(1, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
    ?match(ok, 'CosPropertyService_PropertySet':
	   define_properties(Obj, [#'CosPropertyService_Property'{property_name = ?id2,
								  property_value = ?val2},
				   #'CosPropertyService_Property'{property_name = ?id3,
								  property_value = ?val3}])),
    ?match(3, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
    ?match({true, [_]}, 'CosPropertyService_PropertySet':get_properties(Obj, [?id1])),
    ?match({true, [_, _, _]}, 'CosPropertyService_PropertySet':get_properties(Obj, [?id1, ?id2, ?id3])),
    %% An unknown Id yields false together with the full result sequence.
    ?match({false,[_, _, _]}, 'CosPropertyService_PropertySet':get_properties(Obj, [?id1, "wrong", ?id3])),
    ?match(?val2, 'CosPropertyService_PropertySet':get_property_value(Obj, ?id2)),
    ?match(ok, 'CosPropertyService_PropertySet':delete_property(Obj, ?id1)),
    ?match(2, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
    ?match(ok, 'CosPropertyService_PropertySet':define_property(Obj, ?id1, ?val1)),
    ?match(ok, 'CosPropertyService_PropertySet':define_property(Obj, ?id2, ?val2)),
    ?match(ok, 'CosPropertyService_PropertySet':define_property(Obj, ?id3, ?val3)),
    ?match(true, 'CosPropertyService_PropertySet':delete_all_properties(Obj)),
    ?match(0, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
    ?match(ok, 'CosPropertyService_PropertySet':
	   define_properties(Obj, [#'CosPropertyService_Property'{property_name = ?id1,
								  property_value = ?val1},
				   #'CosPropertyService_Property'{property_name = ?id2,
								  property_value = ?val2},
				   #'CosPropertyService_Property'{property_name = ?id3,
								  property_value = ?val3}])),
    ?match(3, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
    ?match(?val2, 'CosPropertyService_PropertySet':get_property_value(Obj, ?id2)),
    ?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
	   'CosPropertyService_PropertySet':get_property_value(Obj, "wrongID")),
    %% The empty string is not a legal property name.
    ?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
	   'CosPropertyService_PropertySet':get_property_value(Obj, "")),
    ?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
	   'CosPropertyService_PropertySet':is_property_defined(Obj, "")),
    ?match(false, 'CosPropertyService_PropertySet':is_property_defined(Obj, "wrongID")),
    ?match(true, 'CosPropertyService_PropertySet':is_property_defined(Obj, ?id1)),
    %% This function is not supported by PropertySet.
    ?match({'EXCEPTION',{'NO_IMPLEMENT',_,_,_}},
	   'CosPropertyService_PropertySetDef':get_property_modes(Obj, [?id1, ?id2, ?id3])),
    ?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
	   'CosPropertyService_PropertySet':delete_properties(Obj, [?id1, ?id2, ?id3, "wrongID"])),
    ?match(0, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
    corba:dispose(Obj),
    io:format("@@@@ Testing PropertySet returned by the factory operation create_constrained_propertyset/3 @@@@", []),
    Obj2 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
		  create_constrained_propertyset(Fac, [tk_short], ValidDefs)),
    ?match(0, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
    %% ?id4 is not among the allowed properties; ?val5 has a disallowed TC.
    ?match({'EXCEPTION', {'CosPropertyService_UnsupportedProperty',_}},
	   'CosPropertyService_PropertySet':define_property(Obj2, ?id4, ?val4)),
    ?match({'EXCEPTION', {'CosPropertyService_UnsupportedTypeCode',_}},
	   'CosPropertyService_PropertySet':define_property(Obj2, ?id1, ?val5)),
    ?match(ok, 'CosPropertyService_PropertySet':define_property(Obj2, ?id1, ?val1)),
    ?match(1, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
    ?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
	   'CosPropertyService_PropertySet':
	   define_properties(Obj2, [#'CosPropertyService_Property'{property_name = ?id2,
								   property_value = ?val2},
				    #'CosPropertyService_Property'{property_name = ?id3,
								   property_value = ?val3},
				    #'CosPropertyService_Property'{property_name = "wrongId",
								   property_value = ?val2}])),
    ?match(ok,'CosPropertyService_PropertySet':
	   define_properties(Obj2, [#'CosPropertyService_Property'{property_name = ?id2,
								   property_value = ?val2},
				    #'CosPropertyService_Property'{property_name = ?id3,
								   property_value = ?val3}])),
    ?match(3, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
    ?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
	   'CosPropertyService_PropertySet':get_property_value(Obj2, "wrongID")),
    ?match(?val2, 'CosPropertyService_PropertySet':get_property_value(Obj2, ?id2)),
    ?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
	   'CosPropertyService_PropertySet':get_property_value(Obj2, "")),
    ?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
	   'CosPropertyService_PropertySet':is_property_defined(Obj2, "")),
    ?match(false, 'CosPropertyService_PropertySet':is_property_defined(Obj2, "wrongID")),
    ?match(true, 'CosPropertyService_PropertySet':is_property_defined(Obj2, ?id1)),
    ?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
	   'CosPropertyService_PropertySet':delete_property(Obj2, "wrongID")),
    ?match(3, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
    ?match(ok, 'CosPropertyService_PropertySet':delete_property(Obj2, ?id1)),
    ?match(2, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
    ?match(ok, 'CosPropertyService_PropertySet':delete_properties(Obj2, [?id2])),
    ?match(1, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
    ?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
	   'CosPropertyService_PropertySet':delete_properties(Obj2, [?id3, "wrongID"])),
    ?match(0, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
    corba:dispose(Obj2),
    io:format("@@@@ Testing PropertySet returned by the factory operation create_initial_propertyset/2 @@@@", []),
    Obj3 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
		  create_initial_propertyset(Fac, ValidDefs)),
    %% The initial set starts populated with ValidDefs and has no restrictions.
    ?match(3, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
    ?match(ok, 'CosPropertyService_PropertySet':define_property(Obj3, ?id4, ?val4)),
    ?match(4, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
    ?match(ok,'CosPropertyService_PropertySet':
	   define_properties(Obj3, [#'CosPropertyService_Property'{property_name = ?id5,
								   property_value = ?val5}])),
    ?match(5, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
    ?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
	   'CosPropertyService_PropertySet':get_property_value(Obj3, "wrongID")),
    ?match(?val2, 'CosPropertyService_PropertySet':get_property_value(Obj3, ?id2)),
    ?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
	   'CosPropertyService_PropertySet':get_property_value(Obj3, "")),
    ?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
	   'CosPropertyService_PropertySet':is_property_defined(Obj3, "")),
    ?match(false, 'CosPropertyService_PropertySet':is_property_defined(Obj3, "wrongID")),
    ?match(true, 'CosPropertyService_PropertySet':is_property_defined(Obj3, ?id1)),
    ?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
	   'CosPropertyService_PropertySet':delete_property(Obj3, "wrongId")),
    ?match(ok, 'CosPropertyService_PropertySet':delete_property(Obj3, ?id5)),
    ?match(4, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
    ?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
	   'CosPropertyService_PropertySet':delete_properties(Obj3, [?id1, ?id2, ?id3, "wrongID"])),
    ?match(1, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
    ?match(true, 'CosPropertyService_PropertySet':delete_all_properties(Obj3)),
    ?match(0, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
    corba:dispose(Obj3),
    ?match(ok, cosProperty:stop_SetFactory(Fac)),
    ok.
%%-----------------------------------------------------------------
%% CosPropertyService_PropertySetDef API tests
%%-----------------------------------------------------------------
%% Exercise the CosPropertyService_PropertySetDef API (properties with
%% access modes) for objects created by each of the three SetDef factory
%% operations.
define_with_mode_api(_Config) ->
    ValidDefs = [#'CosPropertyService_PropertyDef'
		 {property_name = ?id1,
		  property_value = ?val1,
		  property_mode = normal},
		 #'CosPropertyService_PropertyDef'
		 {property_name = ?id2,
		  property_value = ?val2,
		  property_mode = normal},
		 #'CosPropertyService_PropertyDef'
		 {property_name = ?id3,
		  property_value = ?val3,
		  property_mode = normal}],
    Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetDefFactory()),
    io:format("@@@@ Testing PropertySetDef returned by the factory operation create_propertysetdef/1 @@@@", []),
    Obj = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
		 create_propertysetdef(Fac)),
    %% No prop's created and no restrictions at all.
    ?match(0, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj)),
    ?match({ok, []}, 'CosPropertyService_PropertySetDef':get_allowed_property_types(Obj)),
    ?match({ok, []}, 'CosPropertyService_PropertySetDef':get_allowed_properties(Obj)),
    %% Add two properties.
    ?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj, ?id4, ?val4, read_only)),
    ?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj, ?id5, ?val5, normal)),
    %% Try to add the same property again (shouldn't add another since using the same Id).
    ?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj, ?id5, ?val5, normal)),
    %% Try to add another identical proprty with wrong TC.
    ?match({'EXCEPTION',{'CosPropertyService_ConflictingProperty',_}},
	   'CosPropertyService_PropertySetDef':define_property_with_mode(Obj, ?id5, ?val4, normal)),
    %% Should be two now.
    ?match(2, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj)),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj, ?id4)),
    ?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj, ?id5)),
    ?match(ok, 'CosPropertyService_PropertySetDef':
	   define_properties_with_modes(Obj,
					[#'CosPropertyService_PropertyDef'{property_name = ?id1,
									   property_value = ?val1,
									   property_mode = normal},
					 #'CosPropertyService_PropertyDef'{property_name = ?id2,
									   property_value = ?val2,
									   property_mode = normal},
					 #'CosPropertyService_PropertyDef'{property_name = ?id3,
									   property_value = ?val3,
									   property_mode = normal}])),
    %% Should be five now.
    ?match(5, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj)),
    ?match({true, [_,_]}, 'CosPropertyService_PropertySetDef':get_property_modes(Obj, [?id1, ?id3])),
    ?match({false, [_,_,_]}, 'CosPropertyService_PropertySetDef':get_property_modes(Obj, [?id1, ?id3, "wrongID"])),
    ?match(ok, 'CosPropertyService_PropertySetDef':set_property_mode(Obj, ?id1, read_only)),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj, ?id1)),
    ?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
	   'CosPropertyService_PropertySetDef':set_property_mode(Obj, "wrongID", read_only)),
    %% A batch containing one bad name must fail without applying any change.
    ?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
	   'CosPropertyService_PropertySetDef':
	   set_property_modes(Obj,
			      [#'CosPropertyService_PropertyMode'{property_name = ?id2,
								  property_mode = read_only},
			       #'CosPropertyService_PropertyMode'{property_name = ?id3,
								  property_mode = read_only},
			       #'CosPropertyService_PropertyMode'{property_name = "wrongID",
								  property_mode = read_only}])),
    ?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj, ?id2)),
    ?match(ok,
	   'CosPropertyService_PropertySetDef':
	   set_property_modes(Obj,
			      [#'CosPropertyService_PropertyMode'{property_name = ?id2,
								  property_mode = read_only},
			       #'CosPropertyService_PropertyMode'{property_name = ?id3,
								  property_mode = read_only}])),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj, ?id2)),
    corba:dispose(Obj),
    io:format("@@@@ Testing PropertySetDef returned by the factory operation create_constrained_propertysetdef/3 @@@@", []),
    Obj2 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
		  create_constrained_propertysetdef(Fac, [tk_short], ValidDefs)),
    %% No prop's created and the restrictions that only Properties eq. to ValidDefs
    %% may be handled.
    ?match(0, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj2)),
    ?match({ok, [tk_short]}, 'CosPropertyService_PropertySetDef':get_allowed_property_types(Obj2)),
    %% We cannot be sure in which order it's returned. Hmm, that's not really true but it
    %% may change in the future.
    ?match({ok, [_,_,_]}, 'CosPropertyService_PropertySetDef':get_allowed_properties(Obj2)),
    %% Try to add a Property with and Id not eq. to ?id1, ?id2 or ?id3; must fail.
    ?match({'EXCEPTION', {'CosPropertyService_UnsupportedProperty',_}},
	   'CosPropertyService_PropertySetDef':define_property_with_mode(Obj2, ?id4, ?val4, read_only)),
    %% To be sure that nothing was updated.
    ?match(0, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj2)),
    %% Add a valid Property.
    ?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj2, ?id1, ?val1, normal)),
    ?match(1, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj2)),
    %% Add a sequence of 1 valid and one invalid Prop's.
    ?match({'EXCEPTION', {'CosPropertyService_MultipleExceptions',_,_}},
	   'CosPropertyService_PropertySetDef':
	   define_properties_with_modes(Obj2,
					[#'CosPropertyService_PropertyDef'{property_name = ?id2,
									   property_value = ?val2,
									   property_mode = normal},
					 #'CosPropertyService_PropertyDef'{property_name = "wrongID",
									   property_value = ?val2,
									   property_mode = normal}])),
    %% One should be added.
    ?match(1, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj2)),
    ?match(ok, 'CosPropertyService_PropertySetDef':
	   define_properties_with_modes(Obj2,
					[#'CosPropertyService_PropertyDef'{property_name = ?id3,
									   property_value = ?val3,
									   property_mode = normal}])),
    %% Add a sequence of 1 valid Prop.
    ?match(2, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj2)),
    ?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id1)),
    ?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id3)),
    ?match({true, [_,_]}, 'CosPropertyService_PropertySetDef':get_property_modes(Obj2, [?id1, ?id3])),
    ?match({false, [_,_,_]}, 'CosPropertyService_PropertySetDef':get_property_modes(Obj2, [?id1, ?id3, "wrongID"])),
    ?match(ok, 'CosPropertyService_PropertySetDef':set_property_mode(Obj2, ?id1, read_only)),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id1)),
    ?match(ok, 'CosPropertyService_PropertySetDef':
	   set_property_modes(Obj2,
			      [#'CosPropertyService_PropertyMode'{property_name = ?id1,
								  property_mode = read_only},
			       #'CosPropertyService_PropertyMode'{property_name = ?id3,
								  property_mode = read_only}])),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id1)),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id3)),
    ?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
	   'CosPropertyService_PropertySetDef':
	   set_property_modes(Obj2,
			      [#'CosPropertyService_PropertyMode'{property_name = ?id1,
								  property_mode = normal},
			       #'CosPropertyService_PropertyMode'{property_name = ?id3,
								  property_mode = normal},
			       #'CosPropertyService_PropertyMode'{property_name = "wrongID",
								  property_mode = normal}])),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id1)),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id3)),
    corba:dispose(Obj2),
    io:format("@@@@ Testing PropertySetDef returned by the factory operation create_initial_propertysetdef/2 @@@@", []),
    Obj3 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
		  create_initial_propertysetdef(Fac, ValidDefs)),
    %% The supplied prop's are created and no restrictions.
    ?match(3, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj3)),
    ?match({ok, []}, 'CosPropertyService_PropertySetDef':get_allowed_property_types(Obj3)),
    ?match({ok, []}, 'CosPropertyService_PropertySetDef':get_allowed_properties(Obj3)),
    %% Add a new properties an test if they have been inserted.
    ?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj3, ?id4, ?val4, read_only)),
    ?match(4, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj3)),
    ?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj3, ?id5, ?val5, read_only)),
    ?match(5, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj3)),
    %% Lookup each Property's mode.
    ?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id1)),
    ?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id2)),
    ?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id3)),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id4)),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id5)),
    ?match({true, [_,_,_,_,_]},
	   'CosPropertyService_PropertySetDef':get_property_modes(Obj3, [?id1, ?id2, ?id3, ?id4, ?id5])),
    ?match({false, [_,_]},
	   'CosPropertyService_PropertySetDef':get_property_modes(Obj3, [?id1, "wrongID"])),
    ?match(ok, 'CosPropertyService_PropertySetDef':set_property_mode(Obj3, ?id4, normal)),
    ?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id4)),
    ?match(ok, 'CosPropertyService_PropertySetDef':
	   set_property_modes(Obj3,
			      [#'CosPropertyService_PropertyMode'{property_name = ?id1,
								  property_mode = read_only},
			       #'CosPropertyService_PropertyMode'{property_name = ?id2,
								  property_mode = read_only}])),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id1)),
    ?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id2)),
    ?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
	   'CosPropertyService_PropertySetDef':
	   set_property_modes(Obj3,
			      [#'CosPropertyService_PropertyMode'{property_name = ?id3,
								  property_mode = read_only},
			       #'CosPropertyService_PropertyMode'{property_name = ?id4,
								  property_mode = read_only},
			       #'CosPropertyService_PropertyMode'{property_name = "wrongID",
								  property_mode = read_only}])),
    ?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id3)),
    ?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id4)),
    corba:dispose(Obj3),
    ?match(ok, cosProperty:stop_SetDefFactory(Fac)),
    ok.
%%-----------------------------------------------------------------
%% CosPropertyService_PropertyNamesIterator API tests
%%-----------------------------------------------------------------
%% Exercise the CosPropertyService_PropertyNamesIterator API: request all
%% names via an iterator (how_many = 0) and step through it with next_n/2,
%% next_one/1 and reset/1.
names_iterator_api(_Config) ->
    Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetFactory()),
    Obj = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
		 create_propertyset(Fac)),
    ?match(ok, 'CosPropertyService_PropertySet':
	   define_properties(Obj, [#'CosPropertyService_Property'{property_name = ?id1,
								  property_value = ?val1},
				   #'CosPropertyService_Property'{property_name = ?id2,
								  property_value = ?val2},
				   #'CosPropertyService_Property'{property_name = ?id3,
								  property_value = ?val3}])),
    ?match(3, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj)),
    %% how_many = 0 returns no names directly; all three go to the iterator.
    {_, _,ItObj} = ?match({ok, [], _}, 'CosPropertyService_PropertySetDef':get_all_property_names(Obj, 0)),
    %% Fetching exactly (or more than) the remaining names exhausts the iterator,
    %% hence false.
    ?match({false, [_,_,_]}, 'CosPropertyService_PropertyNamesIterator':next_n(ItObj,3)),
    ?match(ok, 'CosPropertyService_PropertyNamesIterator':reset(ItObj)),
    ?match({false, [_,_,_]}, 'CosPropertyService_PropertyNamesIterator':next_n(ItObj,4)),
    ?match(ok, 'CosPropertyService_PropertyNamesIterator':reset(ItObj)),
    ?match({true, [_]}, 'CosPropertyService_PropertyNamesIterator':next_n(ItObj,1)),
    ?match({true, _}, 'CosPropertyService_PropertyNamesIterator':next_one(ItObj)),
    ?match({true, _}, 'CosPropertyService_PropertyNamesIterator':next_one(ItObj)),
    ?match({false, _}, 'CosPropertyService_PropertyNamesIterator':next_one(ItObj)),
    ?match(ok, 'CosPropertyService_PropertyNamesIterator':destroy(ItObj)),
    corba:dispose(Obj),
    %% Stop the factory as the other test cases do; previously it was leaked.
    ?match(ok, cosProperty:stop_SetFactory(Fac)),
    ok.
%%-----------------------------------------------------------------
%% CosPropertyService_PropertiesIterator API tests
%%-----------------------------------------------------------------
%% Exercise the CosPropertyService_PropertiesIterator API: request all
%% properties via an iterator (how_many = 0) and step through it with
%% next_n/2, next_one/1 and reset/1.
properties_iterator_api(_Config) ->
    Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetFactory()),
    Obj = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
		 create_propertyset(Fac)),
    ?match(ok, 'CosPropertyService_PropertySet':
	   define_properties(Obj, [#'CosPropertyService_Property'{property_name = ?id1,
								  property_value = ?val1},
				   #'CosPropertyService_Property'{property_name = ?id2,
								  property_value = ?val2},
				   #'CosPropertyService_Property'{property_name = ?id3,
								  property_value = ?val3}])),
    ?match(3, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj)),
    %% how_many = 0 returns no properties directly; all three go to the iterator.
    {_, _,ItObj} = ?match({ok, [], _},
			  'CosPropertyService_PropertySetDef':get_all_properties(Obj, 0)),
    %% Fetching exactly (or more than) the remaining properties exhausts the
    %% iterator, hence false.
    ?match({false, [_,_,_]}, 'CosPropertyService_PropertiesIterator':next_n(ItObj,3)),
    ?match(ok, 'CosPropertyService_PropertiesIterator':reset(ItObj)),
    ?match({false, [_,_,_]}, 'CosPropertyService_PropertiesIterator':next_n(ItObj,4)),
    ?match(ok, 'CosPropertyService_PropertiesIterator':reset(ItObj)),
    ?match({true, [_]}, 'CosPropertyService_PropertiesIterator':next_n(ItObj,1)),
    ?match({true, {'CosPropertyService_Property',_,_}},
	   'CosPropertyService_PropertiesIterator':next_one(ItObj)),
    ?match({true, {'CosPropertyService_Property',_,_}},
	   'CosPropertyService_PropertiesIterator':next_one(ItObj)),
    ?match({false, {'CosPropertyService_Property',_,_}},
	   'CosPropertyService_PropertiesIterator':next_one(ItObj)),
    ?match(ok, 'CosPropertyService_PropertiesIterator':destroy(ItObj)),
    corba:dispose(Obj),
    %% Stop the factory as the other test cases do; previously it was leaked.
    ?match(ok, cosProperty:stop_SetFactory(Fac)),
    ok.
%%-----------------------------------------------------------------
%% END OF MODULE
%%-----------------------------------------------------------------
| null | https://raw.githubusercontent.com/erlang/corba/396df81473a386d0315bbba830db6f9d4b12a04f/lib/cosProperty/test/property_SUITE.erl | erlang | ----------------------------------------------------------------------
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
----------------------------------------------------------------------
File : property_SUITE.erl
Description :
----------------------------------------------------------------------
--------------- INCLUDES -----------------------------------
-----------------------------------------------------------------
External exports
-----------------------------------------------------------------
Fixed exports
Test cases
-----------------------------------------------------------------
Func: all/1
Returns:
-----------------------------------------------------------------
-----------------------------------------------------------------
-----------------------------------------------------------------
-----------------------------------------------------------------
Tests app file
-----------------------------------------------------------------
-----------------------------------------------------------------
CosPropertyService_PropertySetDefFactory API tests
-----------------------------------------------------------------
tk_null.
-----------------------------------------------------------------
CosPropertyService_PropertySetFactory API tests
-----------------------------------------------------------------
tk_null.
-----------------------------------------------------------------
CosPropertyService_PropertySetDef API tests
-----------------------------------------------------------------
-----------------------------------------------------------------
CosPropertyService_PropertySetDef API tests
-----------------------------------------------------------------
Try to add another identical proprty with wrong TC.
may be handled.
We cannot be sure in which order it's returned. Hmm, that's not really true but it
may change in the future.
Try to add a Property with and Id not eq. to ?id1, ?id2 or ?id3; must fail.
To be sure that nothing was updated.
Add a valid Property.
One should be added.
Add a new properties an test if they have been inserted.
Lookup each Property's mode.
-----------------------------------------------------------------
-----------------------------------------------------------------
-----------------------------------------------------------------
CosPropertyService_PropertiesIterator API tests
-----------------------------------------------------------------
-----------------------------------------------------------------
END OF MODULE
----------------------------------------------------------------- | Copyright Ericsson AB 2000 - 2016 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(property_SUITE).
-include_lib("orber/include/corba.hrl").
-include_lib("orber/src/orber_iiop.hrl").
-include_lib("cosProperty/src/cosProperty.hrl").
-include_lib("cosProperty/include/CosPropertyService.hrl").
-include_lib("common_test/include/ct.hrl").
--------------- DEFINES ------------------------------------
-define(default_timeout, test_server:minutes(20)).
-define(match(ExpectedRes, Expr),
fun() ->
AcTuAlReS = (catch (Expr)),
case AcTuAlReS of
ExpectedRes ->
io:format("------ CORRECT RESULT ------~n~p~n",
[AcTuAlReS]),
AcTuAlReS;
_ ->
io:format("###### ERROR ERROR ######~n~p~n",
[AcTuAlReS]),
exit(AcTuAlReS)
end
end()).
-define(match_inverse(NotExpectedRes, Expr),
fun() ->
AcTuAlReS = (catch (Expr)),
case AcTuAlReS of
NotExpectedRes ->
io:format("###### ERROR ERROR ######~n ~p~n",
[AcTuAlReS]),
exit(AcTuAlReS);
_ ->
io:format("------ CORRECT RESULT ------~n~p~n",
[AcTuAlReS]),
AcTuAlReS
end
end()).
-define(val1, #any{typecode=tk_short, value=1}).
-define(val2, #any{typecode=tk_short, value=2}).
-define(val3, #any{typecode=tk_short, value=3}).
-define(val4, #any{typecode=tk_short, value=4}).
-define(val5, #any{typecode=tk_long, value=5}).
-define(badval, #any{typecode=tk_shirt, value=5}).
-define(id1, "id1").
-define(id2, "id2").
-define(id3, "id3").
-define(id4, "id4").
-define(id5, "id5").
-define(badid, "").
-export([all/0, suite/0,groups/0,init_per_group/2,end_per_group/2, cases/0,
init_per_suite/1, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2]).
-export([create_setdef_api/1, create_set_api/1, define_with_mode_api/1,
define_api/1, names_iterator_api/1, properties_iterator_api/1,
app_test/1]).
:
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
cases().
groups() ->
[].
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, Config) ->
Config.
cases() ->
[create_setdef_api, create_set_api,
define_with_mode_api, define_api, names_iterator_api,
properties_iterator_api, app_test].
Init and cleanup functions .
init_per_testcase(_Case, Config) ->
Path = code:which(?MODULE),
code:add_pathz(filename:join(filename:dirname(Path), "idl_output")),
Dog=test_server:timetrap(?default_timeout),
[{watchdog, Dog}|Config].
end_per_testcase(_Case, Config) ->
Path = code:which(?MODULE),
code:del_path(filename:join(filename:dirname(Path), "idl_output")),
Dog = proplists:get_value(watchdog, Config),
test_server:timetrap_cancel(Dog),
ok.
init_per_suite(Config) ->
Path = code:which(?MODULE),
code:add_pathz(filename:join(filename:dirname(Path), "idl_output")),
orber:jump_start(),
cosProperty:install(),
cosProperty:install_db(),
?match(ok, application:start(cosProperty)),
if
is_list(Config) ->
Config;
true ->
exit("Config not a list")
end.
end_per_suite(Config) ->
Path = code:which(?MODULE),
code:del_path(filename:join(filename:dirname(Path), "idl_output")),
application:stop(cosProperty),
cosProperty:uninstall_db(),
cosProperty:uninstall(),
orber:jump_stop(),
Config.
app_test(_Config) ->
ok=test_server:app_test(cosProperty),
ok.
create_setdef_api(_Config) ->
ValidDefs = [#'CosPropertyService_PropertyDef'
{property_name = ?id1,
property_value = ?val1,
property_mode = normal},
#'CosPropertyService_PropertyDef'
{property_name = ?id2,
property_value = ?val2,
property_mode = normal}],
InvalidDefs = [#'CosPropertyService_PropertyDef'
{property_name = ?id1,
property_value = ?val1,
property_mode = normal},
#'CosPropertyService_PropertyDef'
{property_name = ?badid,
property_value = ?badval,
property_mode = normal}],
Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetDefFactory()),
Obj1 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
create_propertysetdef(Fac)),
'CosPropertyService_PropertySetDef_impl':dump(),
corba:dispose(Obj1),
Obj2 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
create_constrained_propertysetdef(Fac, [tk_short], ValidDefs)),
'CosPropertyService_PropertySetDef_impl':dump(),
corba:dispose(Obj2),
Both arguments correct but ' ValidDefs ' contain other TC : s than
?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetDefFactory':
create_constrained_propertysetdef(Fac, [tk_null], ValidDefs)),
'CosPropertyService_PropertySetDef_impl':dump(),
?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetDefFactory':
create_constrained_propertysetdef(Fac, [tk_null], InvalidDefs)),
'CosPropertyService_PropertySetDef_impl':dump(),
The allowed TC not supported .
?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetDefFactory':
create_constrained_propertysetdef(Fac, [tk_noll], ValidDefs)),
'CosPropertyService_PropertySetDef_impl':dump(),
Obj4 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
create_initial_propertysetdef(Fac, ValidDefs)),
'CosPropertyService_PropertySetDef_impl':dump(),
corba:dispose(Obj4),
?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetDefFactory':
create_initial_propertysetdef(Fac, InvalidDefs)),
?match(ok, cosProperty:stop_SetDefFactory(Fac)),
ok.
create_set_api(_Config) ->
Valid = [#'CosPropertyService_Property'
{property_name = ?id1,
property_value = ?val1},
#'CosPropertyService_Property'
{property_name = ?id2,
property_value = ?val2}],
Invalid = [#'CosPropertyService_Property'
{property_name = ?id1,
property_value = ?val1},
#'CosPropertyService_Property'
{property_name = ?badid,
property_value = ?badval}],
Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetFactory()),
Obj1 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
create_propertyset(Fac)),
'CosPropertyService_PropertySetDef_impl':dump(),
corba:dispose(Obj1),
Obj2 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
create_constrained_propertyset(Fac, [tk_short], Valid)),
'CosPropertyService_PropertySetDef_impl':dump(),
corba:dispose(Obj2),
Both arguments correct but ' Valid ' contain other TC : s than
?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetFactory':
create_constrained_propertyset(Fac, [tk_null], Valid)),
'CosPropertyService_PropertySetDef_impl':dump(),
?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetFactory':
create_constrained_propertyset(Fac, [tk_null], Invalid)),
'CosPropertyService_PropertySetDef_impl':dump(),
The allowed TC not supported .
?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetFactory':
create_constrained_propertyset(Fac, [tk_noll], Valid)),
'CosPropertyService_PropertySetDef_impl':dump(),
Obj4 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
create_initial_propertyset(Fac, Valid)),
'CosPropertyService_PropertySetDef_impl':dump(),
corba:dispose(Obj4),
?match({'EXCEPTION', _}, 'CosPropertyService_PropertySetFactory':
create_initial_propertyset(Fac, Invalid)),
?match(ok, cosProperty:stop_SetFactory(Fac)),
ok.
define_api(_Config) ->
ValidDefs = [#'CosPropertyService_Property'
{property_name = ?id1,
property_value = ?val1},
#'CosPropertyService_Property'
{property_name = ?id2,
property_value = ?val2},
#'CosPropertyService_Property'
{property_name = ?id3,
property_value = ?val3}],
Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetFactory()),
io:format("@@@@ Testing PropertySet returned by the factory operation create_propertyset/1 @@@@", []),
Obj = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
create_propertyset(Fac)),
?match(ok, 'CosPropertyService_PropertySet':define_property(Obj, ?id1, ?val1)),
?match(ok, 'CosPropertyService_PropertySet':define_property(Obj, ?id1, ?val1)),
?match(1, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
?match(ok, 'CosPropertyService_PropertySet':
define_properties(Obj, [#'CosPropertyService_Property'{property_name = ?id2,
property_value = ?val2},
#'CosPropertyService_Property'{property_name = ?id3,
property_value = ?val3}])),
?match(3, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
?match({true, [_]}, 'CosPropertyService_PropertySet':get_properties(Obj, [?id1])),
?match({true, [_, _, _]}, 'CosPropertyService_PropertySet':get_properties(Obj, [?id1, ?id2, ?id3])),
?match({false,[_, _, _]}, 'CosPropertyService_PropertySet':get_properties(Obj, [?id1, "wrong", ?id3])),
?match(?val2, 'CosPropertyService_PropertySet':get_property_value(Obj, ?id2)),
?match(ok, 'CosPropertyService_PropertySet':delete_property(Obj, ?id1)),
?match(2, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
?match(ok, 'CosPropertyService_PropertySet':define_property(Obj, ?id1, ?val1)),
?match(ok, 'CosPropertyService_PropertySet':define_property(Obj, ?id2, ?val2)),
?match(ok, 'CosPropertyService_PropertySet':define_property(Obj, ?id3, ?val3)),
?match(true, 'CosPropertyService_PropertySet':delete_all_properties(Obj)),
?match(0, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
?match(ok, 'CosPropertyService_PropertySet':
define_properties(Obj, [#'CosPropertyService_Property'{property_name = ?id1,
property_value = ?val1},
#'CosPropertyService_Property'{property_name = ?id2,
property_value = ?val2},
#'CosPropertyService_Property'{property_name = ?id3,
property_value = ?val3}])),
?match(3, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
?match(?val2, 'CosPropertyService_PropertySet':get_property_value(Obj, ?id2)),
?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
'CosPropertyService_PropertySet':get_property_value(Obj, "wrongID")),
?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
'CosPropertyService_PropertySet':get_property_value(Obj, "")),
?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
'CosPropertyService_PropertySet':is_property_defined(Obj, "")),
?match(false, 'CosPropertyService_PropertySet':is_property_defined(Obj, "wrongID")),
?match(true, 'CosPropertyService_PropertySet':is_property_defined(Obj, ?id1)),
This function is not supported by PropertySet .
?match({'EXCEPTION',{'NO_IMPLEMENT',_,_,_}},
'CosPropertyService_PropertySetDef':get_property_modes(Obj, [?id1, ?id2, ?id3])),
?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
'CosPropertyService_PropertySet':delete_properties(Obj, [?id1, ?id2, ?id3, "wrongID"])),
?match(0, 'CosPropertyService_PropertySet':get_number_of_properties(Obj)),
corba:dispose(Obj),
io:format("@@@@ Testing PropertySet returned by the factory operation create_constrained_propertyset/3 @@@@", []),
Obj2 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
create_constrained_propertyset(Fac, [tk_short], ValidDefs)),
?match(0, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
?match({'EXCEPTION', {'CosPropertyService_UnsupportedProperty',_}},
'CosPropertyService_PropertySet':define_property(Obj2, ?id4, ?val4)),
?match({'EXCEPTION', {'CosPropertyService_UnsupportedTypeCode',_}},
'CosPropertyService_PropertySet':define_property(Obj2, ?id1, ?val5)),
?match(ok, 'CosPropertyService_PropertySet':define_property(Obj2, ?id1, ?val1)),
?match(1, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
'CosPropertyService_PropertySet':
define_properties(Obj2, [#'CosPropertyService_Property'{property_name = ?id2,
property_value = ?val2},
#'CosPropertyService_Property'{property_name = ?id3,
property_value = ?val3},
#'CosPropertyService_Property'{property_name = "wrongId",
property_value = ?val2}])),
?match(ok,'CosPropertyService_PropertySet':
define_properties(Obj2, [#'CosPropertyService_Property'{property_name = ?id2,
property_value = ?val2},
#'CosPropertyService_Property'{property_name = ?id3,
property_value = ?val3}])),
?match(3, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
'CosPropertyService_PropertySet':get_property_value(Obj2, "wrongID")),
?match(?val2, 'CosPropertyService_PropertySet':get_property_value(Obj2, ?id2)),
?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
'CosPropertyService_PropertySet':get_property_value(Obj2, "")),
?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
'CosPropertyService_PropertySet':is_property_defined(Obj2, "")),
?match(false, 'CosPropertyService_PropertySet':is_property_defined(Obj2, "wrongID")),
?match(true, 'CosPropertyService_PropertySet':is_property_defined(Obj2, ?id1)),
?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
'CosPropertyService_PropertySet':delete_property(Obj2, "wrongID")),
?match(3, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
?match(ok, 'CosPropertyService_PropertySet':delete_property(Obj2, ?id1)),
?match(2, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
?match(ok, 'CosPropertyService_PropertySet':delete_properties(Obj2, [?id2])),
?match(1, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
'CosPropertyService_PropertySet':delete_properties(Obj2, [?id3, "wrongID"])),
?match(0, 'CosPropertyService_PropertySet':get_number_of_properties(Obj2)),
corba:dispose(Obj2),
io:format("@@@@ Testing PropertySet returned by the factory operation create_initial_propertyset/2 @@@@", []),
Obj3 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
create_initial_propertyset(Fac, ValidDefs)),
?match(3, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
?match(ok, 'CosPropertyService_PropertySet':define_property(Obj3, ?id4, ?val4)),
?match(4, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
?match(ok,'CosPropertyService_PropertySet':
define_properties(Obj3, [#'CosPropertyService_Property'{property_name = ?id5,
property_value = ?val5}])),
?match(5, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
'CosPropertyService_PropertySet':get_property_value(Obj3, "wrongID")),
?match(?val2, 'CosPropertyService_PropertySet':get_property_value(Obj3, ?id2)),
?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
'CosPropertyService_PropertySet':get_property_value(Obj3, "")),
?match({'EXCEPTION',{'CosPropertyService_InvalidPropertyName',_}},
'CosPropertyService_PropertySet':is_property_defined(Obj3, "")),
?match(false, 'CosPropertyService_PropertySet':is_property_defined(Obj3, "wrongID")),
?match(true, 'CosPropertyService_PropertySet':is_property_defined(Obj3, ?id1)),
?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
'CosPropertyService_PropertySet':delete_property(Obj3, "wrongId")),
?match(ok, 'CosPropertyService_PropertySet':delete_property(Obj3, ?id5)),
?match(4, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
'CosPropertyService_PropertySet':delete_properties(Obj3, [?id1, ?id2, ?id3, "wrongID"])),
?match(1, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
?match(true, 'CosPropertyService_PropertySet':delete_all_properties(Obj3)),
?match(0, 'CosPropertyService_PropertySet':get_number_of_properties(Obj3)),
corba:dispose(Obj3),
?match(ok, cosProperty:stop_SetFactory(Fac)),
ok.
define_with_mode_api(_Config) ->
ValidDefs = [#'CosPropertyService_PropertyDef'
{property_name = ?id1,
property_value = ?val1,
property_mode = normal},
#'CosPropertyService_PropertyDef'
{property_name = ?id2,
property_value = ?val2,
property_mode = normal},
#'CosPropertyService_PropertyDef'
{property_name = ?id3,
property_value = ?val3,
property_mode = normal}],
Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetDefFactory()),
io:format("@@@@ Testing PropertySetDef returned by the factory operation create_propertysetdef/1 @@@@", []),
Obj = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
create_propertysetdef(Fac)),
no prop 's created and no restrictions at all
?match(0, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj)),
?match({ok, []}, 'CosPropertyService_PropertySetDef':get_allowed_property_types(Obj)),
?match({ok, []}, 'CosPropertyService_PropertySetDef':get_allowed_properties(Obj)),
Add two properties .
?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj, ?id4, ?val4, read_only)),
?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj, ?id5, ?val5, normal)),
Try to add the same property again ( should n't add another since using the sam I d ) .
?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj, ?id5, ?val5, normal)),
?match({'EXCEPTION',{'CosPropertyService_ConflictingProperty',_}},
'CosPropertyService_PropertySetDef':define_property_with_mode(Obj, ?id5, ?val4, normal)),
Should be two now .
?match(2, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj)),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj, ?id4)),
?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj, ?id5)),
?match(ok, 'CosPropertyService_PropertySetDef':
define_properties_with_modes(Obj,
[#'CosPropertyService_PropertyDef'{property_name = ?id1,
property_value = ?val1,
property_mode = normal},
#'CosPropertyService_PropertyDef'{property_name = ?id2,
property_value = ?val2,
property_mode = normal},
#'CosPropertyService_PropertyDef'{property_name = ?id3,
property_value = ?val3,
property_mode = normal}])),
Should be five now .
?match(5, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj)),
?match({true, [_,_]}, 'CosPropertyService_PropertySetDef':get_property_modes(Obj, [?id1, ?id3])),
?match({false, [_,_,_]}, 'CosPropertyService_PropertySetDef':get_property_modes(Obj, [?id1, ?id3, "wrongID"])),
?match(ok, 'CosPropertyService_PropertySetDef':set_property_mode(Obj, ?id1, read_only)),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj, ?id1)),
?match({'EXCEPTION',{'CosPropertyService_PropertyNotFound',_}},
'CosPropertyService_PropertySetDef':set_property_mode(Obj, "wrongID", read_only)),
?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
'CosPropertyService_PropertySetDef':
set_property_modes(Obj,
[#'CosPropertyService_PropertyMode'{property_name = ?id2,
property_mode = read_only},
#'CosPropertyService_PropertyMode'{property_name = ?id3,
property_mode = read_only},
#'CosPropertyService_PropertyMode'{property_name = "wrongID",
property_mode = read_only}])),
?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj, ?id2)),
?match(ok,
'CosPropertyService_PropertySetDef':
set_property_modes(Obj,
[#'CosPropertyService_PropertyMode'{property_name = ?id2,
property_mode = read_only},
#'CosPropertyService_PropertyMode'{property_name = ?id3,
property_mode = read_only}])),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj, ?id2)),
corba:dispose(Obj),
io:format("@@@@ Testing PropertySetDef returned by the factory operation create_constrained_propertysetdef/3 @@@@", []),
Obj2 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
create_constrained_propertysetdef(Fac, [tk_short], ValidDefs)),
no prop 's created and the restrictions that only Properties eq . to ValidDefs
?match(0, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj2)),
?match({ok, [tk_short]}, 'CosPropertyService_PropertySetDef':get_allowed_property_types(Obj2)),
?match({ok, [_,_,_]}, 'CosPropertyService_PropertySetDef':get_allowed_properties(Obj2)),
?match({'EXCEPTION', {'CosPropertyService_UnsupportedProperty',_}},
'CosPropertyService_PropertySetDef':define_property_with_mode(Obj2, ?id4, ?val4, read_only)),
?match(0, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj2)),
?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj2, ?id1, ?val1, normal)),
?match(1, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj2)),
Add a sequence of 1 valid and one invalid Prop 's
?match({'EXCEPTION', {'CosPropertyService_MultipleExceptions',_,_}},
'CosPropertyService_PropertySetDef':
define_properties_with_modes(Obj2,
[#'CosPropertyService_PropertyDef'{property_name = ?id2,
property_value = ?val2,
property_mode = normal},
#'CosPropertyService_PropertyDef'{property_name = "wrongID",
property_value = ?val2,
property_mode = normal}])),
?match(1, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj2)),
?match(ok, 'CosPropertyService_PropertySetDef':
define_properties_with_modes(Obj2,
[#'CosPropertyService_PropertyDef'{property_name = ?id3,
property_value = ?val3,
property_mode = normal}])),
Add a sequence of 1 valid Prop .
?match(2, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj2)),
?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id1)),
?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id3)),
?match({true, [_,_]}, 'CosPropertyService_PropertySetDef':get_property_modes(Obj2, [?id1, ?id3])),
?match({false, [_,_,_]}, 'CosPropertyService_PropertySetDef':get_property_modes(Obj2, [?id1, ?id3, "wrongID"])),
?match(ok, 'CosPropertyService_PropertySetDef':set_property_mode(Obj2, ?id1, read_only)),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id1)),
?match(ok, 'CosPropertyService_PropertySetDef':
set_property_modes(Obj2,
[#'CosPropertyService_PropertyMode'{property_name = ?id1,
property_mode = read_only},
#'CosPropertyService_PropertyMode'{property_name = ?id3,
property_mode = read_only}])),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id1)),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id3)),
?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
'CosPropertyService_PropertySetDef':
set_property_modes(Obj2,
[#'CosPropertyService_PropertyMode'{property_name = ?id1,
property_mode = normal},
#'CosPropertyService_PropertyMode'{property_name = ?id3,
property_mode = normal},
#'CosPropertyService_PropertyMode'{property_name = "wrongID",
property_mode = normal}])),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id1)),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj2, ?id3)),
corba:dispose(Obj2),
io:format("@@@@ Testing PropertySetDef returned by the factory operation create_initial_propertysetdef/2 @@@@", []),
Obj3 = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetDefFactory':
create_initial_propertysetdef(Fac, ValidDefs)),
the supplied prop 's are created and no restrictions .
?match(3, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj3)),
?match({ok, []}, 'CosPropertyService_PropertySetDef':get_allowed_property_types(Obj3)),
?match({ok, []}, 'CosPropertyService_PropertySetDef':get_allowed_properties(Obj3)),
?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj3, ?id4, ?val4, read_only)),
?match(4, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj3)),
?match(ok, 'CosPropertyService_PropertySetDef':define_property_with_mode(Obj3, ?id5, ?val5, read_only)),
?match(5, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj3)),
?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id1)),
?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id2)),
?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id3)),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id4)),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id5)),
?match({true, [_,_,_,_,_]},
'CosPropertyService_PropertySetDef':get_property_modes(Obj3, [?id1, ?id2, ?id3, ?id4, ?id5])),
?match({false, [_,_]},
'CosPropertyService_PropertySetDef':get_property_modes(Obj3, [?id1, "wrongID"])),
?match(ok, 'CosPropertyService_PropertySetDef':set_property_mode(Obj3, ?id4, normal)),
?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id4)),
?match(ok, 'CosPropertyService_PropertySetDef':
set_property_modes(Obj3,
[#'CosPropertyService_PropertyMode'{property_name = ?id1,
property_mode = read_only},
#'CosPropertyService_PropertyMode'{property_name = ?id2,
property_mode = read_only}])),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id1)),
?match(read_only, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id2)),
?match({'EXCEPTION',{'CosPropertyService_MultipleExceptions',_,_}},
'CosPropertyService_PropertySetDef':
set_property_modes(Obj3,
[#'CosPropertyService_PropertyMode'{property_name = ?id3,
property_mode = read_only},
#'CosPropertyService_PropertyMode'{property_name = ?id4,
property_mode = read_only},
#'CosPropertyService_PropertyMode'{property_name = "wrongID",
property_mode = read_only}])),
?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id3)),
?match(normal, 'CosPropertyService_PropertySetDef':get_property_mode(Obj3, ?id4)),
corba:dispose(Obj3),
?match(ok, cosProperty:stop_SetDefFactory(Fac)),
ok.
CosPropertyService_PropertyNamesIterator API tests
names_iterator_api(_Config) ->
Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetFactory()),
Obj = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
create_propertyset(Fac)),
?match(ok, 'CosPropertyService_PropertySet':
define_properties(Obj, [#'CosPropertyService_Property'{property_name = ?id1,
property_value = ?val1},
#'CosPropertyService_Property'{property_name = ?id2,
property_value = ?val2},
#'CosPropertyService_Property'{property_name = ?id3,
property_value = ?val3}])),
?match(3, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj)),
{_, _,ItObj} = ?match({ok, [], _}, 'CosPropertyService_PropertySetDef':get_all_property_names(Obj, 0)),
?match({false, [_,_,_]}, 'CosPropertyService_PropertyNamesIterator':next_n(ItObj,3)),
?match(ok, 'CosPropertyService_PropertyNamesIterator':reset(ItObj)),
?match({false, [_,_,_]}, 'CosPropertyService_PropertyNamesIterator':next_n(ItObj,4)),
?match(ok, 'CosPropertyService_PropertyNamesIterator':reset(ItObj)),
?match({true, [_]}, 'CosPropertyService_PropertyNamesIterator':next_n(ItObj,1)),
?match({true, _}, 'CosPropertyService_PropertyNamesIterator':next_one(ItObj)),
?match({true, _}, 'CosPropertyService_PropertyNamesIterator':next_one(ItObj)),
?match({false, _}, 'CosPropertyService_PropertyNamesIterator':next_one(ItObj)),
?match(ok, 'CosPropertyService_PropertyNamesIterator':destroy(ItObj)),
corba:dispose(Obj),
ok.
properties_iterator_api(_Config) ->
Fac = ?match({_,pseudo,_,_,_,_}, cosProperty:start_SetFactory()),
Obj = ?match({_,pseudo,_,_,_,_}, 'CosPropertyService_PropertySetFactory':
create_propertyset(Fac)),
?match(ok, 'CosPropertyService_PropertySet':
define_properties(Obj, [#'CosPropertyService_Property'{property_name = ?id1,
property_value = ?val1},
#'CosPropertyService_Property'{property_name = ?id2,
property_value = ?val2},
#'CosPropertyService_Property'{property_name = ?id3,
property_value = ?val3}])),
?match(3, 'CosPropertyService_PropertySetDef':get_number_of_properties(Obj)),
{_, _,ItObj} = ?match({ok, [], _},
'CosPropertyService_PropertySetDef':get_all_properties(Obj, 0)),
?match({false, [_,_,_]}, 'CosPropertyService_PropertiesIterator':next_n(ItObj,3)),
?match(ok, 'CosPropertyService_PropertiesIterator':reset(ItObj)),
?match({false, [_,_,_]}, 'CosPropertyService_PropertiesIterator':next_n(ItObj,4)),
?match(ok, 'CosPropertyService_PropertiesIterator':reset(ItObj)),
?match({true, [_]}, 'CosPropertyService_PropertiesIterator':next_n(ItObj,1)),
?match({true, {'CosPropertyService_Property',_,_}},
'CosPropertyService_PropertiesIterator':next_one(ItObj)),
?match({true, {'CosPropertyService_Property',_,_}},
'CosPropertyService_PropertiesIterator':next_one(ItObj)),
?match({false, {'CosPropertyService_Property',_,_}},
'CosPropertyService_PropertiesIterator':next_one(ItObj)),
?match(ok, 'CosPropertyService_PropertiesIterator':destroy(ItObj)),
corba:dispose(Obj),
ok.
|
d703cc618292e17a73136625c7241476adc684103532d658ee8eb06c6b1a51f9 | SonarQubeCommunity/sonar-erlang | erlcount_counter2.erl | -module(erlcount_counter2).
-behaviour(gen_server).
-export([start_link/4]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-record(state, {dispatcher, ref, file, re}).
start_link(DispatcherPid, Ref, FileName, Regex) ->
gen_server:start_link(?MODULE, [DispatcherPid, Ref, FileName, Regex], []).
%%============================================
%Comment 0
%%--------------------------------------------
init([DispatcherPid, Ref, FileName, Regex]) ->
self() ! start,
{ok, #state{dispatcher=DispatcherPid,
ref = Ref,
file = FileName,
re = Regex}}.
%%-------------------------------------
handle_call(_Msg, _From, State) ->
{noreply, State}.
%%======================================
handle_cast(_Msg, State) ->
{noreply, State}.
%%======================================
%Comment
handle_info(start, S = #state{re=Re, ref=Ref}) ->
{ok, Bin} = file:read_file(S#state.file),
Count = erlcount_lib:regex_count(Re, Bin),
erlcount_dispatch:complete(S#state.dispatcher, Re, Ref, Count),
{stop, normal, S}.
Comment2
%---------------------------------------
terminate(_Reason, _State) ->
ok.
%%Comment3
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
| null | https://raw.githubusercontent.com/SonarQubeCommunity/sonar-erlang/279eb7ccd84787c1c0cfd34b9a07981eb20183e3/sonar-erlang-plugin/src/test/resources/org/sonar/plugins/erlang/erlcount/.eunit/erlcount_counter2.erl | erlang | ============================================
Comment 0
--------------------------------------------
-------------------------------------
======================================
======================================
Comment
---------------------------------------
Comment3 | -module(erlcount_counter2).
-behaviour(gen_server).
-export([start_link/4]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-record(state, {dispatcher, ref, file, re}).
start_link(DispatcherPid, Ref, FileName, Regex) ->
gen_server:start_link(?MODULE, [DispatcherPid, Ref, FileName, Regex], []).
init([DispatcherPid, Ref, FileName, Regex]) ->
self() ! start,
{ok, #state{dispatcher=DispatcherPid,
ref = Ref,
file = FileName,
re = Regex}}.
handle_call(_Msg, _From, State) ->
{noreply, State}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(start, S = #state{re=Re, ref=Ref}) ->
{ok, Bin} = file:read_file(S#state.file),
Count = erlcount_lib:regex_count(Re, Bin),
erlcount_dispatch:complete(S#state.dispatcher, Re, Ref, Count),
{stop, normal, S}.
Comment2
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
|
463dd3810cf0ecd392cd186e684ab659402dc80ac8473966b13e9b8164fe9313 | heraldry/heraldicon | tooltip.cljs | (ns heraldicon.frontend.tooltip
(:require
[heraldicon.frontend.language :refer [tr]]))
(defn- tooltip [message & {:keys [element width class center? style]
:or {element [:i.ui-icon.fas.fa-question-circle]
class "info"}}]
(when message
[:div.tooltip {:class class
:style style}
element
[:div.bottom {:style {:width width}}
[:i]
(cond
(vector? message) message
center? [:h3 {:style {:text-align "center"}} [tr message]]
:else [:p [tr message]])]]))
(defn info [message & {:as options}]
[tooltip message (assoc options
:class "info"
:style {:display "inline-block"
:margin-left "0.2em"
:vertical-align "top"})])
(defn choice [message element & {:as options}]
[tooltip message (assoc options
:class "choice"
:element element
:center? true)])
| null | https://raw.githubusercontent.com/heraldry/heraldicon/f742958ce1e85f47c8222f99c6c594792ac5a793/src/heraldicon/frontend/tooltip.cljs | clojure | (ns heraldicon.frontend.tooltip
(:require
[heraldicon.frontend.language :refer [tr]]))
(defn- tooltip [message & {:keys [element width class center? style]
:or {element [:i.ui-icon.fas.fa-question-circle]
class "info"}}]
(when message
[:div.tooltip {:class class
:style style}
element
[:div.bottom {:style {:width width}}
[:i]
(cond
(vector? message) message
center? [:h3 {:style {:text-align "center"}} [tr message]]
:else [:p [tr message]])]]))
(defn info [message & {:as options}]
[tooltip message (assoc options
:class "info"
:style {:display "inline-block"
:margin-left "0.2em"
:vertical-align "top"})])
(defn choice [message element & {:as options}]
[tooltip message (assoc options
:class "choice"
:element element
:center? true)])
| |
1b3cc49a4b17701afbfc7b2e708e359358be56a854477373466f595bc5320f3c | lambdaisland/glogi | print.cljs | (ns lambdaisland.glogi.print
(:require [lambdaisland.glogi :as glogi]
[goog.object :as gobj]))
;; -tomorrow-scheme/blob/master/tomorrow.yaml
(def colors
{:white "#ffffff"
:gray1 "#e0e0e0"
:gray2 "#d6d6d6"
:gray3 "#8e908c"
:gray4 "#969896"
:gray5 "#4d4d4c"
:gray6 "#282a2e"
:black "#1d1f21"
:red "#c82829"
:orange "#f5871f"
:yellow "#eab700"
:green "#718c00"
:turqoise "#3e999f"
:blue "#4271ae"
:purple "#8959a8"
:brown "#a3685a"})
(defn level-color [level]
(condp <= (glogi/level-value level)
(glogi/level-value :severe) :red
(glogi/level-value :warning) :orange
(glogi/level-value :info) :blue
(glogi/level-value :config) :green
(glogi/level-value :fine) :yellow
(glogi/level-value :finer) :gray3
(glogi/level-value :finest) :gray4
:gray2))
(defn add
([[res res-css] s]
[(str res s) res-css])
([[res res-css] s color]
[(str res "%c" (str s) "%c") (conj res-css (str "color:" (get colors color)) "color:black")])
([[res res-css] s fg bg]
[(str res "%c" (str s) "%c") (conj res-css
(str "color:" (get colors fg)
";background-color:" (get colors bg))
"color:black;background-color:inherit")]))
(defn print-console-log-css [res value]
(cond
(= ::comma value)
(add res ", " :gray2)
(= ::space value)
(add res " ")
(keyword? value)
(add res value :blue)
(symbol? value)
(add res value :green)
(string? value)
(add res (pr-str value) :turqoise)
(map-entry? value)
(-> res
(print-console-log-css (key value))
(add " ")
(print-console-log-css (val value)))
(or (instance? cljs.core/PersistentArrayMap value)
(instance? cljs.core/PersistentHashMap value))
(as-> res %
(add % "{" :purple)
(reduce print-console-log-css % (interpose ::comma value))
(add % "}" :purple))
(map? value) ;; non-standard map implementation
(as-> res %
(add % (str "#" (let [t (type value)
n (.-name t)]
(if (empty? n)
(pr-str t)
n)) " ") :brown)
(add % "{" :purple)
(reduce print-console-log-css % (interpose ::comma value))
(add % "}" :purple))
(set? value)
(as-> res %
(add % "#{" :purple)
(reduce print-console-log-css % (interpose ::space value))
(add % "}" :purple))
(vector? value)
(as-> res %
(add % "[" :purple)
(reduce print-console-log-css % (interpose ::space value))
(add % "]" :purple))
(instance? cljs.core.PersistentQueue value)
(-> res
(add "#queue " :brown)
(recur (into [] value)))
(seq? value)
(as-> res %
(add % "(" :brown)
(reduce print-console-log-css % (interpose ::space value))
(add % ")" :brown))
(satisfies? IAtom value)
(-> res
(add "#atom " :brown)
(recur @value))
(uuid? value)
(-> res
(add "#uuid " :brown)
(recur (str value)))
(object? value)
(-> res
(add "#js " :brown)
(recur (reduce #(assoc %1 (keyword %2) (gobj/get value %2)) {} (js/Object.keys value))))
(array? value)
(-> res
(add "#js " :brown)
(recur (into [] value)))
:else
(add res (pr-str value) :gray5)))
(defn format [level logger-name value]
(let [color (level-color level)
[res res-css] (-> ["" []]
(add "[" :white color)
(add logger-name :white color)
(add "]" :white color)
(add " ")
(print-console-log-css value))]
(cons res res-css)))
| null | https://raw.githubusercontent.com/lambdaisland/glogi/431f535c1683500126254ae79eb3ba8ffe07ab92/src/lambdaisland/glogi/print.cljs | clojure | -tomorrow-scheme/blob/master/tomorrow.yaml
non-standard map implementation | (ns lambdaisland.glogi.print
(:require [lambdaisland.glogi :as glogi]
[goog.object :as gobj]))
(def colors
{:white "#ffffff"
:gray1 "#e0e0e0"
:gray2 "#d6d6d6"
:gray3 "#8e908c"
:gray4 "#969896"
:gray5 "#4d4d4c"
:gray6 "#282a2e"
:black "#1d1f21"
:red "#c82829"
:orange "#f5871f"
:yellow "#eab700"
:green "#718c00"
:turqoise "#3e999f"
:blue "#4271ae"
:purple "#8959a8"
:brown "#a3685a"})
(defn level-color [level]
(condp <= (glogi/level-value level)
(glogi/level-value :severe) :red
(glogi/level-value :warning) :orange
(glogi/level-value :info) :blue
(glogi/level-value :config) :green
(glogi/level-value :fine) :yellow
(glogi/level-value :finer) :gray3
(glogi/level-value :finest) :gray4
:gray2))
(defn add
([[res res-css] s]
[(str res s) res-css])
([[res res-css] s color]
[(str res "%c" (str s) "%c") (conj res-css (str "color:" (get colors color)) "color:black")])
([[res res-css] s fg bg]
[(str res "%c" (str s) "%c") (conj res-css
(str "color:" (get colors fg)
";background-color:" (get colors bg))
"color:black;background-color:inherit")]))
(defn print-console-log-css [res value]
(cond
(= ::comma value)
(add res ", " :gray2)
(= ::space value)
(add res " ")
(keyword? value)
(add res value :blue)
(symbol? value)
(add res value :green)
(string? value)
(add res (pr-str value) :turqoise)
(map-entry? value)
(-> res
(print-console-log-css (key value))
(add " ")
(print-console-log-css (val value)))
(or (instance? cljs.core/PersistentArrayMap value)
(instance? cljs.core/PersistentHashMap value))
(as-> res %
(add % "{" :purple)
(reduce print-console-log-css % (interpose ::comma value))
(add % "}" :purple))
(as-> res %
(add % (str "#" (let [t (type value)
n (.-name t)]
(if (empty? n)
(pr-str t)
n)) " ") :brown)
(add % "{" :purple)
(reduce print-console-log-css % (interpose ::comma value))
(add % "}" :purple))
(set? value)
(as-> res %
(add % "#{" :purple)
(reduce print-console-log-css % (interpose ::space value))
(add % "}" :purple))
(vector? value)
(as-> res %
(add % "[" :purple)
(reduce print-console-log-css % (interpose ::space value))
(add % "]" :purple))
(instance? cljs.core.PersistentQueue value)
(-> res
(add "#queue " :brown)
(recur (into [] value)))
(seq? value)
(as-> res %
(add % "(" :brown)
(reduce print-console-log-css % (interpose ::space value))
(add % ")" :brown))
(satisfies? IAtom value)
(-> res
(add "#atom " :brown)
(recur @value))
(uuid? value)
(-> res
(add "#uuid " :brown)
(recur (str value)))
(object? value)
(-> res
(add "#js " :brown)
(recur (reduce #(assoc %1 (keyword %2) (gobj/get value %2)) {} (js/Object.keys value))))
(array? value)
(-> res
(add "#js " :brown)
(recur (into [] value)))
:else
(add res (pr-str value) :gray5)))
(defn format [level logger-name value]
(let [color (level-color level)
[res res-css] (-> ["" []]
(add "[" :white color)
(add logger-name :white color)
(add "]" :white color)
(add " ")
(print-console-log-css value))]
(cons res res-css)))
|
06ec471d51d08a5ad4fa6d5a962ccca3d82971b0f222096f437e899fc9ef1618 | ygrek/mldonkey | syslog.mli | syslog(3 ) routines for ocaml
Copyright ( C ) 2002 < >
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
Copyright (C) 2002 Shawn Wagner <>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
* Syslog routines
(** These are loosely based on the unix syslog(3) function and
relatives. *)
* The assorted logging facilities . The default is [ ` LOG_USER ] . You
can set a new default with openlog , or give a specific facility per
syslog call .
can set a new default with openlog, or give a specific facility per
syslog call. *)
type facility =
[ `LOG_KERN | `LOG_USER | `LOG_MAIL | `LOG_DAEMON | `LOG_AUTH
| `LOG_SYSLOG | `LOG_LPR | `LOG_NEWS | `LOG_UUCP | `LOG_CRON
| `LOG_AUTHPRIV | `LOG_FTP | `LOG_NTP | `LOG_SECURITY
| `LOG_CONSOLE | `LOG_LOCAL0 | `LOG_LOCAL1 | `LOG_LOCAL2
| `LOG_LOCAL3 | `LOG_LOCAL4 | `LOG_LOCAL5 | `LOG_LOCAL6
| `LOG_LOCAL7 ]
* Flags to pass to openlog . [ ` LOG_CONS ] is n't implemented
yet . is mandatory and implied
yet. LOG_NDELAY is mandatory and implied *)
type flag = [ `LOG_CONS | `LOG_PERROR | `LOG_PID ]
(** The priority of the error. *)
type level = [ `LOG_EMERG | `LOG_ALERT | `LOG_CRIT | `LOG_ERR | `LOG_WARNING
| `LOG_NOTICE | `LOG_INFO | `LOG_DEBUG ]
(** the type of a syslog connection *)
type t
* given a string descibing a facility , return the facility . The
strings consist of the name of the facility with the LOG _ chopped
off . They are not case sensitive . @raise Syslog_error when given
an invalid facility
strings consist of the name of the facility with the LOG_ chopped
off. They are not case sensitive. @raise Syslog_error when given
an invalid facility *)
val facility_of_string: string -> facility
* openlog ? ( logpath = AUTODETECTED ) ? ( facility=`LOG_USER ) ? ( flags= [ ] )
program_name , similar to openlog(3 ) @raise Syslog_error on
error
program_name, similar to openlog(3) @raise Syslog_error on
error *)
val openlog: ?logpath:string -> ?facility:facility -> ?flags:flag list -> string -> t
* Same as syslog(3 ) , except there 's no formats . @raise Syslog_error
on error ( very rare )
on error (very rare) *)
val syslog: ?fac:facility -> t -> level -> string -> unit
(** Close the log. @raise Syslog_error on error *)
val closelog: t -> unit
| null | https://raw.githubusercontent.com/ygrek/mldonkey/333868a12bb6cd25fed49391dd2c3a767741cb51/src/utils/lib/syslog.mli | ocaml | * These are loosely based on the unix syslog(3) function and
relatives.
* The priority of the error.
* the type of a syslog connection
* Close the log. @raise Syslog_error on error | syslog(3 ) routines for ocaml
Copyright ( C ) 2002 < >
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
Copyright (C) 2002 Shawn Wagner <>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
* Syslog routines
* The assorted logging facilities . The default is [ ` LOG_USER ] . You
can set a new default with openlog , or give a specific facility per
syslog call .
can set a new default with openlog, or give a specific facility per
syslog call. *)
type facility =
[ `LOG_KERN | `LOG_USER | `LOG_MAIL | `LOG_DAEMON | `LOG_AUTH
| `LOG_SYSLOG | `LOG_LPR | `LOG_NEWS | `LOG_UUCP | `LOG_CRON
| `LOG_AUTHPRIV | `LOG_FTP | `LOG_NTP | `LOG_SECURITY
| `LOG_CONSOLE | `LOG_LOCAL0 | `LOG_LOCAL1 | `LOG_LOCAL2
| `LOG_LOCAL3 | `LOG_LOCAL4 | `LOG_LOCAL5 | `LOG_LOCAL6
| `LOG_LOCAL7 ]
* Flags to pass to openlog . [ ` LOG_CONS ] is n't implemented
yet . is mandatory and implied
yet. LOG_NDELAY is mandatory and implied *)
type flag = [ `LOG_CONS | `LOG_PERROR | `LOG_PID ]
type level = [ `LOG_EMERG | `LOG_ALERT | `LOG_CRIT | `LOG_ERR | `LOG_WARNING
| `LOG_NOTICE | `LOG_INFO | `LOG_DEBUG ]
type t
* given a string descibing a facility , return the facility . The
strings consist of the name of the facility with the LOG _ chopped
off . They are not case sensitive . @raise Syslog_error when given
an invalid facility
strings consist of the name of the facility with the LOG_ chopped
off. They are not case sensitive. @raise Syslog_error when given
an invalid facility *)
val facility_of_string: string -> facility
* openlog ? ( logpath = AUTODETECTED ) ? ( facility=`LOG_USER ) ? ( flags= [ ] )
program_name , similar to openlog(3 ) @raise Syslog_error on
error
program_name, similar to openlog(3) @raise Syslog_error on
error *)
val openlog: ?logpath:string -> ?facility:facility -> ?flags:flag list -> string -> t
* Same as syslog(3 ) , except there 's no formats . @raise Syslog_error
on error ( very rare )
on error (very rare) *)
val syslog: ?fac:facility -> t -> level -> string -> unit
val closelog: t -> unit
|
4df211f5c39bfac95642fc18e1aed57e9b04781a049a323793ea6293a9fbe63b | jlollis/sicp-solutions | 1.03.scm | #lang sicp
Exercise 1.3
(define (square x) (* x x))
(define (sumsquares x y) (+ (square x) (square y)))
(define (sqsumlargest a b c)
(cond
((and (>= a c) (>= b c)) (sumsquares a b))
((and (>= b a) (>= c a)) (sumsquares b c))
((and (>= a b) (>= c b)) (sumsquares a c))))
Enter ( sqsumlargest < val_1 > < val_2 > < val_3 > ) in terminal for result
| null | https://raw.githubusercontent.com/jlollis/sicp-solutions/7b03befcfe82e26a7fb28d94bc99292bc484f9dd/Chapter%201%20Exercises/1.03.scm | scheme | #lang sicp
Exercise 1.3
(define (square x) (* x x))
(define (sumsquares x y) (+ (square x) (square y)))
(define (sqsumlargest a b c)
(cond
((and (>= a c) (>= b c)) (sumsquares a b))
((and (>= b a) (>= c a)) (sumsquares b c))
((and (>= a b) (>= c b)) (sumsquares a c))))
Enter ( sqsumlargest < val_1 > < val_2 > < val_3 > ) in terminal for result
| |
7c8588e0d28d06090d5a75b340d2fa2b36823e52beeefdf3995c07b5aa79072a | symphonyoss/clj-symphony | user_connection.clj | ;
Copyright 2017 Fintech Open Source Foundation
SPDX - License - Identifier : Apache-2.0
;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
; you may not use this file except in compliance with the License.
; You may obtain a copy of the License at
;
; -2.0
;
; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
;
(ns clj-symphony.user-connection
"Operations related to user connections and requests for those connections.
In Symphony, a user connection is an explicitly established relationship
between users in different pods."
(:require [clj-symphony.user :as syu]))
(def user-connection-states
"The set of possible user connections states in Symphony, as keywords."
(set (map #(keyword (str %)) (org.symphonyoss.symphony.clients.model.SymUserConnection$Status/values))))
(defn userconnectionobj->map
"Converts a `org.symphonyoss.symphony.clients.model.SymUserConnection` object
into a map with these keys:
| Key | Description |
|-----------------------|---------------------------------------------------------------------|
| `:user-id` | The id of the user to whom the request was made. |
| `:status` | The current status of the request (see [[user-connection-states]]). |
| `:first-request-date` | The date a connection was first requested. |
| `:update-date` | The date the last connection request was initiated. |
| `:request-count` | The total number of connection requests made. |
"
[^org.symphonyoss.symphony.clients.model.SymUserConnection uc]
(if uc
{
:user-id (.getUserId uc)
:status (if-let [status (.getStatus uc)]
(keyword (str status)))
:first-request-date (if-let [first-requested-at-epoch (.getFirstRequestedAt uc)]
(java.util.Date. first-requested-at-epoch))
:update-date (if-let [updated-at-epoch (.getUpdatedAt uc)]
(java.util.Date. updated-at-epoch))
:request-count (.getRequestCounter uc)
}))
(defn user-connectionsobjs
"Returns all `org.symphonyoss.symphony.clients.model.SymUserConnection`
objects for the authenticated user."
[^org.symphonyoss.client.SymphonyClient c]
(.getAllConnections (.getConnectionsClient c)))
(defn user-connections
"Returns all user connections for the authenticated user, as a lazy sequence
of maps (see [[userconnectionobj->map]] for details)."
[c]
(map userconnectionobj->map (user-connectionsobjs c)))
(defn accepted-requestsobjs
"Returns all accepted user connection requests as
`org.symphonyoss.symphony.clients.model.SymUserConnection` objects for the
authenticated user."
[^org.symphonyoss.client.SymphonyClient c]
(.getAcceptedRequests (.getConnectionsClient c)))
(defn accepted-requests
"Returns all accepted user connection requests for the authenticated user, as
a lazy sequence of maps (see [[userconnectionobj->map]] for details)."
[c]
(map userconnectionobj->map (accepted-requestsobjs c)))
(defn pending-requestsobjs
"Returns all pending user connection requests as
`org.symphonyoss.symphony.clients.model.SymUserConnection` objects for the
authenticated user."
[^org.symphonyoss.client.SymphonyClient c]
(.getPendingRequests (.getConnectionsClient c)))
(defn pending-requests
"Returns all pending user connection requests for the authenticated user, as a
lazy sequence of maps (see [[userconnectionobj->map]] for details)."
[c]
(map userconnectionobj->map (pending-requestsobjs c)))
(defn rejected-requestsobjs
"Returns all rejected user connection requests as
`org.symphonyoss.symphony.clients.model.SymUserConnection` objects for the
authenticated user."
[^org.symphonyoss.client.SymphonyClient c]
(.getRejectedRequests (.getConnectionsClient c)))
(defn rejected-requests
"Returns all rejected user connection requests for the authenticated user, as
a lazy sequence of maps (see [[userconnectionobj->map]] for details)."
[c]
(map userconnectionobj->map (rejected-requestsobjs c)))
(defn incoming-requestsobjs
"Returns all incoming user connection requests as
`org.symphonyoss.symphony.clients.model.SymUserConnection` objects for the
authenticated user."
[^org.symphonyoss.client.SymphonyClient c]
(.getIncomingRequests (.getConnectionsClient c)))
(defn incoming-requests
"Returns all incoming user connection requests for the authenticated user, as
a lazy sequence of maps (see [[userconnectionobj->map]] for details)."
[c]
(map userconnectionobj->map (incoming-requestsobjs c)))
(defn- ^org.symphonyoss.symphony.clients.model.SymUserConnectionRequest build-connection-requestobj
[u]
(org.symphonyoss.symphony.clients.model.SymUserConnectionRequest.
(doto (org.symphonyoss.symphony.clients.model.SymUserConnection.)
(.setUserId (syu/user-id u)))))
(defn send-connection-request!
"Sends a connection request to the given user."
[^org.symphonyoss.client.SymphonyClient c u]
(.sendConnectionRequest (.getConnectionsClient c) (build-connection-requestobj u))
nil)
(defn accept-connection-request!
"Accepts a connection request from the given user."
[^org.symphonyoss.client.SymphonyClient c u]
(.acceptConnectionRequest (.getConnectionsClient c) (build-connection-requestobj u))
nil)
(defn reject-connection-request!
"Rejects a connection request from the given user."
[^org.symphonyoss.client.SymphonyClient c u]
(.rejectConnectionRequest (.getConnectionsClient c) (build-connection-requestobj u))
nil)
(defn accept-all-connection-requests!
"Convenience method that unconditionally accepts all incoming user connection
requests, returning the number accepted."
[c]
(let [incoming-requests (incoming-requests c)]
(doall (map (partial accept-connection-request! c) incoming-requests))
(count incoming-requests)))
(defn reject-all-connection-requests!
"Convenience method that unconditionally rejects all incoming user connection
requests, returning the number rejected."
[c]
(let [incoming-requests (incoming-requests c)]
(doall (map (partial reject-connection-request! c) incoming-requests))
(count incoming-requests)))
| null | https://raw.githubusercontent.com/symphonyoss/clj-symphony/6efcb46c931d48ec7abccfd3b891436fb3261d46/src/clj_symphony/user_connection.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
| Copyright 2017 Fintech Open Source Foundation
SPDX - License - Identifier : Apache-2.0
distributed under the License is distributed on an " AS IS " BASIS ,
(ns clj-symphony.user-connection
"Operations related to user connections and requests for those connections.
In Symphony, a user connection is an explicitly established relationship
between users in different pods."
(:require [clj-symphony.user :as syu]))
(def user-connection-states
"The set of possible user connections states in Symphony, as keywords."
(set (map #(keyword (str %)) (org.symphonyoss.symphony.clients.model.SymUserConnection$Status/values))))
(defn userconnectionobj->map
"Converts a `org.symphonyoss.symphony.clients.model.SymUserConnection` object
into a map with these keys:
| Key | Description |
|-----------------------|---------------------------------------------------------------------|
| `:user-id` | The id of the user to whom the request was made. |
| `:status` | The current status of the request (see [[user-connection-states]]). |
| `:first-request-date` | The date a connection was first requested. |
| `:update-date` | The date the last connection request was initiated. |
| `:request-count` | The total number of connection requests made. |
"
[^org.symphonyoss.symphony.clients.model.SymUserConnection uc]
(if uc
{
:user-id (.getUserId uc)
:status (if-let [status (.getStatus uc)]
(keyword (str status)))
:first-request-date (if-let [first-requested-at-epoch (.getFirstRequestedAt uc)]
(java.util.Date. first-requested-at-epoch))
:update-date (if-let [updated-at-epoch (.getUpdatedAt uc)]
(java.util.Date. updated-at-epoch))
:request-count (.getRequestCounter uc)
}))
(defn user-connectionsobjs
"Returns all `org.symphonyoss.symphony.clients.model.SymUserConnection`
objects for the authenticated user."
[^org.symphonyoss.client.SymphonyClient c]
(.getAllConnections (.getConnectionsClient c)))
(defn user-connections
"Returns all user connections for the authenticated user, as a lazy sequence
of maps (see [[userconnectionobj->map]] for details)."
[c]
(map userconnectionobj->map (user-connectionsobjs c)))
(defn accepted-requestsobjs
"Returns all accepted user connection requests as
`org.symphonyoss.symphony.clients.model.SymUserConnection` objects for the
authenticated user."
[^org.symphonyoss.client.SymphonyClient c]
(.getAcceptedRequests (.getConnectionsClient c)))
(defn accepted-requests
"Returns all accepted user connection requests for the authenticated user, as
a lazy sequence of maps (see [[userconnectionobj->map]] for details)."
[c]
(map userconnectionobj->map (accepted-requestsobjs c)))
(defn pending-requestsobjs
"Returns all pending user connection requests as
`org.symphonyoss.symphony.clients.model.SymUserConnection` objects for the
authenticated user."
[^org.symphonyoss.client.SymphonyClient c]
(.getPendingRequests (.getConnectionsClient c)))
(defn pending-requests
"Returns all pending user connection requests for the authenticated user, as a
lazy sequence of maps (see [[userconnectionobj->map]] for details)."
[c]
(map userconnectionobj->map (pending-requestsobjs c)))
(defn rejected-requestsobjs
"Returns all rejected user connection requests as
`org.symphonyoss.symphony.clients.model.SymUserConnection` objects for the
authenticated user."
[^org.symphonyoss.client.SymphonyClient c]
(.getRejectedRequests (.getConnectionsClient c)))
(defn rejected-requests
"Returns all rejected user connection requests for the authenticated user, as
a lazy sequence of maps (see [[userconnectionobj->map]] for details)."
[c]
(map userconnectionobj->map (rejected-requestsobjs c)))
(defn incoming-requestsobjs
"Returns all incoming user connection requests as
`org.symphonyoss.symphony.clients.model.SymUserConnection` objects for the
authenticated user."
[^org.symphonyoss.client.SymphonyClient c]
(.getIncomingRequests (.getConnectionsClient c)))
(defn incoming-requests
"Returns all incoming user connection requests for the authenticated user, as
a lazy sequence of maps (see [[userconnectionobj->map]] for details)."
[c]
(map userconnectionobj->map (incoming-requestsobjs c)))
(defn- ^org.symphonyoss.symphony.clients.model.SymUserConnectionRequest build-connection-requestobj
[u]
(org.symphonyoss.symphony.clients.model.SymUserConnectionRequest.
(doto (org.symphonyoss.symphony.clients.model.SymUserConnection.)
(.setUserId (syu/user-id u)))))
(defn send-connection-request!
"Sends a connection request to the given user."
[^org.symphonyoss.client.SymphonyClient c u]
(.sendConnectionRequest (.getConnectionsClient c) (build-connection-requestobj u))
nil)
(defn accept-connection-request!
"Accepts a connection request from the given user."
[^org.symphonyoss.client.SymphonyClient c u]
(.acceptConnectionRequest (.getConnectionsClient c) (build-connection-requestobj u))
nil)
(defn reject-connection-request!
"Rejects a connection request from the given user."
[^org.symphonyoss.client.SymphonyClient c u]
(.rejectConnectionRequest (.getConnectionsClient c) (build-connection-requestobj u))
nil)
(defn accept-all-connection-requests!
"Convenience method that unconditionally accepts all incoming user connection
requests, returning the number accepted."
[c]
(let [incoming-requests (incoming-requests c)]
(doall (map (partial accept-connection-request! c) incoming-requests))
(count incoming-requests)))
(defn reject-all-connection-requests!
"Convenience method that unconditionally rejects all incoming user connection
requests, returning the number rejected."
[c]
(let [incoming-requests (incoming-requests c)]
(doall (map (partial reject-connection-request! c) incoming-requests))
(count incoming-requests)))
|
8b707d5619ecc4a5411970e0268d7b700f83e15a9b20dfddbc2e4e1402ae0233 | PaulRivier/kiwi | Pandoc.hs | # LANGUAGE FlexibleContexts , OverloadedStrings #
module Kiwi.Pandoc where
import Control.Applicative ((<|>))
import Control.Monad.Writer.Lazy (runWriter, tell)
import qualified Data.Map as M
import Data.Maybe (fromMaybe, catMaybes)
import qualified Data.Text as T
import qualified Data.Time as Time
import qualified System.Directory as D
import qualified System.FilePath as FP
import qualified Text.Pandoc as P
import qualified Text.Pandoc.Walk as PW
import qualified Text . Pandoc . Shared as PS
import Text.Read (readMaybe)
import Kiwi.Types
import Kiwi.Utils (getFileContent, splitOnFirst, (>:), for, splitMeta,
pathToPageId)
import qualified Kiwi.Utils as U
loadPage :: T.Text -> T.Text -> MetaData -> [CustomMetaConfig] ->
FP.FilePath -> Either String PandocPage
loadPage c source md cmc dir = case splitOnFirst "\n\n" c of
Nothing -> Left "Metadata is not valid"
Just (metaC, content) ->
let docMeta = parseMetaData metaC
meta = md { metaId = T.concat <$> M.lookup "id" docMeta
, metaTitle = findWith (metaTitle md) (T.intercalate " ")
"title" docMeta
, metaTags = findWith (metaTags md) (splitMeta . T.intercalate ",")
"tags" docMeta
, metaAccess = findWith (metaAccess md) (splitMeta . T.intercalate ",")
"access" docMeta
, metaLang = fromMaybe (metaLang md)
( M.lookup "lang" docMeta >>=
readMaybe . T.unpack . last )
, metaCustom = parseCustomMeta docMeta cmc
}
dirT = T.pack $ dir
imagesDir = findWith dirT T.concat "images-dir" docMeta
filesDir = findWith dirT T.concat "files-dir" docMeta
in case (P.runPure $ P.readCommonMark mdConf content) of
Left e -> Left $ show e
Right doc'->
let (doc, collected) = walkDoc source dirT imagesDir filesDir doc'
colLinks = [ (source, l) | CollectedPageLink l <- collected ]
in Right (PandocPage doc meta colLinks)
where
mdConf = P.def { P.readerExtensions = P.pandocExtensions }
findWith def join' field meta = fromMaybe def $
fmap join' $
M.lookup field meta
data LinkType = PageLink T.Text
| AnchorLink T.Text
| ImageLink T.Text
| FileLink T.Text
| OtherLink T.Text
walkDoc :: T.Text -> T.Text -> T.Text -> T.Text ->
P.Pandoc -> (P.Pandoc, [CollectedFromDoc])
walkDoc source pageDir imagesDir filesDir doc =
runWriter (PW.walkM (fixInlines pageDir imagesDir filesDir) doc)
where
-- chemin des images
fixInlines _ ipd _ (P.Image attr desc (rawLink, lName)) = do
return $ P.Image attr desc ((fixImage ipd rawLink), "fig:" <> lName)
chemin des pages , fichiers ,
fixInlines ppd ipd fpd (P.Link attr txt (rawLink, lName)) = do
let parsedLink = parseLink rawLink
( newAttr , newLink , newTxt )
PageLink l -> do
tell [CollectedPageLink $ absolutePageId ppd l]
return $ P.Link (addClass "kiwi-link-page" attr) txt
(pathToPage ppd l, lName)
AnchorLink l -> return $ P.Link (addClass "kiwi-link-anchor" attr) txt
(l, lName)
ImageLink l -> return $ P.Link (addClass "kiwi-link-image" attr) txt
(pathToImage ipd l, lName)
FileLink l -> return $ P.Link (addClass "kiwi-link-file" attr) txt
(pathToFile fpd l, lName)
OtherLink l -> return $ P.Link (addClass "kiwi-link-external" attr) txt
(l, lName)
-- reste
fixInlines _ _ _ x = return $ x
parseLink :: T.Text -> LinkType
parseLink l = let ltM = (PageLink <$> T.stripPrefix ("page:") l) <|>
(ImageLink <$> T.stripPrefix ("image:") l) <|>
(FileLink <$> T.stripPrefix ("file:") l) <|>
(if T.isPrefixOf "#" l then Just (AnchorLink l) else Nothing)
in fromMaybe (OtherLink l) ltM
addClass :: T.Text -> P.Attr -> P.Attr
addClass c (idAttr, classAttr, kvs) = (idAttr, c:classAttr, kvs)
-- setId :: T.Text -> P.Attr -> P.Attr
setId i ( _ , , kvs ) = ( i , , kvs )
fixImage :: T.Text -> T.Text -> T.Text
fixImage ipd l = let nl = pathToImage ipd <$> T.stripPrefix "image:" l
in fromMaybe l nl
pathToImage d r = pathToR "/image" d r
pathToFile d r = pathToR "/file" d r
pathToR n d r =
case T.uncons r of
-- absolute link
Just ('/', l) -> U.joinPathT [n, source, l]
-- relative link
_ -> U.joinPathT [n, source, d, r]
pathToPage d r = U.joinPathT ["/page", source, absolutePageId d r]
absolutePageId d r =
case T.uncons r of
-- absolute link
Just ('/', l) -> U.normalizePageId l
-- relative link
_ -> U.normalizePageId $ U.joinPathT [d, r]
loadPageIO :: FP.FilePath -> FP.FilePath -> T.Text ->
MetaData -> [CustomMetaConfig] -> Maybe SourceConfig ->
IO Page
loadPageIO fullPath relPath source md cmc scM = do
cM <- getFileContent fullPath
mt <- D.getModificationTime fullPath
let pageDir = case FP.takeDirectory relPath of
"." -> ""
x -> x
let rootTag = fromMaybe [] (scRootTag <$> scM)
case cM of
Nothing -> error ("file not found : " ++ fullPath)
Just c -> case (loadPage c source md cmc pageDir) of
Left _ -> error "Unable to read markdown"
Right p -> let meta = pandocMeta p
pId = fromMaybe (pathToPageId relPath) $ metaId meta in
return $ Page { pageUID = (source, pId)
, pageAbsoluteFSPath = fullPath
, pageMTime = mt
, pageDoc = pandocDoc p
, pageTitle = metaTitle meta
, pageTags = map (U.prefixNormalizeTag rootTag) $
metaTags meta
, pageAccess = metaAccess meta
, pageLang = metaLang meta
, pageLinks = pandocPageLinks p
, pageCustomMeta = metaCustom meta
}
parseCustomMeta :: M.Map T.Text [T.Text] -> [CustomMetaConfig] -> [CustomMetaData]
parseCustomMeta m c = catMaybes $ for c $
\cmc -> case M.lookup (cmcName cmc) m of
Nothing -> Nothing
Just v -> let cmdKs = doRead (cmcType cmc) $ concatMap (splitMeta) v
in Just $ CustomMetaData (cmcName cmc) cmdKs
where
doRead CmtText = map KeyText
doRead CmtInt = map KeyInt . catMaybes . map (readMaybe . T.unpack)
doRead CmtDate = map KeyDay . catMaybes . map readDateMaybe
doRead CmtBool = map KeyBool . map readBool
readDateMaybe :: T.Text -> Maybe Time.Day
readDateMaybe d = Time.parseTimeM True Time.defaultTimeLocale "%Y-%-m-%-d" (T.unpack d)
readBool :: T.Text -> Bool
readBool b = if elem (T.toLower b) ["yes", "true", "1"] then True else False
parseMetaData :: T.Text -> M.Map T.Text [T.Text]
parseMetaData t = let metaLines = T.lines t
in M.fromListWith (flip (++)) $
catMaybes $ map parseMetaLine metaLines
where
parseMetaLine l = (splitOnFirst ":" l) >:
\(k,v) -> (T.strip k, [T.strip v])
| null | https://raw.githubusercontent.com/PaulRivier/kiwi/fa6974af68809f59a39cb1458f58b082d0e552b3/src/Kiwi/Pandoc.hs | haskell | chemin des images
reste
setId :: T.Text -> P.Attr -> P.Attr
absolute link
relative link
absolute link
relative link | # LANGUAGE FlexibleContexts , OverloadedStrings #
module Kiwi.Pandoc where
import Control.Applicative ((<|>))
import Control.Monad.Writer.Lazy (runWriter, tell)
import qualified Data.Map as M
import Data.Maybe (fromMaybe, catMaybes)
import qualified Data.Text as T
import qualified Data.Time as Time
import qualified System.Directory as D
import qualified System.FilePath as FP
import qualified Text.Pandoc as P
import qualified Text.Pandoc.Walk as PW
import qualified Text . Pandoc . Shared as PS
import Text.Read (readMaybe)
import Kiwi.Types
import Kiwi.Utils (getFileContent, splitOnFirst, (>:), for, splitMeta,
pathToPageId)
import qualified Kiwi.Utils as U
loadPage :: T.Text -> T.Text -> MetaData -> [CustomMetaConfig] ->
FP.FilePath -> Either String PandocPage
loadPage c source md cmc dir = case splitOnFirst "\n\n" c of
Nothing -> Left "Metadata is not valid"
Just (metaC, content) ->
let docMeta = parseMetaData metaC
meta = md { metaId = T.concat <$> M.lookup "id" docMeta
, metaTitle = findWith (metaTitle md) (T.intercalate " ")
"title" docMeta
, metaTags = findWith (metaTags md) (splitMeta . T.intercalate ",")
"tags" docMeta
, metaAccess = findWith (metaAccess md) (splitMeta . T.intercalate ",")
"access" docMeta
, metaLang = fromMaybe (metaLang md)
( M.lookup "lang" docMeta >>=
readMaybe . T.unpack . last )
, metaCustom = parseCustomMeta docMeta cmc
}
dirT = T.pack $ dir
imagesDir = findWith dirT T.concat "images-dir" docMeta
filesDir = findWith dirT T.concat "files-dir" docMeta
in case (P.runPure $ P.readCommonMark mdConf content) of
Left e -> Left $ show e
Right doc'->
let (doc, collected) = walkDoc source dirT imagesDir filesDir doc'
colLinks = [ (source, l) | CollectedPageLink l <- collected ]
in Right (PandocPage doc meta colLinks)
where
mdConf = P.def { P.readerExtensions = P.pandocExtensions }
findWith def join' field meta = fromMaybe def $
fmap join' $
M.lookup field meta
data LinkType = PageLink T.Text
| AnchorLink T.Text
| ImageLink T.Text
| FileLink T.Text
| OtherLink T.Text
walkDoc :: T.Text -> T.Text -> T.Text -> T.Text ->
P.Pandoc -> (P.Pandoc, [CollectedFromDoc])
walkDoc source pageDir imagesDir filesDir doc =
runWriter (PW.walkM (fixInlines pageDir imagesDir filesDir) doc)
where
fixInlines _ ipd _ (P.Image attr desc (rawLink, lName)) = do
return $ P.Image attr desc ((fixImage ipd rawLink), "fig:" <> lName)
chemin des pages , fichiers ,
fixInlines ppd ipd fpd (P.Link attr txt (rawLink, lName)) = do
let parsedLink = parseLink rawLink
( newAttr , newLink , newTxt )
PageLink l -> do
tell [CollectedPageLink $ absolutePageId ppd l]
return $ P.Link (addClass "kiwi-link-page" attr) txt
(pathToPage ppd l, lName)
AnchorLink l -> return $ P.Link (addClass "kiwi-link-anchor" attr) txt
(l, lName)
ImageLink l -> return $ P.Link (addClass "kiwi-link-image" attr) txt
(pathToImage ipd l, lName)
FileLink l -> return $ P.Link (addClass "kiwi-link-file" attr) txt
(pathToFile fpd l, lName)
OtherLink l -> return $ P.Link (addClass "kiwi-link-external" attr) txt
(l, lName)
fixInlines _ _ _ x = return $ x
parseLink :: T.Text -> LinkType
parseLink l = let ltM = (PageLink <$> T.stripPrefix ("page:") l) <|>
(ImageLink <$> T.stripPrefix ("image:") l) <|>
(FileLink <$> T.stripPrefix ("file:") l) <|>
(if T.isPrefixOf "#" l then Just (AnchorLink l) else Nothing)
in fromMaybe (OtherLink l) ltM
addClass :: T.Text -> P.Attr -> P.Attr
addClass c (idAttr, classAttr, kvs) = (idAttr, c:classAttr, kvs)
setId i ( _ , , kvs ) = ( i , , kvs )
fixImage :: T.Text -> T.Text -> T.Text
fixImage ipd l = let nl = pathToImage ipd <$> T.stripPrefix "image:" l
in fromMaybe l nl
pathToImage d r = pathToR "/image" d r
pathToFile d r = pathToR "/file" d r
pathToR n d r =
case T.uncons r of
Just ('/', l) -> U.joinPathT [n, source, l]
_ -> U.joinPathT [n, source, d, r]
pathToPage d r = U.joinPathT ["/page", source, absolutePageId d r]
absolutePageId d r =
case T.uncons r of
Just ('/', l) -> U.normalizePageId l
_ -> U.normalizePageId $ U.joinPathT [d, r]
loadPageIO :: FP.FilePath -> FP.FilePath -> T.Text ->
MetaData -> [CustomMetaConfig] -> Maybe SourceConfig ->
IO Page
loadPageIO fullPath relPath source md cmc scM = do
cM <- getFileContent fullPath
mt <- D.getModificationTime fullPath
let pageDir = case FP.takeDirectory relPath of
"." -> ""
x -> x
let rootTag = fromMaybe [] (scRootTag <$> scM)
case cM of
Nothing -> error ("file not found : " ++ fullPath)
Just c -> case (loadPage c source md cmc pageDir) of
Left _ -> error "Unable to read markdown"
Right p -> let meta = pandocMeta p
pId = fromMaybe (pathToPageId relPath) $ metaId meta in
return $ Page { pageUID = (source, pId)
, pageAbsoluteFSPath = fullPath
, pageMTime = mt
, pageDoc = pandocDoc p
, pageTitle = metaTitle meta
, pageTags = map (U.prefixNormalizeTag rootTag) $
metaTags meta
, pageAccess = metaAccess meta
, pageLang = metaLang meta
, pageLinks = pandocPageLinks p
, pageCustomMeta = metaCustom meta
}
parseCustomMeta :: M.Map T.Text [T.Text] -> [CustomMetaConfig] -> [CustomMetaData]
parseCustomMeta m c = catMaybes $ for c $
\cmc -> case M.lookup (cmcName cmc) m of
Nothing -> Nothing
Just v -> let cmdKs = doRead (cmcType cmc) $ concatMap (splitMeta) v
in Just $ CustomMetaData (cmcName cmc) cmdKs
where
doRead CmtText = map KeyText
doRead CmtInt = map KeyInt . catMaybes . map (readMaybe . T.unpack)
doRead CmtDate = map KeyDay . catMaybes . map readDateMaybe
doRead CmtBool = map KeyBool . map readBool
readDateMaybe :: T.Text -> Maybe Time.Day
readDateMaybe d = Time.parseTimeM True Time.defaultTimeLocale "%Y-%-m-%-d" (T.unpack d)
readBool :: T.Text -> Bool
readBool b = if elem (T.toLower b) ["yes", "true", "1"] then True else False
parseMetaData :: T.Text -> M.Map T.Text [T.Text]
parseMetaData t = let metaLines = T.lines t
in M.fromListWith (flip (++)) $
catMaybes $ map parseMetaLine metaLines
where
parseMetaLine l = (splitOnFirst ":" l) >:
\(k,v) -> (T.strip k, [T.strip v])
|
30a7f301281bf5e8bd7d68a114bb75d371ff6e217e84fe242aeffdaeccf015d8 | liqd/thentos | Action.hs | {-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleContexts #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ViewPatterns #
module Thentos.Adhocracy3.Action
( a3ServiceId
, activate
, addUser
, externalUrlOfDefaultPersona
, login
, makeExternalUrl
, resetPassword
) where
import Control.Lens ((^.))
import Control.Monad.Except (throwError)
import Data.Configifier ((>>.), Tagged(Tagged))
import Data.Monoid ((<>))
import Data.Proxy (Proxy(Proxy))
import Data.String.Conversions (SBS, ST, cs)
import System.Log (Priority(DEBUG))
import qualified Data.Aeson.Encode.Pretty as Aeson
import qualified Data.Text as ST
import qualified URI.ByteString as URI
import Thentos.Adhocracy3.Action.Types
import Thentos.Action.TCB (loggerA)
import Thentos.Config
import Thentos.Types
import qualified Thentos.Action as A
import qualified Thentos.Action.TCB as A
import qualified Thentos.Action.Unsafe as U
import qualified Thentos.Adhocracy3.Action.Unsafe as U
| Add a user in Thentos , but not yet in A3 . Only after the Thentos user has been activated
-- (confirmed), a persona is created in thentos together with a corresponding adhocracy user in A3
-- that corresponds to that persona.
addUser :: A3UserWithPass -> A3Action TypedPathWithCacheControl
addUser (A3UserWithPass user) = A.logIfError $ do
loggerA DEBUG . ("route addUser: " <>) . cs .
Aeson.encodePretty $ A3UserNoPass user
A.addUnconfirmedUser user
config <- A.getConfig
let dummyPath = a3backendPath config ""
return $ TypedPathWithCacheControl (TypedPath dummyPath CTUser) [] [] [] []
-- FIXME Which path should we return here, considering that no A3 user exists yet?!
-- FIXME We correctly return empty cache-control info since the A3 DB isn't (yet) changed,
but normally the A3 backend would return the following info if a new user is created :
-- changedDescendants: "", "principals/", "principals/users/", "principals/groups/"
-- created: userPath
-- modified: "principals/users/", "principals/groups/authenticated/"
Find out if not returning this stuff leads to problems in the A3 frontend !
--
-- possible solution: deliver thentos registration widget; disable all adhocracy frontend-code
-- that touches this end-point; provide user resources from outside of widgets only.
| Activate a new user . This also creates a persona and a corresponding adhocracy user in the A3 backend ,
so that the user is able to log into A3 . The user 's actual password and email address are
only stored in Thentos and NOT exposed to A3 .
activate :: ActivationRequest -> A3Action RequestResult
activate ar@(ActivationRequest confToken) = A.logIfError $ do
loggerA DEBUG . ("route activate:" <>) . cs $ Aeson.encodePretty ar
(uid, stok) <- A.confirmNewUser confToken
-- Promote access rights so we can look up the user and create a persona
U.extendClearanceOnAgent (UserA uid)
user <- snd <$> A.lookupConfirmedUser uid
let persName = PersonaName . fromUserName $ user ^. userName
externalUrl <- makeExternalUrl persName
persona <- A.addPersona persName uid $ Just externalUrl
sid <- a3ServiceId
-- Register persona for the default ("") context of the default service (A3)
A.registerPersonaWithContext persona sid ""
pure $ RequestSuccess (Path . cs . renderUri $ externalUrl) stok
| Make user path relative to our exposed URL instead of the proxied A3 backend URL . Only works
-- for @/principals/users/...@. (Returns exposed url.)
makeExternalUrl :: PersonaName -> A3Action Uri
makeExternalUrl pn = U.createUserInA3 pn >>= f
where
f :: Path -> A3Action Uri
f (Path path@(ST.breakOn "/principals/users/" -> (_, localPath)))
| ST.null localPath = do
throwError . OtherError . A3UriParseError . URI.OtherError $ "bad A3 user uri: " <> cs path
| otherwise = do
config <- A.getConfig
let (Path fullPath) = a3backendPath config localPath
case parseUri $ cs fullPath of
Left err -> throwError . OtherError $ A3UriParseError err
Right uri -> pure uri
-- | Log a user in.
login :: LoginRequest -> A3Action RequestResult
login r = A.logIfError $ do
loggerA DEBUG "/login/"
(uid, stok) <- case r of
LoginByName uname pass -> A.startThentosSessionByUserName uname pass
LoginByEmail email pass -> A.startThentosSessionByUserEmail email pass
userUrl <- externalUrlOfDefaultPersona uid
return $ RequestSuccess (Path $ cs userUrl) stok
| Finish password reset with email confirmation and open a new ThentosSession for the user .
resetPassword :: PasswordResetRequest -> A3Action RequestResult
resetPassword (PasswordResetRequest resetTok pass) = A.logIfError $ do
loggerA DEBUG $ "route password_reset for token: " <> show resetTok
uid <- A.resetPassword resetTok pass
sessTok <- A.startThentosSessionByUserId uid pass
userUrl <- externalUrlOfDefaultPersona uid
return $ RequestSuccess (Path $ cs userUrl) sessTok
| Convert a local file name into a absolute path relative to the A3 backend endpoint . ( Returns
-- exposed url.)
a3backendPath :: ThentosConfig -> ST -> Path
a3backendPath config localPath = Path $ cs (exposeUrl beHttp) <//> localPath
where
beHttp = case config >>. (Proxy :: Proxy '["backend"]) of
Nothing -> error "a3backendPath: backend not configured!"
Just v -> Tagged v
-- * helper actions
| Find the ServiceId of the A3 backend , which should be registered as default proxied app .
a3ServiceId :: A3Action ServiceId
a3ServiceId = do
config <- A.getConfig
maybe (error "a3ServiceId: A3 proxy not configured") return $
ServiceId <$> config >>. (Proxy :: Proxy '["proxy", "service_id"])
-- | Return the external URL of a user's default ("") persona, in rendered form.
externalUrlOfDefaultPersona :: UserId -> A3Action SBS
externalUrlOfDefaultPersona uid = do
sid <- a3ServiceId
persona <- A.findPersona uid sid "" >>=
maybe (throwError . OtherError $ A3NoDefaultPersona uid sid) pure
userUrl <- maybe (throwError $ OtherError A3PersonaLacksExternalUrl) pure $
persona ^. personaExternalUrl
pure $ renderUri userUrl
| null | https://raw.githubusercontent.com/liqd/thentos/f7d53d8e9d11956d2cc83efb5f5149876109b098/thentos-adhocracy/src/Thentos/Adhocracy3/Action.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE OverloadedStrings #
(confirmed), a persona is created in thentos together with a corresponding adhocracy user in A3
that corresponds to that persona.
FIXME Which path should we return here, considering that no A3 user exists yet?!
FIXME We correctly return empty cache-control info since the A3 DB isn't (yet) changed,
changedDescendants: "", "principals/", "principals/users/", "principals/groups/"
created: userPath
modified: "principals/users/", "principals/groups/authenticated/"
possible solution: deliver thentos registration widget; disable all adhocracy frontend-code
that touches this end-point; provide user resources from outside of widgets only.
Promote access rights so we can look up the user and create a persona
Register persona for the default ("") context of the default service (A3)
for @/principals/users/...@. (Returns exposed url.)
| Log a user in.
exposed url.)
* helper actions
| Return the external URL of a user's default ("") persona, in rendered form. | # LANGUAGE FlexibleContexts #
# LANGUAGE ViewPatterns #
module Thentos.Adhocracy3.Action
( a3ServiceId
, activate
, addUser
, externalUrlOfDefaultPersona
, login
, makeExternalUrl
, resetPassword
) where
import Control.Lens ((^.))
import Control.Monad.Except (throwError)
import Data.Configifier ((>>.), Tagged(Tagged))
import Data.Monoid ((<>))
import Data.Proxy (Proxy(Proxy))
import Data.String.Conversions (SBS, ST, cs)
import System.Log (Priority(DEBUG))
import qualified Data.Aeson.Encode.Pretty as Aeson
import qualified Data.Text as ST
import qualified URI.ByteString as URI
import Thentos.Adhocracy3.Action.Types
import Thentos.Action.TCB (loggerA)
import Thentos.Config
import Thentos.Types
import qualified Thentos.Action as A
import qualified Thentos.Action.TCB as A
import qualified Thentos.Action.Unsafe as U
import qualified Thentos.Adhocracy3.Action.Unsafe as U
| Add a user in Thentos , but not yet in A3 . Only after the Thentos user has been activated
addUser :: A3UserWithPass -> A3Action TypedPathWithCacheControl
addUser (A3UserWithPass user) = A.logIfError $ do
loggerA DEBUG . ("route addUser: " <>) . cs .
Aeson.encodePretty $ A3UserNoPass user
A.addUnconfirmedUser user
config <- A.getConfig
let dummyPath = a3backendPath config ""
return $ TypedPathWithCacheControl (TypedPath dummyPath CTUser) [] [] [] []
but normally the A3 backend would return the following info if a new user is created :
Find out if not returning this stuff leads to problems in the A3 frontend !
| Activate a new user . This also creates a persona and a corresponding adhocracy user in the A3 backend ,
so that the user is able to log into A3 . The user 's actual password and email address are
only stored in Thentos and NOT exposed to A3 .
activate :: ActivationRequest -> A3Action RequestResult
activate ar@(ActivationRequest confToken) = A.logIfError $ do
loggerA DEBUG . ("route activate:" <>) . cs $ Aeson.encodePretty ar
(uid, stok) <- A.confirmNewUser confToken
U.extendClearanceOnAgent (UserA uid)
user <- snd <$> A.lookupConfirmedUser uid
let persName = PersonaName . fromUserName $ user ^. userName
externalUrl <- makeExternalUrl persName
persona <- A.addPersona persName uid $ Just externalUrl
sid <- a3ServiceId
A.registerPersonaWithContext persona sid ""
pure $ RequestSuccess (Path . cs . renderUri $ externalUrl) stok
| Make user path relative to our exposed URL instead of the proxied A3 backend URL . Only works
makeExternalUrl :: PersonaName -> A3Action Uri
makeExternalUrl pn = U.createUserInA3 pn >>= f
where
f :: Path -> A3Action Uri
f (Path path@(ST.breakOn "/principals/users/" -> (_, localPath)))
| ST.null localPath = do
throwError . OtherError . A3UriParseError . URI.OtherError $ "bad A3 user uri: " <> cs path
| otherwise = do
config <- A.getConfig
let (Path fullPath) = a3backendPath config localPath
case parseUri $ cs fullPath of
Left err -> throwError . OtherError $ A3UriParseError err
Right uri -> pure uri
login :: LoginRequest -> A3Action RequestResult
login r = A.logIfError $ do
loggerA DEBUG "/login/"
(uid, stok) <- case r of
LoginByName uname pass -> A.startThentosSessionByUserName uname pass
LoginByEmail email pass -> A.startThentosSessionByUserEmail email pass
userUrl <- externalUrlOfDefaultPersona uid
return $ RequestSuccess (Path $ cs userUrl) stok
| Finish password reset with email confirmation and open a new ThentosSession for the user .
resetPassword :: PasswordResetRequest -> A3Action RequestResult
resetPassword (PasswordResetRequest resetTok pass) = A.logIfError $ do
loggerA DEBUG $ "route password_reset for token: " <> show resetTok
uid <- A.resetPassword resetTok pass
sessTok <- A.startThentosSessionByUserId uid pass
userUrl <- externalUrlOfDefaultPersona uid
return $ RequestSuccess (Path $ cs userUrl) sessTok
| Convert a local file name into a absolute path relative to the A3 backend endpoint . ( Returns
a3backendPath :: ThentosConfig -> ST -> Path
a3backendPath config localPath = Path $ cs (exposeUrl beHttp) <//> localPath
where
beHttp = case config >>. (Proxy :: Proxy '["backend"]) of
Nothing -> error "a3backendPath: backend not configured!"
Just v -> Tagged v
| Find the ServiceId of the A3 backend , which should be registered as default proxied app .
a3ServiceId :: A3Action ServiceId
a3ServiceId = do
config <- A.getConfig
maybe (error "a3ServiceId: A3 proxy not configured") return $
ServiceId <$> config >>. (Proxy :: Proxy '["proxy", "service_id"])
externalUrlOfDefaultPersona :: UserId -> A3Action SBS
externalUrlOfDefaultPersona uid = do
sid <- a3ServiceId
persona <- A.findPersona uid sid "" >>=
maybe (throwError . OtherError $ A3NoDefaultPersona uid sid) pure
userUrl <- maybe (throwError $ OtherError A3PersonaLacksExternalUrl) pure $
persona ^. personaExternalUrl
pure $ renderUri userUrl
|
b129e5465fd12bc7d4d1960016ed9c5d41fbd6b7fd440e09f9e0e0ae6db6301b | haskell-beam/beam | Ord.hs | # LANGUAGE UndecidableInstances #
| Defines classen ' ' and ' SqlOrd ' that can be used to perform equality
-- and comparison operations on certain expressions.
--
In particular , any ' Beamable ' value over ' QGenExpr ' or any ' QGenExpr '
-- object can be compared for equality and inequality using the '(==.)' and
-- '(/=.)' operators respectively.
--
-- Simple (scalar) 'QGenExpr's can be compared using the '(<.)', '(>.)',
-- '(<=.)', and '(>=.)' operators respectively.
--
-- The "Quantified Comparison Syntax" (i.e., @.. > ANY (..)@) is supported
-- using the corresponding operators suffixed with a @*@ before the dot. For
-- example, @x == ANY(SELECT ..)@ can be written.
--
-- > x ==*. anyOf_ ..
--
Or , for example , @x > ALL(SELECT .. ) @ can be written
--
-- > x >*. allOf_ ..
module Database.Beam.Query.Ord
( SqlEq(..), SqlEqQuantified(..), SqlIn(..)
, HasSqlInTable(..)
, SqlOrd(..), SqlOrdQuantified(..)
, QQuantified(..)
, HasSqlEqualityCheck(..), HasSqlQuantifiedEqualityCheck(..)
, HasTableEquality, HasTableEqualityNullable
, isTrue_, isNotTrue_
, isFalse_, isNotFalse_
, isUnknown_, isNotUnknown_
, unknownAs_, sqlBool_
, possiblyNullBool_
, fromPossiblyNullBool_
, anyOf_, anyIn_
, allOf_, allIn_
, inQuery_
, between_
) where
import Database.Beam.Query.Internal
import Database.Beam.Query.Types
import Database.Beam.Query.Operator
import Database.Beam.Schema.Tables
import Database.Beam.Backend.SQL
import Database . Beam . Backend . SQL.AST ( Expression )
import Database . Beam . Backend . SQL.Builder ( SqlSyntaxBackend )
import Control.Applicative
import Control.Monad.State
import Data.Maybe
import Data.Proxy
import Data.Kind
import Data.Word
import Data.Int
import Data.Tagged
import Data.Text (Text)
import Data.Time (UTCTime, LocalTime, Day, TimeOfDay)
import GHC.TypeLits
-- | A data structure representing the set to quantify a comparison operator over.
data QQuantified be s r
= QQuantified (BeamSqlBackendExpressionQuantifierSyntax be) (WithExprContext (BeamSqlBackendExpressionSyntax be))
| Convert a /known not to a ' SqlBool ' . See ' unknownAs _ ' for the inverse
sqlBool_ :: QGenExpr context syntax s Bool -> QGenExpr context syntax s SqlBool
sqlBool_ (QExpr s) = QExpr s
-- | SQL @IS TRUE@ operator
isTrue_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isTrue_ (QExpr s) = QExpr (fmap isTrueE s)
| SQL @IS NOT TRUE@ operator
isNotTrue_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isNotTrue_ (QExpr s) = QExpr (fmap isNotTrueE s)
-- | SQL @IS FALSE@ operator
isFalse_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isFalse_ (QExpr s) = QExpr (fmap isFalseE s)
-- | SQL @IS NOT FALSE@ operator
isNotFalse_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isNotFalse_ (QExpr s) = QExpr (fmap isNotFalseE s)
-- | SQL @IS UNKNOWN@ operator
isUnknown_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isUnknown_ (QExpr s) = QExpr (fmap isUnknownE s)
-- | SQL @IS NOT UNKNOWN@ operator
isNotUnknown_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isNotUnknown_ (QExpr s) = QExpr (fmap isNotUnknownE s)
| Return the first argument if the expression has the unknown SQL value
-- See 'sqlBool_' for the inverse
unknownAs_ :: BeamSqlBackend be
=> Bool -> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
unknownAs_ False = isTrue_ -- If unknown is being treated as false, then return true only if the expression is true
unknownAs_ True = isNotFalse_ -- If unknown is being treated as true, then return true only if the expression is not false
| Retrieve a ' SqlBool ' value as a potentially @NULL@ ' ' . This
-- is useful if you want to get the value of a SQL boolean expression
directly , without having to specify what to do on @UNKNOWN@. Note
-- that both @NULL@ and @UNKNOWN@ will be returned as 'Nothing'.
possiblyNullBool_ :: QGenExpr context be s SqlBool -> QGenExpr context be s (Maybe Bool)
possiblyNullBool_ (QExpr e) = QExpr e
| Convert a possibly @NULL@ ' ' to a ' SqlBool ' .
fromPossiblyNullBool_ :: QGenExpr context be s (Maybe Bool) -> QGenExpr context be s SqlBool
fromPossiblyNullBool_ (QExpr e) = QExpr e
-- | A 'QQuantified' representing a SQL @ALL(..)@ for use with a
-- <#quantified-comparison-operator quantified comparison operator>
--
-- Accepts a subquery. Use 'allIn_' for an explicit list
allOf_
:: forall s a be db
. ( BeamSqlBackend be, HasQBuilder be )
=> Q be db (QNested s) (QExpr be (QNested s) a)
-> QQuantified be s a
allOf_ s = QQuantified quantifyOverAll (\tblPfx -> subqueryE (buildSqlQuery tblPfx s))
-- | A 'QQuantified' representing a SQL @ALL(..)@ for use with a
-- <#quantified-comparison-operator quantified comparison operator>
--
-- Accepts an explicit list of typed expressions. Use 'allOf_' for
-- a subquery
allIn_
:: forall s a be
. BeamSqlBackend be
=> [QExpr be s a]
-> QQuantified be s a
allIn_ es = QQuantified quantifyOverAll (quantifierListE <$> mapM (\(QExpr e) -> e) es)
-- | A 'QQuantified' representing a SQL @ANY(..)@ for use with a
-- <#quantified-comparison-operator quantified comparison operator>
--
-- Accepts a subquery. Use 'anyIn_' for an explicit list
anyOf_
:: forall s a be db
. ( BeamSqlBackend be, HasQBuilder be )
=> Q be db (QNested s) (QExpr be (QNested s) a)
-> QQuantified be s a
anyOf_ s = QQuantified quantifyOverAny (\tblPfx -> subqueryE (buildSqlQuery tblPfx s))
-- | A 'QQuantified' representing a SQL @ANY(..)@ for use with a
-- <#quantified-comparison-operator quantified comparison operator>
--
-- Accepts an explicit list of typed expressions. Use 'anyOf_' for
-- a subquery
anyIn_
:: forall s a be
. BeamSqlBackend be
=> [QExpr be s a]
-> QQuantified be s a
anyIn_ es = QQuantified quantifyOverAny (quantifierListE <$> mapM (\(QExpr e) -> e) es)
-- | SQL @BETWEEN@ clause
between_ :: BeamSqlBackend be
=> QGenExpr context be s a -> QGenExpr context be s a
-> QGenExpr context be s a -> QGenExpr context be s Bool
between_ (QExpr a) (QExpr min_) (QExpr max_) =
QExpr (liftA3 betweenE a min_ max_)
class SqlIn expr a | a -> expr where
-- | SQL @IN@ predicate
in_ :: a -> [ a ] -> expr Bool
instance BeamSqlBackend be => SqlIn (QGenExpr context be s) (QGenExpr context be s a) where
in_ _ [] = QExpr (pure (valueE (sqlValueSyntax False)))
in_ (QExpr row) options = QExpr (inE <$> row <*> mapM (\(QExpr o) -> o) options)
-- | Class for backends which support SQL @IN@ on lists of row values, which is
-- not part of ANSI SQL. This is useful for @IN@ on primary keys.
class BeamSqlBackend be => HasSqlInTable be where
inRowValuesE
:: Proxy be
-> BeamSqlBackendExpressionSyntax be
-> [ BeamSqlBackendExpressionSyntax be ]
-> BeamSqlBackendExpressionSyntax be
inRowValuesE Proxy = inE
instance ( HasSqlInTable be, Beamable table ) =>
SqlIn (QGenExpr context be s) (table (QGenExpr context be s)) where
in_ _ [] = QExpr (pure (valueE (sqlValueSyntax False)))
in_ row options = QExpr (inRowValuesE (Proxy @be) <$> toExpr row <*> (mapM toExpr options))
where toExpr :: table (QGenExpr context be s) -> TablePrefix -> BeamSqlBackendExpressionSyntax be
toExpr = fmap rowE . sequence . allBeamValues (\(Columnar' (QExpr x)) -> x)
infix 4 `between_`, `in_`, `inQuery_`
inQuery_ :: (HasQBuilder be, BeamSqlBackend be)
=> QGenExpr ctx be s a -> Q be db s (QExpr be s a) -> QGenExpr ctx be s Bool
inQuery_ (QExpr needle) haystack = QExpr (inSelectE <$> needle <*> flip buildSqlQuery haystack)
-- | Class for expression types or expression containers for which there is a
-- notion of equality.
--
-- Instances are provided to check the equality of expressions of the same
type as well as entire ' Beamable ' types parameterized over ' QGenExpr '
class SqlEq expr a | a -> expr where
| Given two expressions , returns whether they are equal , using semantics ( NULLs handled properly )
(==.) :: a -> a -> expr Bool
| Given two expressions , returns whether they are not equal , using semantics ( NULLs handled properly )
(/=.) :: a -> a -> expr Bool
| Given two expressions , returns the /SQL tri - state boolean/ when compared for equality
(==?.) :: a -> a -> expr SqlBool
| Given two expressions , returns the /SQL tri - state boolean/ when compared for inequality
(/=?.) :: a -> a -> expr SqlBool
-- | Class for expression types for which there is a notion of /quantified/
-- equality.
class SqlEq expr a => SqlEqQuantified expr quantified a | a -> expr quantified where
| Quantified equality and inequality using /SQL semantics/ ( tri - state boolean )
(==*.), (/=*.) :: a -> quantified -> expr SqlBool
infix 4 ==., /=., ==?., /=?., ==*., /=*.
infix 4 <., >., <=., >=.
infix 4 <*., >*., <=*., >=*.
| Class for types that can be compared for equality in the given backend
class BeamSqlBackend be => HasSqlEqualityCheck be a where
sqlEqE, sqlNeqE :: Proxy a -> Proxy be
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
sqlEqE _ _ = eqE Nothing
sqlNeqE _ _ = neqE Nothing
-- | Tri-state equality
sqlEqTriE, sqlNeqTriE :: Proxy a -> Proxy be
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
sqlEqTriE _ _ = eqE Nothing
sqlNeqTriE _ _ = neqE Nothing
type family CanCheckMaybeEquality a :: Constraint where
CanCheckMaybeEquality (Maybe a) =
TypeError ('Text "Attempt to check equality of nested Maybe." ':$$:
'Text "Beam can only reasonably check equality of a single nesting of Maybe.")
CanCheckMaybeEquality a = ()
instance (HasSqlEqualityCheck be a, CanCheckMaybeEquality a) => HasSqlEqualityCheck be (Maybe a) where
sqlEqE _ _ a b = eqMaybeE a b (sqlEqE (Proxy @a) (Proxy @be) a b)
sqlNeqE _ _ a b = neqMaybeE a b (sqlNeqE (Proxy @a) (Proxy @be) a b)
instance HasSqlEqualityCheck be a => HasSqlEqualityCheck be (SqlSerial a) where
sqlEqE _ = sqlEqE (Proxy @a)
sqlNeqE _ = sqlNeqE (Proxy @a)
sqlEqTriE _ = sqlEqTriE (Proxy @a)
sqlNeqTriE _ = sqlNeqTriE (Proxy @a)
| Class for types that can be compared for quantified equality in the given backend
class HasSqlEqualityCheck be a => HasSqlQuantifiedEqualityCheck be a where
sqlQEqE, sqlQNeqE :: Proxy a -> Proxy be
-> Maybe (BeamSqlBackendExpressionQuantifierSyntax be)
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
sqlQEqE _ _ = eqE
sqlQNeqE _ _ = neqE
instance (HasSqlQuantifiedEqualityCheck syntax a, CanCheckMaybeEquality a) => HasSqlQuantifiedEqualityCheck syntax (Maybe a) where
sqlQEqE _ = sqlQEqE (Proxy @a)
sqlQNeqE _ = sqlQNeqE (Proxy @a)
instance HasSqlQuantifiedEqualityCheck syntax a => HasSqlQuantifiedEqualityCheck syntax (SqlSerial a) where
sqlQEqE _ = sqlQEqE (Proxy @a)
sqlQNeqE _ = sqlQNeqE (Proxy @a)
| Compare two arbitrary expressions ( of the same type ) for equality
instance ( BeamSqlBackend be, HasSqlEqualityCheck be a ) =>
SqlEq (QGenExpr context be s) (QGenExpr context be s a) where
(==.) = qBinOpE (sqlEqE (Proxy @a) (Proxy @be))
(/=.) = qBinOpE (sqlNeqE (Proxy @a) (Proxy @be))
(==?.) = qBinOpE (sqlEqTriE (Proxy @a) (Proxy @be))
(/=?.) = qBinOpE (sqlNeqTriE (Proxy @a) (Proxy @be))
-- | Two arbitrary expressions can be quantifiably compared for equality.
instance ( BeamSqlBackend be, HasSqlQuantifiedEqualityCheck be a ) =>
SqlEqQuantified (QGenExpr context be s) (QQuantified be s a) (QGenExpr context be s a) where
a ==*. QQuantified q b = qBinOpE (sqlQEqE (Proxy @a) (Proxy @be) (Just q)) a (QExpr b)
a /=*. QQuantified q b = qBinOpE (sqlQNeqE (Proxy @a) (Proxy @be) (Just q)) a (QExpr b)
| Constraint synonym to check if two tables can be compared for equality
type HasTableEquality be tbl =
(FieldsFulfillConstraint (HasSqlEqualityCheck be) tbl, Beamable tbl)
type HasTableEqualityNullable be tbl =
(FieldsFulfillConstraintNullable (HasSqlEqualityCheck be) tbl, Beamable tbl)
| Compare two arbitrary ' Beamable ' types containing ' QGenExpr 's for equality .
instance ( BeamSqlBackend be, Beamable tbl
, FieldsFulfillConstraint (HasSqlEqualityCheck be) tbl ) =>
SqlEq (QGenExpr context be s) (tbl (QGenExpr context be s)) where
a ==. b = let (_, e) = runState (zipBeamFieldsM
(\x'@(Columnar' (Columnar' HasConstraint :*: Columnar' x)) (Columnar' y) ->
do modify (\expr ->
case expr of
Nothing -> Just $ x ==. y
Just expr' -> Just $ expr' &&. x ==. y)
return x') (withConstraints @(HasSqlEqualityCheck be) `alongsideTable` a) b) Nothing
in fromMaybe (QExpr (\_ -> valueE (sqlValueSyntax True))) e
a /=. b = not_ (a ==. b)
a ==?. b = let (_, e) = runState (zipBeamFieldsM
(\x'@(Columnar' (Columnar' HasConstraint :*: Columnar' x)) (Columnar' y) ->
do modify (\expr ->
case expr of
Nothing -> Just $ x ==?. y
Just expr' -> Just $ expr' &&?. x ==?. y)
return x') (withConstraints @(HasSqlEqualityCheck be) `alongsideTable` a) b) Nothing
in fromMaybe (QExpr (\_ -> valueE (sqlValueSyntax True))) e
a /=?. b = sqlNot_ (a ==?. b)
instance ( BeamSqlBackend be, Beamable tbl
, FieldsFulfillConstraintNullable (HasSqlEqualityCheck be) tbl )
=> SqlEq (QGenExpr context be s) (tbl (Nullable (QGenExpr context be s))) where
a ==. b = let (_, e) = runState (zipBeamFieldsM
(\x'@(Columnar' (Columnar' HasConstraint :*: Columnar' x)) (Columnar' y) -> do
modify (\expr ->
case expr of
Nothing -> Just $ x ==. y
Just expr' -> Just $ expr' &&. x ==. y)
return x')
(withNullableConstraints @(HasSqlEqualityCheck be) `alongsideTable` a) b) Nothing
in fromMaybe (QExpr (\_ -> valueE (sqlValueSyntax True))) e
a /=. b = not_ (a ==. b)
a ==?. b = let (_, e) = runState (zipBeamFieldsM
(\x'@(Columnar' (Columnar' HasConstraint :*: Columnar' x)) (Columnar' y) ->
do modify (\expr ->
case expr of
Nothing -> Just $ x ==?. y
Just expr' -> Just $ expr' &&?. x ==?. y)
return x') (withNullableConstraints @(HasSqlEqualityCheck be) `alongsideTable` a) b) Nothing
in fromMaybe (QExpr (\_ -> valueE (sqlValueSyntax True))) e
a /=?. b = sqlNot_ (a ==?. b)
-- * Comparisons
-- | Class for expression types or expression containers for which there is a
-- notion of ordering.
--
-- Instances are provided to check the ordering of expressions of the same
-- type. Since there is no universal notion of ordering for an arbitrary
number of expressions , no instance is provided for ' Beamable ' types .
class SqlOrd expr e | e -> expr where
(<.), (>.), (<=.), (>=.) :: e -> e -> expr Bool
-- | Class for things which can be /quantifiably/ compared.
class SqlOrd expr e =>
SqlOrdQuantified expr quantified e | e -> expr quantified where
(<*.), (>*.), (<=*.), (>=*.) :: e -> quantified -> expr Bool
instance BeamSqlBackend be =>
SqlOrd (QGenExpr context be s) (QGenExpr context be s a) where
(<.) = qBinOpE (ltE Nothing)
(>.) = qBinOpE (gtE Nothing)
(<=.) = qBinOpE (leE Nothing)
(>=.) = qBinOpE (geE Nothing)
instance BeamSqlBackend be =>
SqlOrdQuantified (QGenExpr context be s) (QQuantified be s a) (QGenExpr context be s a) where
a <*. QQuantified q b = qBinOpE (ltE (Just q)) a (QExpr b)
a <=*. QQuantified q b = qBinOpE (leE (Just q)) a (QExpr b)
a >*. QQuantified q b = qBinOpE (gtE (Just q)) a (QExpr b)
a >=*. QQuantified q b = qBinOpE (geE (Just q)) a (QExpr b)
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Text
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Integer
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Int
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Int8
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Int16
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Int32
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Int64
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Word
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Word8
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Word16
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Word32
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Word64
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Double
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Float
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Bool
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) UTCTime
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) LocalTime
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Day
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) TimeOfDay
instance ( BeamSqlBackend (MockSqlBackend cmd)
, HasSqlEqualityCheck (MockSqlBackend cmd) a
) => HasSqlEqualityCheck (MockSqlBackend cmd) (Tagged t a)
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Text
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Integer
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Int
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Int8
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Int16
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Int32
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Int64
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Word
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Word8
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Word16
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Word32
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Word64
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Double
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Float
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Bool
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) UTCTime
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) LocalTime
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Day
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) TimeOfDay
instance ( BeamSqlBackend (MockSqlBackend cmd)
, HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) a
) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) (Tagged t a)
| null | https://raw.githubusercontent.com/haskell-beam/beam/4ed616fdd9f0431a8ce634733bd6d43fed05b173/beam-core/Database/Beam/Query/Ord.hs | haskell | and comparison operations on certain expressions.
object can be compared for equality and inequality using the '(==.)' and
'(/=.)' operators respectively.
Simple (scalar) 'QGenExpr's can be compared using the '(<.)', '(>.)',
'(<=.)', and '(>=.)' operators respectively.
The "Quantified Comparison Syntax" (i.e., @.. > ANY (..)@) is supported
using the corresponding operators suffixed with a @*@ before the dot. For
example, @x == ANY(SELECT ..)@ can be written.
> x ==*. anyOf_ ..
> x >*. allOf_ ..
| A data structure representing the set to quantify a comparison operator over.
| SQL @IS TRUE@ operator
| SQL @IS FALSE@ operator
| SQL @IS NOT FALSE@ operator
| SQL @IS UNKNOWN@ operator
| SQL @IS NOT UNKNOWN@ operator
See 'sqlBool_' for the inverse
If unknown is being treated as false, then return true only if the expression is true
If unknown is being treated as true, then return true only if the expression is not false
is useful if you want to get the value of a SQL boolean expression
that both @NULL@ and @UNKNOWN@ will be returned as 'Nothing'.
| A 'QQuantified' representing a SQL @ALL(..)@ for use with a
<#quantified-comparison-operator quantified comparison operator>
Accepts a subquery. Use 'allIn_' for an explicit list
| A 'QQuantified' representing a SQL @ALL(..)@ for use with a
<#quantified-comparison-operator quantified comparison operator>
Accepts an explicit list of typed expressions. Use 'allOf_' for
a subquery
| A 'QQuantified' representing a SQL @ANY(..)@ for use with a
<#quantified-comparison-operator quantified comparison operator>
Accepts a subquery. Use 'anyIn_' for an explicit list
| A 'QQuantified' representing a SQL @ANY(..)@ for use with a
<#quantified-comparison-operator quantified comparison operator>
Accepts an explicit list of typed expressions. Use 'anyOf_' for
a subquery
| SQL @BETWEEN@ clause
| SQL @IN@ predicate
| Class for backends which support SQL @IN@ on lists of row values, which is
not part of ANSI SQL. This is useful for @IN@ on primary keys.
| Class for expression types or expression containers for which there is a
notion of equality.
Instances are provided to check the equality of expressions of the same
| Class for expression types for which there is a notion of /quantified/
equality.
| Tri-state equality
| Two arbitrary expressions can be quantifiably compared for equality.
* Comparisons
| Class for expression types or expression containers for which there is a
notion of ordering.
Instances are provided to check the ordering of expressions of the same
type. Since there is no universal notion of ordering for an arbitrary
| Class for things which can be /quantifiably/ compared. | # LANGUAGE UndecidableInstances #
| Defines classen ' ' and ' SqlOrd ' that can be used to perform equality
In particular , any ' Beamable ' value over ' QGenExpr ' or any ' QGenExpr '
Or , for example , @x > ALL(SELECT .. ) @ can be written
module Database.Beam.Query.Ord
( SqlEq(..), SqlEqQuantified(..), SqlIn(..)
, HasSqlInTable(..)
, SqlOrd(..), SqlOrdQuantified(..)
, QQuantified(..)
, HasSqlEqualityCheck(..), HasSqlQuantifiedEqualityCheck(..)
, HasTableEquality, HasTableEqualityNullable
, isTrue_, isNotTrue_
, isFalse_, isNotFalse_
, isUnknown_, isNotUnknown_
, unknownAs_, sqlBool_
, possiblyNullBool_
, fromPossiblyNullBool_
, anyOf_, anyIn_
, allOf_, allIn_
, inQuery_
, between_
) where
import Database.Beam.Query.Internal
import Database.Beam.Query.Types
import Database.Beam.Query.Operator
import Database.Beam.Schema.Tables
import Database.Beam.Backend.SQL
import Database . Beam . Backend . SQL.AST ( Expression )
import Database . Beam . Backend . SQL.Builder ( SqlSyntaxBackend )
import Control.Applicative
import Control.Monad.State
import Data.Maybe
import Data.Proxy
import Data.Kind
import Data.Word
import Data.Int
import Data.Tagged
import Data.Text (Text)
import Data.Time (UTCTime, LocalTime, Day, TimeOfDay)
import GHC.TypeLits
data QQuantified be s r
= QQuantified (BeamSqlBackendExpressionQuantifierSyntax be) (WithExprContext (BeamSqlBackendExpressionSyntax be))
| Convert a /known not to a ' SqlBool ' . See ' unknownAs _ ' for the inverse
sqlBool_ :: QGenExpr context syntax s Bool -> QGenExpr context syntax s SqlBool
sqlBool_ (QExpr s) = QExpr s
isTrue_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isTrue_ (QExpr s) = QExpr (fmap isTrueE s)
| SQL @IS NOT TRUE@ operator
isNotTrue_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isNotTrue_ (QExpr s) = QExpr (fmap isNotTrueE s)
isFalse_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isFalse_ (QExpr s) = QExpr (fmap isFalseE s)
isNotFalse_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isNotFalse_ (QExpr s) = QExpr (fmap isNotFalseE s)
isUnknown_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isUnknown_ (QExpr s) = QExpr (fmap isUnknownE s)
isNotUnknown_ :: BeamSqlBackend be
=> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
isNotUnknown_ (QExpr s) = QExpr (fmap isNotUnknownE s)
| Return the first argument if the expression has the unknown SQL value
unknownAs_ :: BeamSqlBackend be
=> Bool -> QGenExpr context be s SqlBool -> QGenExpr context be s Bool
| Retrieve a ' SqlBool ' value as a potentially @NULL@ ' ' . This
directly , without having to specify what to do on @UNKNOWN@. Note
possiblyNullBool_ :: QGenExpr context be s SqlBool -> QGenExpr context be s (Maybe Bool)
possiblyNullBool_ (QExpr e) = QExpr e
| Convert a possibly @NULL@ ' ' to a ' SqlBool ' .
fromPossiblyNullBool_ :: QGenExpr context be s (Maybe Bool) -> QGenExpr context be s SqlBool
fromPossiblyNullBool_ (QExpr e) = QExpr e
allOf_
:: forall s a be db
. ( BeamSqlBackend be, HasQBuilder be )
=> Q be db (QNested s) (QExpr be (QNested s) a)
-> QQuantified be s a
allOf_ s = QQuantified quantifyOverAll (\tblPfx -> subqueryE (buildSqlQuery tblPfx s))
allIn_
:: forall s a be
. BeamSqlBackend be
=> [QExpr be s a]
-> QQuantified be s a
allIn_ es = QQuantified quantifyOverAll (quantifierListE <$> mapM (\(QExpr e) -> e) es)
anyOf_
:: forall s a be db
. ( BeamSqlBackend be, HasQBuilder be )
=> Q be db (QNested s) (QExpr be (QNested s) a)
-> QQuantified be s a
anyOf_ s = QQuantified quantifyOverAny (\tblPfx -> subqueryE (buildSqlQuery tblPfx s))
anyIn_
:: forall s a be
. BeamSqlBackend be
=> [QExpr be s a]
-> QQuantified be s a
anyIn_ es = QQuantified quantifyOverAny (quantifierListE <$> mapM (\(QExpr e) -> e) es)
between_ :: BeamSqlBackend be
=> QGenExpr context be s a -> QGenExpr context be s a
-> QGenExpr context be s a -> QGenExpr context be s Bool
between_ (QExpr a) (QExpr min_) (QExpr max_) =
QExpr (liftA3 betweenE a min_ max_)
class SqlIn expr a | a -> expr where
in_ :: a -> [ a ] -> expr Bool
instance BeamSqlBackend be => SqlIn (QGenExpr context be s) (QGenExpr context be s a) where
in_ _ [] = QExpr (pure (valueE (sqlValueSyntax False)))
in_ (QExpr row) options = QExpr (inE <$> row <*> mapM (\(QExpr o) -> o) options)
class BeamSqlBackend be => HasSqlInTable be where
inRowValuesE
:: Proxy be
-> BeamSqlBackendExpressionSyntax be
-> [ BeamSqlBackendExpressionSyntax be ]
-> BeamSqlBackendExpressionSyntax be
inRowValuesE Proxy = inE
instance ( HasSqlInTable be, Beamable table ) =>
SqlIn (QGenExpr context be s) (table (QGenExpr context be s)) where
in_ _ [] = QExpr (pure (valueE (sqlValueSyntax False)))
in_ row options = QExpr (inRowValuesE (Proxy @be) <$> toExpr row <*> (mapM toExpr options))
where toExpr :: table (QGenExpr context be s) -> TablePrefix -> BeamSqlBackendExpressionSyntax be
toExpr = fmap rowE . sequence . allBeamValues (\(Columnar' (QExpr x)) -> x)
infix 4 `between_`, `in_`, `inQuery_`
inQuery_ :: (HasQBuilder be, BeamSqlBackend be)
=> QGenExpr ctx be s a -> Q be db s (QExpr be s a) -> QGenExpr ctx be s Bool
inQuery_ (QExpr needle) haystack = QExpr (inSelectE <$> needle <*> flip buildSqlQuery haystack)
type as well as entire ' Beamable ' types parameterized over ' QGenExpr '
class SqlEq expr a | a -> expr where
| Given two expressions , returns whether they are equal , using semantics ( NULLs handled properly )
(==.) :: a -> a -> expr Bool
| Given two expressions , returns whether they are not equal , using semantics ( NULLs handled properly )
(/=.) :: a -> a -> expr Bool
| Given two expressions , returns the /SQL tri - state boolean/ when compared for equality
(==?.) :: a -> a -> expr SqlBool
| Given two expressions , returns the /SQL tri - state boolean/ when compared for inequality
(/=?.) :: a -> a -> expr SqlBool
class SqlEq expr a => SqlEqQuantified expr quantified a | a -> expr quantified where
| Quantified equality and inequality using /SQL semantics/ ( tri - state boolean )
(==*.), (/=*.) :: a -> quantified -> expr SqlBool
infix 4 ==., /=., ==?., /=?., ==*., /=*.
infix 4 <., >., <=., >=.
infix 4 <*., >*., <=*., >=*.
| Class for types that can be compared for equality in the given backend
class BeamSqlBackend be => HasSqlEqualityCheck be a where
sqlEqE, sqlNeqE :: Proxy a -> Proxy be
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
sqlEqE _ _ = eqE Nothing
sqlNeqE _ _ = neqE Nothing
sqlEqTriE, sqlNeqTriE :: Proxy a -> Proxy be
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
sqlEqTriE _ _ = eqE Nothing
sqlNeqTriE _ _ = neqE Nothing
type family CanCheckMaybeEquality a :: Constraint where
CanCheckMaybeEquality (Maybe a) =
TypeError ('Text "Attempt to check equality of nested Maybe." ':$$:
'Text "Beam can only reasonably check equality of a single nesting of Maybe.")
CanCheckMaybeEquality a = ()
instance (HasSqlEqualityCheck be a, CanCheckMaybeEquality a) => HasSqlEqualityCheck be (Maybe a) where
sqlEqE _ _ a b = eqMaybeE a b (sqlEqE (Proxy @a) (Proxy @be) a b)
sqlNeqE _ _ a b = neqMaybeE a b (sqlNeqE (Proxy @a) (Proxy @be) a b)
instance HasSqlEqualityCheck be a => HasSqlEqualityCheck be (SqlSerial a) where
sqlEqE _ = sqlEqE (Proxy @a)
sqlNeqE _ = sqlNeqE (Proxy @a)
sqlEqTriE _ = sqlEqTriE (Proxy @a)
sqlNeqTriE _ = sqlNeqTriE (Proxy @a)
| Class for types that can be compared for quantified equality in the given backend
class HasSqlEqualityCheck be a => HasSqlQuantifiedEqualityCheck be a where
sqlQEqE, sqlQNeqE :: Proxy a -> Proxy be
-> Maybe (BeamSqlBackendExpressionQuantifierSyntax be)
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
-> BeamSqlBackendExpressionSyntax be
sqlQEqE _ _ = eqE
sqlQNeqE _ _ = neqE
instance (HasSqlQuantifiedEqualityCheck syntax a, CanCheckMaybeEquality a) => HasSqlQuantifiedEqualityCheck syntax (Maybe a) where
sqlQEqE _ = sqlQEqE (Proxy @a)
sqlQNeqE _ = sqlQNeqE (Proxy @a)
instance HasSqlQuantifiedEqualityCheck syntax a => HasSqlQuantifiedEqualityCheck syntax (SqlSerial a) where
sqlQEqE _ = sqlQEqE (Proxy @a)
sqlQNeqE _ = sqlQNeqE (Proxy @a)
| Compare two arbitrary expressions ( of the same type ) for equality
instance ( BeamSqlBackend be, HasSqlEqualityCheck be a ) =>
SqlEq (QGenExpr context be s) (QGenExpr context be s a) where
(==.) = qBinOpE (sqlEqE (Proxy @a) (Proxy @be))
(/=.) = qBinOpE (sqlNeqE (Proxy @a) (Proxy @be))
(==?.) = qBinOpE (sqlEqTriE (Proxy @a) (Proxy @be))
(/=?.) = qBinOpE (sqlNeqTriE (Proxy @a) (Proxy @be))
instance ( BeamSqlBackend be, HasSqlQuantifiedEqualityCheck be a ) =>
SqlEqQuantified (QGenExpr context be s) (QQuantified be s a) (QGenExpr context be s a) where
a ==*. QQuantified q b = qBinOpE (sqlQEqE (Proxy @a) (Proxy @be) (Just q)) a (QExpr b)
a /=*. QQuantified q b = qBinOpE (sqlQNeqE (Proxy @a) (Proxy @be) (Just q)) a (QExpr b)
| Constraint synonym to check if two tables can be compared for equality
type HasTableEquality be tbl =
(FieldsFulfillConstraint (HasSqlEqualityCheck be) tbl, Beamable tbl)
type HasTableEqualityNullable be tbl =
(FieldsFulfillConstraintNullable (HasSqlEqualityCheck be) tbl, Beamable tbl)
| Compare two arbitrary ' Beamable ' types containing ' QGenExpr 's for equality .
instance ( BeamSqlBackend be, Beamable tbl
, FieldsFulfillConstraint (HasSqlEqualityCheck be) tbl ) =>
SqlEq (QGenExpr context be s) (tbl (QGenExpr context be s)) where
a ==. b = let (_, e) = runState (zipBeamFieldsM
(\x'@(Columnar' (Columnar' HasConstraint :*: Columnar' x)) (Columnar' y) ->
do modify (\expr ->
case expr of
Nothing -> Just $ x ==. y
Just expr' -> Just $ expr' &&. x ==. y)
return x') (withConstraints @(HasSqlEqualityCheck be) `alongsideTable` a) b) Nothing
in fromMaybe (QExpr (\_ -> valueE (sqlValueSyntax True))) e
a /=. b = not_ (a ==. b)
a ==?. b = let (_, e) = runState (zipBeamFieldsM
(\x'@(Columnar' (Columnar' HasConstraint :*: Columnar' x)) (Columnar' y) ->
do modify (\expr ->
case expr of
Nothing -> Just $ x ==?. y
Just expr' -> Just $ expr' &&?. x ==?. y)
return x') (withConstraints @(HasSqlEqualityCheck be) `alongsideTable` a) b) Nothing
in fromMaybe (QExpr (\_ -> valueE (sqlValueSyntax True))) e
a /=?. b = sqlNot_ (a ==?. b)
instance ( BeamSqlBackend be, Beamable tbl
, FieldsFulfillConstraintNullable (HasSqlEqualityCheck be) tbl )
=> SqlEq (QGenExpr context be s) (tbl (Nullable (QGenExpr context be s))) where
a ==. b = let (_, e) = runState (zipBeamFieldsM
(\x'@(Columnar' (Columnar' HasConstraint :*: Columnar' x)) (Columnar' y) -> do
modify (\expr ->
case expr of
Nothing -> Just $ x ==. y
Just expr' -> Just $ expr' &&. x ==. y)
return x')
(withNullableConstraints @(HasSqlEqualityCheck be) `alongsideTable` a) b) Nothing
in fromMaybe (QExpr (\_ -> valueE (sqlValueSyntax True))) e
a /=. b = not_ (a ==. b)
a ==?. b = let (_, e) = runState (zipBeamFieldsM
(\x'@(Columnar' (Columnar' HasConstraint :*: Columnar' x)) (Columnar' y) ->
do modify (\expr ->
case expr of
Nothing -> Just $ x ==?. y
Just expr' -> Just $ expr' &&?. x ==?. y)
return x') (withNullableConstraints @(HasSqlEqualityCheck be) `alongsideTable` a) b) Nothing
in fromMaybe (QExpr (\_ -> valueE (sqlValueSyntax True))) e
a /=?. b = sqlNot_ (a ==?. b)
number of expressions , no instance is provided for ' Beamable ' types .
class SqlOrd expr e | e -> expr where
(<.), (>.), (<=.), (>=.) :: e -> e -> expr Bool
class SqlOrd expr e =>
SqlOrdQuantified expr quantified e | e -> expr quantified where
(<*.), (>*.), (<=*.), (>=*.) :: e -> quantified -> expr Bool
instance BeamSqlBackend be =>
SqlOrd (QGenExpr context be s) (QGenExpr context be s a) where
(<.) = qBinOpE (ltE Nothing)
(>.) = qBinOpE (gtE Nothing)
(<=.) = qBinOpE (leE Nothing)
(>=.) = qBinOpE (geE Nothing)
instance BeamSqlBackend be =>
SqlOrdQuantified (QGenExpr context be s) (QQuantified be s a) (QGenExpr context be s a) where
a <*. QQuantified q b = qBinOpE (ltE (Just q)) a (QExpr b)
a <=*. QQuantified q b = qBinOpE (leE (Just q)) a (QExpr b)
a >*. QQuantified q b = qBinOpE (gtE (Just q)) a (QExpr b)
a >=*. QQuantified q b = qBinOpE (geE (Just q)) a (QExpr b)
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Text
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Integer
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Int
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Int8
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Int16
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Int32
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Int64
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Word
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Word8
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Word16
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Word32
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Word64
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Double
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Float
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Bool
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) UTCTime
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) LocalTime
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) Day
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlEqualityCheck (MockSqlBackend cmd) TimeOfDay
instance ( BeamSqlBackend (MockSqlBackend cmd)
, HasSqlEqualityCheck (MockSqlBackend cmd) a
) => HasSqlEqualityCheck (MockSqlBackend cmd) (Tagged t a)
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Text
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Integer
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Int
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Int8
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Int16
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Int32
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Int64
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Word
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Word8
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Word16
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Word32
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Word64
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Double
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Float
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Bool
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) UTCTime
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) LocalTime
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) Day
instance BeamSqlBackend (MockSqlBackend cmd) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) TimeOfDay
instance ( BeamSqlBackend (MockSqlBackend cmd)
, HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) a
) => HasSqlQuantifiedEqualityCheck (MockSqlBackend cmd) (Tagged t a)
|
222ad43728ce43e7ff2966ef0609e5d8446ff377a5939b9216705055ca86dac2 | clj-puredata/clj-puredata | parse.clj | (ns clj-puredata.parse
"Facilites for parsing hiccup-style PureData node definitions into Clojure maps, and automatically generating connection entities as needed."
(:require [clojure.test :as t]
[clj-puredata.common :refer :all]
[clj-puredata.layout :as l]))
(defonce counter (atom 0))
(def parse-context
(atom []))
(defn- current-context
"Return last index found in `parse-context`."
[]
(dec (count @parse-context)))
(defn- update-in-parse-context
"Update the most recent context in `parse-context`."
[key & rest]
(apply swap! parse-context update-in [(current-context) key] rest))
(defn- processed?
"Check if the node is part of the set `:processed-node-ids`."
[node]
((:processed-node-ids (last @parse-context)) (:unique-id node)))
(defn- get-processed-id
[node]
(assoc node :id (processed? node)))
(defn- record-as-processed
"Remember the NODE :id as processed inside the current context."
[node]
( swap ! parse - context update - in [ ( current - context ) : processed - node - ids ] (: i d node ) )
(update-in-parse-context :processed-node-ids assoc (:unique-id node) (:id node)))
(defn- node-or-explicit-skip?
"When determining the target inlet of a connection, the source nodes argument position is consulted.
An argument of NIL is interpreted as explicitly 'skipping' an inlet.
Any other arguments (literals/numbers/strings) are ignored in this count."
[x]
(or (node? x) (other? x) (nil? x)))
(defn setup-parse-context []
(swap! parse-context conj {:current-node-id 0
:lines []
:processed-node-ids {}}))
(defn teardown-parse-context []
(try
(swap! parse-context pop)
(catch IllegalStateException e
[])))
(defn- add-element!
"Add NODE to the current PARSE-CONTEXT."
[e]
(update-in-parse-context :lines conj e)
e)
(defn- dispense-unique-id
[]
(swap! counter inc))
(defn- dispense-node-id
"When a PARSE-CONTEXT is active, dispense one new (running) index."
([]
(if-let [id (:current-node-id (last @parse-context))]
(do (update-in-parse-context :current-node-id inc)
id)
-1))
([node]
(if (other? node)
node
(merge node {:id (dispense-node-id)}))))
(defn- resolve-other
"Try to find the referenced node in the current PARSE-CONTEXT."
[other]
(let [solve (first (filter #(= (:other other) (get-in % [:options :name]))
((last @parse-context) :lines)))]
(if (nil? solve)
(throw (Exception. (str "Cannot resolve other node " other)))
solve)))
(defn- resolve-all-other!
"Resolve references to OTHER nodes in connections with the actual node ids.
Called by IN-CONTEXT once all nodes have been walked."
[]
(update-in-parse-context :lines
(fn [lines]
(vec (for [l lines]
(cond
(node? l) l
(connection? l) (let [from (get-in l [:from-node :id])
to (get-in l [:to-node :id])]
(cond-> l
(other? from) (assoc-in [:from-node :id] (:id (resolve-other from)))
(other? to) (assoc-in [:to-node :id] (:id (resolve-other to)))))
:else l))))))
#_(defn- assoc-layout
[layout line]
(if (node? line)
(let [pos (first (filter #(= (str (:id line)) (:text %)) layout))]
(if (and pos
(nil? (get-in line [:options :x]))
(nil? (get-in line [:options :y])))
(-> line
(assoc-in [:options :x] (+ 5 (:xpos pos)))
(assoc-in [:options :y] (+ 5 (:ypos pos)))
(assoc :auto-layout true))
line))
line))
#_(defn layout-lines
[lines]
(let [connections (filter #(= :connection (:type %)) lines)
edges (map #(vector (get-in % [:from-node :id])
(get-in % [:to-node :id]))
connections)]
(if (empty? edges)
lines
(mapv (partial assoc-layout (v/layout-graph v/image-dim edges {} true))
lines))))
(defn- sort-lines
[lines]
(->> lines
(sort-by :id)
(sort-by (comp :id :from-node))
vec))
(defn- subs-trailing-dash
"In strings > 1 character containing a trailing dash \"-\", substitute a tilde \"~\"."
[op]
(clojure.string/replace op #"^(.+)-$" "$1~"))
(defn- op-from-kw
"Keyword -> string, e.g. :+ -> \"+\".
Turns keywords containing trailing dashes into strings with trailing tildes, e.g. :osc- -> \"osc~\".
Recognizes & passes strings untouched."
[op-kw]
(if (keyword? op-kw)
(subs-trailing-dash (name op-kw))
(str op-kw)))
(defn- remove-node-args
[node]
(update node :args (comp vec (partial remove node-or-explicit-skip?))))
(defn- connection
[from-node to-node inlet]
{:type :connection
:from-node {:id (cond-> from-node (not (other? from-node)) :id) ;;(if (other? from-node) from-node (:id from-node))
:outlet (:outlet from-node 0)} ; if :outlet is defined, use it, else use 0
:to-node {:id (cond-> to-node (not (other? to-node)) :id)
if : inlet is defined , use it , else use INLET parameter ( defaults to argument position )
(declare walk-node!)
(defn- walk-node-args!
[node]
(let [node-or-nil-list (filter node-or-explicit-skip? (:args node))]
(when (not (empty? node-or-nil-list))
(doall (map-indexed (fn [arg-pos arg] (when (or (node? arg) (other? arg))
(add-element! (connection (walk-node! arg) node arg-pos))))
node-or-nil-list)))))
(defn- walk-node!
"The main, recursive function responsible for adding nodes and connections to the PARSE-CONTEXT.
Respects special cases for OTHER, INLET and OUTLET nodes."
([node]
(cond
(other? node) node
(processed? node) (get-processed-id node)
(user-connection? node) (add-element! (connection (walk-node! (:from node))
(walk-node! (:to node))
0))
:else (let [id-node (dispense-node-id node)]
(record-as-processed id-node)
(add-element! (remove-node-args id-node))
(walk-node-args! id-node)
id-node))))
(defn lines
"Set up fresh `parse-context`, evaluate NODES, return lines ready for translation.
Assumes NODES is a list."
[nodes]
(assert (or (node? nodes)
(user-connection? nodes)
(and (seq? nodes)
(every? #(or (node? %)
(user-connection? %))
nodes))))
(do
(setup-parse-context)
(doall (map walk-node! (if (seq? nodes) nodes (vector nodes))))
(resolve-all-other!)
(let [lines (-> (last @parse-context)
:lines
l/layout-lines
sort-lines)]
(teardown-parse-context)
lines)))
(defn- pd-single
"Turn hiccup vectors into trees of node maps, ready to be walked by WALK-TREE!."
[form]
(cond
(hiccup? form)
(let [[options args] (if (and (map? (second form))
(not (node? (second form)))
(not (other? (second form))))
[(second form) (drop 2 form)]
[{} (rest form)])
op (op-from-kw (first form))
FIXME : need to use unique i d for determining if node was processed ( user might bind a node and reuse it ) , but current implementation only assigns ids by walking the composed tree ( not on first creation ) . this means that reuse of nodes requires use of ` other ` . ( pd 3/7/2021 )
parsed-args (mapv pd-single args)
node {:type :node :op op :unique-id unique-id :options options :args parsed-args}]
node)
(literal? form) form
(node? form) form
;;(connection? form) form
(user-connection? form) form
(other? form) form
(map? form) (throw (Exception. (str "Parser encountered map that is a) not a node and b) not an options map (e.g. not the second element in a hiccup vector): " form)))
(fn? form) (form)
(or (list? form)
(vector? form)
(seq? form)) (doall (map pd-single form))
:else (throw (Exception. (str "Parser does not recognize form: " form)))))
(defn pd
"Turn hiccup into nodes. Returns single node or list of nodes depending on input."
[& forms]
(let [r (doall (map pd-single forms))]
(if (> (count r) 1)
r
(first r))))
(defn- assoc-node-or-hiccup
[node which n]
(assert (number? n))
(assert (or (hiccup? node)
(seq? node)
(node? node)
(other? node)))
(assoc (cond (hiccup? node) (first (pd node))
(seq? node) (first node)
:else node)
which n))
(defn outlet
"Use OUTLET to specify the intended outlet of a connection.
The default outlet is 0, which is not always what you want.
Operates on hiccup or nodes.
`(pd [:+ (outlet [:moses ...] 1)])`
`(pd [:+ (outlet (pd [:moses ...]) 1)])
The default outlet is 0."
[node n]
(assert (or (node? node)
(other? node)
(hiccup? node)))
(assert (number? n))
(assoc-node-or-hiccup (if (hiccup? node) (pd node) node) :outlet n))
(defn inlet
"Use INLET to specify the intended inlet for a connection.
E.g. `(pd [:/ 1 (inlet (pd ...) 1)])`. The default inlet is determined
by the source node argument position (not counting literals, only
NIL and other nodes) (e.g. 0 in the previous example)."
[node n]
(assert (or (node? node)
(other? node)
(hiccup? node)))
(assert (number? n))
(assoc-node-or-hiccup (if (hiccup? node) (pd node) node) :inlet n))
(defn other
"An OTHER is a special node that refers to another node.
It is a placeholder for the node with `:name` = NAME in its `:options`
map. It is instrumental to craft mutually connected nodes, and can
be used to reduce the number of LETs in patch definitions. OTHER
nodes are de-referenced after the entire patch has been walked, so
forward reference is possible.
Examples:
```clojure
connecting the same node to 2 inlets
(pd [:osc- {:name \"foo\"} 200])
(pd [:dac- (other \"foo\") (other \"foo\")])
```
```clojure
;; circular connections
(pd [:float {:name 'f} [:msg \"bang\"] [:+ 1 (other 'f)]])
```
```clojure
;; connecting to nodes ahead of their definition
(pd [:float {:name 'f} [:msg \"bang\"] (other '+)])
(pd [:+ {:name '+} 1 (other 'f)])
```"
[reference]
{:type :other
:other reference})
(defn connect
([from-node outlet_ to-node inlet_]
{:type :user-connection
:from (-> from-node
(outlet outlet_)
(inlet inlet_))
:to (pd to-node)})
([from-node to-node]
(connect from-node (:outlet from-node 0) to-node (:inlet from-node 0))))
;; (connect-to to-node from-node1 from-node2 from-node3 ...)
;; (connect-from from-node to-node1 to-node2 to-node3
| null | https://raw.githubusercontent.com/clj-puredata/clj-puredata/bb4879891b4960fee0fc511d4289a967ff0be393/src/clj_puredata/parse.clj | clojure | (if (other? from-node) from-node (:id from-node))
if :outlet is defined, use it, else use 0
(connection? form) form
circular connections
connecting to nodes ahead of their definition
(connect-to to-node from-node1 from-node2 from-node3 ...)
(connect-from from-node to-node1 to-node2 to-node3 | (ns clj-puredata.parse
"Facilites for parsing hiccup-style PureData node definitions into Clojure maps, and automatically generating connection entities as needed."
(:require [clojure.test :as t]
[clj-puredata.common :refer :all]
[clj-puredata.layout :as l]))
(defonce counter (atom 0))
(def parse-context
(atom []))
(defn- current-context
"Return last index found in `parse-context`."
[]
(dec (count @parse-context)))
(defn- update-in-parse-context
"Update the most recent context in `parse-context`."
[key & rest]
(apply swap! parse-context update-in [(current-context) key] rest))
(defn- processed?
"Check if the node is part of the set `:processed-node-ids`."
[node]
((:processed-node-ids (last @parse-context)) (:unique-id node)))
(defn- get-processed-id
[node]
(assoc node :id (processed? node)))
(defn- record-as-processed
"Remember the NODE :id as processed inside the current context."
[node]
( swap ! parse - context update - in [ ( current - context ) : processed - node - ids ] (: i d node ) )
(update-in-parse-context :processed-node-ids assoc (:unique-id node) (:id node)))
(defn- node-or-explicit-skip?
"When determining the target inlet of a connection, the source nodes argument position is consulted.
An argument of NIL is interpreted as explicitly 'skipping' an inlet.
Any other arguments (literals/numbers/strings) are ignored in this count."
[x]
(or (node? x) (other? x) (nil? x)))
(defn setup-parse-context []
(swap! parse-context conj {:current-node-id 0
:lines []
:processed-node-ids {}}))
(defn teardown-parse-context []
(try
(swap! parse-context pop)
(catch IllegalStateException e
[])))
(defn- add-element!
"Add NODE to the current PARSE-CONTEXT."
[e]
(update-in-parse-context :lines conj e)
e)
(defn- dispense-unique-id
[]
(swap! counter inc))
(defn- dispense-node-id
"When a PARSE-CONTEXT is active, dispense one new (running) index."
([]
(if-let [id (:current-node-id (last @parse-context))]
(do (update-in-parse-context :current-node-id inc)
id)
-1))
([node]
(if (other? node)
node
(merge node {:id (dispense-node-id)}))))
(defn- resolve-other
"Try to find the referenced node in the current PARSE-CONTEXT."
[other]
(let [solve (first (filter #(= (:other other) (get-in % [:options :name]))
((last @parse-context) :lines)))]
(if (nil? solve)
(throw (Exception. (str "Cannot resolve other node " other)))
solve)))
(defn- resolve-all-other!
"Resolve references to OTHER nodes in connections with the actual node ids.
Called by IN-CONTEXT once all nodes have been walked."
[]
(update-in-parse-context :lines
(fn [lines]
(vec (for [l lines]
(cond
(node? l) l
(connection? l) (let [from (get-in l [:from-node :id])
to (get-in l [:to-node :id])]
(cond-> l
(other? from) (assoc-in [:from-node :id] (:id (resolve-other from)))
(other? to) (assoc-in [:to-node :id] (:id (resolve-other to)))))
:else l))))))
#_(defn- assoc-layout
[layout line]
(if (node? line)
(let [pos (first (filter #(= (str (:id line)) (:text %)) layout))]
(if (and pos
(nil? (get-in line [:options :x]))
(nil? (get-in line [:options :y])))
(-> line
(assoc-in [:options :x] (+ 5 (:xpos pos)))
(assoc-in [:options :y] (+ 5 (:ypos pos)))
(assoc :auto-layout true))
line))
line))
#_(defn layout-lines
[lines]
(let [connections (filter #(= :connection (:type %)) lines)
edges (map #(vector (get-in % [:from-node :id])
(get-in % [:to-node :id]))
connections)]
(if (empty? edges)
lines
(mapv (partial assoc-layout (v/layout-graph v/image-dim edges {} true))
lines))))
(defn- sort-lines
  "Order lines by :id, then (stably) by the :id of the connection source."
  [lines]
  (vec (sort-by (comp :id :from-node)
                (sort-by :id lines))))

(defn- subs-trailing-dash
  "In strings > 1 character containing a trailing dash \"-\", substitute a tilde \"~\"."
  [op]
  ;; Same effect as replacing #"^(.+)-$" with "$1~": only strings of
  ;; length >= 2 that end in a dash are rewritten.
  (if (and (> (count op) 1)
           (clojure.string/ends-with? op "-"))
    (str (subs op 0 (dec (count op))) "~")
    op))

(defn- op-from-kw
  "Keyword -> string, e.g. :+ -> \"+\".
  Turns keywords containing trailing dashes into strings with trailing tildes, e.g. :osc- -> \"osc~\".
  Recognizes & passes strings untouched."
  [op-kw]
  (if-not (keyword? op-kw)
    (str op-kw)
    (-> op-kw name subs-trailing-dash)))

(defn- remove-node-args
  "Drop node-valued and explicit-skip (nil) arguments, keeping only
  literal args, as a vector."
  [node]
  (update node :args (fn [args] (vec (remove node-or-explicit-skip? args)))))
(defn- connection
[from-node to-node inlet]
{:type :connection
:to-node {:id (cond-> to-node (not (other? to-node)) :id)
if : inlet is defined , use it , else use INLET parameter ( defaults to argument position )
(declare walk-node!)

(defn- walk-node-args!
  "Connect every node-valued argument of NODE to NODE. The argument
  position (counting only nodes/others/nils, not literals) selects the
  target inlet."
  [node]
  (let [node-or-nil-list (filter node-or-explicit-skip? (:args node))]
    (when (not (empty? node-or-nil-list))
      ;; NIL entries still occupy an argument position (they \"skip\" an
      ;; inlet) but produce no connection themselves.
      (doall (map-indexed (fn [arg-pos arg] (when (or (node? arg) (other? arg))
                                              (add-element! (connection (walk-node! arg) node arg-pos))))
                          node-or-nil-list)))))

(defn- walk-node!
  "The main, recursive function responsible for adding nodes and connections to the PARSE-CONTEXT.
  Respects special cases for OTHER, INLET and OUTLET nodes."
  ([node]
   (cond
     ;; OTHER placeholders are left as-is; RESOLVE-ALL-OTHER! patches
     ;; them once every node has been walked.
     (other? node) node
     ;; A node already walked in this context is not added twice.
     (processed? node) (get-processed-id node)
     (user-connection? node) (add-element! (connection (walk-node! (:from node))
                                                       (walk-node! (:to node))
                                                       0))
     :else (let [id-node (dispense-node-id node)]
             (record-as-processed id-node)
             ;; Node args are stripped before storing; the connections
             ;; they imply are added right after.
             (add-element! (remove-node-args id-node))
             (walk-node-args! id-node)
             id-node))))

(defn lines
  "Set up fresh `parse-context`, evaluate NODES, return lines ready for translation.
  Assumes NODES is a list."
  [nodes]
  (assert (or (node? nodes)
              (user-connection? nodes)
              (and (seq? nodes)
                   (every? #(or (node? %)
                                (user-connection? %))
                           nodes))))
  (do
    (setup-parse-context)
    (doall (map walk-node! (if (seq? nodes) nodes (vector nodes))))
    (resolve-all-other!)
    ;; NOTE(review): l/layout-lines comes from an external namespace; the
    ;; commented-out local layout-lines above is apparently superseded.
    (let [lines (-> (last @parse-context)
                    :lines
                    l/layout-lines
                    sort-lines)]
      (teardown-parse-context)
      lines)))
(defn- pd-single
  "Turn hiccup vectors into trees of node maps, ready to be walked by WALK-TREE!."
  [form]
  (cond
    (hiccup? form)
    ;; A map in second position is an options map unless it is itself a
    ;; node or an OTHER reference.
    (let [[options args] (if (and (map? (second form))
                                  (not (node? (second form)))
                                  (not (other? (second form))))
                           [(second form) (drop 2 form)]
                           [{} (rest form)])
          op (op-from-kw (first form))
          ;; FIXME: need to use unique id for determining if node was
          ;; processed (user might bind a node and reuse it), but current
          ;; implementation only assigns ids by walking the composed tree
          ;; (not on first creation). this means that reuse of nodes
          ;; requires use of `other`. (pd 3/7/2021)
          ;; BUG FIX(review): `unique-id` was referenced in the node map
          ;; below but never bound; restore the binding via
          ;; dispense-unique-id, which exists for exactly this purpose.
          unique-id (dispense-unique-id)
          parsed-args (mapv pd-single args)
          node {:type :node :op op :unique-id unique-id :options options :args parsed-args}]
      node)
    (literal? form) form
    (node? form) form
    (user-connection? form) form
    (other? form) form
    (map? form) (throw (Exception. (str "Parser encountered map that is a) not a node and b) not an options map (e.g. not the second element in a hiccup vector): " form)))
    ;; A zero-argument function is called and its result used directly.
    (fn? form) (form)
    (or (list? form)
        (vector? form)
        (seq? form)) (doall (map pd-single form))
    :else (throw (Exception. (str "Parser does not recognize form: " form)))))
(defn pd
  "Turn hiccup into nodes. Returns single node or list of nodes depending on input."
  [& forms]
  (let [parsed (doall (map pd-single forms))]
    ;; More than one form -> return the whole (realized) seq,
    ;; otherwise unwrap the single result.
    (if (next parsed)
      parsed
      (first parsed))))
(defn- assoc-node-or-hiccup
  "Attach WHICH (:inlet or :outlet) = N to a node, first parsing hiccup
  and unwrapping single-element seqs produced by PD."
  [node which n]
  (assert (number? n))
  (assert (or (hiccup? node)
              (seq? node)
              (node? node)
              (other? node)))
  (assoc (cond (hiccup? node) (first (pd node))
               (seq? node) (first node)
               :else node)
         which n))

(defn outlet
  "Use OUTLET to specify the intended outlet of a connection.
  The default outlet is 0, which is not always what you want.
  Operates on hiccup or nodes.
  `(pd [:+ (outlet [:moses ...] 1)])`
  `(pd [:+ (outlet (pd [:moses ...]) 1)])
  The default outlet is 0."
  [node n]
  (assert (or (node? node)
              (other? node)
              (hiccup? node)))
  (assert (number? n))
  (assoc-node-or-hiccup (if (hiccup? node) (pd node) node) :outlet n))

(defn inlet
  "Use INLET to specify the intended inlet for a connection.
  E.g. `(pd [:/ 1 (inlet (pd ...) 1)])`. The default inlet is determined
  by the source node argument position (not counting literals, only
  NIL and other nodes) (e.g. 0 in the previous example)."
  [node n]
  (assert (or (node? node)
              (other? node)
              (hiccup? node)))
  (assert (number? n))
  ;; Note: the :inlet override is stored on the SOURCE node of the
  ;; connection, mirroring how OUTLET stores :outlet.
  (assoc-node-or-hiccup (if (hiccup? node) (pd node) node) :inlet n))

(defn other
  "An OTHER is a special node that refers to another node.
  It is a placeholder for the node with `:name` = NAME in its `:options`
  map. It is instrumental to craft mutually connected nodes, and can
  be used to reduce the number of LETs in patch definitions. OTHER
  nodes are de-referenced after the entire patch has been walked, so
  forward reference is possible.
  Examples:
  ```clojure
  connecting the same node to 2 inlets
  (pd [:osc- {:name \"foo\"} 200])
  (pd [:dac- (other \"foo\") (other \"foo\")])
  ```
  ```clojure
  (pd [:float {:name 'f} [:msg \"bang\"] [:+ 1 (other 'f)]])
  ```
  ```clojure
  (pd [:float {:name 'f} [:msg \"bang\"] (other '+)])
  (pd [:+ {:name '+} 1 (other 'f)])
  ```"
  [reference]
  {:type :other
   :other reference})

(defn connect
  ;; Builds the user-facing connection record; WALK-NODE! later walks
  ;; both endpoints and emits a real connection line.
  ([from-node outlet_ to-node inlet_]
   {:type :user-connection
    :from (-> from-node
              (outlet outlet_)
              (inlet inlet_))
    :to (pd to-node)})
  ([from-node to-node]
   ;; Default to the outlet/inlet already assoc'd on FROM-NODE (or 0).
   (connect from-node (:outlet from-node 0) to-node (:inlet from-node 0))))
|
830c345d0a9c5906c9563ab96bdbe646bcc8abe0767353085a4d2fccd9eb722b | ngerakines/mochevent | mochevent_request.erl | Copyright ( c ) 2007 Mochi Media , Inc.
Copyright ( c ) 2009 < >
%%
%% Permission is hereby granted, free of charge, to any person
%% obtaining a copy of this software and associated documentation
files ( the " Software " ) , to deal in the Software without
%% restriction, including without limitation the rights to use,
%% copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the
%% Software is furnished to do so, subject to the following
%% conditions:
%%
%% The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
%% OTHER DEALINGS IN THE SOFTWARE.
%%
%% @doc A light-weight (hahahaha) request module.
This module is based on the mochiweb_request module developed by
Ippolito of Mochi Media , Inc.
-module(mochevent_request, [Pid, ReqID, Method, RawPath, Version, Headers, Body]).
-export([get_header_value/1, get_primary_header_value/1, get/1]).
-export([recv_body/0, recv_body/1]).
-export([respond/1, ok/1]).
-export([not_found/0, not_found/1]).
-export([redirect/1, redirect/2]).
-export([parse_post/0, parse_qs/0]).
-export([parse_cookie/0, get_cookie_value/1]).
-export([parse_range_request/1]).
-export([body_length/0, server_headers/0, make_code/1, make_version/1]).
-define(SAVE_QS, mochiweb_request_qs).
-define(SAVE_PATH, mochiweb_request_path).
-define(SAVE_BODY_LENGTH, mochiweb_request_body_length).
-define(SAVE_POST, mochiweb_request_post).
-define(SAVE_COOKIE, mochiweb_request_cookie).
%% @doc Look up a request header by key (case handling is delegated to
%% mochiweb_headers).
get_header_value(K) ->
    mochiweb_headers:get_value(K, Headers).

get_primary_header_value(K) ->
    mochiweb_headers:get_primary_value(K, Headers).

%% @doc Accessors mirroring mochiweb_request:get/1. `socket' and `peer'
%% are not available for this transport and exit with `unimplemented'.
get(socket) -> exit(unimplemented);
get(method) -> Method;
get(raw_path) -> RawPath;
get(version) -> Version;
get(headers) -> Headers;
get(peer) -> exit(unimplemented);
get(path) ->
    %% The unquoted path is memoized in the process dictionary.
    case erlang:get(?SAVE_PATH) of
        undefined ->
            {Path0, _, _} = mochiweb_util:urlsplit_path(RawPath),
            Path = mochiweb_util:unquote(Path0),
            put(?SAVE_PATH, Path),
            Path;
        Cached ->
            Cached
    end;
get(body_length) -> erlang:get(?SAVE_BODY_LENGTH);
get(range) ->
    case get_header_value(range) of
        undefined ->
            undefined;
        RawRange ->
            parse_range_request(RawRange)
    end.
%% @doc Determine the request body length from the entity headers.
%% Returns an integer byte count, the atom 'chunked', 'undefined' when
%% no length information is present, or {unknown_transfer_encoding, V}.
body_length() ->
    case get_header_value("transfer-encoding") of
        "chunked" ->
            chunked;
        undefined ->
            %% No transfer-encoding: fall back to Content-Length.
            case get_header_value("content-length") of
                undefined -> undefined;
                Length -> list_to_integer(Length)
            end;
        Unknown ->
            {unknown_transfer_encoding, Unknown}
    end.
%% @doc The full request body is already buffered by the mochevent
%% frontend, so both arities simply return it (the size limit argument
%% of the /1 variant is ignored).
recv_body() ->
    Body.

recv_body(_) ->
    Body.

%% @doc Send a response back through the mochevent gateway process.
%% NOTE(review): headers whose key or value is not a list are silently
%% dropped by the comprehension filter -- presumably the wire format
%% requires binaries built from strings; confirm before relying on
%% binary- or iolist-valued header values.
respond({Code, ResponseHeaders, ResponseBody}) ->
    CleanHeaders = [{list_to_binary(K), list_to_binary(V)} || {K,V} <- ResponseHeaders, is_list(K), is_list(V)],
    Pid ! {ReqID, Code, CleanHeaders, ResponseBody}.

not_found() ->
    not_found([]).

%% @doc Plain-text 404 response.
not_found(ExtraHeaders) ->
    respond({404, [{"Content-Type", "text/plain"} | ExtraHeaders],
             <<"Not found.">>}).

redirect(Path) ->
    redirect(Path, []).

%% @doc Plain-text 302 redirect to Path.
redirect(Path, ExtraHeaders) ->
    respond({302, % using a Found instead of See Other as per compatibility note -sec10.html#sec10.3.3
             [{"Content-Type", "text/plain"},
              {"Location", Path} | ExtraHeaders],
             <<"Found.">>}).

%% @doc 200/206 response helper; honors a Range header when present.
ok({ContentType, Body}) ->
    ok({ContentType, [], Body});
ok({ContentType, ResponseHeaders, Body}) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    case THIS:get(range) of
        X when X =:= undefined; X =:= fail ->
            HResponse1 = mochiweb_headers:enter("Content-Type", ContentType, HResponse),
            respond({200, HResponse1, Body});
        Ranges ->
            {PartList, Size} = range_parts(Body, Ranges),
            case PartList of
                [] -> %% no valid ranges
                    HResponse1 = mochiweb_headers:enter("Content-Type",
                                                        ContentType,
                                                        HResponse),
                    %% could be 416, for now we'll just return 200
                    respond({200, HResponse1, Body});
                PartList ->
                    {RangeHeaders, RangeBody} =
                        parts_to_body(PartList, ContentType, Size),
                    HResponse1 = mochiweb_headers:enter_from_list(
                                   [{"Accept-Ranges", "bytes"} |
                                    RangeHeaders],
                                   HResponse),
                    respond({206, HResponse1, RangeBody})
            end
    end.
%% @doc Parse the query string portion of the raw path into a proplist.
%% Memoized in the process dictionary under ?SAVE_QS.
parse_qs() ->
    case erlang:get(?SAVE_QS) of
        undefined ->
            {_, QueryString, _} = mochiweb_util:urlsplit_path(RawPath),
            Parsed = mochiweb_util:parse_qs(QueryString),
            put(?SAVE_QS, Parsed),
            Parsed;
        Cached ->
            Cached
    end.

%% @doc Look up a single cookie value by key.
get_cookie_value(Key) ->
    proplists:get_value(Key, parse_cookie()).

%% @doc Parse the Cookie header (empty list when absent). Memoized in
%% the process dictionary under ?SAVE_COOKIE.
parse_cookie() ->
    case erlang:get(?SAVE_COOKIE) of
        undefined ->
            Cookies = case get_header_value("cookie") of
                          undefined ->
                              [];
                          Value ->
                              mochiweb_cookies:parse_cookie(Value)
                      end,
            put(?SAVE_COOKIE, Cookies),
            Cookies;
        Cached ->
            Cached
    end.

%% @doc Parse an application/x-www-form-urlencoded body into a proplist
%% (empty list for other content types or a missing body). Memoized in
%% the process dictionary under ?SAVE_POST.
parse_post() ->
    case erlang:get(?SAVE_POST) of
        undefined ->
            Parsed = case recv_body() of
                         undefined ->
                             [];
                         Binary ->
                             case get_primary_header_value("content-type") of
                                 "application/x-www-form-urlencoded" ++ _ ->
                                     mochiweb_util:parse_qs(Binary);
                                 _ ->
                                     []
                             end
                     end,
            put(?SAVE_POST, Parsed),
            Parsed;
        Cached ->
            Cached
    end.
%% @doc Default response headers identifying the server.
server_headers() ->
    [{"Server", "RuPl/0.1"},
     {"Date", httpd_util:rfc1123_date()}].

%% @doc Coerce an atom/integer/iodata value into iodata.
make_io(Atom) when is_atom(Atom) ->
    atom_to_list(Atom);
make_io(Integer) when is_integer(Integer) ->
    integer_to_list(Integer);
make_io(Io) when is_list(Io); is_binary(Io) ->
    Io.

%% @doc Render a status-line code: integers get the standard reason
%% phrase appended; iodata is passed through unchanged.
make_code(X) when is_integer(X) ->
    [integer_to_list(X), [" " | httpd_util:reason_phrase(X)]];
make_code(Io) when is_list(Io); is_binary(Io) ->
    Io.

%% @doc HTTP version prefix for the status line (trailing space
%% included).
make_version({1, 0}) ->
    <<"HTTP/1.0 ">>;
make_version(_) ->
    <<"HTTP/1.1 ">>.
%% @doc Build {Headers, Body} for a 206 response. A single range
%% produces a plain body plus a Content-Range header; multiple ranges
%% produce a multipart/byteranges body.
parts_to_body([{Start, End, Body}], ContentType, Size) ->
    %% return body for a range reponse with a single body
    HeaderList = [{"Content-Type", ContentType},
                  {"Content-Range",
                   ["bytes ",
                    make_io(Start), "-", make_io(End),
                    "/", make_io(Size)]}],
    {HeaderList, Body};
parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) ->
    %% BUG FIX(review): crypto:rand_bytes/1 was deprecated and later
    %% removed from OTP; strong_rand_bytes/1 is the supported
    %% replacement for generating the MIME boundary.
    Boundary = mochihex:to_hex(crypto:strong_rand_bytes(8)),
    HeaderList = [{"Content-Type",
                   ["multipart/byteranges; ",
                    "boundary=", Boundary]}],
    MultiPartBody = multipart_body(BodyList, ContentType, Boundary, Size),
    {HeaderList, MultiPartBody}.
%% @doc Render the multipart/byteranges body; the base case emits the
%% closing MIME boundary.
multipart_body([], _ContentType, Boundary, _Size) ->
    ["--", Boundary, "--\r\n"];
multipart_body([{Start, End, Body} | BodyList], ContentType, Boundary, Size) ->
    ["--", Boundary, "\r\n",
     "Content-Type: ", ContentType, "\r\n",
     "Content-Range: ",
     "bytes ", make_io(Start), "-", make_io(End),
     "/", make_io(Size), "\r\n\r\n",
     Body, "\r\n"
     | multipart_body(BodyList, ContentType, Boundary, Size)].

%% @doc Size of an open IO device, restoring the position to the start.
iodevice_size(IoDevice) ->
    {ok, Size} = file:position(IoDevice, eof),
    {ok, 0} = file:position(IoDevice, bof),
    Size.

%% @doc Split a body (either {file, IoDevice} or iodata) into
%% {[{Start, End, PartialBody}], Size} for the requested ranges.
%% Ranges that range_skip_length/2 rejects are dropped.
range_parts({file, IoDevice}, Ranges) ->
    Size = iodevice_size(IoDevice),
    F = fun (Spec, Acc) ->
            case range_skip_length(Spec, Size) of
                invalid_range ->
                    Acc;
                V ->
                    [V | Acc]
            end
        end,
    %% foldr preserves the order the client requested the ranges in.
    LocNums = lists:foldr(F, [], Ranges),
    {ok, Data} = file:pread(IoDevice, LocNums),
    Bodies = lists:zipwith(fun ({Skip, Length}, PartialBody) ->
                               {Skip, Skip + Length - 1, PartialBody}
                           end,
                           LocNums, Data),
    {Bodies, Size};
range_parts(Body0, Ranges) ->
    Body = iolist_to_binary(Body0),
    Size = size(Body),
    F = fun(Spec, Acc) ->
            case range_skip_length(Spec, Size) of
                invalid_range ->
                    Acc;
                {Skip, Length} ->
                    %% Slice the requested window out of the binary.
                    <<_:Skip/binary, PartialBody:Length/binary, _/binary>> = Body,
                    [{Skip, Skip + Length - 1, PartialBody} | Acc]
            end
        end,
    {lists:foldr(F, [], Ranges), Size}.
%% @doc Translate one parsed range spec into {Skip, Length} relative to
%% an entity of the given Size, or the atom 'invalid_range'.
range_skip_length({none, R}, Size) when R =< Size, R >= 0 ->
    %% suffix range "-R": the last R bytes
    {Size - R, R};
range_skip_length({none, _OutOfRange}, Size) ->
    %% oversized suffix: serve the whole entity
    {0, Size};
range_skip_length({R, none}, Size) when R >= 0, R < Size ->
    %% open-ended range "R-": from R through the end
    {R, Size - R};
range_skip_length({_OutOfRange, none}, _Size) ->
    invalid_range;
range_skip_length({Start, End}, Size) when 0 =< Start, Start =< End, End < Size ->
    %% bounded range "Start-End" (inclusive on both ends)
    {Start, End - Start + 1};
range_skip_length({_OutOfRange, _End}, _Size) ->
    invalid_range.
%% @doc Parse a raw Range header value ("bytes=...") into a list of
%% {Start, End} / {Start, none} / {none, Suffix} specs, or the atom
%% 'fail' when the header is malformed (any exception is mapped to
%% 'fail' by the surrounding try).
parse_range_request(RawRange) when is_list(RawRange) ->
    try
        "bytes=" ++ RangeString = RawRange,
        Ranges = string:tokens(RangeString, ","),
        lists:map(fun ("-" ++ V) ->
                      %% suffix form "-N": the last N bytes
                      {none, list_to_integer(V)};
                  (R) ->
                      case string:tokens(R, "-") of
                          [S1, S2] ->
                              {list_to_integer(S1), list_to_integer(S2)};
                          [S] ->
                              {list_to_integer(S), none}
                      end
                  end,
                  Ranges)
    catch
        _:_ ->
            fail
    end.
| null | https://raw.githubusercontent.com/ngerakines/mochevent/f526222357308b580835ad1bc61362a50ed1251b/src/mochevent_request.erl | erlang |
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
@doc A light-weight (hahahaha) request module.
using a Found instead of See Other as per compatibility note -sec10.html#sec10.3.3
no valid ranges
return body for a range reponse with a single body | Copyright ( c ) 2007 Mochi Media , Inc.
Copyright ( c ) 2009 < >
files ( the " Software " ) , to deal in the Software without
copies of the Software , and to permit persons to whom the
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
This module is based on the mochiweb_request module developed by
Ippolito of Mochi Media , Inc.
-module(mochevent_request, [Pid, ReqID, Method, RawPath, Version, Headers, Body]).
-export([get_header_value/1, get_primary_header_value/1, get/1]).
-export([recv_body/0, recv_body/1]).
-export([respond/1, ok/1]).
-export([not_found/0, not_found/1]).
-export([redirect/1, redirect/2]).
-export([parse_post/0, parse_qs/0]).
-export([parse_cookie/0, get_cookie_value/1]).
-export([parse_range_request/1]).
-export([body_length/0, server_headers/0, make_code/1, make_version/1]).
-define(SAVE_QS, mochiweb_request_qs).
-define(SAVE_PATH, mochiweb_request_path).
-define(SAVE_BODY_LENGTH, mochiweb_request_body_length).
-define(SAVE_POST, mochiweb_request_post).
-define(SAVE_COOKIE, mochiweb_request_cookie).
get_header_value(K) ->
mochiweb_headers:get_value(K, Headers).
get_primary_header_value(K) ->
mochiweb_headers:get_primary_value(K, Headers).
get(socket) -> exit(unimplemented);
get(method) -> Method;
get(raw_path) -> RawPath;
get(version) -> Version;
get(headers) -> Headers;
get(peer) -> exit(unimplemented);
get(path) ->
case erlang:get(?SAVE_PATH) of
undefined ->
{Path0, _, _} = mochiweb_util:urlsplit_path(RawPath),
Path = mochiweb_util:unquote(Path0),
put(?SAVE_PATH, Path),
Path;
Cached ->
Cached
end;
get(body_length) -> erlang:get(?SAVE_BODY_LENGTH);
get(range) ->
case get_header_value(range) of
undefined ->
undefined;
RawRange ->
parse_range_request(RawRange)
end.
body_length() ->
case get_header_value("transfer-encoding") of
undefined ->
case get_header_value("content-length") of
undefined ->
undefined;
Length ->
list_to_integer(Length)
end;
"chunked" ->
chunked;
Unknown ->
{unknown_transfer_encoding, Unknown}
end.
recv_body() ->
Body.
recv_body(_) ->
Body.
respond({Code, ResponseHeaders, ResponseBody}) ->
CleanHeaders = [{list_to_binary(K), list_to_binary(V)} || {K,V} <- ResponseHeaders, is_list(K), is_list(V)],
Pid ! {ReqID, Code, CleanHeaders, ResponseBody}.
not_found() ->
not_found([]).
not_found(ExtraHeaders) ->
respond({404, [{"Content-Type", "text/plain"} | ExtraHeaders],
<<"Not found.">>}).
redirect(Path) ->
redirect(Path, []).
redirect(Path, ExtraHeaders) ->
[{"Content-Type", "text/plain"},
{"Location", Path} | ExtraHeaders],
<<"Found.">>}).
ok({ContentType, Body}) ->
ok({ContentType, [], Body});
ok({ContentType, ResponseHeaders, Body}) ->
HResponse = mochiweb_headers:make(ResponseHeaders),
case THIS:get(range) of
X when X =:= undefined; X =:= fail ->
HResponse1 = mochiweb_headers:enter("Content-Type", ContentType, HResponse),
respond({200, HResponse1, Body});
Ranges ->
{PartList, Size} = range_parts(Body, Ranges),
case PartList of
HResponse1 = mochiweb_headers:enter("Content-Type",
ContentType,
HResponse),
could be 416 , for now we 'll just return 200
respond({200, HResponse1, Body});
PartList ->
{RangeHeaders, RangeBody} =
parts_to_body(PartList, ContentType, Size),
HResponse1 = mochiweb_headers:enter_from_list(
[{"Accept-Ranges", "bytes"} |
RangeHeaders],
HResponse),
respond({206, HResponse1, RangeBody})
end
end.
parse_qs() ->
case erlang:get(?SAVE_QS) of
undefined ->
{_, QueryString, _} = mochiweb_util:urlsplit_path(RawPath),
Parsed = mochiweb_util:parse_qs(QueryString),
put(?SAVE_QS, Parsed),
Parsed;
Cached ->
Cached
end.
get_cookie_value(Key) ->
proplists:get_value(Key, parse_cookie()).
parse_cookie() ->
case erlang:get(?SAVE_COOKIE) of
undefined ->
Cookies = case get_header_value("cookie") of
undefined ->
[];
Value ->
mochiweb_cookies:parse_cookie(Value)
end,
put(?SAVE_COOKIE, Cookies),
Cookies;
Cached ->
Cached
end.
parse_post() ->
case erlang:get(?SAVE_POST) of
undefined ->
Parsed = case recv_body() of
undefined ->
[];
Binary ->
case get_primary_header_value("content-type") of
"application/x-www-form-urlencoded" ++ _ ->
mochiweb_util:parse_qs(Binary);
_ ->
[]
end
end,
put(?SAVE_POST, Parsed),
Parsed;
Cached ->
Cached
end.
server_headers() ->
[{"Server", "RuPl/0.1"},
{"Date", httpd_util:rfc1123_date()}].
make_io(Atom) when is_atom(Atom) ->
atom_to_list(Atom);
make_io(Integer) when is_integer(Integer) ->
integer_to_list(Integer);
make_io(Io) when is_list(Io); is_binary(Io) ->
Io.
make_code(X) when is_integer(X) ->
[integer_to_list(X), [" " | httpd_util:reason_phrase(X)]];
make_code(Io) when is_list(Io); is_binary(Io) ->
Io.
make_version({1, 0}) ->
<<"HTTP/1.0 ">>;
make_version(_) ->
<<"HTTP/1.1 ">>.
parts_to_body([{Start, End, Body}], ContentType, Size) ->
HeaderList = [{"Content-Type", ContentType},
{"Content-Range",
["bytes ",
make_io(Start), "-", make_io(End),
"/", make_io(Size)]}],
{HeaderList, Body};
parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) ->
Boundary = mochihex:to_hex(crypto:rand_bytes(8)),
HeaderList = [{"Content-Type",
["multipart/byteranges; ",
"boundary=", Boundary]}],
MultiPartBody = multipart_body(BodyList, ContentType, Boundary, Size),
{HeaderList, MultiPartBody}.
multipart_body([], _ContentType, Boundary, _Size) ->
["--", Boundary, "--\r\n"];
multipart_body([{Start, End, Body} | BodyList], ContentType, Boundary, Size) ->
["--", Boundary, "\r\n",
"Content-Type: ", ContentType, "\r\n",
"Content-Range: ",
"bytes ", make_io(Start), "-", make_io(End),
"/", make_io(Size), "\r\n\r\n",
Body, "\r\n"
| multipart_body(BodyList, ContentType, Boundary, Size)].
iodevice_size(IoDevice) ->
{ok, Size} = file:position(IoDevice, eof),
{ok, 0} = file:position(IoDevice, bof),
Size.
range_parts({file, IoDevice}, Ranges) ->
Size = iodevice_size(IoDevice),
F = fun (Spec, Acc) ->
case range_skip_length(Spec, Size) of
invalid_range ->
Acc;
V ->
[V | Acc]
end
end,
LocNums = lists:foldr(F, [], Ranges),
{ok, Data} = file:pread(IoDevice, LocNums),
Bodies = lists:zipwith(fun ({Skip, Length}, PartialBody) ->
{Skip, Skip + Length - 1, PartialBody}
end,
LocNums, Data),
{Bodies, Size};
range_parts(Body0, Ranges) ->
Body = iolist_to_binary(Body0),
Size = size(Body),
F = fun(Spec, Acc) ->
case range_skip_length(Spec, Size) of
invalid_range ->
Acc;
{Skip, Length} ->
<<_:Skip/binary, PartialBody:Length/binary, _/binary>> = Body,
[{Skip, Skip + Length - 1, PartialBody} | Acc]
end
end,
{lists:foldr(F, [], Ranges), Size}.
range_skip_length(Spec, Size) ->
case Spec of
{none, R} when R =< Size, R >= 0 ->
{Size - R, R};
{none, _OutOfRange} ->
{0, Size};
{R, none} when R >= 0, R < Size ->
{R, Size - R};
{_OutOfRange, none} ->
invalid_range;
{Start, End} when 0 =< Start, Start =< End, End < Size ->
{Start, End - Start + 1};
{_OutOfRange, _End} ->
invalid_range
end.
parse_range_request(RawRange) when is_list(RawRange) ->
try
"bytes=" ++ RangeString = RawRange,
Ranges = string:tokens(RangeString, ","),
lists:map(fun ("-" ++ V) ->
{none, list_to_integer(V)};
(R) ->
case string:tokens(R, "-") of
[S1, S2] ->
{list_to_integer(S1), list_to_integer(S2)};
[S] ->
{list_to_integer(S), none}
end
end,
Ranges)
catch
_:_ ->
fail
end.
|
7fe29ba282d350451c19000c14f75e3017cfa0826be04f114c6c3640ecc8099c | iijlab/direct-hs | Client.hs | {-# LANGUAGE CPP #-}
# LANGUAGE RecordWildCards #
module Web.Direct.Client
( Client
, clientRpcClient
, clientLoginInfo
, clientChannels
, sendMessage
, uploadFile
, newClient
, setDomains
, getDomains
, modifyTalkRooms
, setTalkRooms
, getTalkRooms
, setMe
, getMe
, setAcquaintances
, getAcquaintances
, hasAcquaintancesCached
, initialiseAcquaintances
, invalidateCachedAcquaintances
, modifyAcquaintances
, getUsers
, getCurrentDomain
, setCurrentDomain
, isActive
, findUser
, findTalkRoom
, getTalkUsers
, leaveTalkRoom
, removeUserFromTalkRoom
, findChannel
, withChannel
, getChannelAcquaintances
, shutdown
-- * Hooks when some changes are made in talk room members.
, onAddTalkers
, onDeleteTalk
, onDeleteTalker
-- * re-exporting
, dispatch
, Channel
, haltChannel
, getChannels
, send
, recv
)
where
import qualified Control.Concurrent.STM as S
import Control.Error.Util (failWith)
import Control.Monad (when)
import Control.Monad.Except (ExceptT (ExceptT),
runExceptT,
throwError)
import Control.Monad.IO.Class (liftIO)
import Data.Foldable (for_)
import qualified Data.IORef as I
import Data.List ((\\))
import qualified Data.List as L
import Data.Maybe (catMaybes, fromMaybe)
import Data.Traversable (mapAccumL)
import Data.Tuple (swap)
import qualified Network.MessagePack.RPC.Client.WebSocket as RPC
import Web.Direct.Client.Channel
import Web.Direct.Client.Status
import Web.Direct.DirectRPC hiding
(getAcquaintances,
getDomains)
import qualified Web.Direct.DirectRPC as DirectRPC
import Web.Direct.Exception
import Web.Direct.LoginInfo
import Web.Direct.Types
import Web.Direct.Upload
----------------------------------------------------------------
-- | Direct client.
data Client = Client {
      clientLoginInfo     :: !LoginInfo
    , clientRpcClient     :: !RPC.Client
    , clientDomains       :: I.IORef [Domain]
      -- ^ cached domain list (see 'setDomains' / 'getDomains')
    , clientTalkRooms     :: I.IORef [TalkRoom]
      -- ^ cached talk rooms, maintained by the on* hooks below
    , clientMe            :: I.IORef User
    , clientAcquaintances :: I.IORef (Cached [User])
      -- ^ acquaintance cache; 'Invalidated' forces a refetch on next read
    , clientChannels      :: ChannelDB
    , clientStatus        :: StatusVar
    , clientCurrentDomain :: Domain
      -- ^ immutable per value; 'setCurrentDomain' returns a new 'Client'
    }

-- | A cache cell: either a cached value or a marker that the value must
-- be refetched.
data Cached a = Invalidated | Cached a deriving Show

-- | Build a fresh 'Client'. The applicative chain supplies the 'IORef'
-- fields in record-declaration order, so the two must stay in sync.
newClient :: LoginInfo -> RPC.Client -> Domain -> User -> IO Client
newClient pinfo rpcClient initialDomain me =
    Client pinfo rpcClient
        <$> I.newIORef []          -- clientDomains
        <*> I.newIORef []          -- clientTalkRooms
        <*> I.newIORef me          -- clientMe
        <*> I.newIORef Invalidated -- clientAcquaintances
        <*> newChannelDB
        <*> S.newTVarIO Active
        <*> pure initialDomain
----------------------------------------------------------------
-- | Overwrite the cached domain list.
setDomains :: Client -> [Domain] -> IO ()
setDomains client domains = I.writeIORef (clientDomains client) domains

-- | Read the cached domain list.
getDomains :: Client -> IO [Domain]
getDomains client = I.readIORef (clientDomains client)

-- | Atomically update the talk-room cache, returning the extra result
-- produced by the update function.
modifyTalkRooms :: Client -> ([TalkRoom] -> ([TalkRoom], r)) -> IO r
modifyTalkRooms client f = I.atomicModifyIORef' (clientTalkRooms client) f

setTalkRooms :: Client -> [TalkRoom] -> IO ()
setTalkRooms client rooms = I.writeIORef (clientTalkRooms client) rooms

getTalkRooms :: Client -> IO [TalkRoom]
getTalkRooms client = I.readIORef (clientTalkRooms client)

setMe :: Client -> User -> IO ()
setMe client user = I.writeIORef (clientMe client) user

getMe :: Client -> IO User
getMe client = I.readIORef (clientMe client)

-- | Store a fresh acquaintance list, marking the cache as valid.
setAcquaintances :: Client -> [User] -> IO ()
setAcquaintances client users =
    I.writeIORef (clientAcquaintances client) (Cached users)
-- | Read the acquaintance list, refetching from the server when the
-- cache has been invalidated.
getAcquaintances :: Client -> IO [User]
getAcquaintances client = do
    cached <- I.readIORef $ clientAcquaintances client
    case cached of
        Cached users -> return users
        Invalidated  -> initialiseAcquaintances client

-- | Whether the acquaintance cache currently holds a value.
hasAcquaintancesCached :: Client -> IO Bool
hasAcquaintancesCached client = do
    cached <- I.readIORef $ clientAcquaintances client
    case cached of
        Cached _    -> return True
        Invalidated -> return False

-- | Update the acquaintance list with the given function, fetching
-- first when the cache is invalidated.
-- NOTE(review): this read-modify-write is not atomic (unlike
-- 'modifyTalkRooms'); concurrent callers could lose updates -- confirm
-- single-threaded use.
modifyAcquaintances :: Client -> ([User] -> ([User], r)) -> IO r
modifyAcquaintances client f = do
    cached <- I.readIORef $ clientAcquaintances client
    users <- case cached of
        Cached users -> return users
        Invalidated  -> fetchAcquaintance client
    let (newUsers, r) = f users
    setAcquaintances client newUsers
    return r

-- | Fetch acquaintances from the server and populate the cache.
initialiseAcquaintances :: Client -> IO [User]
initialiseAcquaintances client = do
    acqs <- fetchAcquaintance client
    setAcquaintances client acqs
    return acqs

-- | RPC fetch, keeping only the entry for the client's current domain.
fetchAcquaintance :: Client -> IO [User]
fetchAcquaintance client = do
    allAcqs <- DirectRPC.getAcquaintances $ clientRpcClient client
    return . fromMaybe [] $ lookup (domainId $ clientCurrentDomain client)
                                   allAcqs

-- | Mark the acquaintance cache as stale.
invalidateCachedAcquaintances :: Client -> IO ()
invalidateCachedAcquaintances =
    (`I.writeIORef` Invalidated) . clientAcquaintances

-- | Getting acquaintances and me. The head of the list is myself.
getUsers :: Client -> IO [User]
getUsers client = do
    me   <- getMe client
    acqs <- getAcquaintances client
    return $ me : acqs

getCurrentDomain :: Client -> Domain
getCurrentDomain = clientCurrentDomain

-- | Pure update: returns a client value pointing at the given domain.
setCurrentDomain :: Client -> Domain -> Client
setCurrentDomain client did = client { clientCurrentDomain = did }

----------------------------------------------------------------

findUser :: UserId -> Client -> IO (Maybe User)
findUser uid client = do
    users <- getUsers client
    return $ L.find (\u -> userId u == uid) users

findTalkRoom :: TalkId -> Client -> IO (Maybe TalkRoom)
findTalkRoom tid client = do
    rooms <- getTalkRooms client
    return $ L.find (\r -> talkId r == tid) rooms

-- | Getting talk room members. The head of the list is myself.
getTalkUsers :: Client -> TalkRoom -> IO [User]
getTalkUsers client talk = do
    me       <- getMe client
    talkAcqs <- getTalkAcquaintances client talk
    return $ me : talkAcqs

-- | Talk-room members other than myself (member ids that cannot be
-- resolved to a known user are silently skipped).
getTalkAcquaintances :: Client -> TalkRoom -> IO [User]
getTalkAcquaintances client talk = do
    me    <- getMe client
    users <- catMaybes <$> mapM (`findUser` client) (talkUserIds talk)
    return $ filter ((/= userId me) . userId) users
----------------------------------------------------------------
-- | Leave the talk room myself. Fails with 'InvalidTalkId' when the
-- room is not in the local cache.
leaveTalkRoom :: Client -> TalkId -> IO (Either Exception ())
leaveTalkRoom client tid = runExceptT $ do
    _  <- failWith InvalidTalkId =<< liftIO (findTalkRoom tid client)
    me <- liftIO $ getMe client
    ExceptT $ deleteTalker (clientRpcClient client) tid (userId me)

-- | Remove another user from a (non-pair) talk room, then run the
-- 'onDeleteTalker' hook with the remaining member ids.
removeUserFromTalkRoom :: Client -> TalkId -> UserId -> IO (Either Exception ())
removeUserFromTalkRoom client tid uid = runExceptT $ do
    talk <- failWith InvalidTalkId =<< liftIO (findTalkRoom tid client)
    -- Can not ban a friend on PairTalk
    when (talkType talk == PairTalk) $ throwError InvalidTalkType
    user <- failWith InvalidUserId =<< liftIO (findUser uid client)
    talkAcqs <- liftIO $ getTalkAcquaintances client talk
    -- Only a current member (other than myself) can be removed.
    when (user `notElem` talkAcqs) $ throwError InvalidUserId
    ExceptT $ deleteTalker (clientRpcClient client) tid uid
    liftIO $ do
        let did = domainId $ getCurrentDomain client
        muidsAfterDeleted <-
            fmap (filter (/= uid) . talkUserIds) <$> findTalkRoom tid client
        for_ muidsAfterDeleted $ \uidsAfterDeleted ->
            onDeleteTalker client did tid uidsAfterDeleted [uid]

----------------------------------------------------------------

-- | Sending a message in the main 'IO' or 'directCreateMessageHandler'.
sendMessage :: Client -> Message -> TalkId -> IO (Either Exception MessageId)
sendMessage = createMessage . clientRpcClient

----------------------------------------------------------------

-- | Request an upload slot, upload the file, then post a 'Files'
-- message referring to the uploaded content.
uploadFile :: Client -> UploadFile -> TalkId -> IO (Either Exception MessageId)
uploadFile client upf@UploadFile {..} tid = runExceptT $ do
    ua@UploadAuth {..} <- ExceptT $ createUploadAuth
        (clientRpcClient client)
        uploadFileName
        uploadFileMimeType
        uploadFileSize
        (domainId $ getCurrentDomain client)
    ExceptT $ runUploadFile upf ua
    let files = Files
            [ File uploadAuthGetUrl
                   uploadFileMimeType
                   uploadFileSize
                   uploadFileName
                   uploadAuthFileId
            ]
            uploadFileAttachedText
    ExceptT $ sendMessage client files tid
-- | Whether the client has not been shut down yet.
isActive :: Client -> IO Bool
isActive = S.atomically . isActiveSTM . clientStatus

findChannel :: Client -> ChannelKey -> IO (Maybe Channel)
findChannel = findChannel' . clientChannels

-- | A new channel is created according to the first three arguments.
--   Then the fourth argument runs in a new thread with the channel.
--   In this case, 'True' is returned.
--   If 'shutdown' is already called, a new thread is not spawned
--   and 'False' is returned.
withChannel
    :: Client
    -> TalkRoom   -- ^ where to talk
    -> Maybe User -- ^ limit of who to talk with; 'Nothing' means everyone (no limits)
    -> (Channel -> IO ())
    -> IO Bool
withChannel client = withChannel' (clientRpcClient client)
                                  (clientChannels client)
                                  (clientStatus client)

-- | Users a channel talks with: the limited user when one is set,
-- otherwise every member of the channel's talk room (excluding myself).
getChannelAcquaintances :: Client -> Channel -> IO [User]
getChannelAcquaintances client chan = case channelUserLimit chan of
    Just user -> return [user]
    Nothing   -> getTalkAcquaintances client $ channelTalkRoom chan

-- | This function lets 'directCreateMessageHandler' to not accept any message,
--   then sends the maintenance message to all channels,
--   and finally waits that all channels are closed.
shutdown :: Client -> Message -> IO ()
shutdown client = shutdown' (clientRpcClient client)
                            (clientChannels client)
                            (clientStatus client)
-- | Hook: users were added to a talk (or a new talk appeared). Updates
-- the talk-room cache and invalidates the acquaintance cache when the
-- talk brings in user ids we have not seen before.
onAddTalkers :: Client -> DomainId -> TalkRoom -> IO ()
onAddTalkers client _did newTalk = do
    newUserIds      <- modifyTalkRooms client updateTalks
    alreadyKnownIds <- map userId <$> getUsers client
    let hasNewAcqs = any (not . (`elem` alreadyKnownIds)) newUserIds
    when hasNewAcqs (invalidateCachedAcquaintances client)
  where
    -- Returns the updated room list plus the user ids new to any
    -- matched room.
    updateTalks :: [TalkRoom] -> ([TalkRoom], [UserId])
    updateTalks talks =
        let (newUsers, newTalks) = mapAccumL updateTalk [] talks
        in  if null newUsers
                then
                    -- No existing room gained members: insert the talk
                    -- if it is not present yet, and report all of its
                    -- member ids as candidates for cache invalidation.
                    let newTalks' =
                            if any ((talkId newTalk ==) . talkId) newTalks
                                then newTalks
                                else newTalk : newTalks
                    in  (newTalks', talkUserIds newTalk)
                else (newTalks, newUsers)
    -- Replace the matching room, accumulating the member ids that the
    -- old version of the room did not have.
    updateTalk :: [UserId] -> TalkRoom -> ([UserId], TalkRoom)
    updateTalk foundUserIds oldTalk = if talkId oldTalk == talkId newTalk
        then if null foundUserIds
            then (talkUserIds newTalk \\ talkUserIds oldTalk, newTalk)
            else (foundUserIds, newTalk)
        else (foundUserIds, oldTalk)

-- | Hook: a talk was deleted. Drops the room, prunes acquaintances
-- that no longer share any room with us, and halts the room's channels.
onDeleteTalk :: Client -> TalkId -> IO ()
onDeleteTalk client tid = do
    -- Remove talk
    userIdsInLeftRooms <- modifyTalkRooms client $ \talks ->
        let left = filter ((tid /=) . talkId) talks
        in  (left, concatMap talkUserIds left)
    -- Remove acquaintances who don't belong to same rooms with the client user anymore.
    modifyAcquaintances client
        $ \acqs -> (filter ((`elem` userIdsInLeftRooms) . userId) acqs, ())
    -- Close channels for talk
    let chanDB = clientChannels client
    getChannels chanDB tid >>= mapM_ (haltChannel chanDB)

-- | Hook: users were removed from a talk. Rewrites that room's member
-- list and prunes the removed users from the acquaintance cache when
-- they no longer share any room with us.
onDeleteTalker :: Client -> DomainId -> TalkId -> [UserId] -> [UserId] -> IO ()
onDeleteTalker client _ tid uidsAfterDeleted deletedUids = do
    someRoomIsUpdated <- modifyTalkRooms client
        $ \talks -> swap $ mapAccumL updateTalkUserIds False talks
    sharesWithDeletedUsers <- any (any (`elem` deletedUids) . talkUserIds)
        <$> getTalkRooms client
    when (someRoomIsUpdated && not sharesWithDeletedUsers)
        $ modifyAcquaintances client
        $ \acqs -> (filter ((`notElem` deletedUids) . userId) acqs, ())
  where
    -- Only the first room with a matching id is rewritten.
    updateTalkUserIds hasUpdated talk = if not hasUpdated && talkId talk == tid
        then (True, talk { talkUserIds = uidsAfterDeleted })
        else (hasUpdated, talk)
| null | https://raw.githubusercontent.com/iijlab/direct-hs/2422fd6fe008109e8dfb74f31d65b0d5a0330788/direct-hs/src/Web/Direct/Client.hs | haskell | # LANGUAGE CPP #
* Hooks when some changes are made in talk room members.
* re-exporting
--------------------------------------------------------------
| Direct client.
--------------------------------------------------------------
- | Getting acquaintances and me. The head of the list is myself.
--------------------------------------------------------------
- | Getting talk room members. The head of the list is myself.
--------------------------------------------------------------
--------------------------------------------------------------
| Sending a message in the main 'IO' or 'directCreateMessageHandler'.
--------------------------------------------------------------
In this case, 'True' is returned.
If 'shutdown' is already called, a new thread is not spawned
and 'False' is returned.
^ where to talk
^ limit of who to talk with; 'Nothing' means everyone (no limits)
| This function lets 'directCreateMessageHandler' to not accept any message,
then sends the maintenance message to all channels,
Remove talk
Remove acquaintances who don't belong to same rooms with the client user anymore.
Close channels for talk | # LANGUAGE RecordWildCards #
module Web.Direct.Client
( Client
, clientRpcClient
, clientLoginInfo
, clientChannels
, sendMessage
, uploadFile
, newClient
, setDomains
, getDomains
, modifyTalkRooms
, setTalkRooms
, getTalkRooms
, setMe
, getMe
, setAcquaintances
, getAcquaintances
, hasAcquaintancesCached
, initialiseAcquaintances
, invalidateCachedAcquaintances
, modifyAcquaintances
, getUsers
, getCurrentDomain
, setCurrentDomain
, isActive
, findUser
, findTalkRoom
, getTalkUsers
, leaveTalkRoom
, removeUserFromTalkRoom
, findChannel
, withChannel
, getChannelAcquaintances
, shutdown
, onAddTalkers
, onDeleteTalk
, onDeleteTalker
, dispatch
, Channel
, haltChannel
, getChannels
, send
, recv
)
where
import qualified Control.Concurrent.STM as S
import Control.Error.Util (failWith)
import Control.Monad (when)
import Control.Monad.Except (ExceptT (ExceptT),
runExceptT,
throwError)
import Control.Monad.IO.Class (liftIO)
import Data.Foldable (for_)
import qualified Data.IORef as I
import Data.List ((\\))
import qualified Data.List as L
import Data.Maybe (catMaybes, fromMaybe)
import Data.Traversable (mapAccumL)
import Data.Tuple (swap)
import qualified Network.MessagePack.RPC.Client.WebSocket as RPC
import Web.Direct.Client.Channel
import Web.Direct.Client.Status
import Web.Direct.DirectRPC hiding
(getAcquaintances,
getDomains)
import qualified Web.Direct.DirectRPC as DirectRPC
import Web.Direct.Exception
import Web.Direct.LoginInfo
import Web.Direct.Types
import Web.Direct.Upload
data Client = Client {
clientLoginInfo :: !LoginInfo
, clientRpcClient :: !RPC.Client
, clientDomains :: I.IORef [Domain]
, clientTalkRooms :: I.IORef [TalkRoom]
, clientMe :: I.IORef User
, clientAcquaintances :: I.IORef (Cached [User])
, clientChannels :: ChannelDB
, clientStatus :: StatusVar
, clientCurrentDomain :: Domain
}
data Cached a = Invalidated | Cached a deriving Show
newClient :: LoginInfo -> RPC.Client -> Domain -> User -> IO Client
newClient pinfo rpcClient initialDomain me =
Client pinfo rpcClient
<$> I.newIORef []
<*> I.newIORef []
<*> I.newIORef me
<*> I.newIORef Invalidated
<*> newChannelDB
<*> S.newTVarIO Active
<*> pure initialDomain
setDomains :: Client -> [Domain] -> IO ()
setDomains = I.writeIORef . clientDomains
getDomains :: Client -> IO [Domain]
getDomains client = I.readIORef (clientDomains client)
modifyTalkRooms :: Client -> ([TalkRoom] -> ([TalkRoom], r)) -> IO r
modifyTalkRooms client = I.atomicModifyIORef' (clientTalkRooms client)
setTalkRooms :: Client -> [TalkRoom] -> IO ()
setTalkRooms = I.writeIORef . clientTalkRooms
getTalkRooms :: Client -> IO [TalkRoom]
getTalkRooms = I.readIORef . clientTalkRooms
setMe :: Client -> User -> IO ()
setMe = I.writeIORef . clientMe
getMe :: Client -> IO User
getMe = I.readIORef . clientMe
setAcquaintances :: Client -> [User] -> IO ()
setAcquaintances client = I.writeIORef (clientAcquaintances client) . Cached
getAcquaintances :: Client -> IO [User]
getAcquaintances client = do
cached <- I.readIORef $ clientAcquaintances client
case cached of
Cached users -> return users
Invalidated -> initialiseAcquaintances client
hasAcquaintancesCached :: Client -> IO Bool
hasAcquaintancesCached client = do
cached <- I.readIORef $ clientAcquaintances client
case cached of
Cached _ -> return True
Invalidated -> return False
modifyAcquaintances :: Client -> ([User] -> ([User], r)) -> IO r
modifyAcquaintances client f = do
cached <- I.readIORef $ clientAcquaintances client
users <- case cached of
Cached users -> return users
Invalidated -> fetchAcquaintance client
let (newUsers, r) = f users
setAcquaintances client newUsers
return r
initialiseAcquaintances :: Client -> IO [User]
initialiseAcquaintances client = do
acqs <- fetchAcquaintance client
setAcquaintances client acqs
return acqs
fetchAcquaintance :: Client -> IO [User]
fetchAcquaintance client = do
allAcqs <- DirectRPC.getAcquaintances $ clientRpcClient client
return . fromMaybe [] $ lookup (domainId $ clientCurrentDomain client)
allAcqs
invalidateCachedAcquaintances :: Client -> IO ()
invalidateCachedAcquaintances =
(`I.writeIORef` Invalidated) . clientAcquaintances
getUsers :: Client -> IO [User]
getUsers client = do
me <- getMe client
acqs <- getAcquaintances client
return $ me : acqs
getCurrentDomain :: Client -> Domain
getCurrentDomain = clientCurrentDomain
setCurrentDomain :: Client -> Domain -> Client
setCurrentDomain client did = client { clientCurrentDomain = did }
findUser :: UserId -> Client -> IO (Maybe User)
findUser uid client = do
users <- getUsers client
return $ L.find (\u -> userId u == uid) users
findTalkRoom :: TalkId -> Client -> IO (Maybe TalkRoom)
findTalkRoom tid client = do
rooms <- getTalkRooms client
return $ L.find (\r -> talkId r == tid) rooms
getTalkUsers :: Client -> TalkRoom -> IO [User]
getTalkUsers client talk = do
me <- getMe client
talkAcqs <- getTalkAcquaintances client talk
return $ me : talkAcqs
getTalkAcquaintances :: Client -> TalkRoom -> IO [User]
getTalkAcquaintances client talk = do
me <- getMe client
users <- catMaybes <$> mapM (`findUser` client) (talkUserIds talk)
return $ filter ((/= userId me) . userId) users
leaveTalkRoom :: Client -> TalkId -> IO (Either Exception ())
leaveTalkRoom client tid = runExceptT $ do
_ <- failWith InvalidTalkId =<< liftIO (findTalkRoom tid client)
me <- liftIO $ getMe client
ExceptT $ deleteTalker (clientRpcClient client) tid (userId me)
removeUserFromTalkRoom :: Client -> TalkId -> UserId -> IO (Either Exception ())
removeUserFromTalkRoom client tid uid = runExceptT $ do
talk <- failWith InvalidTalkId =<< liftIO (findTalkRoom tid client)
Can not ban a friend on PairTalk
when (talkType talk == PairTalk) $ throwError InvalidTalkType
user <- failWith InvalidUserId =<< liftIO (findUser uid client)
talkAcqs <- liftIO $ getTalkAcquaintances client talk
when (user `notElem` talkAcqs) $ throwError InvalidUserId
ExceptT $ deleteTalker (clientRpcClient client) tid uid
liftIO $ do
let did = domainId $ getCurrentDomain client
muidsAfterDeleted <-
fmap (filter (/= uid) . talkUserIds) <$> findTalkRoom tid client
for_ muidsAfterDeleted $ \uidsAfterDeleted ->
onDeleteTalker client did tid uidsAfterDeleted [uid]
sendMessage :: Client -> Message -> TalkId -> IO (Either Exception MessageId)
sendMessage = createMessage . clientRpcClient
uploadFile :: Client -> UploadFile -> TalkId -> IO (Either Exception MessageId)
uploadFile client upf@UploadFile {..} tid = runExceptT $ do
ua@UploadAuth {..} <- ExceptT $ createUploadAuth
(clientRpcClient client)
uploadFileName
uploadFileMimeType
uploadFileSize
(domainId $ getCurrentDomain client)
ExceptT $ runUploadFile upf ua
let files = Files
[ File uploadAuthGetUrl
uploadFileMimeType
uploadFileSize
uploadFileName
uploadAuthFileId
]
uploadFileAttachedText
ExceptT $ sendMessage client files tid
isActive :: Client -> IO Bool
isActive = S.atomically . isActiveSTM . clientStatus
findChannel :: Client -> ChannelKey -> IO (Maybe Channel)
findChannel = findChannel' . clientChannels
| A new channel is created according to the first three arguments .
Then the fourth argument runs in a new thread with the channel .
withChannel
:: Client
-> (Channel -> IO ())
-> IO Bool
withChannel client = withChannel' (clientRpcClient client)
(clientChannels client)
(clientStatus client)
getChannelAcquaintances :: Client -> Channel -> IO [User]
getChannelAcquaintances client chan = case channelUserLimit chan of
Just user -> return [user]
Nothing -> getTalkAcquaintances client $ channelTalkRoom chan
and finnaly waits that all channels are closed .
shutdown :: Client -> Message -> IO ()
shutdown client = shutdown' (clientRpcClient client)
(clientChannels client)
(clientStatus client)
onAddTalkers :: Client -> DomainId -> TalkRoom -> IO ()
onAddTalkers client _did newTalk = do
newUserIds <- modifyTalkRooms client updateTalks
alreadyKnownIds <- map userId <$> getUsers client
let hasNewAcqs = any (not . (`elem` alreadyKnownIds)) newUserIds
when hasNewAcqs (invalidateCachedAcquaintances client)
where
updateTalks :: [TalkRoom] -> ([TalkRoom], [UserId])
updateTalks talks =
let (newUsers, newTalks) = mapAccumL updateTalk [] talks
in if null newUsers
then
let newTalks' =
if any ((talkId newTalk ==) . talkId) newTalks
then newTalks
else newTalk : newTalks
in (newTalks', talkUserIds newTalk)
else (newTalks, newUsers)
updateTalk :: [UserId] -> TalkRoom -> ([UserId], TalkRoom)
updateTalk foundUserIds oldTalk = if talkId oldTalk == talkId newTalk
then if null foundUserIds
then (talkUserIds newTalk \\ talkUserIds oldTalk, newTalk)
else (foundUserIds, newTalk)
else (foundUserIds, oldTalk)
onDeleteTalk :: Client -> TalkId -> IO ()
onDeleteTalk client tid = do
userIdsInLeftRooms <- modifyTalkRooms client $ \talks ->
let left = filter ((tid /=) . talkId) talks
in (left, concatMap talkUserIds left)
modifyAcquaintances client
$ \acqs -> (filter ((`elem` userIdsInLeftRooms) . userId) acqs, ())
let chanDB = clientChannels client
getChannels chanDB tid >>= mapM_ (haltChannel chanDB)
onDeleteTalker :: Client -> DomainId -> TalkId -> [UserId] -> [UserId] -> IO ()
onDeleteTalker client _ tid uidsAfterDeleted deletedUids = do
someRoomIsUpdated <- modifyTalkRooms client
$ \talks -> swap $ mapAccumL updateTalkUserIds False talks
sharesWithDeletedUsers <- any (any (`elem` deletedUids) . talkUserIds)
<$> getTalkRooms client
when (someRoomIsUpdated && not sharesWithDeletedUsers)
$ modifyAcquaintances client
$ \acqs -> (filter ((`notElem` deletedUids) . userId) acqs, ())
where
updateTalkUserIds hasUpdated talk = if not hasUpdated && talkId talk == tid
then (True, talk { talkUserIds = uidsAfterDeleted })
else (hasUpdated, talk)
|
f4dc9ee29bf9e0f6a0b6930646bbccf6107683948bf2149108150d1de6b3b67e | DSiSc/why3 | ml_printer.ml | (********************************************************************)
(* *)
The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
(* *)
(* This software is distributed under the terms of the GNU Lesser *)
General Public License version 2.1 , with the special exception
(* on linking described in file LICENSE. *)
(* *)
(********************************************************************)
* { 2 Library for code used by different printers }
open Format
open Pdriver
open Expr
open Ident
open Term
open Ty
open Ity
open Printer
open Pp
open Theory
open Pmodule
open Compile
open Mltree
type info = {
info_syn : syntax_map;
info_convert : syntax_map;
info_literal : syntax_map;
info_current_th : Theory.theory;
info_current_mo : Pmodule.pmodule option;
info_th_known_map : Decl.known_map;
info_mo_known_map : Pdecl.known_map;
info_fname : string option;
info_flat : bool;
info_current_ph : string list; (* current path *)
}
let create_info pargs fname ~flat ({mod_theory = th} as m) = {
info_syn = pargs.syntax;
info_convert = pargs.converter;
info_literal = pargs.literal;
info_current_th = th;
info_current_mo = Some m;
info_th_known_map = th.th_known;
info_mo_known_map = m.Pmodule.mod_known;
info_fname = fname;
info_flat = flat;
info_current_ph = [];
}
let add_current_path info s =
{ info with info_current_ph = s :: info.info_current_ph }
let protect_on b s =
if b then "(" ^^ s ^^ ")" else s
let star fmt () = fprintf fmt " *@ "
let rec print_list2 sep sep_m print1 print2 fmt (l1, l2) =
match l1, l2 with
| [x1], [x2] ->
print1 fmt x1; sep_m fmt (); print2 fmt x2
| x1 :: r1, x2 :: r2 ->
print1 fmt x1; sep_m fmt (); print2 fmt x2; sep fmt ();
print_list2 sep sep_m print1 print2 fmt (r1, r2)
| _ -> ()
let check_val_in_drv info ({rs_name = {id_loc = loc}} as rs) =
(* here [rs] refers to a [val] declaration *)
match query_syntax info.info_convert rs.rs_name,
query_syntax info.info_syn rs.rs_name with
| None, None (* when info.info_flat *) ->
Loc.errorm ?loc "Function %a cannot be extracted" Expr.print_rs rs
| _ -> ()
module type S = sig
val iprinter : Ident.ident_printer
val aprinter : Ident.ident_printer
val tprinter : Ident.ident_printer
val forget_id : Ident.ident -> unit
val _forget_ids : Ident.ident list -> unit
val forget_var : Mltree.var -> unit
val forget_vars : Mltree.var list -> unit
val forget_let_defn : Mltree.let_def -> unit
val forget_pat : Mltree.pat -> unit
val print_global_ident :
sanitizer:(string -> string) -> Format.formatter -> Ident.ident -> unit
val print_path :
sanitizer:(string -> string) ->
Format.formatter -> string list * Ident.ident -> unit
val print_lident : info -> Format.formatter -> Ident.Sid.elt -> unit
val print_uident : info -> Format.formatter -> Ident.Sid.elt -> unit
val print_tv : Format.formatter -> Ty.tvsymbol -> unit
val print_rs : info -> Format.formatter -> Expr.rsymbol -> unit
(* FIXME : make this independent of the printing function for ident *)
val check_type_in_drv : info -> ident -> unit
val print_ty : ?paren:bool -> info -> ty pp
end
module MLPrinter (K: sig val keywords: string list end) = struct
(* iprinter: local names
aprinter: type variables
tprinter: toplevel definitions *)
let iprinter, aprinter, tprinter =
let isanitize = sanitizer char_to_alpha char_to_alnumus in
let lsanitize = sanitizer char_to_lalpha char_to_alnumus in
create_ident_printer K.keywords ~sanitizer:isanitize,
create_ident_printer K.keywords ~sanitizer:lsanitize,
create_ident_printer K.keywords ~sanitizer:lsanitize
let forget_id id = forget_id iprinter id
let _forget_ids = List.iter forget_id
let forget_var ((id, _, _): Mltree.var) = forget_id id
let forget_vars = List.iter forget_var
let forget_let_defn = function
| Lvar (v,_) -> forget_id v.pv_vs.vs_name
| Lsym (s,_,_,_) | Lany (s,_,_) -> forget_rs s
| Lrec rdl -> List.iter (fun fd -> forget_rs fd.rec_sym) rdl
let rec forget_pat = function
| Pwild -> ()
| Pvar {vs_name=id} -> forget_id id
| Papp (_, pl) | Ptuple pl -> List.iter forget_pat pl
| Por (p1, p2) -> forget_pat p1; forget_pat p2
| Pas (p, _) -> forget_pat p
let print_global_ident ~sanitizer fmt id =
let s = id_unique ~sanitizer tprinter id in
Ident.forget_id tprinter id;
fprintf fmt "%s" s
let print_path ~sanitizer fmt (q, id) =
assert (List.length q >= 1);
match Lists.chop_last q with
| [], _ -> print_global_ident ~sanitizer fmt id
| q, _ ->
fprintf fmt "%a.%a"
(print_list dot string) q (print_global_ident ~sanitizer) id
let rec remove_prefix acc current_path = match acc, current_path with
| [], _ | _, [] -> acc
| p1 :: _, p2 :: _ when p1 <> p2 -> acc
| _ :: r1, _ :: r2 -> remove_prefix r1 r2
let is_local_id info id =
Sid.mem id info.info_current_th.th_local ||
Opt.fold (fun _ m -> Sid.mem id m.Pmodule.mod_local)
false info.info_current_mo
exception Local
let print_qident ~sanitizer info fmt id =
try
if info.info_flat then raise Not_found;
if is_local_id info id then raise Local;
let p, t, q =
try Pmodule.restore_path id with Not_found -> Theory.restore_path id in
let fname = if p = [] then info.info_fname else None in
let m = Strings.capitalize (module_name ?fname p t) in
fprintf fmt "%s.%a" m (print_path ~sanitizer) (q, id)
with
| Not_found ->
let s = id_unique ~sanitizer iprinter id in
fprintf fmt "%s" s
| Local ->
let _, _, q = try Pmodule.restore_path id with Not_found ->
Theory.restore_path id in
let q = remove_prefix q (List.rev info.info_current_ph) in
print_path ~sanitizer fmt (q, id)
let print_lident = print_qident ~sanitizer:Strings.uncapitalize
let print_uident = print_qident ~sanitizer:Strings.capitalize
let print_tv fmt tv =
fprintf fmt "'%s" (id_unique aprinter tv.tv_name)
let print_rs info fmt rs =
fprintf fmt "%a" (print_lident info) rs.rs_name
let check_type_in_drv info ({id_loc = loc} as ty_id) =
match query_syntax info.info_convert ty_id,
query_syntax info.info_syn ty_id with
| None, None ->
Loc.errorm ?loc "Type %a cannot be extracted" (print_lident info) ty_id
| _ -> ()
(** Types *)
let rec print_ty ?(paren=false) info fmt = function
| Tvar tv ->
print_tv fmt tv
| Ttuple [] ->
fprintf fmt "unit"
| Ttuple [t] ->
print_ty ~paren info fmt t
| Ttuple tl ->
fprintf fmt (protect_on paren "@[%a@]")
(print_list star (print_ty ~paren:true info)) tl
| Tapp (ts, tl) ->
match query_syntax info.info_syn ts with
| Some s ->
fprintf fmt (protect_on paren "%a")
(syntax_arguments s (print_ty ~paren:true info)) tl
| None ->
match tl with
| [] ->
(print_lident info) fmt ts
| [ty] ->
fprintf fmt (protect_on paren "%a@ %a")
(print_ty ~paren:true info) ty (print_lident info) ts
| tl ->
fprintf fmt (protect_on paren "(%a)@ %a")
(print_list comma (print_ty ~paren:false info)) tl
(print_lident info) ts
end
| null | https://raw.githubusercontent.com/DSiSc/why3/8ba9c2287224b53075adc51544bc377bc8ea5c75/src/mlw/ml_printer.ml | ocaml | ******************************************************************
This software is distributed under the terms of the GNU Lesser
on linking described in file LICENSE.
******************************************************************
current path
here [rs] refers to a [val] declaration
when info.info_flat
FIXME : make this independent of the printing function for ident
iprinter: local names
aprinter: type variables
tprinter: toplevel definitions
* Types | The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
General Public License version 2.1 , with the special exception
* { 2 Library for code used by different printers }
open Format
open Pdriver
open Expr
open Ident
open Term
open Ty
open Ity
open Printer
open Pp
open Theory
open Pmodule
open Compile
open Mltree
type info = {
info_syn : syntax_map;
info_convert : syntax_map;
info_literal : syntax_map;
info_current_th : Theory.theory;
info_current_mo : Pmodule.pmodule option;
info_th_known_map : Decl.known_map;
info_mo_known_map : Pdecl.known_map;
info_fname : string option;
info_flat : bool;
}
let create_info pargs fname ~flat ({mod_theory = th} as m) = {
info_syn = pargs.syntax;
info_convert = pargs.converter;
info_literal = pargs.literal;
info_current_th = th;
info_current_mo = Some m;
info_th_known_map = th.th_known;
info_mo_known_map = m.Pmodule.mod_known;
info_fname = fname;
info_flat = flat;
info_current_ph = [];
}
let add_current_path info s =
{ info with info_current_ph = s :: info.info_current_ph }
let protect_on b s =
if b then "(" ^^ s ^^ ")" else s
let star fmt () = fprintf fmt " *@ "
let rec print_list2 sep sep_m print1 print2 fmt (l1, l2) =
match l1, l2 with
| [x1], [x2] ->
print1 fmt x1; sep_m fmt (); print2 fmt x2
| x1 :: r1, x2 :: r2 ->
print1 fmt x1; sep_m fmt (); print2 fmt x2; sep fmt ();
print_list2 sep sep_m print1 print2 fmt (r1, r2)
| _ -> ()
let check_val_in_drv info ({rs_name = {id_loc = loc}} as rs) =
match query_syntax info.info_convert rs.rs_name,
query_syntax info.info_syn rs.rs_name with
Loc.errorm ?loc "Function %a cannot be extracted" Expr.print_rs rs
| _ -> ()
module type S = sig
val iprinter : Ident.ident_printer
val aprinter : Ident.ident_printer
val tprinter : Ident.ident_printer
val forget_id : Ident.ident -> unit
val _forget_ids : Ident.ident list -> unit
val forget_var : Mltree.var -> unit
val forget_vars : Mltree.var list -> unit
val forget_let_defn : Mltree.let_def -> unit
val forget_pat : Mltree.pat -> unit
val print_global_ident :
sanitizer:(string -> string) -> Format.formatter -> Ident.ident -> unit
val print_path :
sanitizer:(string -> string) ->
Format.formatter -> string list * Ident.ident -> unit
val print_lident : info -> Format.formatter -> Ident.Sid.elt -> unit
val print_uident : info -> Format.formatter -> Ident.Sid.elt -> unit
val print_tv : Format.formatter -> Ty.tvsymbol -> unit
val print_rs : info -> Format.formatter -> Expr.rsymbol -> unit
val check_type_in_drv : info -> ident -> unit
val print_ty : ?paren:bool -> info -> ty pp
end
module MLPrinter (K: sig val keywords: string list end) = struct
let iprinter, aprinter, tprinter =
let isanitize = sanitizer char_to_alpha char_to_alnumus in
let lsanitize = sanitizer char_to_lalpha char_to_alnumus in
create_ident_printer K.keywords ~sanitizer:isanitize,
create_ident_printer K.keywords ~sanitizer:lsanitize,
create_ident_printer K.keywords ~sanitizer:lsanitize
let forget_id id = forget_id iprinter id
let _forget_ids = List.iter forget_id
let forget_var ((id, _, _): Mltree.var) = forget_id id
let forget_vars = List.iter forget_var
let forget_let_defn = function
| Lvar (v,_) -> forget_id v.pv_vs.vs_name
| Lsym (s,_,_,_) | Lany (s,_,_) -> forget_rs s
| Lrec rdl -> List.iter (fun fd -> forget_rs fd.rec_sym) rdl
let rec forget_pat = function
| Pwild -> ()
| Pvar {vs_name=id} -> forget_id id
| Papp (_, pl) | Ptuple pl -> List.iter forget_pat pl
| Por (p1, p2) -> forget_pat p1; forget_pat p2
| Pas (p, _) -> forget_pat p
let print_global_ident ~sanitizer fmt id =
let s = id_unique ~sanitizer tprinter id in
Ident.forget_id tprinter id;
fprintf fmt "%s" s
let print_path ~sanitizer fmt (q, id) =
assert (List.length q >= 1);
match Lists.chop_last q with
| [], _ -> print_global_ident ~sanitizer fmt id
| q, _ ->
fprintf fmt "%a.%a"
(print_list dot string) q (print_global_ident ~sanitizer) id
let rec remove_prefix acc current_path = match acc, current_path with
| [], _ | _, [] -> acc
| p1 :: _, p2 :: _ when p1 <> p2 -> acc
| _ :: r1, _ :: r2 -> remove_prefix r1 r2
let is_local_id info id =
Sid.mem id info.info_current_th.th_local ||
Opt.fold (fun _ m -> Sid.mem id m.Pmodule.mod_local)
false info.info_current_mo
exception Local
let print_qident ~sanitizer info fmt id =
try
if info.info_flat then raise Not_found;
if is_local_id info id then raise Local;
let p, t, q =
try Pmodule.restore_path id with Not_found -> Theory.restore_path id in
let fname = if p = [] then info.info_fname else None in
let m = Strings.capitalize (module_name ?fname p t) in
fprintf fmt "%s.%a" m (print_path ~sanitizer) (q, id)
with
| Not_found ->
let s = id_unique ~sanitizer iprinter id in
fprintf fmt "%s" s
| Local ->
let _, _, q = try Pmodule.restore_path id with Not_found ->
Theory.restore_path id in
let q = remove_prefix q (List.rev info.info_current_ph) in
print_path ~sanitizer fmt (q, id)
let print_lident = print_qident ~sanitizer:Strings.uncapitalize
let print_uident = print_qident ~sanitizer:Strings.capitalize
let print_tv fmt tv =
fprintf fmt "'%s" (id_unique aprinter tv.tv_name)
let print_rs info fmt rs =
fprintf fmt "%a" (print_lident info) rs.rs_name
let check_type_in_drv info ({id_loc = loc} as ty_id) =
match query_syntax info.info_convert ty_id,
query_syntax info.info_syn ty_id with
| None, None ->
Loc.errorm ?loc "Type %a cannot be extracted" (print_lident info) ty_id
| _ -> ()
let rec print_ty ?(paren=false) info fmt = function
| Tvar tv ->
print_tv fmt tv
| Ttuple [] ->
fprintf fmt "unit"
| Ttuple [t] ->
print_ty ~paren info fmt t
| Ttuple tl ->
fprintf fmt (protect_on paren "@[%a@]")
(print_list star (print_ty ~paren:true info)) tl
| Tapp (ts, tl) ->
match query_syntax info.info_syn ts with
| Some s ->
fprintf fmt (protect_on paren "%a")
(syntax_arguments s (print_ty ~paren:true info)) tl
| None ->
match tl with
| [] ->
(print_lident info) fmt ts
| [ty] ->
fprintf fmt (protect_on paren "%a@ %a")
(print_ty ~paren:true info) ty (print_lident info) ts
| tl ->
fprintf fmt (protect_on paren "(%a)@ %a")
(print_list comma (print_ty ~paren:false info)) tl
(print_lident info) ts
end
|
f46c918b088dd04d3416f05fc6e3937f6868767b9967c85d7c13f6c57582930e | factorhouse/shortcut | project.clj | (defproject io.operatr/shortcut "0.1.0"
:description "Shortcut by Operatr.IO"
:min-lein-version "2.9.0"
:dependencies [[org.clojure/clojure "1.10.3"]
[org.clojure/tools.logging "1.1.0"]
[org.clojure/core.async "1.3.622"]
[org.clojure/tools.reader "1.3.6"]
[com.cognitect/transit-clj "1.0.324"]
[ch.qos.logback/logback-classic "1.2.6"]
[org.slf4j/slf4j-api "1.7.32"]
[cheshire "5.10.1"]]
:clean-targets ^{:protect false} ["resources/public/js" "dev-resources/public/js" :target-path ".shadow-cljs"]
:profiles {:cljs {:dependencies [[thheller/shadow-cljs "2.15.10" :exclusions [commons-codec]]
[com.cognitect/transit-cljs "0.8.269"]]}
:dev {:resource-paths ["dev-resources"]
:plugins [[lein-cljfmt "0.8.0" :exclusions [org.clojure/clojure]]
[lein-shell "0.5.0"]]
:dependencies [[clj-kondo "2021.09.25" :exclusions [com.fasterxml.jackson.core/jackson-core]]]}
:uberjar {:prep-tasks ["clean" ["shell" "lein" "release-cljs"] "javac" "compile"]
:aot :all
:omit-source true}
:smoke {:pedantic? :abort}}
:aliases {"check" ["with-profile" "+smoke" "check"]
"fmt" ["with-profile" "+smoke" "cljfmt" "check"]
"fmtfix" ["with-profile" "+smoke" "cljfmt" "fix"]
"kondo" ["with-profile" "+smoke" "run" "-m" "clj-kondo.main" "--lint" "src"]
"smoke" ["with-profile" "+smoke,+cljs" ["do" ["clean"] ["check"] ["cljfmt" "check"] ["run" "-m" "clj-kondo.main" "--lint" "src"] ["test"]]]
"release-cljs" ["with-profile" "+cljs" "run" "-m" "shadow.cljs.devtools.cli" "release" "app"]
"live" ["with-profile" "+cljs" "run" "-m" "shadow.cljs.devtools.cli" "watch" "live"]
"demo" ["with-profile" "+cljs" "run" "-m" "shadow.cljs.devtools.cli" "release" "live"]}
:source-paths ["src"]
:test-paths ["test"]
:uberjar-name "shortcut-standalone.jar"
:main shortcut
:aot [shortcut]
:java-source-paths ["src-java"]
:javac-options ["-target" "11" "-source" "11" "-Xlint:-options"]
:pedantic? :warn)
| null | https://raw.githubusercontent.com/factorhouse/shortcut/f58496b74fe16dbe31fc58ff1de7e420f8248112/project.clj | clojure | (defproject io.operatr/shortcut "0.1.0"
:description "Shortcut by Operatr.IO"
:min-lein-version "2.9.0"
:dependencies [[org.clojure/clojure "1.10.3"]
[org.clojure/tools.logging "1.1.0"]
[org.clojure/core.async "1.3.622"]
[org.clojure/tools.reader "1.3.6"]
[com.cognitect/transit-clj "1.0.324"]
[ch.qos.logback/logback-classic "1.2.6"]
[org.slf4j/slf4j-api "1.7.32"]
[cheshire "5.10.1"]]
:clean-targets ^{:protect false} ["resources/public/js" "dev-resources/public/js" :target-path ".shadow-cljs"]
:profiles {:cljs {:dependencies [[thheller/shadow-cljs "2.15.10" :exclusions [commons-codec]]
[com.cognitect/transit-cljs "0.8.269"]]}
:dev {:resource-paths ["dev-resources"]
:plugins [[lein-cljfmt "0.8.0" :exclusions [org.clojure/clojure]]
[lein-shell "0.5.0"]]
:dependencies [[clj-kondo "2021.09.25" :exclusions [com.fasterxml.jackson.core/jackson-core]]]}
:uberjar {:prep-tasks ["clean" ["shell" "lein" "release-cljs"] "javac" "compile"]
:aot :all
:omit-source true}
:smoke {:pedantic? :abort}}
:aliases {"check" ["with-profile" "+smoke" "check"]
"fmt" ["with-profile" "+smoke" "cljfmt" "check"]
"fmtfix" ["with-profile" "+smoke" "cljfmt" "fix"]
"kondo" ["with-profile" "+smoke" "run" "-m" "clj-kondo.main" "--lint" "src"]
"smoke" ["with-profile" "+smoke,+cljs" ["do" ["clean"] ["check"] ["cljfmt" "check"] ["run" "-m" "clj-kondo.main" "--lint" "src"] ["test"]]]
"release-cljs" ["with-profile" "+cljs" "run" "-m" "shadow.cljs.devtools.cli" "release" "app"]
"live" ["with-profile" "+cljs" "run" "-m" "shadow.cljs.devtools.cli" "watch" "live"]
"demo" ["with-profile" "+cljs" "run" "-m" "shadow.cljs.devtools.cli" "release" "live"]}
:source-paths ["src"]
:test-paths ["test"]
:uberjar-name "shortcut-standalone.jar"
:main shortcut
:aot [shortcut]
:java-source-paths ["src-java"]
:javac-options ["-target" "11" "-source" "11" "-Xlint:-options"]
:pedantic? :warn)
| |
09a513efd04b9ddf926f37e6978a26358177314876188961ed447be96c398e21 | nuvla/api-server | credential_template_totp_2fa.clj | (ns sixsq.nuvla.server.resources.credential-template-totp-2fa
"
This resource allows nuvla server to store generated TOTP secret in `credential`
. A token allows server to authenticate a user.
"
(:require
[sixsq.nuvla.auth.utils.acl :as acl-utils]
[sixsq.nuvla.server.resources.common.utils :as u]
[sixsq.nuvla.server.resources.credential-template :as p]
[sixsq.nuvla.server.resources.resource-metadata :as md]
[sixsq.nuvla.server.resources.spec.credential-template-totp-2fa :as totp]
[sixsq.nuvla.server.util.metadata :as gen-md]))
(def ^:const credential-subtype "totp-2fa")
(def ^:const resource-url credential-subtype)
(def ^:const method credential-subtype)
(def resource-acl (acl-utils/normalize-acl {:owners ["group/nuvla-admin"]}))
;;
;; resource
;;
(def ^:const template
{:subtype credential-subtype
:method method
:name "Two factor authentication TOTP secret"
:description "stores a TOTP secret"
:acl resource-acl
:resource-metadata "resource-metadata/credential-template-totp-2fa"})
;;
;; initialization: register this credential-template
;;
(def resource-metadata (gen-md/generate-metadata ::ns ::p/ns ::totp/schema))
(def resource-metadata-create (gen-md/generate-metadata ::ns ::p/ns ::totp/schema-create "create"))
(defn initialize
[]
(p/register template)
(md/register resource-metadata)
(md/register resource-metadata-create))
;;
;; multimethods for validation
;;
(def validate-fn (u/create-spec-validation-fn ::totp/schema))
(defmethod p/validate-subtype method
[resource]
(validate-fn resource))
| null | https://raw.githubusercontent.com/nuvla/api-server/504512e622e1bd677cbfe2c8b3c15524c2bc8ffe/code/src/sixsq/nuvla/server/resources/credential_template_totp_2fa.clj | clojure |
resource
initialization: register this credential-template
multimethods for validation
| (ns sixsq.nuvla.server.resources.credential-template-totp-2fa
"
This resource allows nuvla server to store generated TOTP secret in `credential`
. A token allows server to authenticate a user.
"
(:require
[sixsq.nuvla.auth.utils.acl :as acl-utils]
[sixsq.nuvla.server.resources.common.utils :as u]
[sixsq.nuvla.server.resources.credential-template :as p]
[sixsq.nuvla.server.resources.resource-metadata :as md]
[sixsq.nuvla.server.resources.spec.credential-template-totp-2fa :as totp]
[sixsq.nuvla.server.util.metadata :as gen-md]))
(def ^:const credential-subtype "totp-2fa")
(def ^:const resource-url credential-subtype)
(def ^:const method credential-subtype)
(def resource-acl (acl-utils/normalize-acl {:owners ["group/nuvla-admin"]}))
(def ^:const template
{:subtype credential-subtype
:method method
:name "Two factor authentication TOTP secret"
:description "stores a TOTP secret"
:acl resource-acl
:resource-metadata "resource-metadata/credential-template-totp-2fa"})
(def resource-metadata (gen-md/generate-metadata ::ns ::p/ns ::totp/schema))
(def resource-metadata-create (gen-md/generate-metadata ::ns ::p/ns ::totp/schema-create "create"))
(defn initialize
[]
(p/register template)
(md/register resource-metadata)
(md/register resource-metadata-create))
(def validate-fn (u/create-spec-validation-fn ::totp/schema))
(defmethod p/validate-subtype method
[resource]
(validate-fn resource))
|
3c56ee504fdcad800c0458aaaa5fddd45aaa7dc711d2bd6668b122b572360903 | ocaml-multicore/ocaml-tsan | scheduling.ml | # 2 "asmcomp/arm64/scheduling.ml"
(**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Gallium , INRIA Rocquencourt
(* *)
Copyright 2013 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
The " open ! " directive below is necessary because , although
this module does not actually depend on Schedgen in this backend , the
dependency exists in other backends and our build system requires
that all the backends have the same dependencies .
We thus have to use " open ! " and disable the corresponding warning
only for this compilation unit .
this module does not actually depend on Schedgen in this backend, the
dependency exists in other backends and our build system requires
that all the backends have the same dependencies.
We thus have to use "open!" and disable the corresponding warning
only for this compilation unit.
*)
open! Schedgen [@@warning "-66"]
(* Scheduling is turned off because the processor schedules dynamically
much better than what we could do. *)
let fundecl f = f
| null | https://raw.githubusercontent.com/ocaml-multicore/ocaml-tsan/ae9c1502103845550162a49fcd3f76276cdfa866/asmcomp/arm64/scheduling.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Scheduling is turned off because the processor schedules dynamically
much better than what we could do. | # 2 "asmcomp/arm64/scheduling.ml"
, projet Gallium , INRIA Rocquencourt
Copyright 2013 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
The " open ! " directive below is necessary because , although
this module does not actually depend on Schedgen in this backend , the
dependency exists in other backends and our build system requires
that all the backends have the same dependencies .
We thus have to use " open ! " and disable the corresponding warning
only for this compilation unit .
this module does not actually depend on Schedgen in this backend, the
dependency exists in other backends and our build system requires
that all the backends have the same dependencies.
We thus have to use "open!" and disable the corresponding warning
only for this compilation unit.
*)
open! Schedgen [@@warning "-66"]
let fundecl f = f
|
a4c028d05f60f21be2c03ab0f187642cccc0473fb07f71d409bf2d4bb64bd777 | edalorzo/learning-clojure | replace_addition.clj | (ns learn.macros.basics.replace-addition)
(comment
There are two phases : read and eval
;; We can get in the middle of reading to change
;; the output data structures before they are evaled.
(let [expression (read-string "(+ 1 2 3 4 5)")]
(let [changed (cons (read-string "*")
(rest expression))]
(eval changed)))
;; We cannot simply type the expression (+ 1 2 3 4 5)
;; because it would be evaulated right away, so to
;; suppress the evaluation of a list we must quote it.
(let [expression (quote (+ 1 2 3 4 5))]
(let [changed (cons (quote *)
(rest expression))]
(eval changed))))
(comment
;; there ia reader macro to simplify quoting
(let [expression '(+ 1 2 3 4 5)]
(let [changed (cons '* (rest expression))]
(eval changed))))
| null | https://raw.githubusercontent.com/edalorzo/learning-clojure/b5db63d8e783767a85af19ccb8dbf4d4e0f53d62/src/learn/macros/basics/replace_addition.clj | clojure | We can get in the middle of reading to change
the output data structures before they are evaled.
We cannot simply type the expression (+ 1 2 3 4 5)
because it would be evaulated right away, so to
suppress the evaluation of a list we must quote it.
there ia reader macro to simplify quoting | (ns learn.macros.basics.replace-addition)
(comment
There are two phases : read and eval
(let [expression (read-string "(+ 1 2 3 4 5)")]
(let [changed (cons (read-string "*")
(rest expression))]
(eval changed)))
(let [expression (quote (+ 1 2 3 4 5))]
(let [changed (cons (quote *)
(rest expression))]
(eval changed))))
(comment
(let [expression '(+ 1 2 3 4 5)]
(let [changed (cons '* (rest expression))]
(eval changed))))
|
c581651e7eef5185920da91d3dc98c0f46092a1b86d8bf7dd0821f9806714334 | rpav/spell-and-dagger | quadtree.lisp | ;;; - NOTICE - NOTICE - NOTICE -
;;;
;;; This was taken from some other code I had laying around for a
;;; quadtree; it was not written during the jam. It may have been
;;; modified though.
;;;
;;; - NOTICE - NOTICE - NOTICE -
(in-package :game)
;; Quadtree
(defclass qt-node ()
((size :initform nil :initarg :size)
(center-point :initform nil :initarg :at)
(quads :initform (make-array 4 :initial-element nil))
(objects :initform nil)))
(defclass quadtree ()
((top-node)
(max-depth :initform 3 :initarg :max-depth)
(object-node :initform (make-hash-table))
(key-fun :initform #'identity :initarg :key)))
(defmethod initialize-instance :after
((qt quadtree) &key x y size &allow-other-keys)
(with-slots (top-node) qt
(let ((x (or x (/ size 2.0)))
(y (or y (/ size 2.0))))
(setf top-node (make-instance 'qt-node :at (gk-vec2 x y) :size size)))))
(defgeneric quadtree-add (quadtree item)
(:documentation "Add `ITEM` to `QUADTREE`."))
(defgeneric quadtree-delete (quadtree item)
(:documentation "Delete `ITEM` from `QUADTREE`."))
(defgeneric quadtree-select (quadtree box &optional offs)
(:documentation "Select items from `QUADTREE` inside `BOX` with
optional offset, `OFFS`"))
(defun quadtree-contains (quadtree object)
(with-slots (object-node) quadtree
(nth-value 1 (gethash object object-node))))
(defun point-quad (x y qt-node &optional (test #'<))
"Return the quadrant `POINT` would occupy."
(with-slots ((c center-point)) qt-node
(let ((x< (funcall test x (vx c)))
(y< (funcall test y (vy c))))
(cond
((and x< y<) 0)
(y< 1)
(x< 2)
(t 3)))))
(defun rect-quad (rect offs qt-node)
"Return the quadrant `RECT` should occupy, or `NIL` if it does not
fit into any single quad"
(with-box (x0 y0 x1 y1) rect
(when offs
(incf x0 (vx offs))
(incf y0 (vy offs))
(incf x1 (vx offs))
(incf y1 (vy offs)))
(let ((q1 (point-quad x0 y0 qt-node))
(q2 (point-quad x1 y1 qt-node #'<=)))
(if (= q1 q2)
q1
nil))))
(defun qn-pos (qn qt-node)
(with-slots ((at center-point) size) qt-node
(let ((offset (/ size 4.0))
(x (vx at))
(y (vy at)))
(ecase qn
(0 (gk-vec2 (- x offset) (- y offset)))
(1 (gk-vec2 (+ x offset) (- y offset)))
(2 (gk-vec2 (- x offset) (+ y offset)))
(3 (gk-vec2 (+ x offset) (+ y offset)))))))
(defun ensure-rect-quad (rect offs qt-node)
(let ((qn (rect-quad rect offs qt-node)))
(if qn
(with-slots (quads size) qt-node
(or (aref quads qn)
(setf (aref quads qn)
(make-instance 'qt-node
:at (qn-pos qn qt-node)
:size (/ size 2.0)))))
qt-node)))
(defun next-node (rect offs qt-node)
"Return the next (child) node that `RECT` fits into in `QT-NODE`,
or `NIL`"
(with-slots (quads) qt-node
(let ((qn (rect-quad rect offs qt-node)))
(when qn (aref quads qn)))))
(defmethod quadtree-add ((qt quadtree) item)
(with-slots (top-node object-node max-depth key-fun) qt
(multiple-value-bind (rect offs)
(funcall key-fun item)
(loop for depth from 0 below max-depth
as last-node = nil then node
as node = top-node then (ensure-rect-quad rect offs node)
while (not (eq node last-node))
finally (push item (slot-value node 'objects))
(setf (gethash item object-node) node))))
item)
(defmethod quadtree-delete ((qt quadtree) item)
(with-slots (object-node) qt
(let ((node (gethash item object-node)))
(if node
(progn
(deletef (slot-value node 'objects) item)
(remhash item object-node))
(error "Object ~S not in tree" item)))))
(defun map-all-subtrees (function qt-node)
"Call `FUNCTION` on all children of `QT-NODE`, but not `QT-NODE`
itself."
(loop for child across (slot-value qt-node 'quads)
when child do
(funcall function (slot-value child 'objects))
(map-all-subtrees function child)))
(defun map-matching-subtrees (box offs function qt-node)
"Call `FUNCTION` on `QT-NODE` and any children of `QT-NODE` which
`BOX` fits in exactly. Return the last node `BOX` fits in."
(funcall function (slot-value qt-node 'objects))
(if-let ((next-node (next-node box offs qt-node)))
(map-matching-subtrees box offs function next-node)
qt-node))
(defmethod quadtree-select ((qt quadtree) box &optional offs)
"Select all objects which overlap with `BOX`"
(let (overlaps)
(with-slots (top-node object-node key-fun) qt
(flet ((select (objects)
(loop for object in objects
when
(multiple-value-bind (box1 offs1) (funcall key-fun object)
(box-intersect-p box box1 offs offs1))
do (push object overlaps))))
(let ((subtree (map-matching-subtrees box offs #'select top-node)))
(map-all-subtrees #'select subtree))
overlaps))))
| null | https://raw.githubusercontent.com/rpav/spell-and-dagger/0424416ff14a6758d27cc0f84c21bf85df024a7b/src/quadtree.lisp | lisp | - NOTICE - NOTICE - NOTICE -
This was taken from some other code I had laying around for a
quadtree; it was not written during the jam. It may have been
modified though.
- NOTICE - NOTICE - NOTICE -
Quadtree |
(in-package :game)
(defclass qt-node ()
((size :initform nil :initarg :size)
(center-point :initform nil :initarg :at)
(quads :initform (make-array 4 :initial-element nil))
(objects :initform nil)))
(defclass quadtree ()
((top-node)
(max-depth :initform 3 :initarg :max-depth)
(object-node :initform (make-hash-table))
(key-fun :initform #'identity :initarg :key)))
(defmethod initialize-instance :after
((qt quadtree) &key x y size &allow-other-keys)
(with-slots (top-node) qt
(let ((x (or x (/ size 2.0)))
(y (or y (/ size 2.0))))
(setf top-node (make-instance 'qt-node :at (gk-vec2 x y) :size size)))))
(defgeneric quadtree-add (quadtree item)
(:documentation "Add `ITEM` to `QUADTREE`."))
(defgeneric quadtree-delete (quadtree item)
(:documentation "Delete `ITEM` from `QUADTREE`."))
(defgeneric quadtree-select (quadtree box &optional offs)
(:documentation "Select items from `QUADTREE` inside `BOX` with
optional offset, `OFFS`"))
(defun quadtree-contains (quadtree object)
(with-slots (object-node) quadtree
(nth-value 1 (gethash object object-node))))
(defun point-quad (x y qt-node &optional (test #'<))
"Return the quadrant `POINT` would occupy."
(with-slots ((c center-point)) qt-node
(let ((x< (funcall test x (vx c)))
(y< (funcall test y (vy c))))
(cond
((and x< y<) 0)
(y< 1)
(x< 2)
(t 3)))))
(defun rect-quad (rect offs qt-node)
"Return the quadrant `RECT` should occupy, or `NIL` if it does not
fit into any single quad"
(with-box (x0 y0 x1 y1) rect
(when offs
(incf x0 (vx offs))
(incf y0 (vy offs))
(incf x1 (vx offs))
(incf y1 (vy offs)))
(let ((q1 (point-quad x0 y0 qt-node))
(q2 (point-quad x1 y1 qt-node #'<=)))
(if (= q1 q2)
q1
nil))))
(defun qn-pos (qn qt-node)
(with-slots ((at center-point) size) qt-node
(let ((offset (/ size 4.0))
(x (vx at))
(y (vy at)))
(ecase qn
(0 (gk-vec2 (- x offset) (- y offset)))
(1 (gk-vec2 (+ x offset) (- y offset)))
(2 (gk-vec2 (- x offset) (+ y offset)))
(3 (gk-vec2 (+ x offset) (+ y offset)))))))
(defun ensure-rect-quad (rect offs qt-node)
(let ((qn (rect-quad rect offs qt-node)))
(if qn
(with-slots (quads size) qt-node
(or (aref quads qn)
(setf (aref quads qn)
(make-instance 'qt-node
:at (qn-pos qn qt-node)
:size (/ size 2.0)))))
qt-node)))
(defun next-node (rect offs qt-node)
"Return the next (child) node that `RECT` fits into in `QT-NODE`,
or `NIL`"
(with-slots (quads) qt-node
(let ((qn (rect-quad rect offs qt-node)))
(when qn (aref quads qn)))))
(defmethod quadtree-add ((qt quadtree) item)
(with-slots (top-node object-node max-depth key-fun) qt
(multiple-value-bind (rect offs)
(funcall key-fun item)
(loop for depth from 0 below max-depth
as last-node = nil then node
as node = top-node then (ensure-rect-quad rect offs node)
while (not (eq node last-node))
finally (push item (slot-value node 'objects))
(setf (gethash item object-node) node))))
item)
(defmethod quadtree-delete ((qt quadtree) item)
(with-slots (object-node) qt
(let ((node (gethash item object-node)))
(if node
(progn
(deletef (slot-value node 'objects) item)
(remhash item object-node))
(error "Object ~S not in tree" item)))))
(defun map-all-subtrees (function qt-node)
"Call `FUNCTION` on all children of `QT-NODE`, but not `QT-NODE`
itself."
(loop for child across (slot-value qt-node 'quads)
when child do
(funcall function (slot-value child 'objects))
(map-all-subtrees function child)))
(defun map-matching-subtrees (box offs function qt-node)
"Call `FUNCTION` on `QT-NODE` and any children of `QT-NODE` which
`BOX` fits in exactly. Return the last node `BOX` fits in."
(funcall function (slot-value qt-node 'objects))
(if-let ((next-node (next-node box offs qt-node)))
(map-matching-subtrees box offs function next-node)
qt-node))
(defmethod quadtree-select ((qt quadtree) box &optional offs)
"Select all objects which overlap with `BOX`"
(let (overlaps)
(with-slots (top-node object-node key-fun) qt
(flet ((select (objects)
(loop for object in objects
when
(multiple-value-bind (box1 offs1) (funcall key-fun object)
(box-intersect-p box box1 offs offs1))
do (push object overlaps))))
(let ((subtree (map-matching-subtrees box offs #'select top-node)))
(map-all-subtrees #'select subtree))
overlaps))))
|
b041cfded0a2cac11fb56d9450b2c63b49bd8194561c9a03f55891c568e1fbdb | solita/mnt-teet | map_styles.cljs | (ns teet.map.map-styles
(:require [teet.theme.theme-colors :as theme-colors]))
(defn map-controls
([] (map-controls {}))
([{:keys [position right]
:or {position :top
right "80px"}}]
(merge
{:background-color "white"
:max-width "40vw"
:position :absolute
:right right
:z-index 999
:box-shadow "0px 2px 8px rgba(0, 0, 0, 0.25)"}
(case position
:top {:top "25px"}
:bottom {:bottom "25px"
:min-width "250px"
:right "25px"}))))
(defn map-layer-controls []
(map-controls {:position :bottom}))
(defn map-legend-controls []
(map-controls {:position :top}))
(defn map-layer-controls-body []
{:padding "0.5rem"})
(defn add-layer-button []
{:width "100%"})
(defn edit-layer-type []
{:padding "1rem"
:background-color theme-colors/gray
:color theme-colors/white
:height "100%"})
(defn edit-layer-form []
{:padding "1.5rem"
:display :flex
:flex-direction :column
:min-height "40vh"})
(defn edit-layer-type-heading [selected?]
{:cursor :pointer
:margin-bottom "1rem"
:color (if selected?
theme-colors/white
theme-colors/gray-lighter)})
(defn edit-layer-options []
{:flex-grow 1
:padding "1rem"
:margin-bottom "1rem"
:background-color theme-colors/gray-lightest})
(defn map-controls-heading
[]
{:display :flex
:justify-content :space-between
:align-items :center
:padding "1rem"
:margin 0
:background theme-colors/gray-lighter
:border-bottom (str "1px solid " theme-colors/gray-light)})
(defn layer-edit-save-style
[]
{:margin-left "1rem"})
(defn layer-heading-style
[]
{:padding-bottom "1rem"})
(defn layer-edit-button-container-style
[]
{:display :flex
:justify-content :flex-end})
(defn map-control-buttons
[]
{:position :absolute
:display :flex
:flex-direction :row
:top "20px"
:right "25px"
:z-index 999})
(defn map-legend-header []
{:background-color theme-colors/gray-lighter
:font-weight :bold})
(defn map-legend-box []
{:background-color theme-colors/gray-lightest
:margin "0.5rem"
:padding "0.5rem"
:max-height "50vh"
:overflow-y :scroll})
(defn map-control-button
[]
{:opacity "0.9"
:margin-top "5px"
:transition "all 0.2s ease-in-out"})
(defn map-overlay
[]
{}
#_{:right 0
:position :absolute})
(def overlay-background-color "#005E87")
(defn map-overlay-container
[width height arrow-direction background-color]
(let [half-height (when height
(str (/ height 2) "px"))
half-width (when width
(str (/ width 2) "px"))
positioning (cond
(= arrow-direction :right)
{:right half-height}
(= arrow-direction :left)
{:left half-height}
(= arrow-direction :top)
{:left (str "-" half-width)
:top "20px"}
:else
{})]
(merge
(when width
{:width (str width "px")})
(when height
{:height (str height "px")})
{:background-color background-color
:position :absolute
:display :flex
:align-items :center
:top (str "-" half-height)}
positioning)))
(defn map-overlay-arrow [width height arrow-direction]
(let [half-height (when height
(str (/ height 2) "px"))
half-width (when width
(str (/ width 2) "px"))]
(merge
{:width 0
:height 0
:position :absolute}
(case arrow-direction
:right {:border-top (str half-height " solid transparent")
:border-bottom (str half-height " solid transparent")
:border-left (str half-height " solid " overlay-background-color)
:right (str "-" half-height)}
:left {:border-top (str half-height " solid transparent")
:border-bottom (str half-height " solid transparent")
:left (str "-" half-height)
:border-right (str half-height " solid " overlay-background-color)}
:top {:border-left "20px solid transparent"
:border-right "20px solid transparent"
:left (str "calc(" half-width " - 20px)")
:top "-14px"
:border-bottom (str "15px solid " overlay-background-color)}))))
(defn map-overlay-content [single-line?]
(if single-line?
{:display :inline-block
:margin "0 0.5rem"
:white-space :nowrap
:color theme-colors/white}
{:display :block
:margin "0 0.5rem"
:color theme-colors/white}))
| null | https://raw.githubusercontent.com/solita/mnt-teet/7a5124975ce1c7f3e7a7c55fe23257ca3f7b6411/app/frontend/src/cljs/teet/map/map_styles.cljs | clojure | (ns teet.map.map-styles
(:require [teet.theme.theme-colors :as theme-colors]))
(defn map-controls
([] (map-controls {}))
([{:keys [position right]
:or {position :top
right "80px"}}]
(merge
{:background-color "white"
:max-width "40vw"
:position :absolute
:right right
:z-index 999
:box-shadow "0px 2px 8px rgba(0, 0, 0, 0.25)"}
(case position
:top {:top "25px"}
:bottom {:bottom "25px"
:min-width "250px"
:right "25px"}))))
(defn map-layer-controls []
(map-controls {:position :bottom}))
(defn map-legend-controls []
(map-controls {:position :top}))
(defn map-layer-controls-body []
{:padding "0.5rem"})
(defn add-layer-button []
{:width "100%"})
(defn edit-layer-type []
{:padding "1rem"
:background-color theme-colors/gray
:color theme-colors/white
:height "100%"})
(defn edit-layer-form []
{:padding "1.5rem"
:display :flex
:flex-direction :column
:min-height "40vh"})
(defn edit-layer-type-heading [selected?]
{:cursor :pointer
:margin-bottom "1rem"
:color (if selected?
theme-colors/white
theme-colors/gray-lighter)})
(defn edit-layer-options []
{:flex-grow 1
:padding "1rem"
:margin-bottom "1rem"
:background-color theme-colors/gray-lightest})
(defn map-controls-heading
[]
{:display :flex
:justify-content :space-between
:align-items :center
:padding "1rem"
:margin 0
:background theme-colors/gray-lighter
:border-bottom (str "1px solid " theme-colors/gray-light)})
(defn layer-edit-save-style
[]
{:margin-left "1rem"})
(defn layer-heading-style
[]
{:padding-bottom "1rem"})
(defn layer-edit-button-container-style
[]
{:display :flex
:justify-content :flex-end})
(defn map-control-buttons
[]
{:position :absolute
:display :flex
:flex-direction :row
:top "20px"
:right "25px"
:z-index 999})
(defn map-legend-header []
{:background-color theme-colors/gray-lighter
:font-weight :bold})
(defn map-legend-box []
{:background-color theme-colors/gray-lightest
:margin "0.5rem"
:padding "0.5rem"
:max-height "50vh"
:overflow-y :scroll})
(defn map-control-button
[]
{:opacity "0.9"
:margin-top "5px"
:transition "all 0.2s ease-in-out"})
(defn map-overlay
[]
{}
#_{:right 0
:position :absolute})
(def overlay-background-color "#005E87")
(defn map-overlay-container
[width height arrow-direction background-color]
(let [half-height (when height
(str (/ height 2) "px"))
half-width (when width
(str (/ width 2) "px"))
positioning (cond
(= arrow-direction :right)
{:right half-height}
(= arrow-direction :left)
{:left half-height}
(= arrow-direction :top)
{:left (str "-" half-width)
:top "20px"}
:else
{})]
(merge
(when width
{:width (str width "px")})
(when height
{:height (str height "px")})
{:background-color background-color
:position :absolute
:display :flex
:align-items :center
:top (str "-" half-height)}
positioning)))
(defn map-overlay-arrow [width height arrow-direction]
(let [half-height (when height
(str (/ height 2) "px"))
half-width (when width
(str (/ width 2) "px"))]
(merge
{:width 0
:height 0
:position :absolute}
(case arrow-direction
:right {:border-top (str half-height " solid transparent")
:border-bottom (str half-height " solid transparent")
:border-left (str half-height " solid " overlay-background-color)
:right (str "-" half-height)}
:left {:border-top (str half-height " solid transparent")
:border-bottom (str half-height " solid transparent")
:left (str "-" half-height)
:border-right (str half-height " solid " overlay-background-color)}
:top {:border-left "20px solid transparent"
:border-right "20px solid transparent"
:left (str "calc(" half-width " - 20px)")
:top "-14px"
:border-bottom (str "15px solid " overlay-background-color)}))))
(defn map-overlay-content [single-line?]
(if single-line?
{:display :inline-block
:margin "0 0.5rem"
:white-space :nowrap
:color theme-colors/white}
{:display :block
:margin "0 0.5rem"
:color theme-colors/white}))
| |
de8c118fd6edb4fbb554026b0145f02edd04420e53b11f15682d86687a524978 | jwiegley/notes | sequence.hs | module Main where
import Criterion
import Criterion.Main
import Control.Monad hiding (sequence)
import Prelude hiding (sequence)
import System.IO.Unsafe
-- | Evaluate each action in the sequence from left to right,
-- and collect the results.
sequence :: Monad m => [m a] -> m [a]
# INLINE sequence #
sequence ms = go (return []) ms
where
go z [] = z
go z (m:ms) = do
x <- m
xs <- go z ms
return $ (x:xs)
-- -- | Evaluate each action in the sequence from left to right,
-- -- and ignore the results.
sequence _ : : = > [ m a ] - > m ( )
-- {-# INLINE sequence_ #-}
sequence _ ms ( > > ) ( return ( ) ) ms
sequence' :: Monad m => [m a] -> m [a]
{-# INLINE sequence' #-}
sequence' l = go l id
where
go [] dlist = return $ dlist []
go (m:ms) dlist = do x <- m
go ms (dlist . (x:))
foo :: [Int] -> IO [Int]
foo = foldM (\m x -> return (x:m)) (return 0)
stress :: Int -> ([IO Int] -> IO [Int]) -> IO Int
stress cnt f = do
l <- f (replicate cnt $ return 1)
return $ head l
go = (unsafePerformIO .) . stress
main =
defaultMain [
bench "sequence" $ nf (go 1000000) sequence
-- , bench "sequence_" $ nf stress sequence_
, bench "sequence'" $ nf (go 1000000) sequence'
-- , bench "sequence_'" $ nf stress sequence_'
]
| null | https://raw.githubusercontent.com/jwiegley/notes/24574b02bfd869845faa1521854f90e4e8bf5e9a/gists/f719a3d41696d48f6005/misc/sequence.hs | haskell | | Evaluate each action in the sequence from left to right,
and collect the results.
-- | Evaluate each action in the sequence from left to right,
-- and ignore the results.
{-# INLINE sequence_ #-}
# INLINE sequence' #
, bench "sequence_" $ nf stress sequence_
, bench "sequence_'" $ nf stress sequence_' | module Main where
import Criterion
import Criterion.Main
import Control.Monad hiding (sequence)
import Prelude hiding (sequence)
import System.IO.Unsafe
sequence :: Monad m => [m a] -> m [a]
# INLINE sequence #
sequence ms = go (return []) ms
where
go z [] = z
go z (m:ms) = do
x <- m
xs <- go z ms
return $ (x:xs)
sequence _ : : = > [ m a ] - > m ( )
sequence _ ms ( > > ) ( return ( ) ) ms
sequence' :: Monad m => [m a] -> m [a]
sequence' l = go l id
where
go [] dlist = return $ dlist []
go (m:ms) dlist = do x <- m
go ms (dlist . (x:))
foo :: [Int] -> IO [Int]
foo = foldM (\m x -> return (x:m)) (return 0)
stress :: Int -> ([IO Int] -> IO [Int]) -> IO Int
stress cnt f = do
l <- f (replicate cnt $ return 1)
return $ head l
go = (unsafePerformIO .) . stress
main =
defaultMain [
bench "sequence" $ nf (go 1000000) sequence
, bench "sequence'" $ nf (go 1000000) sequence'
]
|
402244133ed96e64f7cf6c862f74392dd7cf53316856c170306c2b9539394100 | 3b/3bgl-misc | demo.lisp | #++(asdf:load-systems '3bgl-misc)
(defpackage #:embed-test
(:use :cl :basecode))
(in-package #:embed-test)
(defparameter *command* "xterm -fn 10x20 -b 0 -into ~d &")
(defclass embed-test (basecode-glop
perspective-projection
basecode-clear
fps-graph basecode-draw-ground-plane
freelook-camera
basecode-exit-on-esc
3bgl-embed::basecode-embed-helper)
((embed :accessor embed :initform nil)
(cube-shader :accessor cube-shader :initform nil)
(point-vbo :accessor point-vbo :initform nil)
(point-vbo-size :accessor point-vbo-size :initform nil))
(:default-initargs :look-at-eye '(3 2 15)))
(defparameter *w* nil)
(defmethod run-main-loop :before ((w embed-test))
(format t "run-main-loop :before embed-test~%")
(setf (embed w)
(glop:open-window
(make-instance '3bgl-embed::glop-embedded :command *command*)
"" 256 256
:double-buffer nil
:parent (glop:x11-window-id (basecode::%glop-window w))))
(glop:show-window (embed w)))
(defmethod basecode-draw ((w embed-test))
(when (embed w)
(glop:dispatch-events (embed w) :blocking nil :on-foo nil))
(setf *w* w)
(gl:enable :depth-test)
(gl:enable :texture-2d)
(3bgl-embed::bind-texture (embed w))
(gl:tex-parameter :texture-2d :texture-min-filter :linear)
(gl:tex-parameter :texture-2d :texture-mag-filter :linear)
(case (3bgl-embed::child-state (embed w))
((nil)
(gl:color 0.2 0.2 0.8 1))
(:mapped
(gl:color 1 1 1 1))
(:unmapped
(gl:color 0.5 0.5 0.5 1))
(t
(gl:color 0.5 0.1 0.1 1)))
(gl:with-pushed-matrix* (:modelview)
(gl:scale 1 1 1)
(gl:rotate 90 1 0 0)
(gl:translate 0 -10 -10)
(gl:disable :cull-face)
(if (cube-shader w)
(let ((p (cube-shader w)))
(gl:use-program p)
(3bgl-shaders::uniform-matrix
p "mvp" (sb-cga:matrix*
(basecode::projection-matrix w)
(basecode::freelook-camera-modelview w)
(sb-cga:rotate-around (sb-cga:vec 1.0 0.0 0.0)
(float (kit.math:deg-to-rad 180) 1.0))
(sb-cga:translate* 0.0 -10.0 0.0)
(sb-cga:scale* 0.02 0.02 0.08)))
(3bgl-shaders::uniform-matrix p "normalMatrix"
(sb-cga:identity-matrix))
(3bgl-shaders::uniformi p "utexture" 0)
(let ((wx (1+ (glop:window-width (embed w))))
(wy (1+ (glop:window-height (embed w)))))
(unless (and (point-vbo w) (equal (list wx wy) (point-vbo-size w)))
(unless (point-vbo w)
(setf (point-vbo w) (car (gl:gen-buffers 1))))
(gl:bind-buffer :array-buffer (point-vbo w))
(static-vectors:with-static-vector (b (* wx wy 2)
:element-type 'single-float)
(loop for y below wy
do (loop for x below wx
do (setf (aref b (+ (* y wx 2) (* x 2))) (float x)
(aref b (+ (* y wx 2) (* x 2) 1)) (float y))))
(gl:enable-vertex-attrib-array 0)
(%gl:buffer-data :array-buffer (* wx wy 2 4)
(static-vectors:static-vector-pointer b)
:static-draw)
(gl:vertex-attrib-pointer 0 2 :float nil 0 (cffi:null-pointer))
(setf (point-vbo-size w) (list wx wy))))
(gl:bind-buffer :array-buffer (point-vbo w))
(%gl:draw-arrays :points 0 (* wx wy))
(gl:bind-buffer :array-buffer 0)
(gl:use-program 0)))
(gl:with-primitives :quads
(gl:tex-coord 0 0)
(gl:vertex -10 1 0)
(gl:tex-coord 1 0)
(gl:vertex 10 1 0)
(gl:tex-coord 1 1)
(gl:vertex 10 1 10)
(gl:tex-coord 0 1)
(gl:vertex -10 1 10))))
(3bgl-embed::release-texture (embed w)))
(defmethod key-up :after ((w embed-test) k)
(case k
(:l2
(format t "changing focus to child~%")
(3bgl-embed::focus-embedded (embed w)))))
(defmethod mouse-down ((w embed-test) b x y)
;; move input focus back to main window on mouse click for now
(3bgl-embed::unfocus-embedded (embed w) w))
(defmethod key-down :after ((w embed-test) k)
(print k)
(case k
(:r
(setf (cube-shader w)
(3bgl-shaders::reload-program (cube-shader w)
'pixel-cube-shader::vertex
'pixel-cube-shader::fragment
:geometry
'pixel-cube-shader::geometry)))
((#\m :m)
)
(:f7
)
(:f10
(3bgl-embed::resize-embedded (embed w) 80 25)
)
(:l1
(3bgl-embed::resize-embedded (embed w) 80 24)
#++(glop-xlib:x-set-geometry (glop::x11-window-display
(basecode::%glop-window w))
(win w)
0 0
(* 80 6) (* 160 13))
)
(:backspace
(basecode::reset-freelook-camera w)
)
))
#++
(basecode-run (make-instance 'embed-test))
| null | https://raw.githubusercontent.com/3b/3bgl-misc/e3bf2781d603feb6b44e5c4ec20f06225648ffd9/xembed/demo.lisp | lisp | move input focus back to main window on mouse click for now | #++(asdf:load-systems '3bgl-misc)
(defpackage #:embed-test
(:use :cl :basecode))
(in-package #:embed-test)
(defparameter *command* "xterm -fn 10x20 -b 0 -into ~d &")
(defclass embed-test (basecode-glop
perspective-projection
basecode-clear
fps-graph basecode-draw-ground-plane
freelook-camera
basecode-exit-on-esc
3bgl-embed::basecode-embed-helper)
((embed :accessor embed :initform nil)
(cube-shader :accessor cube-shader :initform nil)
(point-vbo :accessor point-vbo :initform nil)
(point-vbo-size :accessor point-vbo-size :initform nil))
(:default-initargs :look-at-eye '(3 2 15)))
(defparameter *w* nil)
(defmethod run-main-loop :before ((w embed-test))
(format t "run-main-loop :before embed-test~%")
(setf (embed w)
(glop:open-window
(make-instance '3bgl-embed::glop-embedded :command *command*)
"" 256 256
:double-buffer nil
:parent (glop:x11-window-id (basecode::%glop-window w))))
(glop:show-window (embed w)))
(defmethod basecode-draw ((w embed-test))
(when (embed w)
(glop:dispatch-events (embed w) :blocking nil :on-foo nil))
(setf *w* w)
(gl:enable :depth-test)
(gl:enable :texture-2d)
(3bgl-embed::bind-texture (embed w))
(gl:tex-parameter :texture-2d :texture-min-filter :linear)
(gl:tex-parameter :texture-2d :texture-mag-filter :linear)
(case (3bgl-embed::child-state (embed w))
((nil)
(gl:color 0.2 0.2 0.8 1))
(:mapped
(gl:color 1 1 1 1))
(:unmapped
(gl:color 0.5 0.5 0.5 1))
(t
(gl:color 0.5 0.1 0.1 1)))
(gl:with-pushed-matrix* (:modelview)
(gl:scale 1 1 1)
(gl:rotate 90 1 0 0)
(gl:translate 0 -10 -10)
(gl:disable :cull-face)
(if (cube-shader w)
(let ((p (cube-shader w)))
(gl:use-program p)
(3bgl-shaders::uniform-matrix
p "mvp" (sb-cga:matrix*
(basecode::projection-matrix w)
(basecode::freelook-camera-modelview w)
(sb-cga:rotate-around (sb-cga:vec 1.0 0.0 0.0)
(float (kit.math:deg-to-rad 180) 1.0))
(sb-cga:translate* 0.0 -10.0 0.0)
(sb-cga:scale* 0.02 0.02 0.08)))
(3bgl-shaders::uniform-matrix p "normalMatrix"
(sb-cga:identity-matrix))
(3bgl-shaders::uniformi p "utexture" 0)
(let ((wx (1+ (glop:window-width (embed w))))
(wy (1+ (glop:window-height (embed w)))))
(unless (and (point-vbo w) (equal (list wx wy) (point-vbo-size w)))
(unless (point-vbo w)
(setf (point-vbo w) (car (gl:gen-buffers 1))))
(gl:bind-buffer :array-buffer (point-vbo w))
(static-vectors:with-static-vector (b (* wx wy 2)
:element-type 'single-float)
(loop for y below wy
do (loop for x below wx
do (setf (aref b (+ (* y wx 2) (* x 2))) (float x)
(aref b (+ (* y wx 2) (* x 2) 1)) (float y))))
(gl:enable-vertex-attrib-array 0)
(%gl:buffer-data :array-buffer (* wx wy 2 4)
(static-vectors:static-vector-pointer b)
:static-draw)
(gl:vertex-attrib-pointer 0 2 :float nil 0 (cffi:null-pointer))
(setf (point-vbo-size w) (list wx wy))))
(gl:bind-buffer :array-buffer (point-vbo w))
(%gl:draw-arrays :points 0 (* wx wy))
(gl:bind-buffer :array-buffer 0)
(gl:use-program 0)))
(gl:with-primitives :quads
(gl:tex-coord 0 0)
(gl:vertex -10 1 0)
(gl:tex-coord 1 0)
(gl:vertex 10 1 0)
(gl:tex-coord 1 1)
(gl:vertex 10 1 10)
(gl:tex-coord 0 1)
(gl:vertex -10 1 10))))
(3bgl-embed::release-texture (embed w)))
(defmethod key-up :after ((w embed-test) k)
(case k
(:l2
(format t "changing focus to child~%")
(3bgl-embed::focus-embedded (embed w)))))
(defmethod mouse-down ((w embed-test) b x y)
(3bgl-embed::unfocus-embedded (embed w) w))
(defmethod key-down :after ((w embed-test) k)
(print k)
(case k
(:r
(setf (cube-shader w)
(3bgl-shaders::reload-program (cube-shader w)
'pixel-cube-shader::vertex
'pixel-cube-shader::fragment
:geometry
'pixel-cube-shader::geometry)))
((#\m :m)
)
(:f7
)
(:f10
(3bgl-embed::resize-embedded (embed w) 80 25)
)
(:l1
(3bgl-embed::resize-embedded (embed w) 80 24)
#++(glop-xlib:x-set-geometry (glop::x11-window-display
(basecode::%glop-window w))
(win w)
0 0
(* 80 6) (* 160 13))
)
(:backspace
(basecode::reset-freelook-camera w)
)
))
#++
(basecode-run (make-instance 'embed-test))
|
fd3590097a4cb293ad0c9ff5fef77bbff1e73aa11e7fd09ab09539870d6c161d | janestreet/merlin-jst | parmatch.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Detection of partial matches and unused match cases. *)
open Misc
open Asttypes
open Types
open Typedtree
(*************************************)
Utilities for building patterns
(*************************************)
(* Build a pattern from a description, type, mode and typing environment;
   location, extras and attributes are left empty. *)
let make_pat desc ty mode tenv =
  {pat_desc = desc; pat_loc = Location.none; pat_extra = [];
   pat_type = ty ; pat_mode = mode; pat_env = tenv;
   pat_attributes = [];
  }

(* Wildcard pattern(s), re-exported from [Patterns]. *)
let omega = Patterns.omega
let omegas = Patterns.omegas
let omega_list = Patterns.omega_list

(* Extra row pattern used when reporting non-exhaustiveness of matches over
   extensible types: a variable pattern named "+". *)
let extra_pat =
  make_pat
    (Tpat_var (Ident.create_local "+", mknoloc "+"))
    Ctype.none Value_mode.max_mode Env.empty
(*******************)
(* Coherence check *)
(*******************)
(* For some of the operations we do in this module, we would like (because it
   simplifies matters) to assume that patterns appearing on a given column in a
   pattern matrix are /coherent/ (think "of the same type").
   Unfortunately that is not always true.
   Consider the following (well-typed) example:
   {[
     type _ t = S : string t | U : unit t

     let f (type a) (t1 : a t) (t2 : a t) (a : a) =
       match t1, t2, a with
       | U, _, () -> ()
       | _, S, "" -> ()
   ]}
   Clearly the 3rd column contains incoherent patterns.

   On the example above, most of the algorithms will explore the pattern matrix
   as illustrated by the following tree:

   {v
                                                    S
                                           ------> | "" |
                       U     | S, "" | __/         | () |
               ------> | _, () |      \   not S
   | U, _, () |    __/                 ------> | () |
   | _, S, "" |      \                              S
                      ------> | S, "" |  ---------> | "" |
                       not U
   v}

   where following an edge labelled by a pattern P means "assuming the value I
   am matching on is filtered by [P] on the column I am currently looking at,
   then the following submatrix is still reachable".

   Notice that at any point of that tree, if the first column of a matrix is
   incoherent, then the branch leading to it can only be taken if the scrutinee
   is ill-typed.
   In the example above the only case where we have a matrix with an incoherent
   first column is when we consider [t1, t2, a] to be [U, S, ...]. However such
   a value would be ill-typed, so we can never actually get there.

   Checking the first column at each step of the recursion and making the
   conscious decision of "aborting" the algorithm whenever the first column
   becomes incoherent, allows us to retain the initial assumption in later
   stages of the algorithms.

   ---

   N.B. two patterns can be considered coherent even though they might not be of
   the same type.

   That's in part because we only care about the "head" of patterns and leave
   checking coherence of subpatterns for the next steps of the algorithm:
   ('a', 'b') and (1, ()) will be deemed coherent because they are both a tuples
   of arity 2 (we'll notice at a later stage the incoherence of 'a' and 1).

   But also because it can be hard/costly to determine exactly whether two
   patterns are of the same type or not (eg. in the example above with _ and S,
   but see also the module [Coherence_illustration] in
   testsuite/tests/basic-more/robustmatch.ml).

   For the moment our weak, loosely-syntactic, coherence check seems to be
   enough and we leave it to each user to consider (and document!) what happens
   when an "incoherence" is not detected by this check.
*)
(* Given the first column of a simplified matrix, this function first looks for
   a "discriminating" pattern on that column (i.e. a non-omega one) and then
   check that every other head pattern in the column is coherent with that one.
*)
(* [all_coherent column] checks that all the head patterns of [column] agree
   on a common "shape" (same constructor family, same constant kind, same
   tuple arity, same record type size, ...); wildcards are coherent with
   anything. *)
let all_coherent column =
  let open Patterns.Head in
  (* Pairwise coherence of two non-omega heads. *)
  let coherent_heads hp1 hp2 =
    match hp1.pat_desc, hp2.pat_desc with
    | Construct c, Construct c' ->
        (* same number of constant and non-constant constructors in the
           variant type they belong to *)
        c.cstr_consts = c'.cstr_consts
        && c.cstr_nonconsts = c'.cstr_nonconsts
    | Constant c1, Constant c2 -> begin
        (* constants are coherent iff they are of the same constant kind *)
        match c1, c2 with
        | Const_char _, Const_char _
        | Const_int _, Const_int _
        | Const_int32 _, Const_int32 _
        | Const_int64 _, Const_int64 _
        | Const_nativeint _, Const_nativeint _
        | Const_float _, Const_float _
        | Const_string _, Const_string _ -> true
        | ( Const_char _
          | Const_int _
          | Const_int32 _
          | Const_int64 _
          | Const_nativeint _
          | Const_float _
          | Const_string _), _ -> false
      end
    | Tuple l1, Tuple l2 -> l1 = l2
    | Record (lbl1 :: _), Record (lbl2 :: _) ->
        (* same total number of fields in the record type *)
        Array.length lbl1.lbl_all = Array.length lbl2.lbl_all
    | Any, _
    | _, Any
    | Record [], Record []
    | Variant _, Variant _
    | Array _, Array _
    | Lazy, Lazy -> true
    | _, _ -> false
  in
  (* find a non-omega head to compare everything else against *)
  match
    List.find
      (function
        | { pat_desc = Any } -> false
        | _ -> true)
      column
  with
  | exception Not_found ->
      (* only omegas on the column: the column is coherent. *)
      true
  | discr_pat ->
      List.for_all (coherent_heads discr_pat) column
(* Project the head pattern out of every row of a simplified matrix. *)
let first_column simplified_matrix =
  List.map
    (fun (simplified_head, _rest_of_row) -> fst simplified_head)
    simplified_matrix
(***********************)
(* Compatibility check *)
(***********************)
(* Patterns p and q compatible means:
   there exists value V that matches both, However....

   The case of extension types is dubious, as constructor rebind permits
   that different constructors are the same (and are thus compatible).

   Compilation must take this into account, consider:

   type t = ..
   type t += A|B
   type t += C=A

   let f x y = match x,y with
   | true,A  -> '1'
   | _,C     -> '2'
   | false,A -> '3'
   | _,_     -> '_'

   As C is bound to A the value of f false A is '2' (and not '3' as it would
   be in the absence of rebinding).

   Not considering rebinding, patterns "false,A" and "_,C" are incompatible
   and the compiler can swap the second and third clause, resulting in the
   (more efficiently compiled) matching

   match x,y with
   | true,A  -> '1'
   | false,A -> '3'
   | _,C     -> '2'
   | _,_     -> '_'

   This is not correct: when C is bound to A, "f false A" returns '2' (not '3')

   However, diagnostics do not take constructor rebinding into account.
   Notice, that due to module abstraction constructor rebinding is hidden.

   module X : sig type t = .. type t += A|B end = struct
     type t = ..
     type t += A
     type t += B=A
   end

   open X

   let f x = match x with
   | A -> '1'
   | B -> '2'
   | _ -> '_'

   The second clause above will NOT (and cannot) be flagged as useless.

   Finally, there are two compatibility functions:
     compat p q      ---> 'syntactic compatibility, used for diagnostics.
     may_compat p q  ---> a safe approximation of possible compat,
                          for compilation
*)
(* A polymorphic-variant tag is "absent" when the row says the tag cannot
   occur in values of this type. *)
let is_absent tag row = row_field_repr (get_row_field tag !row) = Rabsent

(* Same check, applied to a [Variant] pattern head. *)
let is_absent_pat d =
  match d.pat_desc with
  | Patterns.Head.Variant { tag; cstr_row; _ } -> is_absent tag cstr_row
  | _ -> false
(* Total order on constants.
   Floats are stored as their source string, so they are compared by numeric
   value; strings are compared ignoring location/delimiter metadata; every
   other combination falls back to structural comparison. *)
let const_compare x y =
  match x,y with
  | Const_float f1, Const_float f2 ->
      Stdlib.compare (float_of_string f1) (float_of_string f2)
  | Const_string (s1, _, _), Const_string (s2, _, _) ->
      String.compare s1 s2
  | (Const_int _
    |Const_char _
    |Const_string (_, _, _)
    |Const_float _
    |Const_int32 _
    |Const_int64 _
    |Const_nativeint _
    ), _ -> Stdlib.compare x y
(* Turn two record-pattern field lists into two argument lists of equal
   length, inserting omega for a field matched on only one side. *)
let records_args l1 l2 =
  (* Invariant: fields are already sorted by Typecore.type_label_a_list *)
  let rec combine r1 r2 l1 l2 = match l1,l2 with
  | [],[] -> List.rev r1, List.rev r2
  | [],(_,_,p2)::rem2 -> combine (omega::r1) (p2::r2) [] rem2
  | (_,_,p1)::rem1,[] -> combine (p1::r1) (omega::r2) rem1 []
  | (_,lbl1,p1)::rem1, ( _,lbl2,p2)::rem2 ->
      if lbl1.lbl_pos < lbl2.lbl_pos then
        combine (p1::r1) (omega::r2) rem1 l2
      else if lbl1.lbl_pos > lbl2.lbl_pos then
        combine (omega::r1) (p2::r2) l1 rem2
      else (* same label on both sides *)
        combine (p1::r1) (p2::r2) rem1 rem2 in
  combine [] [] l1 l2
(* Pattern compatibility, parameterized by the notion of equality used for
   constructors (see the comment above on constructor rebinding). *)
module Compat
    (Constr:sig
       val equal :
         Types.constructor_description ->
         Types.constructor_description ->
         bool
     end) = struct

  let rec compat p q = match p.pat_desc,q.pat_desc with
  (* Variables match any value *)
  | ((Tpat_any|Tpat_var _),_)
  | (_,(Tpat_any|Tpat_var _)) -> true
  (* Structural induction *)
  | Tpat_alias (p,_,_),_ -> compat p q
  | _,Tpat_alias (q,_,_) -> compat p q
  | Tpat_or (p1,p2,_),_ ->
      (compat p1 q || compat p2 q)
  | _,Tpat_or (q1,q2,_) ->
      (compat p q1 || compat p q2)
  (* Constructors, with special case for extension *)
  | Tpat_construct (_, c1, ps1, _), Tpat_construct (_, c2, ps2, _) ->
      Constr.equal c1 c2 && compats ps1 ps2
  (* More standard stuff *)
  | Tpat_variant(l1,op1, _), Tpat_variant(l2,op2,_) ->
      l1=l2 && ocompat op1 op2
  | Tpat_constant c1, Tpat_constant c2 ->
      const_compare c1 c2 = 0
  | Tpat_tuple ps, Tpat_tuple qs -> compats ps qs
  | Tpat_lazy p, Tpat_lazy q -> compat p q
  | Tpat_record (l1,_),Tpat_record (l2,_) ->
      let ps,qs = records_args l1 l2 in
      compats ps qs
  | Tpat_array ps, Tpat_array qs ->
      List.length ps = List.length qs &&
      compats ps qs
  | _,_ -> false

  (* compatibility of optional variant arguments *)
  and ocompat op oq = match op,oq with
  | None,None -> true
  | Some p,Some q -> compat p q
  | (None,Some _)|(Some _,None) -> false

  (* pointwise compatibility of two pattern lists (same length assumed) *)
  and compats ps qs = match ps,qs with
  | [], [] -> true
  | p::ps, q::qs -> compat p q && compats ps qs
  | _,_ -> false
end
(* Syntactic compatibility: constructors are considered equal when their
   tags are equal (used for diagnostics). *)
module SyntacticCompat =
  Compat
    (struct
      let equal c1 c2 = Types.equal_tag c1.cstr_tag c2.cstr_tag
    end)

let compat = SyntacticCompat.compat
and compats = SyntacticCompat.compats
(* Due to (potential) rebinding, two extension constructors
   of the same arity type may equal *)
exception Empty (* Empty pattern *)
(****************************************)
(* Utilities for retrieving type paths *)
(****************************************)
(* May need a clean copy, cf. PR#4745 *)
let clean_copy ty =
  if get_level ty = Btype.generic_level then ty
  else Subst.type_expr Subst.identity ty

(* Type path of (the expansion of) the constructed type [ty].
   Callers must ensure [ty] expands to a [Tconstr]. *)
let get_constructor_type_path ty tenv =
  let ty = Ctype.expand_head tenv (clean_copy ty) in
  match get_desc ty with
  | Tconstr (path,_,_) -> path
  | _ -> assert false
(****************************)
(* Utilities for matching *)
(****************************)
(* Check top matching *)
(* Does the row head [h] match the discriminating head [d]?
   Note the asymmetry: a wildcard on the row side ([_, Any]) matches any
   discriminating head. *)
let simple_match d h =
  let open Patterns.Head in
  match d.pat_desc, h.pat_desc with
  | Construct c1, Construct c2 ->
      Types.equal_tag c1.cstr_tag c2.cstr_tag
  | Variant { tag = t1; _ }, Variant { tag = t2 } ->
      t1 = t2
  | Constant c1, Constant c2 -> const_compare c1 c2 = 0
  | Lazy, Lazy -> true
  | Record _, Record _ -> true
  | Tuple len1, Tuple len2
  | Array len1, Array len2 -> len1 = len2
  | _, Any -> true
  | _, _ -> false
(* extract record fields as a whole *)
(* Labels of a record pattern head; a wildcard head stands for the empty
   field list. Fatal on any other head. *)
let record_arg ph =
  let open Patterns.Head in
  match ph.pat_desc with
  | Any -> []
  | Record args -> args
  | _ -> fatal_error "Parmatch.as_record"
let extract_fields lbls arg =
let get_field pos arg =
match List.find (fun (lbl,_) -> pos = lbl.lbl_pos) arg with
| _, p -> p
| exception Not_found -> omega
in
List.map (fun lbl -> get_field lbl.lbl_pos arg) lbls
(* Build argument list when p2 >= p1, where p1 is a simple pattern *)
(* Build argument list when p2 >= p1, where p1 is a simple pattern.
   When the row head is a wildcard, synthesize as many wildcard arguments
   as [discr]'s arity requires. *)
let simple_match_args discr head args =
  let open Patterns.Head in
  match head.pat_desc with
  | Constant _ -> []
  | Construct _
  | Variant _
  | Tuple _
  | Array _
  | Lazy -> args
  | Record lbls ->
      (* align the row's fields on the fields of the discriminating head *)
      extract_fields (record_arg discr) (List.combine lbls args)
  | Any ->
      begin match discr.pat_desc with
      | Construct cstr -> Patterns.omegas cstr.cstr_arity
      | Variant { has_arg = true }
      | Lazy -> [Patterns.omega]
      | Record lbls -> omega_list lbls
      | Array len
      | Tuple len -> Patterns.omegas len
      | Variant { has_arg = false }
      | Any
      | Constant _ -> []
      end
(* Consider a pattern matrix whose first column has been simplified to contain
   only _ or a head constructor
     | p1, r1...
     | p2, r2...
     | p3, r3...
     | ...

   We build a normalized /discriminating/ pattern from a pattern [q] by folding
   over the first column of the matrix, "refining" [q] as we go:

   - when we encounter a row starting with [Tuple] or [Lazy] then we
     can stop and return that head, as we cannot refine any further. Indeed,
     these constructors are alone in their signature, so they will subsume
     whatever other head we might find, as well as the head we're threading
     along.

   - when we find a [Record] then it is a bit more involved: it is also alone
     in its signature, however it might only be matching a subset of the
     record fields. We use these fields to refine our accumulator and keep
     going as another row might match on different fields.

   - rows starting with a wildcard do not bring any information, so we ignore
     them and keep going

   - if we encounter anything else (i.e. any other constructor), then we just
     stop and return our accumulator.
*)
(* Normalized discriminating pattern for [q] against the simplified matrix
   [pss] (see the explanation above). *)
let discr_pat q pss =
  let open Patterns.Head in
  let rec refine_pat acc = function
    | [] -> acc
    | ((head, _), _) :: rows ->
      match head.pat_desc with
      | Any -> refine_pat acc rows
      | Tuple _ | Lazy -> head
      | Record lbls ->
        (* N.B. we could make this case "simpler" by refining the record case
           using [all_record_args].
           In which case we wouldn't need to fold over the first column for
           records.
           However it makes the witness we generate for the exhaustivity
           warning less pretty. *)
        (* accumulate the union of the matched fields, preserving order *)
        let fields =
          List.fold_right (fun lbl r ->
            if List.exists (fun l -> l.lbl_pos = lbl.lbl_pos) r then
              r
            else
              lbl :: r
          ) lbls (record_arg acc)
        in
        let d = { head with pat_desc = Record fields } in
        refine_pat d rows
      | _ -> acc
  in
  let q, _ = deconstruct q in
  match q.pat_desc with
  (* short-circuiting: clearly if we have anything other than [Record] or
     [Any] to start with, we're not going to be able refine at all. So
     there's no point going over the matrix. *)
  | Any | Record _ -> refine_pat q pss
  | _ -> q
(*
In case a matching value is found, set actual arguments
of the matching pattern.
*)
(* Take as many leading elements from [r] as there are elements in the
   template list [xs]; return those elements together with the remaining
   suffix of [r]. Fatal if [r] is too short. *)
let rec read_args xs r =
  match xs with
  | [] -> [], r
  | _ :: template_rest -> (
      match r with
      | arg :: rest ->
          let args, leftover = read_args template_rest rest in
          arg :: args, leftover
      | [] -> fatal_error "Parmatch.read_args")
(* Re-install actual arguments inside a matched pattern: given a pattern [q]
   and a list [r] whose prefix holds the sub-patterns standing for [q]'s
   arguments, rebuild [q] with those arguments and cons it onto the unused
   suffix of [r]. With [~erase_mutable], sub-patterns under mutable record
   fields are replaced by wildcards. *)
let do_set_args ~erase_mutable q r = match q with
| {pat_desc = Tpat_tuple omegas} ->
    let args,rest = read_args omegas r in
    make_pat (Tpat_tuple args) q.pat_type q.pat_mode q.pat_env::rest
| {pat_desc = Tpat_record (omegas,closed)} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_record
         (List.map2 (fun (lid, lbl,_) arg ->
           if
             erase_mutable &&
             (match lbl.lbl_mut with
             | Mutable -> true | Immutable -> false)
           then
             lid, lbl, omega
           else
             lid, lbl, arg)
            omegas args, closed))
      q.pat_type q.pat_mode q.pat_env::
    rest
| {pat_desc = Tpat_construct (lid, c, omegas, _)} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_construct (lid, c, args, None))
      q.pat_type q.pat_mode q.pat_env::
    rest
| {pat_desc = Tpat_variant (l, omega, row)} ->
    (* a variant carries zero or one argument *)
    let arg, rest =
      match omega, r with
        Some _, a::r -> Some a, r
      | None, r -> None, r
      | _ -> assert false
    in
    make_pat
      (Tpat_variant (l, arg, row)) q.pat_type q.pat_mode q.pat_env::
    rest
| {pat_desc = Tpat_lazy _omega} ->
    begin match r with
      arg::rest ->
        make_pat (Tpat_lazy arg) q.pat_type q.pat_mode q.pat_env::rest
    | _ -> fatal_error "Parmatch.do_set_args (lazy)"
    end
| {pat_desc = Tpat_array omegas} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_array args) q.pat_type q.pat_mode q.pat_env::
    rest
| {pat_desc=Tpat_constant _|Tpat_any} ->
    q::r (* case any is used in matching.ml *)
| _ -> fatal_error "Parmatch.set_args"
(* Specializations of [do_set_args]; the [_erase_mutable] variant replaces
   sub-patterns under mutable record fields with wildcards. *)
let set_args q r = do_set_args ~erase_mutable:false q r
and set_args_erase_mutable q r = do_set_args ~erase_mutable:true q r
(* Given a matrix of non-empty rows
     p1 :: r1...
     p2 :: r2...
     p3 :: r3...

   Simplify the first column [p1 p2 p3] by splitting all or-patterns.
   The result is a list of pairs
     ((pattern head, arguments), rest of row)

   For example,
     x :: r1
     (Some _) as y :: r2
     (None as x) as y :: r3
     (Some x | (None as x)) :: r4
   becomes
     (( _ , [ ] ), r1)
     (( Some, [_] ), r2)
     (( None, [ ] ), r3)
     (( Some, [x] ), r4)
     (( None, [ ] ), r4)
*)
(* Simplify one head pattern [p] (of a row with rest [ps]): strip variables
   and aliases, split or-patterns into one entry per alternative, and feed
   each resulting (head, args) to [add_column] in CPS style, [k] being the
   already-accumulated rows. *)
let simplify_head_pat ~add_column p ps k =
  let rec simplify_head_pat p ps k =
    match Patterns.General.(view p |> strip_vars).pat_desc with
    | `Or (p1,p2,_) -> simplify_head_pat p1 ps (simplify_head_pat p2 ps k)
    | #Patterns.Simple.view as view ->
        add_column (Patterns.Head.deconstruct { p with pat_desc = view }) ps k
  in simplify_head_pat p ps k
(* Simplify the first column of a whole matrix (see the comment above);
   every row must be non-empty. *)
let rec simplify_first_col = function
  | [] -> []
  | [] :: _ -> assert false (* the rows are non-empty! *)
  | (p::ps) :: rows ->
      let add_column p ps k = (p, ps) :: k in
      simplify_head_pat ~add_column p ps (simplify_first_col rows)
(* Builds the specialized matrix of [pss] according to the discriminating
   pattern head [d].
   See section 3.1 of http://moscova.inria.fr/~maranget/papers/warn/warn.pdf

   NOTES:
   - we are polymorphic on the type of matrices we work on, in particular a
     row might not simply be a [pattern list]. That's why we have the
     [extend_row] parameter.
*)
(* Keep only the rows of [pss] whose head matches [discr], replacing the
   head column by one column per constructor argument (via [extend_row]).
   Row order is preserved. *)
let build_specialized_submatrix ~extend_row discr pss =
  let specialize_row ((head, args), ps) =
    if simple_match discr head then
      Some (extend_row (simple_match_args discr head args) ps)
    else
      None
  in
  List.filter_map specialize_row pss
The " default " and " specialized " matrices of a given matrix .
See section 3.1 of /~maranget/papers/warn/warn.pdf .
See section 3.1 of /~maranget/papers/warn/warn.pdf .
*)
(* Result of splitting a matrix: one specialized sub-matrix per head
   constructor of the first column, plus the default matrix made of the
   rows starting with a wildcard. *)
type 'matrix specialized_matrices = {
  default : 'matrix;
  constrs : (Patterns.Head.t * 'matrix) list;
}
(* Consider a pattern matrix whose first column has been simplified
   to contain only _ or a head constructor
     | p1, r1...
     | p2, r2...
     | p3, r3...
     | ...

   We split this matrix into a list of /specialized/ sub-matrices, one for
   each head constructor appearing in the first column. For each row whose
   first column starts with a head constructor, remove this head
   column, prepend one column for each argument of the constructor,
   and add the resulting row in the sub-matrix corresponding to this
   head constructor.

   Rows whose left column is omega (the Any pattern _) may match any
   head constructor, so they are added to all sub-matrices.

   In the case where all the rows in the matrix have an omega on their first
   column, then there is only one /specialized/ sub-matrix, formed of all
   these omega rows.
   This matrix is also called the /default/ matrix.

   See the documentation of [build_specialized_submatrix] for an explanation
   of the [extend_row] parameter.
*)
(* Split [rows] (first column already simplified) into its specialized
   sub-matrices and default matrix, grouped by head constructor. *)
let build_specialized_submatrices ~extend_row discr rows =
  let extend_group discr p args r rs =
    let r = extend_row (simple_match_args discr p args) r in
    (discr, r :: rs)
  in

  (* insert a row of head [p] and rest [r] into the right group

     Note: with this implementation, the order of the groups
     is the order of their first row in the source order.
     This is a nice property to get exhaustivity counter-examples
     in source order. *)
  let rec insert_constr head args r = function
    | [] ->
      (* if no group matched this row, it has a head constructor that
         was never seen before; add a new sub-matrix for this head *)
      [extend_group head head args r []]
    | (q0,rs) as bd::env ->
      if simple_match q0 head
      then extend_group q0 head args r rs :: env
      else bd :: insert_constr head args r env
  in

  (* insert a row of head omega into all groups *)
  let insert_omega r env =
    List.map (fun (q0,rs) -> extend_group q0 Patterns.Head.omega [] r rs) env
  in

  let rec form_groups constr_groups omega_tails = function
    | [] -> (constr_groups, omega_tails)
    | ((head, args), tail) :: rest ->
        match head.pat_desc with
        | Patterns.Head.Any ->
            (* note that calling insert_omega here would be wrong
               as some groups may not have been formed yet, if the
               first row with this head pattern comes after in the list *)
            form_groups constr_groups (tail :: omega_tails) rest
        | _ ->
            form_groups
              (insert_constr head args tail constr_groups) omega_tails rest
  in

  let constr_groups, omega_tails =
    let initial_constr_group =
      let open Patterns.Head in
      match discr.pat_desc with
      | Record _ | Tuple _ | Lazy ->
        (* [discr] comes from [discr_pat], and in this case subsumes any of
           the patterns we could find on the first column of [rows]. So it is
           better to use it for our initial environment than any of the
           normalized pattern we might obtain from the first column. *)
        [discr,[]]
      | _ -> []
    in
    form_groups initial_constr_group [] rows
  in

  (* groups are accumulated in reverse order;
     we restore the order of rows in the source code *)
  let default = List.rev omega_tails in
  let constrs =
    List.fold_right insert_omega omega_tails constr_groups
    |> List.map (fun (discr, rs) -> (discr, List.rev rs))
  in
  { default; constrs; }
(* Variant related functions *)
(* [set_last a (first, row)] replaces the last element of [row] by [a]
   (erased to a plain pattern); when [row] is empty, [a]'s deconstructed
   head replaces [first] instead. *)
let set_last a =
  let rec loop = function
    | [] -> assert false
    | [_] -> [Patterns.General.erase a]
    | x::l -> x :: loop l
  in
  function
  | (_, []) -> (Patterns.Head.deconstruct a, [])
  | (first, row) -> (first, loop row)
(* mark constructor lines for failure when they are incomplete *)
(* Mark each constructor row by replacing its last element with a dummy
   constant pattern [0]; rows starting with a wildcard are kept as-is. *)
let mark_partial =
  (* dummy marker pattern *)
  let zero =
    make_pat (`Constant (Const_int 0)) Ctype.none Value_mode.max_mode Env.empty
  in
  List.map (fun ((hp, _), _ as ps) ->
    match hp.pat_desc with
    | Patterns.Head.Any -> ps
    | _ -> set_last zero ps
  )
(* Close the polymorphic-variant row [row]: mark tags that are not
   explicitly matched as absent, and unify the row variable with a closed
   row so no further tags can be added. *)
let close_variant env row =
  let Row {fields; more; name=orig_name; closed; fixed} = row_repr row in
  let name, static =
    List.fold_left
      (fun (nm, static) (_tag,f) ->
        match row_field_repr f with
        | Reither(_, _, false) ->
            (* fixed=false means that this tag is not explicitly matched *)
            link_row_field_ext ~inside:f rf_absent;
            (None, static)
        | Reither (_, _, true) -> (nm, false)
        | Rabsent | Rpresent _ -> (nm, static))
      (orig_name, true) fields in
  if not closed || name != orig_name then begin
    let more' = if static then Btype.newgenty Tnil else Btype.newgenvar () in
    (* this unification cannot fail *)
    Ctype.unify env more
      (Btype.newgenty
         (Tvariant
            (create_row ~fields:[] ~more:more'
               ~closed:true ~name ~fixed)))
  end
(* Check whether the first column of env makes up a complete signature or
   not. We work on the discriminating pattern heads of each sub-matrix: they
   are not omega/Any.
*)
(* Is the set of discriminating heads in [env] a complete signature for
   its type? [closing] allows treating an open variant row as closed. *)
let full_match closing env = match env with
| [] -> false
| (discr, _) :: _ ->
  let open Patterns.Head in
  match discr.pat_desc with
  | Any -> assert false
  (* extension constructors can never be exhaustively enumerated *)
  | Construct { cstr_tag = Cstr_extension _ ; _ } -> false
  | Construct c -> List.length env = c.cstr_consts + c.cstr_nonconsts
  | Variant { type_row; _ } ->
      let fields =
        List.map
          (fun (d, _) ->
            match d.pat_desc with
            | Variant { tag } -> tag
            | _ -> assert false)
          env
      in
      let row = type_row () in
      if closing && not (Btype.has_fixed_explanation row) then
        (* closing=true, we are considering the variant as closed *)
        List.for_all
          (fun (tag,f) ->
            match row_field_repr f with
              Rabsent | Reither(_, _, false) -> true
            | Reither (_, _, true)
            (* m=true, do not discard matched tags, rather warn *)
            | Rpresent _ -> List.mem tag fields)
          (row_fields row)
      else
        row_closed row &&
        List.for_all
          (fun (tag,f) ->
            row_field_repr f = Rabsent || List.mem tag fields)
          (row_fields row)
  | Constant Const_char _ ->
      (* chars are complete when all 256 values are covered *)
      List.length env = 256
  | Constant _
  | Array _ -> false
  (* these heads are alone in their signature *)
  | Tuple _
  | Record _
  | Lazy -> true
(* Written as a non-fragile matching, PR#7451 originated from a fragile matching
below. *)
(* Should the match on this column be flagged as fragile with respect to
   the type path [ext]? True only for ordinary (non-extension) constructors
   whose type path is exactly [ext]. *)
let should_extend ext env = match ext with
| None -> false
| Some ext -> begin match env with
  | [] -> assert false
  | (p,_)::_ ->
      let open Patterns.Head in
      begin match p.pat_desc with
      | Construct {cstr_tag=(Cstr_constant _|Cstr_block _|Cstr_unboxed)} ->
          let path = get_constructor_type_path p.pat_type p.pat_env in
          Path.same path ext
      | Construct {cstr_tag=(Cstr_extension _)} -> false
      | Constant _ | Tuple _ | Variant _ | Record _ | Array _ | Lazy -> false
      | Any -> assert false
      end
end
(* build a pattern from a constructor description *)
(* Build a pattern for constructor [cstr], applied to wildcard arguments;
   type, mode and environment are borrowed from [ex_pat]. *)
let pat_of_constr ex_pat cstr =
  {ex_pat with pat_desc =
   Tpat_construct (mknoloc (Longident.Lident cstr.cstr_name),
                   cstr, omegas cstr.cstr_arity, None)}
(* Or-pattern of [x] and [y], typed like [x]. *)
let orify x y =
  make_pat (Tpat_or (x, y, None)) x.pat_type x.pat_mode x.pat_env

(* Right-nested or-pattern of a non-empty list of patterns. *)
let rec orify_many = function
| [] -> assert false
| [x] -> x
| x :: xs -> orify x (orify_many xs)
(* build an or-pattern from a constructor list *)
(* Build an or-pattern covering every constructor in [cstrs].
   Raises [Empty] when [cstrs] is empty. *)
let pat_of_constrs ex_pat cstrs =
  let ex_pat = Patterns.Head.to_omega_pattern ex_pat in
  if cstrs = [] then raise Empty else
  orify_many (List.map (pat_of_constr ex_pat) cstrs)
(* Enumerate head patterns for type [ty]: one constructor pattern per
   constructor of a small (or GADT-only, or [always]) variant, a single
   closed record pattern for records, a tuple of wildcards for tuples,
   and a plain wildcard otherwise. *)
let pats_of_type ?(always=false) env ty mode =
  let ty' = Ctype.expand_head env ty in
  match get_desc ty' with
  | Tconstr (path, _, _) ->
      begin match Env.find_type_descrs path env with
      | exception Not_found -> [omega]
      | Type_variant (cstrs,_) when always || List.length cstrs <= 1 ||
        (* Only explode when all constructors are GADTs *)
        List.for_all (fun cd -> cd.cstr_generalized) cstrs ->
          List.map (pat_of_constr (make_pat Tpat_any ty mode env)) cstrs
      | Type_record (labels, _) ->
          let fields =
            List.map (fun ld ->
              mknoloc (Longident.Lident ld.lbl_name), ld, omega)
              labels
          in
          [make_pat (Tpat_record (fields, Closed)) ty mode env]
      | Type_variant _ | Type_abstract | Type_open -> [omega]
      end
  | Ttuple tl ->
      [make_pat (Tpat_tuple (omegas (List.length tl))) ty mode env]
  | _ -> [omega]
(* Constructors of the variant type [ty], expanding manifest types as
   needed; fatal if [ty] is not (an alias of) a variant type. *)
let rec get_variant_constructors env ty =
  match get_desc ty with
  | Tconstr (path,_,_) -> begin
      try match Env.find_type path env, Env.find_type_descrs path env with
      | _, Type_variant (cstrs,_) -> cstrs
      | {type_manifest = Some _}, _ ->
          get_variant_constructors env
            (Ctype.expand_head_once env (clean_copy ty))
      | _ -> fatal_error "Parmatch.get_variant_constructors"
      with Not_found ->
        fatal_error "Parmatch.get_variant_constructors"
  end
  | _ -> fatal_error "Parmatch.get_variant_constructors"
(* Sets of constructors, compared by constructor name. *)
module ConstructorSet = Set.Make(struct
  type t = constructor_description
  let compare c1 c2 = String.compare c1.cstr_name c2.cstr_name
end)
(* Constructors of [constr]'s type that do not appear in [used_constrs];
   used to complement the given constructors when building witnesses. *)
let complete_constrs constr used_constrs =
  let c = constr.pat_desc in
  let constrs = get_variant_constructors constr.pat_env c.cstr_res in
  let used_constrs = ConstructorSet.of_list used_constrs in
  let others =
    List.filter
      (fun cnstr -> not (ConstructorSet.mem cnstr used_constrs))
      constrs in
  (* Split constructors to put constant ones first *)
  let const, nonconst =
    List.partition (fun cnstr -> cnstr.cstr_arity = 0) others in
  const @ nonconst
(* Build a pattern not matched by any row of [env] (whose first column is
   made of constructor heads), by or-ing the unused constructors of the
   type.  Extension constructors cannot be enumerated, so they fall back
   to [extra_pat]. *)
let build_other_constrs env p =
  let open Patterns.Head in
  match p.pat_desc with
  | Construct ({ cstr_tag = Cstr_extension _ }) -> extra_pat
  | Construct
      ({ cstr_tag = Cstr_constant _ | Cstr_block _ | Cstr_unboxed } as c) ->
      let constr = { p with pat_desc = c } in
      (* Extract the constructor description of a head pattern. *)
      let get_constr q =
        match q.pat_desc with
        | Construct c -> c
        | _ -> fatal_error "Parmatch.get_constr" in
      let used_constrs = List.map (fun (p,_) -> get_constr p) env in
      pat_of_constrs p (complete_constrs constr used_constrs)
  | _ -> extra_pat
(* Auxiliary for build_other *)
(* Starting at [first] and stepping with [next], find a constant that does
   not occur (through [proj]) in the first column of [env], then wrap it
   into a pattern with [make]. *)
let build_other_constant proj make first next p env =
  let all = List.map (fun (p, _) -> proj p.pat_desc) env in
  let rec try_const i =
    if List.mem i all
    then try_const (next i)
    else make_pat (make i) p.pat_type p.pat_mode p.pat_env
  in try_const first
(*
  Builds a pattern that is incompatible with all patterns in
  the first column of env
*)
(* Placeholder tag name used in counter-examples for private row types. *)
let some_private_tag = "<some private tag>"
(* Build a pattern that is incompatible with every pattern heading the
   rows of [env].  Dispatches on the head of the first row: constructors,
   polymorphic variants, each kind of constant, and arrays each get a
   dedicated search; anything else yields [omega]. *)
let build_other ext env =
  match env with
  | [] -> omega
  | (d, _) :: _ ->
      let open Patterns.Head in
      match d.pat_desc with
      | Construct { cstr_tag = Cstr_extension _ } ->
          (* let c = {c with cstr_name = "*extension*"} in *) (* PR#7330 *)
          make_pat
            (Tpat_var (Ident.create_local "*extension*",
                       {txt="*extension*"; loc = d.pat_loc}))
            Ctype.none Value_mode.max_mode Env.empty
      | Construct _ ->
          begin match ext with
          | Some ext ->
              if Path.same ext (get_constructor_type_path d.pat_type d.pat_env)
              then
                extra_pat
              else
                build_other_constrs env d
          | _ ->
              build_other_constrs env d
          end
      | Variant { cstr_row; type_row } ->
          (* Tags already used in the first column. *)
          let tags =
            List.map
              (fun (d, _) ->
                 match d.pat_desc with
                 | Variant { tag } -> tag
                 | _ -> assert false)
              env
          in
          let make_other_pat tag const =
            let arg = if const then None else Some Patterns.omega in
            make_pat (Tpat_variant(tag, arg, cstr_row))
              d.pat_type d.pat_mode d.pat_env
          in
          let row = type_row () in
          begin match
            List.fold_left
              (fun others (tag,f) ->
                 if List.mem tag tags then others else
                 match row_field_repr f with
                   Rabsent (* | Reither _ *) -> others
                 (* This one is called after erasing pattern info *)
                 | Reither (c, _, _) -> make_other_pat tag c :: others
                 | Rpresent arg -> make_other_pat tag (arg = None) :: others)
              [] (row_fields row)
          with
            [] ->
              (* No unused tag in the row: invent a fresh one. *)
              let tag =
                if Btype.has_fixed_explanation row then some_private_tag else
                let rec mktag tag =
                  if List.mem tag tags then mktag (tag ^ "'") else tag in
                mktag "AnyOtherTag"
              in make_other_pat tag true
          | pat::other_pats ->
              (* Or together all the unused tags. *)
              List.fold_left
                (fun p_res pat ->
                   make_pat (Tpat_or (pat, p_res, None))
                     d.pat_type d.pat_mode d.pat_env)
                pat other_pats
          end
      | Constant Const_char _ ->
          let all_chars =
            List.map
              (fun (p,_) -> match p.pat_desc with
                 | Constant (Const_char c) -> c
                 | _ -> assert false)
              env
          in
          let rec find_other i imax =
            if i > imax then raise Not_found
            else
              let ci = Char.chr i in
              if List.mem ci all_chars then
                find_other (i+1) imax
              else
                make_pat (Tpat_constant (Const_char ci))
                  d.pat_type d.pat_mode d.pat_env
          in
          (* Prefer a printable character for the counter-example. *)
          let rec try_chars = function
            | [] -> Patterns.omega
            | (c1,c2) :: rest ->
                try
                  find_other (Char.code c1) (Char.code c2)
                with
                | Not_found -> try_chars rest
          in
          try_chars
            [ 'a', 'z' ; 'A', 'Z' ; '0', '9' ;
              ' ', '~' ; Char.chr 0 , Char.chr 255]
      | Constant Const_int _ ->
          build_other_constant
            (function Constant(Const_int i) -> i | _ -> assert false)
            (function i -> Tpat_constant(Const_int i))
            0 succ d env
      | Constant Const_int32 _ ->
          build_other_constant
            (function Constant(Const_int32 i) -> i | _ -> assert false)
            (function i -> Tpat_constant(Const_int32 i))
            0l Int32.succ d env
      | Constant Const_int64 _ ->
          build_other_constant
            (function Constant(Const_int64 i) -> i | _ -> assert false)
            (function i -> Tpat_constant(Const_int64 i))
            0L Int64.succ d env
      | Constant Const_nativeint _ ->
          build_other_constant
            (function Constant(Const_nativeint i) -> i | _ -> assert false)
            (function i -> Tpat_constant(Const_nativeint i))
            0n Nativeint.succ d env
      | Constant Const_string _ ->
          (* Search over string lengths: "", "*", "**", ... *)
          build_other_constant
            (function Constant(Const_string (s, _, _)) -> String.length s
                    | _ -> assert false)
            (function i ->
               Tpat_constant
                 (Const_string(String.make i '*',Location.none,None)))
            0 succ d env
      | Constant Const_float _ ->
          build_other_constant
            (function Constant(Const_float f) -> float_of_string f
                    | _ -> assert false)
            (function f -> Tpat_constant(Const_float (string_of_float f)))
            0.0 (fun f -> f +. 1.0) d env
      | Array _ ->
          (* Search over array lengths not present in the column. *)
          let all_lengths =
            List.map
              (fun (p,_) -> match p.pat_desc with
                 | Array len -> len
                 | _ -> assert false)
              env in
          let rec try_arrays l =
            if List.mem l all_lengths then try_arrays (l+1)
            else
              make_pat (Tpat_array (omegas l))
                d.pat_type d.pat_mode d.pat_env in
          try_arrays 0
      | _ -> Patterns.omega
(* Does the pattern [p] match at least one value?  Only absent polymorphic
   variant rows can make a (sub)pattern matchless. *)
let rec has_instance p = match p.pat_desc with
  | Tpat_variant (l,_,r) when is_absent l r -> false
  | Tpat_any | Tpat_var _ | Tpat_constant _ | Tpat_variant (_,None,_) -> true
  | Tpat_alias (p,_,_) | Tpat_variant (_,Some p,_) -> has_instance p
  | Tpat_or (p1,p2,_) -> has_instance p1 || has_instance p2
  | Tpat_construct (_,_,ps,_) | Tpat_tuple ps | Tpat_array ps ->
      has_instances ps
  | Tpat_record (lps,_) -> has_instances (List.map (fun (_,_,x) -> x) lps)
  | Tpat_lazy p
    -> has_instance p

(* Do all patterns in the list have an instance? *)
and has_instances = function
  | [] -> true
  | q::rem -> has_instance q && has_instances rem
(*
  Core function :
  Is the last row of pattern matrix pss + qs satisfiable ?
  That is :
    Does there exists at least one value vector, es such that :
     1- for all ps in pss ps # es (ps and es are not compatible)
     2- qs <= es                  (es matches qs)

   ---

   In two places in the following function, we check the coherence of the first
   column of (pss + qs).
   If it is incoherent, then we exit early saying that (pss + qs) is not
   satisfiable (which is equivalent to saying "oh, we shouldn't have considered
   that branch, no good result came come from here").

   But what happens if we have a coherent but ill-typed column?
   - we might end up returning [false], which is equivalent to noticing the
   incompatibility: clearly this is fine.
   - if we end up returning [true] then we're saying that [qs] is useful while
   it is not. This is sad but not the end of the world, we're just allowing dead
   code to survive.
*)
(* Is there a value vector matched by [qs] but by no row of [pss]? *)
let rec satisfiable pss qs = match pss with
| [] -> has_instances qs
| _  ->
    match qs with
    | [] -> false
    | q::qs ->
        match Patterns.General.(view q |> strip_vars).pat_desc with
        | `Or(q1,q2,_) ->
            (* An or-pattern is satisfiable iff one of its branches is. *)
            satisfiable pss (q1::qs) || satisfiable pss (q2::qs)
        | `Any ->
            let pss = simplify_first_col pss in
            if not (all_coherent (first_column pss)) then
              (* Incoherent first column: bail out, see comment above. *)
              false
            else begin
              let { default; constrs } =
                let q0 = discr_pat Patterns.Simple.omega pss in
                build_specialized_submatrices ~extend_row:(@) q0 pss in
              if not (full_match false constrs) then
                (* The heads do not form a full signature: some value
                   escapes them all, so only the default matrix matters. *)
                satisfiable default qs
              else
                List.exists
                  (fun (p,pss) ->
                     not (is_absent_pat p) &&
                     satisfiable pss
                       (simple_match_args p Patterns.Head.omega [] @ qs))
                  constrs
            end
        | `Variant (l,_,r) when is_absent l r -> false
        | #Patterns.Simple.view as view ->
            let q = { q with pat_desc = view } in
            let pss = simplify_first_col pss in
            let hq, qargs = Patterns.Head.deconstruct q in
            if not (all_coherent (hq :: first_column pss)) then
              false
            else begin
              (* Specialize both the matrix and the row by the head of [q]. *)
              let q0 = discr_pat q pss in
              satisfiable (build_specialized_submatrix ~extend_row:(@) q0 pss)
                (simple_match_args q0 hq qargs @ qs)
            end
(* While [satisfiable] only checks whether the last row of [pss + qs] is
   satisfiable, this function returns the (possibly empty) list of vectors [es]
   which verify:
     1- for all ps in pss, ps # es (ps and es are not compatible)
     2- qs <= es                   (es matches qs)

   This is done to enable GADT handling

   For considerations regarding the coherence check, see the comment on
   [satisfiable] above. *)
(* Enumerate the value vectors matched by [qs] and by no row of [pss]. *)
let rec list_satisfying_vectors pss qs =
  match pss with
  | [] -> if has_instances qs then [qs] else []
  | _  ->
      match qs with
      | [] -> []
      | q :: qs ->
          match Patterns.General.(view q |> strip_vars).pat_desc with
          | `Or(q1,q2,_) ->
              list_satisfying_vectors pss (q1::qs) @
              list_satisfying_vectors pss (q2::qs)
          | `Any ->
              let pss = simplify_first_col pss in
              if not (all_coherent (first_column pss)) then
                []
              else begin
                let q0 = discr_pat Patterns.Simple.omega pss in
                (* Prepend [p] to every witness of the default matrix. *)
                let wild default_matrix p =
                  List.map (fun qs -> p::qs)
                    (list_satisfying_vectors default_matrix qs)
                in
                match build_specialized_submatrices ~extend_row:(@) q0 pss with
                | { default; constrs = [] } ->
                    (* first column of pss is made of variables only *)
                    wild default omega
                | { default; constrs = ((p,_)::_ as constrs) } ->
                    let for_constrs () =
                      List.flatten (
                        List.map (fun (p,pss) ->
                          if is_absent_pat p then
                            []
                          else
                            let witnesses =
                              list_satisfying_vectors pss
                                (simple_match_args p Patterns.Head.omega [] @ qs)
                            in
                            let p = Patterns.Head.to_omega_pattern p in
                            List.map (set_args p) witnesses
                        ) constrs
                      )
                    in
                    if full_match false constrs then for_constrs () else
                    begin match p.pat_desc with
                    | Construct _ ->
                        (* activate this code
                           for checking non-gadt constructors *)
                        wild default (build_other_constrs constrs p)
                        @ for_constrs ()
                    | _ ->
                        wild default Patterns.omega
                    end
              end
          | `Variant (l, _, r) when is_absent l r -> []
          | #Patterns.Simple.view as view ->
              let q = { q with pat_desc = view } in
              let hq, qargs = Patterns.Head.deconstruct q in
              let pss = simplify_first_col pss in
              if not (all_coherent (hq :: first_column pss)) then
                []
              else begin
                let q0 = discr_pat q pss in
                List.map (set_args (Patterns.Head.to_omega_pattern q0))
                  (list_satisfying_vectors
                     (build_specialized_submatrix ~extend_row:(@) q0 pss)
                     (simple_match_args q0 hq qargs @ qs))
              end
(******************************************)
(* Look for a row that matches some value *)
(******************************************)
(*
Useful for seeing if the example of
non-matched value can indeed be matched
(by a guarded clause)
*)
(* Does some row of [pss] match the value vector [qs]? *)
let rec do_match pss qs = match qs with
| [] ->
    (* Empty vector: it is matched iff some row is empty too. *)
    begin match pss with
    | []::_ -> true
    | _ -> false
    end
| q::qs -> match Patterns.General.(view q |> strip_vars).pat_desc with
  | `Or (q1,q2,_) ->
      do_match pss (q1::qs) || do_match pss (q2::qs)
  | `Any ->
      (* Only rows whose first pattern is a wildcard can match. *)
      let rec remove_first_column = function
        | (_::ps)::rem -> ps::remove_first_column rem
        | _ -> []
      in
      do_match (remove_first_column pss) qs
  | #Patterns.Simple.view as view ->
      let q = { q with pat_desc = view } in
      let q0, qargs = Patterns.Head.deconstruct q in
      let pss = simplify_first_col pss in
      (* [pss] will (or won't) match [q0 :: qs] regardless of the coherence
         of its first column. *)
      do_match
        (build_specialized_submatrix ~extend_row:(@) q0 pss)
        (qargs @ qs)
(*
let print_pat pat =
  let rec string_of_pat pat =
    match pat.pat_desc with
      Tpat_var _ -> "v"
    | Tpat_any -> "_"
    | Tpat_alias (p, x) -> Printf.sprintf "(%s) as ?"  (string_of_pat p)
    | Tpat_constant n -> "0"
    | Tpat_construct (_, lid, _) ->
      Printf.sprintf "%s" (String.concat "." (Longident.flatten lid.txt))
    | Tpat_lazy p ->
      Printf.sprintf "(lazy %s)" (string_of_pat p)
    | Tpat_or (p1,p2,_) ->
      Printf.sprintf "(%s | %s)" (string_of_pat p1) (string_of_pat p2)
    | Tpat_tuple list ->
      Printf.sprintf "(%s)" (String.concat "," (List.map string_of_pat list))
    | Tpat_variant (_, _, _) -> "variant"
    | Tpat_record (_, _) -> "record"
    | Tpat_array _ -> "array"
  in
  Printf.fprintf stderr "PAT[%s]\n%!" (string_of_pat pat)
*)
(*
Now another satisfiable function that additionally
supplies an example of a matching value.
This function should be called for exhaustiveness check only.
*)
(* Lazily enumerate witness vectors of width [n] that no row of [pss]
   matches.  [ext], when set, restricts which constructor signatures may
   be considered extendable (fragility checking). *)
let rec exhaust (ext:Path.t option) pss n = match pss with
| []    -> Seq.return (omegas n)
| []::_ -> Seq.empty
| [(p :: ps)] -> exhaust_single_row ext p ps n
| pss  -> specialize_and_exhaust ext pss n

and exhaust_single_row ext p ps n =
  (* Shortcut: in the single-row case p :: ps we know that all
     counter-examples are either of the form
       counter-example(p) :: omegas
     or
       p :: counter-examples(ps)
     This is very interesting in the case where p contains
     or-patterns, as the non-shortcut path below would do a separate
     search for each constructor of the or-pattern, which can lead to
     an exponential blowup on examples such as
       | (A|B), (A|B), (A|B), (A|B) -> foo
     Note that this shortcut also applies to examples such as
       | A, A, A, A -> foo | (A|B), (A|B), (A|B), (A|B) -> bar
     thanks to the [get_mins] preprocessing step which will drop the
     first row (subsumed by the second). Code with this shape does
     occur naturally when people want to avoid fragile pattern
     matches: if A and B are the only two constructors, this is the
     best way to make a non-fragile distinction between "all As" and
     "at least one B". *)
  List.to_seq [Some p; None] |> Seq.flat_map
    (function
      | Some p ->
          let sub_witnesses = exhaust ext [ps] (n - 1) in
          Seq.map (fun row -> p :: row) sub_witnesses
      | None ->
          (* note: calling [exhaust] recursively of p would
             result in an infinite loop in the case n=1 *)
          let p_witnesses = specialize_and_exhaust ext [[p]] 1 in
          Seq.map (fun p_row -> p_row @ omegas (n - 1)) p_witnesses
    )

and specialize_and_exhaust ext pss n =
  let pss = simplify_first_col pss in
  if not (all_coherent (first_column pss)) then
    (* We're considering an ill-typed branch, we won't actually be able to
       produce a well typed value taking that branch. *)
    Seq.empty
  else begin
    (* Assuming the first column is ill-typed but considered coherent, we
       might end up producing an ill-typed witness of non-exhaustivity
       corresponding to the current branch.
       If [exhaust] has been called by [do_check_partial], then the witnesses
       produced get typechecked and the ill-typed ones are discarded.
       If [exhaust] has been called by [do_check_fragile], then it is possible
       we might fail to warn the user that the matching is fragile. See for
       example testsuite/tests/warnings/w04_failure.ml. *)
    let q0 = discr_pat Patterns.Simple.omega pss in
    match build_specialized_submatrices ~extend_row:(@) q0 pss with
    | { default; constrs = [] } ->
        (* first column of pss is made of variables only *)
        let sub_witnesses = exhaust ext default (n-1) in
        let q0 = Patterns.Head.to_omega_pattern q0 in
        Seq.map (fun row -> q0::row) sub_witnesses
    | { default; constrs } ->
        let try_non_omega (p,pss) =
          if is_absent_pat p then
            Seq.empty
          else
            let sub_witnesses =
              exhaust
                ext pss
                (List.length (simple_match_args p Patterns.Head.omega [])
                 + n - 1)
            in
            let p = Patterns.Head.to_omega_pattern p in
            Seq.map (set_args p) sub_witnesses
        in
        let try_omega () =
          if full_match false constrs && not (should_extend ext constrs) then
            Seq.empty
          else
            let sub_witnesses = exhaust ext default (n-1) in
            match build_other ext constrs with
            | exception Empty ->
                (* cannot occur, since constructors don't make
                   a full signature *)
                fatal_error "Parmatch.exhaust"
            | p ->
                Seq.map (fun tail -> p :: tail) sub_witnesses
        in
        (* Lazily compute witnesses for all constructor submatrices
           (Some constr_mat) then the wildcard/default submatrix (None).
           Note that the call to [try_omega ()] is delayed to after
           all constructor matrices have been traversed. *)
        List.map (fun constr_mat -> Some constr_mat) constrs @ [None]
        |> List.to_seq
        |> Seq.flat_map
          (function
            | Some constr_mat -> try_non_omega constr_mat
            | None -> try_omega ())
  end
(* Exported wrapper: unwrap each witness row, asserting it is a singleton
   (callers presumably always pass [n = 1] — see the assertion). *)
let exhaust ext pss n =
  exhaust ext pss n
  |> Seq.map (function
      | [x] -> x
      | _ -> assert false)
(*
Another exhaustiveness check, enforcing variant typing.
Note that it does not check exact exhaustiveness, but whether a
matching could be made exhaustive by closing all variant types.
When this is true of all other columns, the current column is left
open (even if it means that the whole matching is not exhaustive as
a result).
When this is false for the matrix minus the current column, and the
current column is composed of variant tags, we close the variant
(even if it doesn't help in making the matching exhaustive).
*)
(* See the comment above: returns whether the matrix could be made
   exhaustive by closing variant types; closes variants ([close_variant])
   as a side effect when [tdefs] provides an environment. *)
let rec pressure_variants tdefs = function
  | [] -> false
  | []::_ -> true
  | pss ->
      let pss = simplify_first_col pss in
      if not (all_coherent (first_column pss)) then
        true
      else begin
        let q0 = discr_pat Patterns.Simple.omega pss in
        match build_specialized_submatrices ~extend_row:(@) q0 pss with
        | { default; constrs = [] } -> pressure_variants tdefs default
        | { default; constrs } ->
            let rec try_non_omega = function
              | (_p,pss) :: rem ->
                  let ok = pressure_variants tdefs pss in
                  (* The order below matters : we want [pressure_variants] to
                     be called on all the specialized submatrices because we
                     might close some variant in any of them regardless of
                     whether [ok] is true for [pss] or not *)
                  try_non_omega rem && ok
              | [] -> true
            in
            if full_match (tdefs=None) constrs then
              try_non_omega constrs
            else if tdefs = None then
              pressure_variants None default
            else
              let full = full_match true constrs in
              let ok =
                if full then
                  try_non_omega constrs
                else begin
                  let { constrs = partial_constrs; _ } =
                    build_specialized_submatrices ~extend_row:(@) q0
                      (mark_partial pss)
                  in
                  try_non_omega partial_constrs
                end
              in
              begin match constrs, tdefs with
              | [], _
              | _, None -> ()
              | (d, _) :: _, Some env ->
                  match d.pat_desc with
                  | Variant { type_row; _ } ->
                      let row = type_row () in
                      if Btype.has_fixed_explanation row
                      || pressure_variants None default then ()
                      else close_variant env row
                  | _ -> ()
              end;
              ok
      end
(* Yet another satisfiable function *)
(* This time every_satisfiable pss qs checks the
   utility of every expansion of qs.
   Expansion means expansion of or-patterns inside qs
*)
(* Outcome of checking the usefulness of every expansion of a pattern. *)
type answer =
  | Used                       (* Useful pattern *)
  | Unused                     (* Useless pattern *)
  | Upartial of Typedtree.pattern list (* Mixed, with list of useless ones *)
(* this row type enable column processing inside the matrix
- left -> elements not to be processed,
- right -> elements to be processed
*)
(* [no_ors] and [ors] hold already-processed patterns (most recent first;
   see [push_no_or]/[push_or] below); [active] holds those still to do. *)
type usefulness_row =
  {no_ors : pattern list ; ors : pattern list ; active : pattern list}
(*
let pretty_row {ors=ors ; no_ors=no_ors; active=active} =
pretty_line ors ; prerr_string " *" ;
pretty_line no_ors ; prerr_string " *" ;
pretty_line active
let pretty_rows rs =
prerr_endline "begin matrix" ;
List.iter
(fun r ->
pretty_row r ;
prerr_endline "")
rs ;
prerr_endline "end matrix"
*)
(* Initial build *)
(* A fresh row: nothing processed yet, every pattern still active. *)
let make_row ps = { active = ps; ors = []; no_ors = [] }
let make_rows pss = List.map make_row pss
(* Useful to detect and expand or pats inside as pats *)
(* Is [p] a variable or wildcard (after stripping aliases)? *)
let is_var p = match Patterns.General.(view p |> strip_vars).pat_desc with
  | `Any -> true
  | _    -> false
(* Does every row start (in its active part) with a variable/wildcard? *)
let is_var_column rs =
  let starts_with_var r =
    match r.active with
    | first :: _ -> is_var first
    | [] -> assert false
  in
  List.for_all starts_with_var rs
(* Standard or-args for left-to-right matching *)
(* Return the two branches of an or-pattern, looking through aliases;
   fails on anything else. *)
let rec or_args p = match p.pat_desc with
  | Tpat_or (p1,p2,_) -> p1,p2
  | Tpat_alias (p,_,_)  -> or_args p
  | _                 -> assert false
(* Just remove current column *)
(* Discard the first active pattern of a row; fails on an exhausted row. *)
let remove r =
  match r.active with
  | _ :: tl -> { r with active = tl }
  | [] -> assert false

let remove_column rs = List.map remove rs
(* Current column has been processed *)
(* Move the first active pattern of a row onto the [no_ors] (resp. [ors])
   stack; fails on an exhausted row. *)
let push_no_or r = match r.active with
  | p::rem -> { r with no_ors = p::r.no_ors ; active=rem}
  | [] -> assert false

let push_or r = match r.active with
  | p::rem -> { r with ors = p::r.ors ; active=rem}
  | [] -> assert false

let push_or_column rs = List.map push_or rs
and push_no_or_column rs = List.map push_no_or rs
(* Turn the matrix into (head, rest-of-row) pairs for the first active
   column, letting [simplify_head_pat] normalise each row head. *)
let rec simplify_first_usefulness_col = function
  | [] -> []
  | row :: rows ->
    match row.active with
    | [] -> assert false (* the rows are non-empty! *)
    | p :: ps ->
      let add_column p ps k =
        (p, { row with active = ps }) :: k in
      simplify_head_pat ~add_column p ps
        (simplify_first_usefulness_col rows)
(* Back to normal matrices *)
(* Recover a plain row/matrix from the processed [no_ors] parts. *)
let make_vector { no_ors; _ } = List.rev no_ors
let make_matrix rs = List.map make_vector rs
(* Standard union on answers: [Unused] is absorbing, [Used] is neutral. *)
let union_res r1 r2 = match r1, r2 with
  | (Unused,_)
  | (_, Unused) -> Unused
  | Used,_    -> r2
  | _, Used   -> r1
  | Upartial u1, Upartial u2 -> Upartial (u1@u2)
(* propose or pats for expansion *)
(* propose or pats for expansion *)
(* For each or-pattern stacked in [qs.ors], build a row whose only active
   pattern is that or-pattern; every other pattern of [qs] is moved to
   [no_ors]. *)
let extract_elements qs =
  let rec do_rec seen = function
    | [] -> []
    | q::rem ->
      {no_ors= List.rev_append seen rem @ qs.no_ors ;
       ors=[] ;
       active = [q]}::
      do_rec (q::seen) rem in
  do_rec [] qs.ors
(* idem for matrices *)
(* Transpose a non-empty matrix of rows into its columns.  Note that each
   resulting column lists the entries from the LAST row first (elements
   are consed while folding forwards); fails on an empty matrix. *)
let transpose rs =
  match rs with
  | [] -> assert false
  | first :: rest ->
      let seed = List.map (fun x -> [x]) first in
      List.fold_left
        (fun cols row -> List.map2 (fun col x -> x :: col) cols row)
        seed rest
(* idem for matrices *)
(* One column of rows per or-pattern position in [qs]; all columns are
   empty when the matrix itself is empty. *)
let extract_columns pss qs = match pss with
| [] -> List.map (fun _ -> []) qs.ors
| _  ->
    let rows = List.map extract_elements pss in
    transpose rows
(* Core function
   The idea is to first look for or patterns (recursive case), then
   check or-patterns argument usefulness (terminal case)
*)
(* Usefulness check of every or-pattern expansion of [qs] against [pss].
   Or-patterns are first accumulated (recursive case), then each one is
   expanded and checked via [every_both] (terminal case).
   NOTE: the [| _ ->] arm of the match on [qs.ors] had been lost during an
   automated comment-stripping pass (only its trailing comment remained),
   leaving the match syntactically broken; it is restored here. *)
let rec every_satisfiables pss qs = match qs.active with
| [] ->
    (* qs is now partitionned, check usefulness *)
    begin match qs.ors with
    | [] -> (* no or-patterns *)
        if satisfiable (make_matrix pss) (make_vector qs) then
          Used
        else
          Unused
    | _  -> (* n or-patterns -> 2n expansions *)
        List.fold_right2
          (fun pss qs r -> match r with
          | Unused -> Unused
          | _ ->
              match qs.active with
              | [q] ->
                  let q1,q2 = or_args q in
                  let r_loc = every_both pss qs q1 q2 in
                  union_res r r_loc
              | _ -> assert false)
          (extract_columns pss qs) (extract_elements qs)
          Used
    end
| q::rem ->
    begin match Patterns.General.(view q |> strip_vars).pat_desc with
    | `Any ->
        if is_var_column pss then
          (* forget about ``all-variable'' columns now *)
          every_satisfiables (remove_column pss) (remove qs)
        else
          (* otherwise this is direct food for satisfiable *)
          every_satisfiables (push_no_or_column pss) (push_no_or qs)
    | `Or (q1,q2,_) ->
        if
          q1.pat_loc.Location.loc_ghost &&
          q2.pat_loc.Location.loc_ghost
        then
          (* syntactically generated or-pats should not be expanded *)
          every_satisfiables (push_no_or_column pss) (push_no_or qs)
        else
          (* this is a real or-pattern *)
          every_satisfiables (push_or_column pss) (push_or qs)
    | `Variant (l,_,r) when is_absent l r -> (* Ah Jacques... *)
        Unused
    | #Patterns.Simple.view as view ->
        let q = { q with pat_desc = view } in
        (* standard case, filter matrix *)
        let pss = simplify_first_usefulness_col pss in
        let hq, args = Patterns.Head.deconstruct q in
        (* The handling of incoherent matrices is kept in line with
           [satisfiable] *)
        if not (all_coherent (hq :: first_column pss)) then
          Unused
        else begin
          let q0 = discr_pat q pss in
          every_satisfiables
            (build_specialized_submatrix q0 pss
               ~extend_row:(fun ps r -> { r with active = ps @ r.active }))
            {qs with active=simple_match_args q0 hq args @ rem}
        end
    end

(* This function ``every_both'' performs the usefulness check
   of or-pat q1|q2.
   The trick is to call every_satisfied twice with
   current active columns restricted to q1 and q2,
   That way,
   - others orpats in qs.ors will not get expanded.
   - all matching work performed on qs.no_ors is not performed again. *)
and every_both pss qs q1 q2 =
  let qs1 = {qs with active=[q1]}
  and qs2 = {qs with active=[q2]} in
  let r1 = every_satisfiables pss qs1
  and r2 = every_satisfiables (if compat q1 q2 then qs1::pss else pss) qs2 in
  match r1 with
  | Unused ->
      begin match r2 with
      | Unused -> Unused
      | Used   -> Upartial [q1]
      | Upartial u2 -> Upartial (q1::u2)
      end
  | Used ->
      begin match r2 with
      | Unused -> Upartial [q2]
      | _      -> r2
      end
  | Upartial u1 ->
      begin match r2 with
      | Unused -> Upartial (u1@[q2])
      | Used   -> r1
      | Upartial u2 -> Upartial (u1 @ u2)
      end
(* le_pat p q means, forall V, V matches q implies V matches p *)
(* Is [p] less precise than [q], i.e. does every value matched by [q]
   match [p] too?  Structural cases are handled directly; anything else
   falls back to a [satisfiable] enumeration. *)
let rec le_pat p q =
  match (p.pat_desc, q.pat_desc) with
  | (Tpat_var _|Tpat_any),_ -> true
  | Tpat_alias(p,_,_), _ -> le_pat p q
  | _, Tpat_alias(q,_,_) -> le_pat p q
  | Tpat_constant(c1), Tpat_constant(c2) -> const_compare c1 c2 = 0
  | Tpat_construct(_,c1,ps,_), Tpat_construct(_,c2,qs,_) ->
      Types.equal_tag c1.cstr_tag c2.cstr_tag && le_pats ps qs
  | Tpat_variant(l1,Some p1,_), Tpat_variant(l2,Some p2,_) ->
      (l1 = l2 && le_pat p1 p2)
  | Tpat_variant(l1,None,_r1), Tpat_variant(l2,None,_) ->
      l1 = l2
  | Tpat_variant(_,_,_), Tpat_variant(_,_,_) -> false
  | Tpat_tuple(ps), Tpat_tuple(qs) -> le_pats ps qs
  | Tpat_lazy p, Tpat_lazy q -> le_pat p q
  | Tpat_record (l1,_), Tpat_record (l2,_) ->
      let ps,qs = records_args l1 l2 in
      le_pats ps qs
  | Tpat_array(ps), Tpat_array(qs) ->
      List.length ps = List.length qs && le_pats ps qs
  (* In all other cases, enumeration is performed *)
  | _,_ -> not (satisfiable [[p]] [q])

(* Pointwise extension of [le_pat] to rows. *)
and le_pats ps qs =
  match ps,qs with
    p::ps, q::qs -> le_pat p q && le_pats ps qs
  | _, _         -> true
(* [get_mins le ps] keeps the elements of [ps] that are minimal for the
   preorder [le]: an element is dropped when some element later in the
   list is below it.  A second pass re-filters the survivors. *)
let get_mins le ps =
  let rec keep_minimal acc = function
    | [] -> acc
    | p :: rest ->
        if List.exists (fun p0 -> le p0 p) rest
        then keep_minimal acc rest
        else keep_minimal (p :: acc) rest
  in
  keep_minimal [] (keep_minimal [] ps)
(*
  lub p q is a pattern that matches all values matched by p and q
  may raise Empty, when p and q are not compatible
*)
(* [lub p q] is a pattern matching exactly the values matched by both [p]
   and [q]; raises [Empty] when the two patterns are incompatible.
   NOTE: the [| _,Tpat_or ...] arm had been lost during an automated
   comment-stripping pass (only its trailing "commutative" comment
   remained at this spot), which made [lub] fall through to [raise Empty]
   whenever only the right argument was an or-pattern; it is restored. *)
let rec lub p q = match p.pat_desc,q.pat_desc with
| Tpat_alias (p,_,_),_      -> lub p q
| _,Tpat_alias (q,_,_)      -> lub p q
| (Tpat_any|Tpat_var _),_   -> q
| _,(Tpat_any|Tpat_var _)   -> p
| Tpat_or (p1,p2,_),_       -> orlub p1 p2 q
| _,Tpat_or (q1,q2,_)       -> orlub q1 q2 p (* Thanks god, lub is commutative *)
| Tpat_constant c1, Tpat_constant c2 when const_compare c1 c2 = 0 -> p
| Tpat_tuple ps, Tpat_tuple qs ->
    let rs = lubs ps qs in
    make_pat (Tpat_tuple rs) p.pat_type p.pat_mode p.pat_env
| Tpat_lazy p, Tpat_lazy q ->
    let r = lub p q in
    make_pat (Tpat_lazy r) p.pat_type p.pat_mode p.pat_env
| Tpat_construct (lid,c1,ps1,_), Tpat_construct (_,c2,ps2,_)
  when Types.equal_tag c1.cstr_tag c2.cstr_tag ->
    let rs = lubs ps1 ps2 in
    make_pat (Tpat_construct (lid, c1, rs, None))
      p.pat_type p.pat_mode p.pat_env
| Tpat_variant(l1,Some p1,row), Tpat_variant(l2,Some p2,_)
  when l1=l2 ->
    let r=lub p1 p2 in
    make_pat (Tpat_variant (l1,Some r,row))
      p.pat_type p.pat_mode p.pat_env
| Tpat_variant (l1,None,_row), Tpat_variant(l2,None,_)
  when l1 = l2 -> p
| Tpat_record (l1,closed),Tpat_record (l2,_) ->
    let rs = record_lubs l1 l2 in
    make_pat (Tpat_record (rs, closed))
      p.pat_type p.pat_mode p.pat_env
| Tpat_array ps, Tpat_array qs
  when List.length ps = List.length qs ->
    let rs = lubs ps qs in
    make_pat (Tpat_array rs)
      p.pat_type p.pat_mode p.pat_env
| _,_ ->
    raise Empty

(* lub of an or-pattern against [q]: combine the per-branch lubs,
   dropping branches that are incompatible with [q]. *)
and orlub p1 p2 q =
  try
    let r1 = lub p1 q in
    try
      {q with pat_desc=(Tpat_or (r1,lub p2 q,None))}
    with
    | Empty -> r1
  with
  | Empty -> lub p2 q

(* Merge two record-field lists sorted by label position, taking the lub
   of patterns on common labels. *)
and record_lubs l1 l2 =
  let rec lub_rec l1 l2 = match l1,l2 with
  | [],_ -> l2
  | _,[] -> l1
  | (lid1, lbl1,p1)::rem1, (lid2, lbl2,p2)::rem2 ->
      if lbl1.lbl_pos < lbl2.lbl_pos then
        (lid1, lbl1,p1)::lub_rec rem1 l2
      else if lbl2.lbl_pos < lbl1.lbl_pos  then
        (lid2, lbl2,p2)::lub_rec l1 rem2
      else
        (lid1, lbl1,lub p1 p2)::lub_rec rem1 rem2 in
  lub_rec l1 l2

(* Pointwise lub of two rows (stops at the shorter one). *)
and lubs ps qs = match ps,qs with
| p::ps, q::qs -> lub p q :: lubs ps qs
| _,_ -> []
(******************************)
(* Exported variant closing *)
(******************************)
(* Apply pressure to variants *)
(* Exported entry point: pressure variants in each pattern of [patl],
   pairing each with [omega] so the rows are two columns wide. *)
let pressure_variants tdefs patl =
  let rows = List.map (fun p -> [p; omega]) patl in
  ignore (pressure_variants (Some tdefs) rows)
(* Like [pressure_variants] above, but each pattern is first split into
   its value part and its exception part, and the two resulting matrices
   are pressured separately. *)
let pressure_variants_in_computation_pattern tdefs patl =
  let add_row pss p_opt =
    match p_opt with
    | None -> pss
    | Some p -> p :: pss
  in
  let val_pss, exn_pss =
    List.fold_right (fun pat (vpss, epss)->
        let (vp, ep) = split_pattern pat in
        add_row vpss vp, add_row epss ep
      ) patl ([], [])
  in
  pressure_variants tdefs val_pss;
  pressure_variants tdefs exn_pss
(*****************************)
(* Utilities for diagnostics *)
(*****************************)
(*
Build up a working pattern matrix by forgetting
about guarded patterns
*)
(* Keep only the unguarded clauses, one single-pattern row per clause. *)
let initial_matrix clauses =
  List.filter_map
    (fun { c_guard; c_lhs; _ } ->
       match c_guard with
       | Some _ -> None
       | None -> Some [c_lhs])
    clauses
(*
Build up a working pattern matrix by keeping
only the patterns which are guarded
*)
(* Keep only the guarded clauses, one single-pattern row per clause. *)
let initial_only_guarded clauses =
  List.filter_map
    (fun { c_guard; c_lhs; _ } ->
       match c_guard with
       | None -> None
       | Some _ -> Some [c_lhs])
    clauses
(************************)
(* Exhaustiveness check *)
(************************)
(* conversion from Typedtree.pattern to Parsetree.pattern list *)
module Conv = struct
  open Parsetree
  let mkpat desc = Ast_helper.Pat.mk desc

  (* Counter used to generate fresh, collision-free identifiers; the "#$"
     prefix keeps them distinct from any source-level name. *)
  let name_counter = ref 0
  let fresh name =
    let current = !name_counter in
    name_counter := !name_counter + 1;
    "#$" ^ name ^ Int.to_string current

  (* Convert a typed pattern to an untyped one.  Constructor and label
     references are replaced by fresh identifiers; the two returned hash
     tables map those identifiers back to their descriptions. *)
  let conv typed =
    let constrs = Hashtbl.create 7 in
    let labels = Hashtbl.create 7 in
    let rec loop pat =
      match pat.pat_desc with
        Tpat_or (pa,pb,_) ->
          mkpat (Ppat_or (loop pa, loop pb))
      | Tpat_var (_, ({txt="*extension*"} as nm)) -> (* PR#7330 *)
          mkpat (Ppat_var nm)
      | Tpat_any
      | Tpat_var _ ->
          mkpat Ppat_any
      | Tpat_constant c ->
          mkpat (Ppat_constant (Untypeast.constant c))
      | Tpat_alias (p,_,_) -> loop p
      | Tpat_tuple lst ->
          mkpat (Ppat_tuple (List.map loop lst))
      | Tpat_construct (cstr_lid, cstr, lst, _) ->
          let id = fresh cstr.cstr_name in
          let lid = { cstr_lid with txt = Longident.Lident id } in
          Hashtbl.add constrs id cstr;
          let arg =
            match List.map loop lst with
            | []  -> None
            | [p] -> Some ([], p)
            | lst -> Some ([], mkpat (Ppat_tuple lst))
          in
          mkpat (Ppat_construct(lid, arg))
      | Tpat_variant(label,p_opt,_row_desc) ->
          let arg = Option.map loop p_opt in
          mkpat (Ppat_variant(label, arg))
      | Tpat_record (subpatterns, _closed_flag) ->
          let fields =
            List.map
              (fun (_, lbl, p) ->
                 let id = fresh lbl.lbl_name in
                 Hashtbl.add labels id lbl;
                 (mknoloc (Longident.Lident id), loop p))
              subpatterns
          in
          mkpat (Ppat_record (fields, Open))
      | Tpat_array lst ->
          mkpat (Ppat_array (List.map loop lst))
      | Tpat_lazy p ->
          mkpat (Ppat_lazy (loop p))
    in
    let ps = loop typed in
    (ps, constrs, labels)
end
(* Whether the counter-example contains an extension pattern *)
(* Does [pat] contain the special "*extension*" variable introduced for
   counter-examples over extensible variant types? *)
let contains_extension pat =
  let is_extension_marker p =
    match p.pat_desc with
    | Tpat_var (_, {txt = "*extension*"}) -> true
    | _ -> false
  in
  exists_pattern is_extension_marker pat
(* Build a pattern from its expected type *)
(* How the counter-example pattern was produced from the expected type:
   either a single pattern, or an or-pattern covering GADT cases. *)
type pat_explosion = PE_single | PE_gadt_cases
(* Result of building a pattern from a type: no inhabitant, any value, or
   a concrete parsetree pattern together with the renaming tables produced
   by [Conv.conv]. *)
type ppat_of_type =
  | PT_empty
  | PT_any
  | PT_pattern of
      pat_explosion *
      Parsetree.pattern *
      (string, constructor_description) Hashtbl.t *
      (string, label_description) Hashtbl.t

(* Build a parsetree pattern describing the inhabitants of type [ty]. *)
let ppat_of_type env ty =
  match pats_of_type env ty Value_mode.max_mode with
  | [] -> PT_empty
  | [{pat_desc = Tpat_any}] -> PT_any
  | [pat] ->
      let (ppat, constrs, labels) = Conv.conv pat in
      PT_pattern (PE_single, ppat, constrs, labels)
  | pats ->
      let (ppat, constrs, labels) = Conv.conv (orify_many pats) in
      PT_pattern (PE_gadt_cases, ppat, constrs, labels)

(* Re-typecheck a counter-example through the caller-supplied predicate. *)
let typecheck ~pred p =
  let (pattern,constrs,labels) = Conv.conv p in
  pred constrs labels pattern
(* Core exhaustiveness check: returns [Total] or [Partial], emitting the
   appropriate warning with a pretty-printed counter-example when the
   match is partial. *)
let do_check_partial ~pred loc casel pss = match pss with
| [] ->
    (*
      This can occur
      - For empty matches generated by ocamlp4 (no warning)
      - when all patterns have guards (then, casel <> [])
        (specific warning)
      Then match MUST be considered non-exhaustive,
      otherwise compilation of PM is broken.
    *)
    begin match casel with
    | [] -> ()
    | _ ->
        if Warnings.is_active Warnings.All_clauses_guarded then
          Location.prerr_warning loc Warnings.All_clauses_guarded
    end ;
    Partial
| ps::_ ->
    (* Enumerate counter-examples lazily and keep only the well-typed ones. *)
    let counter_examples =
      exhaust None pss (List.length ps)
      |> Seq.filter_map (typecheck ~pred) in
    match counter_examples () with
    | Seq.Nil -> Total
    | Seq.Cons (v, _rest) ->
        if Warnings.is_active (Warnings.Partial_match "") then begin
          let errmsg =
            try
              let buf = Buffer.create 16 in
              let fmt = Format.formatter_of_buffer buf in
              Printpat.top_pretty fmt v;
              (* A guarded clause might still catch this value at run time;
                 mention it to avoid a misleading message. *)
              if do_match (initial_only_guarded casel) [v] then
                Buffer.add_string buf
                  "\n(However, some guarded clause may match this value.)";
              if contains_extension v then
                Buffer.add_string buf
                  "\nMatching over values of extensible variant types \
                     (the *extension* above)\n\
                  must include a wild card pattern in order to be exhaustive."
              ;
              Buffer.contents buf
            with _ ->
              ""
          in
          Location.prerr_warning loc (Warnings.Partial_match errmsg)
        end;
        Partial
(*****************)
(* Fragile check *)
(*****************)
(* Collect all data types in a pattern *)
(* Append [path] to [paths] unless an equal path (per [Path.same]) is
   already present; preserves the original order. *)
let add_path path paths =
  if List.exists (Path.same path) paths then paths
  else paths @ [path]
(* A datatype path is worth a fragility check unless it is one of the
   predefined types that will never gain new constructors. *)
let extendable_path path =
  List.for_all
    (fun stable -> not (Path.same path stable))
    [Predef.path_bool; Predef.path_list; Predef.path_unit; Predef.path_option]
(* Accumulate into [r] the paths of all extendable data types whose
   constructors are matched (non-extension constructors only) anywhere
   inside pattern [p]. *)
let rec collect_paths_from_pat r p = match p.pat_desc with
| Tpat_construct(_, {cstr_tag=(Cstr_constant _|Cstr_block _|Cstr_unboxed)},
                 ps, _) ->
    let path = get_constructor_type_path p.pat_type p.pat_env in
    List.fold_left
      collect_paths_from_pat
      (if extendable_path path then add_path path r else r)
      ps
| Tpat_any|Tpat_var _|Tpat_constant _| Tpat_variant (_,None,_) -> r
| Tpat_tuple ps | Tpat_array ps
(* Extension constructors do not contribute a closed datatype path. *)
| Tpat_construct (_, {cstr_tag=Cstr_extension _}, ps, _)->
    List.fold_left collect_paths_from_pat r ps
| Tpat_record (lps,_) ->
    List.fold_left
      (fun r (_, _, p) -> collect_paths_from_pat r p)
      r lps
| Tpat_variant (_, Some p, _) | Tpat_alias (p,_,_) -> collect_paths_from_pat r p
| Tpat_or (p1,p2,_) ->
    collect_paths_from_pat (collect_paths_from_pat r p1) p2
| Tpat_lazy p
    ->
    collect_paths_from_pat r p
(*
  Actual fragile check
    1. Collect data types in the patterns of the match.
    2. One exhaustivity check per datatype, considering that
       the type is extended.
*)
(* Warn when an exhaustive match would stop being exhaustive if one of the
   matched data types were extended with a new constructor. *)
let do_check_fragile loc casel pss =
  let exts =
    List.fold_left
      (fun r c -> collect_paths_from_pat r c.c_lhs)
      [] casel in
  match exts with
  | [] -> ()
  | _ -> match pss with
    | [] -> ()
    | ps::_ ->
        List.iter
          (fun ext ->
            (* Re-run exhaustiveness assuming type [ext] is extended;
               no witness means the match relies on [ext] being closed. *)
            let witnesses = exhaust (Some ext) pss (List.length ps) in
            match witnesses () with
            | Seq.Nil ->
                Location.prerr_warning
                  loc
                  (Warnings.Fragile_match (Path.name ext))
            | Seq.Cons _ -> ())
          exts
(********************************)
(* Exported unused clause check *)
(********************************)
(* Warn about clauses (or or-pattern branches) that can never be selected.
   [pred] re-typechecks candidate counter-examples; refutation clauses
   [pat -> .] are checked for actual refutability instead. *)
let check_unused pred casel =
  if Warnings.is_active Warnings.Redundant_case
  || List.exists (fun c -> c.c_rhs.exp_desc = Texp_unreachable) casel then
    let rec do_rec pref = function
      | [] -> ()
      | {c_lhs=q; c_guard; c_rhs} :: rem ->
          let qs = [q] in
            begin try
              let pss =
                (* prev was accumulated in reverse order;
                   restore source order to get ordered counter-examples *)
                List.rev pref
                |> List.filter (compats qs)
                |> get_mins le_pats in
              (* First look for redundant or partially redundant patterns *)
              let r = every_satisfiables (make_rows pss) (make_row qs) in
              let refute = (c_rhs.exp_desc = Texp_unreachable) in
              (* Do not warn for unused [pat -> .] *)
              if r = Unused && refute then () else
              let r =
                (* Do not refine if either:
                   - we already know the clause is unused
                   - the clause under consideration is not a refutation clause
                     and either:
                     + there are no other lines
                     + we do not care whether the types prevent this clause to
                       be reached.
                   If the clause under consideration *is* a refutation clause
                   then we do need to check more carefully whether it can be
                   refuted or not. *)
                let skip =
                  r = Unused || (not refute && pref = []) ||
                  not(refute || Warnings.is_active Warnings.Unreachable_case) in
                if skip then r else
                (* Then look for empty patterns *)
                let sfs = list_satisfying_vectors pss qs in
                if sfs = [] then Unused else
                let sfs =
                  List.map (function [u] -> u | _ -> assert false) sfs in
                let u = orify_many sfs in
                (*Format.eprintf "%a@." pretty_val u;*)
                let (pattern,constrs,labels) = Conv.conv u in
                let pattern = {pattern with Parsetree.ppat_loc = q.pat_loc} in
                match pred refute constrs labels pattern with
                  None when not refute ->
                    Location.prerr_warning q.pat_loc Warnings.Unreachable_case;
                    Used
                | _ -> r
              in
              match r with
              | Unused ->
                  Location.prerr_warning
                    q.pat_loc Warnings.Redundant_case
              | Upartial ps ->
                  List.iter
                    (fun p ->
                      Location.prerr_warning
                        p.pat_loc Warnings.Redundant_subpat)
                    ps
              | Used -> ()
            with Empty | Not_found -> assert false
            end ;
          (* Guarded clauses may fall through, so they must not shadow
             later clauses in the accumulated prefix. *)
          if c_guard <> None then
            do_rec pref rem
          else
            do_rec ([q]::pref) rem in
    do_rec [] casel
(*********************************)
(* Exported irrefutability tests *)
(*********************************)
(* A pattern is irrefutable when it matches at least everything omega does. *)
let irrefutable pat = le_pat pat omega
(* Whether matching [pat] can be done without running any code: false as
   soon as the pattern may force a lazy value, read a mutable field, or
   (for a partial match) raise. *)
let inactive ~partial pat =
  match partial with
  | Partial -> false
  | Total -> begin
      let rec loop pat =
        match pat.pat_desc with
        | Tpat_lazy _ | Tpat_array _ ->
          (* Forcing a lazy, or reading a mutable array, is an effect. *)
          false
        | Tpat_any | Tpat_var _ | Tpat_variant (_, None, _) ->
          true
        | Tpat_constant c -> begin
            match c with
            | Const_string _ -> Config.safe_string
            | Const_int _ | Const_char _ | Const_float _
            | Const_int32 _ | Const_int64 _ | Const_nativeint _ -> true
          end
        | Tpat_tuple ps | Tpat_construct (_, _, ps, _) ->
          List.for_all (fun p -> loop p) ps
        | Tpat_alias (p,_,_) | Tpat_variant (_, Some p, _) ->
          loop p
        | Tpat_record (ldps,_) ->
          (* Mutable fields may be re-read during matching. *)
          List.for_all
            (fun (_, lbl, p) -> lbl.lbl_mut = Immutable && loop p)
            ldps
        | Tpat_or (p,q,_) ->
          loop p && loop q
      in
      loop pat
    end
(*********************************)
(* Exported exhaustiveness check *)
(*********************************)
(*
Fragile check is performed when required and
on exhaustive matches only.
*)
(* Exported exhaustiveness check; when the match is total, additionally run
   the fragility check if its warning is active. *)
let check_partial pred loc casel =
  let pss = initial_matrix casel in
  let pss = get_mins le_pats pss in
  let total = do_check_partial ~pred loc casel pss in
  if
    total = Total && Warnings.is_active (Warnings.Fragile_match "")
  then begin
    do_check_fragile loc casel pss
  end ;
  total
(*************************************)
(* Ambiguous variable in or-patterns *)
(*************************************)
(*
  Specification: ambiguous variables in or-patterns.
The semantics of or-patterns in OCaml is specified with
a left-to-right bias: a value [v] matches the pattern [p | q] if it
matches [p] or [q], but if it matches both, the environment
captured by the match is the environment captured by [p], never the
one captured by [q].
While this property is generally well-understood, one specific case
where users expect a different semantics is when a pattern is
followed by a when-guard: [| p when g -> e]. Consider for example:
| ((Const x, _) | (_, Const x)) when is_neutral x -> branch
The semantics is clear: match the scrutinee against the pattern, if
it matches, test the guard, and if the guard passes, take the
branch.
However, consider the input [(Const a, Const b)], where [a] fails
the test [is_neutral f], while [b] passes the test [is_neutral
b]. With the left-to-right semantics, the clause above is *not*
taken by its input: matching [(Const a, Const b)] against the
or-pattern succeeds in the left branch, it returns the environment
[x -> a], and then the guard [is_neutral a] is tested and fails,
the branch is not taken. Most users, however, intuitively expect
that any pair that has one side passing the test will take the
branch. They assume it is equivalent to the following:
| (Const x, _) when is_neutral x -> branch
| (_, Const x) when is_neutral x -> branch
while it is not.
The code below is dedicated to finding these confusing cases: the
cases where a guard uses "ambiguous" variables, that are bound to
different parts of the scrutinees by different sides of
a or-pattern. In other words, it finds the cases where the
specified left-to-right semantics is not equivalent to
a non-deterministic semantics (any branch can be taken) relatively
to a specific guard.
*)
(* The set of identifiers bound by a pattern. *)
let pattern_vars p = Ident.Set.of_list (Typedtree.pat_bound_idents p)
(*
  Row for ambiguous variable search,
row is the traditional pattern row,
varsets contain a list of head variable sets (varsets)
A given varset contains all the variables that appeared at the head
of a pattern in the row at some point during traversal: they would
all be bound to the same value at matching time. On the contrary,
two variables of different varsets appeared at different places in
the pattern and may be bound to distinct sub-parts of the matched
value.
All rows of a (sub)matrix have rows of the same length,
but also varsets of the same length.
Varsets are populated when simplifying the first column
-- the variables of the head pattern are collected in a new varset.
For example,
{ row = x :: r1; varsets = s1 }
{ row = (Some _) as y :: r2; varsets = s2 }
{ row = (None as x) as y :: r3; varsets = s3 }
{ row = (Some x | (None as x)) :: r4 with varsets = s4 }
becomes
(_, { row = r1; varsets = {x} :: s1 })
(Some _, { row = r2; varsets = {y} :: s2 })
(None, { row = r3; varsets = {x, y} :: s3 })
(Some x, { row = r4; varsets = {} :: s4 })
(None, { row = r4; varsets = {x} :: s4 })
*)
(* A matrix row for the ambiguity analysis: the remaining patterns plus the
   variable sets collected while simplifying earlier columns. *)
type amb_row = { row : pattern list ; varsets : Ident.Set.t list; }

(* Simplify the head pattern [p] of a row (CPS-style, [k] is the list being
   built): aliases and variables are absorbed into [head_bound_variables],
   or-patterns are expanded into two rows, and a simple head is handed to
   [add_column] together with the accumulated varset. *)
let simplify_head_amb_pat head_bound_variables varsets ~add_column p ps k =
  let rec simpl head_bound_variables varsets p ps k =
    match (Patterns.General.view p).pat_desc with
    | `Alias (p,x,_) ->
      simpl (Ident.Set.add x head_bound_variables) varsets p ps k
    | `Var (x, _) ->
      simpl (Ident.Set.add x head_bound_variables) varsets Patterns.omega ps k
    | `Or (p1,p2,_) ->
      (* Both branches of the or-pattern contribute a row. *)
      simpl head_bound_variables varsets p1 ps
        (simpl head_bound_variables varsets p2 ps k)
    | #Patterns.Simple.view as view ->
      add_column (Patterns.Head.deconstruct { p with pat_desc = view })
        { row = ps; varsets = head_bound_variables :: varsets; } k
  in simpl head_bound_variables varsets p ps k
(*
To accurately report ambiguous variables, one must consider
that previous clauses have already matched some values.
Consider for example:
| (Foo x, Foo y) -> ...
| ((Foo x, _) | (_, Foo x)) when bar x -> ...
The second line taken in isolation uses an unstable variable,
but the discriminating values, of the shape [(Foo v1, Foo v2)],
would all be filtered by the line above.
To track this information, the matrices we analyze contain both
*positive* rows, that describe the rows currently being analyzed
(of type Varsets.row, so that their varsets are tracked) and
*negative rows*, that describe the cases already matched against.
The values matched by a signed matrix are the values matched by
some of the positive rows but none of the negative rows. In
particular, a variable is stable if, for any value not matched by
any of the negative rows, the environment captured by any of the
matching positive rows is identical.
*)
(* A signed row: [Positive] rows are under analysis (with varsets tracked),
   [Negative] rows describe values already matched by earlier clauses. *)
type ('a, 'b) signed = Positive of 'a | Negative of 'b

(* Simplify the first column of a signed matrix, pairing each simplified
   head with the rest of its (signed) row. *)
let rec simplify_first_amb_col = function
  | [] -> []
  | (Negative [] | Positive { row = []; _ }) :: _ -> assert false
  | Negative (n :: ns) :: rem ->
    (* Negative rows carry no varsets: use the plain head simplifier. *)
    let add_column n ns k = (n, Negative ns) :: k in
    simplify_head_pat
      ~add_column n ns (simplify_first_amb_col rem)
  | Positive { row = p::ps; varsets; }::rem ->
    let add_column p ps k = (p, Positive ps) :: k in
    simplify_head_amb_pat
      Ident.Set.empty varsets
      ~add_column p ps (simplify_first_amb_col rem)
(* Compute stable bindings *)
(* Set of variables stable under the analysis: [All] is the top element
   (every variable is stable), [Vars s] restricts stability to [s]. *)
type stable_vars =
  | All
  | Vars of Ident.Set.t

(* Intersection of two stability results; [All] is the neutral element. *)
let stable_inter sv1 sv2 = match sv1, sv2 with
  | All, sv | sv, All -> sv
  | Vars s1, Vars s2 -> Vars (Ident.Set.inter s1 s2)
(* Left-fold a non-empty list using its first element as the seed;
   raises [Invalid_argument] on the empty list. *)
let reduce f xs =
  match xs with
  | [] -> invalid_arg "reduce"
  | first :: rest -> List.fold_left f first rest
(* Compute the variables whose bindings are stable (identical whichever
   positive row matches) for a signed matrix. *)
let rec matrix_stable_vars m = match m with
  | [] -> All
  | ((Positive {row = []; _} | Negative []) :: _) as empty_rows ->
    let exception Negative_empty_row in
    (* if at least one empty row is negative, the matrix matches no value *)
    let get_varsets = function
      | Negative n ->
        (* All rows have the same number of columns;
           if the first row is empty, they all are. *)
        assert (n = []);
        raise Negative_empty_row
      | Positive p ->
        assert (p.row = []);
        p.varsets in
    begin match List.map get_varsets empty_rows with
    | exception Negative_empty_row -> All
    | rows_varsets ->
      (* Position-wise intersection of the varsets across all rows. *)
      let stables_in_varsets =
        reduce (List.map2 Ident.Set.inter) rows_varsets in
      (* The stable variables are those stable at any position *)
      Vars
        (List.fold_left Ident.Set.union Ident.Set.empty stables_in_varsets)
    end
  | m ->
    let is_negative = function
      | Negative _ -> true
      | Positive _ -> false in
    if List.for_all is_negative m then
      (* optimization: quit early if there are no positive rows.
         This may happen often when the initial matrix has many
         negative cases and few positive cases (a small guarded
         clause after a long list of clauses) *)
      All
    else begin
      let m = simplify_first_amb_col m in
      if not (all_coherent (first_column m)) then
        All
      else begin
        (* If the column is ill-typed but deemed coherent, we might
           spuriously warn about some variables being unstable.
           As sad as that might be, the warning can be silenced by
           splitting the or-pattern... *)
        let submatrices =
          let extend_row columns = function
            | Negative r -> Negative (columns @ r)
            | Positive r -> Positive { r with row = columns @ r.row } in
          let q0 = discr_pat Patterns.Simple.omega m in
          let { default; constrs } =
            build_specialized_submatrices ~extend_row q0 m in
          let non_default = List.map snd constrs in
          if full_match false constrs
          then non_default
          else default :: non_default in
        (* A stable variable must be stable in each submatrix. *)
        let submat_stable = List.map matrix_stable_vars submatrices in
        List.fold_left stable_inter All submat_stable
      end
    end
(* Stable variables of pattern [p], given the rows [ns] already matched by
   earlier clauses (added as negative rows). *)
let pattern_stable_vars ns p =
  matrix_stable_vars
    (List.fold_left (fun m n -> Negative n :: m)
       [Positive {varsets = []; row = [p]}] ns)
(*
  All identifier paths that appear in an expression that occurs
as a clause right hand side or guard.
The function is rather complex due to the compilation of
unpack patterns by introducing code in rhs expressions
and **guards**.
For pattern (module M:S) -> e the code is
let module M_mod = unpack M .. in e
Hence M is "free" in e iff M_mod is free in e.
Not doing so will yield excessive warning in
(module (M:S) } ...) when true -> ....
as M is always present in
let module M_mod = unpack M .. in true
*)
(* Collect all identifier heads used in [exp] (a guard or right-hand side),
   compensating for the module binding that the compilation of unpack
   patterns inserts — see the comment above. *)
let all_rhs_idents exp =
  let ids = ref Ident.Set.empty in
  (* Very hackish, detect unpack pattern compilation
     and perform "indirect check for them" *)
  let is_unpack exp =
    List.exists
      (fun attr -> attr.Parsetree.attr_name.txt = "#modulepat")
      exp.exp_attributes in
  let open Tast_iterator in
  let expr_iter iter exp =
    (match exp.exp_desc with
      | Texp_ident (path, _lid, _descr, _kind) ->
        List.iter (fun id -> ids := Ident.Set.add id !ids) (Path.heads path)
      (* Use default iterator methods for rest of match.*)
      | _ -> Tast_iterator.default_iterator.expr iter exp);
    if is_unpack exp then begin match exp.exp_desc with
    | Texp_letmodule
        (id_mod,_,_,
         {mod_desc=
          Tmod_unpack ({exp_desc=Texp_ident (Path.Pident id_exp,_,_,_)},_)},
         _) ->
        assert (Ident.Set.mem id_exp !ids) ;
        (* M is used iff the generated M_mod is used. *)
        begin match id_mod with
        | Some id_mod when not (Ident.Set.mem id_mod !ids) ->
          ids := Ident.Set.remove id_exp !ids
        | _ -> ()
        end
    | _ -> assert false
    end
  in
  let iterator = {Tast_iterator.default_iterator with expr = expr_iter} in
  iterator.expr iterator exp;
  !ids
(* Warn when a guard uses a variable that different branches of an
   or-pattern bind to different sub-parts of the scrutinee.  The warning
   value is built once so activation is checked cheaply per call. *)
let check_ambiguous_bindings =
  let open Warnings in
  let warn0 = Ambiguous_var_in_pattern_guard [] in
  fun cases ->
    if is_active warn0 then
      (* [ns] accumulates the rows matched by earlier clauses. *)
      let check_case ns case = match case with
        | { c_lhs = p; c_guard=None ; _} -> [p]::ns
        | { c_lhs=p; c_guard=Some g; _} ->
            (* Only variables both bound by the pattern and used in the
               guard can be ambiguous. *)
            let all =
              Ident.Set.inter (pattern_vars p) (all_rhs_idents g) in
            if not (Ident.Set.is_empty all) then begin
              match pattern_stable_vars ns p with
              | All -> ()
              | Vars stable ->
                  let ambiguous = Ident.Set.diff all stable in
                  if not (Ident.Set.is_empty ambiguous) then begin
                    let pps =
                      Ident.Set.elements ambiguous |> List.map Ident.name in
                    let warn = Ambiguous_var_in_pattern_guard pps in
                    Location.prerr_warning p.pat_loc warn
                  end
            end;
            ns
      in
      ignore (List.fold_left check_case [] cases)
(* Enumerate the counter-examples of matrix [pss], optionally re-typechecked
   through [pred]. *)
let do_complete_partial ?pred pss =
  (* c/p of [do_check_partial] without the parts concerning the generation of
     the error message or the warning emiting. *)
  match pss with
  | [] -> []
  | ps :: _ ->
      let typecheck p =
        match pred with
        | Some pred ->
            let (pattern,constrs,labels) = Conv.conv p in
            Option.map (fun v -> v, Some (constrs, labels))
              (pred constrs labels pattern)
        | None -> Some (p, None)
      in
      exhaust None pss (List.length ps)
      |> Seq.filter_map typecheck
      |> List.of_seq
(* As [do_complete_partial], after pruning subsumed rows from the matrix. *)
let complete_partial ~pred pss =
  do_complete_partial ~pred (get_mins le_pats pss)
(* Like [check_unused] but returns the unused clauses / sub-patterns instead
   of emitting warnings. *)
let return_unused casel =
  let rec do_rec acc pref = function
    | [] -> acc
    | q :: rem ->
        let qs = [q] in
        let acc =
          try
            let pss = get_mins le_pats (List.filter (compats qs) pref) in
            (* [r] tells whether row [qs] adds anything over [pss]. *)
            let r = every_satisfiables (make_rows pss) (make_row qs) in
            match r with
            | Unused -> `Unused q :: acc
            | Upartial ps -> `Unused_subs (q, ps) :: acc
            | Used -> acc
          with Empty | Not_found -> assert false
        in
        (* FIXME: we need to know whether there is a guard here, because if there
           is, we dont want to add [[q]] to [pref]. *)
        do_rec acc ([q]::pref) rem
  in
  do_rec [] [] casel
| null | https://raw.githubusercontent.com/janestreet/merlin-jst/980b574405617fa0dfb0b79a84a66536b46cd71b/src/ocaml/typing/parmatch.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Detection of partial matches and unused match cases.
***********************************
***********************************
*****************
*****************
only omegas on the column: the column is coherent.
*********************
Compatibility check
*********************
same label on both sides
Variables match any value
Structural induction
Constructors, with special case for extension
More standard stuff
Empty pattern
**************************************
**************************************
**************************
**************************
Check top matching
extract record fields as a whole
Build argument list when p2 >= p1, where p1 is a simple pattern
short-circuiting: clearly if we have anything other than [Record] or
[Any] to start with, we're not going to be able refine at all. So
there's no point going over the matrix.
In case a matching value is found, set actual arguments
of the matching pattern.
case any is used in matching.ml
the rows are non-empty!
if no group matched this row, it has a head constructor that
was never seen before; add a new sub-matrix for this head
insert a row of head omega into all groups
groups are accumulated in reverse order;
we restore the order of rows in the source code
Variant related functions
mark constructor lines for failure when they are incomplete
fixed=false means that this tag is not explicitly matched
this unification cannot fail
closing=true, we are considering the variant as closed
m=true, do not discard matched tags, rather warn
Written as a non-fragile matching, PR#7451 originated from a fragile matching
below.
build a pattern from a constructor description
build an or-pattern from a constructor list
Auxiliary for build_other
let c = {c with cstr_name = "*extension*"} in
PR#7330
| Reither _
This one is called after erasing pattern info
activate this code
for checking non-gadt constructors
****************************************
Look for a row that matches some value
****************************************
Useful for seeing if the example of
non-matched value can indeed be matched
(by a guarded clause)
Now another satisfiable function that additionally
supplies an example of a matching value.
This function should be called for exhaustiveness check only.
We're considering an ill-typed branch, we won't actually be able to
produce a well typed value taking that branch.
cannot occur, since constructors don't make
a full signature
Lazily compute witnesses for all constructor submatrices
(Some constr_mat) then the wildcard/default submatrix (None).
Note that the call to [try_omega ()] is delayed to after
all constructor matrices have been traversed.
Another exhaustiveness check, enforcing variant typing.
Note that it does not check exact exhaustiveness, but whether a
matching could be made exhaustive by closing all variant types.
When this is true of all other columns, the current column is left
open (even if it means that the whole matching is not exhaustive as
a result).
When this is false for the matrix minus the current column, and the
current column is composed of variant tags, we close the variant
(even if it doesn't help in making the matching exhaustive).
Yet another satisfiable function
Useful pattern
Useless pattern
Mixed, with list of useless ones
this row type enable column processing inside the matrix
- left -> elements not to be processed,
- right -> elements to be processed
let pretty_row {ors=ors ; no_ors=no_ors; active=active} =
pretty_line ors ; prerr_string " *" ;
pretty_line no_ors ; prerr_string " *" ;
pretty_line active
let pretty_rows rs =
prerr_endline "begin matrix" ;
List.iter
(fun r ->
pretty_row r ;
prerr_endline "")
rs ;
prerr_endline "end matrix"
Initial build
Useful to detect and expand or pats inside as pats
Standard or-args for left-to-right matching
Just remove current column
Current column has been processed
the rows are non-empty!
Back to normal matrices
propose or pats for expansion
idem for matrices
qs is now partitionned, check usefulness
no or-patterns
forget about ``all-variable'' columns now
otherwise this is direct food for satisfiable
syntactically generated or-pats should not be expanded
this is a real or-pattern
Ah Jacques...
standard case, filter matrix
The handling of incoherent matrices is kept in line with
[satisfiable]
le_pat p q means, forall V, V matches q implies V matches p
In all other cases, enumeration is performed
****************************
Exported variant closing
****************************
Apply pressure to variants
***************************
***************************
Build up a working pattern matrix by forgetting
about guarded patterns
Build up a working pattern matrix by keeping
only the patterns which are guarded
**********************
Exhaustiveness check
**********************
PR#7330
Whether the counter-example contains an extension pattern
Build a pattern from its expected type
***************
Fragile check
***************
Collect all data types in a pattern
******************************
Exported unused clause check
******************************
prev was accumulated in reverse order;
restore source order to get ordered counter-examples
Do not warn for unused [pat -> .]
Do not refine if either:
- we already know the clause is unused
- the clause under consideration is not a refutation clause
and either:
+ there are no other lines
+ we do not care whether the types prevent this clause to
be reached.
If the clause under consideration *is* a refutation clause
then we do need to check more carefully whether it can be
refuted or not.
Then look for empty patterns
Format.eprintf "%a@." pretty_val u;
*******************************
Exported irrefutability tests
*******************************
*******************************
Exported exhaustiveness check
*******************************
Fragile check is performed when required and
on exhaustive matches only.
***********************************
Ambiguous variable in or-patterns
***********************************
Compute stable bindings
The stable variables are those stable at any position
optimization: quit early if there are no positive rows.
This may happen often when the initial matrix has many
negative cases and few positive cases (a small guarded
clause after a long list of clauses)
If the column is ill-typed but deemed coherent, we might
spuriously warn about some variables being unstable.
As sad as that might be, the warning can be silenced by
splitting the or-pattern...
A stable variable must be stable in each submatrix.
Very hackish, detect unpack pattern compilation
and perform "indirect check for them"
Use default iterator methods for rest of match.
c/p of [do_check_partial] without the parts concerning the generation of
the error message or the warning emiting.
FIXME: we need to know whether there is a guard here, because if there
is, we dont want to add [[q]] to [pref]. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Misc
open Asttypes
open Types
open Typedtree
Utilities for building patterns
let make_pat desc ty mode tenv =
{pat_desc = desc; pat_loc = Location.none; pat_extra = [];
pat_type = ty ; pat_mode = mode; pat_env = tenv;
pat_attributes = [];
}
(* Shorthands for the wildcard pattern and for lists of wildcards. *)
let omega = Patterns.omega
let omegas = Patterns.omegas
let omega_list = Patterns.omega_list
(* Catch-all pattern bound to the (unwritable) variable "+"; used as a
   placeholder in witnesses where no concrete pattern can be shown. *)
let extra_pat =
make_pat
(Tpat_var (Ident.create_local "+", mknoloc "+"))
Ctype.none Value_mode.max_mode Env.empty
Coherence check
For some of the operations we do in this module , we would like ( because it
simplifies matters ) to assume that patterns appearing on a given column in a
pattern matrix are /coherent/ ( think " of the same type " ) .
Unfortunately that is not always true .
Consider the following ( well - typed ) example :
{ [
type _ t = S : string t | U : unit t
let f ( type a ) ( t1 : a t ) ( t2 : a t ) ( a : a ) =
match t1 , t2 , a with
| U , _ , ( ) - > ( )
| _ , S , " " - > ( )
] }
Clearly the 3rd column contains incoherent patterns .
On the example above , most of the algorithms will explore the pattern matrix
as illustrated by the following tree :
{ v
S
------- > | " " |
U | S , " " | _ _ / | ( ) |
-------- > | _ , ( ) | \ not S
| U , _ , ( ) | _ _ / ------- > | ( ) |
| _ , S , " " | \
--------- > | S , " " | ---------- > | " " |
not U S
v }
where following an edge labelled by a pattern P means " assuming the value I
am matching on is filtered by [ P ] on the column I am currently looking at ,
then the following submatrix is still reachable " .
Notice that at any point of that tree , if the first column of a matrix is
incoherent , then the branch leading to it can only be taken if the scrutinee
is ill - typed .
In the example above the only case where we have a matrix with an incoherent
first column is when we consider [ t1 , t2 , a ] to be [ U , S , ... ] . However such
a value would be ill - typed , so we can never actually get there .
Checking the first column at each step of the recursion and making the
conscious decision of " aborting " the algorithm whenever the first column
becomes incoherent , allows us to retain the initial assumption in later
stages of the algorithms .
---
N.B. two patterns can be considered coherent even though they might not be of
the same type .
That 's in part because we only care about the " head " of patterns and leave
checking coherence of subpatterns for the next steps of the algorithm :
( ' a ' , ' b ' ) and ( 1 , ( ) ) will be deemed coherent because they are both a tuples
of arity 2 ( we 'll notice at a later stage the incoherence of ' a ' and 1 ) .
But also because it can be hard / costly to determine exactly whether two
patterns are of the same type or not ( eg . in the example above with _ and S ,
but see also the module [ Coherence_illustration ] in
testsuite / tests / basic - more / robustmatch.ml ) .
For the moment our weak , loosely - syntactic , coherence check seems to be
enough and we leave it to each user to consider ( and document ! ) what happens
when an " incoherence " is not detected by this check .
simplifies matters) to assume that patterns appearing on a given column in a
pattern matrix are /coherent/ (think "of the same type").
Unfortunately that is not always true.
Consider the following (well-typed) example:
{[
type _ t = S : string t | U : unit t
let f (type a) (t1 : a t) (t2 : a t) (a : a) =
match t1, t2, a with
| U, _, () -> ()
| _, S, "" -> ()
]}
Clearly the 3rd column contains incoherent patterns.
On the example above, most of the algorithms will explore the pattern matrix
as illustrated by the following tree:
{v
S
-------> | "" |
U | S, "" | __/ | () |
--------> | _, () | \ not S
| U, _, () | __/ -------> | () |
| _, S, "" | \
---------> | S, "" | ----------> | "" |
not U S
v}
where following an edge labelled by a pattern P means "assuming the value I
am matching on is filtered by [P] on the column I am currently looking at,
then the following submatrix is still reachable".
Notice that at any point of that tree, if the first column of a matrix is
incoherent, then the branch leading to it can only be taken if the scrutinee
is ill-typed.
In the example above the only case where we have a matrix with an incoherent
first column is when we consider [t1, t2, a] to be [U, S, ...]. However such
a value would be ill-typed, so we can never actually get there.
Checking the first column at each step of the recursion and making the
conscious decision of "aborting" the algorithm whenever the first column
becomes incoherent, allows us to retain the initial assumption in later
stages of the algorithms.
---
N.B. two patterns can be considered coherent even though they might not be of
the same type.
That's in part because we only care about the "head" of patterns and leave
checking coherence of subpatterns for the next steps of the algorithm:
('a', 'b') and (1, ()) will be deemed coherent because they are both a tuples
of arity 2 (we'll notice at a later stage the incoherence of 'a' and 1).
But also because it can be hard/costly to determine exactly whether two
patterns are of the same type or not (eg. in the example above with _ and S,
but see also the module [Coherence_illustration] in
testsuite/tests/basic-more/robustmatch.ml).
For the moment our weak, loosely-syntactic, coherence check seems to be
enough and we leave it to each user to consider (and document!) what happens
when an "incoherence" is not detected by this check.
*)
Given the first column of a simplified matrix , this function first looks for
a " discriminating " pattern on that column ( i.e. a non - omega one ) and then
check that every other head pattern in the column is coherent with that one .
a "discriminating" pattern on that column (i.e. a non-omega one) and then
check that every other head pattern in the column is coherent with that one.
*)
(* Check that all pattern heads in [column] are pairwise "coherent"
   (loosely: could belong to the same type); see the discussion above.
   An all-wildcard column is trivially coherent. *)
let all_coherent column =
  let open Patterns.Head in
  (* Loose, syntactic compatibility of two head patterns. *)
  let heads_agree h1 h2 =
    match h1.pat_desc, h2.pat_desc with
    | Construct c, Construct c' ->
        c.cstr_consts = c'.cstr_consts
        && c.cstr_nonconsts = c'.cstr_nonconsts
    | Constant c1, Constant c2 -> begin
        match c1, c2 with
        | Const_char _, Const_char _
        | Const_int _, Const_int _
        | Const_int32 _, Const_int32 _
        | Const_int64 _, Const_int64 _
        | Const_nativeint _, Const_nativeint _
        | Const_float _, Const_float _
        | Const_string _, Const_string _ -> true
        | ( Const_char _
          | Const_int _
          | Const_int32 _
          | Const_int64 _
          | Const_nativeint _
          | Const_float _
          | Const_string _), _ -> false
      end
    | Tuple l1, Tuple l2 -> l1 = l2
    | Record (lbl1 :: _), Record (lbl2 :: _) ->
        Array.length lbl1.lbl_all = Array.length lbl2.lbl_all
    | Any, _
    | _, Any
    | Record [], Record []
    | Variant _, Variant _
    | Array _, Array _
    | Lazy, Lazy -> true
    | _, _ -> false
  in
  (* A head is discriminating when it is not a wildcard. *)
  let is_discriminating h =
    match h.pat_desc with
    | Any -> false
    | _ -> true
  in
  match List.find_opt is_discriminating column with
  | None -> true
  | Some discr -> List.for_all (heads_agree discr) column
(* Project out the head of every row of a simplified matrix. *)
let first_column simplified_matrix =
  List.map (fun (first, _rest) -> fst first) simplified_matrix
Patterns p and q compatible means :
there exists value V that matches both , However ....
The case of extension types is dubious , as constructor rebind permits
that different constructors are the same ( and are thus compatible ) .
Compilation must take this into account , consider :
type t = ..
type t + = A|B
type t + = C = A
let f x y = match x , y with
| true , A - > ' 1 '
| _ , C - > ' 2 '
| false , A - > ' 3 '
| _ , _ - > ' _ '
As C is bound to A the value of f false A is ' 2 ' ( and not ' 3 ' as it would
be in the absence of rebinding ) .
Not considering rebinding , patterns " false , A " and " _ , C " are incompatible
and the compiler can swap the second and third clause , resulting in the
( more efficiently compiled ) matching
match x , y with
| true , A - > ' 1 '
| false , A - > ' 3 '
| _ , C - > ' 2 '
| _ , _ - > ' _ '
This is not correct : when C is bound to A , " f false A " returns ' 2 ' ( not ' 3 ' )
However , diagnostics do not take constructor rebinding into account .
Notice , that due to module abstraction constructor rebinding is hidden .
module X : sig type t = .. type t + = A|B end = struct
type t = ..
type t + = A
type t + = B = A
end
open X
let f x = match x with
| A - > ' 1 '
| B - > ' 2 '
| _ - > ' _ '
The second clause above will NOT ( and can not ) be flagged as useless .
Finally , there are two compatibility functions :
compat p q --- > ' syntactic compatibility , used for diagnostics .
may_compat p q --- > a safe approximation of possible compat ,
for compilation
there exists value V that matches both, However....
The case of extension types is dubious, as constructor rebind permits
that different constructors are the same (and are thus compatible).
Compilation must take this into account, consider:
type t = ..
type t += A|B
type t += C=A
let f x y = match x,y with
| true,A -> '1'
| _,C -> '2'
| false,A -> '3'
| _,_ -> '_'
As C is bound to A the value of f false A is '2' (and not '3' as it would
be in the absence of rebinding).
Not considering rebinding, patterns "false,A" and "_,C" are incompatible
and the compiler can swap the second and third clause, resulting in the
(more efficiently compiled) matching
match x,y with
| true,A -> '1'
| false,A -> '3'
| _,C -> '2'
| _,_ -> '_'
This is not correct: when C is bound to A, "f false A" returns '2' (not '3')
However, diagnostics do not take constructor rebinding into account.
Notice, that due to module abstraction constructor rebinding is hidden.
module X : sig type t = .. type t += A|B end = struct
type t = ..
type t += A
type t += B=A
end
open X
let f x = match x with
| A -> '1'
| B -> '2'
| _ -> '_'
The second clause above will NOT (and cannot) be flagged as useless.
Finally, there are two compatibility functions:
compat p q ---> 'syntactic compatibility, used for diagnostics.
may_compat p q ---> a safe approximation of possible compat,
for compilation
*)
(* Whether the field for [tag] in the (referenced) row [row] is absent. *)
let is_absent tag row = row_field_repr (get_row_field tag !row) = Rabsent
(* Whether head [d] is a polymorphic-variant pattern whose tag is absent
   from its row type. *)
let is_absent_pat d =
match d.pat_desc with
| Patterns.Head.Variant { tag; cstr_row; _ } -> is_absent tag cstr_row
| _ -> false
(* Total order on constant patterns.  Floats are compared by numeric value
   (their textual representation is parsed first) and strings by contents;
   every other pairing falls back to structural comparison. *)
let const_compare x y =
match x,y with
| Const_float f1, Const_float f2 ->
Stdlib.compare (float_of_string f1) (float_of_string f2)
| Const_string (s1, _, _), Const_string (s2, _, _) ->
String.compare s1 s2
| (Const_int _
|Const_char _
|Const_string (_, _, _)
|Const_float _
|Const_int32 _
|Const_int64 _
|Const_nativeint _
), _ -> Stdlib.compare x y
(* Pair up the sub-patterns of two record patterns field by field,
   inserting omega on the side that does not mention a field.
   Invariant: fields are already sorted by Typecore.type_label_a_list.
   Fix: the [else] of the final label comparison had been lost (it was on
   the same line as a stripped comment), leaving an unbalanced
   if/else-if; restored so equal labels pair their sub-patterns. *)
let records_args l1 l2 =
  let rec combine r1 r2 l1 l2 = match l1,l2 with
  | [],[] -> List.rev r1, List.rev r2
  | [],(_,_,p2)::rem2 -> combine (omega::r1) (p2::r2) [] rem2
  | (_,_,p1)::rem1,[] -> combine (p1::r1) (omega::r2) rem1 []
  | (_,lbl1,p1)::rem1, ( _,lbl2,p2)::rem2 ->
      if lbl1.lbl_pos < lbl2.lbl_pos then
        combine (p1::r1) (omega::r2) rem1 l2
      else if lbl1.lbl_pos > lbl2.lbl_pos then
        combine (omega::r1) (p2::r2) l1 rem2
      else (* same label on both sides *)
        combine (p1::r1) (p2::r2) rem1 rem2 in
  combine [] [] l1 l2
(* Pattern compatibility ([compat p q]: some value could match both [p]
   and [q]), parameterized by the equality used on constructor
   descriptions — see the discussion of constructor rebinding above.
   Or-patterns are expanded, variables/aliases match anything, and
   structured patterns are compared component-wise. *)
module Compat
(Constr:sig
val equal :
Types.constructor_description ->
Types.constructor_description ->
bool
end) = struct
let rec compat p q = match p.pat_desc,q.pat_desc with
| ((Tpat_any|Tpat_var _),_)
| (_,(Tpat_any|Tpat_var _)) -> true
| Tpat_alias (p,_,_),_ -> compat p q
| _,Tpat_alias (q,_,_) -> compat p q
| Tpat_or (p1,p2,_),_ ->
(compat p1 q || compat p2 q)
| _,Tpat_or (q1,q2,_) ->
(compat p q1 || compat p q2)
| Tpat_construct (_, c1, ps1, _), Tpat_construct (_, c2, ps2, _) ->
Constr.equal c1 c2 && compats ps1 ps2
| Tpat_variant(l1,op1, _), Tpat_variant(l2,op2,_) ->
l1=l2 && ocompat op1 op2
| Tpat_constant c1, Tpat_constant c2 ->
const_compare c1 c2 = 0
| Tpat_tuple ps, Tpat_tuple qs -> compats ps qs
| Tpat_lazy p, Tpat_lazy q -> compat p q
| Tpat_record (l1,_),Tpat_record (l2,_) ->
let ps,qs = records_args l1 l2 in
compats ps qs
| Tpat_array ps, Tpat_array qs ->
List.length ps = List.length qs &&
compats ps qs
| _,_ -> false
(* Compatibility of optional sub-patterns (variant arguments). *)
and ocompat op oq = match op,oq with
| None,None -> true
| Some p,Some q -> compat p q
| (None,Some _)|(Some _,None) -> false
(* Pointwise compatibility of two pattern lists of the same length. *)
and compats ps qs = match ps,qs with
| [], [] -> true
| p::ps, q::qs -> compat p q && compats ps qs
| _,_ -> false
end
(* Syntactic compatibility: constructors are compared by tag only,
   i.e. possible rebinding of extension constructors is ignored. *)
module SyntacticCompat =
Compat
(struct
let equal c1 c2 = Types.equal_tag c1.cstr_tag c2.cstr_tag
end)
(* Default compatibility tests, used for diagnostics. *)
let compat = SyntacticCompat.compat
and compats = SyntacticCompat.compats
Due to ( potential ) rebinding , two extension constructors
of the same arity type may equal
of the same arity type may equal *)
Utilities for retrieving type paths
May need a clean copy , cf . PR#4745
(* Return a copy of [ty] that is safe to expand (cf. PR#4745):
   generic-level types are used as-is, others are copied first. *)
let clean_copy ty =
if get_level ty = Btype.generic_level then ty
else Subst.type_expr Subst.identity ty
(* Path of the type constructor underlying [ty]; [ty] must expand to a
   [Tconstr] (callers pass constructor result types). *)
let get_constructor_type_path ty tenv =
let ty = Ctype.expand_head tenv (clean_copy ty) in
match get_desc ty with
| Tconstr (path,_,_) -> path
| _ -> assert false
Utilities for matching
(* Does the discriminating head [d] match the same top-level shape as
   head [h]?  A wildcard [h] is matched by any [d]; records match
   regardless of fields (fields are reconciled separately). *)
let simple_match d h =
let open Patterns.Head in
match d.pat_desc, h.pat_desc with
| Construct c1, Construct c2 ->
Types.equal_tag c1.cstr_tag c2.cstr_tag
| Variant { tag = t1; _ }, Variant { tag = t2 } ->
t1 = t2
| Constant c1, Constant c2 -> const_compare c1 c2 = 0
| Lazy, Lazy -> true
| Record _, Record _ -> true
| Tuple len1, Tuple len2
| Array len1, Array len2 -> len1 = len2
| _, Any -> true
| _, _ -> false
(* Field labels carried by a record head; a wildcard carries none.
   Any other head is a caller error. *)
let record_arg ph =
  let open Patterns.Head in
  match ph.pat_desc with
  | Record args -> args
  | Any -> []
  | _ -> fatal_error "Parmatch.as_record"
(* For each label in [lbls], pick the sub-pattern bound to that label in
   the association list [arg], defaulting to omega when absent. *)
let extract_fields lbls arg =
  let field_at pos =
    match List.find_opt (fun (lbl, _) -> lbl.lbl_pos = pos) arg with
    | Some (_, p) -> p
    | None -> omega
  in
  List.map (fun lbl -> field_at lbl.lbl_pos) lbls
(* Sub-patterns of a row whose head is [head], normalized against the
   discriminating head [discr]: constants contribute nothing, records are
   re-aligned on [discr]'s fields, and a wildcard head is expanded into
   as many wildcards as [discr]'s arity requires. *)
let simple_match_args discr head args =
let open Patterns.Head in
match head.pat_desc with
| Constant _ -> []
| Construct _
| Variant _
| Tuple _
| Array _
| Lazy -> args
| Record lbls -> extract_fields (record_arg discr) (List.combine lbls args)
| Any ->
begin match discr.pat_desc with
| Construct cstr -> Patterns.omegas cstr.cstr_arity
| Variant { has_arg = true }
| Lazy -> [Patterns.omega]
| Record lbls -> omega_list lbls
| Array len
| Tuple len -> Patterns.omegas len
| Variant { has_arg = false }
| Any
| Constant _ -> []
end
Consider a pattern matrix whose first column has been simplified to contain
only _ or a head constructor
| p1 , r1 ...
| p2 , r2 ...
| p3 , r3 ...
| ...
We build a normalized /discriminating/ pattern from a pattern [ q ] by folding
over the first column of the matrix , " refining " [ q ] as we go :
- when we encounter a row starting with [ Tuple ] or [ Lazy ] then we
can stop and return that head , as we can not refine any further . Indeed ,
these constructors are alone in their signature , so they will subsume
whatever other head we might find , as well as the head we 're threading
along .
- when we find a [ Record ] then it is a bit more involved : it is also alone
in its signature , however it might only be matching a subset of the
record fields . We use these fields to refine our accumulator and keep going
as another row might match on different fields .
- rows starting with a wildcard do not bring any information , so we ignore
them and keep going
- if we encounter anything else ( i.e. any other constructor ) , then we just
stop and return our accumulator .
only _ or a head constructor
| p1, r1...
| p2, r2...
| p3, r3...
| ...
We build a normalized /discriminating/ pattern from a pattern [q] by folding
over the first column of the matrix, "refining" [q] as we go:
- when we encounter a row starting with [Tuple] or [Lazy] then we
can stop and return that head, as we cannot refine any further. Indeed,
these constructors are alone in their signature, so they will subsume
whatever other head we might find, as well as the head we're threading
along.
- when we find a [Record] then it is a bit more involved: it is also alone
in its signature, however it might only be matching a subset of the
record fields. We use these fields to refine our accumulator and keep going
as another row might match on different fields.
- rows starting with a wildcard do not bring any information, so we ignore
them and keep going
- if we encounter anything else (i.e. any other constructor), then we just
stop and return our accumulator.
*)
(* Build a normalized discriminating head from [q] by folding over the
   first column of the simplified matrix [pss]; see the explanation
   above.  Fix: the inline comment in the [Record] branch had lost its
   opening delimiter (leaving bare prose and an orphan [*)]), which
   broke parsing; restored as a proper comment. *)
let discr_pat q pss =
  let open Patterns.Head in
  let rec refine_pat acc = function
    | [] -> acc
    | ((head, _), _) :: rows ->
      match head.pat_desc with
      | Any -> refine_pat acc rows
      | Tuple _ | Lazy -> head
      | Record lbls ->
        (* N.B. we could make this case "simpler" by refining the record
           case using [all_record_args].
           In which case we wouldn't need to fold over the first column
           for records.
           However it makes the witness we generate for the exhaustivity
           warning less pretty. *)
        let fields =
          List.fold_right (fun lbl r ->
            if List.exists (fun l -> l.lbl_pos = lbl.lbl_pos) r then
              r
            else
              lbl :: r
          ) lbls (record_arg acc)
        in
        let d = { head with pat_desc = Record fields } in
        refine_pat d rows
    | _ -> acc
  in
  let q, _ = deconstruct q in
  match q.pat_desc with
  | Any | Record _ -> refine_pat q pss
  | _ -> q
(* Split [r] into a prefix with as many elements as [xs], plus the rest;
   fatal if [r] is shorter than [xs]. *)
let rec read_args xs r = match xs, r with
  | [], _ -> [], r
  | _ :: xs', arg :: rest ->
      let args, rest' = read_args xs' rest in
      arg :: args, rest'
  | _, _ ->
      fatal_error "Parmatch.read_args"
(* Rebuild a pattern shaped like [q], taking its sub-patterns from the
   prefix of [r]; returns the rebuilt pattern consed onto the unused
   suffix of [r].  When [erase_mutable] is set, sub-patterns under
   mutable record fields are replaced by omega.
   Fix: the body of the [Tpat_constant _|Tpat_any] case ([q::r]) had
   been lost together with its trailing comment, leaving a bare [->]
   before the next case; restored. *)
let do_set_args ~erase_mutable q r = match q with
| {pat_desc = Tpat_tuple omegas} ->
    let args,rest = read_args omegas r in
    make_pat (Tpat_tuple args) q.pat_type q.pat_mode q.pat_env::rest
| {pat_desc = Tpat_record (omegas,closed)} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_record
         (List.map2 (fun (lid, lbl,_) arg ->
            if
              erase_mutable &&
              (match lbl.lbl_mut with
               | Mutable -> true | Immutable -> false)
            then
              lid, lbl, omega
            else
              lid, lbl, arg)
            omegas args, closed))
      q.pat_type q.pat_mode q.pat_env::
    rest
| {pat_desc = Tpat_construct (lid, c, omegas, _)} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_construct (lid, c, args, None))
      q.pat_type q.pat_mode q.pat_env::
    rest
| {pat_desc = Tpat_variant (l, omega, row)} ->
    let arg, rest =
      match omega, r with
        Some _, a::r -> Some a, r
      | None, r -> None, r
      | _ -> assert false
    in
    make_pat
      (Tpat_variant (l, arg, row)) q.pat_type q.pat_mode q.pat_env::
    rest
| {pat_desc = Tpat_lazy _omega} ->
    begin match r with
      arg::rest ->
        make_pat (Tpat_lazy arg) q.pat_type q.pat_mode q.pat_env::rest
    | _ -> fatal_error "Parmatch.do_set_args (lazy)"
    end
| {pat_desc = Tpat_array omegas} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_array args) q.pat_type q.pat_mode q.pat_env::
    rest
| {pat_desc=Tpat_constant _|Tpat_any} ->
    q::r (* case any is used in matching.ml *)
| _ -> fatal_error "Parmatch.set_args"
(* Specializations of [do_set_args]: keep mutable sub-patterns, or erase
   them to omega. *)
let set_args q r = do_set_args ~erase_mutable:false q r
and set_args_erase_mutable q r = do_set_args ~erase_mutable:true q r
Given a matrix of non - empty rows
p1 : : r1 ...
p2 : : r2 ...
p3 : : r3 ...
Simplify the first column [ p1 p2 p3 ] by splitting all or - patterns .
The result is a list of pairs
( ( pattern head , arguments ) , rest of row )
For example ,
x : : r1
( Some _ ) as y : : r2
( None as x ) as y : : r3
( Some x | ( None as x ) ) : : becomes
( ( _ , [ ] ) , r1 )
( ( Some , [ _ ] ) , r2 )
( ( None , [ ] ) , r3 )
( ( Some , [ x ] ) , r4 )
( ( None , [ ] ) , r4 )
p1 :: r1...
p2 :: r2...
p3 :: r3...
Simplify the first column [p1 p2 p3] by splitting all or-patterns.
The result is a list of pairs
((pattern head, arguments), rest of row)
For example,
x :: r1
(Some _) as y :: r2
(None as x) as y :: r3
(Some x | (None as x)) :: r4
becomes
(( _ , [ ] ), r1)
(( Some, [_] ), r2)
(( None, [ ] ), r3)
(( Some, [x] ), r4)
(( None, [ ] ), r4)
*)
(* Push [p] (with rest-of-row [ps]) onto the continuation [k] via
   [add_column], splitting or-patterns into one entry per alternative and
   stripping variable and alias bindings first. *)
let simplify_head_pat ~add_column p ps k =
let rec simplify_head_pat p ps k =
match Patterns.General.(view p |> strip_vars).pat_desc with
| `Or (p1,p2,_) -> simplify_head_pat p1 ps (simplify_head_pat p2 ps k)
| #Patterns.Simple.view as view ->
add_column (Patterns.Head.deconstruct { p with pat_desc = view }) ps k
in simplify_head_pat p ps k
(* Simplify the first column of a matrix of non-empty rows by splitting
   or-patterns; see the example above.
   Fix: the case for an empty row had been lost together with its
   comment, making the match non-exhaustive (a [[] :: _] input would
   raise [Match_failure]); restored as an explicit assertion. *)
let rec simplify_first_col = function
  | [] -> []
  | [] :: _ -> assert false  (* the rows are non-empty! *)
  | (p::ps) :: rows ->
      let add_column p ps k = (p, ps) :: k in
      simplify_head_pat ~add_column p ps (simplify_first_col rows)
Builds the specialized matrix of [ pss ] according to the discriminating
pattern head [ d ] .
See section 3.1 of /~maranget/papers/warn/warn.pdf
NOTES :
- we are polymorphic on the type of matrices we work on , in particular a row
might not simply be a [ pattern list ] . That 's why we have the [ extend_row ]
parameter .
pattern head [d].
See section 3.1 of /~maranget/papers/warn/warn.pdf
NOTES:
- we are polymorphic on the type of matrices we work on, in particular a row
might not simply be a [pattern list]. That's why we have the [extend_row]
parameter.
*)
(* Specialized submatrix of [pss] for the discriminating head [discr]:
   keep the rows whose head matches [discr], extending each kept row
   with the head's (normalized) arguments.  See section 3.1 of
   Maranget's "Warnings for pattern matching". *)
let build_specialized_submatrix ~extend_row discr pss =
  List.filter_map
    (fun ((head, args), ps) ->
       if simple_match discr head
       then Some (extend_row (simple_match_args discr head args) ps)
       else None)
    pss
The " default " and " specialized " matrices of a given matrix .
See section 3.1 of /~maranget/papers/warn/warn.pdf .
See section 3.1 of /~maranget/papers/warn/warn.pdf .
*)
(* The "default" matrix and the per-head "specialized" matrices of a
   given matrix; see section 3.1 of Maranget's warnings paper. *)
type 'matrix specialized_matrices = {
default : 'matrix;
constrs : (Patterns.Head.t * 'matrix) list;
}
Consider a pattern matrix whose first column has been simplified
to contain only _ or a head constructor
| p1 , r1 ...
| p2 , r2 ...
| p3 , r3 ...
| ...
We split this matrix into a list of /specialized/ sub - matrices , one for
each head constructor appearing in the first column . For each row whose
first column starts with a head constructor , remove this head
column , prepend one column for each argument of the constructor ,
and add the resulting row in the sub - matrix corresponding to this
head constructor .
Rows whose left column is omega ( the Any pattern _ ) may match any
head constructor , so they are added to all sub - matrices .
In the case where all the rows in the matrix have an omega on their first
column , then there is only one /specialized/ sub - matrix , formed of all these
omega rows .
This matrix is also called the /default/ matrix .
See the documentation of [ build_specialized_submatrix ] for an explanation of
the [ extend_row ] parameter .
to contain only _ or a head constructor
| p1, r1...
| p2, r2...
| p3, r3...
| ...
We split this matrix into a list of /specialized/ sub-matrices, one for
each head constructor appearing in the first column. For each row whose
first column starts with a head constructor, remove this head
column, prepend one column for each argument of the constructor,
and add the resulting row in the sub-matrix corresponding to this
head constructor.
Rows whose left column is omega (the Any pattern _) may match any
head constructor, so they are added to all sub-matrices.
In the case where all the rows in the matrix have an omega on their first
column, then there is only one /specialized/ sub-matrix, formed of all these
omega rows.
This matrix is also called the /default/ matrix.
See the documentation of [build_specialized_submatrix] for an explanation of
the [extend_row] parameter.
*)
(* Split [rows] into one specialized sub-matrix per head constructor of
   the first column, plus the default matrix of wildcard-headed rows;
   see the explanation above.
   Fix: three inline comments had lost their opening delimiters (leaving
   bare prose, partly duplicated, and orphan [*)] closers) inside the
   body, which broke parsing; restored as proper comments. *)
let build_specialized_submatrices ~extend_row discr rows =
  let extend_group discr p args r rs =
    let r = extend_row (simple_match_args discr p args) r in
    (discr, r :: rs)
  in

  (* Insert a row of head [p] and rest [r] into the right group.
     Note: with this implementation, the order of the groups
     is the order of their first row in the source order.
     This is a nice property to get exhaustivity counter-examples
     in source order. *)
  let rec insert_constr head args r = function
    | [] ->
        [extend_group head head args r []]
    | (q0,rs) as bd::env ->
        if simple_match q0 head
        then extend_group q0 head args r rs :: env
        else bd :: insert_constr head args r env
  in

  (* Insert a wildcard-headed row into every existing group. *)
  let insert_omega r env =
    List.map (fun (q0,rs) -> extend_group q0 Patterns.Head.omega [] r rs) env
  in

  let rec form_groups constr_groups omega_tails = function
    | [] -> (constr_groups, omega_tails)
    | ((head, args), tail) :: rest ->
        match head.pat_desc with
        | Patterns.Head.Any ->
            (* note that calling insert_omega here would be wrong
               as some groups may not have been formed yet, if the
               first row with this head pattern comes after in the list *)
            form_groups constr_groups (tail :: omega_tails) rest
        | _ ->
            form_groups
              (insert_constr head args tail constr_groups) omega_tails rest
  in

  let constr_groups, omega_tails =
    let initial_constr_group =
      let open Patterns.Head in
      match discr.pat_desc with
      | Record _ | Tuple _ | Lazy ->
          (* [discr] comes from [discr_pat], and in this case subsumes any
             of the patterns we could find on the first column of [rows].
             So it is better to use it for our initial environment than
             any of the normalized pattern we might obtain from the first
             column. *)
          [discr,[]]
      | _ -> []
    in
    form_groups initial_constr_group [] rows
  in

  (* Rows were accumulated in reverse; omega rows also belong to every
     constructor group. *)
  let default = List.rev omega_tails in
  let constrs =
    List.fold_right insert_omega omega_tails constr_groups
    |> List.map (fun (discr, rs) -> (discr, List.rev rs))
  in
  { default; constrs; }
(* Replace the last element of a row (given as head * rest) by pattern
   [a]; when the rest is empty, [a] becomes the head itself. *)
let set_last a =
let rec loop = function
| [] -> assert false
| [_] -> [Patterns.General.erase a]
| x::l -> x :: loop l
in
function
| (_, []) -> (Patterns.Head.deconstruct a, [])
| (first, row) -> (first, loop row)
(* Overwrite the last column of every row whose head is discriminating
   (non-wildcard) with the constant 0; wildcard-headed rows are kept
   unchanged. *)
let mark_partial =
let zero =
make_pat (`Constant (Const_int 0)) Ctype.none Value_mode.max_mode Env.empty
in
List.map (fun ((hp, _), _ as ps) ->
match hp.pat_desc with
| Patterns.Head.Any -> ps
| _ -> set_last zero ps
)
(* Close the polymorphic-variant row [row]: every [Reither] field not yet
   forced present is linked to absent, and if the row was open (or its
   name had to be dropped) its [more] variable is unified with a closed
   row.  [static] tracks whether any conjunctive field remains. *)
let close_variant env row =
let Row {fields; more; name=orig_name; closed; fixed} = row_repr row in
let name, static =
List.fold_left
(fun (nm, static) (_tag,f) ->
match row_field_repr f with
| Reither(_, _, false) ->
link_row_field_ext ~inside:f rf_absent;
(None, static)
| Reither (_, _, true) -> (nm, false)
| Rabsent | Rpresent _ -> (nm, static))
(orig_name, true) fields in
if not closed || name != orig_name then begin
let more' = if static then Btype.newgenty Tnil else Btype.newgenvar () in
Ctype.unify env more
(Btype.newgenty
(Tvariant
(create_row ~fields:[] ~more:more'
~closed:true ~name ~fixed)))
end
Check whether the first column of env makes up a complete signature or
not . We work on the discriminating pattern heads of each sub - matrix : they
are not omega / Any .
Check whether the first column of env makes up a complete signature or
not. We work on the discriminating pattern heads of each sub-matrix: they
are not omega/Any.
*)
(* Whether the discriminating heads of [env] make up a complete signature
   for their type.  The heads are never wildcards.  Extension
   constructors and arrays are never complete; chars require all 256
   values; tuples, records and lazy are singleton signatures.  For
   variants, [closing] selects the check used when the row may still be
   closed. *)
let full_match closing env = match env with
| [] -> false
| (discr, _) :: _ ->
let open Patterns.Head in
match discr.pat_desc with
| Any -> assert false
| Construct { cstr_tag = Cstr_extension _ ; _ } -> false
| Construct c -> List.length env = c.cstr_consts + c.cstr_nonconsts
| Variant { type_row; _ } ->
let fields =
List.map
(fun (d, _) ->
match d.pat_desc with
| Variant { tag } -> tag
| _ -> assert false)
env
in
let row = type_row () in
if closing && not (Btype.has_fixed_explanation row) then
(* closing: absent or still-collapsible tags do not count *)
List.for_all
(fun (tag,f) ->
match row_field_repr f with
Rabsent | Reither(_, _, false) -> true
| Reither (_, _, true)
| Rpresent _ -> List.mem tag fields)
(row_fields row)
else
row_closed row &&
List.for_all
(fun (tag,f) ->
row_field_repr f = Rabsent || List.mem tag fields)
(row_fields row)
| Constant Const_char _ ->
List.length env = 256
| Constant _
| Array _ -> false
| Tuple _
| Record _
| Lazy -> true
(* When checking an open (extensible) type [ext], should the column whose
   groups are [env] be extended with an extra constructor?  True only for
   ordinary (non-extension) constructors of exactly the type [ext]. *)
let should_extend ext env = match ext with
| None -> false
| Some ext -> begin match env with
| [] -> assert false
| (p,_)::_ ->
let open Patterns.Head in
begin match p.pat_desc with
| Construct {cstr_tag=(Cstr_constant _|Cstr_block _|Cstr_unboxed)} ->
let path = get_constructor_type_path p.pat_type p.pat_env in
Path.same path ext
| Construct {cstr_tag=(Cstr_extension _)} -> false
| Constant _ | Tuple _ | Variant _ | Record _ | Array _ | Lazy -> false
| Any -> assert false
end
end
(* Pattern matching constructor [cstr] applied to wildcards, reusing the
   location/type metadata of [ex_pat]. *)
let pat_of_constr ex_pat cstr =
{ex_pat with pat_desc =
Tpat_construct (mknoloc (Longident.Lident cstr.cstr_name),
cstr, omegas cstr.cstr_arity, None)}
(* Or-pattern [x | y], reusing [x]'s metadata. *)
let orify x y =
make_pat (Tpat_or (x, y, None)) x.pat_type x.pat_mode x.pat_env
(* Right-nested or-pattern over a non-empty list:
   [p1 | (p2 | (... | pn))].  Fails on the empty list. *)
let orify_many pats =
  match List.rev pats with
  | [] -> assert false
  | last :: rev_init ->
      List.fold_left (fun acc p -> orify p acc) last rev_init
(* Or-pattern covering all constructors in [cstrs], applied to
   wildcards; raises [Empty] when [cstrs] is empty. *)
let pat_of_constrs ex_pat cstrs =
let ex_pat = Patterns.Head.to_omega_pattern ex_pat in
if cstrs = [] then raise Empty else
orify_many (List.map (pat_of_constr ex_pat) cstrs)
(* Enumerate patterns covering type [ty]: every constructor of a variant
   (only when [always] is set, when there is at most one constructor, or
   when all constructors are GADTs), the full record pattern for a
   record, a wildcard tuple for tuples, and a plain wildcard otherwise.
   Fix: the comment inside the [when] guard had lost its delimiters,
   leaving bare prose in the middle of an expression; restored. *)
let pats_of_type ?(always=false) env ty mode =
  let ty' = Ctype.expand_head env ty in
  match get_desc ty' with
  | Tconstr (path, _, _) ->
      begin match Env.find_type_descrs path env with
      | exception Not_found -> [omega]
      | Type_variant (cstrs,_) when always || List.length cstrs <= 1 ||
        (* Only explode when all constructors are GADTs *)
        List.for_all (fun cd -> cd.cstr_generalized) cstrs ->
          List.map (pat_of_constr (make_pat Tpat_any ty mode env)) cstrs
      | Type_record (labels, _) ->
          let fields =
            List.map (fun ld ->
              mknoloc (Longident.Lident ld.lbl_name), ld, omega)
              labels
          in
          [make_pat (Tpat_record (fields, Closed)) ty mode env]
      | Type_variant _ | Type_abstract | Type_open -> [omega]
      end
  | Ttuple tl ->
      [make_pat (Tpat_tuple (omegas (List.length tl))) ty mode env]
  | _ -> [omega]
(* All constructors of the variant type [ty], expanding manifests one
   step at a time; fatal on non-variant types. *)
let rec get_variant_constructors env ty =
match get_desc ty with
| Tconstr (path,_,_) -> begin
try match Env.find_type path env, Env.find_type_descrs path env with
| _, Type_variant (cstrs,_) -> cstrs
| {type_manifest = Some _}, _ ->
get_variant_constructors env
(Ctype.expand_head_once env (clean_copy ty))
| _ -> fatal_error "Parmatch.get_variant_constructors"
with Not_found ->
fatal_error "Parmatch.get_variant_constructors"
end
| _ -> fatal_error "Parmatch.get_variant_constructors"
(* Sets of constructor descriptions, ordered by constructor name. *)
module ConstructorSet = Set.Make(struct
type t = constructor_description
let compare c1 c2 = String.compare c1.cstr_name c2.cstr_name
end)
(* All constructors of the type of head [constr] that do not occur in
   [used_constrs], with constant (nullary) constructors first.
   Fix: two comments ("Sends back ..." and "Split constructors ...") had
   lost their delimiters, leaving bare prose before and inside the
   definition; restored as proper comments. *)
let complete_constrs constr used_constrs =
  let c = constr.pat_desc in
  let constrs = get_variant_constructors constr.pat_env c.cstr_res in
  let used_constrs = ConstructorSet.of_list used_constrs in
  let others =
    List.filter
      (fun cnstr -> not (ConstructorSet.mem cnstr used_constrs))
      constrs in
  (* Split constructors so as to put constant ones first *)
  let const, nonconst =
    List.partition (fun cnstr -> cnstr.cstr_arity = 0) others in
  const @ nonconst
(* A pattern covering the constructors missing from the first column of
   [env]; for extension constructors (whose signature is open) and
   non-constructor heads, fall back to the catch-all [extra_pat]. *)
let build_other_constrs env p =
let open Patterns.Head in
match p.pat_desc with
| Construct ({ cstr_tag = Cstr_extension _ }) -> extra_pat
| Construct
({ cstr_tag = Cstr_constant _ | Cstr_block _ | Cstr_unboxed } as c) ->
let constr = { p with pat_desc = c } in
let get_constr q =
match q.pat_desc with
| Construct c -> c
| _ -> fatal_error "Parmatch.get_constr" in
let used_constrs = List.map (fun (p,_) -> get_constr p) env in
pat_of_constrs p (complete_constrs constr used_constrs)
| _ -> extra_pat
(* Build a constant pattern not matched by the first column of [env]:
   starting from [first], step with [next] until a value is found whose
   [proj]-image does not occur among the heads of [env]. *)
let build_other_constant proj make first next p env =
  let used = List.map (fun (hd, _) -> proj hd.pat_desc) env in
  let rec search c =
    if List.mem c used
    then search (next c)
    else make_pat (make c) p.pat_type p.pat_mode p.pat_env
  in
  search first
Builds a pattern that is incompatible with all patterns in
the first column of env
Builds a pattern that is incompatible with all patterns in
the first column of env
*)
(* Placeholder tag shown in witnesses when the variant row is fixed, so
   no concrete missing tag can be exhibited (see [build_other]). *)
let some_private_tag = "<some private tag>"
(* Build a pattern that is incompatible with every pattern of the first
   column of [env].  [ext], when set, is the path of a type the fragile
   check wants to treat as extendable.
   Fix: the [Rabsent] arm of the row-field match (tag absent from the row,
   hence no possible witness) was lost during comment stripping, leaving
   the match non-exhaustive; it is restored below. *)
let build_other ext env =
  match env with
  | [] -> omega
  | (d, _) :: _ ->
      let open Patterns.Head in
      match d.pat_desc with
      | Construct { cstr_tag = Cstr_extension _ } ->
          (* an extension constructor set is never complete: answer a
             dedicated "*extension*" variable pattern *)
          make_pat
            (Tpat_var (Ident.create_local "*extension*",
                       {txt="*extension*"; loc = d.pat_loc}))
            Ctype.none Value_mode.max_mode Env.empty
      | Construct _ ->
          begin match ext with
          | Some ext ->
              if Path.same ext (get_constructor_type_path d.pat_type d.pat_env)
              then
                extra_pat
              else
                build_other_constrs env d
          | _ ->
              build_other_constrs env d
          end
      | Variant { cstr_row; type_row } ->
          let tags =
            List.map
              (fun (d, _) ->
                match d.pat_desc with
                | Variant { tag } -> tag
                | _ -> assert false)
              env
          in
          let make_other_pat tag const =
            let arg = if const then None else Some Patterns.omega in
            make_pat (Tpat_variant(tag, arg, cstr_row))
              d.pat_type d.pat_mode d.pat_env
          in
          let row = type_row () in
          begin match
            List.fold_left
              (fun others (tag,f) ->
                if List.mem tag tags then others else
                match row_field_repr f with
                | Rabsent -> others (* tag absent: cannot be a witness *)
                | Reither (c, _, _) -> make_other_pat tag c :: others
                | Rpresent arg -> make_other_pat tag (arg = None) :: others)
              [] (row_fields row)
          with
            [] ->
              (* all tags already matched: forge a fresh one *)
              let tag =
                if Btype.has_fixed_explanation row then some_private_tag else
                let rec mktag tag =
                  if List.mem tag tags then mktag (tag ^ "'") else tag in
                mktag "AnyOtherTag"
              in make_other_pat tag true
          | pat::other_pats ->
              (* or together all unmatched tags *)
              List.fold_left
                (fun p_res pat ->
                  make_pat (Tpat_or (pat, p_res, None))
                    d.pat_type d.pat_mode d.pat_env)
                pat other_pats
          end
      | Constant Const_char _ ->
          let all_chars =
            List.map
              (fun (p,_) -> match p.pat_desc with
                | Constant (Const_char c) -> c
                | _ -> assert false)
              env
          in
          (* scan a character interval for one not already matched *)
          let rec find_other i imax =
            if i > imax then raise Not_found
            else
              let ci = Char.chr i in
              if List.mem ci all_chars then
                find_other (i+1) imax
              else
                make_pat (Tpat_constant (Const_char ci))
                  d.pat_type d.pat_mode d.pat_env
          in
          let rec try_chars = function
            | [] -> Patterns.omega
            | (c1,c2) :: rest ->
                try
                  find_other (Char.code c1) (Char.code c2)
                with
                | Not_found -> try_chars rest
          in
          try_chars
            [ 'a', 'z' ; 'A', 'Z' ; '0', '9' ;
              ' ', '~' ; Char.chr 0 , Char.chr 255]
      | Constant Const_int _ ->
          build_other_constant
            (function Constant(Const_int i) -> i | _ -> assert false)
            (function i -> Tpat_constant(Const_int i))
            0 succ d env
      | Constant Const_int32 _ ->
          build_other_constant
            (function Constant(Const_int32 i) -> i | _ -> assert false)
            (function i -> Tpat_constant(Const_int32 i))
            0l Int32.succ d env
      | Constant Const_int64 _ ->
          build_other_constant
            (function Constant(Const_int64 i) -> i | _ -> assert false)
            (function i -> Tpat_constant(Const_int64 i))
            0L Int64.succ d env
      | Constant Const_nativeint _ ->
          build_other_constant
            (function Constant(Const_nativeint i) -> i | _ -> assert false)
            (function i -> Tpat_constant(Const_nativeint i))
            0n Nativeint.succ d env
      | Constant Const_string _ ->
          build_other_constant
            (function Constant(Const_string (s, _, _)) -> String.length s
                    | _ -> assert false)
            (function i ->
               Tpat_constant
                 (Const_string(String.make i '*',Location.none,None)))
            0 succ d env
      | Constant Const_float _ ->
          build_other_constant
            (function Constant(Const_float f) -> float_of_string f
                    | _ -> assert false)
            (function f -> Tpat_constant(Const_float (string_of_float f)))
            0.0 (fun f -> f +. 1.0) d env
      | Array _ ->
          let all_lengths =
            List.map
              (fun (p,_) -> match p.pat_desc with
                | Array len -> len
                | _ -> assert false)
              env in
          (* find an array length not already matched *)
          let rec try_arrays l =
            if List.mem l all_lengths then try_arrays (l+1)
            else
              make_pat (Tpat_array (omegas l))
                d.pat_type d.pat_mode d.pat_env in
          try_arrays 0
      | _ -> Patterns.omega
(* Does pattern [p] match at least one value?  Only variant patterns
   carrying an absent tag can be instance-free. *)
let rec has_instance p = match p.pat_desc with
  | Tpat_variant (l,_,r) when is_absent l r -> false
  | Tpat_any | Tpat_var _ | Tpat_constant _ | Tpat_variant (_,None,_) -> true
  | Tpat_alias (p,_,_) | Tpat_variant (_,Some p,_) -> has_instance p
  | Tpat_or (p1,p2,_) -> has_instance p1 || has_instance p2
  | Tpat_construct (_,_,ps,_) | Tpat_tuple ps | Tpat_array ps ->
      has_instances ps
  | Tpat_record (lps,_) -> has_instances (List.map (fun (_,_,x) -> x) lps)
  | Tpat_lazy p
    -> has_instance p

(* Pointwise extension of [has_instance] to a pattern vector. *)
and has_instances = function
  | [] -> true
  | q::rem -> has_instance q && has_instances rem
(*
  Core function :
  Is the last row of pattern matrix pss + qs satisfiable ?
  That is :
    Does there exist at least one value vector, es such that :
     1- for all ps in pss ps # es (ps and es are not compatible)
     2- qs <= es                  (es matches qs)

  ---

  In two places in the following function, we check the coherence of the
  first column of (pss + qs).
  If it is incoherent, then we exit early saying that (pss + qs) is not
  satisfiable (which is equivalent to saying "oh, we shouldn't have
  considered that branch, no good result can come from here").

  But what happens if we have a coherent but ill-typed column?
  - we might end up returning [false], which is equivalent to noticing the
    incompatibility: clearly this is fine.
  - if we end up returning [true] then we're saying that [qs] is useful
    while it is not. This is sad but not the end of the world, we're just
    allowing dead code to survive.
*)
(* See the comment above for the specification of [satisfiable]. *)
let rec satisfiable pss qs = match pss with
  | [] -> has_instances qs
  | _ ->
      match qs with
      | [] -> false
      | q::qs ->
          match Patterns.General.(view q |> strip_vars).pat_desc with
          | `Or(q1,q2,_) ->
              (* either alternative may witness satisfiability *)
              satisfiable pss (q1::qs) || satisfiable pss (q2::qs)
          | `Any ->
              let pss = simplify_first_col pss in
              if not (all_coherent (first_column pss)) then
                false
              else begin
                let { default; constrs } =
                  let q0 = discr_pat Patterns.Simple.omega pss in
                  build_specialized_submatrices ~extend_row:(@) q0 pss in
                if not (full_match false constrs) then
                  (* some constructor is unmatched by pss: the default
                     matrix decides *)
                  satisfiable default qs
                else
                  List.exists
                    (fun (p,pss) ->
                      not (is_absent_pat p) &&
                      satisfiable pss
                        (simple_match_args p Patterns.Head.omega [] @ qs))
                    constrs
              end
          | `Variant (l,_,r) when is_absent l r -> false
          | #Patterns.Simple.view as view ->
              let q = { q with pat_desc = view } in
              let pss = simplify_first_col pss in
              let hq, qargs = Patterns.Head.deconstruct q in
              if not (all_coherent (hq :: first_column pss)) then
                false
              else begin
                let q0 = discr_pat q pss in
                satisfiable (build_specialized_submatrix ~extend_row:(@) q0 pss)
                  (simple_match_args q0 hq qargs @ qs)
              end
(* While [satisfiable] only checks whether the last row of [pss + qs] is
   satisfiable, this function returns the (possibly empty) list of vectors
   [es] which verify:
     1- for all ps in pss, ps # es (ps and es are not compatible)
     2- qs <= es                   (es matches qs)

   This is done to enable GADT handling

   For considerations regarding the coherence check, see the comment on
   [satisfiable] above. *)
(* Like [satisfiable], but returns the list of witness vectors instead of
   a boolean (see the comment above).
   Fix: the bare line "first column of pss is made of variables only" was a
   comment whose delimiters were stripped; restored as a comment. *)
let rec list_satisfying_vectors pss qs =
  match pss with
  | [] -> if has_instances qs then [qs] else []
  | _ ->
      match qs with
      | [] -> []
      | q :: qs ->
          match Patterns.General.(view q |> strip_vars).pat_desc with
          | `Or(q1,q2,_) ->
              list_satisfying_vectors pss (q1::qs) @
              list_satisfying_vectors pss (q2::qs)
          | `Any ->
              let pss = simplify_first_col pss in
              if not (all_coherent (first_column pss)) then
                []
              else begin
                let q0 = discr_pat Patterns.Simple.omega pss in
                (* prepend head [p] to every witness of the default matrix *)
                let wild default_matrix p =
                  List.map (fun qs -> p::qs)
                    (list_satisfying_vectors default_matrix qs)
                in
                match build_specialized_submatrices ~extend_row:(@) q0 pss with
                | { default; constrs = [] } ->
                    (* first column of pss is made of variables only *)
                    wild default omega
                | { default; constrs = ((p,_)::_ as constrs) } ->
                    let for_constrs () =
                      List.flatten (
                        List.map (fun (p,pss) ->
                          if is_absent_pat p then
                            []
                          else
                            let witnesses =
                              list_satisfying_vectors pss
                                (simple_match_args p Patterns.Head.omega [] @ qs)
                            in
                            let p = Patterns.Head.to_omega_pattern p in
                            List.map (set_args p) witnesses
                        ) constrs
                      )
                    in
                    if full_match false constrs then for_constrs () else
                    begin match p.pat_desc with
                    | Construct _ ->
                        (* missing constructors: also produce witnesses
                           built from the unmatched ones *)
                        wild default (build_other_constrs constrs p)
                        @ for_constrs ()
                    | _ ->
                        wild default Patterns.omega
                    end
              end
          | `Variant (l, _, r) when is_absent l r -> []
          | #Patterns.Simple.view as view ->
              let q = { q with pat_desc = view } in
              let hq, qargs = Patterns.Head.deconstruct q in
              let pss = simplify_first_col pss in
              if not (all_coherent (hq :: first_column pss)) then
                []
              else begin
                let q0 = discr_pat q pss in
                List.map (set_args (Patterns.Head.to_omega_pattern q0))
                  (list_satisfying_vectors
                     (build_specialized_submatrix ~extend_row:(@) q0 pss)
                     (simple_match_args q0 hq qargs @ qs))
              end
(* Does the pattern vector [qs] match some row of the matrix [pss]?
   (Used to decide whether a guarded clause could match a counter-example.)
   Fix: restored the stripped comment before the final recursive call. *)
let rec do_match pss qs = match qs with
  | [] ->
      begin match pss with
      | []::_ -> true
      | _ -> false
      end
  | q::qs -> match Patterns.General.(view q |> strip_vars).pat_desc with
    | `Or (q1,q2,_) ->
        do_match pss (q1::qs) || do_match pss (q2::qs)
    | `Any ->
        let rec remove_first_column = function
          | (_::ps)::rem -> ps::remove_first_column rem
          | _ -> []
        in
        do_match (remove_first_column pss) qs
    | #Patterns.Simple.view as view ->
        let q = { q with pat_desc = view } in
        let q0, qargs = Patterns.Head.deconstruct q in
        let pss = simplify_first_col pss in
        (* [pss] will (or won't) match [q0 :: qs] regardless of the
           coherence of its first column. *)
        do_match
          (build_specialized_submatrix ~extend_row:(@) q0 pss)
          (qargs @ qs)
(* Debugging helper, kept commented out:
let print_pat pat =
  let rec string_of_pat pat =
    match pat.pat_desc with
      Tpat_var _ -> "v"
    | Tpat_any -> "_"
    | Tpat_alias (p, x) -> Printf.sprintf "(%s) as ?" (string_of_pat p)
    | Tpat_constant n -> "0"
    | Tpat_construct (_, lid, _) ->
        Printf.sprintf "%s" (String.concat "." (Longident.flatten lid.txt))
    | Tpat_lazy p ->
        Printf.sprintf "(lazy %s)" (string_of_pat p)
    | Tpat_or (p1,p2,_) ->
        Printf.sprintf "(%s | %s)" (string_of_pat p1) (string_of_pat p2)
    | Tpat_tuple list ->
        Printf.sprintf "(%s)" (String.concat "," (List.map string_of_pat list))
    | Tpat_variant (_, _, _) -> "variant"
    | Tpat_record (_, _) -> "record"
    | Tpat_array _ -> "array"
  in
  Printf.fprintf stderr "PAT[%s]\n%!" (string_of_pat pat)
*)
(* [exhaust ext pss n] lazily enumerates counter-example vectors of width
   [n]: value vectors matched by no row of [pss].  An empty sequence means
   [pss] is exhaustive.  [ext], when set, forces the constructors of that
   type to be considered extendable (fragile-match check).
   Fix: several comment blocks had lost their delimiters and were breaking
   the syntax; restored as comments. *)
let rec exhaust (ext:Path.t option) pss n = match pss with
  | [] -> Seq.return (omegas n)
  | []::_ -> Seq.empty
  | [(p :: ps)] -> exhaust_single_row ext p ps n
  | pss -> specialize_and_exhaust ext pss n

and exhaust_single_row ext p ps n =
  (* Shortcut: in the single-row case p :: ps we know that all
     counter-examples are either of the form
       counter-example(p) :: omegas
     or
       p :: counter-examples(ps)

     This is very interesting in the case where p contains
     or-patterns, as the non-shortcut path below would do a separate
     search for each constructor of the or-pattern, which can lead to
     an exponential blowup on examples such as
       | (A|B), (A|B), (A|B), (A|B) -> foo

     Note that this shortcut also applies to examples such as
       | A, A, A, A -> foo | (A|B), (A|B), (A|B), (A|B) -> bar
     thanks to the [get_mins] preprocessing step which will drop the
     first row (subsumed by the second). Code with this shape does
     occur naturally when people want to avoid fragile pattern
     matches: if A and B are the only two constructors, this is the
     best way to make a non-fragile distinction between "all As" and
     "at least one B".
  *)
  List.to_seq [Some p; None] |> Seq.flat_map
    (function
      | Some p ->
          let sub_witnesses = exhaust ext [ps] (n - 1) in
          Seq.map (fun row -> p :: row) sub_witnesses
      | None ->
          (* note: calling [exhaust] recursively on p would
             result in an infinite loop in the case n=1 *)
          let p_witnesses = specialize_and_exhaust ext [[p]] 1 in
          Seq.map (fun p_row -> p_row @ omegas (n - 1)) p_witnesses
    )

and specialize_and_exhaust ext pss n =
  let pss = simplify_first_col pss in
  if not (all_coherent (first_column pss)) then
    Seq.empty
  else begin
    (* Assuming the first column is ill-typed but considered coherent, we
       might end up producing an ill-typed witness of non-exhaustivity
       corresponding to the current branch.

       If [exhaust] has been called by [do_check_partial], then the
       witnesses produced get typechecked and the ill-typed ones are
       discarded.

       If [exhaust] has been called by [do_check_fragile], then it is
       possible we might fail to warn the user that the matching is
       fragile. See for example testsuite/tests/warnings/w04_failure.ml. *)
    let q0 = discr_pat Patterns.Simple.omega pss in
    match build_specialized_submatrices ~extend_row:(@) q0 pss with
    | { default; constrs = [] } ->
        (* first column of pss is made of variables only *)
        let sub_witnesses = exhaust ext default (n-1) in
        let q0 = Patterns.Head.to_omega_pattern q0 in
        Seq.map (fun row -> q0::row) sub_witnesses
    | { default; constrs } ->
        let try_non_omega (p,pss) =
          if is_absent_pat p then
            (* absent variant tags cannot appear in any value *)
            Seq.empty
          else
            let sub_witnesses =
              exhaust
                ext pss
                (List.length (simple_match_args p Patterns.Head.omega [])
                 + n - 1)
            in
            let p = Patterns.Head.to_omega_pattern p in
            Seq.map (set_args p) sub_witnesses
        in
        let try_omega () =
          if full_match false constrs && not (should_extend ext constrs) then
            Seq.empty
          else
            let sub_witnesses = exhaust ext default (n-1) in
            match build_other ext constrs with
            | exception Empty ->
                fatal_error "Parmatch.exhaust"
            | p ->
                Seq.map (fun tail -> p :: tail) sub_witnesses
        in
        (* witnesses from each constructor submatrix, then the default *)
        List.map (fun constr_mat -> Some constr_mat) constrs @ [None]
        |> List.to_seq
        |> Seq.flat_map
          (function
            | Some constr_mat -> try_non_omega constr_mat
            | None -> try_omega ())
  end
(* Specialize [exhaust] to width-1 questions: every witness vector has
   exactly one component, which is extracted here. *)
let exhaust ext pss n =
  let singleton = function
    | [x] -> x
    | _ -> assert false
  in
  Seq.map singleton (exhaust ext pss n)
(* Attempt to close polymorphic variant rows matched exhaustively;
   [tdefs = None] is used for the inner check deciding whether closing is
   sound.  Fix: restored the stripped comment inside [try_non_omega]. *)
let rec pressure_variants tdefs = function
  | [] -> false
  | []::_ -> true
  | pss ->
      let pss = simplify_first_col pss in
      if not (all_coherent (first_column pss)) then
        true
      else begin
        let q0 = discr_pat Patterns.Simple.omega pss in
        match build_specialized_submatrices ~extend_row:(@) q0 pss with
        | { default; constrs = [] } -> pressure_variants tdefs default
        | { default; constrs } ->
            let rec try_non_omega = function
              | (_p,pss) :: rem ->
                  let ok = pressure_variants tdefs pss in
                  (* The order below matters : we want [pressure_variants]
                     to be called on all the specialized submatrices because
                     we might close some variant in any of them regardless
                     of whether [ok] is true for [pss] or not *)
                  try_non_omega rem && ok
              | [] -> true
            in
            if full_match (tdefs=None) constrs then
              try_non_omega constrs
            else if tdefs = None then
              pressure_variants None default
            else
              let full = full_match true constrs in
              let ok =
                if full then
                  try_non_omega constrs
                else begin
                  let { constrs = partial_constrs; _ } =
                    build_specialized_submatrices ~extend_row:(@) q0
                      (mark_partial pss)
                  in
                  try_non_omega partial_constrs
                end
              in
              begin match constrs, tdefs with
              | [], _
              | _, None -> ()
              | (d, _) :: _, Some env ->
                  match d.pat_desc with
                  | Variant { type_row; _ } ->
                      let row = type_row () in
                      if Btype.has_fixed_explanation row
                      || pressure_variants None default then ()
                      else close_variant env row
                  | _ -> ()
              end;
              ok
      end
(* This time every_satisfiable pss qs checks the
   utility of every expansion of qs.
   Expansion means expansion of or-patterns inside qs
*)
(* Outcome of the usefulness check for one clause.
   Fix: the constructors were lost with their trailing comments during
   extraction; restored, as required by the uses of [Used]/[Unused]/
   [Upartial] throughout the usefulness analysis below. *)
type answer =
  | Used                               (* useful pattern *)
  | Unused                             (* useless pattern *)
  | Upartial of Typedtree.pattern list (* mixed: list of useless
                                          or-pattern alternatives *)
(* A row under usefulness analysis: [active] is the part still to be
   examined, [ors] collects or-patterns set aside for later expansion,
   and [no_ors] the columns already treated (both in reverse order). *)
type usefulness_row =
  {no_ors : pattern list ; ors : pattern list ; active : pattern list}

(* Wrap a pattern vector into a fresh usefulness row. *)
let make_row ps = {ors=[] ; no_ors=[]; active=ps}

let make_rows pss = List.map make_row pss
(* Is [p] a wildcard once aliases and variables are stripped? *)
let is_var p = match Patterns.General.(view p |> strip_vars).pat_desc with
  | `Any -> true
  | _ -> false
(* Does every row of [rs] start with a wildcard pattern? *)
let is_var_column rs =
  let starts_with_var r =
    match r.active with
    | first :: _ -> is_var first
    | [] -> assert false
  in
  List.for_all starts_with_var rs
(* Extract the two alternatives of an or-pattern, looking through aliases. *)
let rec or_args p = match p.pat_desc with
  | Tpat_or (p1,p2,_) -> p1,p2
  | Tpat_alias (p,_,_) -> or_args p
  | _ -> assert false
(* Drop the first active column of a row / of a whole matrix. *)
let remove r = match r.active with
  | _::rem -> {r with active=rem}
  | [] -> assert false

let remove_column rs = List.map remove rs

(* Move the first active pattern to the [no_ors] (resp. [ors]) side. *)
let push_no_or r = match r.active with
  | p::rem -> { r with no_ors = p::r.no_ors ; active=rem}
  | [] -> assert false

let push_or r = match r.active with
  | p::rem -> { r with ors = p::r.ors ; active=rem}
  | [] -> assert false

let push_or_column rs = List.map push_or rs
and push_no_or_column rs = List.map push_no_or rs
(* Simplify the first column of a usefulness matrix, pairing each head
   with the remainder of its row.
   Fix: the [[] -> assert false] arm of the inner match (rows under
   analysis always have a non-empty [active] part, cf. [remove]/[push_or])
   was lost with its trailing comment; restored. *)
let rec simplify_first_usefulness_col = function
  | [] -> []
  | row :: rows ->
      match row.active with
      | [] -> assert false (* the rows are non-empty *)
      | p :: ps ->
          let add_column p ps k =
            (p, { row with active = ps }) :: k in
          simplify_head_pat ~add_column p ps
            (simplify_first_usefulness_col rows)
(* Recover the already-examined columns of a row ([no_ors] is accumulated
   in reverse order). *)
let make_vector r = List.rev r.no_ors

let make_matrix rs = List.map make_vector rs
(* Standard union on answers *)
(* Combine two usefulness answers: [Unused] is absorbing, [Used] is
   neutral, and partial answers concatenate their useless sub-patterns. *)
let union_res r1 r2 = match r1, r2 with
  | Unused, _ | _, Unused -> Unused
  | Used, r | r, Used -> r
  | Upartial u1, Upartial u2 -> Upartial (u1 @ u2)
(* From a row [qs], build one single-pattern row per saved or-pattern in
   [qs.ors]; each new row keeps all the other patterns in its [no_ors]
   part so that previous matching work is reused. *)
let extract_elements qs =
  let rec do_rec seen = function
    | [] -> []
    | q::rem ->
        {no_ors= List.rev_append seen rem @ qs.no_ors ;
        ors=[] ;
        active = [q]}::
        do_rec (q::seen) rem in
  do_rec [] qs.ors
(* Transpose a non-empty matrix given as a list of rows; each resulting
   column accumulates its elements in reverse row order. *)
let transpose rs = match rs with
  | [] -> assert false
  | r :: rem ->
      let seed = List.map (fun x -> [x]) r in
      List.fold_left
        (fun cols row -> List.map2 (fun col x -> x :: col) cols row)
        seed rem
(* For each or-pattern column of [qs], gather the corresponding extracted
   rows coming from every row of [pss]. *)
let extract_columns pss qs = match pss with
  | [] -> List.map (fun _ -> []) qs.ors
  | _ ->
      let rows = List.map extract_elements pss in
      transpose rows
(* Core function
   The idea is to first look for or patterns (recursive case), then
   check or-patterns argument usefulness (terminal case)
*)
(* Usefulness check that additionally reports which or-pattern alternatives
   are useless ([Upartial]).  Or-patterns are set aside ([push_or]) and
   expanded once every other column has been examined.
   Fix: three match arms lost with their trailing comments are restored:
   [| [] ->] and [| _ ->] in the match on [qs.ors], and the absent-variant
   arm whose orphaned body [Unused] remained (same guard as in
   [satisfiable] above). *)
let rec every_satisfiables pss qs = match qs.active with
  | [] ->
      (* qs is now fully examined: check usefulness *)
      begin match qs.ors with
      | [] -> (* no or-patterns *)
          if satisfiable (make_matrix pss) (make_vector qs) then
            Used
          else
            Unused
      | _ -> (* n or-patterns -> 2n expansions *)
          List.fold_right2
            (fun pss qs r -> match r with
              | Unused -> Unused
              | _ ->
                  match qs.active with
                  | [q] ->
                      let q1,q2 = or_args q in
                      let r_loc = every_both pss qs q1 q2 in
                      union_res r r_loc
                  | _ -> assert false)
            (extract_columns pss qs) (extract_elements qs)
            Used
      end
  | q::rem ->
      begin match Patterns.General.(view q |> strip_vars).pat_desc with
      | `Any ->
          if is_var_column pss then
            (* a column of variables can be dropped altogether *)
            every_satisfiables (remove_column pss) (remove qs)
          else
            every_satisfiables (push_no_or_column pss) (push_no_or qs)
      | `Or (q1,q2,_) ->
          if
            q1.pat_loc.Location.loc_ghost &&
            q2.pat_loc.Location.loc_ghost
          then
            (* syntactically generated or-patterns are not expanded *)
            every_satisfiables (push_no_or_column pss) (push_no_or qs)
          else
            every_satisfiables (push_or_column pss) (push_or qs)
      | `Variant (l,_,r) when is_absent l r ->
          (* absent tag: the pattern matches no value *)
          Unused
      | #Patterns.Simple.view as view ->
          let q = { q with pat_desc = view } in
          let pss = simplify_first_usefulness_col pss in
          let hq, args = Patterns.Head.deconstruct q in
          (* incoherent matrices are handled as in [satisfiable] *)
          if not (all_coherent (hq :: first_column pss)) then
            Unused
          else begin
            let q0 = discr_pat q pss in
            every_satisfiables
              (build_specialized_submatrix q0 pss
                ~extend_row:(fun ps r -> { r with active = ps @ r.active }))
              {qs with active=simple_match_args q0 hq args @ rem}
          end
      end
(*
  This function ``every_both'' performs the usefulness check
  of or-pat q1|q2.
  The trick is to call every_satisfied twice with
  current active columns restricted to q1 and q2,
  That way,
    - others orpats in qs.ors will not get expanded.
    - all matching work performed on qs.no_ors is not performed again.
*)
(* See the comment above for the role of [every_both]. *)
and every_both pss qs q1 q2 =
  let qs1 = {qs with active=[q1]}
  and qs2 = {qs with active=[q2]} in
  let r1 = every_satisfiables pss qs1
  (* if q1 and q2 are compatible, q1 competes with q2's usefulness *)
  and r2 = every_satisfiables (if compat q1 q2 then qs1::pss else pss) qs2 in
  match r1 with
  | Unused ->
      begin match r2 with
      | Unused -> Unused
      | Used -> Upartial [q1]
      | Upartial u2 -> Upartial (q1::u2)
      end
  | Used ->
      begin match r2 with
      | Unused -> Upartial [q2]
      | _ -> r2
      end
  | Upartial u1 ->
      begin match r2 with
      | Unused -> Upartial (u1@[q2])
      | Used -> r1
      | Upartial u2 -> Upartial (u1 @ u2)
      end
(* [le_pat p q]: does [p] match at least every value matched by [q]
   (i.e. is [p] more general than [q])? *)
let rec le_pat p q =
  match (p.pat_desc, q.pat_desc) with
  | (Tpat_var _|Tpat_any),_ -> true
  | Tpat_alias(p,_,_), _ -> le_pat p q
  | _, Tpat_alias(q,_,_) -> le_pat p q
  | Tpat_constant(c1), Tpat_constant(c2) -> const_compare c1 c2 = 0
  | Tpat_construct(_,c1,ps,_), Tpat_construct(_,c2,qs,_) ->
      Types.equal_tag c1.cstr_tag c2.cstr_tag && le_pats ps qs
  | Tpat_variant(l1,Some p1,_), Tpat_variant(l2,Some p2,_) ->
      (l1 = l2 && le_pat p1 p2)
  | Tpat_variant(l1,None,_r1), Tpat_variant(l2,None,_) ->
      l1 = l2
  | Tpat_variant(_,_,_), Tpat_variant(_,_,_) -> false
  | Tpat_tuple(ps), Tpat_tuple(qs) -> le_pats ps qs
  | Tpat_lazy p, Tpat_lazy q -> le_pat p q
  | Tpat_record (l1,_), Tpat_record (l2,_) ->
      let ps,qs = records_args l1 l2 in
      le_pats ps qs
  | Tpat_array(ps), Tpat_array(qs) ->
      List.length ps = List.length qs && le_pats ps qs
  (* in all other cases, fall back on satisfiability *)
  | _,_ -> not (satisfiable [[p]] [q])

(* Pointwise extension of [le_pat] to pattern vectors. *)
and le_pats ps qs =
  match ps,qs with
    p::ps, q::qs -> le_pat p q && le_pats ps qs
  | _, _ -> true
(* Keep only the minimal elements of [ps] with respect to the preorder
   [le].  A left-to-right sweep drops any element dominated by a later
   one; two sweeps suffice because the first also reverses the list. *)
let get_mins le ps =
  let rec sweep acc remaining =
    match remaining with
    | [] -> acc
    | candidate :: rest ->
        let dominated = List.exists (fun other -> le other candidate) rest in
        sweep (if dominated then acc else candidate :: acc) rest
  in
  sweep [] (sweep [] ps)
(*
  lub p q is a pattern that matches all values matched by p and q
  may raise Empty, when p and q are not compatible
*)
(* [lub p q] is a pattern matching exactly the values matched by both [p]
   and [q]; raises [Empty] when they are incompatible.
   Fix: the arm handling an or-pattern on the RIGHT was lost with its
   trailing comment (the orphaned "Thanks god, lub is commutative" text
   remained); without it, [_, Tpat_or _] fell through to [raise Empty].
   Restored using commutativity. *)
let rec lub p q = match p.pat_desc,q.pat_desc with
  | Tpat_alias (p,_,_),_ -> lub p q
  | _,Tpat_alias (q,_,_) -> lub p q
  | (Tpat_any|Tpat_var _),_ -> q
  | _,(Tpat_any|Tpat_var _) -> p
  | Tpat_or (p1,p2,_),_ -> orlub p1 p2 q
  | _,Tpat_or (q1,q2,_) -> orlub q1 q2 p (* Thanks god, lub is commutative *)
  | Tpat_constant c1, Tpat_constant c2 when const_compare c1 c2 = 0 -> p
  | Tpat_tuple ps, Tpat_tuple qs ->
      let rs = lubs ps qs in
      make_pat (Tpat_tuple rs) p.pat_type p.pat_mode p.pat_env
  | Tpat_lazy p, Tpat_lazy q ->
      let r = lub p q in
      make_pat (Tpat_lazy r) p.pat_type p.pat_mode p.pat_env
  | Tpat_construct (lid,c1,ps1,_), Tpat_construct (_,c2,ps2,_)
        when Types.equal_tag c1.cstr_tag c2.cstr_tag ->
      let rs = lubs ps1 ps2 in
      make_pat (Tpat_construct (lid, c1, rs, None))
        p.pat_type p.pat_mode p.pat_env
  | Tpat_variant(l1,Some p1,row), Tpat_variant(l2,Some p2,_)
        when l1=l2 ->
      let r=lub p1 p2 in
      make_pat (Tpat_variant (l1,Some r,row))
        p.pat_type p.pat_mode p.pat_env
  | Tpat_variant (l1,None,_row), Tpat_variant(l2,None,_)
        when l1 = l2 -> p
  | Tpat_record (l1,closed),Tpat_record (l2,_) ->
      let rs = record_lubs l1 l2 in
      make_pat (Tpat_record (rs, closed))
        p.pat_type p.pat_mode p.pat_env
  | Tpat_array ps, Tpat_array qs
        when List.length ps = List.length qs ->
      let rs = lubs ps qs in
      make_pat (Tpat_array rs)
        p.pat_type p.pat_mode p.pat_env
  | _,_ ->
      raise Empty

(* lub against an or-pattern: try both alternatives, dropping an
   incompatible one; [Empty] only when both alternatives fail. *)
and orlub p1 p2 q =
  try
    let r1 = lub p1 q in
    try
      {q with pat_desc=(Tpat_or (r1,lub p2 q,None))}
    with
    | Empty -> r1
  with
  | Empty -> lub p2 q

(* Merge two label-sorted field lists, lub-ing fields present in both. *)
and record_lubs l1 l2 =
  let rec lub_rec l1 l2 = match l1,l2 with
    | [],_ -> l2
    | _,[] -> l1
    | (lid1, lbl1,p1)::rem1, (lid2, lbl2,p2)::rem2 ->
        if lbl1.lbl_pos < lbl2.lbl_pos then
          (lid1, lbl1,p1)::lub_rec rem1 l2
        else if lbl2.lbl_pos < lbl1.lbl_pos then
          (lid2, lbl2,p2)::lub_rec l1 rem2
        else
          (lid1, lbl1,lub p1 p2)::lub_rec rem1 rem2 in
  lub_rec l1 l2

and lubs ps qs = match ps,qs with
  | p::ps, q::qs -> lub p q :: lubs ps qs
  | _,_ -> []
(* Entry point: try to close the variant rows appearing in [patl]. *)
let pressure_variants tdefs patl =
  let rows = List.map (fun p -> [p; omega]) patl in
  let (_ : bool) = pressure_variants (Some tdefs) rows in
  ()
(* Like [pressure_variants], for computation patterns: split each pattern
   into its value and exception halves and pressure both matrices. *)
let pressure_variants_in_computation_pattern tdefs patl =
  let cons_opt opt tail =
    match opt with
    | Some p -> p :: tail
    | None -> tail
  in
  let val_pss, exn_pss =
    List.fold_right
      (fun pat (vpss, epss) ->
        let vp, ep = split_pattern pat in
        cons_opt vp vpss, cons_opt ep epss)
      patl ([], [])
  in
  pressure_variants tdefs val_pss;
  pressure_variants tdefs exn_pss
(* Utilities for diagnostics *)
(* Build the initial matrix for the exhaustiveness check, keeping only
   unguarded clauses (a guarded clause may fail to match at run time). *)
let initial_matrix casel =
  List.filter_map
    (fun c ->
      match c.c_guard with
      | Some _ -> None
      | None -> Some [c.c_lhs])
    casel
(* Dual of [initial_matrix]: keep only the guarded clauses. *)
let initial_only_guarded casel =
  List.filter_map
    (fun c ->
      match c.c_guard with
      | None -> None
      | Some _ -> Some [c.c_lhs])
    casel
(* conversion from Typedtree.pattern to Parsetree.pattern list *)
(* Conversion from Typedtree.pattern to Parsetree.pattern.  Constructor
   and label names are replaced by fresh identifiers recorded in hash
   tables, so the resulting counter-example can be typechecked.
   Fix: the [Tpat_var "*extension*"] arm was lost with its trailing
   comment, orphaning [mkpat (Ppat_var nm)]; restored (such markers are
   built by [build_other] and recognized by [contains_extension]). *)
module Conv = struct
  open Parsetree
  let mkpat desc = Ast_helper.Pat.mk desc

  (* fresh-name generator for forged constructor/label identifiers *)
  let name_counter = ref 0
  let fresh name =
    let current = !name_counter in
    name_counter := !name_counter + 1;
    "#$" ^ name ^ Int.to_string current

  let conv typed =
    let constrs = Hashtbl.create 7 in
    let labels = Hashtbl.create 7 in
    let rec loop pat =
      match pat.pat_desc with
        Tpat_or (pa,pb,_) ->
          mkpat (Ppat_or (loop pa, loop pb))
      | Tpat_var (_, ({txt="*extension*"} as nm)) ->
          (* keep the extension marker as a variable pattern *)
          mkpat (Ppat_var nm)
      | Tpat_any
      | Tpat_var _ ->
          mkpat Ppat_any
      | Tpat_constant c ->
          mkpat (Ppat_constant (Untypeast.constant c))
      | Tpat_alias (p,_,_) -> loop p
      | Tpat_tuple lst ->
          mkpat (Ppat_tuple (List.map loop lst))
      | Tpat_construct (cstr_lid, cstr, lst, _) ->
          let id = fresh cstr.cstr_name in
          let lid = { cstr_lid with txt = Longident.Lident id } in
          Hashtbl.add constrs id cstr;
          let arg =
            match List.map loop lst with
            | [] -> None
            | [p] -> Some ([], p)
            | lst -> Some ([], mkpat (Ppat_tuple lst))
          in
          mkpat (Ppat_construct(lid, arg))
      | Tpat_variant(label,p_opt,_row_desc) ->
          let arg = Option.map loop p_opt in
          mkpat (Ppat_variant(label, arg))
      | Tpat_record (subpatterns, _closed_flag) ->
          let fields =
            List.map
              (fun (_, lbl, p) ->
                let id = fresh lbl.lbl_name in
                Hashtbl.add labels id lbl;
                (mknoloc (Longident.Lident id), loop p))
              subpatterns
          in
          mkpat (Ppat_record (fields, Open))
      | Tpat_array lst ->
          mkpat (Ppat_array (List.map loop lst))
      | Tpat_lazy p ->
          mkpat (Ppat_lazy (loop p))
    in
    let ps = loop typed in
    (ps, constrs, labels)
end
(* Does the counter-example [pat] contain the "*extension*" marker
   variable produced for extensible variant types? *)
let contains_extension pat =
  let is_extension_marker = function
    | {pat_desc=Tpat_var (_, {txt="*extension*"})} -> true
    | _ -> false
  in
  exists_pattern is_extension_marker pat
(* How a counter-example pattern for a type was produced. *)
type pat_explosion = PE_single | PE_gadt_cases

(* Result of [ppat_of_type]: no pattern (empty type), a plain wildcard,
   or a concrete parsetree pattern with its constructor/label tables. *)
type ppat_of_type =
  | PT_empty
  | PT_any
  | PT_pattern of
      pat_explosion *
      Parsetree.pattern *
      (string, constructor_description) Hashtbl.t *
      (string, label_description) Hashtbl.t
(* Produce a parsetree counter-example pattern for values of type [ty]. *)
let ppat_of_type env ty =
  match pats_of_type env ty Value_mode.max_mode with
  | [] -> PT_empty
  | [{pat_desc = Tpat_any}] -> PT_any
  | [pat] ->
      let (ppat, constrs, labels) = Conv.conv pat in
      PT_pattern (PE_single, ppat, constrs, labels)
  | pats ->
      (* several head constructors (GADT case): or them together *)
      let (ppat, constrs, labels) = Conv.conv (orify_many pats) in
      PT_pattern (PE_gadt_cases, ppat, constrs, labels)
(* Convert a counter-example to a parsetree pattern and hand it to the
   typing callback [pred]. *)
let typecheck ~pred p =
  let untyped, constrs, labels = Conv.conv p in
  pred constrs labels untyped
(* Decide [Total]/[Partial] for the unguarded-clause matrix [pss] of
   [casel]; [pred] typechecks candidate counter-examples so that ill-typed
   witnesses (GADTs) are discarded.
   Fix: restored the stripped comment block on the empty-matrix case. *)
let do_check_partial ~pred loc casel pss = match pss with
  | [] ->
      (*
        This can occur
        - For empty matches generated by ocamlp4 (no warning)
        - when all patterns have guards (then, casel <> [])
          (specific warning)
        Then match MUST be considered non-exhaustive,
        otherwise compilation of PM is broken.
      *)
      begin match casel with
      | [] -> ()
      | _ ->
          if Warnings.is_active Warnings.All_clauses_guarded then
            Location.prerr_warning loc Warnings.All_clauses_guarded
      end ;
      Partial
  | ps::_ ->
      let counter_examples =
        exhaust None pss (List.length ps)
        |> Seq.filter_map (typecheck ~pred) in
      match counter_examples () with
      | Seq.Nil -> Total
      | Seq.Cons (v, _rest) ->
          if Warnings.is_active (Warnings.Partial_match "") then begin
            let errmsg =
              try
                let buf = Buffer.create 16 in
                let fmt = Format.formatter_of_buffer buf in
                Printpat.top_pretty fmt v;
                if do_match (initial_only_guarded casel) [v] then
                  Buffer.add_string buf
                    "\n(However, some guarded clause may match this value.)";
                if contains_extension v then
                  Buffer.add_string buf
                    "\nMatching over values of extensible variant types \
                    (the *extension* above)\n\
                    must include a wild card pattern in order to be exhaustive."
                ;
                Buffer.contents buf
              with _ ->
                ""
            in
            Location.prerr_warning loc (Warnings.Partial_match errmsg)
          end;
          Partial
(* Set-like insertion of [path] into a list of paths. *)
let rec add_path path = function
  | [] -> [path]
  | x::rem as paths ->
      if Path.same path x then paths
      else x::add_path path rem
(* A datatype path is "extendable" for the fragile-match check unless it
   is one of these closed builtin types. *)
let extendable_path path =
  let builtin =
    List.exists (Path.same path)
      [ Predef.path_bool;
        Predef.path_list;
        Predef.path_unit;
        Predef.path_option ]
  in
  not builtin
(* Accumulate into [r] the paths of extendable datatypes whose
   constructors appear in pattern [p]. *)
let rec collect_paths_from_pat r p = match p.pat_desc with
  | Tpat_construct(_, {cstr_tag=(Cstr_constant _|Cstr_block _|Cstr_unboxed)},
                   ps, _) ->
      let path = get_constructor_type_path p.pat_type p.pat_env in
      List.fold_left
        collect_paths_from_pat
        (if extendable_path path then add_path path r else r)
        ps
  | Tpat_any|Tpat_var _|Tpat_constant _| Tpat_variant (_,None,_) -> r
  | Tpat_tuple ps | Tpat_array ps
  | Tpat_construct (_, {cstr_tag=Cstr_extension _}, ps, _)->
      List.fold_left collect_paths_from_pat r ps
  | Tpat_record (lps,_) ->
      List.fold_left
        (fun r (_, _, p) -> collect_paths_from_pat r p)
        r lps
  | Tpat_variant (_, Some p, _) | Tpat_alias (p,_,_) -> collect_paths_from_pat r p
  | Tpat_or (p1,p2,_) ->
      collect_paths_from_pat (collect_paths_from_pat r p1) p2
  | Tpat_lazy p
    ->
      collect_paths_from_pat r p
(*
  Actual fragile check
   1. Collect data types in the patterns of the match.
   2. One exhaustivity check per datatype, considering that
      the type is extended.
*)
(* See the comment above for the fragile-match check performed here. *)
let do_check_fragile loc casel pss =
  let exts =
    List.fold_left
      (fun r c -> collect_paths_from_pat r c.c_lhs)
      [] casel in
  match exts with
  | [] -> ()
  | _ -> match pss with
    | [] -> ()
    | ps::_ ->
        List.iter
          (fun ext ->
            (* no witness means the match stays exhaustive even if [ext]
               gains new constructors: it is fragile *)
            let witnesses = exhaust (Some ext) pss (List.length ps) in
            match witnesses () with
            | Seq.Nil ->
                Location.prerr_warning
                  loc
                  (Warnings.Fragile_match (Path.name ext))
            | Seq.Cons _ -> ())
          exts
(* Warn about redundant clauses and or-pattern alternatives, and about
   [Texp_unreachable] right-hand sides that are in fact reachable.
   Fix: the bare line "First look for redundant or partially redundant
   patterns" was a comment whose delimiters were stripped; restored. *)
let check_unused pred casel =
  if Warnings.is_active Warnings.Redundant_case
  || List.exists (fun c -> c.c_rhs.exp_desc = Texp_unreachable) casel then
    let rec do_rec pref = function
      | [] -> ()
      | {c_lhs=q; c_guard; c_rhs} :: rem ->
          let qs = [q] in
          begin try
            let pss =
              (* [pref] accumulates previous unguarded clauses in reverse *)
              List.rev pref
              |> List.filter (compats qs)
              |> get_mins le_pats in
            (* First look for redundant or partially redundant patterns *)
            let r = every_satisfiables (make_rows pss) (make_row qs) in
            let refute = (c_rhs.exp_desc = Texp_unreachable) in
            if r = Unused && refute then () else
            let r =
              (* skip the expensive GADT refinement when it cannot change
                 the warnings that would be emitted *)
              let skip =
                r = Unused || (not refute && pref = []) ||
                not(refute || Warnings.is_active Warnings.Unreachable_case) in
              if skip then r else
              let sfs = list_satisfying_vectors pss qs in
              if sfs = [] then Unused else
              let sfs =
                List.map (function [u] -> u | _ -> assert false) sfs in
              let u = orify_many sfs in
              (* typecheck the witnesses to see if the clause is reachable *)
              let (pattern,constrs,labels) = Conv.conv u in
              let pattern = {pattern with Parsetree.ppat_loc = q.pat_loc} in
              match pred refute constrs labels pattern with
                None when not refute ->
                  Location.prerr_warning q.pat_loc Warnings.Unreachable_case;
                  Used
              | _ -> r
            in
            match r with
            | Unused ->
                Location.prerr_warning
                  q.pat_loc Warnings.Redundant_case
            | Upartial ps ->
                List.iter
                  (fun p ->
                    Location.prerr_warning
                      p.pat_loc Warnings.Redundant_subpat)
                  ps
            | Used -> ()
          with Empty | Not_found -> assert false
          end ;
          if c_guard <> None then
            do_rec pref rem
          else
            do_rec ([q]::pref) rem in
    do_rec [] casel
(* A pattern is irrefutable when it matches at least as much as the
   wildcard [omega], i.e. it matches any value. *)
let irrefutable pat = le_pat pat omega
(* An inactive pattern is one whose matching performs no interesting
   computation: no lazy forcing, no array access, and no read of
   a mutable record field.  Only a [Total] match can be inactive;
   [Partial] is rejected outright. *)
let inactive ~partial pat =
match partial with
| Partial -> false
| Total -> begin
let rec loop pat =
match pat.pat_desc with
| Tpat_lazy _ | Tpat_array _ ->
false
| Tpat_any | Tpat_var _ | Tpat_variant (_, None, _) ->
true
| Tpat_constant c -> begin
match c with
(* string constants only force nothing when strings are
   immutable (-safe-string) *)
| Const_string _ -> Config.safe_string
| Const_int _ | Const_char _ | Const_float _
| Const_int32 _ | Const_int64 _ | Const_nativeint _ -> true
end
| Tpat_tuple ps | Tpat_construct (_, _, ps, _) ->
List.for_all (fun p -> loop p) ps
| Tpat_alias (p,_,_) | Tpat_variant (_, Some p, _) ->
loop p
| Tpat_record (ldps,_) ->
List.for_all
(fun (_, lbl, p) -> lbl.lbl_mut = Immutable && loop p)
ldps
| Tpat_or (p,q,_) ->
loop p && loop q
in
loop pat
end
(* Exhaustiveness-check entry point: build and minimize the initial
   matrix, run the partiality check, and when the match is total also
   run the fragility check (if the [Fragile_match] warning is active). *)
let check_partial pred loc casel =
let pss = initial_matrix casel in
let pss = get_mins le_pats pss in
let total = do_check_partial ~pred loc casel pss in
if
total = Total && Warnings.is_active (Warnings.Fragile_match "")
then begin
do_check_fragile loc casel pss
end ;
total
(* Specification: ambiguous variables in or-patterns.

   The semantics of or-patterns in OCaml is specified with
   a left-to-right bias: a value [v] matches the pattern [p | q] if it
   matches [p] or [q], but if it matches both, the environment
   captured by the match is the environment captured by [p], never the
   one captured by [q].

   While this property is generally well-understood, one specific case
   where users expect a different semantics is when a pattern is
   followed by a when-guard: [| p when g -> e]. Consider for example:

     | ((Const x, _) | (_, Const x)) when is_neutral x -> branch

   The semantics is clear: match the scrutinee against the pattern, if
   it matches, test the guard, and if the guard passes, take the
   branch.

   However, consider the input [(Const a, Const b)], where [a] fails
   the test [is_neutral a], while [b] passes the test [is_neutral
   b]. With the left-to-right semantics, the clause above is *not*
   taken by its input: matching [(Const a, Const b)] against the
   or-pattern succeeds in the left branch, it returns the environment
   [x -> a], and then the guard [is_neutral a] is tested and fails,
   the branch is not taken. Most users, however, intuitively expect
   that any pair that has one side passing the test will take the
   branch. They assume it is equivalent to the following:

     | (Const x, _) when is_neutral x -> branch
     | (_, Const x) when is_neutral x -> branch

   while it is not.

   The code below is dedicated to finding these confusing cases: the
   cases where a guard uses "ambiguous" variables, that are bound to
   different parts of the scrutinees by different sides of
   a or-pattern. In other words, it finds the cases where the
   specified left-to-right semantics is not equivalent to
   a non-deterministic semantics (any branch can be taken) relatively
   to a specific guard. *)
(* Set of all identifiers bound by pattern [p]. *)
let pattern_vars p = Ident.Set.of_list (Typedtree.pat_bound_idents p)
(* Row for ambiguous variable search:
   [row] is the traditional pattern row,
   [varsets] contains a list of head variable sets (varsets).

   A given varset contains all the variables that appeared at the head
   of a pattern in the row at some point during traversal: they would
   all be bound to the same value at matching time. On the contrary,
   two variables of different varsets appeared at different places in
   the pattern and may be bound to distinct sub-parts of the matched
   value.

   All rows of a (sub)matrix have rows of the same length,
   but also varsets of the same length.

   Varsets are populated when simplifying the first column
   -- the variables of the head pattern are collected in a new varset.
   For example,
     { row = x :: r1; varsets = s1 }
     { row = (Some _) as y :: r2; varsets = s2 }
     { row = (None as x) as y :: r3; varsets = s3 }
     { row = (Some x | (None as x)) :: r4; varsets = s4 }
   becomes
     (_, { row = r1; varsets = {x} :: s1 })
     (Some _, { row = r2; varsets = {y} :: s2 })
     (None, { row = r3; varsets = {x, y} :: s3 })
     (Some x, { row = r4; varsets = {} :: s4 })
     (None, { row = r4; varsets = {x} :: s4 })
*)
(* Row of the ambiguity matrix: [row] is a classic pattern row, while
   [varsets] records, for each already-simplified column, the set of
   variables bound at the head of that column (variables in the same
   set are bound to the same sub-value of the scrutinee). *)
type amb_row = { row : pattern list ; varsets : Ident.Set.t list; }
(* Simplify the head pattern [p] of a row: aliases and variables are
   expanded (their identifiers accumulate in [head_bound_variables]),
   or-patterns duplicate the row, and once a simple head is reached it
   is deconstructed and the accumulated identifiers are pushed as a new
   varset.  [k] is the continuation list of already-simplified rows. *)
let simplify_head_amb_pat head_bound_variables varsets ~add_column p ps k =
let rec simpl head_bound_variables varsets p ps k =
match (Patterns.General.view p).pat_desc with
| `Alias (p,x,_) ->
simpl (Ident.Set.add x head_bound_variables) varsets p ps k
| `Var (x, _) ->
simpl (Ident.Set.add x head_bound_variables) varsets Patterns.omega ps k
| `Or (p1,p2,_) ->
simpl head_bound_variables varsets p1 ps
(simpl head_bound_variables varsets p2 ps k)
| #Patterns.Simple.view as view ->
add_column (Patterns.Head.deconstruct { p with pat_desc = view })
{ row = ps; varsets = head_bound_variables :: varsets; } k
in simpl head_bound_variables varsets p ps k
(* To accurately report ambiguous variables, one must consider
   that previous clauses have already matched some values.

   Consider for example:

     | (Foo x, Foo y) -> ...
     | ((Foo x, _) | (_, Foo x)) when bar x -> ...

   The second line taken in isolation uses an unstable variable,
   but the discriminating values, of the shape [(Foo v1, Foo v2)],
   would all be filtered by the line above.

   To track this information, the matrices we analyze contain both
   *positive* rows, that describe the rows currently being analyzed
   (of type Varsets.row, so that their varsets are tracked) and
   *negative rows*, that describe the cases already matched against.

   The values matched by a signed matrix are the values matched by
   some of the positive rows but none of the negative rows. In
   particular, a variable is stable if, for any value not matched by
   any of the negative rows, the environment captured by any of the
   matching positive rows is identical. *)
(* A signed row: [Positive] rows are currently under analysis,
   [Negative] rows stand for values already matched by earlier
   clauses. *)
type ('a, 'b) signed = Positive of 'a | Negative of 'b
(* Simplify the first column of a signed matrix: positive rows track
   their varsets through [simplify_head_amb_pat]; negative rows are
   simplified with the plain [simplify_head_pat]. *)
let rec simplify_first_amb_col = function
| [] -> []
| (Negative [] | Positive { row = []; _ }) :: _ -> assert false
| Negative (n :: ns) :: rem ->
let add_column n ns k = (n, Negative ns) :: k in
simplify_head_pat
~add_column n ns (simplify_first_amb_col rem)
| Positive { row = p::ps; varsets; }::rem ->
let add_column p ps k = (p, Positive ps) :: k in
simplify_head_amb_pat
Ident.Set.empty varsets
~add_column p ps (simplify_first_amb_col rem)
(* Result of a stability analysis: either every variable is stable,
   or only the variables in the given set are. *)
type stable_vars =
| All
| Vars of Ident.Set.t
(* Intersection of two stability results; [All] is the neutral
   element. *)
let stable_inter left right =
  match left, right with
  | All, other
  | other, All -> other
  | Vars a, Vars b -> Vars (Ident.Set.inter a b)
(* Left fold over a non-empty list, seeded with its head;
   raises [Invalid_argument "reduce"] on the empty list. *)
let reduce f xs =
  match xs with
  | [] -> invalid_arg "reduce"
  | seed :: rest -> List.fold_left f seed rest
(* Stable variables of a signed matrix: the variables that every
   positive row binds to the same sub-value, for any value that no
   negative row matches. *)
let rec matrix_stable_vars m = match m with
| [] -> All
| ((Positive {row = []; _} | Negative []) :: _) as empty_rows ->
let exception Negative_empty_row in
(* if at least one empty row is negative, the matrix matches no value *)
let get_varsets = function
| Negative n ->
(* All rows have the same number of columns;
   if the first row is empty, they all are. *)
assert (n = []);
raise Negative_empty_row
| Positive p ->
assert (p.row = []);
p.varsets in
begin match List.map get_varsets empty_rows with
| exception Negative_empty_row -> All
| rows_varsets ->
(* a variable is stable when it belongs to the same varset in
   every positive row *)
let stables_in_varsets =
reduce (List.map2 Ident.Set.inter) rows_varsets in
Vars
(List.fold_left Ident.Set.union Ident.Set.empty stables_in_varsets)
end
| m ->
let is_negative = function
| Negative _ -> true
| Positive _ -> false in
(* negative rows only: no value is matched, every variable is stable *)
if List.for_all is_negative m then
All
else begin
let m = simplify_first_amb_col m in
if not (all_coherent (first_column m)) then
All
else begin
(* specialize on the head constructors of the first column and
   recurse on each sub-matrix *)
let submatrices =
let extend_row columns = function
| Negative r -> Negative (columns @ r)
| Positive r -> Positive { r with row = columns @ r.row } in
let q0 = discr_pat Patterns.Simple.omega m in
let { default; constrs } =
build_specialized_submatrices ~extend_row q0 m in
let non_default = List.map snd constrs in
if full_match false constrs
then non_default
else default :: non_default in
let submat_stable = List.map matrix_stable_vars submatrices in
List.fold_left stable_inter All submat_stable
end
end
(* Stable variables of the single pattern [p], given the negative
   rows [ns] accumulated from the previous clauses. *)
let pattern_stable_vars ns p =
matrix_stable_vars
(List.fold_left (fun m n -> Negative n :: m)
[Positive {varsets = []; row = [p]}] ns)
(* All identifier paths that appear in an expression that occurs
   as a clause right hand side or guard.

   The function is rather complex due to the compilation of
   unpack patterns by introducing code in rhs expressions
   and **guards**.

   For pattern (module M:S) -> e the code is
     let module M_mod = unpack M .. in e

   Hence M is "free" in e iff M_mod is free in e.

   Not doing so will yield excessive warning in
     (module (M:S) } ...) when true -> ....
   as M is always present in
     let module M_mod = unpack M .. in true *)
(* Collect the head identifiers of every path used by [exp] (a clause
   right-hand side or guard).  Compiler-generated unpack bindings
   (tagged with the #modulepat attribute) are post-processed: when the
   generated module binding [M_mod] is itself unused, the unpacked
   identifier is removed again so it is not spuriously counted. *)
let all_rhs_idents exp =
let ids = ref Ident.Set.empty in
let is_unpack exp =
List.exists
(fun attr -> attr.Parsetree.attr_name.txt = "#modulepat")
exp.exp_attributes in
let open Tast_iterator in
let expr_iter iter exp =
(match exp.exp_desc with
| Texp_ident (path, _lid, _descr, _kind) ->
List.iter (fun id -> ids := Ident.Set.add id !ids) (Path.heads path)
| _ -> Tast_iterator.default_iterator.expr iter exp);
if is_unpack exp then begin match exp.exp_desc with
| Texp_letmodule
(id_mod,_,_,
{mod_desc=
Tmod_unpack ({exp_desc=Texp_ident (Path.Pident id_exp,_,_,_)},_)},
_) ->
(* the sub-expression was just traversed, so [id_exp] is there *)
assert (Ident.Set.mem id_exp !ids) ;
begin match id_mod with
| Some id_mod when not (Ident.Set.mem id_mod !ids) ->
ids := Ident.Set.remove id_exp !ids
| _ -> ()
end
| _ -> assert false
end
in
let iterator = {Tast_iterator.default_iterator with expr = expr_iter} in
iterator.expr iterator exp;
!ids
(* Emit [Ambiguous_var_in_pattern_guard] when a when-guard uses
   a variable that different branches of an or-pattern may bind to
   different sub-values of the scrutinee.  The fold threads [ns], the
   negative rows of the clauses already examined (guarded clauses are
   not added to [ns], since they may not match). *)
let check_ambiguous_bindings =
let open Warnings in
let warn0 = Ambiguous_var_in_pattern_guard [] in
fun cases ->
if is_active warn0 then
let check_case ns case = match case with
| { c_lhs = p; c_guard=None ; _} -> [p]::ns
| { c_lhs=p; c_guard=Some g; _} ->
(* only pattern variables actually used by the guard matter *)
let all =
Ident.Set.inter (pattern_vars p) (all_rhs_idents g) in
if not (Ident.Set.is_empty all) then begin
match pattern_stable_vars ns p with
| All -> ()
| Vars stable ->
let ambiguous = Ident.Set.diff all stable in
if not (Ident.Set.is_empty ambiguous) then begin
let pps =
Ident.Set.elements ambiguous |> List.map Ident.name in
let warn = Ambiguous_var_in_pattern_guard pps in
Location.prerr_warning p.pat_loc warn
end
end;
ns
in
ignore (List.fold_left check_case [] cases)
(* Enumerate the counter-examples of matrix [pss]: patterns matching
   the values that no row matches.  Each candidate is re-typechecked
   through [pred] when one is provided; candidates rejected by [pred]
   are filtered out. *)
let do_complete_partial ?pred pss =
match pss with
| [] -> []
| ps :: _ ->
let typecheck p =
match pred with
| Some pred ->
let (pattern,constrs,labels) = Conv.conv p in
Option.map (fun v -> v, Some (constrs, labels))
(pred constrs labels pattern)
| None -> Some (p, None)
in
exhaust None pss (List.length ps)
|> Seq.filter_map typecheck
|> List.of_seq
(* Like [do_complete_partial], after minimizing the matrix. *)
let complete_partial ~pred pss =
let pss = get_mins le_pats pss in
do_complete_partial ~pred pss
(* Compute, clause by clause, which rows of [casel] are unused
   ([`Unused]) or only partially used ([`Unused_subs]) with respect to
   the preceding rows.  Unlike [check_unused], the findings are
   returned rather than turned into warnings. *)
let return_unused casel =
let rec do_rec acc pref = function
| [] -> acc
| q :: rem ->
let qs = [q] in
let acc =
try
let pss = get_mins le_pats (List.filter (compats qs) pref) in
let r = every_satisfiables (make_rows pss) (make_row qs) in
match r with
| Unused -> `Unused q :: acc
| Upartial ps -> `Unused_subs (q, ps) :: acc
| Used -> acc
with Empty | Not_found -> assert false
in
do_rec acc ([q]::pref) rem
in
do_rec [] [] casel
|
bf54cd907190864552b3f5751ab6baeb262c911ac38603099ae4d282f9bb7e3e | mejgun/haskell-tdlib | AvailableReaction.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Data.AvailableReaction where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.ReactionType as ReactionType
import qualified Utils as U
-- | Represents an available reaction.
--
-- @type@ Type of the reaction; @needs_premium@ True, if Telegram
-- Premium is needed to send the reaction.
--
-- NOTE(review): the @data AvailableReaction =@ head was garbled in the
-- original text and has been reconstructed from the record body below.
data AvailableReaction = AvailableReaction
  { -- | True, if Telegram Premium is needed to send the reaction
    needs_premium :: Maybe Bool,
    -- | Type of the reaction
    _type :: Maybe ReactionType.ReactionType
  }
  deriving (Eq)
-- | Compact rendering built with the project helpers: 'U.cc' joins the
-- 'U.p'-formatted fields after the constructor name.
instance Show AvailableReaction where
  show
    AvailableReaction
      { needs_premium = needs_premium_,
        _type = _type_
      } =
      "AvailableReaction"
        ++ U.cc
          [ U.p "needs_premium" needs_premium_,
            U.p "_type" _type_
          ]
-- | Decodes a TDLib object whose @\@type@ tag is @availableReaction@;
-- any other tag, or a non-object value, fails the parse via 'mempty'.
-- Both fields are optional ('A..:?').
instance T.FromJSON AvailableReaction where
  parseJSON v@(T.Object obj) = do
    t <- obj A..: "@type" :: T.Parser String
    case t of
      "availableReaction" -> parseAvailableReaction v
      _ -> mempty
    where
      parseAvailableReaction :: A.Value -> T.Parser AvailableReaction
      parseAvailableReaction = A.withObject "AvailableReaction" $ \o -> do
        needs_premium_ <- o A..:? "needs_premium"
        _type_ <- o A..:? "type"
        return $ AvailableReaction {needs_premium = needs_premium_, _type = _type_}
  parseJSON _ = mempty
-- | Encodes back to the TDLib wire form, tagging the object with
-- @\@type = "availableReaction"@ (note the wire key is @type@, while
-- the Haskell field is @_type@).
instance T.ToJSON AvailableReaction where
  toJSON
    AvailableReaction
      { needs_premium = needs_premium_,
        _type = _type_
      } =
      A.object
        [ "@type" A..= T.String "availableReaction",
          "needs_premium" A..= needs_premium_,
          "type" A..= _type_
        ]
| null | https://raw.githubusercontent.com/mejgun/haskell-tdlib/e5c8059a74f88073b27dbfafb6908de0729af110/src/TD/Data/AvailableReaction.hs | haskell | # LANGUAGE OverloadedStrings #
|
|
|
| |
module TD.Data.AvailableReaction where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.ReactionType as ReactionType
import qualified Utils as U
| Represents an available reaction @type Type of the reaction @needs_premium True , if Telegram Premium is needed to send the reaction
AvailableReaction
needs_premium :: Maybe Bool,
_type :: Maybe ReactionType.ReactionType
}
deriving (Eq)
instance Show AvailableReaction where
show
AvailableReaction
{ needs_premium = needs_premium_,
_type = _type_
} =
"AvailableReaction"
++ U.cc
[ U.p "needs_premium" needs_premium_,
U.p "_type" _type_
]
instance T.FromJSON AvailableReaction where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"availableReaction" -> parseAvailableReaction v
_ -> mempty
where
parseAvailableReaction :: A.Value -> T.Parser AvailableReaction
parseAvailableReaction = A.withObject "AvailableReaction" $ \o -> do
needs_premium_ <- o A..:? "needs_premium"
_type_ <- o A..:? "type"
return $ AvailableReaction {needs_premium = needs_premium_, _type = _type_}
parseJSON _ = mempty
instance T.ToJSON AvailableReaction where
toJSON
AvailableReaction
{ needs_premium = needs_premium_,
_type = _type_
} =
A.object
[ "@type" A..= T.String "availableReaction",
"needs_premium" A..= needs_premium_,
"type" A..= _type_
]
|
64a090f1e10b999046bae4978b0f9d8a33fc73048397732ae29ed5d1defb0e47 | daigotanaka/mern-cljs | express.cljs | (ns mern-utils.express
(:require-macros
[cljs.core.async.macros :refer [go]]
[mern-utils.macros :refer [node-require]])
(:require
[cljs.core.async :as async :refer [put!]]
[cljs.nodejs :as nodejs]
[clojure.string :as string]
[cognitect.transit :as transit]
[mern-utils.backend-lib :refer [local-ip log DEFAULT-LOGGER]]))
(defn write-json-str
  "Serialises `x` to a verbose-JSON transit string."
  [x]
  (transit/write (transit/writer :json-verbose) x))
(node-require cors "cors")
(defn render
  "Waits on the async `page` channel and sends its value as text/html
   on `res`.
   NOTE(review): when the request arrived over https (per the
   x-forwarded-proto header) it is redirected to the http:// origin
   instead of being served -- this is the opposite of the usual
   http->https upgrade; confirm the direction is intentional."
  [req res page]
  (if (= "https" (aget (.-headers req) "x-forwarded-proto"))
    (.redirect res (str "http://" (.get req "Host") (.-url req)))
    (go
      (.set res "Content-Type" "text/html")
      (.send res (<! page)))))
(defn route
  "Registers every entry of `route-table` on the Express app `ex`.
   Each entry is a map with :method (\"get\"/\"post\"/\"put\"/\"delete\"),
   :endpoint and :handler; every route gets the same CORS middleware.
   CORS preflight (OPTIONS) is answered on all paths. Returns `ex`."
  [ex route-table cors-options]
  (.options ex "*" (cors cors-options))
  ;; doseq is eager, replacing the lazy `map` that was forced via println
  (doseq [r route-table]
    (let [m (:method r)]
      (log DEFAULT-LOGGER :info (str "Registering: " m " " (:endpoint r)))
      (case m
        "get" (.get ex (:endpoint r) (cors cors-options) (:handler r))
        "post" (.post ex (:endpoint r) (cors cors-options) (:handler r))
        "put" (.put ex (:endpoint r) (cors cors-options) (:handler r))
        ;; bug fix: was `cors (cors-options)`, which called the options
        ;; map as a function instead of building the middleware
        "delete" (.delete ex (:endpoint r) (cors cors-options) (:handler r))
        (log DEFAULT-LOGGER :error (str "Not a METHOD: " m)))))
  ex)
| null | https://raw.githubusercontent.com/daigotanaka/mern-cljs/a9dedbb3b622f96dd0b06832733b4fd961e6437d/example/common/checkouts/mern_utils/src/mern_utils/express.cljs | clojure | I don't know why it requires println to make map run :( | (ns mern-utils.express
(:require-macros
[cljs.core.async.macros :refer [go]]
[mern-utils.macros :refer [node-require]])
(:require
[cljs.core.async :as async :refer [put!]]
[cljs.nodejs :as nodejs]
[clojure.string :as string]
[cognitect.transit :as transit]
[mern-utils.backend-lib :refer [local-ip log DEFAULT-LOGGER]]))
(defn write-json-str [x]
(let [w (transit/writer :json-verbose)]
(transit/write w x)))
(node-require cors "cors")
(defn render [req res page]
(if (= "https" (aget (.-headers req) "x-forwarded-proto"))
(.redirect res (str "http://" (.get req "Host") (.-url req)))
(go
(.set res "Content-Type" "text/html")
(.send res (<! page)))))
(defn route [ex route-table cors-options]
(.options ex "*" (cors cors-options))
(map
(fn [r]
(let [m (:method r)]
(log DEFAULT-LOGGER :info (str "Registering: " m " " (:endpoint r)))
(case m
"get" (.get ex (:endpoint r) (cors cors-options) (:handler r))
"post" (.post ex (:endpoint r) (cors cors-options) (:handler r))
"put" (.put ex (:endpoint r) (cors cors-options) (:handler r))
"delete" (.delete ex (:endpoint r) cors (cors-options) (:handler r))
(log DEFAULT-LOGGER :error (str "Not a METHOD: " m)))))
route-table)
)
ex)
|
a88c73833c54dd3f1155c4da766c44927ca5e92dd64cec9c6784fc49f4bd63da | bobzhang/fan | bench1.ml | (* let _ = *)
(* function *)
(* | 550543360 *)
(* |803846675 *)
(* |483739668 *)
(* |131103253 *)
(* |939042348 *)
(* |190501942 *)
(* |318291514 *)
(* |1000574016 *)
(* |600187987 *)
(* |1035971165 *)
(* |258923636 *)
(* |231714422 *)
(* |916095096 *)
(* |1031134330 *)
(* |166283392 *)
(* |303530675 *)
(* |63952589 *)
(* |606848730 *)
(* |347290843 *)
(* |92423390 *)
(* |72534754 *)
(* |504783075 *)
(* |632292067 *)
(* |299205366 *)
(* |804297977 *)
(* |624008963 *)
(* |175869201 *)
(* |972174611 *)
(* |99260692 *)
(* |415265556 *)
(* |43519261 *)
(* |569308970 *)
(* |197088567 *)
(* |146147642 *)
(* |424948034 *)
(* |201771337 *)
(* |494069608 *)
(* |1035704714 *)
(* |889500043 *)
(* |144676753 *)
(* |343776663 *)
(* |1048928162 *)
(* |149418948 *)
(* |55606727|294194640|1050473980 *)
(* -> true *)
(* | _ -> false *)
(* Benchmark body: keyword recognition compiled as an integer hash
   dispatch.  Each arm is the precomputed hash of one OCaml keyword;
   the string comparison confirms the candidate.  The inner function is
   applied to the dummy string "xxx" and the hash value [v], so every
   comparison is against "xxx" -- the point is to benchmark the
   generated dispatch code, not to recognise real keywords. *)
let f v =
(fun (s : string) ->
function
| 550543360 -> s = "functor"
| 803846675 -> s = "private"
| 483739668 -> s = "sig"
| 131103253 -> s = "include"
| 939042348 -> s = "exception"
| 190501942 -> s = "inherit"
| 318291514 -> s = "and"
| 1000574016 -> s = "when"
| 600187987 -> s = "then"
| 1035971165 -> s = "initializer"
| 258923636 -> s = "in"
| 231714422 -> s = "downto"
| 916095096 -> s = "as"
| 1031134330 -> s = "function"
| 166283392 -> s = "begin"
| 303530675 -> s = "class"
| 63952589 -> s = "do"
| 606848730 -> s = "end"
| 347290843 -> s = "assert"
| 92423390 -> s = "external"
| 72534754 -> s = "virtual"
| 504783075 -> s = "to"
| 632292067 -> s = "try"
| 299205366 -> s = "struct"
| 804297977 -> s = "else"
| 624008963 -> s = "val"
| 175869201 -> s = "constraint"
| 972174611 -> s = "type"
| 99260692 -> s = "new"
| 415265556 -> s = "of"
| 43519261 -> s = "done"
| 569308970 -> s = "for"
| 197088567 -> s = "fun"
| 146147642 -> s = "method"
| 424948034 -> s = "mutable"
| 201771337 -> s = "lazy"
| 494069608 -> s = "with"
| 1035704714 -> s = "if"
| 889500043 -> s = "while"
| 144676753 -> s = "rec"
| 343776663 -> s = "object"
| 1048928162 -> s = "or"
| 149418948 -> s = "match"
| 55606727 -> s = "open"
| 294194640 -> s = "module"
| 1050473980 -> s = "let"
| _ -> false) "xxx" v
(*
%hash_cmp{"functor"|"private"|"sig"
| "include"| "exception"| "inherit"
| "and"| "when"| "then"| "initializer"
| "in" | "downto"| "as"| "function"
| "begin"| "class"| "do"| "end"
| "assert"| "external"| "virtual"| "to"
| "try" | "struct"| "else"
| "val" | "constraint"| "type"
| "new" | "of"| "done"
| "for" | "fun"| "method"
| "mutable"| "lazy"| "with"
| "if" | "while" | "rec"
| "object" | "or"
| "match" | "open"| "module"|"let"}
*)
(* local variables: *)
(* compile-command: "ocamlopt.opt -dcmm -c bench1.ml" *)
(* end: *)
| null | https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/unitest/suites/bench1.ml | ocaml | let _ =
function
| 550543360
|803846675
|483739668
|131103253
|939042348
|190501942
|318291514
|1000574016
|600187987
|1035971165
|258923636
|231714422
|916095096
|1031134330
|166283392
|303530675
|63952589
|606848730
|347290843
|92423390
|504783075
|632292067
|299205366
|804297977
|624008963
|175869201
|972174611
|99260692
|415265556
|43519261
|569308970
|197088567
|146147642
|201771337
|494069608
|1035704714
|889500043
|144676753
|343776663
|1048928162
|149418948
|55606727|294194640|1050473980
-> true
| _ -> false
%hash_cmp{"functor"|"private"|"sig"
| "include"| "exception"| "inherit"
| "and"| "when"| "then"| "initializer"
| "in" | "downto"| "as"| "function"
| "begin"| "class"| "do"| "end"
| "assert"| "external"| "virtual"| "to"
| "try" | "struct"| "else"
| "val" | "constraint"| "type"
| "new" | "of"| "done"
| "for" | "fun"| "method"
| "mutable"| "lazy"| "with"
| "if" | "while" | "rec"
| "object" | "or"
| "match" | "open"| "module"|"let"}
local variables:
compile-command: "ocamlopt.opt -dcmm -c bench1.ml"
end: | |72534754
|424948034
let f v =
(fun (s : string) ->
function
| 550543360 -> s = "functor"
| 803846675 -> s = "private"
| 483739668 -> s = "sig"
| 131103253 -> s = "include"
| 939042348 -> s = "exception"
| 190501942 -> s = "inherit"
| 318291514 -> s = "and"
| 1000574016 -> s = "when"
| 600187987 -> s = "then"
| 1035971165 -> s = "initializer"
| 258923636 -> s = "in"
| 231714422 -> s = "downto"
| 916095096 -> s = "as"
| 1031134330 -> s = "function"
| 166283392 -> s = "begin"
| 303530675 -> s = "class"
| 63952589 -> s = "do"
| 606848730 -> s = "end"
| 347290843 -> s = "assert"
| 92423390 -> s = "external"
| 72534754 -> s = "virtual"
| 504783075 -> s = "to"
| 632292067 -> s = "try"
| 299205366 -> s = "struct"
| 804297977 -> s = "else"
| 624008963 -> s = "val"
| 175869201 -> s = "constraint"
| 972174611 -> s = "type"
| 99260692 -> s = "new"
| 415265556 -> s = "of"
| 43519261 -> s = "done"
| 569308970 -> s = "for"
| 197088567 -> s = "fun"
| 146147642 -> s = "method"
| 424948034 -> s = "mutable"
| 201771337 -> s = "lazy"
| 494069608 -> s = "with"
| 1035704714 -> s = "if"
| 889500043 -> s = "while"
| 144676753 -> s = "rec"
| 343776663 -> s = "object"
| 1048928162 -> s = "or"
| 149418948 -> s = "match"
| 55606727 -> s = "open"
| 294194640 -> s = "module"
| 1050473980 -> s = "let"
| _ -> false) "xxx" v
|
660c1f07ce5e4a90f038274575f608bf9341a75b9947c9c4ff7f0de3e8b2d50c | hypirion/haskell-transducers | Conduit.hs | {-# LANGUAGE DeriveFunctor, RankNTypes #-}
module Data.Transducer.Conduit
( toConduit
) where
import Data.Transducer
import Data.Conduit
-- | Reducer that yields every @b@ it receives downstream, sequenced
-- after the conduit action accumulated so far.
conduitYielder :: Monad m => Reducer () b (Conduit a m b)
conduitYielder = stateless run
  where run m x = m >> yield x
-- | Drive a 'Reducer' with values 'await'ed from upstream: each input
-- goes through the step function @f@; on a 'Reduced' signal (early
-- termination) or when upstream is exhausted, the completion function
-- @c@ flushes the final state.
conduitAwaiter :: Monad m => (Reducer s a (Conduit a m b)) -> Conduit a m b
conduitAwaiter (Reducer is c f) = go is
  where go s = do mval <- await
                  case mval of
                    (Just val) -> feed s val
                    Nothing -> feedLast s
        feed s val = case f s (return ()) val of
          (s', Reduced comp) -> comp >> feedLast s'
          (s', Continue comp) -> comp >> go s'
        feedLast s = c s (return ())
-- | toConduit takes a Transducer and converts it into a Conduit for
-- any monad m, by plugging the transducer into the yielding reducer
-- and feeding it awaited upstream values.
toConduit :: Monad m => Transducer () s a b -> Conduit b m a
toConduit xform = conduitAwaiter (xform conduitYielder)
| null | https://raw.githubusercontent.com/hypirion/haskell-transducers/7913c3bef9b5d3a30f48b878fbb9ff1675deb3ac/src/Data/Transducer/Conduit.hs | haskell | # LANGUAGE DeriveFunctor, RankNTypes #
Yields the bs it receives |
module Data.Transducer.Conduit
( toConduit
) where
import Data.Transducer
import Data.Conduit
conduitYielder :: Monad m => Reducer () b (Conduit a m b)
conduitYielder = stateless run
where run m x = m >> yield x
conduitAwaiter :: Monad m => (Reducer s a (Conduit a m b)) -> Conduit a m b
conduitAwaiter (Reducer is c f) = go is
where go s = do mval <- await
case mval of
(Just val) -> feed s val
Nothing -> feedLast s
feed s val = case f s (return ()) val of
(s', Reduced comp) -> comp >> feedLast s'
(s', Continue comp) -> comp >> go s'
feedLast s = c s (return ())
| toConduit takes a Transducer and converts it into a Conduit for any monad m
toConduit :: Monad m => Transducer () s a b -> Conduit b m a
toConduit xform = conduitAwaiter (xform conduitYielder)
|
157fc9f102c92f8dd68c5e27dc892594c7ef165790281a738fa0f82476fca338 | zellige/zellige | Simplify.hs | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NoMonomorphismRestriction #
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Data.Geometry.Simplify where
import qualified Data.Aeson as Aeson
import qualified Data.Foldable as Foldable
import qualified Data.Geometry.Simplify.DouglasPeucker as SimplifyDouglasPeucker
import qualified Data.Geometry.Types.Config as TypesConfig
import qualified Data.Geometry.WindingOrder as WindingOrder
import qualified Data.Geospatial as Geospatial
import qualified Data.LinearRing as LinearRing
import qualified Data.LineString as LineString
import qualified Data.Sequence as Sequence
import qualified Data.Validation as Validation
simplifyFeatures :: TypesConfig.SimplificationAlgorithm -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value) -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
simplifyFeatures algo = foldr (\x acc -> simplifyFeature algo (Geospatial._geometry x) x acc) Sequence.empty
simplifyFeature :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeospatialGeometry -> Geospatial.GeoFeature Aeson.Value -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value) -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
simplifyFeature algo geometry feature acc =
if algo == TypesConfig.NoAlgorithm then
feature Sequence.<| acc
else
case geometry of
Geospatial.NoGeometry -> acc
Geospatial.Point _ -> feature Sequence.<| acc
Geospatial.MultiPoint _ -> feature Sequence.<| acc
Geospatial.Line l -> simplifyLineAcc algo l feature acc
Geospatial.MultiLine ls -> simplifyLinesAcc algo ls feature acc
Geospatial.Polygon p -> simplifyPolygonAcc algo p feature acc
Geospatial.MultiPolygon ps -> simplifyPolygonsAcc algo ps feature acc
Geospatial.Collection gs -> Foldable.foldMap (\x -> simplifyFeature algo x feature acc) gs
mapFeature :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeospatialGeometry -> Geospatial.GeospatialGeometry
mapFeature algo geometry =
if algo == TypesConfig.NoAlgorithm then
geometry
else
case geometry of
Geospatial.NoGeometry -> geometry
Geospatial.Point _ -> geometry
Geospatial.MultiPoint _ -> geometry
Geospatial.Line l -> maybe Geospatial.NoGeometry (Geospatial.Line . Geospatial.GeoLine) (simplifyLine algo l)
Geospatial.MultiLine (Geospatial.GeoMultiLine ls) -> maybe Geospatial.NoGeometry (Geospatial.MultiLine . Geospatial.GeoMultiLine) (simplifyLines algo ls)
Geospatial.Polygon (Geospatial.GeoPolygon p) -> maybe Geospatial.NoGeometry (Geospatial.Polygon . Geospatial.GeoPolygon) (simplifyPolygon algo p)
Geospatial.MultiPolygon (Geospatial.GeoMultiPolygon ps) -> maybe Geospatial.NoGeometry (Geospatial.MultiPolygon . Geospatial.GeoMultiPolygon) (simplifyPolygons algo ps)
Geospatial.Collection gs -> if Sequence.null (foldOver gs) then Geospatial.NoGeometry else Geospatial.Collection (foldOver gs)
where
foldOver = foldr (\geom acc -> mapFeature algo geom Sequence.<| acc) Sequence.empty
simplifyLineAcc :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeoLine -> Geospatial.GeoFeature a -> Sequence.Seq (Geospatial.GeoFeature a) -> Sequence.Seq (Geospatial.GeoFeature a)
simplifyLineAcc algo line (Geospatial.GeoFeature bbox _ props fId) acc =
case simplifyLine algo line of
Just res -> Geospatial.GeoFeature bbox (Geospatial.Line (Geospatial.GeoLine res)) props fId Sequence.<| acc
Nothing -> acc
simplifyLine :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeoLine -> Maybe (LineString.LineString Geospatial.GeoPositionWithoutCRS)
simplifyLine algo (Geospatial.GeoLine points) = either (const Nothing) Just . Validation.toEither $ LineString.fromSeq (createSimplifiedLineString algo points)
simplifyLinesAcc :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeoMultiLine -> Geospatial.GeoFeature a -> Sequence.Seq (Geospatial.GeoFeature a) -> Sequence.Seq (Geospatial.GeoFeature a)
simplifyLinesAcc algo (Geospatial.GeoMultiLine multiLines) (Geospatial.GeoFeature bbox _ props fId) acc =
case simplifyLines algo multiLines of
Just res -> Geospatial.GeoFeature bbox (Geospatial.MultiLine (Geospatial.GeoMultiLine res)) props fId Sequence.<| acc
Nothing -> acc
simplifyLines :: Traversable t => TypesConfig.SimplificationAlgorithm -> t (LineString.LineString Geospatial.GeoPositionWithoutCRS) -> Maybe (Sequence.Seq (LineString.LineString Geospatial.GeoPositionWithoutCRS))
simplifyLines algo multiLines =
if Sequence.null foldLines
then Nothing
else Just foldLines
where
foldLines = Foldable.foldr (\points acc -> either (const acc) (Sequence.<| acc) (Validation.toEither . LineString.fromSeq $ createSimplifiedLineString algo points)) Sequence.empty multiLines
simplifyPolygonAcc :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeoPolygon -> Geospatial.GeoFeature a -> Sequence.Seq (Geospatial.GeoFeature a) -> Sequence.Seq (Geospatial.GeoFeature a)
simplifyPolygonAcc algo (Geospatial.GeoPolygon polygon) (Geospatial.GeoFeature bbox _ props fId) acc =
case simplifyPolygon algo polygon of
Just res -> Geospatial.GeoFeature bbox (Geospatial.Polygon (Geospatial.GeoPolygon res)) props fId Sequence.<| acc
Nothing -> acc
simplifyPolygon :: TypesConfig.SimplificationAlgorithm -> Sequence.Seq (LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS) -> Maybe (Sequence.Seq (LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS))
simplifyPolygon algo polygon =
if Sequence.null simplifyGeoPolygon
then Nothing
else Just simplifyGeoPolygon
where
windingList = Sequence.fromList (WindingOrder.Clockwise : repeat WindingOrder.AntiClockwise)
simplifyGeoPolygon = Foldable.foldr (\(points, windingOrder) acc -> either (const acc) (Sequence.<| acc) (Validation.toEither . LinearRing.fromSeq $ createSimplifiedLinearRing algo windingOrder points)) Sequence.empty (Sequence.zip polygon windingList)
simplifyPolygonsAcc :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeoMultiPolygon -> Geospatial.GeoFeature a -> Sequence.Seq (Geospatial.GeoFeature a) -> Sequence.Seq (Geospatial.GeoFeature a)
simplifyPolygonsAcc algo (Geospatial.GeoMultiPolygon polygons) (Geospatial.GeoFeature bbox _ props fId) acc =
case simplifyPolygons algo polygons of
Just res -> Geospatial.GeoFeature bbox (Geospatial.MultiPolygon (Geospatial.GeoMultiPolygon res)) props fId Sequence.<| acc
Nothing -> acc
simplifyPolygons :: TypesConfig.SimplificationAlgorithm -> Sequence.Seq (Sequence.Seq (LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS)) -> Maybe (Sequence.Seq (Sequence.Seq (LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS)))
simplifyPolygons algo polygons =
if Sequence.null foldedPolys
then Nothing
else Just foldedPolys
where
foldedPolys = Foldable.foldr (\polys acc -> maybe acc (Sequence.<| acc) polys) Sequence.empty simplifyGeoPolygons
simplifyGeoPolygons = fmap (simplifyPolygon algo) polygons
createSimplifiedLineString :: TypesConfig.SimplificationAlgorithm -> LineString.LineString Geospatial.GeoPositionWithoutCRS -> Sequence.Seq Geospatial.GeoPositionWithoutCRS
createSimplifiedLineString algo lineString = fmap Geospatial.GeoPointXY (simplifyUsing algo WindingOrder.Clockwise (fmap Geospatial.retrieveXY (LineString.toSeq lineString)))
createSimplifiedLinearRing :: TypesConfig.SimplificationAlgorithm -> WindingOrder.WindingOrder -> LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS -> Sequence.Seq Geospatial.GeoPositionWithoutCRS
createSimplifiedLinearRing algo windingOrder linearRing = fmap Geospatial.GeoPointXY (simplifyUsing algo windingOrder (fmap Geospatial.retrieveXY (LinearRing.toSeq linearRing)))
simplifyUsing :: TypesConfig.SimplificationAlgorithm -> WindingOrder.WindingOrder -> Sequence.Seq Geospatial.PointXY -> Sequence.Seq Geospatial.PointXY
simplifyUsing TypesConfig.NoAlgorithm _ = id
simplifyUsing TypesConfig.DouglasPeucker windingOrder = WindingOrder.ensureOrder windingOrder . SimplifyDouglasPeucker.douglasPeucker 1.0
simplifyUsing TypesConfig.Visvalingam _ = id
| null | https://raw.githubusercontent.com/zellige/zellige/87e6dab11ac4c1843009043580f14422a1d83ebf/src/Data/Geometry/Simplify.hs | haskell | # LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators # | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NoMonomorphismRestriction #
module Data.Geometry.Simplify where
import qualified Data.Aeson as Aeson
import qualified Data.Foldable as Foldable
import qualified Data.Geometry.Simplify.DouglasPeucker as SimplifyDouglasPeucker
import qualified Data.Geometry.Types.Config as TypesConfig
import qualified Data.Geometry.WindingOrder as WindingOrder
import qualified Data.Geospatial as Geospatial
import qualified Data.LinearRing as LinearRing
import qualified Data.LineString as LineString
import qualified Data.Sequence as Sequence
import qualified Data.Validation as Validation
simplifyFeatures :: TypesConfig.SimplificationAlgorithm -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value) -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
simplifyFeatures algo = foldr (\x acc -> simplifyFeature algo (Geospatial._geometry x) x acc) Sequence.empty
simplifyFeature :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeospatialGeometry -> Geospatial.GeoFeature Aeson.Value -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value) -> Sequence.Seq (Geospatial.GeoFeature Aeson.Value)
simplifyFeature algo geometry feature acc =
if algo == TypesConfig.NoAlgorithm then
feature Sequence.<| acc
else
case geometry of
Geospatial.NoGeometry -> acc
Geospatial.Point _ -> feature Sequence.<| acc
Geospatial.MultiPoint _ -> feature Sequence.<| acc
Geospatial.Line l -> simplifyLineAcc algo l feature acc
Geospatial.MultiLine ls -> simplifyLinesAcc algo ls feature acc
Geospatial.Polygon p -> simplifyPolygonAcc algo p feature acc
Geospatial.MultiPolygon ps -> simplifyPolygonsAcc algo ps feature acc
Geospatial.Collection gs -> Foldable.foldMap (\x -> simplifyFeature algo x feature acc) gs
mapFeature :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeospatialGeometry -> Geospatial.GeospatialGeometry
mapFeature algo geometry =
if algo == TypesConfig.NoAlgorithm then
geometry
else
case geometry of
Geospatial.NoGeometry -> geometry
Geospatial.Point _ -> geometry
Geospatial.MultiPoint _ -> geometry
Geospatial.Line l -> maybe Geospatial.NoGeometry (Geospatial.Line . Geospatial.GeoLine) (simplifyLine algo l)
Geospatial.MultiLine (Geospatial.GeoMultiLine ls) -> maybe Geospatial.NoGeometry (Geospatial.MultiLine . Geospatial.GeoMultiLine) (simplifyLines algo ls)
Geospatial.Polygon (Geospatial.GeoPolygon p) -> maybe Geospatial.NoGeometry (Geospatial.Polygon . Geospatial.GeoPolygon) (simplifyPolygon algo p)
Geospatial.MultiPolygon (Geospatial.GeoMultiPolygon ps) -> maybe Geospatial.NoGeometry (Geospatial.MultiPolygon . Geospatial.GeoMultiPolygon) (simplifyPolygons algo ps)
Geospatial.Collection gs -> if Sequence.null (foldOver gs) then Geospatial.NoGeometry else Geospatial.Collection (foldOver gs)
where
foldOver = foldr (\geom acc -> mapFeature algo geom Sequence.<| acc) Sequence.empty
simplifyLineAcc :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeoLine -> Geospatial.GeoFeature a -> Sequence.Seq (Geospatial.GeoFeature a) -> Sequence.Seq (Geospatial.GeoFeature a)
simplifyLineAcc algo line (Geospatial.GeoFeature bbox _ props fId) acc =
case simplifyLine algo line of
Just res -> Geospatial.GeoFeature bbox (Geospatial.Line (Geospatial.GeoLine res)) props fId Sequence.<| acc
Nothing -> acc
simplifyLine :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeoLine -> Maybe (LineString.LineString Geospatial.GeoPositionWithoutCRS)
simplifyLine algo (Geospatial.GeoLine points) = either (const Nothing) Just . Validation.toEither $ LineString.fromSeq (createSimplifiedLineString algo points)
simplifyLinesAcc :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeoMultiLine -> Geospatial.GeoFeature a -> Sequence.Seq (Geospatial.GeoFeature a) -> Sequence.Seq (Geospatial.GeoFeature a)
simplifyLinesAcc algo (Geospatial.GeoMultiLine multiLines) (Geospatial.GeoFeature bbox _ props fId) acc =
case simplifyLines algo multiLines of
Just res -> Geospatial.GeoFeature bbox (Geospatial.MultiLine (Geospatial.GeoMultiLine res)) props fId Sequence.<| acc
Nothing -> acc
simplifyLines :: Traversable t => TypesConfig.SimplificationAlgorithm -> t (LineString.LineString Geospatial.GeoPositionWithoutCRS) -> Maybe (Sequence.Seq (LineString.LineString Geospatial.GeoPositionWithoutCRS))
simplifyLines algo multiLines =
if Sequence.null foldLines
then Nothing
else Just foldLines
where
foldLines = Foldable.foldr (\points acc -> either (const acc) (Sequence.<| acc) (Validation.toEither . LineString.fromSeq $ createSimplifiedLineString algo points)) Sequence.empty multiLines
simplifyPolygonAcc :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeoPolygon -> Geospatial.GeoFeature a -> Sequence.Seq (Geospatial.GeoFeature a) -> Sequence.Seq (Geospatial.GeoFeature a)
simplifyPolygonAcc algo (Geospatial.GeoPolygon polygon) (Geospatial.GeoFeature bbox _ props fId) acc =
case simplifyPolygon algo polygon of
Just res -> Geospatial.GeoFeature bbox (Geospatial.Polygon (Geospatial.GeoPolygon res)) props fId Sequence.<| acc
Nothing -> acc
simplifyPolygon :: TypesConfig.SimplificationAlgorithm -> Sequence.Seq (LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS) -> Maybe (Sequence.Seq (LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS))
simplifyPolygon algo polygon =
if Sequence.null simplifyGeoPolygon
then Nothing
else Just simplifyGeoPolygon
where
windingList = Sequence.fromList (WindingOrder.Clockwise : repeat WindingOrder.AntiClockwise)
simplifyGeoPolygon = Foldable.foldr (\(points, windingOrder) acc -> either (const acc) (Sequence.<| acc) (Validation.toEither . LinearRing.fromSeq $ createSimplifiedLinearRing algo windingOrder points)) Sequence.empty (Sequence.zip polygon windingList)
simplifyPolygonsAcc :: TypesConfig.SimplificationAlgorithm -> Geospatial.GeoMultiPolygon -> Geospatial.GeoFeature a -> Sequence.Seq (Geospatial.GeoFeature a) -> Sequence.Seq (Geospatial.GeoFeature a)
simplifyPolygonsAcc algo (Geospatial.GeoMultiPolygon polygons) (Geospatial.GeoFeature bbox _ props fId) acc =
case simplifyPolygons algo polygons of
Just res -> Geospatial.GeoFeature bbox (Geospatial.MultiPolygon (Geospatial.GeoMultiPolygon res)) props fId Sequence.<| acc
Nothing -> acc
simplifyPolygons :: TypesConfig.SimplificationAlgorithm -> Sequence.Seq (Sequence.Seq (LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS)) -> Maybe (Sequence.Seq (Sequence.Seq (LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS)))
simplifyPolygons algo polygons =
if Sequence.null foldedPolys
then Nothing
else Just foldedPolys
where
foldedPolys = Foldable.foldr (\polys acc -> maybe acc (Sequence.<| acc) polys) Sequence.empty simplifyGeoPolygons
simplifyGeoPolygons = fmap (simplifyPolygon algo) polygons
createSimplifiedLineString :: TypesConfig.SimplificationAlgorithm -> LineString.LineString Geospatial.GeoPositionWithoutCRS -> Sequence.Seq Geospatial.GeoPositionWithoutCRS
createSimplifiedLineString algo lineString = fmap Geospatial.GeoPointXY (simplifyUsing algo WindingOrder.Clockwise (fmap Geospatial.retrieveXY (LineString.toSeq lineString)))
createSimplifiedLinearRing :: TypesConfig.SimplificationAlgorithm -> WindingOrder.WindingOrder -> LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS -> Sequence.Seq Geospatial.GeoPositionWithoutCRS
createSimplifiedLinearRing algo windingOrder linearRing = fmap Geospatial.GeoPointXY (simplifyUsing algo windingOrder (fmap Geospatial.retrieveXY (LinearRing.toSeq linearRing)))
simplifyUsing :: TypesConfig.SimplificationAlgorithm -> WindingOrder.WindingOrder -> Sequence.Seq Geospatial.PointXY -> Sequence.Seq Geospatial.PointXY
simplifyUsing TypesConfig.NoAlgorithm _ = id
simplifyUsing TypesConfig.DouglasPeucker windingOrder = WindingOrder.ensureOrder windingOrder . SimplifyDouglasPeucker.douglasPeucker 1.0
simplifyUsing TypesConfig.Visvalingam _ = id
|
46836d8287b9014bfb49e5f185bc948a25299c3cb1b0d2252a833642627ffb36 | binsec/haunted | hashamt.mli | (**************************************************************************)
This file is part of BINSEC .
(* *)
Copyright ( C ) 2016 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
(** Implementation of Hash-Array Mapped Tries *)
* is a very efficient persistent data structures for dictionaries
module type S = sig
type key
type 'a t
val empty : 'a t
val is_empty : 'a t -> bool
val singleton : key -> 'a -> 'a t
val add : key -> 'a -> 'a t -> 'a t
val remove : key -> 'a t -> 'a t
val mem : key -> 'a t -> bool
val find : key -> 'a t -> 'a
val union : (key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t
val join : (key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t
val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val iter : (key -> 'a -> unit) -> 'a t -> unit
val map : ('a -> 'b) -> 'a t -> 'b t
val mapi : (key -> 'a -> 'b) -> 'a t -> 'b t
val cardinal : 'a t -> int
val bindings : 'a t -> (key * 'a) list
end
module Make(H : Hashtbl.HashedType) : S with type key = H.t
| null | https://raw.githubusercontent.com/binsec/haunted/7ffc5f4072950fe138f53fe953ace98fff181c73/src/base/hashamt.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* Implementation of Hash-Array Mapped Tries | This file is part of BINSEC .
Copyright ( C ) 2016 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
* is a very efficient persistent data structures for dictionaries
module type S = sig
type key
type 'a t
val empty : 'a t
val is_empty : 'a t -> bool
val singleton : key -> 'a -> 'a t
val add : key -> 'a -> 'a t -> 'a t
val remove : key -> 'a t -> 'a t
val mem : key -> 'a t -> bool
val find : key -> 'a t -> 'a
val union : (key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t
val join : (key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t
val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val iter : (key -> 'a -> unit) -> 'a t -> unit
val map : ('a -> 'b) -> 'a t -> 'b t
val mapi : (key -> 'a -> 'b) -> 'a t -> 'b t
val cardinal : 'a t -> int
val bindings : 'a t -> (key * 'a) list
end
module Make(H : Hashtbl.HashedType) : S with type key = H.t
|
aed018d206ddfe2e6d8ffe1d20be34bc6ce680a847708061baa3543c881527ec | penrose/geometric-queries | MathUtils.hs | # LANGUAGE AllowAmbiguousTypes , NoMonomorphismRestriction #
module MathUtils where
to use this module , functions need to be constrained into Autofloat first
---- constants ----
eps = 0.1 ** 8
posInf = 1 / 0
negInf = -1 / 0
---- helper functions ----
eq a b = abs (a-b) < eps
isInf x = x == 1/0 || x == -1/0
neg v = map (\x->(-x)) v
add v1 v2 = map (\(a,b)->a+b) (zip v1 v2)
sub v1 v2 = v1 `add` (neg v2)
rot90 [x,y] = [-y,x]
-- scalar-vector multiplication
mult k v = map (k*) v
magsq xs = foldl (+) 0.0 $ map (**2) xs
mag xs = sqrt $ magsq xs
normalize xs = let magnitude = mag xs in
map (/magnitude) xs
dot v1 v2 = let zp = zip v1 v2 in foldl (+) 0 $ map (\(a,b)->a*b) zp
lerp v1 v2 k = let
lerpNum (a,b) = a*(1.0-k) + b*k
in map lerpNum $ zip v1 v2
-- transformations
centroid pts = let
len = fromIntegral $ length pts
[sumx, sumy] = foldl add [0,0] pts
in [sumx/len, sumy/len]
combTrans [mx1,my1,r1,s1] [mx2,my2,r2,s2] =
[mx1+mx2, my1+my2, r1+r2, s1*s2]
negTrans [mx,my,r,s] = [-mx,-my,-r,1/s]
transformP [x,y] [mx,my,t,s] = let
scost = s * (cos t)
ssint = s * (sin t)
x' = x*scost - y*ssint + mx
y' = x*ssint + y*scost + my
in [x', y']
transformG poly amt = map (\p->transformP p amt) poly
| null | https://raw.githubusercontent.com/penrose/geometric-queries/79676192b2740e7bed39535611db8949b7846e14/rewrite-incomplete/MathUtils.hs | haskell | -- constants ----
-- helper functions ----
scalar-vector multiplication
transformations | # LANGUAGE AllowAmbiguousTypes , NoMonomorphismRestriction #
module MathUtils where
to use this module , functions need to be constrained into Autofloat first
eps = 0.1 ** 8
posInf = 1 / 0
negInf = -1 / 0
eq a b = abs (a-b) < eps
isInf x = x == 1/0 || x == -1/0
neg v = map (\x->(-x)) v
add v1 v2 = map (\(a,b)->a+b) (zip v1 v2)
sub v1 v2 = v1 `add` (neg v2)
rot90 [x,y] = [-y,x]
mult k v = map (k*) v
magsq xs = foldl (+) 0.0 $ map (**2) xs
mag xs = sqrt $ magsq xs
normalize xs = let magnitude = mag xs in
map (/magnitude) xs
dot v1 v2 = let zp = zip v1 v2 in foldl (+) 0 $ map (\(a,b)->a*b) zp
lerp v1 v2 k = let
lerpNum (a,b) = a*(1.0-k) + b*k
in map lerpNum $ zip v1 v2
centroid pts = let
len = fromIntegral $ length pts
[sumx, sumy] = foldl add [0,0] pts
in [sumx/len, sumy/len]
combTrans [mx1,my1,r1,s1] [mx2,my2,r2,s2] =
[mx1+mx2, my1+my2, r1+r2, s1*s2]
negTrans [mx,my,r,s] = [-mx,-my,-r,1/s]
transformP [x,y] [mx,my,t,s] = let
scost = s * (cos t)
ssint = s * (sin t)
x' = x*scost - y*ssint + mx
y' = x*ssint + y*scost + my
in [x', y']
transformG poly amt = map (\p->transformP p amt) poly
|
ee9a477649958f61a1c72af33c9f4463ab3f2dd1be07431eba775b07370e07ae | b0-system/b0 | b00_htmlg.ml | ---------------------------------------------------------------------------
Copyright ( c ) 2016 The b0 programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2016 The b0 programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
module At = struct
type name = string
type t = name * string
let v n v = (n, v)
let true' n = (n, "")
let int n i = (n, string_of_int i)
let add_if b at l = if b then at :: l else l
let add_if_some name o l = match o with None -> l | Some a -> (name, a) :: l
let to_pair = Fun.id
module Name = struct
let accesskey = "accesskey"
let autofocus = "autofocus"
let charset = "charset"
let checked = "checked"
let class' = "class"
let content = "content"
let contenteditable = "contenteditable"
let cols = "cols"
let defer = "defer"
let disabled = "disabled"
let dir = "dir"
let draggable = "draggable"
let for' = "for"
let height = "height"
let hidden = "hidden"
let href = "href"
let id = "id"
let lang = "lang"
let media = "media"
let name = "name"
let placeholder = "placeholder"
let rel = "rel"
let required = "required"
let rows = "rows"
let src = "src"
let spellcheck = "spellcheck"
let tabindex = "tabindex"
let title = "title"
let type' = "type"
let value = "value"
let width = "width"
let wrap = "wrap"
end
type 'a cons = 'a -> t
let accesskey s = v Name.accesskey s
let autofocus = true' Name.autofocus
let charset = v Name.charset
let checked = true' Name.checked
let class' s = v Name.class' s
let cols i = int Name.cols i
let content s = v Name.content s
let contenteditable s = true' Name.contenteditable
let defer = true' Name.defer
let disabled = true' Name.disabled
let dir s = v Name.dir s
let draggable s = true' Name.draggable
let for' s = v Name.for' s
let height i = int Name.height i
let hidden = true' Name.hidden
let href s = v Name.href s
let id s = v Name.id s
let lang s = v Name.lang s
let media s = v Name.media s
let name s = v Name.name s
let placeholder s = v Name.placeholder s
let rel s = v Name.rel s
let required = true' Name.required
let rows i = int Name.rows i
let src s = v Name.src s
let spellcheck = v Name.spellcheck
let tabindex i = int Name.tabindex i
let title s = v Name.title s
let type' s = v Name.type' s
let value s = v Name.value s
let width i = int Name.width i
let wrap s = v Name.value s
end
module El = struct
type name = string
type frag =
| El of name * At.t list * frag list
| Txt of string
| Splice of frag option * frag list
| Raw of string
let v ?(at = []) n cs = El (n, at, cs)
let txt v = Txt v
let sp = Txt " "
let nbsp = Txt "\u{00A0}"
let splice ?sep cs = Splice (sep, cs)
let void = Splice (None, [])
let raw f = Raw f
(* Output *)
let addc = Buffer.add_char
let adds = Buffer.add_string
let adds_esc b s =
(* N.B. we also escape @'s since ocamldoc trips over them. *)
let len = String.length s in
let max_idx = len - 1 in
let flush b start i =
if start < len then Buffer.add_substring b s start (i - start);
in
let rec loop start i =
if i > max_idx then flush b start i else
let next = i + 1 in
match String.get s i with
| '&' -> flush b start i; adds b "&"; loop next next
| '<' -> flush b start i; adds b "<"; loop next next
| '>' -> flush b start i; adds b ">"; loop next next
| '\'' -> flush b start i; adds b "'"; loop next next
| '\"' -> flush b start i; adds b """; loop next next
| '@' -> flush b start i; adds b "@"; loop next next
| c -> loop start next
in
loop 0 0
let void_els = B0_std.String.Set.of_list
[ "area"; "base"; "br"; "col"; "embed"; "hr"; "img"; "input"; "link";
"meta"; "param"; "source"; "track"; "wbr" ]
let rec add_ats b cs atts =
let add_at b n v = adds b n; adds b "=\""; adds_esc b v; addc b '\"' in
match atts with
| ("class", c) :: atts -> add_ats b (c :: cs) atts
| (n, v) :: atts -> addc b ' '; add_at b n v; add_ats b cs atts
| [] when cs = [] -> ()
| [] -> addc b ' '; add_at b "class" (String.concat " " (List.rev cs))
not T.R.
| Raw r -> adds b r
| Txt txt -> adds_esc b txt
| Splice (sep, cs) ->
begin match sep with
| None -> List.iter (add_child b) cs
| Some sep ->
begin match cs with
| [] -> ()
| c :: cs ->
let add b c = add_child b sep; add_child b c in
add_child b c; List.iter (add b) cs
end
end
| El (n, atts, cs) ->
addc b '<'; adds b n; add_ats b [] atts; addc b '>';
if not (B0_std.String.Set.mem n void_els)
then (List.iter (add_child b) cs; adds b "</"; adds b n; addc b '>')
let add_doc_type b = adds b "<!DOCTYPE html>\n"
let buffer_add ~doc_type b cs =
if doc_type then add_doc_type b; add_child b cs
let to_string ~doc_type g =
let b = Buffer.create 65525 in
buffer_add ~doc_type b g; Buffer.contents b
(* Predefined element constructors *)
type cons = ?at:At.t list -> frag list -> frag
type void_cons = ?at:At.t list -> unit -> frag
let[@inline] cons e ?at els = v ?at e els
let[@inline] void_cons e ?at () = v e ?at []
let a = cons "a"
let abbr = cons "abbr"
let address = cons "address"
let area = void_cons "area"
let article = cons "article"
let aside = cons "aside"
let audio = cons "audio"
let b = cons "b"
let base = void_cons "base"
let bdi = cons "bdi"
let bdo = cons "bdo"
let blockquote = cons "blockquote"
let body = cons "body"
let br = void_cons "br"
let button = cons "button"
let canvas = cons "canvas"
let caption = cons "caption"
let cite = cons "cite"
let code = cons "code"
let col = void_cons "col"
let colgroup = cons "colgroup"
let command = cons "command"
let datalist = cons "datalist"
let dd = cons "dd"
let del = cons "del"
let details = cons "details"
let dfn = cons "dfn"
let div = cons "div"
let dl = cons "dl"
let dt = cons "dt"
let em = cons "em"
let embed = void_cons "embed"
let fieldset = cons "fieldset"
let figcaption = cons "figcaption"
let figure = cons "figure"
let footer = cons "footer"
let form = cons "form"
let h1 = cons "h1"
let h2 = cons "h2"
let h3 = cons "h3"
let h4 = cons "h4"
let h5 = cons "h5"
let h6 = cons "h6"
let head = cons "head"
let header = cons "header"
let hgroup = cons "hgroup"
let hr = void_cons "hr"
let html = cons "html"
let i = cons "i"
let iframe = cons "iframe"
let img = void_cons "img"
let input = void_cons "input"
let ins = cons "ins"
let kbd = cons "kbd"
let keygen = cons "keygen"
let label = cons "label"
let legend = cons "legend"
let li = cons "li"
let link = void_cons "link"
let map = cons "map"
let mark = cons "mark"
let menu = cons "menu"
let meta = void_cons "meta"
let meter = cons "meter"
let nav = cons "nav"
let noscript = cons "noscript"
let object' = cons "object"
let ol = cons "ol"
let optgroup = cons "optgroup"
let option = cons "option"
let output = cons "output"
let p = cons "p"
let param = void_cons "param"
let pre = cons "pre"
let progress = cons "progress"
let q = cons "q"
let rp = cons "rp"
let rt = cons "rt"
let ruby = cons "ruby"
let s = cons "s"
let samp = cons "samp"
let script = cons "script"
let section = cons "section"
let select = cons "select"
let small = cons "small"
let source = void_cons "source"
let span = cons "span"
let strong = cons "strong"
let style = cons "style"
let sub = cons "sub"
let summary = cons "summary"
let sup = cons "sup"
let table = cons "table"
let tbody = cons "tbody"
let td = cons "td"
let textarea = cons "textarea"
let tfoot = cons "tfoot"
let th = cons "th"
let thead = cons "thead"
let time = cons "time"
let title = cons "title"
let tr = cons "tr"
let track = void_cons "track"
let u = cons "u"
let ul = cons "ul"
let var = cons "var"
let video = cons "video"
let wbr = void_cons "wbr"
(* Convenience *)
let title_of_fpath file = match B0_std.Fpath.basename ~no_ext:true file with
| "index" | "" ->
let title = B0_std.Fpath.(basename ~no_ext:true (parent file)) in
if title = "" then "Untitled" else title
| title -> title
let basic_page
?(lang = "") ?(generator = "") ?(styles = []) ?(scripts = [])
?(more_head = void) ~title:t body
=
let viewport = "width=device-width, initial-scale=1.0" in
let generator = match generator with
| "" -> void
| g -> meta ~at:At.[name "generator"; content g] ()
in
let style uri =
link ~at:At.[rel "stylesheet"; type' "text/css"; href uri] ()
in
let script uri =
script ~at:At.[type' "text/javascript"; defer; src uri] []
in
let head = head [
meta ~at:At.[charset "utf-8"] ();
generator;
meta ~at:At.[name "viewport"; content viewport] ();
splice (List.map style styles);
splice (List.map script scripts);
more_head;
title [txt (if String.trim t = "" then "Untilted" else t)]]
in
let at = if lang = "" then [] else [At.lang lang] in
html ~at [head; body]
(* Reads the HTML fragment [frag] through memoizer [m] and writes a
   complete page to [o], with the fragment as raw contents of <body>.
   An empty [title] is replaced by one derived from [o] via
   title_of_fpath. The write is keyed on [frag] (via ~reads) and on a
   stamp built from the page parameters — presumably so the memoizer
   redoes the write when any of them change; confirm against
   B00.Memo.write's contract. *)
let write_page
    ?(lang = "") ?(generator = "") ?(styles = []) ?(scripts = [])
    ?more_head ?(title = "") m ~frag ~o
  =
  (* FIXME Ideally we would like the read to be in write.
     The write fun return a future but this has other impacts. *)
  let open B0_std.Fut.Syntax in
  ignore @@ (* FIXME maybe get rid of that. *)
  let* contents = B00.Memo.read m frag in
  let title = if title = "" then title_of_fpath o else title in
  (* [more_head] is pre-rendered to a string here so it can be folded
     into the stamp; it is re-wrapped with [raw] inside the write. *)
  let more_head = match more_head with
  | None -> ""
  | Some more_head -> to_string ~doc_type:false more_head
  in
  (* NOTE(review): stamp fields are concatenated with no separator, so
     distinct parameter lists can collide (e.g. lang "ab" vs lang "a"
     plus generator "b") — verify whether such collisions matter for
     the memoizer before relying on stamp uniqueness. *)
  let stamp = lang :: generator :: more_head :: title :: [] in
  let stamp = List.rev_append styles stamp in
  let stamp = List.rev_append scripts stamp in
  let stamp = String.concat "" stamp in
  B0_std.Fut.return @@
  (B00.Memo.write m ~stamp ~reads:[frag] o @@ fun () ->
   let more_head = raw more_head in
   let body = body [raw contents] in
   let page =
     basic_page ~lang ~generator ~styles ~scripts ~more_head ~title body
   in
   Ok (to_string ~doc_type:true page))
end
---------------------------------------------------------------------------
Copyright ( c ) 2016 The b0 programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2016 The b0 programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/b0-system/b0/cbe12b8a55da6b50ab01ed058b339dbed3cfe894/src/b00/kit/b00_htmlg.ml | ocaml | Output
N.B. we also escape @'s since ocamldoc trips over them.
Predefined element constructors
Convenience
FIXME Ideally we would like the read to be in write.
The write fun return a future but this has other impacts.
FIXME maybe get rid of that. | ---------------------------------------------------------------------------
Copyright ( c ) 2016 The b0 programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2016 The b0 programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
module At = struct
type name = string
type t = name * string
let v n v = (n, v)
let true' n = (n, "")
let int n i = (n, string_of_int i)
let add_if b at l = if b then at :: l else l
let add_if_some name o l = match o with None -> l | Some a -> (name, a) :: l
let to_pair = Fun.id
module Name = struct
let accesskey = "accesskey"
let autofocus = "autofocus"
let charset = "charset"
let checked = "checked"
let class' = "class"
let content = "content"
let contenteditable = "contenteditable"
let cols = "cols"
let defer = "defer"
let disabled = "disabled"
let dir = "dir"
let draggable = "draggable"
let for' = "for"
let height = "height"
let hidden = "hidden"
let href = "href"
let id = "id"
let lang = "lang"
let media = "media"
let name = "name"
let placeholder = "placeholder"
let rel = "rel"
let required = "required"
let rows = "rows"
let src = "src"
let spellcheck = "spellcheck"
let tabindex = "tabindex"
let title = "title"
let type' = "type"
let value = "value"
let width = "width"
let wrap = "wrap"
end
type 'a cons = 'a -> t
let accesskey s = v Name.accesskey s
let autofocus = true' Name.autofocus
let charset = v Name.charset
let checked = true' Name.checked
let class' s = v Name.class' s
let cols i = int Name.cols i
let content s = v Name.content s
let contenteditable s = true' Name.contenteditable
let defer = true' Name.defer
let disabled = true' Name.disabled
let dir s = v Name.dir s
let draggable s = true' Name.draggable
let for' s = v Name.for' s
let height i = int Name.height i
let hidden = true' Name.hidden
let href s = v Name.href s
let id s = v Name.id s
let lang s = v Name.lang s
let media s = v Name.media s
let name s = v Name.name s
let placeholder s = v Name.placeholder s
let rel s = v Name.rel s
let required = true' Name.required
let rows i = int Name.rows i
let src s = v Name.src s
let spellcheck = v Name.spellcheck
let tabindex i = int Name.tabindex i
let title s = v Name.title s
let type' s = v Name.type' s
let value s = v Name.value s
let width i = int Name.width i
let wrap s = v Name.value s
end
module El = struct
type name = string
type frag =
| El of name * At.t list * frag list
| Txt of string
| Splice of frag option * frag list
| Raw of string
let v ?(at = []) n cs = El (n, at, cs)
let txt v = Txt v
let sp = Txt " "
let nbsp = Txt "\u{00A0}"
let splice ?sep cs = Splice (sep, cs)
let void = Splice (None, [])
let raw f = Raw f
let addc = Buffer.add_char
let adds = Buffer.add_string
let adds_esc b s =
let len = String.length s in
let max_idx = len - 1 in
let flush b start i =
if start < len then Buffer.add_substring b s start (i - start);
in
let rec loop start i =
if i > max_idx then flush b start i else
let next = i + 1 in
match String.get s i with
| '&' -> flush b start i; adds b "&"; loop next next
| '<' -> flush b start i; adds b "<"; loop next next
| '>' -> flush b start i; adds b ">"; loop next next
| '\'' -> flush b start i; adds b "'"; loop next next
| '\"' -> flush b start i; adds b """; loop next next
| '@' -> flush b start i; adds b "@"; loop next next
| c -> loop start next
in
loop 0 0
let void_els = B0_std.String.Set.of_list
[ "area"; "base"; "br"; "col"; "embed"; "hr"; "img"; "input"; "link";
"meta"; "param"; "source"; "track"; "wbr" ]
let rec add_ats b cs atts =
let add_at b n v = adds b n; adds b "=\""; adds_esc b v; addc b '\"' in
match atts with
| ("class", c) :: atts -> add_ats b (c :: cs) atts
| (n, v) :: atts -> addc b ' '; add_at b n v; add_ats b cs atts
| [] when cs = [] -> ()
| [] -> addc b ' '; add_at b "class" (String.concat " " (List.rev cs))
not T.R.
| Raw r -> adds b r
| Txt txt -> adds_esc b txt
| Splice (sep, cs) ->
begin match sep with
| None -> List.iter (add_child b) cs
| Some sep ->
begin match cs with
| [] -> ()
| c :: cs ->
let add b c = add_child b sep; add_child b c in
add_child b c; List.iter (add b) cs
end
end
| El (n, atts, cs) ->
addc b '<'; adds b n; add_ats b [] atts; addc b '>';
if not (B0_std.String.Set.mem n void_els)
then (List.iter (add_child b) cs; adds b "</"; adds b n; addc b '>')
let add_doc_type b = adds b "<!DOCTYPE html>\n"
let buffer_add ~doc_type b cs =
if doc_type then add_doc_type b; add_child b cs
let to_string ~doc_type g =
let b = Buffer.create 65525 in
buffer_add ~doc_type b g; Buffer.contents b
type cons = ?at:At.t list -> frag list -> frag
type void_cons = ?at:At.t list -> unit -> frag
let[@inline] cons e ?at els = v ?at e els
let[@inline] void_cons e ?at () = v e ?at []
let a = cons "a"
let abbr = cons "abbr"
let address = cons "address"
let area = void_cons "area"
let article = cons "article"
let aside = cons "aside"
let audio = cons "audio"
let b = cons "b"
let base = void_cons "base"
let bdi = cons "bdi"
let bdo = cons "bdo"
let blockquote = cons "blockquote"
let body = cons "body"
let br = void_cons "br"
let button = cons "button"
let canvas = cons "canvas"
let caption = cons "caption"
let cite = cons "cite"
let code = cons "code"
let col = void_cons "col"
let colgroup = cons "colgroup"
let command = cons "command"
let datalist = cons "datalist"
let dd = cons "dd"
let del = cons "del"
let details = cons "details"
let dfn = cons "dfn"
let div = cons "div"
let dl = cons "dl"
let dt = cons "dt"
let em = cons "em"
let embed = void_cons "embed"
let fieldset = cons "fieldset"
let figcaption = cons "figcaption"
let figure = cons "figure"
let footer = cons "footer"
let form = cons "form"
let h1 = cons "h1"
let h2 = cons "h2"
let h3 = cons "h3"
let h4 = cons "h4"
let h5 = cons "h5"
let h6 = cons "h6"
let head = cons "head"
let header = cons "header"
let hgroup = cons "hgroup"
let hr = void_cons "hr"
let html = cons "html"
let i = cons "i"
let iframe = cons "iframe"
let img = void_cons "img"
let input = void_cons "input"
let ins = cons "ins"
let kbd = cons "kbd"
let keygen = cons "keygen"
let label = cons "label"
let legend = cons "legend"
let li = cons "li"
let link = void_cons "link"
let map = cons "map"
let mark = cons "mark"
let menu = cons "menu"
let meta = void_cons "meta"
let meter = cons "meter"
let nav = cons "nav"
let noscript = cons "noscript"
let object' = cons "object"
let ol = cons "ol"
let optgroup = cons "optgroup"
let option = cons "option"
let output = cons "output"
let p = cons "p"
let param = void_cons "param"
let pre = cons "pre"
let progress = cons "progress"
let q = cons "q"
let rp = cons "rp"
let rt = cons "rt"
let ruby = cons "ruby"
let s = cons "s"
let samp = cons "samp"
let script = cons "script"
let section = cons "section"
let select = cons "select"
let small = cons "small"
let source = void_cons "source"
let span = cons "span"
let strong = cons "strong"
let style = cons "style"
let sub = cons "sub"
let summary = cons "summary"
let sup = cons "sup"
let table = cons "table"
let tbody = cons "tbody"
let td = cons "td"
let textarea = cons "textarea"
let tfoot = cons "tfoot"
let th = cons "th"
let thead = cons "thead"
let time = cons "time"
let title = cons "title"
let tr = cons "tr"
let track = void_cons "track"
let u = cons "u"
let ul = cons "ul"
let var = cons "var"
let video = cons "video"
let wbr = void_cons "wbr"
let title_of_fpath file = match B0_std.Fpath.basename ~no_ext:true file with
| "index" | "" ->
let title = B0_std.Fpath.(basename ~no_ext:true (parent file)) in
if title = "" then "Untitled" else title
| title -> title
let basic_page
?(lang = "") ?(generator = "") ?(styles = []) ?(scripts = [])
?(more_head = void) ~title:t body
=
let viewport = "width=device-width, initial-scale=1.0" in
let generator = match generator with
| "" -> void
| g -> meta ~at:At.[name "generator"; content g] ()
in
let style uri =
link ~at:At.[rel "stylesheet"; type' "text/css"; href uri] ()
in
let script uri =
script ~at:At.[type' "text/javascript"; defer; src uri] []
in
let head = head [
meta ~at:At.[charset "utf-8"] ();
generator;
meta ~at:At.[name "viewport"; content viewport] ();
splice (List.map style styles);
splice (List.map script scripts);
more_head;
title [txt (if String.trim t = "" then "Untilted" else t)]]
in
let at = if lang = "" then [] else [At.lang lang] in
html ~at [head; body]
let write_page
?(lang = "") ?(generator = "") ?(styles = []) ?(scripts = [])
?more_head ?(title = "") m ~frag ~o
=
let open B0_std.Fut.Syntax in
let* contents = B00.Memo.read m frag in
let title = if title = "" then title_of_fpath o else title in
let more_head = match more_head with
| None -> ""
| Some more_head -> to_string ~doc_type:false more_head
in
let stamp = lang :: generator :: more_head :: title :: [] in
let stamp = List.rev_append styles stamp in
let stamp = List.rev_append scripts stamp in
let stamp = String.concat "" stamp in
B0_std.Fut.return @@
(B00.Memo.write m ~stamp ~reads:[frag] o @@ fun () ->
let more_head = raw more_head in
let body = body [raw contents] in
let page =
basic_page ~lang ~generator ~styles ~scripts ~more_head ~title body
in
Ok (to_string ~doc_type:true page))
end
---------------------------------------------------------------------------
Copyright ( c ) 2016 The b0 programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2016 The b0 programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
028a3cccdb547230ca7d3fd8060d5aaaade92c2634738f8e19f405cf53833ade | zenspider/schemers | exercise.2.97.scm | #lang racket/base
(require "../lib/test.rkt")
(require "../lib/myutils.scm")
Exercise 2.97
;; ;; a. Implement this algorithm as a procedure `reduce-terms' that
takes two term lists ` n ' and ` d ' as arguments and returns a
;; list `nn', `dd', which are `n' and `d' reduced to lowest
;; terms via the algorithm given above. Also write a procedure
;; `reduce-poly', analogous to `add-poly', that checks to see if
the two polys have the same variable . If so , ` reduce - poly '
;; strips off the variable and passes the problem to
` reduce - terms ' , then reattaches the variable to the two term
;; lists supplied by `reduce-terms'.
;;
;; b. Define a procedure analogous to `reduce-terms' that does what
;; the original `make-rat' did for integers:
;;
;; (define (reduce-integers n d)
;; (let ((g (gcd n d)))
;; (list (/ n g) (/ d g))))
;;
;; and define `reduce' as a generic operation that calls
;; `apply-generic' to dispatch to either `reduce-poly' (for
;; `polynomial' arguments) or `reduce-integers' (for
;; `scheme-number' arguments). You can now easily make the
;; rational-arithmetic package reduce fractions to lowest terms
;; by having `make-rat' call `reduce' before combining the given
;; numerator and denominator to form a rational number. The
;; system now handles rational expressions in either integers or
;; polynomials. To test your program, try the example at the
;; beginning of this extended exercise:
;;
( define p1 ( make - polynomial ' x ' ( ( 1 1)(0 1 ) ) ) )
( define p2 ( make - polynomial ' x ' ( ( 3 1)(0 -1 ) ) ) )
( define p3 ( make - polynomial ' x ' ( ( 1 1 ) ) ) )
( define p4 ( make - polynomial ' x ' ( ( 2 1)(0 -1 ) ) ) )
;;
;; (define rf1 (make-rational p1 p2))
;; (define rf2 (make-rational p3 p4))
;;
;; (add rf1 rf2)
;;
;; See if you get the correct answer, correctly reduced to
;; lowest terms.
;;
The GCD computation is at the heart of any system that does
;; operations on rational functions. The algorithm used above,
;; although mathematically straightforward, is extremely slow.
;; The slowness is due partly to the large number of division
;; operations and partly to the enormous size of the
;; intermediate coefficients generated by the pseudodivisions.
One of the active areas in the development of
;; algebraic-manipulation systems is the design of better
algorithms for computing polynomial GCDs.(9 )
;; (assert-equal x y)
(done)
| null | https://raw.githubusercontent.com/zenspider/schemers/2939ca553ac79013a4c3aaaec812c1bad3933b16/sicp/ch_2/exercise.2.97.scm | scheme | ;; a. Implement this algorithm as a procedure `reduce-terms' that
list `nn', `dd', which are `n' and `d' reduced to lowest
terms via the algorithm given above. Also write a procedure
`reduce-poly', analogous to `add-poly', that checks to see if
strips off the variable and passes the problem to
lists supplied by `reduce-terms'.
b. Define a procedure analogous to `reduce-terms' that does what
the original `make-rat' did for integers:
(define (reduce-integers n d)
(let ((g (gcd n d)))
(list (/ n g) (/ d g))))
and define `reduce' as a generic operation that calls
`apply-generic' to dispatch to either `reduce-poly' (for
`polynomial' arguments) or `reduce-integers' (for
`scheme-number' arguments). You can now easily make the
rational-arithmetic package reduce fractions to lowest terms
by having `make-rat' call `reduce' before combining the given
numerator and denominator to form a rational number. The
system now handles rational expressions in either integers or
polynomials. To test your program, try the example at the
beginning of this extended exercise:
(define rf1 (make-rational p1 p2))
(define rf2 (make-rational p3 p4))
(add rf1 rf2)
See if you get the correct answer, correctly reduced to
lowest terms.
operations on rational functions. The algorithm used above,
although mathematically straightforward, is extremely slow.
The slowness is due partly to the large number of division
operations and partly to the enormous size of the
intermediate coefficients generated by the pseudodivisions.
algebraic-manipulation systems is the design of better
(assert-equal x y) | #lang racket/base
(require "../lib/test.rkt")
(require "../lib/myutils.scm")
Exercise 2.97
takes two term lists ` n ' and ` d ' as arguments and returns a
the two polys have the same variable . If so , ` reduce - poly '
` reduce - terms ' , then reattaches the variable to the two term
( define p1 ( make - polynomial ' x ' ( ( 1 1)(0 1 ) ) ) )
( define p2 ( make - polynomial ' x ' ( ( 3 1)(0 -1 ) ) ) )
( define p3 ( make - polynomial ' x ' ( ( 1 1 ) ) ) )
( define p4 ( make - polynomial ' x ' ( ( 2 1)(0 -1 ) ) ) )
The GCD computation is at the heart of any system that does
One of the active areas in the development of
algorithms for computing polynomial GCDs.(9 )
(done)
|
7cfcd0db36caa668eccfb5d49a0a474ad3a594d3d1eae4b7750d76beb425024c | ghc/ghc | PatriciaTree.hs | # LANGUAGE BangPatterns , ScopedTypeVariables #
# LANGUAGE DeriveGeneric #
-- |An efficient implementation of 'Data.Graph.Inductive.Graph.Graph'
using big - endian ( i.e. " Data . IntMap " ) .
--
-- This module provides the following specialised functions to gain
more performance , using GHC 's RULES pragma :
--
-- * 'Data.Graph.Inductive.Graph.insNode'
--
-- * 'Data.Graph.Inductive.Graph.insEdge'
--
-- * 'Data.Graph.Inductive.Graph.gmap'
--
-- * 'Data.Graph.Inductive.Graph.nmap'
--
-- * 'Data.Graph.Inductive.Graph.emap'
--
Code is from Hackage ` fgl ` package version 5.7.0.3
module GHC.Data.Graph.Inductive.PatriciaTree
( Gr
, UGr
)
where
import GHC.Prelude
import GHC.Data.Graph.Inductive.Graph
import Data.IntMap (IntMap)
import qualified Data.IntMap as IM
import Data.List (sort)
import Data.Maybe (fromMaybe)
import Data.Tuple (swap)
import qualified Data.IntMap.Strict as IMS
import GHC.Generics (Generic)
import Data.Bifunctor
----------------------------------------------------------------------
-- GRAPH REPRESENTATION
----------------------------------------------------------------------
newtype Gr a b = Gr (GraphRep a b)
deriving (Generic)
type GraphRep a b = IntMap (Context' a b)
type Context' a b = (IntMap [b], a, IntMap [b])
type UGr = Gr () ()
----------------------------------------------------------------------
-- CLASS INSTANCES
----------------------------------------------------------------------
instance (Eq a, Ord b) => Eq (Gr a b) where
(Gr g1) == (Gr g2) = fmap sortAdj g1 == fmap sortAdj g2
where
sortAdj (p,n,s) = (fmap sort p,n,fmap sort s)
instance (Show a, Show b) => Show (Gr a b) where
showsPrec d g = showParen (d > 10) $
showString "mkGraph "
. shows (labNodes g)
. showString " "
. shows (labEdges g)
instance (Read a, Read b) => Read (Gr a b) where
readsPrec p = readParen (p > 10) $ \ r -> do
("mkGraph", s) <- lex r
(ns,t) <- reads s
(es,u) <- reads t
return (mkGraph ns es, u)
instance Graph Gr where
empty = Gr IM.empty
isEmpty (Gr g) = IM.null g
match = matchGr
mkGraph vs es = insEdges es
. Gr
. IM.fromList
. map (second (\l -> (IM.empty,l,IM.empty)))
$ vs
labNodes (Gr g) = [ (node, label)
| (node, (_, label, _)) <- IM.toList g ]
noNodes (Gr g) = IM.size g
nodeRange (Gr g) = fromMaybe (error "nodeRange of empty graph")
$ liftA2 (,) (ix (IM.minViewWithKey g))
(ix (IM.maxViewWithKey g))
where
ix = fmap (fst . fst)
labEdges (Gr g) = do (node, (_, _, s)) <- IM.toList g
(next, labels) <- IM.toList s
label <- labels
return (node, next, label)
instance DynGraph Gr where
(p, v, l, s) & (Gr g)
= let !g1 = IM.insert v (preds, l, succs) g
!(np, preds) = fromAdjCounting p
!(ns, succs) = fromAdjCounting s
!g2 = addSucc g1 v np preds
!g3 = addPred g2 v ns succs
in Gr g3
instance Functor (Gr a) where
fmap = fastEMap
instance Bifunctor Gr where
bimap = fastNEMap
first = fastNMap
second = fastEMap
matchGr :: Node -> Gr a b -> Decomp Gr a b
matchGr node (Gr g)
= case IM.lookup node g of
Nothing
-> (Nothing, Gr g)
Just (p, label, s)
-> let !g1 = IM.delete node g
!p' = IM.delete node p
!s' = IM.delete node s
!g2 = clearPred g1 node s'
!g3 = clearSucc g2 node p'
in (Just (toAdj p', node, label, toAdj s), Gr g3)
----------------------------------------------------------------------
-- OVERRIDING FUNCTIONS
----------------------------------------------------------------------
{-
{- RULES
"insNode/Data.Graph.Inductive.PatriciaTree" insNode = fastInsNode
-}
fastInsNode :: LNode a -> Gr a b -> Gr a b
fastInsNode (v, l) (Gr g) = g' `seq` Gr g'
where
g' = IM.insert v (IM.empty, l, IM.empty) g
-}
# RULES
" insEdge / GHC.Data . Graph . Inductive . PatriciaTree " insEdge = fastInsEdge
#
"insEdge/GHC.Data.Graph.Inductive.PatriciaTree" insEdge = fastInsEdge
#-}
fastInsEdge :: LEdge b -> Gr a b -> Gr a b
fastInsEdge (v, w, l) (Gr g) = g2 `seq` Gr g2
where
g1 = IM.adjust addS' v g
g2 = IM.adjust addP' w g1
addS' (ps, l', ss) = (ps, l', IM.insertWith addLists w [l] ss)
addP' (ps, l', ss) = (IM.insertWith addLists v [l] ps, l', ss)
{ - RULES
" gmap / Data . Graph . Inductive . PatriciaTree " gmap =
{- RULES
"gmap/Data.Graph.Inductive.PatriciaTree" gmap = fastGMap
-}
fastGMap :: forall a b c d. (Context a b -> Context c d) -> Gr a b -> Gr c d
fastGMap f (Gr g) = Gr (IM.mapWithKey f' g)
where
f' :: Node -> Context' a b -> Context' c d
f' = ((fromContext . f) .) . toContext
RULES
" nmap / Data . Graph . Inductive . PatriciaTree " nmap = fastNMap
"nmap/Data.Graph.Inductive.PatriciaTree" nmap = fastNMap
-}
-}
fastNMap :: forall a b c. (a -> c) -> Gr a b -> Gr c b
fastNMap f (Gr g) = Gr (IM.map f' g)
where
f' :: Context' a b -> Context' c b
f' (ps, a, ss) = (ps, f a, ss)
{ - RULES
" emap / GHC.Data . Graph . Inductive . PatriciaTree " = fastEMap
{- RULES
"emap/GHC.Data.Graph.Inductive.PatriciaTree" emap = fastEMap
-}
-}
fastEMap :: forall a b c. (b -> c) -> Gr a b -> Gr a c
fastEMap f (Gr g) = Gr (IM.map f' g)
where
f' :: Context' a b -> Context' a c
f' (ps, a, ss) = (IM.map (map f) ps, a, IM.map (map f) ss)
{- RULES
"nemap/GHC.Data.Graph.Inductive.PatriciaTree" nemap = fastNEMap
-}
fastNEMap :: forall a b c d. (a -> c) -> (b -> d) -> Gr a b -> Gr c d
fastNEMap fn fe (Gr g) = Gr (IM.map f g)
where
f :: Context' a b -> Context' c d
f (ps, a, ss) = (IM.map (map fe) ps, fn a, IM.map (map fe) ss)
----------------------------------------------------------------------
UTILITIES
----------------------------------------------------------------------
toAdj :: IntMap [b] -> Adj b
toAdj = concatMap expand . IM.toList
where
expand (n,ls) = map (flip (,) n) ls
fromAdj : : > IntMap [ b ]
fromAdj = IM.fromListWith addLists . map ( second (: [ ] ) . swap )
data FromListCounting a = FromListCounting !Int !(IntMap a)
deriving (Eq, Show, Read)
getFromListCounting :: FromListCounting a -> (Int, IntMap a)
getFromListCounting (FromListCounting i m) = (i, m)
{-# INLINE getFromListCounting #-}
fromListWithKeyCounting :: (Int -> a -> a -> a) -> [(Int, a)] -> (Int, IntMap a)
fromListWithKeyCounting f = getFromListCounting . foldl' ins (FromListCounting 0 IM.empty)
where
ins (FromListCounting i t) (k,x) = FromListCounting (i + 1) (IM.insertWithKey f k x t)
# INLINE fromListWithKeyCounting #
fromListWithCounting :: (a -> a -> a) -> [(Int, a)] -> (Int, IntMap a)
fromListWithCounting f = fromListWithKeyCounting (\_ x y -> f x y)
# INLINE fromListWithCounting #
fromAdjCounting :: Adj b -> (Int, IntMap [b])
fromAdjCounting = fromListWithCounting addLists . map (second (:[]) . swap)
-- We use differenceWith to modify a graph more than bulkThreshold times,
-- and repeated insertWith otherwise.
bulkThreshold :: Int
bulkThreshold = 5
--toContext :: Node -> Context' a b -> Context a b
v ( ps , a , ss ) = ( toAdj ps , v , a , toAdj ss )
--fromContext :: Context a b -> Context' a b
--fromContext (ps, _, a, ss) = (fromAdj ps, a, fromAdj ss)
A version of @++@ where order is n't important , so @xs + + [ x]@
becomes @x : xs@. Used when we have to have a function of type @[a ]
- > [ a ] - > [ a]@ but one of the lists is just going to be a single
-- element (and it isn't possible to tell which).
addLists :: [a] -> [a] -> [a]
addLists [a] as = a : as
addLists as [a] = a : as
addLists xs ys = xs ++ ys
addSucc :: forall a b . GraphRep a b -> Node -> Int -> IM.IntMap [b] -> GraphRep a b
addSucc g0 v numAdd xs
| numAdd < bulkThreshold = foldlWithKey' go g0 xs
where
go :: GraphRep a b -> Node -> [b] -> GraphRep a b
go g p l = IMS.adjust f p g
where f (ps, l', ss) = let !ss' = IM.insertWith addLists v l ss
in (ps, l', ss')
addSucc g v _ xs = IMS.differenceWith go g xs
where
go :: Context' a b -> [b] -> Maybe (Context' a b)
go (ps, l', ss) l = let !ss' = IM.insertWith addLists v l ss
in Just (ps, l', ss')
foldlWithKey' :: (a -> IM.Key -> b -> a) -> a -> IntMap b -> a
foldlWithKey' =
IM.foldlWithKey'
addPred :: forall a b . GraphRep a b -> Node -> Int -> IM.IntMap [b] -> GraphRep a b
addPred g0 v numAdd xs
| numAdd < bulkThreshold = foldlWithKey' go g0 xs
where
go :: GraphRep a b -> Node -> [b] -> GraphRep a b
go g p l = IMS.adjust f p g
where f (ps, l', ss) = let !ps' = IM.insertWith addLists v l ps
in (ps', l', ss)
addPred g v _ xs = IMS.differenceWith go g xs
where
go :: Context' a b -> [b] -> Maybe (Context' a b)
go (ps, l', ss) l = let !ps' = IM.insertWith addLists v l ps
in Just (ps', l', ss)
clearSucc :: forall a b x . GraphRep a b -> Node -> IM.IntMap x -> GraphRep a b
clearSucc g v = IMS.differenceWith go g
where
go :: Context' a b -> x -> Maybe (Context' a b)
go (ps, l, ss) _ = let !ss' = IM.delete v ss
in Just (ps, l, ss')
clearPred :: forall a b x . GraphRep a b -> Node -> IM.IntMap x -> GraphRep a b
clearPred g v = IMS.differenceWith go g
where
go :: Context' a b -> x -> Maybe (Context' a b)
go (ps, l, ss) _ = let !ps' = IM.delete v ps
in Just (ps', l, ss)
----------------------------------------------------------------
Copyright ( c ) 1999 - 2008 ,
2010 ,
2022 ,
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are met :
1 . Redistributions of source code must retain the above copyright notice ,
this list of conditions and the following disclaimer .
2 . Redistributions in binary form must reproduce the above copyright
notice , this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution .
3 . Neither the name of the author nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , IN
CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE )
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE .
---------------------------------------------------------------
Copyright (c) 1999-2008, Martin Erwig
2010, Ivan Lazar Miljenovic
2022, Norman Ramsey
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the author nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------}
| null | https://raw.githubusercontent.com/ghc/ghc/d87f28d810b9c536ca4db7f363163e6d0dd6c93c/compiler/GHC/Data/Graph/Inductive/PatriciaTree.hs | haskell | |An efficient implementation of 'Data.Graph.Inductive.Graph.Graph'
This module provides the following specialised functions to gain
* 'Data.Graph.Inductive.Graph.insNode'
* 'Data.Graph.Inductive.Graph.insEdge'
* 'Data.Graph.Inductive.Graph.gmap'
* 'Data.Graph.Inductive.Graph.nmap'
* 'Data.Graph.Inductive.Graph.emap'
--------------------------------------------------------------------
GRAPH REPRESENTATION
--------------------------------------------------------------------
--------------------------------------------------------------------
CLASS INSTANCES
--------------------------------------------------------------------
--------------------------------------------------------------------
OVERRIDING FUNCTIONS
--------------------------------------------------------------------
{- RULES
"insNode/Data.Graph.Inductive.PatriciaTree" insNode = fastInsNode
RULES
"gmap/Data.Graph.Inductive.PatriciaTree" gmap = fastGMap
RULES
"emap/GHC.Data.Graph.Inductive.PatriciaTree" emap = fastEMap
RULES
"nemap/GHC.Data.Graph.Inductive.PatriciaTree" nemap = fastNEMap
--------------------------------------------------------------------
--------------------------------------------------------------------
# INLINE getFromListCounting #
We use differenceWith to modify a graph more than bulkThreshold times,
and repeated insertWith otherwise.
toContext :: Node -> Context' a b -> Context a b
fromContext :: Context a b -> Context' a b
fromContext (ps, _, a, ss) = (fromAdj ps, a, fromAdj ss)
element (and it isn't possible to tell which).
--------------------------------------------------------------
-------------------------------------------------------------
--------------------------------------------------------------} | # LANGUAGE BangPatterns , ScopedTypeVariables #
# LANGUAGE DeriveGeneric #
using big - endian ( i.e. " Data . IntMap " ) .
more performance , using GHC 's RULES pragma :
Code is from Hackage ` fgl ` package version 5.7.0.3
module GHC.Data.Graph.Inductive.PatriciaTree
( Gr
, UGr
)
where
import GHC.Prelude
import GHC.Data.Graph.Inductive.Graph
import Data.IntMap (IntMap)
import qualified Data.IntMap as IM
import Data.List (sort)
import Data.Maybe (fromMaybe)
import Data.Tuple (swap)
import qualified Data.IntMap.Strict as IMS
import GHC.Generics (Generic)
import Data.Bifunctor
newtype Gr a b = Gr (GraphRep a b)
deriving (Generic)
type GraphRep a b = IntMap (Context' a b)
type Context' a b = (IntMap [b], a, IntMap [b])
type UGr = Gr () ()
instance (Eq a, Ord b) => Eq (Gr a b) where
(Gr g1) == (Gr g2) = fmap sortAdj g1 == fmap sortAdj g2
where
sortAdj (p,n,s) = (fmap sort p,n,fmap sort s)
instance (Show a, Show b) => Show (Gr a b) where
showsPrec d g = showParen (d > 10) $
showString "mkGraph "
. shows (labNodes g)
. showString " "
. shows (labEdges g)
instance (Read a, Read b) => Read (Gr a b) where
readsPrec p = readParen (p > 10) $ \ r -> do
("mkGraph", s) <- lex r
(ns,t) <- reads s
(es,u) <- reads t
return (mkGraph ns es, u)
instance Graph Gr where
empty = Gr IM.empty
isEmpty (Gr g) = IM.null g
match = matchGr
mkGraph vs es = insEdges es
. Gr
. IM.fromList
. map (second (\l -> (IM.empty,l,IM.empty)))
$ vs
labNodes (Gr g) = [ (node, label)
| (node, (_, label, _)) <- IM.toList g ]
noNodes (Gr g) = IM.size g
nodeRange (Gr g) = fromMaybe (error "nodeRange of empty graph")
$ liftA2 (,) (ix (IM.minViewWithKey g))
(ix (IM.maxViewWithKey g))
where
ix = fmap (fst . fst)
labEdges (Gr g) = do (node, (_, _, s)) <- IM.toList g
(next, labels) <- IM.toList s
label <- labels
return (node, next, label)
instance DynGraph Gr where
(p, v, l, s) & (Gr g)
= let !g1 = IM.insert v (preds, l, succs) g
!(np, preds) = fromAdjCounting p
!(ns, succs) = fromAdjCounting s
!g2 = addSucc g1 v np preds
!g3 = addPred g2 v ns succs
in Gr g3
instance Functor (Gr a) where
fmap = fastEMap
instance Bifunctor Gr where
bimap = fastNEMap
first = fastNMap
second = fastEMap
matchGr :: Node -> Gr a b -> Decomp Gr a b
matchGr node (Gr g)
= case IM.lookup node g of
Nothing
-> (Nothing, Gr g)
Just (p, label, s)
-> let !g1 = IM.delete node g
!p' = IM.delete node p
!s' = IM.delete node s
!g2 = clearPred g1 node s'
!g3 = clearSucc g2 node p'
in (Just (toAdj p', node, label, toAdj s), Gr g3)
fastInsNode :: LNode a -> Gr a b -> Gr a b
fastInsNode (v, l) (Gr g) = g' `seq` Gr g'
where
g' = IM.insert v (IM.empty, l, IM.empty) g
-}
# RULES
" insEdge / GHC.Data . Graph . Inductive . PatriciaTree " insEdge = fastInsEdge
#
"insEdge/GHC.Data.Graph.Inductive.PatriciaTree" insEdge = fastInsEdge
#-}
fastInsEdge :: LEdge b -> Gr a b -> Gr a b
fastInsEdge (v, w, l) (Gr g) = g2 `seq` Gr g2
where
g1 = IM.adjust addS' v g
g2 = IM.adjust addP' w g1
addS' (ps, l', ss) = (ps, l', IM.insertWith addLists w [l] ss)
addP' (ps, l', ss) = (IM.insertWith addLists v [l] ps, l', ss)
{ - RULES
" gmap / Data . Graph . Inductive . PatriciaTree " gmap =
fastGMap :: forall a b c d. (Context a b -> Context c d) -> Gr a b -> Gr c d
fastGMap f (Gr g) = Gr (IM.mapWithKey f' g)
where
f' :: Node -> Context' a b -> Context' c d
f' = ((fromContext . f) .) . toContext
RULES
" nmap / Data . Graph . Inductive . PatriciaTree " nmap = fastNMap
"nmap/Data.Graph.Inductive.PatriciaTree" nmap = fastNMap
-}
-}
fastNMap :: forall a b c. (a -> c) -> Gr a b -> Gr c b
fastNMap f (Gr g) = Gr (IM.map f' g)
where
f' :: Context' a b -> Context' c b
f' (ps, a, ss) = (ps, f a, ss)
{ - RULES
" emap / GHC.Data . Graph . Inductive . PatriciaTree " = fastEMap
-}
fastEMap :: forall a b c. (b -> c) -> Gr a b -> Gr a c
fastEMap f (Gr g) = Gr (IM.map f' g)
where
f' :: Context' a b -> Context' a c
f' (ps, a, ss) = (IM.map (map f) ps, a, IM.map (map f) ss)
fastNEMap :: forall a b c d. (a -> c) -> (b -> d) -> Gr a b -> Gr c d
fastNEMap fn fe (Gr g) = Gr (IM.map f g)
where
f :: Context' a b -> Context' c d
f (ps, a, ss) = (IM.map (map fe) ps, fn a, IM.map (map fe) ss)
UTILITIES
toAdj :: IntMap [b] -> Adj b
toAdj = concatMap expand . IM.toList
where
expand (n,ls) = map (flip (,) n) ls
fromAdj : : > IntMap [ b ]
fromAdj = IM.fromListWith addLists . map ( second (: [ ] ) . swap )
data FromListCounting a = FromListCounting !Int !(IntMap a)
deriving (Eq, Show, Read)
getFromListCounting :: FromListCounting a -> (Int, IntMap a)
getFromListCounting (FromListCounting i m) = (i, m)
fromListWithKeyCounting :: (Int -> a -> a -> a) -> [(Int, a)] -> (Int, IntMap a)
fromListWithKeyCounting f = getFromListCounting . foldl' ins (FromListCounting 0 IM.empty)
where
ins (FromListCounting i t) (k,x) = FromListCounting (i + 1) (IM.insertWithKey f k x t)
# INLINE fromListWithKeyCounting #
fromListWithCounting :: (a -> a -> a) -> [(Int, a)] -> (Int, IntMap a)
fromListWithCounting f = fromListWithKeyCounting (\_ x y -> f x y)
# INLINE fromListWithCounting #
fromAdjCounting :: Adj b -> (Int, IntMap [b])
fromAdjCounting = fromListWithCounting addLists . map (second (:[]) . swap)
bulkThreshold :: Int
bulkThreshold = 5
v ( ps , a , ss ) = ( toAdj ps , v , a , toAdj ss )
A version of @++@ where order is n't important , so @xs + + [ x]@
becomes @x : xs@. Used when we have to have a function of type @[a ]
- > [ a ] - > [ a]@ but one of the lists is just going to be a single
addLists :: [a] -> [a] -> [a]
addLists [a] as = a : as
addLists as [a] = a : as
addLists xs ys = xs ++ ys
addSucc :: forall a b . GraphRep a b -> Node -> Int -> IM.IntMap [b] -> GraphRep a b
addSucc g0 v numAdd xs
| numAdd < bulkThreshold = foldlWithKey' go g0 xs
where
go :: GraphRep a b -> Node -> [b] -> GraphRep a b
go g p l = IMS.adjust f p g
where f (ps, l', ss) = let !ss' = IM.insertWith addLists v l ss
in (ps, l', ss')
addSucc g v _ xs = IMS.differenceWith go g xs
where
go :: Context' a b -> [b] -> Maybe (Context' a b)
go (ps, l', ss) l = let !ss' = IM.insertWith addLists v l ss
in Just (ps, l', ss')
foldlWithKey' :: (a -> IM.Key -> b -> a) -> a -> IntMap b -> a
foldlWithKey' =
IM.foldlWithKey'
addPred :: forall a b . GraphRep a b -> Node -> Int -> IM.IntMap [b] -> GraphRep a b
addPred g0 v numAdd xs
| numAdd < bulkThreshold = foldlWithKey' go g0 xs
where
go :: GraphRep a b -> Node -> [b] -> GraphRep a b
go g p l = IMS.adjust f p g
where f (ps, l', ss) = let !ps' = IM.insertWith addLists v l ps
in (ps', l', ss)
addPred g v _ xs = IMS.differenceWith go g xs
where
go :: Context' a b -> [b] -> Maybe (Context' a b)
go (ps, l', ss) l = let !ps' = IM.insertWith addLists v l ps
in Just (ps', l', ss)
clearSucc :: forall a b x . GraphRep a b -> Node -> IM.IntMap x -> GraphRep a b
clearSucc g v = IMS.differenceWith go g
where
go :: Context' a b -> x -> Maybe (Context' a b)
go (ps, l, ss) _ = let !ss' = IM.delete v ss
in Just (ps, l, ss')
clearPred :: forall a b x . GraphRep a b -> Node -> IM.IntMap x -> GraphRep a b
clearPred g v = IMS.differenceWith go g
where
go :: Context' a b -> x -> Maybe (Context' a b)
go (ps, l, ss) _ = let !ps' = IM.delete v ps
in Just (ps', l, ss)
Copyright ( c ) 1999 - 2008 ,
2010 ,
2022 ,
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are met :
1 . Redistributions of source code must retain the above copyright notice ,
this list of conditions and the following disclaimer .
2 . Redistributions in binary form must reproduce the above copyright
notice , this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution .
3 . Neither the name of the author nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , IN
CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE )
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE .
Copyright (c) 1999-2008, Martin Erwig
2010, Ivan Lazar Miljenovic
2022, Norman Ramsey
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the author nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
|
92698438f17b802eb3dc092466a78ae4ddd1cccd9d35c81c3265f23e8dbf5960 | cyppan/grape | default_sort.clj | (ns grape.hooks.default-sort)
(def hooks
{:pre-read (fn [{:keys [config]} resource request query]
(if (get-in query [:opts :sort?])
(let [query-sort (:sort query)
resource-sort (:default-sort resource)
config-sort (:default-sort config)]
(assoc-in query [:sort] (or query-sort resource-sort config-sort)))
query))})
| null | https://raw.githubusercontent.com/cyppan/grape/62488a335542fc58fc9126b8d5ff7fccdd16f1d7/src/grape/hooks/default_sort.clj | clojure | (ns grape.hooks.default-sort)
(def hooks
{:pre-read (fn [{:keys [config]} resource request query]
(if (get-in query [:opts :sort?])
(let [query-sort (:sort query)
resource-sort (:default-sort resource)
config-sort (:default-sort config)]
(assoc-in query [:sort] (or query-sort resource-sort config-sort)))
query))})
| |
cf9267631607c36581157c31bbe3fc6eaf84d1ebd2dd37f7bb2155ebd239be50 | nomeata/haskell-candid | Infer.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TupleSections #
module Codec.Candid.Infer where
import qualified Data.Vector as V
import Control.Monad
import Data.Void
import Data.List
import Prettyprinter
import Codec.Candid.Types
inferTypes :: [Value] -> Either String [Type Void]
inferTypes = mapM inferTyp
inferTyp :: Value -> Either String (Type Void)
inferTyp (NumV v) = return $ if v >= 0 then NatT else IntT
inferTyp (BoolV _) = return BoolT
inferTyp (NatV _) = return NatT
inferTyp (Nat8V _) = return Nat8T
inferTyp (Nat16V _) = return Nat16T
inferTyp (Nat32V _) = return Nat32T
inferTyp (Nat64V _) = return Nat64T
inferTyp (IntV _) = return IntT
inferTyp (Int8V _) = return Int8T
inferTyp (Int16V _) = return Int16T
inferTyp (Int32V _) = return Int32T
inferTyp (Int64V _) = return Int64T
inferTyp (Float32V _) = return Float32T
inferTyp (Float64V _) = return Float64T
inferTyp (TextV _) = return TextT
inferTyp NullV = return NullT
inferTyp ReservedV = return ReservedT
inferTyp (OptV Nothing) = return $ OptT EmptyT
inferTyp (OptV (Just v)) = OptT <$> inferTyp v
inferTyp (VecV vs) = VecT <$> (mapM inferTyp (V.toList vs) >>= lubs)
inferTyp (RecV fs) = RecT <$> sequence [ (fn,) <$> inferTyp t | (fn, t) <- fs ]
inferTyp (VariantV f v) = do
t <- inferTyp v
return $ VariantT [ (f, t) ]
inferTyp (TupV vs) = tupT <$> mapM inferTyp vs
inferTyp (FuncV _ _) = return (FuncT (MethodType [] [] False False)) -- no principal type
inferTyp (ServiceV _) = return (ServiceT []) -- no principal type
inferTyp (PrincipalV _) = return PrincipalT
inferTyp FutureV = return FutureT
inferTyp (BlobV _) = return BlobT
inferTyp (AnnV _ t) = return t -- Maybe do type checking?
lubs :: [Type Void] -> Either String (Type Void)
lubs = foldM lub EmptyT
lub :: Type Void -> Type Void -> Either String (Type Void)
lub ReservedT _ = return ReservedT
lub _ ReservedT = return ReservedT
lub EmptyT t = return t
lub t EmptyT = return t
lub NatT IntT = return IntT
lub IntT NatT = return IntT
lub NullT (OptT t) = return (OptT t)
lub (OptT t) NullT = return (OptT t)
lub (OptT t1) (OptT t2) = OptT <$> lub t1 t2
lub (VecT t1) (VecT t2) = VecT <$> lub t1 t2
lub (RecT fs1) (RecT fs2) = RecT <$> go (sortOn fst fs1) (sortOn fst fs2)
where
go [] _ = return []
go _ [] = return []
go ((f1, v1):fs1) ((f2,v2):fs2)
| f1 < f2 = go fs1 ((f2,v2):fs2)
| f1 > f2 = go ((f1,v1):fs1) fs2
| otherwise = (:) <$> ((f1,) <$> lub v1 v2) <*> go fs1 fs2
lub (VariantT fs1) (VariantT fs2) = VariantT <$> go (sortOn fst fs1) (sortOn fst fs2)
where
go [] fs = return fs
go fs [] = return fs
go ((f1, v1):fs1) ((f2,v2):fs2)
| f1 < f2 = ((f1,v1) :) <$> go fs1 ((f2,v2):fs2)
| f1 > f2 = ((f2,v2) :) <$> go ((f1,v1):fs1) fs2
| otherwise = (:) <$> ((f1,) <$> lub v1 v2) <*> go fs1 fs2
-- the reflexive cases
lub NatT NatT = return NatT
lub Nat8T Nat8T = return Nat8T
lub Nat16T Nat16T = return Nat16T
lub Nat32T Nat32T = return Nat32T
lub Nat64T Nat64T = return Nat64T
lub IntT IntT = return IntT
lub Int8T Int8T = return Int8T
lub Int16T Int16T = return Int16T
lub Int32T Int32T = return Int32T
lub Int64T Int64T = return Int64T
lub Float32T Float32T = return Float32T
lub Float64T Float64T = return Float64T
lub BoolT BoolT = return BoolT
lub TextT TextT = return TextT
lub NullT NullT = return NullT
lub BlobT BlobT = return BlobT
lub PrincipalT PrincipalT = return PrincipalT
-- The shorthands
lub BlobT t@(VecT _) = lub (VecT Nat8T) t
lub t@(VecT _) BlobT = lub (VecT Nat8T) t
-- failure
lub t1 t2 = Left $ show $ "Incompatible types: " <+> pretty t1 <+> " and " <+> pretty t2
| null | https://raw.githubusercontent.com/nomeata/haskell-candid/87a4f01eb9cb93c827a0a7f5f29af0ee19135308/src/Codec/Candid/Infer.hs | haskell | # LANGUAGE OverloadedStrings #
no principal type
no principal type
Maybe do type checking?
the reflexive cases
The shorthands
failure | # LANGUAGE TupleSections #
module Codec.Candid.Infer where
import qualified Data.Vector as V
import Control.Monad
import Data.Void
import Data.List
import Prettyprinter
import Codec.Candid.Types
inferTypes :: [Value] -> Either String [Type Void]
inferTypes = mapM inferTyp
inferTyp :: Value -> Either String (Type Void)
inferTyp (NumV v) = return $ if v >= 0 then NatT else IntT
inferTyp (BoolV _) = return BoolT
inferTyp (NatV _) = return NatT
inferTyp (Nat8V _) = return Nat8T
inferTyp (Nat16V _) = return Nat16T
inferTyp (Nat32V _) = return Nat32T
inferTyp (Nat64V _) = return Nat64T
inferTyp (IntV _) = return IntT
inferTyp (Int8V _) = return Int8T
inferTyp (Int16V _) = return Int16T
inferTyp (Int32V _) = return Int32T
inferTyp (Int64V _) = return Int64T
inferTyp (Float32V _) = return Float32T
inferTyp (Float64V _) = return Float64T
inferTyp (TextV _) = return TextT
inferTyp NullV = return NullT
inferTyp ReservedV = return ReservedT
inferTyp (OptV Nothing) = return $ OptT EmptyT
inferTyp (OptV (Just v)) = OptT <$> inferTyp v
inferTyp (VecV vs) = VecT <$> (mapM inferTyp (V.toList vs) >>= lubs)
inferTyp (RecV fs) = RecT <$> sequence [ (fn,) <$> inferTyp t | (fn, t) <- fs ]
inferTyp (VariantV f v) = do
t <- inferTyp v
return $ VariantT [ (f, t) ]
inferTyp (TupV vs) = tupT <$> mapM inferTyp vs
inferTyp (PrincipalV _) = return PrincipalT
inferTyp FutureV = return FutureT
inferTyp (BlobV _) = return BlobT
lubs :: [Type Void] -> Either String (Type Void)
lubs = foldM lub EmptyT
lub :: Type Void -> Type Void -> Either String (Type Void)
lub ReservedT _ = return ReservedT
lub _ ReservedT = return ReservedT
lub EmptyT t = return t
lub t EmptyT = return t
lub NatT IntT = return IntT
lub IntT NatT = return IntT
lub NullT (OptT t) = return (OptT t)
lub (OptT t) NullT = return (OptT t)
lub (OptT t1) (OptT t2) = OptT <$> lub t1 t2
lub (VecT t1) (VecT t2) = VecT <$> lub t1 t2
lub (RecT fs1) (RecT fs2) = RecT <$> go (sortOn fst fs1) (sortOn fst fs2)
where
go [] _ = return []
go _ [] = return []
go ((f1, v1):fs1) ((f2,v2):fs2)
| f1 < f2 = go fs1 ((f2,v2):fs2)
| f1 > f2 = go ((f1,v1):fs1) fs2
| otherwise = (:) <$> ((f1,) <$> lub v1 v2) <*> go fs1 fs2
lub (VariantT fs1) (VariantT fs2) = VariantT <$> go (sortOn fst fs1) (sortOn fst fs2)
where
go [] fs = return fs
go fs [] = return fs
go ((f1, v1):fs1) ((f2,v2):fs2)
| f1 < f2 = ((f1,v1) :) <$> go fs1 ((f2,v2):fs2)
| f1 > f2 = ((f2,v2) :) <$> go ((f1,v1):fs1) fs2
| otherwise = (:) <$> ((f1,) <$> lub v1 v2) <*> go fs1 fs2
lub NatT NatT = return NatT
lub Nat8T Nat8T = return Nat8T
lub Nat16T Nat16T = return Nat16T
lub Nat32T Nat32T = return Nat32T
lub Nat64T Nat64T = return Nat64T
lub IntT IntT = return IntT
lub Int8T Int8T = return Int8T
lub Int16T Int16T = return Int16T
lub Int32T Int32T = return Int32T
lub Int64T Int64T = return Int64T
lub Float32T Float32T = return Float32T
lub Float64T Float64T = return Float64T
lub BoolT BoolT = return BoolT
lub TextT TextT = return TextT
lub NullT NullT = return NullT
lub BlobT BlobT = return BlobT
lub PrincipalT PrincipalT = return PrincipalT
lub BlobT t@(VecT _) = lub (VecT Nat8T) t
lub t@(VecT _) BlobT = lub (VecT Nat8T) t
lub t1 t2 = Left $ show $ "Incompatible types: " <+> pretty t1 <+> " and " <+> pretty t2
|
54db0924471a358e938b5cfb69d0f85ea9ad1fdb8a19f70f687511cd56cb567f | OCamlPro/OCamlPro-OCaml-Branch | boyer.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ I d : boyer.ml 7017 2005 - 08 - 12 09:22:04Z xleroy $
(* Manipulations over terms *)
type term =
Var of int
| Prop of head * term list
and head =
{ name: string;
mutable props: (term * term) list }
let rec print_term = function
Var v ->
print_string "v"; print_int v
| Prop (head,argl) ->
print_string "(";
print_string head.name;
List.iter (fun t -> print_string " "; print_term t) argl;
print_string ")"
let lemmas = ref ([] : head list)
(* Replacement for property lists *)
let get name =
let rec get_rec = function
hd1::hdl ->
if hd1.name = name then hd1 else get_rec hdl
| [] ->
let entry = {name = name; props = []} in
lemmas := entry :: !lemmas;
entry
in get_rec !lemmas
let add_lemma = function
| Prop(_, [(Prop(headl,_) as left); right]) ->
headl.props <- (left, right) :: headl.props
| _ -> assert false
(* Substitutions *)
type subst = Bind of int * term
let get_binding v list =
let rec get_rec = function
[] -> failwith "unbound"
| Bind(w,t)::rest -> if v = w then t else get_rec rest
in get_rec list
let apply_subst alist term =
let rec as_rec = function
Var v -> begin try get_binding v alist with Failure _ -> term end
| Prop (head,argl) -> Prop (head, List.map as_rec argl)
in as_rec term
exception Unify
let rec unify term1 term2 =
unify1 term1 term2 []
and unify1 term1 term2 unify_subst =
match term2 with
Var v ->
begin try
if get_binding v unify_subst = term1
then unify_subst
else raise Unify
with Failure _ ->
Bind(v,term1) :: unify_subst
end
| Prop (head2, argl2) ->
match term1 with
Var _ -> raise Unify
| Prop (head1,argl1) ->
if head1 == head2
then unify1_lst argl1 argl2 unify_subst
else raise Unify
and unify1_lst l1 l2 unify_subst =
match (l1, l2) with
([], []) -> unify_subst
| (h1::r1, h2::r2) -> unify1_lst r1 r2 (unify1 h1 h2 unify_subst)
| _ -> raise Unify
let rec rewrite = function
Var _ as term -> term
| Prop (head, argl) ->
rewrite_with_lemmas (Prop (head, List.map rewrite argl)) head.props
and rewrite_with_lemmas term lemmas =
match lemmas with
[] ->
term
| (t1,t2)::rest ->
try
rewrite (apply_subst (unify term t1) t2)
with Unify ->
rewrite_with_lemmas term rest
type cterm = CVar of int | CProp of string * cterm list
let rec cterm_to_term = function
CVar v -> Var v
| CProp(p, l) -> Prop(get p, List.map cterm_to_term l)
let add t = add_lemma (cterm_to_term t)
let _ =
add (CProp
("equal",
[CProp ("compile",[CVar 5]);
CProp
("reverse",
[CProp ("codegen",[CProp ("optimize",[CVar 5]); CProp ("nil",[])])])]));
add (CProp
("equal",
[CProp ("eqp",[CVar 23; CVar 24]);
CProp ("equal",[CProp ("fix",[CVar 23]); CProp ("fix",[CVar 24])])]));
add (CProp
("equal",
[CProp ("gt",[CVar 23; CVar 24]); CProp ("lt",[CVar 24; CVar 23])]));
add (CProp
("equal",
[CProp ("le",[CVar 23; CVar 24]); CProp ("ge",[CVar 24; CVar 23])]));
add (CProp
("equal",
[CProp ("ge",[CVar 23; CVar 24]); CProp ("le",[CVar 24; CVar 23])]));
add (CProp
("equal",
[CProp ("boolean",[CVar 23]);
CProp
("or",
[CProp ("equal",[CVar 23; CProp ("true",[])]);
CProp ("equal",[CVar 23; CProp ("false",[])])])]));
add (CProp
("equal",
[CProp ("iff",[CVar 23; CVar 24]);
CProp
("and",
[CProp ("implies",[CVar 23; CVar 24]);
CProp ("implies",[CVar 24; CVar 23])])]));
add (CProp
("equal",
[CProp ("even1",[CVar 23]);
CProp
("if",
[CProp ("zerop",[CVar 23]); CProp ("true",[]);
CProp ("odd",[CProp ("sub1",[CVar 23])])])]));
add (CProp
("equal",
[CProp ("countps_",[CVar 11; CVar 15]);
CProp ("countps_loop",[CVar 11; CVar 15; CProp ("zero",[])])]));
add (CProp
("equal",
[CProp ("fact_",[CVar 8]);
CProp ("fact_loop",[CVar 8; CProp ("one",[])])]));
add (CProp
("equal",
[CProp ("reverse_",[CVar 23]);
CProp ("reverse_loop",[CVar 23; CProp ("nil",[])])]));
add (CProp
("equal",
[CProp ("divides",[CVar 23; CVar 24]);
CProp ("zerop",[CProp ("remainder",[CVar 24; CVar 23])])]));
add (CProp
("equal",
[CProp ("assume_true",[CVar 21; CVar 0]);
CProp ("cons",[CProp ("cons",[CVar 21; CProp ("true",[])]); CVar 0])]));
add (CProp
("equal",
[CProp ("assume_false",[CVar 21; CVar 0]);
CProp ("cons",[CProp ("cons",[CVar 21; CProp ("false",[])]); CVar 0])]));
add (CProp
("equal",
[CProp ("tautology_checker",[CVar 23]);
CProp ("tautologyp",[CProp ("normalize",[CVar 23]); CProp ("nil",[])])]));
add (CProp
("equal",
[CProp ("falsify",[CVar 23]);
CProp ("falsify1",[CProp ("normalize",[CVar 23]); CProp ("nil",[])])]));
add (CProp
("equal",
[CProp ("prime",[CVar 23]);
CProp
("and",
[CProp ("not",[CProp ("zerop",[CVar 23])]);
CProp
("not",
[CProp ("equal",[CVar 23; CProp ("add1",[CProp ("zero",[])])])]);
CProp ("prime1",[CVar 23; CProp ("sub1",[CVar 23])])])]));
add (CProp
("equal",
[CProp ("and",[CVar 15; CVar 16]);
CProp
("if",
[CVar 15;
CProp ("if",[CVar 16; CProp ("true",[]); CProp ("false",[])]);
CProp ("false",[])])]));
add (CProp
("equal",
[CProp ("or",[CVar 15; CVar 16]);
CProp
("if",
[CVar 15; CProp ("true",[]);
CProp ("if",[CVar 16; CProp ("true",[]); CProp ("false",[])]);
CProp ("false",[])])]));
add (CProp
("equal",
[CProp ("not",[CVar 15]);
CProp ("if",[CVar 15; CProp ("false",[]); CProp ("true",[])])]));
add (CProp
("equal",
[CProp ("implies",[CVar 15; CVar 16]);
CProp
("if",
[CVar 15;
CProp ("if",[CVar 16; CProp ("true",[]); CProp ("false",[])]);
CProp ("true",[])])]));
add (CProp
("equal",
[CProp ("fix",[CVar 23]);
CProp ("if",[CProp ("numberp",[CVar 23]); CVar 23; CProp ("zero",[])])]));
add (CProp
("equal",
[CProp ("if",[CProp ("if",[CVar 0; CVar 1; CVar 2]); CVar 3; CVar 4]);
CProp
("if",
[CVar 0; CProp ("if",[CVar 1; CVar 3; CVar 4]);
CProp ("if",[CVar 2; CVar 3; CVar 4])])]));
add (CProp
("equal",
[CProp ("zerop",[CVar 23]);
CProp
("or",
[CProp ("equal",[CVar 23; CProp ("zero",[])]);
CProp ("not",[CProp ("numberp",[CVar 23])])])]));
add (CProp
("equal",
[CProp ("plus",[CProp ("plus",[CVar 23; CVar 24]); CVar 25]);
CProp ("plus",[CVar 23; CProp ("plus",[CVar 24; CVar 25])])]));
add (CProp
("equal",
[CProp ("equal",[CProp ("plus",[CVar 0; CVar 1]); CProp ("zero",[])]);
CProp ("and",[CProp ("zerop",[CVar 0]); CProp ("zerop",[CVar 1])])]));
add (CProp
("equal",[CProp ("difference",[CVar 23; CVar 23]); CProp ("zero",[])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("plus",[CVar 0; CVar 1]); CProp ("plus",[CVar 0; CVar 2])]);
CProp ("equal",[CProp ("fix",[CVar 1]); CProp ("fix",[CVar 2])])]));
add (CProp
("equal",
[CProp
("equal",[CProp ("zero",[]); CProp ("difference",[CVar 23; CVar 24])]);
CProp ("not",[CProp ("gt",[CVar 24; CVar 23])])]));
add (CProp
("equal",
[CProp ("equal",[CVar 23; CProp ("difference",[CVar 23; CVar 24])]);
CProp
("and",
[CProp ("numberp",[CVar 23]);
CProp
("or",
[CProp ("equal",[CVar 23; CProp ("zero",[])]);
CProp ("zerop",[CVar 24])])])]));
add (CProp
("equal",
[CProp
("meaning",
[CProp ("plus_tree",[CProp ("append",[CVar 23; CVar 24])]); CVar 0]);
CProp
("plus",
[CProp ("meaning",[CProp ("plus_tree",[CVar 23]); CVar 0]);
CProp ("meaning",[CProp ("plus_tree",[CVar 24]); CVar 0])])]));
add (CProp
("equal",
[CProp
("meaning",
[CProp ("plus_tree",[CProp ("plus_fringe",[CVar 23])]); CVar 0]);
CProp ("fix",[CProp ("meaning",[CVar 23; CVar 0])])]));
add (CProp
("equal",
[CProp ("append",[CProp ("append",[CVar 23; CVar 24]); CVar 25]);
CProp ("append",[CVar 23; CProp ("append",[CVar 24; CVar 25])])]));
add (CProp
("equal",
[CProp ("reverse",[CProp ("append",[CVar 0; CVar 1])]);
CProp
("append",[CProp ("reverse",[CVar 1]); CProp ("reverse",[CVar 0])])]));
add (CProp
("equal",
[CProp ("times",[CVar 23; CProp ("plus",[CVar 24; CVar 25])]);
CProp
("plus",
[CProp ("times",[CVar 23; CVar 24]);
CProp ("times",[CVar 23; CVar 25])])]));
add (CProp
("equal",
[CProp ("times",[CProp ("times",[CVar 23; CVar 24]); CVar 25]);
CProp ("times",[CVar 23; CProp ("times",[CVar 24; CVar 25])])]));
add (CProp
("equal",
[CProp
("equal",[CProp ("times",[CVar 23; CVar 24]); CProp ("zero",[])]);
CProp ("or",[CProp ("zerop",[CVar 23]); CProp ("zerop",[CVar 24])])]));
add (CProp
("equal",
[CProp ("exec",[CProp ("append",[CVar 23; CVar 24]); CVar 15; CVar 4]);
CProp
("exec",[CVar 24; CProp ("exec",[CVar 23; CVar 15; CVar 4]); CVar 4])]));
add (CProp
("equal",
[CProp ("mc_flatten",[CVar 23; CVar 24]);
CProp ("append",[CProp ("flatten",[CVar 23]); CVar 24])]));
add (CProp
("equal",
[CProp ("member",[CVar 23; CProp ("append",[CVar 0; CVar 1])]);
CProp
("or",
[CProp ("member",[CVar 23; CVar 0]);
CProp ("member",[CVar 23; CVar 1])])]));
add (CProp
("equal",
[CProp ("member",[CVar 23; CProp ("reverse",[CVar 24])]);
CProp ("member",[CVar 23; CVar 24])]));
add (CProp
("equal",
[CProp ("length",[CProp ("reverse",[CVar 23])]);
CProp ("length",[CVar 23])]));
add (CProp
("equal",
[CProp ("member",[CVar 0; CProp ("intersect",[CVar 1; CVar 2])]);
CProp
("and",
[CProp ("member",[CVar 0; CVar 1]); CProp ("member",[CVar 0; CVar 2])])]));
add (CProp
("equal",[CProp ("nth",[CProp ("zero",[]); CVar 8]); CProp ("zero",[])]));
add (CProp
("equal",
[CProp ("exp",[CVar 8; CProp ("plus",[CVar 9; CVar 10])]);
CProp
("times",
[CProp ("exp",[CVar 8; CVar 9]); CProp ("exp",[CVar 8; CVar 10])])]));
add (CProp
("equal",
[CProp ("exp",[CVar 8; CProp ("times",[CVar 9; CVar 10])]);
CProp ("exp",[CProp ("exp",[CVar 8; CVar 9]); CVar 10])]));
add (CProp
("equal",
[CProp ("reverse_loop",[CVar 23; CVar 24]);
CProp ("append",[CProp ("reverse",[CVar 23]); CVar 24])]));
add (CProp
("equal",
[CProp ("reverse_loop",[CVar 23; CProp ("nil",[])]);
CProp ("reverse",[CVar 23])]));
add (CProp
("equal",
[CProp ("count_list",[CVar 25; CProp ("sort_lp",[CVar 23; CVar 24])]);
CProp
("plus",
[CProp ("count_list",[CVar 25; CVar 23]);
CProp ("count_list",[CVar 25; CVar 24])])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("append",[CVar 0; CVar 1]); CProp ("append",[CVar 0; CVar 2])]);
CProp ("equal",[CVar 1; CVar 2])]));
add (CProp
("equal",
[CProp
("plus",
[CProp ("remainder",[CVar 23; CVar 24]);
CProp ("times",[CVar 24; CProp ("quotient",[CVar 23; CVar 24])])]);
CProp ("fix",[CVar 23])]));
add (CProp
("equal",
[CProp
("power_eval",[CProp ("big_plus",[CVar 11; CVar 8; CVar 1]); CVar 1]);
CProp ("plus",[CProp ("power_eval",[CVar 11; CVar 1]); CVar 8])]));
add (CProp
("equal",
[CProp
("power_eval",
[CProp ("big_plus",[CVar 23; CVar 24; CVar 8; CVar 1]); CVar 1]);
CProp
("plus",
[CVar 8;
CProp
("plus",
[CProp ("power_eval",[CVar 23; CVar 1]);
CProp ("power_eval",[CVar 24; CVar 1])])])]));
add (CProp
("equal",
[CProp ("remainder",[CVar 24; CProp ("one",[])]); CProp ("zero",[])]));
add (CProp
("equal",
[CProp ("lt",[CProp ("remainder",[CVar 23; CVar 24]); CVar 24]);
CProp ("not",[CProp ("zerop",[CVar 24])])]));
add (CProp
("equal",[CProp ("remainder",[CVar 23; CVar 23]); CProp ("zero",[])]));
add (CProp
("equal",
[CProp ("lt",[CProp ("quotient",[CVar 8; CVar 9]); CVar 8]);
CProp
("and",
[CProp ("not",[CProp ("zerop",[CVar 8])]);
CProp
("or",
[CProp ("zerop",[CVar 9]);
CProp ("not",[CProp ("equal",[CVar 9; CProp ("one",[])])])])])]));
add (CProp
("equal",
[CProp ("lt",[CProp ("remainder",[CVar 23; CVar 24]); CVar 23]);
CProp
("and",
[CProp ("not",[CProp ("zerop",[CVar 24])]);
CProp ("not",[CProp ("zerop",[CVar 23])]);
CProp ("not",[CProp ("lt",[CVar 23; CVar 24])])])]));
add (CProp
("equal",
[CProp ("power_eval",[CProp ("power_rep",[CVar 8; CVar 1]); CVar 1]);
CProp ("fix",[CVar 8])]));
add (CProp
("equal",
[CProp
("power_eval",
[CProp
("big_plus",
[CProp ("power_rep",[CVar 8; CVar 1]);
CProp ("power_rep",[CVar 9; CVar 1]); CProp ("zero",[]);
CVar 1]);
CVar 1]);
CProp ("plus",[CVar 8; CVar 9])]));
add (CProp
("equal",
[CProp ("gcd",[CVar 23; CVar 24]); CProp ("gcd",[CVar 24; CVar 23])]));
add (CProp
("equal",
[CProp ("nth",[CProp ("append",[CVar 0; CVar 1]); CVar 8]);
CProp
("append",
[CProp ("nth",[CVar 0; CVar 8]);
CProp
("nth",
[CVar 1; CProp ("difference",[CVar 8; CProp ("length",[CVar 0])])])])]));
add (CProp
("equal",
[CProp ("difference",[CProp ("plus",[CVar 23; CVar 24]); CVar 23]);
CProp ("fix",[CVar 24])]));
add (CProp
("equal",
[CProp ("difference",[CProp ("plus",[CVar 24; CVar 23]); CVar 23]);
CProp ("fix",[CVar 24])]));
add (CProp
("equal",
[CProp
("difference",
[CProp ("plus",[CVar 23; CVar 24]); CProp ("plus",[CVar 23; CVar 25])]);
CProp ("difference",[CVar 24; CVar 25])]));
add (CProp
("equal",
[CProp ("times",[CVar 23; CProp ("difference",[CVar 2; CVar 22])]);
CProp
("difference",
[CProp ("times",[CVar 2; CVar 23]);
CProp ("times",[CVar 22; CVar 23])])]));
add (CProp
("equal",
[CProp ("remainder",[CProp ("times",[CVar 23; CVar 25]); CVar 25]);
CProp ("zero",[])]));
add (CProp
("equal",
[CProp
("difference",
[CProp ("plus",[CVar 1; CProp ("plus",[CVar 0; CVar 2])]); CVar 0]);
CProp ("plus",[CVar 1; CVar 2])]));
add (CProp
("equal",
[CProp
("difference",
[CProp ("add1",[CProp ("plus",[CVar 24; CVar 25])]); CVar 25]);
CProp ("add1",[CVar 24])]));
add (CProp
("equal",
[CProp
("lt",
[CProp ("plus",[CVar 23; CVar 24]); CProp ("plus",[CVar 23; CVar 25])]);
CProp ("lt",[CVar 24; CVar 25])]));
add (CProp
("equal",
[CProp
("lt",
[CProp ("times",[CVar 23; CVar 25]);
CProp ("times",[CVar 24; CVar 25])]);
CProp
("and",
[CProp ("not",[CProp ("zerop",[CVar 25])]);
CProp ("lt",[CVar 23; CVar 24])])]));
add (CProp
("equal",
[CProp ("lt",[CVar 24; CProp ("plus",[CVar 23; CVar 24])]);
CProp ("not",[CProp ("zerop",[CVar 23])])]));
add (CProp
("equal",
[CProp
("gcd",
[CProp ("times",[CVar 23; CVar 25]);
CProp ("times",[CVar 24; CVar 25])]);
CProp ("times",[CVar 25; CProp ("gcd",[CVar 23; CVar 24])])]));
add (CProp
("equal",
[CProp ("value",[CProp ("normalize",[CVar 23]); CVar 0]);
CProp ("value",[CVar 23; CVar 0])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("flatten",[CVar 23]);
CProp ("cons",[CVar 24; CProp ("nil",[])])]);
CProp
("and",
[CProp ("nlistp",[CVar 23]); CProp ("equal",[CVar 23; CVar 24])])]));
add (CProp
("equal",
[CProp ("listp",[CProp ("gother",[CVar 23])]);
CProp ("listp",[CVar 23])]));
add (CProp
("equal",
[CProp ("samefringe",[CVar 23; CVar 24]);
CProp
("equal",[CProp ("flatten",[CVar 23]); CProp ("flatten",[CVar 24])])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("greatest_factor",[CVar 23; CVar 24]); CProp ("zero",[])]);
CProp
("and",
[CProp
("or",
[CProp ("zerop",[CVar 24]);
CProp ("equal",[CVar 24; CProp ("one",[])])]);
CProp ("equal",[CVar 23; CProp ("zero",[])])])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("greatest_factor",[CVar 23; CVar 24]); CProp ("one",[])]);
CProp ("equal",[CVar 23; CProp ("one",[])])]));
add (CProp
("equal",
[CProp ("numberp",[CProp ("greatest_factor",[CVar 23; CVar 24])]);
CProp
("not",
[CProp
("and",
[CProp
("or",
[CProp ("zerop",[CVar 24]);
CProp ("equal",[CVar 24; CProp ("one",[])])]);
CProp ("not",[CProp ("numberp",[CVar 23])])])])]));
add (CProp
("equal",
[CProp ("times_list",[CProp ("append",[CVar 23; CVar 24])]);
CProp
("times",
[CProp ("times_list",[CVar 23]); CProp ("times_list",[CVar 24])])]));
add (CProp
("equal",
[CProp ("prime_list",[CProp ("append",[CVar 23; CVar 24])]);
CProp
("and",
[CProp ("prime_list",[CVar 23]); CProp ("prime_list",[CVar 24])])]));
add (CProp
("equal",
[CProp ("equal",[CVar 25; CProp ("times",[CVar 22; CVar 25])]);
CProp
("and",
[CProp ("numberp",[CVar 25]);
CProp
("or",
[CProp ("equal",[CVar 25; CProp ("zero",[])]);
CProp ("equal",[CVar 22; CProp ("one",[])])])])]));
add (CProp
("equal",
[CProp ("ge",[CVar 23; CVar 24]);
CProp ("not",[CProp ("lt",[CVar 23; CVar 24])])]));
add (CProp
("equal",
[CProp ("equal",[CVar 23; CProp ("times",[CVar 23; CVar 24])]);
CProp
("or",
[CProp ("equal",[CVar 23; CProp ("zero",[])]);
CProp
("and",
[CProp ("numberp",[CVar 23]);
CProp ("equal",[CVar 24; CProp ("one",[])])])])]));
add (CProp
("equal",
[CProp ("remainder",[CProp ("times",[CVar 24; CVar 23]); CVar 24]);
CProp ("zero",[])]));
add (CProp
("equal",
[CProp ("equal",[CProp ("times",[CVar 0; CVar 1]); CProp ("one",[])]);
CProp
("and",
[CProp ("not",[CProp ("equal",[CVar 0; CProp ("zero",[])])]);
CProp ("not",[CProp ("equal",[CVar 1; CProp ("zero",[])])]);
CProp ("numberp",[CVar 0]); CProp ("numberp",[CVar 1]);
CProp ("equal",[CProp ("sub1",[CVar 0]); CProp ("zero",[])]);
CProp ("equal",[CProp ("sub1",[CVar 1]); CProp ("zero",[])])])]));
add (CProp
("equal",
[CProp
("lt",
[CProp ("length",[CProp ("delete",[CVar 23; CVar 11])]);
CProp ("length",[CVar 11])]);
CProp ("member",[CVar 23; CVar 11])]));
add (CProp
("equal",
[CProp ("sort2",[CProp ("delete",[CVar 23; CVar 11])]);
CProp ("delete",[CVar 23; CProp ("sort2",[CVar 11])])]));
add (CProp ("equal",[CProp ("dsort",[CVar 23]); CProp ("sort2",[CVar 23])]));
add (CProp
("equal",
[CProp
("length",
[CProp
("cons",
[CVar 0;
CProp
("cons",
[CVar 1;
CProp
("cons",
[CVar 2;
CProp
("cons",
[CVar 3;
CProp ("cons",[CVar 4; CProp ("cons",[CVar 5; CVar 6])])])])])])])
; CProp ("plus",[CProp ("six",[]); CProp ("length",[CVar 6])])]));
add (CProp
("equal",
[CProp
("difference",
[CProp ("add1",[CProp ("add1",[CVar 23])]); CProp ("two",[])]);
CProp ("fix",[CVar 23])]));
add (CProp
("equal",
[CProp
("quotient",
[CProp ("plus",[CVar 23; CProp ("plus",[CVar 23; CVar 24])]);
CProp ("two",[])]);
CProp
("plus",[CVar 23; CProp ("quotient",[CVar 24; CProp ("two",[])])])]));
add (CProp
("equal",
[CProp ("sigma",[CProp ("zero",[]); CVar 8]);
CProp
("quotient",
[CProp ("times",[CVar 8; CProp ("add1",[CVar 8])]); CProp ("two",[])])]));
add (CProp
("equal",
[CProp ("plus",[CVar 23; CProp ("add1",[CVar 24])]);
CProp
("if",
[CProp ("numberp",[CVar 24]);
CProp ("add1",[CProp ("plus",[CVar 23; CVar 24])]);
CProp ("add1",[CVar 23])])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("difference",[CVar 23; CVar 24]);
CProp ("difference",[CVar 25; CVar 24])]);
CProp
("if",
[CProp ("lt",[CVar 23; CVar 24]);
CProp ("not",[CProp ("lt",[CVar 24; CVar 25])]);
CProp
("if",
[CProp ("lt",[CVar 25; CVar 24]);
CProp ("not",[CProp ("lt",[CVar 24; CVar 23])]);
CProp ("equal",[CProp ("fix",[CVar 23]); CProp ("fix",[CVar 25])])])])])
);
add (CProp
("equal",
[CProp
("meaning",
[CProp ("plus_tree",[CProp ("delete",[CVar 23; CVar 24])]); CVar 0]);
CProp
("if",
[CProp ("member",[CVar 23; CVar 24]);
CProp
("difference",
[CProp ("meaning",[CProp ("plus_tree",[CVar 24]); CVar 0]);
CProp ("meaning",[CVar 23; CVar 0])]);
CProp ("meaning",[CProp ("plus_tree",[CVar 24]); CVar 0])])]));
add (CProp
("equal",
[CProp ("times",[CVar 23; CProp ("add1",[CVar 24])]);
CProp
("if",
[CProp ("numberp",[CVar 24]);
CProp
("plus",
[CVar 23; CProp ("times",[CVar 23; CVar 24]);
CProp ("fix",[CVar 23])])])]));
add (CProp
("equal",
[CProp ("nth",[CProp ("nil",[]); CVar 8]);
CProp
("if",[CProp ("zerop",[CVar 8]); CProp ("nil",[]); CProp ("zero",[])])]));
add (CProp
("equal",
[CProp ("last",[CProp ("append",[CVar 0; CVar 1])]);
CProp
("if",
[CProp ("listp",[CVar 1]); CProp ("last",[CVar 1]);
CProp
("if",
[CProp ("listp",[CVar 0]);
CProp ("cons",[CProp ("car",[CProp ("last",[CVar 0])]); CVar 1]);
CVar 1])])]));
add (CProp
("equal",
[CProp ("equal",[CProp ("lt",[CVar 23; CVar 24]); CVar 25]);
CProp
("if",
[CProp ("lt",[CVar 23; CVar 24]);
CProp ("equal",[CProp ("true",[]); CVar 25]);
CProp ("equal",[CProp ("false",[]); CVar 25])])]));
add (CProp
("equal",
[CProp ("assignment",[CVar 23; CProp ("append",[CVar 0; CVar 1])]);
CProp
("if",
[CProp ("assignedp",[CVar 23; CVar 0]);
CProp ("assignment",[CVar 23; CVar 0]);
CProp ("assignment",[CVar 23; CVar 1])])]));
add (CProp
("equal",
[CProp ("car",[CProp ("gother",[CVar 23])]);
CProp
("if",
[CProp ("listp",[CVar 23]);
CProp ("car",[CProp ("flatten",[CVar 23])]); CProp ("zero",[])])]));
add (CProp
("equal",
[CProp ("flatten",[CProp ("cdr",[CProp ("gother",[CVar 23])])]);
CProp
("if",
[CProp ("listp",[CVar 23]);
CProp ("cdr",[CProp ("flatten",[CVar 23])]);
CProp ("cons",[CProp ("zero",[]); CProp ("nil",[])])])]));
add (CProp
("equal",
[CProp ("quotient",[CProp ("times",[CVar 24; CVar 23]); CVar 24]);
CProp
("if",
[CProp ("zerop",[CVar 24]); CProp ("zero",[]);
CProp ("fix",[CVar 23])])]));
add (CProp
("equal",
[CProp ("get",[CVar 9; CProp ("set",[CVar 8; CVar 21; CVar 12])]);
CProp
("if",
[CProp ("eqp",[CVar 9; CVar 8]); CVar 21;
CProp ("get",[CVar 9; CVar 12])])]))
(* Tautology checker *)
(* [truep x lst] holds when [x] is the proposition named "true" or is
   assumed true by membership in [lst]. *)
let truep x lst =
  List.mem x lst
  || (match x with
      | Prop (head, _) -> head.name = "true"
      | Var _ -> false)

(* [falsep x lst] holds when [x] is the proposition named "false" or is
   assumed false by membership in [lst]. *)
and falsep x lst =
  List.mem x lst
  || (match x with
      | Prop (head, _) -> head.name = "false"
      | Var _ -> false)
(* Decide whether [x] is a tautology, given the lists of subterms
   currently assumed true and assumed false.  Only three-argument "if"
   propositions are analysed: the test is assumed true for the "yes"
   branch and false for the "no" branch.  Any proposition that does not
   have exactly three arguments is a program error. *)
let rec tautologyp x true_lst false_lst =
  if truep x true_lst then true
  else if falsep x false_lst then false
  else
    match x with
    | Var _ -> false
    | Prop (head, [test; yes; no]) when head.name = "if" ->
        if truep test true_lst then
          tautologyp yes true_lst false_lst
        else if falsep test false_lst then
          tautologyp no true_lst false_lst
        else
          tautologyp yes (test :: true_lst) false_lst
          && tautologyp no true_lst (test :: false_lst)
    | Prop (_, [_; _; _]) -> false
    | _ -> assert false
(* Normalise [x] by rewriting, echo the normal form on stdout, then run
   the tautology check on it with empty assumption lists. *)
let tautp x =
  let reduced = rewrite x in
  print_term reduced ;
  print_string " \n " ;
  tautologyp reduced [] []
(* the benchmark *)
(* Benchmark substitution: binds the propositional variables 23, 24, 25,
   20 and 22 of the goal [term] below to concrete formulas. *)
let subst =
(* v23 := f (plus (plus x0 x1) (plus x2 zero)) *)
[Bind(23, cterm_to_term(
CProp
("f",
[CProp
("plus",
[CProp ("plus",[CVar 0; CVar 1]);
CProp ("plus",[CVar 2; CProp ("zero",[])])])])));
(* v24 := f (times (times x0 x1) (plus x2 x3)) *)
Bind(24, cterm_to_term(
CProp
("f",
[CProp
("times",
[CProp ("times",[CVar 0; CVar 1]);
CProp ("plus",[CVar 2; CVar 3])])])));
(* v25 := f (reverse (append (append x0 x1) nil)) *)
Bind(25, cterm_to_term(
CProp
("f",
[CProp
("reverse",
[CProp
("append",
[CProp ("append",[CVar 0; CVar 1]);
CProp ("nil",[])])])])));
(* v20 := equal (plus x0 x1) (difference x23 x24) *)
Bind(20, cterm_to_term(
CProp
("equal",
[CProp ("plus",[CVar 0; CVar 1]);
CProp ("difference",[CVar 23; CVar 24])])));
(* v22 := lt (remainder x0 x1) (member x0 (length x1)) *)
Bind(22, cterm_to_term(
CProp
("lt",
[CProp ("remainder",[CVar 0; CVar 1]);
CProp ("member",[CVar 0; CProp ("length",[CVar 1])])])))]
(* Benchmark goal: the implication-chain formula
     ((v23 => v24) & (v24 => v25) & (v25 => v20) & (v20 => v22))
       => (v23 => v22),
   stated over the variables bound by [subst] above. *)
let term = cterm_to_term(
CProp
("implies",
[CProp
("and",
[CProp ("implies",[CVar 23; CVar 24]);
CProp
("and",
[CProp ("implies",[CVar 24; CVar 25]);
CProp
("and",
[CProp ("implies",[CVar 25; CVar 20]);
CProp ("implies",[CVar 20; CVar 22])])])]);
CProp ("implies",[CVar 23; CVar 22])]))
(* Driver: instantiate the goal with [subst] and run the prover 50
   times, reporting whether every run succeeded. *)
let _ =
  let all_proved = ref true in
  for _round = 1 to 50 do
    if not (tautp (apply_subst subst term)) then all_proved := false
  done;
  print_string (if !all_proved then "Proved!\n" else "Cannot prove!\n");
  exit 0
* * * * * * * *
with
failure s - >
print_string " Exception failure ( " ; print_string s ; print_string " ) \n "
| Unify - >
print_string " Exception Unify\n "
| match_failure(file , start , stop ) - >
print_string " Exception match_failure ( " ;
print_string file ;
print_string " , " ;
print_int start ;
print_string " , " ;
print_int stop ;
print_string " ) \n "
| _ - >
print_string " Exception ? \n "
* * * * * * * * *
with
failure s ->
print_string "Exception failure("; print_string s; print_string ")\n"
| Unify ->
print_string "Exception Unify\n"
| match_failure(file,start,stop) ->
print_string "Exception match_failure(";
print_string file;
print_string ",";
print_int start;
print_string ",";
print_int stop;
print_string ")\n"
| _ ->
print_string "Exception ?\n"
**********)
| null | https://raw.githubusercontent.com/OCamlPro/OCamlPro-OCaml-Branch/3a522985649389f89dac73e655d562c54f0456a5/inline-more/testsuite/tests/misc/boyer.ml | ocaml | *********************************************************************
Objective Caml
*********************************************************************
Manipulations over terms
Replacement for property lists
Substitutions
Tautology checker
print_term x; print_newline();
print_term x; print_string"\n";
the benchmark | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : boyer.ml 7017 2005 - 08 - 12 09:22:04Z xleroy $
(* Abstract syntax of Boyer terms: a numbered variable, or a proposition
   built from an interned head and its argument list. *)
type term =
Var of int
| Prop of head * term list
(* A head is interned by name; [props] is its mutable property list of
   (lhs, rhs) rewrite-lemma pairs, pushed by [add_lemma] below. *)
and head =
{ name: string;
mutable props: (term * term) list }
(* Print [t] on stdout in Lisp-like prefix notation: variables as "vN",
   propositions as "(name arg ...)". *)
let rec print_term = function
  | Var v ->
      print_string "v";
      print_int v
  | Prop (head, argl) ->
      print_string "(";
      print_string head.name;
      List.iter (fun arg -> print_string " "; print_term arg) argl;
      print_string ")"
(* Global registry of interned heads. *)
let lemmas = ref ([] : head list)

(* Return the head named [name], creating and registering a fresh one
   with an empty property list on first use. *)
let get name =
  let rec lookup = function
    | entry :: rest ->
        if entry.name = name then entry else lookup rest
    | [] ->
        let fresh = { name = name; props = [] } in
        lemmas := fresh :: !lemmas;
        fresh
  in
  lookup !lemmas
(* Register a rewrite lemma.  The lemma must be a two-argument
   proposition whose first argument is itself a proposition; the
   (lhs, rhs) pair is pushed onto the lhs head's property list.  Any
   other shape is a program error. *)
let add_lemma = function
  | Prop (_, [(Prop (headl, _) as left); right]) ->
      headl.props <- (left, right) :: headl.props
  | _ -> assert false
(* A substitution is a list of variable-number-to-term bindings. *)
type subst = Bind of int * term

(* Return the term bound to variable [v] in [list];
   raise [Failure "unbound"] when no binding exists. *)
let get_binding v list =
  let rec search = function
    | [] -> failwith "unbound"
    | Bind (w, t) :: rest -> if v = w then t else search rest
  in
  search list
(* Apply substitution [alist] to [term], replacing every bound variable
   by its binding.
   NOTE(review): an unbound variable falls back to the whole input
   [term], not the variable itself — this original quirk is preserved
   exactly (it is benign here because callers bind every variable). *)
let apply_subst alist term =
  let rec subst_rec = function
    | Var v -> (try get_binding v alist with Failure _ -> term)
    | Prop (head, argl) -> Prop (head, List.map subst_rec argl)
  in
  subst_rec term
(* Raised when pattern matching fails. *)
exception Unify

(* Match the pattern [term2] against the ground term [term1], returning
   the substitution that instantiates the pattern, or raising [Unify]
   on mismatch.  Heads are compared by physical equality, which is
   sound because [get] interns them. *)
let rec unify term1 term2 =
  unify1 term1 term2 []

and unify1 term1 term2 unify_subst =
  match term2 with
  | Var v ->
      (* A pattern variable must agree with any earlier binding. *)
      (try
         if get_binding v unify_subst = term1
         then unify_subst
         else raise Unify
       with Failure _ ->
         Bind (v, term1) :: unify_subst)
  | Prop (head2, argl2) ->
      (match term1 with
       | Var _ -> raise Unify
       | Prop (head1, argl1) ->
           if head1 == head2
           then unify1_lst argl1 argl2 unify_subst
           else raise Unify)

(* Match two argument lists position by position, threading the
   substitution; lists of different lengths never match. *)
and unify1_lst l1 l2 unify_subst =
  match l1, l2 with
  | [], [] -> unify_subst
  | h1 :: r1, h2 :: r2 -> unify1_lst r1 r2 (unify1 h1 h2 unify_subst)
  | _ -> raise Unify
(* Rewrite a term bottom-up to normal form: first normalise the
   arguments, then repeatedly apply the head's lemmas to the result. *)
let rec rewrite = function
  | Var _ as t -> t
  | Prop (head, argl) ->
      rewrite_with_lemmas (Prop (head, List.map rewrite argl)) head.props

(* Try each (lhs, rhs) pair in turn: on a successful match, rewrite the
   instantiated rhs; on [Unify] failure, fall through to the next pair;
   when no pair matches, the term is already in normal form. *)
and rewrite_with_lemmas term pairs =
  match pairs with
  | [] -> term
  | (lhs, rhs) :: rest ->
      (try rewrite (apply_subst (unify term lhs) rhs)
       with Unify -> rewrite_with_lemmas term rest)
(* Concrete input syntax: like [term] but with string-named
   propositions instead of interned heads. *)
type cterm = CVar of int | CProp of string * cterm list

(* Translate concrete syntax into a [term], interning each head. *)
let rec cterm_to_term = function
  | CVar v -> Var v
  | CProp (p, l) -> Prop (get p, List.map cterm_to_term l)

(* Register a rewrite lemma given in concrete syntax. *)
let add t = add_lemma (cterm_to_term t)
let _ =
add (CProp
("equal",
[CProp ("compile",[CVar 5]);
CProp
("reverse",
[CProp ("codegen",[CProp ("optimize",[CVar 5]); CProp ("nil",[])])])]));
add (CProp
("equal",
[CProp ("eqp",[CVar 23; CVar 24]);
CProp ("equal",[CProp ("fix",[CVar 23]); CProp ("fix",[CVar 24])])]));
add (CProp
("equal",
[CProp ("gt",[CVar 23; CVar 24]); CProp ("lt",[CVar 24; CVar 23])]));
add (CProp
("equal",
[CProp ("le",[CVar 23; CVar 24]); CProp ("ge",[CVar 24; CVar 23])]));
add (CProp
("equal",
[CProp ("ge",[CVar 23; CVar 24]); CProp ("le",[CVar 24; CVar 23])]));
add (CProp
("equal",
[CProp ("boolean",[CVar 23]);
CProp
("or",
[CProp ("equal",[CVar 23; CProp ("true",[])]);
CProp ("equal",[CVar 23; CProp ("false",[])])])]));
add (CProp
("equal",
[CProp ("iff",[CVar 23; CVar 24]);
CProp
("and",
[CProp ("implies",[CVar 23; CVar 24]);
CProp ("implies",[CVar 24; CVar 23])])]));
add (CProp
("equal",
[CProp ("even1",[CVar 23]);
CProp
("if",
[CProp ("zerop",[CVar 23]); CProp ("true",[]);
CProp ("odd",[CProp ("sub1",[CVar 23])])])]));
add (CProp
("equal",
[CProp ("countps_",[CVar 11; CVar 15]);
CProp ("countps_loop",[CVar 11; CVar 15; CProp ("zero",[])])]));
add (CProp
("equal",
[CProp ("fact_",[CVar 8]);
CProp ("fact_loop",[CVar 8; CProp ("one",[])])]));
add (CProp
("equal",
[CProp ("reverse_",[CVar 23]);
CProp ("reverse_loop",[CVar 23; CProp ("nil",[])])]));
add (CProp
("equal",
[CProp ("divides",[CVar 23; CVar 24]);
CProp ("zerop",[CProp ("remainder",[CVar 24; CVar 23])])]));
add (CProp
("equal",
[CProp ("assume_true",[CVar 21; CVar 0]);
CProp ("cons",[CProp ("cons",[CVar 21; CProp ("true",[])]); CVar 0])]));
add (CProp
("equal",
[CProp ("assume_false",[CVar 21; CVar 0]);
CProp ("cons",[CProp ("cons",[CVar 21; CProp ("false",[])]); CVar 0])]));
add (CProp
("equal",
[CProp ("tautology_checker",[CVar 23]);
CProp ("tautologyp",[CProp ("normalize",[CVar 23]); CProp ("nil",[])])]));
add (CProp
("equal",
[CProp ("falsify",[CVar 23]);
CProp ("falsify1",[CProp ("normalize",[CVar 23]); CProp ("nil",[])])]));
add (CProp
("equal",
[CProp ("prime",[CVar 23]);
CProp
("and",
[CProp ("not",[CProp ("zerop",[CVar 23])]);
CProp
("not",
[CProp ("equal",[CVar 23; CProp ("add1",[CProp ("zero",[])])])]);
CProp ("prime1",[CVar 23; CProp ("sub1",[CVar 23])])])]));
add (CProp
("equal",
[CProp ("and",[CVar 15; CVar 16]);
CProp
("if",
[CVar 15;
CProp ("if",[CVar 16; CProp ("true",[]); CProp ("false",[])]);
CProp ("false",[])])]));
add (CProp
("equal",
[CProp ("or",[CVar 15; CVar 16]);
CProp
("if",
[CVar 15; CProp ("true",[]);
CProp ("if",[CVar 16; CProp ("true",[]); CProp ("false",[])]);
CProp ("false",[])])]));
add (CProp
("equal",
[CProp ("not",[CVar 15]);
CProp ("if",[CVar 15; CProp ("false",[]); CProp ("true",[])])]));
add (CProp
("equal",
[CProp ("implies",[CVar 15; CVar 16]);
CProp
("if",
[CVar 15;
CProp ("if",[CVar 16; CProp ("true",[]); CProp ("false",[])]);
CProp ("true",[])])]));
add (CProp
("equal",
[CProp ("fix",[CVar 23]);
CProp ("if",[CProp ("numberp",[CVar 23]); CVar 23; CProp ("zero",[])])]));
add (CProp
("equal",
[CProp ("if",[CProp ("if",[CVar 0; CVar 1; CVar 2]); CVar 3; CVar 4]);
CProp
("if",
[CVar 0; CProp ("if",[CVar 1; CVar 3; CVar 4]);
CProp ("if",[CVar 2; CVar 3; CVar 4])])]));
add (CProp
("equal",
[CProp ("zerop",[CVar 23]);
CProp
("or",
[CProp ("equal",[CVar 23; CProp ("zero",[])]);
CProp ("not",[CProp ("numberp",[CVar 23])])])]));
add (CProp
("equal",
[CProp ("plus",[CProp ("plus",[CVar 23; CVar 24]); CVar 25]);
CProp ("plus",[CVar 23; CProp ("plus",[CVar 24; CVar 25])])]));
add (CProp
("equal",
[CProp ("equal",[CProp ("plus",[CVar 0; CVar 1]); CProp ("zero",[])]);
CProp ("and",[CProp ("zerop",[CVar 0]); CProp ("zerop",[CVar 1])])]));
add (CProp
("equal",[CProp ("difference",[CVar 23; CVar 23]); CProp ("zero",[])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("plus",[CVar 0; CVar 1]); CProp ("plus",[CVar 0; CVar 2])]);
CProp ("equal",[CProp ("fix",[CVar 1]); CProp ("fix",[CVar 2])])]));
add (CProp
("equal",
[CProp
("equal",[CProp ("zero",[]); CProp ("difference",[CVar 23; CVar 24])]);
CProp ("not",[CProp ("gt",[CVar 24; CVar 23])])]));
add (CProp
("equal",
[CProp ("equal",[CVar 23; CProp ("difference",[CVar 23; CVar 24])]);
CProp
("and",
[CProp ("numberp",[CVar 23]);
CProp
("or",
[CProp ("equal",[CVar 23; CProp ("zero",[])]);
CProp ("zerop",[CVar 24])])])]));
add (CProp
("equal",
[CProp
("meaning",
[CProp ("plus_tree",[CProp ("append",[CVar 23; CVar 24])]); CVar 0]);
CProp
("plus",
[CProp ("meaning",[CProp ("plus_tree",[CVar 23]); CVar 0]);
CProp ("meaning",[CProp ("plus_tree",[CVar 24]); CVar 0])])]));
add (CProp
("equal",
[CProp
("meaning",
[CProp ("plus_tree",[CProp ("plus_fringe",[CVar 23])]); CVar 0]);
CProp ("fix",[CProp ("meaning",[CVar 23; CVar 0])])]));
add (CProp
("equal",
[CProp ("append",[CProp ("append",[CVar 23; CVar 24]); CVar 25]);
CProp ("append",[CVar 23; CProp ("append",[CVar 24; CVar 25])])]));
add (CProp
("equal",
[CProp ("reverse",[CProp ("append",[CVar 0; CVar 1])]);
CProp
("append",[CProp ("reverse",[CVar 1]); CProp ("reverse",[CVar 0])])]));
add (CProp
("equal",
[CProp ("times",[CVar 23; CProp ("plus",[CVar 24; CVar 25])]);
CProp
("plus",
[CProp ("times",[CVar 23; CVar 24]);
CProp ("times",[CVar 23; CVar 25])])]));
add (CProp
("equal",
[CProp ("times",[CProp ("times",[CVar 23; CVar 24]); CVar 25]);
CProp ("times",[CVar 23; CProp ("times",[CVar 24; CVar 25])])]));
add (CProp
("equal",
[CProp
("equal",[CProp ("times",[CVar 23; CVar 24]); CProp ("zero",[])]);
CProp ("or",[CProp ("zerop",[CVar 23]); CProp ("zerop",[CVar 24])])]));
add (CProp
("equal",
[CProp ("exec",[CProp ("append",[CVar 23; CVar 24]); CVar 15; CVar 4]);
CProp
("exec",[CVar 24; CProp ("exec",[CVar 23; CVar 15; CVar 4]); CVar 4])]));
add (CProp
("equal",
[CProp ("mc_flatten",[CVar 23; CVar 24]);
CProp ("append",[CProp ("flatten",[CVar 23]); CVar 24])]));
add (CProp
("equal",
[CProp ("member",[CVar 23; CProp ("append",[CVar 0; CVar 1])]);
CProp
("or",
[CProp ("member",[CVar 23; CVar 0]);
CProp ("member",[CVar 23; CVar 1])])]));
add (CProp
("equal",
[CProp ("member",[CVar 23; CProp ("reverse",[CVar 24])]);
CProp ("member",[CVar 23; CVar 24])]));
add (CProp
("equal",
[CProp ("length",[CProp ("reverse",[CVar 23])]);
CProp ("length",[CVar 23])]));
add (CProp
("equal",
[CProp ("member",[CVar 0; CProp ("intersect",[CVar 1; CVar 2])]);
CProp
("and",
[CProp ("member",[CVar 0; CVar 1]); CProp ("member",[CVar 0; CVar 2])])]));
add (CProp
("equal",[CProp ("nth",[CProp ("zero",[]); CVar 8]); CProp ("zero",[])]));
add (CProp
("equal",
[CProp ("exp",[CVar 8; CProp ("plus",[CVar 9; CVar 10])]);
CProp
("times",
[CProp ("exp",[CVar 8; CVar 9]); CProp ("exp",[CVar 8; CVar 10])])]));
add (CProp
("equal",
[CProp ("exp",[CVar 8; CProp ("times",[CVar 9; CVar 10])]);
CProp ("exp",[CProp ("exp",[CVar 8; CVar 9]); CVar 10])]));
add (CProp
("equal",
[CProp ("reverse_loop",[CVar 23; CVar 24]);
CProp ("append",[CProp ("reverse",[CVar 23]); CVar 24])]));
add (CProp
("equal",
[CProp ("reverse_loop",[CVar 23; CProp ("nil",[])]);
CProp ("reverse",[CVar 23])]));
add (CProp
("equal",
[CProp ("count_list",[CVar 25; CProp ("sort_lp",[CVar 23; CVar 24])]);
CProp
("plus",
[CProp ("count_list",[CVar 25; CVar 23]);
CProp ("count_list",[CVar 25; CVar 24])])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("append",[CVar 0; CVar 1]); CProp ("append",[CVar 0; CVar 2])]);
CProp ("equal",[CVar 1; CVar 2])]));
add (CProp
("equal",
[CProp
("plus",
[CProp ("remainder",[CVar 23; CVar 24]);
CProp ("times",[CVar 24; CProp ("quotient",[CVar 23; CVar 24])])]);
CProp ("fix",[CVar 23])]));
add (CProp
("equal",
[CProp
("power_eval",[CProp ("big_plus",[CVar 11; CVar 8; CVar 1]); CVar 1]);
CProp ("plus",[CProp ("power_eval",[CVar 11; CVar 1]); CVar 8])]));
add (CProp
("equal",
[CProp
("power_eval",
[CProp ("big_plus",[CVar 23; CVar 24; CVar 8; CVar 1]); CVar 1]);
CProp
("plus",
[CVar 8;
CProp
("plus",
[CProp ("power_eval",[CVar 23; CVar 1]);
CProp ("power_eval",[CVar 24; CVar 1])])])]));
add (CProp
("equal",
[CProp ("remainder",[CVar 24; CProp ("one",[])]); CProp ("zero",[])]));
add (CProp
("equal",
[CProp ("lt",[CProp ("remainder",[CVar 23; CVar 24]); CVar 24]);
CProp ("not",[CProp ("zerop",[CVar 24])])]));
add (CProp
("equal",[CProp ("remainder",[CVar 23; CVar 23]); CProp ("zero",[])]));
add (CProp
("equal",
[CProp ("lt",[CProp ("quotient",[CVar 8; CVar 9]); CVar 8]);
CProp
("and",
[CProp ("not",[CProp ("zerop",[CVar 8])]);
CProp
("or",
[CProp ("zerop",[CVar 9]);
CProp ("not",[CProp ("equal",[CVar 9; CProp ("one",[])])])])])]));
add (CProp
("equal",
[CProp ("lt",[CProp ("remainder",[CVar 23; CVar 24]); CVar 23]);
CProp
("and",
[CProp ("not",[CProp ("zerop",[CVar 24])]);
CProp ("not",[CProp ("zerop",[CVar 23])]);
CProp ("not",[CProp ("lt",[CVar 23; CVar 24])])])]));
add (CProp
("equal",
[CProp ("power_eval",[CProp ("power_rep",[CVar 8; CVar 1]); CVar 1]);
CProp ("fix",[CVar 8])]));
add (CProp
("equal",
[CProp
("power_eval",
[CProp
("big_plus",
[CProp ("power_rep",[CVar 8; CVar 1]);
CProp ("power_rep",[CVar 9; CVar 1]); CProp ("zero",[]);
CVar 1]);
CVar 1]);
CProp ("plus",[CVar 8; CVar 9])]));
add (CProp
("equal",
[CProp ("gcd",[CVar 23; CVar 24]); CProp ("gcd",[CVar 24; CVar 23])]));
add (CProp
("equal",
[CProp ("nth",[CProp ("append",[CVar 0; CVar 1]); CVar 8]);
CProp
("append",
[CProp ("nth",[CVar 0; CVar 8]);
CProp
("nth",
[CVar 1; CProp ("difference",[CVar 8; CProp ("length",[CVar 0])])])])]));
add (CProp
("equal",
[CProp ("difference",[CProp ("plus",[CVar 23; CVar 24]); CVar 23]);
CProp ("fix",[CVar 24])]));
add (CProp
("equal",
[CProp ("difference",[CProp ("plus",[CVar 24; CVar 23]); CVar 23]);
CProp ("fix",[CVar 24])]));
add (CProp
("equal",
[CProp
("difference",
[CProp ("plus",[CVar 23; CVar 24]); CProp ("plus",[CVar 23; CVar 25])]);
CProp ("difference",[CVar 24; CVar 25])]));
add (CProp
("equal",
[CProp ("times",[CVar 23; CProp ("difference",[CVar 2; CVar 22])]);
CProp
("difference",
[CProp ("times",[CVar 2; CVar 23]);
CProp ("times",[CVar 22; CVar 23])])]));
add (CProp
("equal",
[CProp ("remainder",[CProp ("times",[CVar 23; CVar 25]); CVar 25]);
CProp ("zero",[])]));
add (CProp
("equal",
[CProp
("difference",
[CProp ("plus",[CVar 1; CProp ("plus",[CVar 0; CVar 2])]); CVar 0]);
CProp ("plus",[CVar 1; CVar 2])]));
add (CProp
("equal",
[CProp
("difference",
[CProp ("add1",[CProp ("plus",[CVar 24; CVar 25])]); CVar 25]);
CProp ("add1",[CVar 24])]));
add (CProp
("equal",
[CProp
("lt",
[CProp ("plus",[CVar 23; CVar 24]); CProp ("plus",[CVar 23; CVar 25])]);
CProp ("lt",[CVar 24; CVar 25])]));
add (CProp
("equal",
[CProp
("lt",
[CProp ("times",[CVar 23; CVar 25]);
CProp ("times",[CVar 24; CVar 25])]);
CProp
("and",
[CProp ("not",[CProp ("zerop",[CVar 25])]);
CProp ("lt",[CVar 23; CVar 24])])]));
add (CProp
("equal",
[CProp ("lt",[CVar 24; CProp ("plus",[CVar 23; CVar 24])]);
CProp ("not",[CProp ("zerop",[CVar 23])])]));
add (CProp
("equal",
[CProp
("gcd",
[CProp ("times",[CVar 23; CVar 25]);
CProp ("times",[CVar 24; CVar 25])]);
CProp ("times",[CVar 25; CProp ("gcd",[CVar 23; CVar 24])])]));
add (CProp
("equal",
[CProp ("value",[CProp ("normalize",[CVar 23]); CVar 0]);
CProp ("value",[CVar 23; CVar 0])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("flatten",[CVar 23]);
CProp ("cons",[CVar 24; CProp ("nil",[])])]);
CProp
("and",
[CProp ("nlistp",[CVar 23]); CProp ("equal",[CVar 23; CVar 24])])]));
add (CProp
("equal",
[CProp ("listp",[CProp ("gother",[CVar 23])]);
CProp ("listp",[CVar 23])]));
add (CProp
("equal",
[CProp ("samefringe",[CVar 23; CVar 24]);
CProp
("equal",[CProp ("flatten",[CVar 23]); CProp ("flatten",[CVar 24])])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("greatest_factor",[CVar 23; CVar 24]); CProp ("zero",[])]);
CProp
("and",
[CProp
("or",
[CProp ("zerop",[CVar 24]);
CProp ("equal",[CVar 24; CProp ("one",[])])]);
CProp ("equal",[CVar 23; CProp ("zero",[])])])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("greatest_factor",[CVar 23; CVar 24]); CProp ("one",[])]);
CProp ("equal",[CVar 23; CProp ("one",[])])]));
add (CProp
("equal",
[CProp ("numberp",[CProp ("greatest_factor",[CVar 23; CVar 24])]);
CProp
("not",
[CProp
("and",
[CProp
("or",
[CProp ("zerop",[CVar 24]);
CProp ("equal",[CVar 24; CProp ("one",[])])]);
CProp ("not",[CProp ("numberp",[CVar 23])])])])]));
add (CProp
("equal",
[CProp ("times_list",[CProp ("append",[CVar 23; CVar 24])]);
CProp
("times",
[CProp ("times_list",[CVar 23]); CProp ("times_list",[CVar 24])])]));
add (CProp
("equal",
[CProp ("prime_list",[CProp ("append",[CVar 23; CVar 24])]);
CProp
("and",
[CProp ("prime_list",[CVar 23]); CProp ("prime_list",[CVar 24])])]));
add (CProp
("equal",
[CProp ("equal",[CVar 25; CProp ("times",[CVar 22; CVar 25])]);
CProp
("and",
[CProp ("numberp",[CVar 25]);
CProp
("or",
[CProp ("equal",[CVar 25; CProp ("zero",[])]);
CProp ("equal",[CVar 22; CProp ("one",[])])])])]));
add (CProp
("equal",
[CProp ("ge",[CVar 23; CVar 24]);
CProp ("not",[CProp ("lt",[CVar 23; CVar 24])])]));
add (CProp
("equal",
[CProp ("equal",[CVar 23; CProp ("times",[CVar 23; CVar 24])]);
CProp
("or",
[CProp ("equal",[CVar 23; CProp ("zero",[])]);
CProp
("and",
[CProp ("numberp",[CVar 23]);
CProp ("equal",[CVar 24; CProp ("one",[])])])])]));
add (CProp
("equal",
[CProp ("remainder",[CProp ("times",[CVar 24; CVar 23]); CVar 24]);
CProp ("zero",[])]));
add (CProp
("equal",
[CProp ("equal",[CProp ("times",[CVar 0; CVar 1]); CProp ("one",[])]);
CProp
("and",
[CProp ("not",[CProp ("equal",[CVar 0; CProp ("zero",[])])]);
CProp ("not",[CProp ("equal",[CVar 1; CProp ("zero",[])])]);
CProp ("numberp",[CVar 0]); CProp ("numberp",[CVar 1]);
CProp ("equal",[CProp ("sub1",[CVar 0]); CProp ("zero",[])]);
CProp ("equal",[CProp ("sub1",[CVar 1]); CProp ("zero",[])])])]));
add (CProp
("equal",
[CProp
("lt",
[CProp ("length",[CProp ("delete",[CVar 23; CVar 11])]);
CProp ("length",[CVar 11])]);
CProp ("member",[CVar 23; CVar 11])]));
add (CProp
("equal",
[CProp ("sort2",[CProp ("delete",[CVar 23; CVar 11])]);
CProp ("delete",[CVar 23; CProp ("sort2",[CVar 11])])]));
add (CProp ("equal",[CProp ("dsort",[CVar 23]); CProp ("sort2",[CVar 23])]));
add (CProp
("equal",
[CProp
("length",
[CProp
("cons",
[CVar 0;
CProp
("cons",
[CVar 1;
CProp
("cons",
[CVar 2;
CProp
("cons",
[CVar 3;
CProp ("cons",[CVar 4; CProp ("cons",[CVar 5; CVar 6])])])])])])])
; CProp ("plus",[CProp ("six",[]); CProp ("length",[CVar 6])])]));
add (CProp
("equal",
[CProp
("difference",
[CProp ("add1",[CProp ("add1",[CVar 23])]); CProp ("two",[])]);
CProp ("fix",[CVar 23])]));
add (CProp
("equal",
[CProp
("quotient",
[CProp ("plus",[CVar 23; CProp ("plus",[CVar 23; CVar 24])]);
CProp ("two",[])]);
CProp
("plus",[CVar 23; CProp ("quotient",[CVar 24; CProp ("two",[])])])]));
add (CProp
("equal",
[CProp ("sigma",[CProp ("zero",[]); CVar 8]);
CProp
("quotient",
[CProp ("times",[CVar 8; CProp ("add1",[CVar 8])]); CProp ("two",[])])]));
add (CProp
("equal",
[CProp ("plus",[CVar 23; CProp ("add1",[CVar 24])]);
CProp
("if",
[CProp ("numberp",[CVar 24]);
CProp ("add1",[CProp ("plus",[CVar 23; CVar 24])]);
CProp ("add1",[CVar 23])])]));
add (CProp
("equal",
[CProp
("equal",
[CProp ("difference",[CVar 23; CVar 24]);
CProp ("difference",[CVar 25; CVar 24])]);
CProp
("if",
[CProp ("lt",[CVar 23; CVar 24]);
CProp ("not",[CProp ("lt",[CVar 24; CVar 25])]);
CProp
("if",
[CProp ("lt",[CVar 25; CVar 24]);
CProp ("not",[CProp ("lt",[CVar 24; CVar 23])]);
CProp ("equal",[CProp ("fix",[CVar 23]); CProp ("fix",[CVar 25])])])])])
);
add (CProp
("equal",
[CProp
("meaning",
[CProp ("plus_tree",[CProp ("delete",[CVar 23; CVar 24])]); CVar 0]);
CProp
("if",
[CProp ("member",[CVar 23; CVar 24]);
CProp
("difference",
[CProp ("meaning",[CProp ("plus_tree",[CVar 24]); CVar 0]);
CProp ("meaning",[CVar 23; CVar 0])]);
CProp ("meaning",[CProp ("plus_tree",[CVar 24]); CVar 0])])]));
add (CProp
("equal",
[CProp ("times",[CVar 23; CProp ("add1",[CVar 24])]);
CProp
("if",
[CProp ("numberp",[CVar 24]);
CProp
("plus",
[CVar 23; CProp ("times",[CVar 23; CVar 24]);
CProp ("fix",[CVar 23])])])]));
add (CProp
("equal",
[CProp ("nth",[CProp ("nil",[]); CVar 8]);
CProp
("if",[CProp ("zerop",[CVar 8]); CProp ("nil",[]); CProp ("zero",[])])]));
add (CProp
("equal",
[CProp ("last",[CProp ("append",[CVar 0; CVar 1])]);
CProp
("if",
[CProp ("listp",[CVar 1]); CProp ("last",[CVar 1]);
CProp
("if",
[CProp ("listp",[CVar 0]);
CProp ("cons",[CProp ("car",[CProp ("last",[CVar 0])]); CVar 1]);
CVar 1])])]));
add (CProp
("equal",
[CProp ("equal",[CProp ("lt",[CVar 23; CVar 24]); CVar 25]);
CProp
("if",
[CProp ("lt",[CVar 23; CVar 24]);
CProp ("equal",[CProp ("true",[]); CVar 25]);
CProp ("equal",[CProp ("false",[]); CVar 25])])]));
add (CProp
("equal",
[CProp ("assignment",[CVar 23; CProp ("append",[CVar 0; CVar 1])]);
CProp
("if",
[CProp ("assignedp",[CVar 23; CVar 0]);
CProp ("assignment",[CVar 23; CVar 0]);
CProp ("assignment",[CVar 23; CVar 1])])]));
add (CProp
("equal",
[CProp ("car",[CProp ("gother",[CVar 23])]);
CProp
("if",
[CProp ("listp",[CVar 23]);
CProp ("car",[CProp ("flatten",[CVar 23])]); CProp ("zero",[])])]));
add (CProp
("equal",
[CProp ("flatten",[CProp ("cdr",[CProp ("gother",[CVar 23])])]);
CProp
("if",
[CProp ("listp",[CVar 23]);
CProp ("cdr",[CProp ("flatten",[CVar 23])]);
CProp ("cons",[CProp ("zero",[]); CProp ("nil",[])])])]));
add (CProp
("equal",
[CProp ("quotient",[CProp ("times",[CVar 24; CVar 23]); CVar 24]);
CProp
("if",
[CProp ("zerop",[CVar 24]); CProp ("zero",[]);
CProp ("fix",[CVar 23])])]));
add (CProp
("equal",
[CProp ("get",[CVar 9; CProp ("set",[CVar 8; CVar 21; CVar 12])]);
CProp
("if",
[CProp ("eqp",[CVar 9; CVar 8]); CVar 21;
CProp ("get",[CVar 9; CVar 12])])]))
let truep x lst =
match x with
Prop(head, _) ->
head.name = "true" || List.mem x lst
| _ ->
List.mem x lst
and falsep x lst =
match x with
Prop(head, _) ->
head.name = "false" || List.mem x lst
| _ ->
List.mem x lst
let rec tautologyp x true_lst false_lst =
if truep x true_lst then true else
if falsep x false_lst then false else begin
match x with
Var _ -> false
| Prop (head,[test; yes; no]) ->
if head.name = "if" then
if truep test true_lst then
tautologyp yes true_lst false_lst
else if falsep test false_lst then
tautologyp no true_lst false_lst
else tautologyp yes (test::true_lst) false_lst &&
tautologyp no true_lst (test::false_lst)
else
false
| _ -> assert false
end
let tautp x =
let y = rewrite x in
print_term y ; print_string " \n " ;
tautologyp y [] []
let subst =
[Bind(23, cterm_to_term(
CProp
("f",
[CProp
("plus",
[CProp ("plus",[CVar 0; CVar 1]);
CProp ("plus",[CVar 2; CProp ("zero",[])])])])));
Bind(24, cterm_to_term(
CProp
("f",
[CProp
("times",
[CProp ("times",[CVar 0; CVar 1]);
CProp ("plus",[CVar 2; CVar 3])])])));
Bind(25, cterm_to_term(
CProp
("f",
[CProp
("reverse",
[CProp
("append",
[CProp ("append",[CVar 0; CVar 1]);
CProp ("nil",[])])])])));
Bind(20, cterm_to_term(
CProp
("equal",
[CProp ("plus",[CVar 0; CVar 1]);
CProp ("difference",[CVar 23; CVar 24])])));
Bind(22, cterm_to_term(
CProp
("lt",
[CProp ("remainder",[CVar 0; CVar 1]);
CProp ("member",[CVar 0; CProp ("length",[CVar 1])])])))]
let term = cterm_to_term(
CProp
("implies",
[CProp
("and",
[CProp ("implies",[CVar 23; CVar 24]);
CProp
("and",
[CProp ("implies",[CVar 24; CVar 25]);
CProp
("and",
[CProp ("implies",[CVar 25; CVar 20]);
CProp ("implies",[CVar 20; CVar 22])])])]);
CProp ("implies",[CVar 23; CVar 22])]))
let _ =
let ok = ref true in
for i = 1 to 50 do
if not (tautp (apply_subst subst term)) then ok := false
done;
if !ok then
print_string "Proved!\n"
else
print_string "Cannot prove!\n";
exit 0
* * * * * * * *
with
failure s - >
print_string " Exception failure ( " ; print_string s ; print_string " ) \n "
| Unify - >
print_string " Exception Unify\n "
| match_failure(file , start , stop ) - >
print_string " Exception match_failure ( " ;
print_string file ;
print_string " , " ;
print_int start ;
print_string " , " ;
print_int stop ;
print_string " ) \n "
| _ - >
print_string " Exception ? \n "
* * * * * * * * *
with
failure s ->
print_string "Exception failure("; print_string s; print_string ")\n"
| Unify ->
print_string "Exception Unify\n"
| match_failure(file,start,stop) ->
print_string "Exception match_failure(";
print_string file;
print_string ",";
print_int start;
print_string ",";
print_int stop;
print_string ")\n"
| _ ->
print_string "Exception ?\n"
**********)
|
067ac1795cdb2007a5185c99a40398b9d675fd2d05071e8c8d696199aaef299d | 1Jajen1/Brokkr | Position.hs | # LANGUAGE PatternSynonyms #
# LANGUAGE TypeFamilies #
# LANGUAGE DataKinds #
# LANGUAGE UndecidableInstances #
module Util.Position (
Position(..)
, pattern Position
, Falling(..)
) where
import Data.Word
import Foreign.Storable
import FlatParse.Basic (empty)
import Util.Binary
import Util.Linear.V3
import Util.Linear.Vector
import Hecs (Component, ViaBox, ViaFlat)
newtype Position = Pos (V3 Double)
deriving stock Show
deriving newtype (Eq, Storable)
deriving Component via (ViaFlat Position)
deriving newtype instance VectorSpace Double Position
pattern Position :: Double -> Double -> Double -> Position
pattern Position x y z = Pos (V3_Double x y z)
{-# COMPLETE Position #-}
instance ToBinary Position where
put (Position x y z) = put x <> put y <> put z
instance FromBinary Position where
get = Position <$> get <*> get <*> get
data Falling = OnGround | Falling
deriving stock (Show, Eq)
TODO As Tags ? If I ever need to iterate only Falling or only OnGround ( like in Physics , I may do that )
instance ToBinary Falling where
put OnGround = put @Word8 1
put Falling = put @Word8 0
instance FromBinary Falling where
get = get @Word8 >>= \case
0 -> pure Falling
1 -> pure OnGround
_ -> empty
| null | https://raw.githubusercontent.com/1Jajen1/Brokkr/1c93519fdc3490091205e8499ed04cd1fee66192/src/Util/Position.hs | haskell | # COMPLETE Position # | # LANGUAGE PatternSynonyms #
# LANGUAGE TypeFamilies #
# LANGUAGE DataKinds #
# LANGUAGE UndecidableInstances #
module Util.Position (
Position(..)
, pattern Position
, Falling(..)
) where
import Data.Word
import Foreign.Storable
import FlatParse.Basic (empty)
import Util.Binary
import Util.Linear.V3
import Util.Linear.Vector
import Hecs (Component, ViaBox, ViaFlat)
newtype Position = Pos (V3 Double)
deriving stock Show
deriving newtype (Eq, Storable)
deriving Component via (ViaFlat Position)
deriving newtype instance VectorSpace Double Position
pattern Position :: Double -> Double -> Double -> Position
pattern Position x y z = Pos (V3_Double x y z)
instance ToBinary Position where
put (Position x y z) = put x <> put y <> put z
instance FromBinary Position where
get = Position <$> get <*> get <*> get
data Falling = OnGround | Falling
deriving stock (Show, Eq)
TODO As Tags ? If I ever need to iterate only Falling or only OnGround ( like in Physics , I may do that )
instance ToBinary Falling where
put OnGround = put @Word8 1
put Falling = put @Word8 0
instance FromBinary Falling where
get = get @Word8 >>= \case
0 -> pure Falling
1 -> pure OnGround
_ -> empty
|
96442ad84a56919792c2e7e0f20c898cbc553cc21c5906cd60694f7262685026 | binaryage/cljs-devtools | logging.clj | (ns devtools-sample.logging)
(defmacro log [& args]
`(.log js/console ~@args))
(defmacro info [& args]
`(.info js/console ~@args))
| null | https://raw.githubusercontent.com/binaryage/cljs-devtools/d07fc6d404479b1ddd32cecc105009de77e3cba7/examples/lein/src/demo/devtools_sample/logging.clj | clojure | (ns devtools-sample.logging)
(defmacro log [& args]
`(.log js/console ~@args))
(defmacro info [& args]
`(.info js/console ~@args))
| |
9c151dc1130dc792f55f568e527e1177c0e8ba1f102c2352206be0cdd133b7a5 | graphql-erlang/graphql | graphql_type_string.erl | -module(graphql_type_string).
-export([
type/0
]).
type()-> #{
kind => 'SCALAR',
name => 'String',
ofType => null,
description => <<
"The `String` scalar type represents textual data, represented as UTF-8",
"character sequences. The String type is most often used by GraphQL to",
"represent free-form human-readable text."
>>,
serialize => fun serialize/3,
parse_value => fun parse_value/2,
parse_literal => fun parse_literal/2
}.
serialize(Value,_,_) -> coerce(Value).
parse_value(null,_) -> null;
parse_value(#{kind := <<"StringValue">>, value := Value}, _) -> coerce(Value);
parse_value(#{kind := 'StringValue', value := Value}, _) -> coerce(Value).
-spec parse_literal(map(), map()) -> binary().
parse_literal(null, _) -> null;
parse_literal(#{kind := 'StringValue', value := Value}, _) -> Value;
parse_literal(#{kind := Kind}, _) ->
throw({error, type_validation, <<"Unexpected type ", (atom_to_binary(Kind, utf8))/binary, ", expected StringValue">>}).
-spec coerce(atom() | binary() | list()) -> binary().
coerce(null) -> null;
coerce(Value) when is_atom(Value) -> atom_to_binary(Value, utf8);
coerce(Value) when is_binary(Value) -> Value;
coerce(Value) when is_list(Value) -> list_to_binary(Value);
coerce(_) -> throw({error, type_validation, <<"Cannot coerce string type">>}).
| null | https://raw.githubusercontent.com/graphql-erlang/graphql/feef75d955e3404c3c8e4500b1e2c4d4821c1dc6/src/types/graphql_type_string.erl | erlang | -module(graphql_type_string).
-export([
type/0
]).
type()-> #{
kind => 'SCALAR',
name => 'String',
ofType => null,
description => <<
"The `String` scalar type represents textual data, represented as UTF-8",
"character sequences. The String type is most often used by GraphQL to",
"represent free-form human-readable text."
>>,
serialize => fun serialize/3,
parse_value => fun parse_value/2,
parse_literal => fun parse_literal/2
}.
serialize(Value,_,_) -> coerce(Value).
parse_value(null,_) -> null;
parse_value(#{kind := <<"StringValue">>, value := Value}, _) -> coerce(Value);
parse_value(#{kind := 'StringValue', value := Value}, _) -> coerce(Value).
-spec parse_literal(map(), map()) -> binary().
parse_literal(null, _) -> null;
parse_literal(#{kind := 'StringValue', value := Value}, _) -> Value;
parse_literal(#{kind := Kind}, _) ->
throw({error, type_validation, <<"Unexpected type ", (atom_to_binary(Kind, utf8))/binary, ", expected StringValue">>}).
-spec coerce(atom() | binary() | list()) -> binary().
coerce(null) -> null;
coerce(Value) when is_atom(Value) -> atom_to_binary(Value, utf8);
coerce(Value) when is_binary(Value) -> Value;
coerce(Value) when is_list(Value) -> list_to_binary(Value);
coerce(_) -> throw({error, type_validation, <<"Cannot coerce string type">>}).
| |
b4696753be7cbdf32cca8fb66f40fbf885d9665a2df1f83e29e6a474b947778c | quicklisp/quicklisp-controller | package.lisp | package.lisp
(defpackage #:quicklisp-controller
(:use #:cl
#:westbrook)
(:export #:setup-directories)
(:shadowing-import-from #:sb-ext
#:run-program
#:process-exit-code
#:process-output
#:process-close)
(:shadowing-import-from #:sb-ext
#:native-namestring)
(:shadowing-import-from #:sb-posix
#:chdir)
(:shadowing-import-from #:ql-gunzipper
#:gunzip)
(:shadowing-import-from #:ql-http
#:fetch)
(:shadowing-import-from #:quicklisp-tarhash
#:content-hash)
(:shadowing-import-from #:alexandria
#:when-let)
(:shadowing-import-from #:ql-dist
#:provided-systems
#:required-systems
#:provided-releases
#:system-file-name
#:name
#:dist
#:release))
(in-package #:quicklisp-controller)
| null | https://raw.githubusercontent.com/quicklisp/quicklisp-controller/9fc95237f0f2f86cafc3afb2f1e7666610e8561b/package.lisp | lisp | package.lisp
(defpackage #:quicklisp-controller
(:use #:cl
#:westbrook)
(:export #:setup-directories)
(:shadowing-import-from #:sb-ext
#:run-program
#:process-exit-code
#:process-output
#:process-close)
(:shadowing-import-from #:sb-ext
#:native-namestring)
(:shadowing-import-from #:sb-posix
#:chdir)
(:shadowing-import-from #:ql-gunzipper
#:gunzip)
(:shadowing-import-from #:ql-http
#:fetch)
(:shadowing-import-from #:quicklisp-tarhash
#:content-hash)
(:shadowing-import-from #:alexandria
#:when-let)
(:shadowing-import-from #:ql-dist
#:provided-systems
#:required-systems
#:provided-releases
#:system-file-name
#:name
#:dist
#:release))
(in-package #:quicklisp-controller)
| |
0ae31f7600ada56408c0c33512b454107ca7e13b806e0fae2da483876168eb83 | int28h/HaskellTasks | 0001.hs | main = putStrLn "Hello, world!" | null | https://raw.githubusercontent.com/int28h/HaskellTasks/38aa6c1d461ca5774350c68fa7dd631932f10f84/src/0001.hs | haskell | main = putStrLn "Hello, world!" | |
fd21a3b0f4ea4374b71034b94fb3b1043f295013f5d943930d81c0e18e6d2bda | basho/riak_core | riak_core_bucket_type.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2013 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Bucket Types allow groups of buckets to share configuration
%% details. Each bucket belongs to a type and inherits its
%% properties. Buckets can override the properties they inherit using
{ @link riak_core_bucket } . The " Default Bucket Type " always
%% exists. The Default Type's properties come from the riak_core
%% `default_bucket_props' application config, so the Default Type and
%% its buckets continue to act as they had prior to the existence of
%% Bucket Types.
%%
Unlike Buckets , Bucket Types must be explicitly created . In
%% addition, types support setting some properties only on creation
%% (via {@link riak_core_bucket_props:validate/4}). Since, types are
%% stored using {@link riak_core_metadata}, in order to provide safe
%% creation semantics the following invariant must be satisfied: all
%% nodes in the cluster either see no type or a single version of the
%% type for the lifetime of the cluster (nodes need not see the single
%% version at the same time). As part of ensuring this invariant, creation
is a two - step process :
%%
%% 1. The type is created and is inactive. To the node an inactive type
%% does not exist
%% 2. When the creation has propogated to all nodes, the type may be activated.
%% As the activation propogates, nodes will be able to use the type
%%
The first step is performed using { @link create/2 } . The second by
%% {@link activate/1}. After the type has been activated, some
%% properties may be updated using {@link update/2}. All operations are
%% serialized through {@link riak_core_claimant} except reading bucket
%% type properties with {@link get/1}.
%%
Bucket types can be in one of four states . The
%% state of a type can be queried using {@link status/1}.
%%
%% 1. undefined - the type has not been created
%% 2. created - the type has been created but has not propogated to all nodes
%% 3. ready - the type has been created and has propogated to all nodes but
%% has not been activated
%% 4. active - the Bucket Type has been activated, but the activation may
%% not have propogated to all nodes yet
%%
%% In order for the invariant to hold, additional restrictions are
%% placed on the operations, generally and based on the state of the
%% Bucket Type. These restrictions are in-place to ensure safety
%% during cases where membership changes or node failures change the
%% {@link riak_core_claimant} to a new node -- ensuring concurrent
%% updates do not break the invariant.
%%
%% * calling {@link create/1} multiple times before a Bucket Type
%% is active is allowed. The newer creation will supersede any
%% previous ones. In addition, the type will be "claimed" by the
%% {@link riak_core_claimant} node writing the property. Future
%% calls to {@link create/1} must be serialized through the same
%% claimant node or the call will not succeed. In the case where
%% the claimed type fails to propogate to a new claimant during a
%% a failure the potential concurrent update is resolved with
%% last-write-wins. Since nodes can not use inactive types, this is
%% safe.
%% * A type may only be activated if it is in the `ready' state. This means
%% all nodes must be reachable from the claimant
%% * {@link create/1} will fail if the type is active. Activation concurrent
%% with creation is not possible due to the previous restriction
%% * {@link update/1} will fail unless the type is updated. {@link update/1} does
%% not allow modifications to properties for which the invariant must hold
%% (NOTE: this is up to the implementor of the riak_core bucket_validator).
%%
%% There is one known case where this invariant does not hold:
%% * in the case where a singleton cluster activates a type before being joined
%% to a cluster that has activated the same type. This is a case poorly handled
%% by most riak_core applications and is considered acceptable (so dont do it!).
-module(riak_core_bucket_type).
-include("riak_core_bucket_type.hrl").
-export([defaults/0,
defaults/1,
create/2,
status/1,
activate/1,
update/2,
get/1,
reset/1,
fold/2,
iterator/0,
itr_next/1,
itr_done/1,
itr_value/1,
itr_close/1,
property_hash/2,
property_hash/3, all_n/0]).
-export_type([bucket_type/0]).
-type bucket_type() :: binary().
-type bucket_type_props() :: riak_core_bucket:properties().
-define(IF_CAPABLE(X, E), case riak_core_capability:get({riak_core, bucket_types}) of
true -> X;
false -> E
end).
%% @doc The hardcoded defaults for all bucket types.
-spec defaults() -> bucket_type_props().
defaults() ->
v225_defaults() ++ custom_type_defaults().
@private default propeties added for v2.2.5
-spec v225_defaults() -> bucket_type_props().
v225_defaults() ->
[{node_confirms, 0}].
%% @doc The hardcoded defaults for the legacy, default bucket
%% type. These find their way into the `default_bucket_props'
%% environment variable
-spec defaults(default_type) -> bucket_type_props().
defaults(default_type) ->
v225_defaults() ++ default_type_defaults().
default_type_defaults() ->
[{dvv_enabled, false},
{allow_mult, false}] ++
common_defaults().
custom_type_defaults() ->
@HACK dvv is a riak_kv only thing , yet there is nowhere else
to put it ( except maybe ? )
[{dvv_enabled, true},
{allow_mult, true}] ++
common_defaults().
common_defaults() ->
[{linkfun, {modfun, riak_kv_wm_link_walker, mapreduce_linkfun}},
{old_vclock, 86400},
{young_vclock, 20},
{big_vclock, 50},
{small_vclock, 50},
{pr, 0},
{r, quorum},
{w, quorum},
{pw, 0},
{dw, quorum},
{rw, quorum},
{sync_on_write, backend},
{basic_quorum, false},
{notfound_ok, true},
{n_val,3},
{last_write_wins,false},
{precommit, []},
{postcommit, []},
{chash_keyfun, {riak_core_util, chash_std_keyfun}}].
%% @doc Create the type. The type is not activated (available to nodes) at this time. This
%% function may be called arbitratily many times if the claimant does not change between
%% calls and the type is not active. An error will also be returned if the properties
%% are not valid. Properties not provided will be taken from those returned by
%% @see defaults/0.
-spec create(bucket_type(), bucket_type_props()) -> ok | {error, term()}.
create(?DEFAULT_TYPE, _Props) ->
{error, default_type};
create(BucketType, Props) when is_binary(BucketType) ->
?IF_CAPABLE(riak_core_claimant:create_bucket_type(BucketType,
riak_core_bucket_props:merge(Props, defaults())),
{error, not_capable}).
%% @doc Returns the state the type is in.
-spec status(bucket_type()) -> undefined | created | ready | active.
status(?DEFAULT_TYPE) ->
active;
status(BucketType) when is_binary(BucketType) ->
?IF_CAPABLE(riak_core_claimant:bucket_type_status(BucketType), undefined).
%% @doc Activate the type. This will succeed only if the type is in the `ready' state. Otherwise,
%% an error is returned.
-spec activate(bucket_type()) -> ok | {error, undefined | not_ready}.
activate(?DEFAULT_TYPE) ->
ok;
activate(BucketType) when is_binary(BucketType) ->
?IF_CAPABLE(riak_core_claimant:activate_bucket_type(BucketType), {error, undefined}).
%% @doc Update an existing bucket type. Updates may only be performed
%% on active types. Properties not provided will keep their existing
%% values.
-spec update(bucket_type(), bucket_type_props()) -> ok | {error, term()}.
update(?DEFAULT_TYPE, _Props) ->
{error, no_default_update}; %% default props are in the app.config
update(BucketType, Props) when is_binary(BucketType)->
?IF_CAPABLE(riak_core_claimant:update_bucket_type(BucketType, Props), {error, not_capable}).
%% @doc Return the properties associated with the given bucket type.
-spec get(bucket_type()) -> undefined | bucket_type_props().
get(<<"default">>) ->
riak_core_bucket_props:defaults();
get(BucketType) when is_binary(BucketType) ->
riak_core_claimant:get_bucket_type(BucketType, undefined).
%% @doc Reset the properties of the bucket. This only affects properties that
%% can be set using {@link update/2} and can only be performed on an active
%% type.
%%
%% This is not currently hooked into `riak-admin' but can be invoked
%% from the console.
-spec reset(bucket_type()) -> ok | {error, term()}.
reset(BucketType) ->
update(BucketType, defaults()).
%% @doc iterate over bucket types and find any active buckets.
-spec all_n() -> riak_core_bucket:nval_set().
all_n() ->
riak_core_bucket_type:fold(fun bucket_type_prop_nval_fold/2, ordsets:new()).
@private
-spec bucket_type_prop_nval_fold({bucket_type(), riak_core_bucket:properties()},
riak_core_bucket:nval_set()) -> riak_core_bucket:nval_set().
bucket_type_prop_nval_fold({_BType, BProps}, Accum) ->
case riak_core_bucket:get_value(active, BProps) of
true ->
bucket_prop_nval_fold(BProps, Accum);
_ ->
Accum
end.
-spec bucket_prop_nval_fold(riak_core_bucket:properties(), riak_core_bucket:nval_set()) ->
riak_core_bucket:nval_set().
bucket_prop_nval_fold(BProps, Accum) ->
case riak_core_bucket:get_value(n_val, BProps) of
undefined ->
Accum;
NVal ->
ordsets:add_element(NVal, Accum)
end.
@doc Fold over all bucket types , storing result in accumulator
-spec fold(fun(({bucket_type(), bucket_type_props()}, any()) -> any()),
Accumulator::any()) ->
any().
fold(Fun, Accum) ->
fold(iterator(), Fun, Accum).
-spec fold(
riak_core_metadata:iterator(),
fun(({bucket_type(), bucket_type_props()}, any()) -> any()),
any()
) ->
any().
fold(It, Fun, Accum) ->
case riak_core_bucket_type:itr_done(It) of
true ->
riak_core_bucket_type:itr_close(It),
Accum;
_ ->
NewAccum = Fun(itr_value(It), Accum),
fold(riak_core_bucket_type:itr_next(It), Fun, NewAccum)
end.
%% @doc Return an iterator that can be used to walk through all existing bucket types
%% and their properties
-spec iterator() -> riak_core_metadata:iterator().
iterator() ->
riak_core_claimant:bucket_type_iterator().
@doc Advance the iterator to the next bucket type . itr_done/1 should always be called
%% before this function
-spec itr_next(riak_core_metadata:iterator()) ->
riak_core_metadata:iterator().
itr_next(It) ->
riak_core_metadata:itr_next(It).
%% @doc Returns true if there are no more bucket types to iterate over
-spec itr_done(riak_core_metadata:iterator()) -> boolean().
itr_done(It) ->
riak_core_metadata:itr_done(It).
%% @doc Returns the type and properties that the iterator points too. Any siblings,
are resolved at this time . itr_done/1 should be checked before calling this function .
-spec itr_value(riak_core_metadata:iterator()) ->
{bucket_type(), bucket_type_props()}.
itr_value(It) ->
{BucketType, Props} = riak_core_metadata:itr_key_values(It),
{BucketType, Props}.
-spec itr_close(riak_core_metadata:iterator()) -> ok.
itr_close(It) ->
riak_core_metadata:itr_close(It).
%% @doc Returns a hash of a specified set of bucket type properties
%% whose values may have implications on the treatment or handling of
%% buckets created using the bucket type.
-spec property_hash(bucket_type(), [term()]) -> undefined | integer().
property_hash(Type, PropKeys) ->
property_hash(Type, PropKeys, ?MODULE:get(Type)).
-spec property_hash(bucket_type(), [term()], undefined | bucket_type_props()) ->
undefined | integer().
property_hash(_Type, _PropKeys, undefined) ->
undefined;
property_hash(_Type, PropKeys, Props) ->
erlang:phash2([lists:keyfind(PropKey, 1, Props) || PropKey <- PropKeys]).
| null | https://raw.githubusercontent.com/basho/riak_core/762ec81ae9af9a278e853f1feca418b9dcf748a3/src/riak_core_bucket_type.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
@doc Bucket Types allow groups of buckets to share configuration
details. Each bucket belongs to a type and inherits its
properties. Buckets can override the properties they inherit using
exists. The Default Type's properties come from the riak_core
`default_bucket_props' application config, so the Default Type and
its buckets continue to act as they had prior to the existence of
Bucket Types.
addition, types support setting some properties only on creation
(via {@link riak_core_bucket_props:validate/4}). Since, types are
stored using {@link riak_core_metadata}, in order to provide safe
creation semantics the following invariant must be satisfied: all
nodes in the cluster either see no type or a single version of the
type for the lifetime of the cluster (nodes need not see the single
version at the same time). As part of ensuring this invariant, creation
1. The type is created and is inactive. To the node an inactive type
does not exist
2. When the creation has propogated to all nodes, the type may be activated.
As the activation propogates, nodes will be able to use the type
{@link activate/1}. After the type has been activated, some
properties may be updated using {@link update/2}. All operations are
serialized through {@link riak_core_claimant} except reading bucket
type properties with {@link get/1}.
state of a type can be queried using {@link status/1}.
1. undefined - the type has not been created
2. created - the type has been created but has not propogated to all nodes
3. ready - the type has been created and has propogated to all nodes but
has not been activated
4. active - the Bucket Type has been activated, but the activation may
not have propogated to all nodes yet
In order for the invariant to hold, additional restrictions are
placed on the operations, generally and based on the state of the
Bucket Type. These restrictions are in-place to ensure safety
during cases where membership changes or node failures change the
{@link riak_core_claimant} to a new node -- ensuring concurrent
updates do not break the invariant.
* calling {@link create/1} multiple times before a Bucket Type
is active is allowed. The newer creation will supersede any
previous ones. In addition, the type will be "claimed" by the
{@link riak_core_claimant} node writing the property. Future
calls to {@link create/1} must be serialized through the same
claimant node or the call will not succeed. In the case where
the claimed type fails to propogate to a new claimant during a
a failure the potential concurrent update is resolved with
last-write-wins. Since nodes can not use inactive types, this is
safe.
* A type may only be activated if it is in the `ready' state. This means
all nodes must be reachable from the claimant
* {@link create/1} will fail if the type is active. Activation concurrent
with creation is not possible due to the previous restriction
* {@link update/1} will fail unless the type is updated. {@link update/1} does
not allow modifications to properties for which the invariant must hold
(NOTE: this is up to the implementor of the riak_core bucket_validator).
There is one known case where this invariant does not hold:
* in the case where a singleton cluster activates a type before being joined
to a cluster that has activated the same type. This is a case poorly handled
by most riak_core applications and is considered acceptable (so dont do it!).
@doc The hardcoded defaults for all bucket types.
@doc The hardcoded defaults for the legacy, default bucket
type. These find their way into the `default_bucket_props'
environment variable
@doc Create the type. The type is not activated (available to nodes) at this time. This
function may be called arbitratily many times if the claimant does not change between
calls and the type is not active. An error will also be returned if the properties
are not valid. Properties not provided will be taken from those returned by
@see defaults/0.
@doc Returns the state the type is in.
@doc Activate the type. This will succeed only if the type is in the `ready' state. Otherwise,
an error is returned.
@doc Update an existing bucket type. Updates may only be performed
on active types. Properties not provided will keep their existing
values.
default props are in the app.config
@doc Return the properties associated with the given bucket type.
@doc Reset the properties of the bucket. This only affects properties that
can be set using {@link update/2} and can only be performed on an active
type.
This is not currently hooked into `riak-admin' but can be invoked
from the console.
@doc iterate over bucket types and find any active buckets.
@doc Return an iterator that can be used to walk through all existing bucket types
and their properties
before this function
@doc Returns true if there are no more bucket types to iterate over
@doc Returns the type and properties that the iterator points too. Any siblings,
@doc Returns a hash of a specified set of bucket type properties
whose values may have implications on the treatment or handling of
buckets created using the bucket type. | Copyright ( c ) 2013 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
{ @link riak_core_bucket } . The " Default Bucket Type " always
Unlike Buckets , Bucket Types must be explicitly created . In
is a two - step process :
The first step is performed using { @link create/2 } . The second by
Bucket types can be in one of four states . The
-module(riak_core_bucket_type).
-include("riak_core_bucket_type.hrl").
-export([defaults/0,
defaults/1,
create/2,
status/1,
activate/1,
update/2,
get/1,
reset/1,
fold/2,
iterator/0,
itr_next/1,
itr_done/1,
itr_value/1,
itr_close/1,
property_hash/2,
property_hash/3, all_n/0]).
-export_type([bucket_type/0]).
-type bucket_type() :: binary().
-type bucket_type_props() :: riak_core_bucket:properties().
-define(IF_CAPABLE(X, E), case riak_core_capability:get({riak_core, bucket_types}) of
true -> X;
false -> E
end).
-spec defaults() -> bucket_type_props().
defaults() ->
v225_defaults() ++ custom_type_defaults().
@private default propeties added for v2.2.5
-spec v225_defaults() -> bucket_type_props().
v225_defaults() ->
[{node_confirms, 0}].
-spec defaults(default_type) -> bucket_type_props().
defaults(default_type) ->
v225_defaults() ++ default_type_defaults().
default_type_defaults() ->
[{dvv_enabled, false},
{allow_mult, false}] ++
common_defaults().
custom_type_defaults() ->
@HACK dvv is a riak_kv only thing , yet there is nowhere else
to put it ( except maybe ? )
[{dvv_enabled, true},
{allow_mult, true}] ++
common_defaults().
common_defaults() ->
[{linkfun, {modfun, riak_kv_wm_link_walker, mapreduce_linkfun}},
{old_vclock, 86400},
{young_vclock, 20},
{big_vclock, 50},
{small_vclock, 50},
{pr, 0},
{r, quorum},
{w, quorum},
{pw, 0},
{dw, quorum},
{rw, quorum},
{sync_on_write, backend},
{basic_quorum, false},
{notfound_ok, true},
{n_val,3},
{last_write_wins,false},
{precommit, []},
{postcommit, []},
{chash_keyfun, {riak_core_util, chash_std_keyfun}}].
-spec create(bucket_type(), bucket_type_props()) -> ok | {error, term()}.
create(?DEFAULT_TYPE, _Props) ->
{error, default_type};
create(BucketType, Props) when is_binary(BucketType) ->
?IF_CAPABLE(riak_core_claimant:create_bucket_type(BucketType,
riak_core_bucket_props:merge(Props, defaults())),
{error, not_capable}).
-spec status(bucket_type()) -> undefined | created | ready | active.
status(?DEFAULT_TYPE) ->
active;
status(BucketType) when is_binary(BucketType) ->
?IF_CAPABLE(riak_core_claimant:bucket_type_status(BucketType), undefined).
-spec activate(bucket_type()) -> ok | {error, undefined | not_ready}.
activate(?DEFAULT_TYPE) ->
ok;
activate(BucketType) when is_binary(BucketType) ->
?IF_CAPABLE(riak_core_claimant:activate_bucket_type(BucketType), {error, undefined}).
-spec update(bucket_type(), bucket_type_props()) -> ok | {error, term()}.
update(?DEFAULT_TYPE, _Props) ->
update(BucketType, Props) when is_binary(BucketType)->
?IF_CAPABLE(riak_core_claimant:update_bucket_type(BucketType, Props), {error, not_capable}).
-spec get(bucket_type()) -> undefined | bucket_type_props().
get(<<"default">>) ->
riak_core_bucket_props:defaults();
get(BucketType) when is_binary(BucketType) ->
riak_core_claimant:get_bucket_type(BucketType, undefined).
-spec reset(bucket_type()) -> ok | {error, term()}.
reset(BucketType) ->
update(BucketType, defaults()).
-spec all_n() -> riak_core_bucket:nval_set().
all_n() ->
riak_core_bucket_type:fold(fun bucket_type_prop_nval_fold/2, ordsets:new()).
@private
-spec bucket_type_prop_nval_fold({bucket_type(), riak_core_bucket:properties()},
riak_core_bucket:nval_set()) -> riak_core_bucket:nval_set().
bucket_type_prop_nval_fold({_BType, BProps}, Accum) ->
case riak_core_bucket:get_value(active, BProps) of
true ->
bucket_prop_nval_fold(BProps, Accum);
_ ->
Accum
end.
-spec bucket_prop_nval_fold(riak_core_bucket:properties(), riak_core_bucket:nval_set()) ->
riak_core_bucket:nval_set().
bucket_prop_nval_fold(BProps, Accum) ->
case riak_core_bucket:get_value(n_val, BProps) of
undefined ->
Accum;
NVal ->
ordsets:add_element(NVal, Accum)
end.
@doc Fold over all bucket types , storing result in accumulator
-spec fold(fun(({bucket_type(), bucket_type_props()}, any()) -> any()),
Accumulator::any()) ->
any().
fold(Fun, Accum) ->
fold(iterator(), Fun, Accum).
-spec fold(
riak_core_metadata:iterator(),
fun(({bucket_type(), bucket_type_props()}, any()) -> any()),
any()
) ->
any().
fold(It, Fun, Accum) ->
case riak_core_bucket_type:itr_done(It) of
true ->
riak_core_bucket_type:itr_close(It),
Accum;
_ ->
NewAccum = Fun(itr_value(It), Accum),
fold(riak_core_bucket_type:itr_next(It), Fun, NewAccum)
end.
-spec iterator() -> riak_core_metadata:iterator().
iterator() ->
riak_core_claimant:bucket_type_iterator().
@doc Advance the iterator to the next bucket type . itr_done/1 should always be called
-spec itr_next(riak_core_metadata:iterator()) ->
riak_core_metadata:iterator().
itr_next(It) ->
riak_core_metadata:itr_next(It).
-spec itr_done(riak_core_metadata:iterator()) -> boolean().
itr_done(It) ->
riak_core_metadata:itr_done(It).
are resolved at this time . itr_done/1 should be checked before calling this function .
-spec itr_value(riak_core_metadata:iterator()) ->
{bucket_type(), bucket_type_props()}.
itr_value(It) ->
{BucketType, Props} = riak_core_metadata:itr_key_values(It),
{BucketType, Props}.
-spec itr_close(riak_core_metadata:iterator()) -> ok.
itr_close(It) ->
riak_core_metadata:itr_close(It).
-spec property_hash(bucket_type(), [term()]) -> undefined | integer().
property_hash(Type, PropKeys) ->
property_hash(Type, PropKeys, ?MODULE:get(Type)).
-spec property_hash(bucket_type(), [term()], undefined | bucket_type_props()) ->
undefined | integer().
property_hash(_Type, _PropKeys, undefined) ->
undefined;
property_hash(_Type, PropKeys, Props) ->
erlang:phash2([lists:keyfind(PropKey, 1, Props) || PropKey <- PropKeys]).
|
efcc4e3a6db690d2f71eecf15e59261fc715d350a3c5f7e8b513ef315a22b58b | haskell/vector | minimumBy.hs | import qualified Data.Vector as U
import Data.Bits
main = print . U.minimumBy (\x y -> GT) . U.map (*2) . U.map (`shiftL` 2) $ U.replicate (100000000 :: Int) (5::Int)
| null | https://raw.githubusercontent.com/haskell/vector/4c87e88f07aad166c6ae2ccb94fa539fbdd99a91/old-testsuite/microsuite/minimumBy.hs | haskell | import qualified Data.Vector as U
import Data.Bits
main = print . U.minimumBy (\x y -> GT) . U.map (*2) . U.map (`shiftL` 2) $ U.replicate (100000000 :: Int) (5::Int)
| |
e4bcd547bc9b5350b6fce57e1f0b5830d288af912992b0277cfd90552384ebba | spawnfest/eep49ers | wxListCtrl.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2011 - 2016 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
<<EXPORT:SortItems sortItems/2 SortItems:EXPORT>>
<<SortItems
-spec sortItems(This::wxListCtrl(), SortCallBack) -> boolean()
when SortCallBack :: fun((integer(), integer()) -> integer()).
sortItems(#wx_ref{type=ThisT}=This, SortCallBack)
when is_function(SortCallBack, 2) ->
?CLASS(ThisT,wxListCtrl),
SortId = wxe_util:get_cbId(SortCallBack),
Op = ~s,
wxe_util:queue_cmd(This, SortId, ?get_env(), Op),
wxe_util:rec(Op).
SortItems>>
<<EXPORT:wxListCtrl new/0, new/1, new/2 wxListCtrl:EXPORT>>
<<wxListCtrl_new_0
%% @doc See <a href="#wxlistctrlwxlistctrl">external documentation</a>.
-spec new() -> wxListCtrl().
new() ->
Op = ~s,
wxe_util:queue_cmd(?get_env(), Op),
wxe_util:rec(Op).
wxListCtrl_new_0>>
<<wxListCtrl_new_2
-spec new(Parent) -> wxListCtrl() when
Parent::wxWindow:wxWindow().
new(Parent)
when is_record(Parent, wx_ref) ->
new(Parent, []).
%% @doc Creates a listctrl with optional callback functions:
%%
OnGetItemText = ( This , Item , Column ) - > unicode : ( )
%% OnGetItemAttr = (This, Item) -> wxListItemAttr:wxListItemAttr()
OnGetItemColumnImage = ( This , Item , Column ) - > integer ( )
%%
%% See <a href="#wxlistctrlwxlistctrl">external documentation</a>.
-spec new(Parent, [Option]) -> wxListCtrl() when
Parent::wxWindow:wxWindow(),
Option::{winid, integer()} |
{pos, {X::integer(),Y::integer()}} |
{size, {W::integer(),H::integer()}} |
{style, integer()} |
{validator, wx:wx_object()} |
{onGetItemText, function()} |
{onGetItemAttr, function()} |
{onGetItemColumnImage, function()}.
new(#wx_ref{}=Parent, Options)
when is_list(Options)->
%% ~s
ListCtrl = new(),
true = create(ListCtrl,Parent,Options),
ListCtrl.
wxListCtrl_new_2>>
<<EXPORT:Create create/2, create/3 Create:EXPORT>>
<<Create
%% @equiv create(This,Parent, [])
-spec create(This, Parent) -> boolean() when
This::wxWindow:wxWindow(),
Parent::wxWindow:wxWindow().
create(This,Parent)
when is_record(This, wx_ref),is_record(Parent, wx_ref) ->
create(This,Parent, []).
%% @doc See <a href="#wxlistctrlcreate">external documentation</a>.
-spec create(This, Parent, [Option]) -> boolean() when
This::wxWindow:wxWindow(),
Parent::wxWindow:wxWindow(),
Option::{winid, integer()} |
{pos, {X::integer(),Y::integer()}} |
{size, {W::integer(),H::integer()}} |
{style, integer()} |
{validator, wx:wx_object()} |
{onGetItemText, function()} |
{onGetItemAttr, function()} |
{onGetItemColumnImage, function()}.
create(#wx_ref{type=ThisT}=This,#wx_ref{type=ParentT}=Parent, Options)
when is_list(Options) ->
?CLASS(ThisT,wxListCtrl),
?CLASS(ParentT,wxWindow),
Op = ~s,
MOpts = fun({winid, _} = Arg) -> Arg;
({pos, {_posX,_posY}} = Arg) -> Arg;
({size, {_sizeW,_sizeH}} = Arg) -> Arg;
({style, _style} = Arg) -> Arg;
({validator, #wx_ref{type=ValidatorT}} = Arg) -> ?CLASS(ValidatorT,wx),Arg;
({onGetItemText, Fun}) ->
ToStr = fun(A,B,C) -> unicode:characters_to_binary(Fun(A,B,C)) end,
{onGetItemText, wxe_util:get_cbId(ToStr)};
({onGetItemAttr, Fun}) -> {onGetItemAttr, wxe_util:get_cbId(Fun)};
({onGetItemColumnImage, Fun}) -> {onGetItemColumnImage, wxe_util:get_cbId(Fun)};
(BadOpt) -> erlang:error({badoption, BadOpt}) end,
Opts = lists:map(MOpts, Options),
wxe_util:queue_cmd(This, Parent, Opts, ?get_env(), Op),
wxe_util:rec(Op).
Create>>
| null | https://raw.githubusercontent.com/spawnfest/eep49ers/d1020fd625a0bbda8ab01caf0e1738eb1cf74886/lib/wx/api_gen/wx_extra/wxListCtrl.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
@doc See <a href="#wxlistctrlwxlistctrl">external documentation</a>.
@doc Creates a listctrl with optional callback functions:
OnGetItemAttr = (This, Item) -> wxListItemAttr:wxListItemAttr()
See <a href="#wxlistctrlwxlistctrl">external documentation</a>.
~s
@equiv create(This,Parent, [])
@doc See <a href="#wxlistctrlcreate">external documentation</a>. | Copyright Ericsson AB 2011 - 2016 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
<<EXPORT:SortItems sortItems/2 SortItems:EXPORT>>
<<SortItems
-spec sortItems(This::wxListCtrl(), SortCallBack) -> boolean()
when SortCallBack :: fun((integer(), integer()) -> integer()).
sortItems(#wx_ref{type=ThisT}=This, SortCallBack)
when is_function(SortCallBack, 2) ->
?CLASS(ThisT,wxListCtrl),
SortId = wxe_util:get_cbId(SortCallBack),
Op = ~s,
wxe_util:queue_cmd(This, SortId, ?get_env(), Op),
wxe_util:rec(Op).
SortItems>>
<<EXPORT:wxListCtrl new/0, new/1, new/2 wxListCtrl:EXPORT>>
<<wxListCtrl_new_0
-spec new() -> wxListCtrl().
new() ->
Op = ~s,
wxe_util:queue_cmd(?get_env(), Op),
wxe_util:rec(Op).
wxListCtrl_new_0>>
<<wxListCtrl_new_2
-spec new(Parent) -> wxListCtrl() when
Parent::wxWindow:wxWindow().
new(Parent)
when is_record(Parent, wx_ref) ->
new(Parent, []).
OnGetItemText = ( This , Item , Column ) - > unicode : ( )
OnGetItemColumnImage = ( This , Item , Column ) - > integer ( )
-spec new(Parent, [Option]) -> wxListCtrl() when
Parent::wxWindow:wxWindow(),
Option::{winid, integer()} |
{pos, {X::integer(),Y::integer()}} |
{size, {W::integer(),H::integer()}} |
{style, integer()} |
{validator, wx:wx_object()} |
{onGetItemText, function()} |
{onGetItemAttr, function()} |
{onGetItemColumnImage, function()}.
new(#wx_ref{}=Parent, Options)
when is_list(Options)->
ListCtrl = new(),
true = create(ListCtrl,Parent,Options),
ListCtrl.
wxListCtrl_new_2>>
<<EXPORT:Create create/2, create/3 Create:EXPORT>>
<<Create
-spec create(This, Parent) -> boolean() when
This::wxWindow:wxWindow(),
Parent::wxWindow:wxWindow().
create(This,Parent)
when is_record(This, wx_ref),is_record(Parent, wx_ref) ->
create(This,Parent, []).
-spec create(This, Parent, [Option]) -> boolean() when
This::wxWindow:wxWindow(),
Parent::wxWindow:wxWindow(),
Option::{winid, integer()} |
{pos, {X::integer(),Y::integer()}} |
{size, {W::integer(),H::integer()}} |
{style, integer()} |
{validator, wx:wx_object()} |
{onGetItemText, function()} |
{onGetItemAttr, function()} |
{onGetItemColumnImage, function()}.
create(#wx_ref{type=ThisT}=This,#wx_ref{type=ParentT}=Parent, Options)
when is_list(Options) ->
?CLASS(ThisT,wxListCtrl),
?CLASS(ParentT,wxWindow),
Op = ~s,
MOpts = fun({winid, _} = Arg) -> Arg;
({pos, {_posX,_posY}} = Arg) -> Arg;
({size, {_sizeW,_sizeH}} = Arg) -> Arg;
({style, _style} = Arg) -> Arg;
({validator, #wx_ref{type=ValidatorT}} = Arg) -> ?CLASS(ValidatorT,wx),Arg;
({onGetItemText, Fun}) ->
ToStr = fun(A,B,C) -> unicode:characters_to_binary(Fun(A,B,C)) end,
{onGetItemText, wxe_util:get_cbId(ToStr)};
({onGetItemAttr, Fun}) -> {onGetItemAttr, wxe_util:get_cbId(Fun)};
({onGetItemColumnImage, Fun}) -> {onGetItemColumnImage, wxe_util:get_cbId(Fun)};
(BadOpt) -> erlang:error({badoption, BadOpt}) end,
Opts = lists:map(MOpts, Options),
wxe_util:queue_cmd(This, Parent, Opts, ?get_env(), Op),
wxe_util:rec(Op).
Create>>
|
8e451e9db58e2c289c50922fe4dc2a566d6eabf63c849506c59bdd28e612c84f | Clozure/ccl | gtk-minesweeper.lisp | -*-Mode : LISP ; Package : ( MINESWEEPER : USE ( CL CCL ) ) -*-
;;;
Copyright ( C ) 2001 Clozure Associates
;;;
This is a GTK+-based MineSweeper game , derived from a C program
developed by and published in " Developing Linux Programs
with GTK+ and GDK " , ( c ) 1999 New Riders Publishing .
;;;
;;; Anyone who wants to use this code for any purpose is free to do so.
;;; In doing so, the user acknowledges that this code is provided "as is",
;;; without warranty of any kind, and that no other party is legally or
;;; otherwise responsible for any consequences of its use.
(defpackage "MINESWEEPER"
(:use "CL" "CCL")
(:export "MINESWEEPER"))
(in-package "MINESWEEPER")
;;;
;;; Make GTK+ interface info available.
(eval-when (:compile-toplevel :execute)
(use-interface-dir :GTK2))
(eval-when (:compile-toplevel :load-toplevel :execute)
(require "OPENMCL-GTK-SUPPORT"))
(defconstant max-rows 35)
(defconstant max-cols 35)
(defconstant button-width 24)
(defconstant button-height 26)
(defvar *nrows* 10)
(defvar *ncols* 10)
(defvar *ntotalbombs* 0)
(defvar *bgameover* nil)
(defvar *bresetgame* nil)
(defvar *nbombsleft* nil)
(defvar *table* nil)
(defvar *start-button* nil)
(defvar *bombs-label* nil)
(defvar *time-label* nil)
(defvar *vbox* nil)
(defstruct cell
(buttonstate :button-unknown
:type (member :button-down :button-unknown :button-flagged))
button
(bombsnearby 0)
(has-bomb nil)
row
col)
The original C Minesweeper example uses GtkToggleButtons to
;;; represent the cells on the grid. They seem to work reasonably
well except for one minor ( but annoying ) feature : " enter " and
;;; "leave" events cause the cells under the mouse to be highlighted,
;;; making it difficult to distinguish "unpressed buttons" from "the
;;; button under the mouse".
;;;
This defines a GtkQuietToggleButton class that 's exactly like
;;; GtkToggleButton except for the fact that it does nothing on
;;; "enter" and "leave" events. It's not necessarily the most
interesting example of subclassing a Gtk widget , but it -is- an
;;; example of doing so.
;;;
;;; GtkQuietToggleButtons seem to be better, but there is still some
;;; room for improvement.
(defcallback enter-or-leave-quietly (:address widget :void)
(let* ((id (with-cstrs ((cell-id "cell-id"))
(#_gtk_object_get_data widget cell-id)))
(cell (cell-id->cell id))
(desired-state
(if (member (cell-buttonstate cell)
'(:button-unknown :button-flagged))
#$GTK_STATE_NORMAL
#$GTK_STATE_ACTIVE))
(current-state (pref widget :<G>tk<W>idget.state)))
(unless (eql current-state desired-state)
(#_gtk_widget_set_state widget desired-state))))
(defcallback gtk_quiet_toggle_button_class_init (:address classptr :void)
(setf (pref classptr :<G>tk<B>utton<C>lass.enter) enter-or-leave-quietly
(pref classptr :<G>tk<B>utton<C>lass.leave) enter-or-leave-quietly))
(defcallback gtk_quiet_toggle_button_init (:address widget :void)
(declare (ignore widget)))
CCL::DEFLOADVAR behaves like DEFPARAMETER , but arranges to
;;; initialize the variable whenever a saved image start up
as well as when the DEFLOADVAR is executed .
(ccl::defloadvar *gtk-quiet-toggle-button-type-info*
(let* ((p (#_malloc (ccl::%foreign-type-or-record-size :<G>tk<T>ype<I>nfo :bytes))))
(setf
(pref p :<G>tk<T>ype<I>nfo.type_name)
(with-cstrs ((name "GtkQuietToggleButton")) (#_g_strdup name))
(pref p :<G>tk<T>ype<I>nfo.object_size)
(ccl::%foreign-type-or-record-size :<G>tk<T>oggle<B>utton :bytes)
(pref p :<G>tk<T>ype<I>nfo.class_size)
(ccl::%foreign-type-or-record-size :<G>tk<T>oggle<B>utton<C>lass :bytes)
(pref p :<G>tk<T>ype<I>nfo.class_init_func) gtk_quiet_toggle_button_class_init
(pref p :<G>tk<T>ype<I>nfo.object_init_func) gtk_quiet_toggle_button_init
(pref p :<G>tk<T>ype<I>nfo.reserved_1) (%null-ptr)
(pref p :<G>tk<T>ype<I>nfo.reserved_2) (%null-ptr)
(pref p :<G>tk<T>ype<I>nfo.base_class_init_func) (%null-ptr))
p))
(ccl::defloadvar *gtk-quiet-toggle-button-type* nil)
(defun gtk-quiet-toggle-button-get-type ()
(or *gtk-quiet-toggle-button-type*
(setq *gtk-quiet-toggle-button-type*
(#_gtk_type_unique (#_gtk_toggle_button_get_type)
*gtk-quiet-toggle-button-type-info*))))
(defcallback gtk_quiet_toggle_button_get_type (:unsigned-fullword)
(gtk-quiet-toggle-button-get-type))
(defun gtk-quiet-toggle-button-new ()
(#_gtk_type_new (gtk-quiet-toggle-button-get-type)))
(defcallback gtk_quiet_toggle_button_new (:address)
(gtk-quiet-toggle-button-new))
(defparameter *minesweeper-use-quiet-toggle-buttons* t)
;;; Display message dialogs (as for the About... box).
;;; A dialog widget has "grabbed" the focus. Call back here when
;;; the dialog is to be closed; yield the focus.
(defcallback close-show-message
(:address container :address data :void)
(declare (ignore container))
(let* ((dialog-widget data))
(#_gtk_grab_remove dialog-widget)
(#_gtk_widget_destroy dialog-widget)))
(defcallback clear-show-message
(:address widget :address data :void)
(declare (ignore data))
(#_gtk_grab_remove widget))
(defun show-message (title message)
(let* ((dialog-window (#_gtk_dialog_new)))
(with-cstrs ((destroy-name "destroy"))
(#_gtk_signal_connect_full dialog-window destroy-name clear-show-message
(%null-ptr) (%null-ptr) (%null-ptr) 0 0))
(with-cstrs ((title title))
(#_gtk_window_set_title dialog-window title))
(#_gtk_container_set_border_width dialog-window 0)
(let* ((button (with-cstrs ((ok "OK"))
(#_gtk_button_new_with_label ok))))
(with-cstrs ((clicked "clicked"))
(#_gtk_signal_connect_full button clicked close-show-message (%null-ptr) dialog-window (%null-ptr) 0 0))
(setf (pref button :<G>tk<O>bject.flags)
(logior (pref button :<G>tk<O>bject.flags) #$GTK_CAN_DEFAULT))
(#_gtk_box_pack_start (pref dialog-window :<G>tk<D>ialog.action_area)
button #$TRUE #$TRUE 0)
(#_gtk_widget_grab_default button)
(#_gtk_widget_show button))
(let* ((label (with-cstrs ((message message))
(#_gtk_label_new message))))
(#_gtk_misc_set_padding label 10 10)
(#_gtk_box_pack_start (pref dialog-window :<G>tk<D>ialog.vbox)
label #$TRUE #$TRUE 0)
(#_gtk_widget_show label))
(#_gtk_widget_show dialog-window)
(#_gtk_grab_add dialog-window)))
(defun show-about ()
(show-message "About ..."
"Minesweeper OpenMCL GTK+ example
Copyright 2001 Clozure Associates
Derived from Minesweeper v0.6 by Eric Harlow"))
(defvar *win-main* ())
(defvar *accel-group* ())
(defvar *tooltips* ())
(defun reset-minesweeper-globals ()
(setq *win-main* nil
*accel-group* nil
*tooltips* nil
*vbox* nil
*time-label* nil
*bombs-label* nil
*start-button* nil
*table* nil
*bgameover* nil
*bresetgame* nil))
(defun create-widget-from-xpm (window xpm-string-list)
(rlet ((mask (* :<G>dk<B>itmap)))
(with-string-vector (xpm-data xpm-string-list)
(let* ((pixmap-data (#_gdk_pixmap_create_from_xpm_d
(pref window :<G>tk<W>idget.window)
mask
(%null-ptr)
xpm-data))
(pixmap-widget (#_gtk_pixmap_new pixmap-data (%get-ptr mask))))
(#_gtk_widget_show pixmap-widget)
pixmap-widget))))
(defun create-menu-item (menu item-name accel tip func data)
A null or zero - length item - name indicates a separator .
(let* ((menuitem nil))
(if (and item-name (length item-name))
(with-cstrs ((item-name item-name)
(activate "activate"))
(setq menuitem (#_gtk_menu_item_new_with_label item-name))
(#_gtk_signal_connect_full menuitem activate func (%null-ptr) (or data (%null-ptr)) (%null-ptr) 0 0))
(setq menuitem (#_gtk_menu_item_new)))
(#_gtk_menu_shell_append menu menuitem)
(#_gtk_widget_show menuitem)
(unless *accel-group*
(setq *accel-group*
(#_gtk_accel_group_new))
(#_gtk_window_add_accel_group *win-main* *accel-group*))
(if (and accel (char= (schar accel 0) #\^))
(with-cstrs ((activate "activate"))
(#_gtk_widget_add_accelerator
menuitem activate *accel-group* (char-code (schar accel 1))
#$GDK_CONTROL_MASK #$GTK_ACCEL_VISIBLE)))
(if (and tip (length tip))
(with-cstrs ((tip tip))
(#_gtk_tooltips_set_tip
(or *tooltips*
(setq *tooltips* (#_gtk_tooltips_new)))
menuitem
tip
(%null-ptr))))
menuitem))
(defun create-radio-menu-item (menu item-name group-ptr func data)
(with-cstrs ((item-name item-name)
(toggled "toggled"))
(let* ((menuitem (#_gtk_radio_menu_item_new_with_label
(%get-ptr group-ptr)
item-name)))
(setf (%get-ptr group-ptr)
(#_gtk_radio_menu_item_get_group menuitem))
(#_gtk_menu_shell_append menu menuitem)
(#_gtk_widget_show menuitem)
(#_gtk_signal_connect_full menuitem toggled func (%null-ptr) (or data (%null-ptr)) (%null-ptr) 0 0)
menuitem)))
(defun create-bar-sub-menu (menu name)
(with-cstrs ((name name))
(let* ((menuitem (#_gtk_menu_item_new_with_label name)))
(#_gtk_menu_shell_append menu menuitem)
(#_gtk_widget_show menuitem)
(let* ((submenu (#_gtk_menu_new)))
(#_gtk_menu_item_set_submenu menuitem submenu)
submenu))))
Represent string vectors as lists of strings . WITH - STRING - VECTOR
;;; will produce a foreign vector of C strings out of such a list.
(defvar *xpm-one*
'(
"12 12 2 1"
" c None"
"X c #3333CC"
" "
" XX "
" XXX "
" X XX "
" XX "
" XX "
" XX "
" XX "
" XX "
" XXXXXX "
" "
" "
))
(defvar *xpm-two*
'(
"12 12 2 1"
" c None"
"X c #009900"
" "
" XXXXXX "
" X X "
" XX "
" XX "
" XX "
" XX "
" XX "
" XX "
" XXXXXXXX "
" "
" "
))
(defvar *xpm-three*
'(
"12 12 2 1"
" c None"
"X c #AA0000"
" "
" XXXXX "
" XX "
" XX "
" XXXXXX "
" XX "
" XX "
" XX "
" XX "
" XXXXXX "
" "
" "
))
(defvar *xpm-four*
'(
"12 12 2 1"
" c None"
"X c #000066"
" "
" XX XX "
" XX XX "
" XX XX "
" XX XX "
" XXXXXXXX "
" XX "
" XX "
" XX "
" XX "
" "
" "
))
(defvar *xpm-five*
'(
"12 12 2 1"
" c None"
"X c #992299"
" "
" XXXXXXXX "
" XX "
" XX "
" XXXXXXX "
" XX "
" XX "
" XX "
" XX XX "
" XXXXXXX "
" "
" "
))
(defvar *xpm-six*
'(
"12 12 2 1"
" c None"
"X c #550055"
" "
" XXXXXX "
" XX "
" XX "
" XXXXXXX "
" XX XX "
" XX XX "
" XX XX "
" XX XX "
" XXXXXX "
" "
" "
))
(defvar *xpm-seven*
'(
"12 12 2 1"
" c None"
"X c #550000"
" "
" XXXXXXXX "
" XX "
" XX "
" XX "
" XX "
" XX "
" WX "
" XX "
" XX "
" "
" "
))
(defvar *xpm-eight*
'(
"12 12 2 1"
" c None"
"X c #441144"
" "
" XXXXXX "
" XX XX "
" XX XX "
" XXXXXX "
" XX XX "
" XX XX "
" XX XX "
" XX XX "
" XXXXXX "
" "
" "
))
(defvar *xpm-flag*
'(
"12 12 4 1"
" c None"
"X c #000000"
"R c #FF0000"
"r c #AA0000"
" "
" RRRRRRR "
" RRRRRrr "
" RRRrrrr "
" Rrrrrrr "
" X "
" X "
" X "
" X "
" X "
" XXX "
" "
))
;;;
;;; --- A bomb. Ooops, you're not as smart as you thought.
;;;
(defvar *xpm-bomb*
'(
"12 12 4 1"
" c None"
"X c #000000"
"R c #FF0000"
"r c #AA0000"
" "
" X "
" X X X "
" XXXXX "
" XXXXX "
" XXXXXXXXX "
" XXXXX "
" XXXXX "
" X X X "
" X "
" "
" "
))
;;;
;;; --- Wrong move!
;;;
(defvar *xpm-bigx*
'(
"12 12 4 1"
" c None"
"X c #000000"
"R c #FF0000"
"r c #AA0000"
"RRR RRR"
" RRR RRR "
" RRR RRR "
" RRRRRR "
" RRRR "
" RRRR "
" RRRR "
" RRRRRR "
" RRR RRR "
" RRR RRR "
"RRR RRR"
" "
))
;;;
;;; --- Bitmap of a smile
;;;
(defvar *xpm-smile*
'(
"16 16 4 1"
" c None"
". c #000000"
"X c #FFFF00"
"r c #AA0000"
" ...... "
" ..XXXXXX.. "
" ..XXXXXXXXXX. "
" .XXXXXXXXXXXX. "
" .XX..XXXX..XX. "
".XXX..XXXX..XXX."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
" .XX.XXXXXX.XX. "
" .XXX......XXX. "
" .XXXXXXXXXX. "
" ..XXXXXX.. "
" ...... "
" "
))
;;;
;;; --- frown. You lost.
;;;
(defvar *xpm-frown*
'(
"16 16 4 1"
" c None"
". c #000000"
"X c #FFFF00"
"r c #AA0000"
" ...... "
" ..XXXXXX.. "
" ..XXXXXXXXXX. "
" .XXXXXXXXXXXX. "
" .XX.X.XX.X.XX. "
".XXXX.XXXX.XXXX."
".XXX.X.XX.X.XXX."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
" .XXX......XXX. "
" .XX.XXXXXX.XX. "
" .XXXXXXXXXX. "
" ..XXXXXX.. "
" ...... "
" "
))
;;;
;;; --- We have a winner
;;;
(defvar *xpm-winner*
'(
"16 16 4 1"
" c None"
". c #000000"
"X c #FFFF00"
"r c #AA0000"
" ...... "
" ..XXXXXX.. "
" ..XXXXXXXXXX. "
" .XXXXXXXXXXXX. "
" .XX...XX...XX. "
".XX..........XX."
".X.X...XX...X.X."
"..XXXXXXXXXXXX.."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
" .XX.XXXXXX.XX. "
" .XXX......XXX. "
" .XXXXXXXXXX. "
" ..XXXXXX.. "
" ...... "
" "
))
(defvar *digits*
(vector nil *xpm-one* *xpm-two* *xpm-three* *xpm-four* *xpm-five*
*xpm-six* *xpm-seven* *xpm-eight*))
(defun set-grid (ncols nrows nbombs)
(when *table*
(#_gtk_widget_destroy *table*))
(setq *table* (#_gtk_table_new ncols nrows #$FALSE))
(#_gtk_box_pack_start *vbox* *table* #$FALSE #$FALSE 0)
(#_gtk_widget_realize *table*)
(reset-game ncols nrows nbombs t)
(#_gtk_widget_show *table*))
;;; Menu callbacks.
;;; This is called both when the start button is pressed and when
;;; the "New" menu item is selected.
(defcallback start-button-clicked (:address widget :address data :void)
(declare (ignore widget data))
(set-start-button-icon *xpm-smile*)
(reset-game *ncols* *nrows* *ntotalbombs* nil))
(defcallback action-beginner
(:address widget :address data :void)
(declare (ignore data))
(unless (zerop (pref widget :<G>tk<C>heck<M>enu<I>tem.active))
(set-grid 10 10 10)))
(defcallback action-intermediate
(:address widget :address data :void)
(declare (ignore data))
(unless (zerop (pref widget :<G>tk<C>heck<M>enu<I>tem.active))
(set-grid 20 15 40)))
(defcallback action-advanced
(:address widget :address data :void)
(declare (ignore data))
(unless (zerop (pref widget :<G>tk<C>heck<M>enu<I>tem.active))
(set-grid 30 20 100)))
(defcallback action-quit (:address widget :address data :void)
(declare (ignore widget))
(stop-timer)
(#_gtk_widget_destroy data)
(reset-minesweeper-globals))
(defcallback action-about (:void)
(show-about))
(defun create-menu (window vbox-main)
(setq *win-main* window)
(setq *accel-group* (#_gtk_accel_group_new))
(#_gtk_window_add_accel_group *win-main* *accel-group*)
(let* ((menubar (#_gtk_menu_bar_new)))
(#_gtk_box_pack_start vbox-main menubar #$FALSE #$TRUE 0)
(#_gtk_widget_show menubar)
(let* ((game-menu (create-bar-sub-menu menubar "Game")))
(create-menu-item game-menu
"New" "^N" "New Game" start-button-clicked nil)
(create-menu-item game-menu nil nil nil nil nil)
(rlet ((group (* t)))
(setf (%get-ptr group) (%null-ptr))
(with-macptrs ((group-ptr group))
(create-radio-menu-item game-menu "Beginner" group-ptr
action-beginner nil)
(create-radio-menu-item game-menu "Intermediate" group-ptr
action-intermediate nil)
(create-radio-menu-item game-menu "Advanced" group-ptr
action-advanced nil)))
(create-menu-item game-menu nil nil nil nil nil)
(create-menu-item game-menu "Quit" nil "Quit game"
action-quit *win-main*))
(let* ((help-menu (create-bar-sub-menu menubar "Help")))
(create-menu-item help-menu "About Minesweeper" nil "Gory Details"
action-about nil))))
(defparameter *cells*
(let* ((a (make-array (list max-cols max-rows))))
(dotimes (row max-rows a)
(dotimes (col max-cols)
(setf (aref a col row)
(make-cell :row row :col col))))))
;;; Callbacks can receive (foreign) pointer arguments. Since we'd
;;; rather keep information in lisp structures/arrays, that's not
;;; directly helpful.
;;; We can identify a cell by its row and column and
;;; can easily pack the row and column into a fixnum. This function's
;;; caller can coerce that fixnum into a pointer (via ccl::%int-to-ptr).
(defun cell->cell-id (cell)
(dpb (cell-row cell)
(byte 8 8)
(cell-col cell)))
;;; The inverse operation: the caller (a callback) will generally have
;;; a foreign pointer; it can coerce that to a fixnum and obtain the
;;; corresponding cell by unpacking its indices from that fixnum.
(defun cell-id->cell (cell-id)
(let* ((id (if (typep cell-id 'macptr)
(%ptr-to-int cell-id)
cell-id))
(row (ldb (byte 8 8) id))
(col (ldb (byte 8 0) id)))
(declare (fixnum id row col))
(aref *cells* col row)))
;;; Free widget.
(defcallback FreeChildCallback (:address widget :void)
(#_gtk_widget_destroy widget))
;;; Free all of the widgets contained in this one.
(defun free-children (widget)
(#_gtk_container_foreach
(#_g_type_check_instance_cast widget (#_gtk_container_get_type))
FreeChildCallback (%null-ptr)))
(defun add-image-to-mine (cell xpm-data)
(let* ((widget (create-widget-from-xpm *table* xpm-data)))
(#_gtk_container_add (cell-button cell) widget)
(#_gdk_drawable_unref widget)
nil))
(defun open-nearby-squares (col row)
(declare (fixnum col row))
(let* ((mincol (max (1- col) 0))
(maxcol (min (1+ col) (1- *ncols*)))
(minrow (max (1- row) 0))
(maxrow (min (1+ row) (1- *nrows*))))
(declare (fixnum mincol maxcol minrow maxrow))
(do* ((i mincol (1+ i)))
((> i maxcol))
(declare (fixnum i))
(do* ((j minrow (1+ j)))
((> j maxrow))
(declare (fixnum j))
(display-hidden-info (aref *cells* i j))))))
(defun display-hidden-info (cell)
(case (cell-buttonstate cell)
(:button-down
(#_gtk_toggle_button_set_active (cell-button cell) #$TRUE))
(:button-flagged
(#_gtk_toggle_button_set_active (cell-button cell) #$FALSE))
(t
(setf (cell-buttonstate cell) :button-down)
(#_gtk_toggle_button_set_active (cell-button cell) #$TRUE)
(setf (pref (cell-button cell) :<G>tk<B>utton.button_down) #$TRUE)
(if (cell-has-bomb cell)
(add-image-to-mine cell *xpm-bomb*)
(let* ((nearby-bombs (cell-bombsnearby cell)))
(declare (fixnum nearby-bombs))
(if (> nearby-bombs 0)
(add-image-to-mine cell (svref *digits* nearby-bombs))
(open-nearby-squares (cell-col cell) (cell-row cell))))))))
(defun show-bombs ()
(dotimes (i *ncols*)
(dotimes (j *nrows*)
(let* ((cell (aref *cells* i j))
(buttonstate (cell-buttonstate cell))
(has-bomb (cell-has-bomb cell)))
(if (and (eq buttonstate :button-unknown) has-bomb)
(display-hidden-info cell)
(when (and (eq buttonstate :button-flagged) (not has-bomb))
(free-children (cell-button cell))
(add-image-to-mine cell *xpm-bigx*)))))))
(defcallback cell-toggled (:address widget :address data :void)
(let* ((cell (cell-id->cell data))
(state (cell-buttonstate cell)))
(unless (eq state :button-flagged)
(if *bgameover*
(#_gtk_toggle_button_set_active widget
(if (eq state
:button-down)
#$TRUE
#$FALSE))
(unless *bresetgame*
(start-timer)
(cond ((cell-has-bomb cell)
(setq *bgameover* t)
(set-start-button-icon *xpm-frown*)
(stop-timer)
(show-bombs))
(t
(display-hidden-info cell)
(check-for-win))))))))
(defcallback button-press (:address widget :address event :address data :void)
(unless *bgameover*
(when (and (eql (pref event :<G>dk<E>vent<B>utton.type) #$GDK_BUTTON_PRESS)
(eql (pref event :<G>dk<E>vent<B>utton.button) 3))
(let* ((cell (cell-id->cell data)))
(case (cell-buttonstate cell)
(:button-unknown
(free-children widget)
(setf (cell-buttonstate cell) :button-flagged)
(add-image-to-mine cell *xpm-flag*)
(decf *nbombsleft*))
(:button-flagged
(free-children widget)
(setf (cell-buttonstate cell) :button-unknown)
(incf *nbombsleft*)))
(display-bomb-count)
(check-for-win)))))
(defun set-start-button-icon (xpm-list)
(let* ((widget (create-widget-from-xpm *start-button* xpm-list)))
(free-children *start-button*)
(#_gtk_container_add *start-button* widget)))
(defun check-for-win ()
(let* ((nmines 0))
(declare (fixnum nmines))
(dotimes (col *ncols*)
(declare (fixnum col))
(dotimes (row *nrows*)
(declare (fixnum row))
(when (member (cell-buttonstate (aref *cells* col row))
'(:button-unknown :button-flagged))
(incf nmines))))
(when (= nmines (the fixnum *ntotalbombs*))
(stop-timer)
(set-start-button-icon *xpm-winner*)
(setq *bgameover* t))))
(defun create-button (table cell row column)
(let* ((button
(if *minesweeper-use-quiet-toggle-buttons*
(let* ((b (gtk-quiet-toggle-button-new))
(id (cell->cell-id (aref *cells* column row))))
(with-cstrs ((cell-id "cell-id"))
(#_gtk_object_set_data b cell-id (%int-to-ptr id)))
b)
(#_gtk_toggle_button_new)))
(cell-id (cell->cell-id cell)))
(with-cstrs ((toggled "toggled")
(button-press-event "button_press_event"))
(#_gtk_signal_connect_full button toggled cell-toggled
(%null-ptr) (%int-to-ptr cell-id) (%null-ptr) 0 0)
(#_gtk_signal_connect_full button button-press-event
button-press (%null-ptr) (%int-to-ptr cell-id) (%null-ptr) 0 0))
(#_gtk_table_attach table button
column (1+ column)
(1+ row) (+ row 2)
(logior #$GTK_FILL #$GTK_EXPAND)
(logior #$GTK_FILL #$GTK_EXPAND)
0 0)
(#_gtk_widget_set_usize button button-width button-height)
(#_gtk_widget_show button)
button))
(defun count-nearby-bombs (col row)
(declare (fixnum col row))
(let* ((mincol (max (1- col) 0))
(maxcol (min (1+ col) (1- *ncols*)))
(minrow (max (1- row) 0))
(maxrow (min (1+ row) (1- *nrows*)))
(ncount 0))
(declare (fixnum mincol maxcol minrow maxrow ncount))
(do* ((i mincol (1+ i)))
((> i maxcol) ncount)
(declare (fixnum i))
(do* ((j minrow (1+ j)))
((> j maxrow))
(declare (fixnum j))
(if (cell-has-bomb (aref *cells* i j))
(incf ncount))))))
(defun display-bomb-count ()
(with-cstrs ((buf (format nil "Bombs: ~d" *nbombsleft*)))
(#_gtk_label_set_text *bombs-label* buf)))
(defun update-seconds (seconds)
(with-cstrs ((buf (format nil "Time: ~d" seconds)))
(#_gtk_label_set_text *time-label* buf)))
(defun create-minesweeper-buttons (table ngridcols ngridrows bnewbuttons)
(setq *nrows* ngridrows
*ncols* ngridcols
*bgameover* nil
*bresetgame* t)
(display-bomb-count)
(dotimes (ci *ncols*)
(declare (fixnum ci))
(dotimes (ri *nrows*)
(declare (fixnum ri))
(let* ((cell (aref *cells* ci ri)))
(setf (cell-has-bomb cell) nil
(cell-buttonstate cell) :button-unknown)
(if bnewbuttons
(setf (cell-button cell) (create-button table cell ri ci))
(progn
(free-children (cell-button cell))
(#_gtk_toggle_button_set_active (cell-button cell) #$FALSE))))))
(do* ((nbombs *ntotalbombs*)
(state (make-random-state t)))
((zerop nbombs))
(declare (fixnum nbombs))
(let* ((cell (aref *cells* (random *ncols* state) (random *nrows* state))))
(unless (cell-has-bomb cell)
(setf (cell-has-bomb cell) t)
(decf nbombs))))
(dotimes (ci *ncols*)
(declare (fixnum ci))
(dotimes (ri *nrows*)
(declare (fixnum ri))
(setf (cell-bombsnearby (aref *cells* ci ri))
(count-nearby-bombs ci ri))))
(setq *bresetgame* nil))
(defun reset-game (ncols nrows nbombs bnewbuttons)
(setq *ntotalbombs* nbombs
*nbombsleft* nbombs)
(create-minesweeper-buttons *table* ncols nrows bnewbuttons)
(stop-timer)
(update-seconds 0)
(set-start-button-icon *xpm-smile*))
;;; Timer stuff.
(defvar *timer* nil)
(defvar *nseconds* 0)
(defcallback timer-callback (:address data :void)
(declare (ignore data))
(incf *nseconds*)
(update-seconds *nseconds*))
(defun start-timer ()
(unless *timer*
(setq *nseconds* 0
*timer* (#_gtk_timeout_add 1000 timer-callback *win-main*))))
(defun stop-timer ()
(when *timer*
(#_gtk_timeout_remove *timer*)
(setq *timer* nil)))
;;; Finally ...
(defun minesweeper ()
(when *win-main*
(cerror
"Close current minesweeper game and start a new one"
"It seems that a minesweeper game is already active.")
(do* ()
((null *win-main*))
(#_gtk_widget_destroy *win-main*)
(sleep 1)))
(let* ((window (#_gtk_window_new #$GTK_WINDOW_TOPLEVEL)))
(#_gtk_window_set_policy window #$FALSE #$FALSE #$TRUE)
(with-cstrs ((window-title "Minesweeper"))
(#_gtk_window_set_title window window-title)
(setq *vbox* (#_gtk_vbox_new #$FALSE 1))
(#_gtk_widget_show *vbox*)
(create-menu window *vbox*)
(let* ((hbox (#_gtk_hbox_new #$TRUE 1)))
(#_gtk_widget_show hbox)
(#_gtk_box_pack_start *vbox* hbox #$FALSE #$FALSE 0)
(with-cstrs ((len0-string ""))
(setq *bombs-label* (#_gtk_label_new len0-string)
*time-label* (#_gtk_label_new len0-string)))
(#_gtk_box_pack_start hbox *bombs-label* #$FALSE #$FALSE 0)
(#_gtk_widget_show *bombs-label*)
(setq *start-button* (#_gtk_button_new))
(with-cstrs ((clicked "clicked"))
(#_gtk_signal_connect_full *start-button* clicked start-button-clicked
(%null-ptr) (%null-ptr) (%null-ptr) 0 0))
(#_gtk_box_pack_start hbox *start-button* #$FALSE #$FALSE 0)
(#_gtk_widget_show *start-button*)
(#_gtk_box_pack_start hbox *time-label* #$FALSE #$FALSE 0)
(#_gtk_widget_show *time-label*)
(#_gtk_widget_show hbox)
(#_gtk_container_add window *vbox*)
(with-cstrs ((destroy "destroy"))
(#_gtk_signal_connect_full window destroy action-quit (%null-ptr) window (%null-ptr) 0 0))
(#_gtk_widget_show window)
(set-start-button-icon *xpm-smile*)
(set-grid 10 10 10)))))
| null | https://raw.githubusercontent.com/Clozure/ccl/6c1a9458f7a5437b73ec227e989aa5b825f32fd3/examples/gtk-minesweeper.lisp | lisp | Package : ( MINESWEEPER : USE ( CL CCL ) ) -*-
Anyone who wants to use this code for any purpose is free to do so.
In doing so, the user acknowledges that this code is provided "as is",
without warranty of any kind, and that no other party is legally or
otherwise responsible for any consequences of its use.
Make GTK+ interface info available.
represent the cells on the grid. They seem to work reasonably
"leave" events cause the cells under the mouse to be highlighted,
making it difficult to distinguish "unpressed buttons" from "the
button under the mouse".
GtkToggleButton except for the fact that it does nothing on
"enter" and "leave" events. It's not necessarily the most
example of doing so.
GtkQuietToggleButtons seem to be better, but there is still some
room for improvement.
initialize the variable whenever a saved image start up
Display message dialogs (as for the About... box).
A dialog widget has "grabbed" the focus. Call back here when
the dialog is to be closed; yield the focus.
will produce a foreign vector of C strings out of such a list.
--- A bomb. Ooops, you're not as smart as you thought.
--- Wrong move!
--- Bitmap of a smile
--- frown. You lost.
--- We have a winner
Menu callbacks.
This is called both when the start button is pressed and when
the "New" menu item is selected.
Callbacks can receive (foreign) pointer arguments. Since we'd
rather keep information in lisp structures/arrays, that's not
directly helpful.
We can identify a cell by its row and column and
can easily pack the row and column into a fixnum. This function's
caller can coerce that fixnum into a pointer (via ccl::%int-to-ptr).
The inverse operation: the caller (a callback) will generally have
a foreign pointer; it can coerce that to a fixnum and obtain the
corresponding cell by unpacking its indices from that fixnum.
Free widget.
Free all of the widgets contained in this one.
Timer stuff.
Finally ... | Copyright ( C ) 2001 Clozure Associates
This is a GTK+-based MineSweeper game , derived from a C program
developed by and published in " Developing Linux Programs
with GTK+ and GDK " , ( c ) 1999 New Riders Publishing .
(defpackage "MINESWEEPER"
(:use "CL" "CCL")
(:export "MINESWEEPER"))
(in-package "MINESWEEPER")
(eval-when (:compile-toplevel :execute)
(use-interface-dir :GTK2))
(eval-when (:compile-toplevel :load-toplevel :execute)
(require "OPENMCL-GTK-SUPPORT"))
(defconstant max-rows 35)
(defconstant max-cols 35)
(defconstant button-width 24)
(defconstant button-height 26)
(defvar *nrows* 10)
(defvar *ncols* 10)
(defvar *ntotalbombs* 0)
(defvar *bgameover* nil)
(defvar *bresetgame* nil)
(defvar *nbombsleft* nil)
(defvar *table* nil)
(defvar *start-button* nil)
(defvar *bombs-label* nil)
(defvar *time-label* nil)
(defvar *vbox* nil)
(defstruct cell
(buttonstate :button-unknown
:type (member :button-down :button-unknown :button-flagged))
button
(bombsnearby 0)
(has-bomb nil)
row
col)
The original C Minesweeper example uses GtkToggleButtons to
well except for one minor ( but annoying ) feature : " enter " and
This defines a GtkQuietToggleButton class that 's exactly like
interesting example of subclassing a Gtk widget , but it -is- an
(defcallback enter-or-leave-quietly (:address widget :void)
(let* ((id (with-cstrs ((cell-id "cell-id"))
(#_gtk_object_get_data widget cell-id)))
(cell (cell-id->cell id))
(desired-state
(if (member (cell-buttonstate cell)
'(:button-unknown :button-flagged))
#$GTK_STATE_NORMAL
#$GTK_STATE_ACTIVE))
(current-state (pref widget :<G>tk<W>idget.state)))
(unless (eql current-state desired-state)
(#_gtk_widget_set_state widget desired-state))))
(defcallback gtk_quiet_toggle_button_class_init (:address classptr :void)
(setf (pref classptr :<G>tk<B>utton<C>lass.enter) enter-or-leave-quietly
(pref classptr :<G>tk<B>utton<C>lass.leave) enter-or-leave-quietly))
(defcallback gtk_quiet_toggle_button_init (:address widget :void)
(declare (ignore widget)))
CCL::DEFLOADVAR behaves like DEFPARAMETER , but arranges to
as well as when the DEFLOADVAR is executed .
(ccl::defloadvar *gtk-quiet-toggle-button-type-info*
(let* ((p (#_malloc (ccl::%foreign-type-or-record-size :<G>tk<T>ype<I>nfo :bytes))))
(setf
(pref p :<G>tk<T>ype<I>nfo.type_name)
(with-cstrs ((name "GtkQuietToggleButton")) (#_g_strdup name))
(pref p :<G>tk<T>ype<I>nfo.object_size)
(ccl::%foreign-type-or-record-size :<G>tk<T>oggle<B>utton :bytes)
(pref p :<G>tk<T>ype<I>nfo.class_size)
(ccl::%foreign-type-or-record-size :<G>tk<T>oggle<B>utton<C>lass :bytes)
(pref p :<G>tk<T>ype<I>nfo.class_init_func) gtk_quiet_toggle_button_class_init
(pref p :<G>tk<T>ype<I>nfo.object_init_func) gtk_quiet_toggle_button_init
(pref p :<G>tk<T>ype<I>nfo.reserved_1) (%null-ptr)
(pref p :<G>tk<T>ype<I>nfo.reserved_2) (%null-ptr)
(pref p :<G>tk<T>ype<I>nfo.base_class_init_func) (%null-ptr))
p))
(ccl::defloadvar *gtk-quiet-toggle-button-type* nil)
(defun gtk-quiet-toggle-button-get-type ()
(or *gtk-quiet-toggle-button-type*
(setq *gtk-quiet-toggle-button-type*
(#_gtk_type_unique (#_gtk_toggle_button_get_type)
*gtk-quiet-toggle-button-type-info*))))
(defcallback gtk_quiet_toggle_button_get_type (:unsigned-fullword)
(gtk-quiet-toggle-button-get-type))
(defun gtk-quiet-toggle-button-new ()
(#_gtk_type_new (gtk-quiet-toggle-button-get-type)))
(defcallback gtk_quiet_toggle_button_new (:address)
(gtk-quiet-toggle-button-new))
(defparameter *minesweeper-use-quiet-toggle-buttons* t)
(defcallback close-show-message
(:address container :address data :void)
(declare (ignore container))
(let* ((dialog-widget data))
(#_gtk_grab_remove dialog-widget)
(#_gtk_widget_destroy dialog-widget)))
(defcallback clear-show-message
(:address widget :address data :void)
(declare (ignore data))
(#_gtk_grab_remove widget))
(defun show-message (title message)
(let* ((dialog-window (#_gtk_dialog_new)))
(with-cstrs ((destroy-name "destroy"))
(#_gtk_signal_connect_full dialog-window destroy-name clear-show-message
(%null-ptr) (%null-ptr) (%null-ptr) 0 0))
(with-cstrs ((title title))
(#_gtk_window_set_title dialog-window title))
(#_gtk_container_set_border_width dialog-window 0)
(let* ((button (with-cstrs ((ok "OK"))
(#_gtk_button_new_with_label ok))))
(with-cstrs ((clicked "clicked"))
(#_gtk_signal_connect_full button clicked close-show-message (%null-ptr) dialog-window (%null-ptr) 0 0))
(setf (pref button :<G>tk<O>bject.flags)
(logior (pref button :<G>tk<O>bject.flags) #$GTK_CAN_DEFAULT))
(#_gtk_box_pack_start (pref dialog-window :<G>tk<D>ialog.action_area)
button #$TRUE #$TRUE 0)
(#_gtk_widget_grab_default button)
(#_gtk_widget_show button))
(let* ((label (with-cstrs ((message message))
(#_gtk_label_new message))))
(#_gtk_misc_set_padding label 10 10)
(#_gtk_box_pack_start (pref dialog-window :<G>tk<D>ialog.vbox)
label #$TRUE #$TRUE 0)
(#_gtk_widget_show label))
(#_gtk_widget_show dialog-window)
(#_gtk_grab_add dialog-window)))
(defun show-about ()
(show-message "About ..."
"Minesweeper OpenMCL GTK+ example
Copyright 2001 Clozure Associates
Derived from Minesweeper v0.6 by Eric Harlow"))
(defvar *win-main* ())
(defvar *accel-group* ())
(defvar *tooltips* ())
(defun reset-minesweeper-globals ()
(setq *win-main* nil
*accel-group* nil
*tooltips* nil
*vbox* nil
*time-label* nil
*bombs-label* nil
*start-button* nil
*table* nil
*bgameover* nil
*bresetgame* nil))
(defun create-widget-from-xpm (window xpm-string-list)
(rlet ((mask (* :<G>dk<B>itmap)))
(with-string-vector (xpm-data xpm-string-list)
(let* ((pixmap-data (#_gdk_pixmap_create_from_xpm_d
(pref window :<G>tk<W>idget.window)
mask
(%null-ptr)
xpm-data))
(pixmap-widget (#_gtk_pixmap_new pixmap-data (%get-ptr mask))))
(#_gtk_widget_show pixmap-widget)
pixmap-widget))))
(defun create-menu-item (menu item-name accel tip func data)
A null or zero - length item - name indicates a separator .
(let* ((menuitem nil))
(if (and item-name (length item-name))
(with-cstrs ((item-name item-name)
(activate "activate"))
(setq menuitem (#_gtk_menu_item_new_with_label item-name))
(#_gtk_signal_connect_full menuitem activate func (%null-ptr) (or data (%null-ptr)) (%null-ptr) 0 0))
(setq menuitem (#_gtk_menu_item_new)))
(#_gtk_menu_shell_append menu menuitem)
(#_gtk_widget_show menuitem)
(unless *accel-group*
(setq *accel-group*
(#_gtk_accel_group_new))
(#_gtk_window_add_accel_group *win-main* *accel-group*))
(if (and accel (char= (schar accel 0) #\^))
(with-cstrs ((activate "activate"))
(#_gtk_widget_add_accelerator
menuitem activate *accel-group* (char-code (schar accel 1))
#$GDK_CONTROL_MASK #$GTK_ACCEL_VISIBLE)))
(if (and tip (length tip))
(with-cstrs ((tip tip))
(#_gtk_tooltips_set_tip
(or *tooltips*
(setq *tooltips* (#_gtk_tooltips_new)))
menuitem
tip
(%null-ptr))))
menuitem))
(defun create-radio-menu-item (menu item-name group-ptr func data)
(with-cstrs ((item-name item-name)
(toggled "toggled"))
(let* ((menuitem (#_gtk_radio_menu_item_new_with_label
(%get-ptr group-ptr)
item-name)))
(setf (%get-ptr group-ptr)
(#_gtk_radio_menu_item_get_group menuitem))
(#_gtk_menu_shell_append menu menuitem)
(#_gtk_widget_show menuitem)
(#_gtk_signal_connect_full menuitem toggled func (%null-ptr) (or data (%null-ptr)) (%null-ptr) 0 0)
menuitem)))
(defun create-bar-sub-menu (menu name)
(with-cstrs ((name name))
(let* ((menuitem (#_gtk_menu_item_new_with_label name)))
(#_gtk_menu_shell_append menu menuitem)
(#_gtk_widget_show menuitem)
(let* ((submenu (#_gtk_menu_new)))
(#_gtk_menu_item_set_submenu menuitem submenu)
submenu))))
Represent string vectors as lists of strings . WITH - STRING - VECTOR
(defvar *xpm-one*
'(
"12 12 2 1"
" c None"
"X c #3333CC"
" "
" XX "
" XXX "
" X XX "
" XX "
" XX "
" XX "
" XX "
" XX "
" XXXXXX "
" "
" "
))
(defvar *xpm-two*
'(
"12 12 2 1"
" c None"
"X c #009900"
" "
" XXXXXX "
" X X "
" XX "
" XX "
" XX "
" XX "
" XX "
" XX "
" XXXXXXXX "
" "
" "
))
(defvar *xpm-three*
'(
"12 12 2 1"
" c None"
"X c #AA0000"
" "
" XXXXX "
" XX "
" XX "
" XXXXXX "
" XX "
" XX "
" XX "
" XX "
" XXXXXX "
" "
" "
))
(defvar *xpm-four*
'(
"12 12 2 1"
" c None"
"X c #000066"
" "
" XX XX "
" XX XX "
" XX XX "
" XX XX "
" XXXXXXXX "
" XX "
" XX "
" XX "
" XX "
" "
" "
))
(defvar *xpm-five*
'(
"12 12 2 1"
" c None"
"X c #992299"
" "
" XXXXXXXX "
" XX "
" XX "
" XXXXXXX "
" XX "
" XX "
" XX "
" XX XX "
" XXXXXXX "
" "
" "
))
(defvar *xpm-six*
'(
"12 12 2 1"
" c None"
"X c #550055"
" "
" XXXXXX "
" XX "
" XX "
" XXXXXXX "
" XX XX "
" XX XX "
" XX XX "
" XX XX "
" XXXXXX "
" "
" "
))
(defvar *xpm-seven*
'(
"12 12 2 1"
" c None"
"X c #550000"
" "
" XXXXXXXX "
" XX "
" XX "
" XX "
" XX "
" XX "
" WX "
" XX "
" XX "
" "
" "
))
(defvar *xpm-eight*
'(
"12 12 2 1"
" c None"
"X c #441144"
" "
" XXXXXX "
" XX XX "
" XX XX "
" XXXXXX "
" XX XX "
" XX XX "
" XX XX "
" XX XX "
" XXXXXX "
" "
" "
))
(defvar *xpm-flag*
'(
"12 12 4 1"
" c None"
"X c #000000"
"R c #FF0000"
"r c #AA0000"
" "
" RRRRRRR "
" RRRRRrr "
" RRRrrrr "
" Rrrrrrr "
" X "
" X "
" X "
" X "
" X "
" XXX "
" "
))
(defvar *xpm-bomb*
'(
"12 12 4 1"
" c None"
"X c #000000"
"R c #FF0000"
"r c #AA0000"
" "
" X "
" X X X "
" XXXXX "
" XXXXX "
" XXXXXXXXX "
" XXXXX "
" XXXXX "
" X X X "
" X "
" "
" "
))
(defvar *xpm-bigx*
'(
"12 12 4 1"
" c None"
"X c #000000"
"R c #FF0000"
"r c #AA0000"
"RRR RRR"
" RRR RRR "
" RRR RRR "
" RRRRRR "
" RRRR "
" RRRR "
" RRRR "
" RRRRRR "
" RRR RRR "
" RRR RRR "
"RRR RRR"
" "
))
(defvar *xpm-smile*
'(
"16 16 4 1"
" c None"
". c #000000"
"X c #FFFF00"
"r c #AA0000"
" ...... "
" ..XXXXXX.. "
" ..XXXXXXXXXX. "
" .XXXXXXXXXXXX. "
" .XX..XXXX..XX. "
".XXX..XXXX..XXX."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
" .XX.XXXXXX.XX. "
" .XXX......XXX. "
" .XXXXXXXXXX. "
" ..XXXXXX.. "
" ...... "
" "
))
(defvar *xpm-frown*
'(
"16 16 4 1"
" c None"
". c #000000"
"X c #FFFF00"
"r c #AA0000"
" ...... "
" ..XXXXXX.. "
" ..XXXXXXXXXX. "
" .XXXXXXXXXXXX. "
" .XX.X.XX.X.XX. "
".XXXX.XXXX.XXXX."
".XXX.X.XX.X.XXX."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
" .XXX......XXX. "
" .XX.XXXXXX.XX. "
" .XXXXXXXXXX. "
" ..XXXXXX.. "
" ...... "
" "
))
(defvar *xpm-winner*
'(
"16 16 4 1"
" c None"
". c #000000"
"X c #FFFF00"
"r c #AA0000"
" ...... "
" ..XXXXXX.. "
" ..XXXXXXXXXX. "
" .XXXXXXXXXXXX. "
" .XX...XX...XX. "
".XX..........XX."
".X.X...XX...X.X."
"..XXXXXXXXXXXX.."
".XXXXXXXXXXXXXX."
".XXXXXXXXXXXXXX."
" .XX.XXXXXX.XX. "
" .XXX......XXX. "
" .XXXXXXXXXX. "
" ..XXXXXX.. "
" ...... "
" "
))
(defvar *digits*
(vector nil *xpm-one* *xpm-two* *xpm-three* *xpm-four* *xpm-five*
*xpm-six* *xpm-seven* *xpm-eight*))
(defun set-grid (ncols nrows nbombs)
(when *table*
(#_gtk_widget_destroy *table*))
(setq *table* (#_gtk_table_new ncols nrows #$FALSE))
(#_gtk_box_pack_start *vbox* *table* #$FALSE #$FALSE 0)
(#_gtk_widget_realize *table*)
(reset-game ncols nrows nbombs t)
(#_gtk_widget_show *table*))
(defcallback start-button-clicked (:address widget :address data :void)
(declare (ignore widget data))
(set-start-button-icon *xpm-smile*)
(reset-game *ncols* *nrows* *ntotalbombs* nil))
(defcallback action-beginner
(:address widget :address data :void)
(declare (ignore data))
(unless (zerop (pref widget :<G>tk<C>heck<M>enu<I>tem.active))
(set-grid 10 10 10)))
(defcallback action-intermediate
(:address widget :address data :void)
(declare (ignore data))
(unless (zerop (pref widget :<G>tk<C>heck<M>enu<I>tem.active))
(set-grid 20 15 40)))
(defcallback action-advanced
(:address widget :address data :void)
(declare (ignore data))
(unless (zerop (pref widget :<G>tk<C>heck<M>enu<I>tem.active))
(set-grid 30 20 100)))
(defcallback action-quit (:address widget :address data :void)
(declare (ignore widget))
(stop-timer)
(#_gtk_widget_destroy data)
(reset-minesweeper-globals))
(defcallback action-about (:void)
(show-about))
(defun create-menu (window vbox-main)
(setq *win-main* window)
(setq *accel-group* (#_gtk_accel_group_new))
(#_gtk_window_add_accel_group *win-main* *accel-group*)
(let* ((menubar (#_gtk_menu_bar_new)))
(#_gtk_box_pack_start vbox-main menubar #$FALSE #$TRUE 0)
(#_gtk_widget_show menubar)
(let* ((game-menu (create-bar-sub-menu menubar "Game")))
(create-menu-item game-menu
"New" "^N" "New Game" start-button-clicked nil)
(create-menu-item game-menu nil nil nil nil nil)
(rlet ((group (* t)))
(setf (%get-ptr group) (%null-ptr))
(with-macptrs ((group-ptr group))
(create-radio-menu-item game-menu "Beginner" group-ptr
action-beginner nil)
(create-radio-menu-item game-menu "Intermediate" group-ptr
action-intermediate nil)
(create-radio-menu-item game-menu "Advanced" group-ptr
action-advanced nil)))
(create-menu-item game-menu nil nil nil nil nil)
(create-menu-item game-menu "Quit" nil "Quit game"
action-quit *win-main*))
(let* ((help-menu (create-bar-sub-menu menubar "Help")))
(create-menu-item help-menu "About Minesweeper" nil "Gory Details"
action-about nil))))
(defparameter *cells*
(let* ((a (make-array (list max-cols max-rows))))
(dotimes (row max-rows a)
(dotimes (col max-cols)
(setf (aref a col row)
(make-cell :row row :col col))))))
(defun cell->cell-id (cell)
(dpb (cell-row cell)
(byte 8 8)
(cell-col cell)))
(defun cell-id->cell (cell-id)
(let* ((id (if (typep cell-id 'macptr)
(%ptr-to-int cell-id)
cell-id))
(row (ldb (byte 8 8) id))
(col (ldb (byte 8 0) id)))
(declare (fixnum id row col))
(aref *cells* col row)))
(defcallback FreeChildCallback (:address widget :void)
(#_gtk_widget_destroy widget))
(defun free-children (widget)
(#_gtk_container_foreach
(#_g_type_check_instance_cast widget (#_gtk_container_get_type))
FreeChildCallback (%null-ptr)))
(defun add-image-to-mine (cell xpm-data)
(let* ((widget (create-widget-from-xpm *table* xpm-data)))
(#_gtk_container_add (cell-button cell) widget)
(#_gdk_drawable_unref widget)
nil))
(defun open-nearby-squares (col row)
(declare (fixnum col row))
(let* ((mincol (max (1- col) 0))
(maxcol (min (1+ col) (1- *ncols*)))
(minrow (max (1- row) 0))
(maxrow (min (1+ row) (1- *nrows*))))
(declare (fixnum mincol maxcol minrow maxrow))
(do* ((i mincol (1+ i)))
((> i maxcol))
(declare (fixnum i))
(do* ((j minrow (1+ j)))
((> j maxrow))
(declare (fixnum j))
(display-hidden-info (aref *cells* i j))))))
(defun display-hidden-info (cell)
(case (cell-buttonstate cell)
(:button-down
(#_gtk_toggle_button_set_active (cell-button cell) #$TRUE))
(:button-flagged
(#_gtk_toggle_button_set_active (cell-button cell) #$FALSE))
(t
(setf (cell-buttonstate cell) :button-down)
(#_gtk_toggle_button_set_active (cell-button cell) #$TRUE)
(setf (pref (cell-button cell) :<G>tk<B>utton.button_down) #$TRUE)
(if (cell-has-bomb cell)
(add-image-to-mine cell *xpm-bomb*)
(let* ((nearby-bombs (cell-bombsnearby cell)))
(declare (fixnum nearby-bombs))
(if (> nearby-bombs 0)
(add-image-to-mine cell (svref *digits* nearby-bombs))
(open-nearby-squares (cell-col cell) (cell-row cell))))))))
(defun show-bombs ()
(dotimes (i *ncols*)
(dotimes (j *nrows*)
(let* ((cell (aref *cells* i j))
(buttonstate (cell-buttonstate cell))
(has-bomb (cell-has-bomb cell)))
(if (and (eq buttonstate :button-unknown) has-bomb)
(display-hidden-info cell)
(when (and (eq buttonstate :button-flagged) (not has-bomb))
(free-children (cell-button cell))
(add-image-to-mine cell *xpm-bigx*)))))))
(defcallback cell-toggled (:address widget :address data :void)
(let* ((cell (cell-id->cell data))
(state (cell-buttonstate cell)))
(unless (eq state :button-flagged)
(if *bgameover*
(#_gtk_toggle_button_set_active widget
(if (eq state
:button-down)
#$TRUE
#$FALSE))
(unless *bresetgame*
(start-timer)
(cond ((cell-has-bomb cell)
(setq *bgameover* t)
(set-start-button-icon *xpm-frown*)
(stop-timer)
(show-bombs))
(t
(display-hidden-info cell)
(check-for-win))))))))
(defcallback button-press (:address widget :address event :address data :void)
(unless *bgameover*
(when (and (eql (pref event :<G>dk<E>vent<B>utton.type) #$GDK_BUTTON_PRESS)
(eql (pref event :<G>dk<E>vent<B>utton.button) 3))
(let* ((cell (cell-id->cell data)))
(case (cell-buttonstate cell)
(:button-unknown
(free-children widget)
(setf (cell-buttonstate cell) :button-flagged)
(add-image-to-mine cell *xpm-flag*)
(decf *nbombsleft*))
(:button-flagged
(free-children widget)
(setf (cell-buttonstate cell) :button-unknown)
(incf *nbombsleft*)))
(display-bomb-count)
(check-for-win)))))
(defun set-start-button-icon (xpm-list)
(let* ((widget (create-widget-from-xpm *start-button* xpm-list)))
(free-children *start-button*)
(#_gtk_container_add *start-button* widget)))
(defun check-for-win ()
(let* ((nmines 0))
(declare (fixnum nmines))
(dotimes (col *ncols*)
(declare (fixnum col))
(dotimes (row *nrows*)
(declare (fixnum row))
(when (member (cell-buttonstate (aref *cells* col row))
'(:button-unknown :button-flagged))
(incf nmines))))
(when (= nmines (the fixnum *ntotalbombs*))
(stop-timer)
(set-start-button-icon *xpm-winner*)
(setq *bgameover* t))))
(defun create-button (table cell row column)
(let* ((button
(if *minesweeper-use-quiet-toggle-buttons*
(let* ((b (gtk-quiet-toggle-button-new))
(id (cell->cell-id (aref *cells* column row))))
(with-cstrs ((cell-id "cell-id"))
(#_gtk_object_set_data b cell-id (%int-to-ptr id)))
b)
(#_gtk_toggle_button_new)))
(cell-id (cell->cell-id cell)))
(with-cstrs ((toggled "toggled")
(button-press-event "button_press_event"))
(#_gtk_signal_connect_full button toggled cell-toggled
(%null-ptr) (%int-to-ptr cell-id) (%null-ptr) 0 0)
(#_gtk_signal_connect_full button button-press-event
button-press (%null-ptr) (%int-to-ptr cell-id) (%null-ptr) 0 0))
(#_gtk_table_attach table button
column (1+ column)
(1+ row) (+ row 2)
(logior #$GTK_FILL #$GTK_EXPAND)
(logior #$GTK_FILL #$GTK_EXPAND)
0 0)
(#_gtk_widget_set_usize button button-width button-height)
(#_gtk_widget_show button)
button))
(defun count-nearby-bombs (col row)
(declare (fixnum col row))
(let* ((mincol (max (1- col) 0))
(maxcol (min (1+ col) (1- *ncols*)))
(minrow (max (1- row) 0))
(maxrow (min (1+ row) (1- *nrows*)))
(ncount 0))
(declare (fixnum mincol maxcol minrow maxrow ncount))
(do* ((i mincol (1+ i)))
((> i maxcol) ncount)
(declare (fixnum i))
(do* ((j minrow (1+ j)))
((> j maxrow))
(declare (fixnum j))
(if (cell-has-bomb (aref *cells* i j))
(incf ncount))))))
(defun display-bomb-count ()
(with-cstrs ((buf (format nil "Bombs: ~d" *nbombsleft*)))
(#_gtk_label_set_text *bombs-label* buf)))
(defun update-seconds (seconds)
(with-cstrs ((buf (format nil "Time: ~d" seconds)))
(#_gtk_label_set_text *time-label* buf)))
(defun create-minesweeper-buttons (table ngridcols ngridrows bnewbuttons)
(setq *nrows* ngridrows
*ncols* ngridcols
*bgameover* nil
*bresetgame* t)
(display-bomb-count)
(dotimes (ci *ncols*)
(declare (fixnum ci))
(dotimes (ri *nrows*)
(declare (fixnum ri))
(let* ((cell (aref *cells* ci ri)))
(setf (cell-has-bomb cell) nil
(cell-buttonstate cell) :button-unknown)
(if bnewbuttons
(setf (cell-button cell) (create-button table cell ri ci))
(progn
(free-children (cell-button cell))
(#_gtk_toggle_button_set_active (cell-button cell) #$FALSE))))))
(do* ((nbombs *ntotalbombs*)
(state (make-random-state t)))
((zerop nbombs))
(declare (fixnum nbombs))
(let* ((cell (aref *cells* (random *ncols* state) (random *nrows* state))))
(unless (cell-has-bomb cell)
(setf (cell-has-bomb cell) t)
(decf nbombs))))
(dotimes (ci *ncols*)
(declare (fixnum ci))
(dotimes (ri *nrows*)
(declare (fixnum ri))
(setf (cell-bombsnearby (aref *cells* ci ri))
(count-nearby-bombs ci ri))))
(setq *bresetgame* nil))
(defun reset-game (ncols nrows nbombs bnewbuttons)
(setq *ntotalbombs* nbombs
*nbombsleft* nbombs)
(create-minesweeper-buttons *table* ncols nrows bnewbuttons)
(stop-timer)
(update-seconds 0)
(set-start-button-icon *xpm-smile*))
(defvar *timer* nil)
(defvar *nseconds* 0)
(defcallback timer-callback (:address data :void)
(declare (ignore data))
(incf *nseconds*)
(update-seconds *nseconds*))
(defun start-timer ()
(unless *timer*
(setq *nseconds* 0
*timer* (#_gtk_timeout_add 1000 timer-callback *win-main*))))
(defun stop-timer ()
(when *timer*
(#_gtk_timeout_remove *timer*)
(setq *timer* nil)))
(defun minesweeper ()
(when *win-main*
(cerror
"Close current minesweeper game and start a new one"
"It seems that a minesweeper game is already active.")
(do* ()
((null *win-main*))
(#_gtk_widget_destroy *win-main*)
(sleep 1)))
(let* ((window (#_gtk_window_new #$GTK_WINDOW_TOPLEVEL)))
(#_gtk_window_set_policy window #$FALSE #$FALSE #$TRUE)
(with-cstrs ((window-title "Minesweeper"))
(#_gtk_window_set_title window window-title)
(setq *vbox* (#_gtk_vbox_new #$FALSE 1))
(#_gtk_widget_show *vbox*)
(create-menu window *vbox*)
(let* ((hbox (#_gtk_hbox_new #$TRUE 1)))
(#_gtk_widget_show hbox)
(#_gtk_box_pack_start *vbox* hbox #$FALSE #$FALSE 0)
(with-cstrs ((len0-string ""))
(setq *bombs-label* (#_gtk_label_new len0-string)
*time-label* (#_gtk_label_new len0-string)))
(#_gtk_box_pack_start hbox *bombs-label* #$FALSE #$FALSE 0)
(#_gtk_widget_show *bombs-label*)
(setq *start-button* (#_gtk_button_new))
(with-cstrs ((clicked "clicked"))
(#_gtk_signal_connect_full *start-button* clicked start-button-clicked
(%null-ptr) (%null-ptr) (%null-ptr) 0 0))
(#_gtk_box_pack_start hbox *start-button* #$FALSE #$FALSE 0)
(#_gtk_widget_show *start-button*)
(#_gtk_box_pack_start hbox *time-label* #$FALSE #$FALSE 0)
(#_gtk_widget_show *time-label*)
(#_gtk_widget_show hbox)
(#_gtk_container_add window *vbox*)
(with-cstrs ((destroy "destroy"))
(#_gtk_signal_connect_full window destroy action-quit (%null-ptr) window (%null-ptr) 0 0))
(#_gtk_widget_show window)
(set-start-button-icon *xpm-smile*)
(set-grid 10 10 10)))))
|
6404b1de0b3387bdd28af34912d8a3cabfe1f112759ad26c28d5c86f25b12e4d | penpot/penpot | svg_raw.cljs | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
;;
;; Copyright (c) KALEIDOS INC
(ns app.main.ui.workspace.shapes.svg-raw
(:require
[app.main.refs :as refs]
[app.main.ui.shapes.shape :refer [shape-container]]
[app.main.ui.shapes.svg-raw :as svg-raw]
[app.util.svg :as usvg]
[rumext.v2 :as mf]))
(defn svg-raw-wrapper-factory
[shape-wrapper]
(let [svg-raw-shape (svg-raw/svg-raw-shape shape-wrapper)]
(mf/fnc svg-raw-wrapper
{::mf/wrap [#(mf/memo' % (mf/check-props ["shape"]))]
::mf/wrap-props false}
[props]
(let [shape (unchecked-get props "shape")
childs-ref (mf/use-memo (mf/deps (:id shape)) #(refs/children-objects (:id shape)))
childs (mf/deref childs-ref)
svg-tag (get-in shape [:content :tag])]
(if (contains? usvg/svg-group-safe-tags svg-tag)
[:> shape-container {:shape shape}
[:& svg-raw-shape {:shape shape
:childs childs}]]
[:& svg-raw-shape {:shape shape
:childs childs}])))))
| null | https://raw.githubusercontent.com/penpot/penpot/7303d311d5f23d515fa3fcdc6cd13cf7f429d1fe/frontend/src/app/main/ui/workspace/shapes/svg_raw.cljs | clojure |
Copyright (c) KALEIDOS INC | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(ns app.main.ui.workspace.shapes.svg-raw
(:require
[app.main.refs :as refs]
[app.main.ui.shapes.shape :refer [shape-container]]
[app.main.ui.shapes.svg-raw :as svg-raw]
[app.util.svg :as usvg]
[rumext.v2 :as mf]))
(defn svg-raw-wrapper-factory
[shape-wrapper]
(let [svg-raw-shape (svg-raw/svg-raw-shape shape-wrapper)]
(mf/fnc svg-raw-wrapper
{::mf/wrap [#(mf/memo' % (mf/check-props ["shape"]))]
::mf/wrap-props false}
[props]
(let [shape (unchecked-get props "shape")
childs-ref (mf/use-memo (mf/deps (:id shape)) #(refs/children-objects (:id shape)))
childs (mf/deref childs-ref)
svg-tag (get-in shape [:content :tag])]
(if (contains? usvg/svg-group-safe-tags svg-tag)
[:> shape-container {:shape shape}
[:& svg-raw-shape {:shape shape
:childs childs}]]
[:& svg-raw-shape {:shape shape
:childs childs}])))))
|
364cb161c719a64a138c30820363cf3c69ba47663a317e2fa0023c54af7bdd76 | xnning/EvEff | Ctl.hs | # LANGUAGE GADTs , -- match on for type equality
ExistentialQuantification -- forall b ans . Yield ...
#
ExistentialQuantification -- forall b ans. Yield ...
#-}
|
Description : Internal module for type - safe multi - prompt control
Copyright : ( c ) 2020 , Microsoft Research ; ;
License : MIT
Maintainer : ;
Stability : Experimental
Primitive module that implements type safe multi - prompt control .
Used by the " Control . Ev . Eff " module to implement effect handlers .
Description : Internal module for type-safe multi-prompt control
Copyright : (c) 2020, Microsoft Research; Daan Leijen; Ningning Xie
License : MIT
Maintainer : ;
Stability : Experimental
Primitive module that implements type safe multi-prompt control.
Used by the "Control.Ev.Eff" module to implement effect handlers.
-}
module Control.Ev.Ctl(
-- * Markers
Marker -- prompt marker
: : a - > Marker b - > Bool
-- * Control monad
, Ctl(Pure) -- multi-prompt control monad
run the control monad : : a
install a multi - prompt : : ( a - > Ctl a ) - > Ctl a
yield to a specific prompt : : ( ( b - > ) - > ) - > Ctl b
-- * Unsafe primitives for "Control.Ev.Eff"
lift IO into Ctl : : IO a - > Ctl a
, unsafePromptIORef -- IORef that gets restored per resumption
) where
import Prelude hiding (read,flip)
import Control.Monad( ap, liftM )
import Data.Type.Equality( (:~:)( Refl ) )
import Control.Monad.Primitive
-------------------------------------------------------
-- Assume some way to generate a fresh prompt marker
-- associated with specific answer type.
-------------------------------------------------------
import Unsafe.Coerce ( unsafeCoerce )
import System.IO.Unsafe ( unsafePerformIO )
import Data.IORef
-- | An abstract prompt marker
data Marker a = Marker !Integer
instance Show (Marker a) where
show (Marker i) = show i
instance Eq (Marker a) where
m1 == m2 = markerEq m1 m2
| Compare two markers of different types for equality
markerEq :: Marker a -> Marker b -> Bool
markerEq (Marker i) (Marker j) = (i == j)
-- if markers match, their types are the same
mmatch :: Marker a -> Marker b -> Maybe ((:~:) a b)
mmatch (Marker i) (Marker j) | i == j = Just (unsafeCoerce Refl)
mmatch _ _ = Nothing
-- global unique counter
# NOINLINE unique #
unique :: IORef Integer
unique = unsafePerformIO (newIORef 0)
-- evaluate a action with a fresh marker
# NOINLINE freshMarker #
freshMarker :: (Marker a -> Ctl a) -> Ctl a
freshMarker f
= let m = unsafePerformIO $
do i <- readIORef unique;
writeIORef unique (i+1);
return i
in seq m (f (Marker m))
| The Multi Prompt control monad ,
with existentials ` ans ` and ` b ` : where ` ans ` is the answer type , i.e. the type of the handler / prompt context ,
and ` b ` the result type of the operation .
with existentials `ans` and `b`: where `ans` is the answer type, i.e. the type of the handler/prompt context,
and `b` the result type of the operation.
-}
data Ctl a = Pure { result :: !a } -- ^ Pure results (only exported for use in the "Control.Ev.Eff" module)
| forall ans b.
Yield{ marker :: !(Marker ans), -- ^ prompt marker to yield to (in type context `::ans`)
^ the final action , just needs the resumption (: : b - > ) to be evaluated .
^ the ( partially ) build up resumption ; ` ( b - > Ctl a ) : ~ : ( b - > ) ` by the time we reach the prompt
}
| @yield m op@ yields to a specific marker and calls @op@ in that context
with a /resumption/ @k : : b - > Ctl ans@ that resumes at the original call - site
with a result of type If the marker is no longer in the evaluation context ,
( i.e. it escaped outside its prompt ) the ` yield ` fails with an @"unhandled operation"@ error .
{-# INLINE yield #-}
yield :: Marker ans -> ((b -> Ctl ans) -> Ctl ans) -> Ctl b
yield m op = Yield m op Pure
# INLINE kcompose #
kcompose :: (b -> Ctl c) -> (a -> Ctl b) -> a -> Ctl c -- Kleisli composition
kcompose g f x = case (f x) of
Pure x -> g x
Yield m op cont -> Yield m op (g `kcompose` cont)
# INLINE bind #
bind :: Ctl a -> (a -> Ctl b) -> Ctl b
bind (Pure x) f = f x
bind (Yield m op cont) f = Yield m op (f `kcompose` cont) -- keep yielding with an extended continuation
instance Functor Ctl where
fmap = liftM
instance Applicative Ctl where
pure = return
(<*>) = ap
instance Monad Ctl where
return x = Pure x
e >>= f = bind e f
-- install a prompt with a unique marker (and handle yields to it)
# INLINE mprompt #
mprompt :: Marker a -> Ctl a -> Ctl a
mprompt m p@(Pure _) = p
mprompt m (Yield n op cont)
= let cont' x = mprompt m (cont x) in -- extend the continuation with our own prompt
case mmatch m n of
Nothing -> Yield n op cont' -- keep yielding (but with the extended continuation)
Just Refl -> op cont' -- found our prompt, invoke `op`.
Note : ` Refl ` proves ` a ~ ans ` ( the existential ` ans ` in Yield )
| Install a /prompt/ with a specific prompt ` Marker ` to which one can ` yield ` .
-- This connects creation of a marker with instantiating the prompt. The marker passed
-- to the @action@ argument should not escape the @action@ (but this is not statically checked,
-- only at runtime when `yield`ing to it).
{-# INLINE prompt #-}
prompt :: (Marker a -> Ctl a) -> Ctl a
prompt action
= freshMarker $ \m -> -- create a fresh marker
mprompt m (action m) -- and install a prompt associated with this marker
| Run a control monad . This may fail with an @"unhandled operation"@ error if
-- there is a `yield` to a marker that escaped its prompt scope.
runCtl :: Ctl a -> a
runCtl (Pure x) = x
runCtl (Yield _ _ _) = error "Unhandled operation" -- only if marker escapes the scope of the prompt
-------------------------------------------------------
-- IORef's
-------------------------------------------------------
| Unsafe ` IO ` in the ` Ctl ` monad .
# INLINE unsafeIO #
unsafeIO :: IO a -> Ctl a
unsafeIO io = let x = unsafeInlinePrim io in seq x (Pure x)
-- A special prompt that saves and restores state per resumption
-- Every time the computation yields through this prompt, the current value
-- of the `IORef` is captured; resuming first writes that value back, so
-- each resumption observes the state it had when it was suspended.
mpromptIORef :: IORef a -> Ctl b -> Ctl b
mpromptIORef r action
  = case action of
      p@(Pure _) -> p
      Yield m op cont
        -> do val <- unsafeIO (readIORef r)                 -- save current value on yielding
              let cont' x = do unsafeIO (writeIORef r val)  -- restore saved value on resume
                               mpromptIORef r (cont x)      -- and keep protecting the ref below
              Yield m op cont'
-- | Create an `IORef` connected to a prompt. The value of
-- the `IORef` is saved and restored through resumptions.
unsafePromptIORef :: a -> (Marker b -> IORef a -> Ctl b) -> Ctl b
unsafePromptIORef initial action =
  freshMarker $ \marker ->
    unsafeIO (newIORef initial) >>= \ref ->
      mpromptIORef ref (action marker ref)
| null | https://raw.githubusercontent.com/xnning/EvEff/c003c04f05c89e8680dc6b2604dc313cd7dea1c8/src/Control/Ev/Ctl.hs | haskell | match on for type equality
forall b ans . Yield ...
forall b ans. Yield ...
* Markers
prompt marker
* Control monad
multi-prompt control monad
* Unsafe primitives for "Control.Ev.Eff"
IORef that gets restored per resumption
-----------------------------------------------------
Assume some way to generate a fresh prompt marker
associated with specific answer type.
-----------------------------------------------------
| An abstract prompt marker
if markers match, their types are the same
global unique counter
evaluate a action with a fresh marker
^ Pure results (only exported for use in the "Control.Ev.Eff" module)
^ prompt marker to yield to (in type context `::ans`)
# INLINE yield #
Kleisli composition
keep yielding with an extended continuation
install a prompt with a unique marker (and handle yields to it)
extend the continuation with our own prompt
keep yielding (but with the extended continuation)
found our prompt, invoke `op`.
This connects creation of a marker with instantiating the prompt. The marker passed
to the @action@ argument should not escape the @action@ (but this is not statically checked,
only at runtime when `yield`ing to it).
# INLINE prompt #
create a fresh marker
and install a prompt associated with this marker
there is a `yield` to a marker that escaped its prompt scope.
only if marker escapes the scope of the prompt
-----------------------------------------------------
IORef's
-----------------------------------------------------
A special prompt that saves and restores state per resumption
save current value on yielding
restore saved value on resume
| Create an `IORef` connected to a prompt. The value of
the `IORef` is saved and restored through resumptions. | #
#-}
|
Description : Internal module for type-safe multi-prompt control
Copyright : (c) 2020, Microsoft Research; Daan Leijen; Ningning Xie
License : MIT
Maintainer : ;
Stability : Experimental
Primitive module that implements type safe multi-prompt control.
Used by the "Control.Ev.Eff" module to implement effect handlers.
Description : Internal module for type-safe multi-prompt control
Copyright : (c) 2020, Microsoft Research; Daan Leijen; Ningning Xie
License : MIT
Maintainer : ;
Stability : Experimental
Primitive module that implements type safe multi-prompt control.
Used by the "Control.Ev.Eff" module to implement effect handlers.
-}
module Control.Ev.Ctl(
: : a - > Marker b - > Bool
run the control monad : : a
install a multi - prompt : : ( a - > Ctl a ) - > Ctl a
yield to a specific prompt : : ( ( b - > ) - > ) - > Ctl b
lift IO into Ctl : : IO a - > Ctl a
) where
import Prelude hiding (read,flip)
import Control.Monad( ap, liftM )
import Data.Type.Equality( (:~:)( Refl ) )
import Control.Monad.Primitive
import Unsafe.Coerce ( unsafeCoerce )
import System.IO.Unsafe ( unsafePerformIO )
import Data.IORef
-- | An abstract prompt marker. The phantom type parameter records the
-- answer type of the prompt it identifies; the payload is just the
-- globally unique integer handed out by `freshMarker`.
data Marker a = Marker !Integer
instance Show (Marker a) where
  show (Marker i) = show i
instance Eq (Marker a) where
  -- Equality compares the unique integers only (see `markerEq`).
  m1 == m2 = markerEq m1 m2
-- | Compare two markers of different types for equality
-- (only the unique integer identities are compared).
markerEq :: Marker a -> Marker b -> Bool
markerEq (Marker i) (Marker j) = (i == j)
-- If the markers match, their types are the same: the `unsafeCoerce` of
-- `Refl` is justified only because `freshMarker` never reuses an integer,
-- so equal identities can only arise from the very same marker (and thus
-- the same answer type).
mmatch :: Marker a -> Marker b -> Maybe ((:~:) a b)
mmatch (Marker i) (Marker j) | i == j = Just (unsafeCoerce Refl)
mmatch _ _ = Nothing
-- Global unique counter backing `freshMarker`. NOINLINE is essential:
-- inlining would duplicate the `unsafePerformIO` and create several
-- counters, breaking marker uniqueness.
{-# NOINLINE unique #-}
unique :: IORef Integer
unique = unsafePerformIO (newIORef 0)
-- Evaluate an action with a freshly generated marker.
-- NOINLINE plus the `seq` on `m` force the counter increment to happen
-- exactly once per call, before the action runs.
{-# NOINLINE freshMarker #-}
freshMarker :: (Marker a -> Ctl a) -> Ctl a
freshMarker f
  = let m = unsafePerformIO $
            do i <- readIORef unique;
               writeIORef unique (i+1);
               return i
    in seq m (f (Marker m))
| The Multi Prompt control monad ,
with existentials ` ans ` and ` b ` : where ` ans ` is the answer type , i.e. the type of the handler / prompt context ,
and ` b ` the result type of the operation .
with existentials `ans` and `b`: where `ans` is the answer type, i.e. the type of the handler/prompt context,
and `b` the result type of the operation.
-}
| forall ans b.
^ the final action , just needs the resumption (: : b - > ) to be evaluated .
^ the ( partially ) build up resumption ; ` ( b - > Ctl a ) : ~ : ( b - > ) ` by the time we reach the prompt
}
-- | @yield m op@ yields to a specific marker and calls @op@ in that context
-- with a /resumption/ @k :: b -> Ctl ans@ that resumes at the original call-site
-- with a result of type @b@. If the marker is no longer in the evaluation context
-- (i.e. it escaped outside its prompt) the `yield` fails with an @"unhandled operation"@ error.
yield :: Marker ans -> ((b -> Ctl ans) -> Ctl ans) -> Ctl b
yield m op = Yield m op Pure
-- Kleisli composition for `Ctl`: used to extend the continuation stored in
-- a `Yield` without forcing it.
{-# INLINE kcompose #-}
kcompose :: (b -> Ctl c) -> (a -> Ctl b) -> a -> Ctl c
kcompose g f x = case (f x) of
  -- note: this `x` shadows the argument above
  Pure x -> g x
  Yield m op cont -> Yield m op (g `kcompose` cont)
# INLINE bind #
bind :: Ctl a -> (a -> Ctl b) -> Ctl b
bind (Pure x) f = f x
instance Functor Ctl where
fmap = liftM
instance Applicative Ctl where
pure = return
(<*>) = ap
instance Monad Ctl where
return x = Pure x
e >>= f = bind e f
# INLINE mprompt #
mprompt :: Marker a -> Ctl a -> Ctl a
mprompt m p@(Pure _) = p
mprompt m (Yield n op cont)
case mmatch m n of
Note : ` Refl ` proves ` a ~ ans ` ( the existential ` ans ` in Yield )
| Install a /prompt/ with a specific prompt ` Marker ` to which one can ` yield ` .
prompt :: (Marker a -> Ctl a) -> Ctl a
prompt action
| Run a control monad . This may fail with an @"unhandled operation"@ error if
runCtl :: Ctl a -> a
runCtl (Pure x) = x
| Unsafe ` IO ` in the ` Ctl ` monad .
# INLINE unsafeIO #
unsafeIO :: IO a -> Ctl a
unsafeIO io = let x = unsafeInlinePrim io in seq x (Pure x)
mpromptIORef :: IORef a -> Ctl b -> Ctl b
mpromptIORef r action
= case action of
p@(Pure _) -> p
Yield m op cont
mpromptIORef r (cont x)
Yield m op cont'
unsafePromptIORef :: a -> (Marker b -> IORef a -> Ctl b) -> Ctl b
unsafePromptIORef init action
= freshMarker $ \m ->
do r <- unsafeIO (newIORef init)
mpromptIORef r (action m r)
|
3f1125c117019726a71ff697695717d909aca26260845cc2fd7ee78fce711f07 | TrustInSoft/tis-kernel | value_parameters.ml | (**************************************************************************)
(* *)
(*  This file is part of TrustInSoft Kernel.                              *)
(*                                                                        *)
(*  TrustInSoft Kernel is a fork of Frama-C. All the differences are:     *)
(*    Copyright (C) 2016-2017 TrustInSoft                                 *)
(*                                                                        *)
(*  TrustInSoft Kernel is released under GPLv2                            *)
(*                                                                        *)
(**************************************************************************)
(**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
(* Dependencies to kernel options *)
(* Kernel (command-line) options that change the semantics of the analyzed
   program: Value's saved results must be invalidated when any of them
   changes. *)
let kernel_parameters_correctness = [
  Kernel.MainFunction.parameter;
  Kernel.LibEntry.parameter;
  Kernel.AbsoluteValidRange.parameter;
  Kernel.SafeArrays.parameter;
  Kernel.UnspecifiedAccess.parameter;
  Kernel.SignedOverflow.parameter;
  Kernel.UnsignedOverflow.parameter;
  Kernel.ConstReadonly.parameter;
]
(* Value's own parameters, classified as affecting either the correctness or
   only the precision of the results. Filled by the two helpers below. *)
let parameters_correctness = ref []
let parameters_tuning = ref []
(* Make Value's results (Db.Value.self) depend on parameter [p]: changing
   [p] on the command line or programmatically clears them. *)
let add_dep p =
  State_dependency_graph.add_codependencies
    ~onto:Db.Value.self
    [State.get p.Typed_parameter.name]
(* [p] changes the meaning of the results. *)
let add_correctness_dep p =
  add_dep p;
  parameters_correctness := p :: !parameters_correctness
(* [p] only changes how precise the results are. *)
let add_precision_dep p =
  add_dep p;
  parameters_tuning := p :: !parameters_tuning
let () = List.iter add_correctness_dep kernel_parameters_correctness
(* Keep aliases before [include Plugin.Register] shadows [Config]/[String]. *)
module Fc_config = Config
module Caml_string = String
(* Register the plugin itself: brings logging functions ([abort], [warning],
   ...), [add_group], and the parameter builders into scope. *)
include Plugin.Register
    (struct
      let name = "value analysis"
      let shortname = "value"
      let help =
        "automatically computes variation domains for the variables of the \
         program"
    end)
let () = Help.add_aliases [ "-val-h" ]
(* -val: main switch for running the analysis, with output control. *)
module ForceValues =
  WithOutput
    (struct
      let option_name = "-val"
      let help = "compute values"
      let output_by_default = true
    end)
(* Option groups used to structure the plugin's help message. *)
let precision_tuning = add_group "Precision vs. time"
let initial_context = add_group "Initial Context"
let performance = add_group "Results memoization vs. time"
let interpreter = add_group "Deterministic programs"
(* NOTE(review): the trailing space in the group name below looks accidental;
   confirm the string is not matched elsewhere before removing it. *)
let alarms = add_group "Propagation and alarms "
(* -------------------------------------------------------------------------- *)
(* --- Performance options --- *)
(* -------------------------------------------------------------------------- *)
let () = Parameter_customize.set_group performance
(* -no-results-function f: drop per-statement results for function f only. *)
module NoResultsFunctions =
  Kernel_function_set
    (struct
      let option_name = "-no-results-function"
      let arg_name = "f"
      let help = "do not record the values obtained for the statements of \
                  function f"
    end)
let () = add_dep NoResultsFunctions.parameter
let () = Parameter_customize.set_group performance
let () = Parameter_customize.set_negative_option_name "-val-store-results"
(* -no-results: drop per-statement results for the whole program. *)
module NoResultsAll =
  False
    (struct
      let option_name = "-no-results"
      let help = "do not record values for any of the statements of the \
                  program"
    end)
let () = add_dep NoResultsAll.parameter
let () = Parameter_customize.set_group performance
module ExitOnDegeneration =
False
(struct
let option_name = "-val-exit-on-degeneration"
let help = "if the value analysis degenerates, exit immediately with return code 2"
end)
let () = add_dep ExitOnDegeneration.parameter
let () = Parameter_customize.set_group performance
let () = Parameter_customize.is_invisible ()
module ResultsAfter =
Bool
(struct
let option_name = "-val-after-results"
let help = "record precisely the values obtained after the evaluation of each statement"
let default = true
end)
let () =
ResultsAfter.add_set_hook
(fun _ new_ ->
if new_ then
Kernel.feedback "@[Option -val-after-results is now always set.@]"
else
Kernel.warning "@[Option -val-after-results can no longer be unset.@]")
let () = Parameter_customize.set_group performance
let () = Parameter_customize.is_invisible ()
module ResultsCallstack =
Bool
(struct
let option_name = "-val-callstack-results"
let help = "always enabled, cannot be disabled: used to record precisely the values obtained for each callstack leading to each statement"
let default = false
end)
let () = add_precision_dep ResultsCallstack.parameter
let () = Parameter_customize.set_group performance
module JoinResults =
Bool
(struct
let option_name = "-val-join-results"
let help = "precompute consolidated states once value is computed"
let default = true
end)
let () = Parameter_customize.set_group performance
module ResultsSlevel =
False
(struct
let option_name = "-val-slevel-results"
let help = "store states by slevel (before state only)"
end)
module WholeProgramGraph =
False
(struct
let option_name = "-whole-program-graph"
let help = "Compute a whole-program result graph (needed for some plugins)"
end)
(* NOTE(review): this [set_group] call is immediately repeated below before
   any option is declared, so this first call appears to be dead code. *)
let () = Parameter_customize.set_group performance
(* ------------------------------------------------------------------------- *)
(* --- Relational analyses --- *)
(* ------------------------------------------------------------------------- *)
let () = Parameter_customize.set_group performance
(* -val-reused-expressions: undocumented experimental toggle, off by
   default; no dependency is registered for it. *)
module ReusedExprs =
  Bool
    (struct
      let option_name = "-val-reused-expressions"
      let help = "undocumented"
      let default = false
    end)
(* ------------------------------------------------------------------------- *)
(* --- Non-standard alarms --- *)
(* ------------------------------------------------------------------------- *)
let () = Parameter_customize.set_group alarms
let () = Parameter_customize.set_negative_option_name
"-val-continue-on-pointer-library-function"
module AbortOnPointerLibraryFunction =
False
(struct
let option_name = "-val-abort-on-pointer-library-function"
let help = "Abort the analysis if a library function returning a \
pointer type is encountered"
end)
let () = add_correctness_dep AbortOnPointerLibraryFunction.parameter
let () = Parameter_customize.set_group alarms
module AllRoundingModes =
False
(struct
let option_name = "-all-rounding-modes"
let help = "Take more target FPU and compiler behaviors into account"
end)
let () = add_correctness_dep AllRoundingModes.parameter
let () = Parameter_customize.set_group alarms
module AllRoundingModesConstants =
False
(struct
let option_name = "-all-rounding-modes-constants"
let help = "Take into account the possibility of constants not being converted to the nearest representable value, or being converted to higher precision"
end)
let () = add_correctness_dep AllRoundingModesConstants.parameter
let () = Parameter_customize.set_group alarms
module UndefinedPointerComparisonPropagateAll =
False
(struct
let option_name = "-undefined-pointer-comparison-propagate-all"
let help = "if the target program appears to contain undefined pointer comparisons, propagate both outcomes {0; 1} in addition to the emission of an alarm"
end)
let () = add_correctness_dep UndefinedPointerComparisonPropagateAll.parameter
let () = Parameter_customize.set_group alarms
module WarnPointerComparison =
String
(struct
let option_name = "-val-warn-undefined-pointer-comparison"
let help = "warn on all pointer comparisons (default), on comparisons \
where the arguments have pointer type, or never warn"
let default = "all"
let arg_name = "all|pointer|none"
end)
let () = WarnPointerComparison.set_possible_values ["all"; "pointer"; "none"]
let () = add_correctness_dep WarnPointerComparison.parameter
let () = Parameter_customize.set_group alarms
module WarnLeftShiftNegative =
True
(struct
let option_name = "-val-warn-left-shift-negative"
let help =
"Emit alarms when left-shifting negative integers"
end)
let () = add_correctness_dep WarnLeftShiftNegative.parameter
let () = Parameter_customize.set_group alarms
let () = Parameter_customize.is_invisible ()
module LeftShiftNegativeOld =
True
(struct
let option_name = "-val-left-shift-negative-alarms"
let help =
"Emit alarms when left shifting negative integers"
end)
let () = LeftShiftNegativeOld.add_set_hook
(fun _oldv newv ->
let no = if newv then "" else "no-" in
warning "New option name for \
-%sval-left-shift-negative-alarms is -%sval-warn-left-shift-negative"
no no;
WarnLeftShiftNegative.set newv)
let () = Parameter_customize.set_group alarms
module WarnPointerSubstraction =
True
(struct
let option_name = "-val-warn-pointer-subtraction"
let help =
"Warn when subtracting two pointers that may not be in the same \
allocated block, and return the pointwise difference between the \
offsets. When unset, do not warn but generate imprecise offsets."
end)
let () = add_correctness_dep WarnPointerSubstraction.parameter
let () = Parameter_customize.set_group alarms
module WarnHarmlessFunctionPointer =
True
(struct
let option_name = "-val-warn-harmless-function-pointers"
let help =
"Warn for harmless mismatches between function pointer type and \
called function."
end)
let () = add_correctness_dep WarnHarmlessFunctionPointer.parameter
module WarnPointerArithmeticOutOfBounds =
False
(struct
let option_name = "-val-warn-pointer-arithmetic-out-of-bounds"
let help =
"Warn when adding an offset to a pointer produces an out-of-bounds \
pointer. When unset, do not warn but generate &a-1, &a+2..."
end)
let () = add_correctness_dep WarnPointerArithmeticOutOfBounds.parameter
let () = Parameter_customize.set_group alarms
module WarnVaArgTypeMismatch =
True
(struct
let option_name = "-val-warn-va-arg-type-mismatch"
let help =
"Warn for mismatches between the type parameter passed to the va_arg \
macro and the actual type of the next variadic argument."
end)
let () = add_correctness_dep WarnVaArgTypeMismatch.parameter
let () = Parameter_customize.set_group alarms
module IgnoreRecursiveCalls =
False
(struct
let option_name = "-val-ignore-recursive-calls"
let help =
"Pretend function calls that would be recursive do not happen. Causes unsoundness"
end)
let () = add_correctness_dep IgnoreRecursiveCalls.parameter
let () = Parameter_customize.set_group alarms
module WarnCopyIndeterminate =
Kernel_function_set
(struct
let option_name = "-val-warn-copy-indeterminate"
let arg_name = "f | @all"
let help = "warn when a statement of the specified functions copies a \
value that may be indeterminate (uninitalized or containing escaping address). \
Any number of function may be specified. If '@all' is present, this option \
becomes active for all functions. Inactive by default."
end)
let () = add_correctness_dep WarnCopyIndeterminate.parameter
let () = Parameter_customize.set_group alarms;;
module ShowTrace =
False
(struct
let option_name = "-val-show-trace"
let help =
"Compute and display execution traces together with alarms (experimental)"
end)
let () = ShowTrace.add_update_hook (fun _ b -> Trace.set_compute_trace b)
module ReduceOnLogicAlarms =
False
(struct
let option_name = "-val-reduce-on-logic-alarms"
let help = "Force reductions by a predicate to ignore logic alarms \
emitted while the predicated is evaluated (experimental)"
end)
let () = add_correctness_dep ReduceOnLogicAlarms.parameter
(* ------------------------------------------------------------------------- *)
(* --- Initial context --- *)
(* ------------------------------------------------------------------------- *)
let () = Parameter_customize.set_group initial_context
module AutomaticContextMaxDepth =
Int
(struct
let option_name = "-context-depth"
let default = 2
let arg_name = "n"
let help = "use <n> as the depth of the default context for value analysis. (defaults to 2)"
end)
let () = add_correctness_dep AutomaticContextMaxDepth.parameter
let () = Parameter_customize.set_group initial_context
module AutomaticContextMaxWidth =
Int
(struct
let option_name = "-context-width"
let default = 2
let arg_name = "n"
let help = "use <n> as the width of the default context for value analysis. (defaults to 2)"
end)
let () = AutomaticContextMaxWidth.set_range ~min:1 ~max:max_int
let () = add_correctness_dep AutomaticContextMaxWidth.parameter
let () = Parameter_customize.set_group initial_context
module AllocatedContextValid =
False
(struct
let option_name = "-context-valid-pointers"
let help = "only allocate valid pointers until context-depth, and then use NULL (defaults to false)"
end)
let () = add_correctness_dep AllocatedContextValid.parameter
let () = Parameter_customize.set_group initial_context
module InitializationPaddingGlobals =
String
(struct
let default = "yes"
let option_name = "-val-initialization-padding-globals"
let arg_name = "yes|no|maybe"
let help = "Specify how padding bits are initialized inside global \
variables. Possible values are <yes> (padding is fully initialized), \
<no> (padding is completely uninitialized), or <maybe> \
(padding may be uninitialized). Default is <yes>."
end)
let () = InitializationPaddingGlobals.set_possible_values
["yes"; "no"; "maybe"]
let () = add_correctness_dep InitializationPaddingGlobals.parameter
let () = Parameter_customize.set_group initial_context
let () = Parameter_customize.set_negative_option_name
"-uninitialized-padding-globals"
let () = Parameter_customize.is_invisible ()
module InitializedPaddingGlobals =
True
(struct
let option_name = "-initialized-padding-globals"
let help = "Padding in global variables is uninitialized"
end)
let () = add_correctness_dep InitializedPaddingGlobals.parameter
let () = InitializedPaddingGlobals.add_update_hook
(fun _ v ->
warning "This option is deprecated. Use %s instead"
InitializationPaddingGlobals.name;
InitializationPaddingGlobals.set (if v then "yes" else "no"))
let () = Parameter_customize.set_group initial_context
module EntryPointArgs =
String
(struct
let option_name = "-val-args"
let arg_name = "\" arg_1 arg_2 … arg_k\""
let default = ""
let help = "Pass arguments to the entry point function. If the \
entry point has type int (int argc, char * argv[]), start analysis \
with argc bound to k+1 and argv pointing to a NULL-terminated array \
of pointers to strings \"program\",\"arg_1\",..., \"arg_k\". \
The first character is used as separator to split the arguments, \
a space works well in the common cases."
end)
let () = Parameter_customize.set_group initial_context
module ProgramName =
String
(struct
let default = "program"
let option_name = "-val-program-name"
let arg_name = "name"
let help = "Specify the name of the program. Default is \"program\"."
end)
let () = Parameter_customize.set_group initial_context
let () = Parameter_customize.set_negative_option_name
"-val-enable-constructors"
module DisableConstructors =
False
(struct
let option_name = "-val-disable-constructors"
let help = "disable call to functions with the constructor \
attribute before analyzing the entry point. \
Defaults is false."
end)
let () = add_correctness_dep DisableConstructors.parameter
(* ------------------------------------------------------------------------- *)
(* --- Tuning --- *)
(* ------------------------------------------------------------------------- *)
let () = Parameter_customize.set_group precision_tuning
module WideningLevel =
Int
(struct
let default = 3
let option_name = "-wlevel"
let arg_name = "n"
let help =
"do <n> loop iterations before widening (defaults to 3)"
end)
let () = add_precision_dep WideningLevel.parameter
let () = Parameter_customize.set_group precision_tuning
module ILevel =
Int
(struct
let option_name = "-val-ilevel"
let default = 8
let arg_name = "n"
let help =
"Sets of integers are represented as sets up to <n> elements. \
Above, intervals with congruence information are used \
(defaults to 8; experimental)"
end)
let () = add_precision_dep ILevel.parameter
let () = ILevel.add_update_hook (fun _ i -> Ival.set_small_cardinal i)
let () = ILevel.set_range ~min:4 ~max:64
let () = Parameter_customize.set_group precision_tuning
module SemanticUnrollingLevel =
Zero
(struct
let option_name = "-slevel"
let arg_name = "n"
let help =
"superpose up to <n> states when unrolling control flow. The larger n, the more precise and expensive the analysis (defaults to 0)"
end)
let () = add_precision_dep SemanticUnrollingLevel.parameter
let () = SemanticUnrollingLevel.set_range ~min:(-1) ~max:0x3FFFFFFF
(* NOTE: 0x3FFFFFFF is the maximum value of a 31-bit integer. *)
let () = Parameter_customize.set_group precision_tuning
let () = Parameter_customize.argument_may_be_fundecl ()
(* -slevel-function f:n: per-function override of the global -slevel. *)
module SlevelFunction =
  Kernel_function_map
    (struct
      include Datatype.Int
      type key = Cil_types.kernel_function
      (* Parse the <n> part of "f:n"; turn a malformed integer into a
         user-readable [Cannot_build] error instead of a raw exception. *)
      let of_string ~key:_ ~prev:_ s =
        Extlib.opt_map
          (fun s ->
             try int_of_string s
             with Failure _ ->
               raise (Cannot_build ("'" ^ s ^ "' is not an integer")))
          s
      let to_string ~key:_ = Extlib.opt_map string_of_int
    end)
    (struct
      let option_name = "-slevel-function"
      let arg_name = "f:n"
      let help = "override slevel with <n> when analyzing <f>"
      let default = Kernel_function.Map.empty
    end)
let () = add_precision_dep SlevelFunction.parameter
let () = Parameter_customize.set_group precision_tuning
(* -val-slevel-merge-after-loop: merge the execution paths originating from
   a loop body before starting the next iteration, for the listed
   functions. *)
module SlevelMergeAfterLoop =
  Kernel_function_set
    (struct
      let option_name = "-val-slevel-merge-after-loop"
      let arg_name = "f | @all"
      let help =
        "when set, the different execution paths that originate from the body \
         of a loop are merged before entering the next execution. Experimental."
    end)
(* Fixed: this registration previously referenced
   [SemanticUnrollingLevel.parameter] (already registered above), a
   copy-paste error; the dependency must be on the option just declared. *)
let () = add_precision_dep SlevelMergeAfterLoop.parameter
let () = Parameter_customize.set_group precision_tuning
let () = Parameter_customize.argument_may_be_fundecl ()
module SplitReturnFunction =
Kernel_function_map
(struct
this type is ad - hoc : can not use Kernel_function_multiple_map here
include Split_strategy
type key = Cil_types.kernel_function
let of_string ~key:_ ~prev:_ s =
try Extlib.opt_map Split_strategy.of_string s
with Split_strategy.ParseFailure s ->
raise (Cannot_build ("unknown split strategy " ^ s))
let to_string ~key:_ v =
Extlib.opt_map Split_strategy.to_string v
end)
(struct
let option_name = "-val-split-return-function"
let arg_name = "f:n"
let help = "split return states of function <f> according to \
\\result == n and \\result != n"
let default = Kernel_function.Map.empty
end)
let () = add_precision_dep SplitReturnFunction.parameter
let () = Parameter_customize.set_group precision_tuning
module SplitReturn =
String
(struct
let option_name = "-val-split-return"
let arg_name = "mode"
let default = ""
let help = "when 'mode' is a number, or 'full', this is equivalent \
to -val-split-return-function f:mode for all functions f. \
When mode is 'auto', automatically split states at the end \
of all functions, according to the function return code"
end)
module SplitGlobalStrategy = State_builder.Ref (Split_strategy)
(struct
let default () = Split_strategy.NoSplit
let name = "Value_parameters.SplitGlobalStategy"
let dependencies = [SplitReturn.self]
end)
let () =
SplitReturn.add_set_hook
(fun _ x -> SplitGlobalStrategy.set (Split_strategy.of_string x))
let () = add_precision_dep SplitReturn.parameter
let () = Parameter_customize.is_invisible ()
module SplitReturnAuto =
False
(struct
let option_name = "-val-split-return-auto"
let help = ""
end)
let () =
SplitReturnAuto.add_set_hook
(fun _ b ->
warning "option \"-val-split-return-auto\" has been replaced by \
\"-val-split-return auto\"";
SplitGlobalStrategy.set
Split_strategy.(if b then SplitAuto else NoSplit))
let () = Parameter_customize.set_group precision_tuning
let () = Parameter_customize.argument_may_be_fundecl ()
module BuiltinsOverrides =
Kernel_function_map
(struct
include Datatype.String
type key = Cil_types.kernel_function
let of_string ~key:kf ~prev:_ nameopt =
begin match nameopt with
| Some name ->
if not (!Db.Value.mem_builtin name) then
abort "option '-val-builtin %a:%s': undeclared builtin '%s'@.\
declared builtins: @[%a@]"
Kernel_function.pretty kf name name
(Pretty_utils.pp_list ~sep:",@ " Format.pp_print_string)
(List.map fst (!Db.Value.registered_builtins ()))
| _ -> ()
end;
nameopt
let to_string ~key:_ name = name
end)
(struct
let option_name = "-val-builtin"
let arg_name = "f:ffc"
let help =
"when analyzing function <f>, try to use TrustInSoft Kernel builtin \
<ffc> instead. Fall back to <f> if <ffc> cannot handle its arguments \
(experimental)."
let default = Kernel_function.Map.empty
end)
let () = add_precision_dep BuiltinsOverrides.parameter
let () = Parameter_customize.is_invisible ()
module Subdivide_float_in_expr =
Zero
(struct
let option_name = "-subdivide-float-var"
let arg_name = "n"
let help =
"use <n> as number of subdivisions allowed for float variables in \
expressions (experimental, defaults to 0)"
end)
let () =
Subdivide_float_in_expr.add_set_hook
(fun _ _ ->
Kernel.abort "@[option -subdivide-float-var has been replaced by \
-val-subdivide-non-linear@]")
let () = Parameter_customize.set_group precision_tuning
module LinearLevel =
Zero
(struct
let option_name = "-val-subdivide-non-linear"
let arg_name = "n"
let help =
"Improve precision when evaluating expressions in which a variable \
appears multiple times, by splitting its value at most n times. \
Experimental, defaults to 0."
end)
let () = add_precision_dep LinearLevel.parameter
let () = Parameter_customize.set_group precision_tuning
module UsePrototype =
Kernel_function_set
(struct
let option_name = "-val-use-spec"
let arg_name = "f1,..,fn"
let help = "use the ACSL specification of the functions instead of their definitions"
end)
let () = add_precision_dep UsePrototype.parameter
let () = Parameter_customize.set_group precision_tuning
module RmAssert =
False
(struct
let option_name = "-remove-redundant-alarms"
let help = "after the analysis, try to remove redundant alarms, so that the user needs inspect fewer of them"
end)
let () = add_precision_dep RmAssert.parameter
let () = Parameter_customize.set_group precision_tuning
module MemExecAll =
False
(struct
let option_name = "-memexec-all"
let help = "(experimental) speed up analysis by not recomputing functions already analyzed in the same context. Incompatible with some plugins and callbacks"
end)
let () =
MemExecAll.add_set_hook
(fun _bold bnew ->
if bnew then
try
Dynamic.Parameter.Bool.set "-inout-callwise" true
with Dynamic.Unbound_value _ | Dynamic.Incompatible_type _ ->
abort "Cannot set option -memexec-all. Is plugin Inout registered?"
)
let () = Parameter_customize.set_group precision_tuning
module ArrayPrecisionLevel =
Int
(struct
let default = 200
let option_name = "-plevel"
let arg_name = "n"
let help = "use <n> as the precision level for arrays accesses. \
Array accesses are precise as long as the interval for the index contains \
less than n values. (defaults to 200)"
end)
let () = add_precision_dep ArrayPrecisionLevel.parameter
let () = ArrayPrecisionLevel.add_update_hook
(fun _ v -> Offsetmap.set_plevel v)
let () = Parameter_customize.set_group precision_tuning
module SeparateStmtStart =
String_set
(struct
let option_name = "-separate-stmts"
let arg_name = "n1,..,nk"
let help = ""
end)
let () = add_correctness_dep SeparateStmtStart.parameter
let () = Parameter_customize.set_group precision_tuning
module SeparateStmtWord =
Int
(struct
let option_name = "-separate-n"
let default = 0
let arg_name = "n"
let help = ""
end)
let () = SeparateStmtWord.set_range ~min:0 ~max:1073741823
let () = add_correctness_dep SeparateStmtWord.parameter
let () = Parameter_customize.set_group precision_tuning
module SeparateStmtOf =
Int
(struct
let option_name = "-separate-of"
let default = 0
let arg_name = "n"
let help = ""
end)
let () = SeparateStmtOf.set_range ~min:0 ~max:1073741823
let () = add_correctness_dep SeparateStmtOf.parameter
(* Options SaveFunctionState and LoadFunctionState are related
   and mutually dependent for sanity checking.
   Also, they depend on BuiltinsOverrides, so they cannot be defined before it. *)
let () = Parameter_customize.set_group initial_context
(* -val-save-fun-state f:file: dump the state at the end of [f] into [file]. *)
module SaveFunctionState =
  Kernel_function_map
    (struct
      include Datatype.String
      type key = Cil_types.kernel_function
      (* The mapped value is the raw file name: no parsing/printing needed. *)
      let of_string ~key:_ ~prev:_ file = file
      let to_string ~key:_ file = file
    end)
    (struct
      let option_name = "-val-save-fun-state"
      let arg_name = "function:filename"
      let help = "save state of function <function> in file <filename>"
      let default = Kernel_function.Map.empty
    end)
(* -val-load-fun-state f:file: use [file] as the state when [f] is called. *)
module LoadFunctionState =
  Kernel_function_map
    (struct
      include Datatype.String
      type key = Cil_types.kernel_function
      let of_string ~key:_ ~prev:_ file = file
      let to_string ~key:_ file = file
    end)
    (struct
      let option_name = "-val-load-fun-state"
      let arg_name = "function:filename"
      let help = "load state of function <function> from file <filename>"
      let default = Kernel_function.Map.empty
    end)
(* Both options alter the states the analysis computes from. *)
let () = add_correctness_dep SaveFunctionState.parameter
let () = add_correctness_dep LoadFunctionState.parameter
(* Checks that a save/load option holds exactly one function:filename pair
   and returns it. [name] is the option name (used in error messages) and
   [fold] the fold over the option's map. Aborts on zero or several pairs.
   Shared by the two accessors below, which previously duplicated this
   logic verbatim. *)
let get_unique_function_state name fold =
  let is_first = ref true in
  let (kf, filename) =
    fold
      (fun (kf, opt_filename) _acc ->
         if !is_first then is_first := false
         else abort "option `%s' requires a single function:filename pair"
             name;
         let filename = Extlib.the opt_filename in
         kf, filename)
      (Kernel_function.dummy (), "")
  in
  if filename = "" then
    abort "option `%s' requires a function:filename pair" name
  else kf, filename

(* Checks that SaveFunctionState has a unique argument pair, and returns it. *)
let get_SaveFunctionState () =
  get_unique_function_state SaveFunctionState.name SaveFunctionState.fold

(* Checks that LoadFunctionState has a unique argument pair, and returns it. *)
let get_LoadFunctionState () =
  get_unique_function_state LoadFunctionState.name LoadFunctionState.fold
(* perform early sanity checks to avoid aborting the analysis only at the end *)
let () = Ast.apply_after_computed (fun _ ->
    (* check the function to save returns 'void' *)
    if SaveFunctionState.is_set () then begin
      let (kf, _) = get_SaveFunctionState () in
      if not (Kernel_function.returns_void kf) then
        abort "option `%s': function `%a' must return void"
          SaveFunctionState.name Kernel_function.pretty kf
    end;
    if SaveFunctionState.is_set () && LoadFunctionState.is_set () then begin
      (* check that if both save and load are set, they do not specify the
         same function name (note: cannot compare using function ids) *)
      let (save_kf, _) = get_SaveFunctionState () in
      let (load_kf, _) = get_LoadFunctionState () in
      if Kernel_function.equal save_kf load_kf then
        abort "options `%s' and `%s' cannot save/load the same function `%a'"
          SaveFunctionState.name LoadFunctionState.name
          Kernel_function.pretty save_kf
    end;
    (* State loading is performed by substituting the Frama_C_load_state
       builtin for the designated function's body. *)
    if LoadFunctionState.is_set () then
      let (kf, _) = get_LoadFunctionState () in
      BuiltinsOverrides.add (kf, Some "Frama_C_load_state");
  )
(* -val-ptr-total-comparison (off by default): allows comparisons between
   pointers with different bases using a total, persistent order. *)
module TotalPtrComparison =
  False
    (struct
      let option_name = "-val-ptr-total-comparison" (* NOTE: Shouldn't it rather be "-val-tis-ptr-total-comparison"? *)
      let help = "compare any two pointers, even if they have different bases, \
                  using a total and persistent order (works only if both \
                  pointers are precise and an ordering can be determined)"
    end)
(* Changing pointer-comparison semantics affects soundness of saved results. *)
let () = add_correctness_dep TotalPtrComparison.parameter
(* ------------------------------------------------------------------------- *)
(* --- Messages --- *)
(* ------------------------------------------------------------------------- *)
(* Note: set_group is re-issued before each parameter — it applies to the
   next one defined. *)
let () = Parameter_customize.set_group messages
(* -val-show-progress (on by default). *)
module ValShowProgress =
  True
    (struct
      let option_name = "-val-show-progress"
      let help = "Show progression messages during analysis"
    end)
let () = Parameter_customize.set_group messages
(* -val-show-allocations (off by default). *)
module ValShowAllocations =
  False
    (struct
      let option_name = "-val-show-allocations"
      let help = "Show memory allocations"
    end)
let () = Parameter_customize.set_group messages
(* -val-show-initial-state (on by default). *)
module ValShowInitialState =
  True
    (struct
      let option_name = "-val-show-initial-state"
      let help = "Show initial state before analysis starts"
    end)
let () = Parameter_customize.set_group messages
(* -val-show-perf (off by default). *)
module ValShowPerf =
  False
    (struct
      let option_name = "-val-show-perf"
      let help = "Compute and shows a summary of the time spent analyzing function calls"
    end)
let () = Parameter_customize.set_group messages
(* -val-statistics (on by default). *)
module ValStatistics =
  True
    (struct
      let option_name = "-val-statistics"
      let help = "Measure analysis time for statements and functions"
    end)
let () = Parameter_customize.set_group messages
(* -val-show-slevel: every <n> propagated states, report how much of the
   allotted slevel has been consumed. *)
module ShowSlevel =
  Int
    (struct
      let option_name = "-val-show-slevel"
      let default = 100
      let arg_name = "n"
      (* Fixed user-visible typo: "alloted" -> "allotted". *)
      let help = "Period for showing consumption of the allotted slevel during analysis"
    end)
let () = Parameter_customize.set_group messages
(* -val-print-callstacks (off by default): append the current call stack to
   every emitted message. *)
module PrintCallstacks =
  False
    (struct
      let option_name = "-val-print-callstacks"
      let help = "When printing a message, also show the current call stack"
    end)
(* ------------------------------------------------------------------------- *)
(* --- Interpreter mode --- *)
(* ------------------------------------------------------------------------- *)
let () = Parameter_customize.set_group interpreter
(* -val-interpreter-mode (off by default). *)
module InterpreterMode =
  False
    (struct
      let option_name = "-val-interpreter-mode"
      let help = "Stop at first call to a library function, if main() has \
                  arguments, on undecided branches"
    end)
let () = Parameter_customize.set_group interpreter
(* -tis-interpreter-libc (off by default): preload the bundled libc files
   registered just below. *)
module Interpreter_libc =
  False
    (struct
      let option_name = "-tis-interpreter-libc"
      let help = "Use the tis-interpreter default libc."
    end)
let () =
  (* Build absolute paths under the kernel share directory. *)
  let tis_kernel_libc s =
    Caml_string.concat Filename.dir_sep [ Fc_config.datadir; "libc"; s; ]
  in
  let tis_interpreter_share s =
    Caml_string.concat
      Filename.dir_sep
      [ Fc_config.datadir; "tis-interpreter"; s; ]
  in
  let tis_interpreter_runtimes =
    [ tis_kernel_libc "tis_stdlib.c";
      tis_kernel_libc "fc_runtime.c";
      tis_interpreter_share "common_env.c";
      tis_interpreter_share "common_resource.c";
      tis_interpreter_share "common_time.c";
      tis_interpreter_share "common_missing.c"; ]
  in
  (* Register each runtime file dynamically, gated on Interpreter_libc.get:
     the file is only added when -tis-interpreter-libc is set. *)
  List.iter
    (fun file ->
       File.pre_register_dynamic
         Interpreter_libc.get
         (fun () -> File.from_filename file))
    tis_interpreter_runtimes
let () = Parameter_customize.set_group interpreter
(* -obviously-terminates-function: set of functions assumed to terminate. *)
module ObviouslyTerminatesFunctions =
  Fundec_set
    (struct
      let option_name = "-obviously-terminates-function"
      let arg_name = "f"
      let help = ""
    end)
let () = add_dep ObviouslyTerminatesFunctions.parameter
let () = Parameter_customize.set_group interpreter
module ObviouslyTerminatesAll =
  False
    (struct
      let option_name = "-obviously-terminates"
      let help = "undocumented. Among effects of this options are the same \
                  effects as -no-results"
    end)
let () = add_dep ObviouslyTerminatesAll.parameter
let () = Parameter_customize.set_group interpreter
(* -val-stop-at-nth-alarm: default max_int means "never stop". *)
module StopAtNthAlarm =
  Int(struct
    let option_name = "-val-stop-at-nth-alarm"
    let default = max_int
    let arg_name = "n"
    let help = ""
  end)
let () = Parameter_customize.set_group interpreter
(* -val-clone-on-recursive-calls (off by default, experimental); only
   consulted when -val-ignore-recursive-calls is unset. *)
module CloneOnRecursiveCalls =
  False
    (struct
      let option_name = "-val-clone-on-recursive-calls"
      (* Fixed user-visible typo: "enought" -> "enough". *)
      let help = "clone the called function when analyzing recursive \
                  calls. Only used when -val-ignore-recursive-calls is unset. \
                  The analysis has to be precise enough to be able to detect \
                  the termination (experimental)"
    end)
let () = add_dep CloneOnRecursiveCalls.parameter
(* -------------------------------------------------------------------------- *)
(* --- Ugliness required for correctness --- *)
(* -------------------------------------------------------------------------- *)
let () = Parameter_customize.is_invisible ()
(* Hidden dummy counter: bumping it invalidates everything that depends on
   this parameter (registered as a correctness dep just below). *)
module InitialStateChanged =
  Int (struct
    let option_name = "-new-initial-state"
    let default = 0
    let arg_name = "n"
    let help = ""
  end)
(* Changing the user-supplied initial state (or the arguments of main) through
   the API of Db.Value does reset the state of Value, but *not* the property
   statuses that Value has positioned. Currently, statuses can only depend
   on a command-line parameter. We use the dummy one above to force a reset
   when needed. *)
(* Register the dummy parameter as a correctness dependency, and install the
   hook: each call to Db.Value.initial_state_changed bumps the counter,
   forcing dependent states (and property statuses) to be reset. *)
let () =
  add_correctness_dep InitialStateChanged.parameter;
  Db.Value.initial_state_changed :=
    (fun () -> InitialStateChanged.set (InitialStateChanged.get () + 1))
(* Snapshot the accumulated parameter lists for external consumers. *)
let parameters_correctness = !parameters_correctness
let parameters_tuning = !parameters_tuning
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/TrustInSoft/tis-kernel/748d28baba90c03c0f5f4654d2e7bb47dfbe4e7d/src/plugins/value/value_parameters.ml | ocaml | ************************************************************************
************************************************************************
************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
Dependencies to kernel options
--------------------------------------------------------------------------
--- Performance options ---
--------------------------------------------------------------------------
-------------------------------------------------------------------------
--- Relational analyses ---
-------------------------------------------------------------------------
-------------------------------------------------------------------------
--- Non-standard alarms ---
-------------------------------------------------------------------------
-------------------------------------------------------------------------
--- Initial context ---
-------------------------------------------------------------------------
-------------------------------------------------------------------------
--- Tuning ---
-------------------------------------------------------------------------
perform early sanity checks to avoid aborting the analysis only at the end
check the function to save returns 'void'
check that if both save and load are set, they do not specify the
same function name (note: cannot compare using function ids)
NOTE: Shouldn't it rather be "-val-tis-ptr-total-comparison"?
-------------------------------------------------------------------------
--- Messages ---
-------------------------------------------------------------------------
-------------------------------------------------------------------------
--- Interpreter mode ---
-------------------------------------------------------------------------
--------------------------------------------------------------------------
--- Ugliness required for correctness ---
--------------------------------------------------------------------------
Local Variables:
compile-command: "make -C ../../.."
End:
| This file is part of .
is a fork of Frama - C. All the differences are :
Copyright ( C ) 2016 - 2017
is released under GPLv2
This file is part of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* Kernel (non-plugin) parameters whose value conditions the soundness of
   the results computed by the value analysis. *)
let kernel_parameters_correctness = [
  Kernel.MainFunction.parameter;
  Kernel.LibEntry.parameter;
  Kernel.AbsoluteValidRange.parameter;
  Kernel.SafeArrays.parameter;
  Kernel.UnspecifiedAccess.parameter;
  Kernel.SignedOverflow.parameter;
  Kernel.UnsignedOverflow.parameter;
  Kernel.ConstReadonly.parameter;
]
(* Accumulated as parameters are defined below; snapshotted into immutable
   values at the end of the file. *)
let parameters_correctness = ref []
let parameters_tuning = ref []
(* Make the value analysis state depend on parameter [p]: any change to [p]
   invalidates Db.Value.self. *)
let add_dep p =
  State_dependency_graph.add_codependencies
    ~onto:Db.Value.self
    [State.get p.Typed_parameter.name]
(* add_dep, plus record [p] as conditioning the soundness of the results. *)
let add_correctness_dep p =
  add_dep p;
  parameters_correctness := p :: !parameters_correctness
(* add_dep, plus record [p] as a precision/tuning parameter. *)
let add_precision_dep p =
  add_dep p;
  parameters_tuning := p :: !parameters_tuning
(* The relevant kernel parameters are correctness dependencies too. *)
let () = List.iter add_correctness_dep kernel_parameters_correctness
module Fc_config = Config
module Caml_string = String
include Plugin.Register
(struct
let name = "value analysis"
let shortname = "value"
let help =
"automatically computes variation domains for the variables of the \
program"
end)
let () = Help.add_aliases [ "-val-h" ]
module ForceValues =
WithOutput
(struct
let option_name = "-val"
let help = "compute values"
let output_by_default = true
end)
let precision_tuning = add_group "Precision vs. time"
let initial_context = add_group "Initial Context"
let performance = add_group "Results memoization vs. time"
let interpreter = add_group "Deterministic programs"
let alarms = add_group "Propagation and alarms "
let () = Parameter_customize.set_group performance
module NoResultsFunctions =
Kernel_function_set
(struct
let option_name = "-no-results-function"
let arg_name = "f"
let help = "do not record the values obtained for the statements of \
function f"
end)
let () = add_dep NoResultsFunctions.parameter
let () = Parameter_customize.set_group performance
let () = Parameter_customize.set_negative_option_name "-val-store-results"
module NoResultsAll =
False
(struct
let option_name = "-no-results"
let help = "do not record values for any of the statements of the \
program"
end)
let () = add_dep NoResultsAll.parameter
let () = Parameter_customize.set_group performance
module ExitOnDegeneration =
False
(struct
let option_name = "-val-exit-on-degeneration"
let help = "if the value analysis degenerates, exit immediately with return code 2"
end)
let () = add_dep ExitOnDegeneration.parameter
let () = Parameter_customize.set_group performance
let () = Parameter_customize.is_invisible ()
module ResultsAfter =
Bool
(struct
let option_name = "-val-after-results"
let help = "record precisely the values obtained after the evaluation of each statement"
let default = true
end)
let () =
ResultsAfter.add_set_hook
(fun _ new_ ->
if new_ then
Kernel.feedback "@[Option -val-after-results is now always set.@]"
else
Kernel.warning "@[Option -val-after-results can no longer be unset.@]")
let () = Parameter_customize.set_group performance
let () = Parameter_customize.is_invisible ()
module ResultsCallstack =
Bool
(struct
let option_name = "-val-callstack-results"
let help = "always enabled, cannot be disabled: used to record precisely the values obtained for each callstack leading to each statement"
let default = false
end)
let () = add_precision_dep ResultsCallstack.parameter
let () = Parameter_customize.set_group performance
module JoinResults =
Bool
(struct
let option_name = "-val-join-results"
let help = "precompute consolidated states once value is computed"
let default = true
end)
let () = Parameter_customize.set_group performance
module ResultsSlevel =
False
(struct
let option_name = "-val-slevel-results"
let help = "store states by slevel (before state only)"
end)
module WholeProgramGraph =
False
(struct
let option_name = "-whole-program-graph"
let help = "Compute a whole-program result graph (needed for some plugins)"
end)
let () = Parameter_customize.set_group performance
let () = Parameter_customize.set_group performance
module ReusedExprs =
Bool
(struct
let option_name = "-val-reused-expressions"
let help = "undocumented"
let default = false
end)
let () = Parameter_customize.set_group alarms
let () = Parameter_customize.set_negative_option_name
"-val-continue-on-pointer-library-function"
module AbortOnPointerLibraryFunction =
False
(struct
let option_name = "-val-abort-on-pointer-library-function"
let help = "Abort the analysis if a library function returning a \
pointer type is encountered"
end)
let () = add_correctness_dep AbortOnPointerLibraryFunction.parameter
let () = Parameter_customize.set_group alarms
module AllRoundingModes =
False
(struct
let option_name = "-all-rounding-modes"
let help = "Take more target FPU and compiler behaviors into account"
end)
let () = add_correctness_dep AllRoundingModes.parameter
let () = Parameter_customize.set_group alarms
module AllRoundingModesConstants =
False
(struct
let option_name = "-all-rounding-modes-constants"
let help = "Take into account the possibility of constants not being converted to the nearest representable value, or being converted to higher precision"
end)
let () = add_correctness_dep AllRoundingModesConstants.parameter
let () = Parameter_customize.set_group alarms
module UndefinedPointerComparisonPropagateAll =
False
(struct
let option_name = "-undefined-pointer-comparison-propagate-all"
let help = "if the target program appears to contain undefined pointer comparisons, propagate both outcomes {0; 1} in addition to the emission of an alarm"
end)
let () = add_correctness_dep UndefinedPointerComparisonPropagateAll.parameter
let () = Parameter_customize.set_group alarms
module WarnPointerComparison =
String
(struct
let option_name = "-val-warn-undefined-pointer-comparison"
let help = "warn on all pointer comparisons (default), on comparisons \
where the arguments have pointer type, or never warn"
let default = "all"
let arg_name = "all|pointer|none"
end)
let () = WarnPointerComparison.set_possible_values ["all"; "pointer"; "none"]
let () = add_correctness_dep WarnPointerComparison.parameter
let () = Parameter_customize.set_group alarms
module WarnLeftShiftNegative =
True
(struct
let option_name = "-val-warn-left-shift-negative"
let help =
"Emit alarms when left-shifting negative integers"
end)
let () = add_correctness_dep WarnLeftShiftNegative.parameter
let () = Parameter_customize.set_group alarms
let () = Parameter_customize.is_invisible ()
module LeftShiftNegativeOld =
True
(struct
let option_name = "-val-left-shift-negative-alarms"
let help =
"Emit alarms when left shifting negative integers"
end)
let () = LeftShiftNegativeOld.add_set_hook
(fun _oldv newv ->
let no = if newv then "" else "no-" in
warning "New option name for \
-%sval-left-shift-negative-alarms is -%sval-warn-left-shift-negative"
no no;
WarnLeftShiftNegative.set newv)
let () = Parameter_customize.set_group alarms
module WarnPointerSubstraction =
True
(struct
let option_name = "-val-warn-pointer-subtraction"
let help =
"Warn when subtracting two pointers that may not be in the same \
allocated block, and return the pointwise difference between the \
offsets. When unset, do not warn but generate imprecise offsets."
end)
let () = add_correctness_dep WarnPointerSubstraction.parameter
let () = Parameter_customize.set_group alarms
module WarnHarmlessFunctionPointer =
True
(struct
let option_name = "-val-warn-harmless-function-pointers"
let help =
"Warn for harmless mismatches between function pointer type and \
called function."
end)
let () = add_correctness_dep WarnHarmlessFunctionPointer.parameter
module WarnPointerArithmeticOutOfBounds =
False
(struct
let option_name = "-val-warn-pointer-arithmetic-out-of-bounds"
let help =
"Warn when adding an offset to a pointer produces an out-of-bounds \
pointer. When unset, do not warn but generate &a-1, &a+2..."
end)
let () = add_correctness_dep WarnPointerArithmeticOutOfBounds.parameter
let () = Parameter_customize.set_group alarms
module WarnVaArgTypeMismatch =
True
(struct
let option_name = "-val-warn-va-arg-type-mismatch"
let help =
"Warn for mismatches between the type parameter passed to the va_arg \
macro and the actual type of the next variadic argument."
end)
let () = add_correctness_dep WarnVaArgTypeMismatch.parameter
let () = Parameter_customize.set_group alarms
module IgnoreRecursiveCalls =
False
(struct
let option_name = "-val-ignore-recursive-calls"
let help =
"Pretend function calls that would be recursive do not happen. Causes unsoundness"
end)
let () = add_correctness_dep IgnoreRecursiveCalls.parameter
let () = Parameter_customize.set_group alarms
module WarnCopyIndeterminate =
Kernel_function_set
(struct
let option_name = "-val-warn-copy-indeterminate"
let arg_name = "f | @all"
let help = "warn when a statement of the specified functions copies a \
value that may be indeterminate (uninitalized or containing escaping address). \
Any number of function may be specified. If '@all' is present, this option \
becomes active for all functions. Inactive by default."
end)
let () = add_correctness_dep WarnCopyIndeterminate.parameter
let () = Parameter_customize.set_group alarms;;
module ShowTrace =
False
(struct
let option_name = "-val-show-trace"
let help =
"Compute and display execution traces together with alarms (experimental)"
end)
let () = ShowTrace.add_update_hook (fun _ b -> Trace.set_compute_trace b)
module ReduceOnLogicAlarms =
False
(struct
let option_name = "-val-reduce-on-logic-alarms"
let help = "Force reductions by a predicate to ignore logic alarms \
emitted while the predicated is evaluated (experimental)"
end)
let () = add_correctness_dep ReduceOnLogicAlarms.parameter
let () = Parameter_customize.set_group initial_context
module AutomaticContextMaxDepth =
Int
(struct
let option_name = "-context-depth"
let default = 2
let arg_name = "n"
let help = "use <n> as the depth of the default context for value analysis. (defaults to 2)"
end)
let () = add_correctness_dep AutomaticContextMaxDepth.parameter
let () = Parameter_customize.set_group initial_context
module AutomaticContextMaxWidth =
Int
(struct
let option_name = "-context-width"
let default = 2
let arg_name = "n"
let help = "use <n> as the width of the default context for value analysis. (defaults to 2)"
end)
let () = AutomaticContextMaxWidth.set_range ~min:1 ~max:max_int
let () = add_correctness_dep AutomaticContextMaxWidth.parameter
let () = Parameter_customize.set_group initial_context
module AllocatedContextValid =
False
(struct
let option_name = "-context-valid-pointers"
let help = "only allocate valid pointers until context-depth, and then use NULL (defaults to false)"
end)
let () = add_correctness_dep AllocatedContextValid.parameter
let () = Parameter_customize.set_group initial_context
module InitializationPaddingGlobals =
String
(struct
let default = "yes"
let option_name = "-val-initialization-padding-globals"
let arg_name = "yes|no|maybe"
let help = "Specify how padding bits are initialized inside global \
variables. Possible values are <yes> (padding is fully initialized), \
<no> (padding is completely uninitialized), or <maybe> \
(padding may be uninitialized). Default is <yes>."
end)
let () = InitializationPaddingGlobals.set_possible_values
["yes"; "no"; "maybe"]
let () = add_correctness_dep InitializationPaddingGlobals.parameter
let () = Parameter_customize.set_group initial_context
let () = Parameter_customize.set_negative_option_name
"-uninitialized-padding-globals"
let () = Parameter_customize.is_invisible ()
module InitializedPaddingGlobals =
True
(struct
let option_name = "-initialized-padding-globals"
let help = "Padding in global variables is uninitialized"
end)
let () = add_correctness_dep InitializedPaddingGlobals.parameter
let () = InitializedPaddingGlobals.add_update_hook
(fun _ v ->
warning "This option is deprecated. Use %s instead"
InitializationPaddingGlobals.name;
InitializationPaddingGlobals.set (if v then "yes" else "no"))
let () = Parameter_customize.set_group initial_context
module EntryPointArgs =
String
(struct
let option_name = "-val-args"
let arg_name = "\" arg_1 arg_2 … arg_k\""
let default = ""
let help = "Pass arguments to the entry point function. If the \
entry point has type int (int argc, char * argv[]), start analysis \
with argc bound to k+1 and argv pointing to a NULL-terminated array \
of pointers to strings \"program\",\"arg_1\",..., \"arg_k\". \
The first character is used as separator to split the arguments, \
a space works well in the common cases."
end)
let () = Parameter_customize.set_group initial_context
module ProgramName =
String
(struct
let default = "program"
let option_name = "-val-program-name"
let arg_name = "name"
let help = "Specify the name of the program. Default is \"program\"."
end)
let () = Parameter_customize.set_group initial_context
let () = Parameter_customize.set_negative_option_name
"-val-enable-constructors"
module DisableConstructors =
False
(struct
let option_name = "-val-disable-constructors"
let help = "disable call to functions with the constructor \
attribute before analyzing the entry point. \
Defaults is false."
end)
let () = add_correctness_dep DisableConstructors.parameter
let () = Parameter_customize.set_group precision_tuning
module WideningLevel =
Int
(struct
let default = 3
let option_name = "-wlevel"
let arg_name = "n"
let help =
"do <n> loop iterations before widening (defaults to 3)"
end)
let () = add_precision_dep WideningLevel.parameter
let () = Parameter_customize.set_group precision_tuning
module ILevel =
Int
(struct
let option_name = "-val-ilevel"
let default = 8
let arg_name = "n"
let help =
"Sets of integers are represented as sets up to <n> elements. \
Above, intervals with congruence information are used \
(defaults to 8; experimental)"
end)
let () = add_precision_dep ILevel.parameter
let () = ILevel.add_update_hook (fun _ i -> Ival.set_small_cardinal i)
let () = ILevel.set_range ~min:4 ~max:64
let () = Parameter_customize.set_group precision_tuning
module SemanticUnrollingLevel =
Zero
(struct
let option_name = "-slevel"
let arg_name = "n"
let help =
"superpose up to <n> states when unrolling control flow. The larger n, the more precise and expensive the analysis (defaults to 0)"
end)
let () = add_precision_dep SemanticUnrollingLevel.parameter
let () = SemanticUnrollingLevel.set_range ~min:(-1) ~max:0x3FFFFFFF
(* NOTE: 0x3FFFFFFF is the maximum value of a 31-bit integer. *)
let () = Parameter_customize.set_group precision_tuning
let () = Parameter_customize.argument_may_be_fundecl ()
module SlevelFunction =
Kernel_function_map
(struct
include Datatype.Int
type key = Cil_types.kernel_function
let of_string ~key:_ ~prev:_ s =
Extlib.opt_map
(fun s ->
try int_of_string s
with Failure _ ->
raise (Cannot_build ("'" ^ s ^ "' is not an integer")))
s
let to_string ~key:_ = Extlib.opt_map string_of_int
end)
(struct
let option_name = "-slevel-function"
let arg_name = "f:n"
let help = "override slevel with <n> when analyzing <f>"
let default = Kernel_function.Map.empty
end)
let () = add_precision_dep SlevelFunction.parameter
let () = Parameter_customize.set_group precision_tuning
(* -val-slevel-merge-after-loop: set of functions (or @all) whose loop-exit
   states are merged before the next iteration. Experimental. *)
module SlevelMergeAfterLoop =
  Kernel_function_set
    (struct
      let option_name = "-val-slevel-merge-after-loop"
      let arg_name = "f | @all"
      (* Fixed user-visible typo: "excution" -> "execution". *)
      let help =
        "when set, the different execution paths that originate from the body \
         of a loop are merged before entering the next execution. Experimental."
    end)
(* Bug fix: this previously registered SemanticUnrollingLevel.parameter
   (a copy-paste slip — -slevel was already registered above), leaving
   this option without a precision dependency. *)
let () = add_precision_dep SlevelMergeAfterLoop.parameter
let () = Parameter_customize.set_group precision_tuning
let () = Parameter_customize.argument_may_be_fundecl ()
module SplitReturnFunction =
Kernel_function_map
(struct
(* this type is ad-hoc: cannot use Kernel_function_multiple_map here *)
include Split_strategy
type key = Cil_types.kernel_function
let of_string ~key:_ ~prev:_ s =
try Extlib.opt_map Split_strategy.of_string s
with Split_strategy.ParseFailure s ->
raise (Cannot_build ("unknown split strategy " ^ s))
let to_string ~key:_ v =
Extlib.opt_map Split_strategy.to_string v
end)
(struct
let option_name = "-val-split-return-function"
let arg_name = "f:n"
let help = "split return states of function <f> according to \
\\result == n and \\result != n"
let default = Kernel_function.Map.empty
end)
let () = add_precision_dep SplitReturnFunction.parameter
let () = Parameter_customize.set_group precision_tuning
module SplitReturn =
String
(struct
let option_name = "-val-split-return"
let arg_name = "mode"
let default = ""
let help = "when 'mode' is a number, or 'full', this is equivalent \
to -val-split-return-function f:mode for all functions f. \
When mode is 'auto', automatically split states at the end \
of all functions, according to the function return code"
end)
module SplitGlobalStrategy = State_builder.Ref (Split_strategy)
(struct
let default () = Split_strategy.NoSplit
let name = "Value_parameters.SplitGlobalStategy"
let dependencies = [SplitReturn.self]
end)
let () =
SplitReturn.add_set_hook
(fun _ x -> SplitGlobalStrategy.set (Split_strategy.of_string x))
let () = add_precision_dep SplitReturn.parameter
let () = Parameter_customize.is_invisible ()
module SplitReturnAuto =
False
(struct
let option_name = "-val-split-return-auto"
let help = ""
end)
let () =
SplitReturnAuto.add_set_hook
(fun _ b ->
warning "option \"-val-split-return-auto\" has been replaced by \
\"-val-split-return auto\"";
SplitGlobalStrategy.set
Split_strategy.(if b then SplitAuto else NoSplit))
let () = Parameter_customize.set_group precision_tuning
let () = Parameter_customize.argument_may_be_fundecl ()
module BuiltinsOverrides =
Kernel_function_map
(struct
include Datatype.String
type key = Cil_types.kernel_function
let of_string ~key:kf ~prev:_ nameopt =
begin match nameopt with
| Some name ->
if not (!Db.Value.mem_builtin name) then
abort "option '-val-builtin %a:%s': undeclared builtin '%s'@.\
declared builtins: @[%a@]"
Kernel_function.pretty kf name name
(Pretty_utils.pp_list ~sep:",@ " Format.pp_print_string)
(List.map fst (!Db.Value.registered_builtins ()))
| _ -> ()
end;
nameopt
let to_string ~key:_ name = name
end)
(struct
let option_name = "-val-builtin"
let arg_name = "f:ffc"
let help =
"when analyzing function <f>, try to use TrustInSoft Kernel builtin \
<ffc> instead. Fall back to <f> if <ffc> cannot handle its arguments \
(experimental)."
let default = Kernel_function.Map.empty
end)
let () = add_precision_dep BuiltinsOverrides.parameter
let () = Parameter_customize.is_invisible ()
module Subdivide_float_in_expr =
Zero
(struct
let option_name = "-subdivide-float-var"
let arg_name = "n"
let help =
"use <n> as number of subdivisions allowed for float variables in \
expressions (experimental, defaults to 0)"
end)
let () =
Subdivide_float_in_expr.add_set_hook
(fun _ _ ->
Kernel.abort "@[option -subdivide-float-var has been replaced by \
-val-subdivide-non-linear@]")
let () = Parameter_customize.set_group precision_tuning
module LinearLevel =
Zero
(struct
let option_name = "-val-subdivide-non-linear"
let arg_name = "n"
let help =
"Improve precision when evaluating expressions in which a variable \
appears multiple times, by splitting its value at most n times. \
Experimental, defaults to 0."
end)
let () = add_precision_dep LinearLevel.parameter
let () = Parameter_customize.set_group precision_tuning
module UsePrototype =
Kernel_function_set
(struct
let option_name = "-val-use-spec"
let arg_name = "f1,..,fn"
let help = "use the ACSL specification of the functions instead of their definitions"
end)
let () = add_precision_dep UsePrototype.parameter
let () = Parameter_customize.set_group precision_tuning
module RmAssert =
False
(struct
let option_name = "-remove-redundant-alarms"
let help = "after the analysis, try to remove redundant alarms, so that the user needs inspect fewer of them"
end)
let () = add_precision_dep RmAssert.parameter
let () = Parameter_customize.set_group precision_tuning
module MemExecAll =
False
(struct
let option_name = "-memexec-all"
let help = "(experimental) speed up analysis by not recomputing functions already analyzed in the same context. Incompatible with some plugins and callbacks"
end)
let () =
MemExecAll.add_set_hook
(fun _bold bnew ->
if bnew then
try
Dynamic.Parameter.Bool.set "-inout-callwise" true
with Dynamic.Unbound_value _ | Dynamic.Incompatible_type _ ->
abort "Cannot set option -memexec-all. Is plugin Inout registered?"
)
let () = Parameter_customize.set_group precision_tuning
module ArrayPrecisionLevel =
Int
(struct
let default = 200
let option_name = "-plevel"
let arg_name = "n"
let help = "use <n> as the precision level for arrays accesses. \
Array accesses are precise as long as the interval for the index contains \
less than n values. (defaults to 200)"
end)
let () = add_precision_dep ArrayPrecisionLevel.parameter
let () = ArrayPrecisionLevel.add_update_hook
(fun _ v -> Offsetmap.set_plevel v)
let () = Parameter_customize.set_group precision_tuning
module SeparateStmtStart =
String_set
(struct
let option_name = "-separate-stmts"
let arg_name = "n1,..,nk"
let help = ""
end)
let () = add_correctness_dep SeparateStmtStart.parameter
let () = Parameter_customize.set_group precision_tuning
module SeparateStmtWord =
Int
(struct
let option_name = "-separate-n"
let default = 0
let arg_name = "n"
let help = ""
end)
let () = SeparateStmtWord.set_range ~min:0 ~max:1073741823
let () = add_correctness_dep SeparateStmtWord.parameter
let () = Parameter_customize.set_group precision_tuning
module SeparateStmtOf =
Int
(struct
let option_name = "-separate-of"
let default = 0
let arg_name = "n"
let help = ""
end)
let () = SeparateStmtOf.set_range ~min:0 ~max:1073741823
let () = add_correctness_dep SeparateStmtOf.parameter
(* Options SaveFunctionState and LoadFunctionState are related
   and mutually dependent for sanity checking.
   Also, they depend on BuiltinsOverrides, so they cannot be defined
   before it. *)
let () = Parameter_customize.set_group initial_context
module SaveFunctionState =
Kernel_function_map
(struct
include Datatype.String
type key = Cil_types.kernel_function
let of_string ~key:_ ~prev:_ file = file
let to_string ~key:_ file = file
end)
(struct
let option_name = "-val-save-fun-state"
let arg_name = "function:filename"
let help = "save state of function <function> in file <filename>"
let default = Kernel_function.Map.empty
end)
module LoadFunctionState =
Kernel_function_map
(struct
include Datatype.String
type key = Cil_types.kernel_function
let of_string ~key:_ ~prev:_ file = file
let to_string ~key:_ file = file
end)
(struct
let option_name = "-val-load-fun-state"
let arg_name = "function:filename"
let help = "load state of function <function> from file <filename>"
let default = Kernel_function.Map.empty
end)
let () = add_correctness_dep SaveFunctionState.parameter
let () = add_correctness_dep LoadFunctionState.parameter
checks that SaveFunctionState has a unique argument pair , and returns it .
let get_SaveFunctionState () =
let is_first = ref true in
let (kf, filename) = SaveFunctionState.fold
(fun (kf, opt_filename) _acc ->
if !is_first then is_first := false
else abort "option `%s' requires a single function:filename pair"
SaveFunctionState.name;
let filename = Extlib.the opt_filename in
kf, filename
) (Kernel_function.dummy (), "")
in
if filename = "" then abort "option `%s' requires a function:filename pair"
SaveFunctionState.name
else kf, filename
checks that LoadFunctionState has a unique argument pair , and returns it .
let get_LoadFunctionState () =
let is_first = ref true in
let (kf, filename) = LoadFunctionState.fold
(fun (kf, opt_filename) _acc ->
if !is_first then is_first := false
else abort "option `%s' requires a single function:filename pair"
LoadFunctionState.name;
let filename = Extlib.the opt_filename in
kf, filename
) (Kernel_function.dummy (), "")
in
if filename = "" then abort "option `%s' requires a function:filename pair"
LoadFunctionState.name
else kf, filename
let () = Ast.apply_after_computed (fun _ ->
if SaveFunctionState.is_set () then begin
let (kf, _) = get_SaveFunctionState () in
if not (Kernel_function.returns_void kf) then
abort "option `%s': function `%a' must return void"
SaveFunctionState.name Kernel_function.pretty kf
end;
if SaveFunctionState.is_set () && LoadFunctionState.is_set () then begin
let (save_kf, _) = get_SaveFunctionState () in
let (load_kf, _) = get_LoadFunctionState () in
if Kernel_function.equal save_kf load_kf then
abort "options `%s' and `%s' cannot save/load the same function `%a'"
SaveFunctionState.name LoadFunctionState.name
Kernel_function.pretty save_kf
end;
if LoadFunctionState.is_set () then
let (kf, _) = get_LoadFunctionState () in
BuiltinsOverrides.add (kf, Some "Frama_C_load_state");
)
module TotalPtrComparison =
False
(struct
let help = "compare any two pointers, even if they have different bases, \
using a total and persistent order (works only if both \
pointers are precise and an ordering can be determined)"
end)
let () = add_correctness_dep TotalPtrComparison.parameter
let () = Parameter_customize.set_group messages
module ValShowProgress =
True
(struct
let option_name = "-val-show-progress"
let help = "Show progression messages during analysis"
end)
let () = Parameter_customize.set_group messages
module ValShowAllocations =
False
(struct
let option_name = "-val-show-allocations"
let help = "Show memory allocations"
end)
let () = Parameter_customize.set_group messages
module ValShowInitialState =
True
(struct
let option_name = "-val-show-initial-state"
let help = "Show initial state before analysis starts"
end)
let () = Parameter_customize.set_group messages
module ValShowPerf =
False
(struct
let option_name = "-val-show-perf"
let help = "Compute and shows a summary of the time spent analyzing function calls"
end)
let () = Parameter_customize.set_group messages
module ValStatistics =
True
(struct
let option_name = "-val-statistics"
let help = "Measure analysis time for statements and functions"
end)
let () = Parameter_customize.set_group messages
module ShowSlevel =
Int
(struct
let option_name = "-val-show-slevel"
let default = 100
let arg_name = "n"
let help = "Period for showing consumption of the alloted slevel during analysis"
end)
let () = Parameter_customize.set_group messages
module PrintCallstacks =
False
(struct
let option_name = "-val-print-callstacks"
let help = "When printing a message, also show the current call stack"
end)
let () = Parameter_customize.set_group interpreter
module InterpreterMode =
False
(struct
let option_name = "-val-interpreter-mode"
let help = "Stop at first call to a library function, if main() has \
arguments, on undecided branches"
end)
let () = Parameter_customize.set_group interpreter
module Interpreter_libc =
False
(struct
let option_name = "-tis-interpreter-libc"
let help = "Use the tis-interpreter default libc."
end)
let () =
let tis_kernel_libc s =
Caml_string.concat Filename.dir_sep [ Fc_config.datadir; "libc"; s; ]
in
let tis_interpreter_share s =
Caml_string.concat
Filename.dir_sep
[ Fc_config.datadir; "tis-interpreter"; s; ]
in
let tis_interpreter_runtimes =
[ tis_kernel_libc "tis_stdlib.c";
tis_kernel_libc "fc_runtime.c";
tis_interpreter_share "common_env.c";
tis_interpreter_share "common_resource.c";
tis_interpreter_share "common_time.c";
tis_interpreter_share "common_missing.c"; ]
in
List.iter
(fun file ->
File.pre_register_dynamic
Interpreter_libc.get
(fun () -> File.from_filename file))
tis_interpreter_runtimes
let () = Parameter_customize.set_group interpreter
module ObviouslyTerminatesFunctions =
Fundec_set
(struct
let option_name = "-obviously-terminates-function"
let arg_name = "f"
let help = ""
end)
let () = add_dep ObviouslyTerminatesFunctions.parameter
let () = Parameter_customize.set_group interpreter
module ObviouslyTerminatesAll =
False
(struct
let option_name = "-obviously-terminates"
let help = "undocumented. Among effects of this options are the same \
effects as -no-results"
end)
let () = add_dep ObviouslyTerminatesAll.parameter
let () = Parameter_customize.set_group interpreter
module StopAtNthAlarm =
Int(struct
let option_name = "-val-stop-at-nth-alarm"
let default = max_int
let arg_name = "n"
let help = ""
end)
let () = Parameter_customize.set_group interpreter
module CloneOnRecursiveCalls =
False
(struct
let option_name = "-val-clone-on-recursive-calls"
let help = "clone the called function when analyzing recursive \
calls. Only used when -val-ignore-recursive-calls is unset. \
The analysis has to be precise enought to be able to detect \
the termination (experimental)"
end)
let () = add_dep CloneOnRecursiveCalls.parameter
let () = Parameter_customize.is_invisible ()
module InitialStateChanged =
Int (struct
let option_name = "-new-initial-state"
let default = 0
let arg_name = "n"
let help = ""
end)
Changing the user - supplied initial state ( or the arguments of main ) through
the API of Db . Value does reset the state of Value , but * not * the property
statuses that Value has positioned . Currently , statuses can only depend
on a command - line parameter . We use the dummy one above to force a reset
when needed .
the API of Db.Value does reset the state of Value, but *not* the property
statuses that Value has positioned. Currently, statuses can only depend
on a command-line parameter. We use the dummy one above to force a reset
when needed. *)
let () =
add_correctness_dep InitialStateChanged.parameter;
Db.Value.initial_state_changed :=
(fun () -> InitialStateChanged.set (InitialStateChanged.get () + 1))
let parameters_correctness = !parameters_correctness
let parameters_tuning = !parameters_tuning
|
fb2f5f7e695b426fbb92fcae2a2f6ffdd0c2826ea3d3164a0594afbb3b94180d | bumptech/nitro-haskell | Distributed.hs | {-# LANGUAGE OverloadedStrings #-}
import System.Nitro
import Control.Monad (void, forever)
import Control.Concurrent (forkIO, threadDelay)
import Data.ByteString.Char8 as BS
serverThread s i = forever $ do
fr <- recv s []
threadDelay 1000000
frBack <- bstrToFrame (BS.pack $ "Hi it's thread #" ++ (show i))
reply s fr frBack []
rpc = withSocket (connect "tcp:7723" defaultOpts)
(\client -> do
fr <- bstrToFrame "Whoa there"
send client fr []
print =<< frameToBstr =<< recv client []
)
main = do
nitroRuntimeStart
bound <- bind "tcp://*:7723" defaultOpts
mapM_ (\i -> void $ forkIO $ serverThread bound i) [1..2]
client <- connect "tcp:7723" defaultOpts
mapM_ (\_ -> rpc) [1..5]
close bound
close client
| null | https://raw.githubusercontent.com/bumptech/nitro-haskell/937415d361193bab86545d0bc784830b29573474/examples/Distributed.hs | haskell | # LANGUAGE OverloadedStrings # | import System.Nitro
import Control.Monad (void, forever)
import Control.Concurrent (forkIO, threadDelay)
import Data.ByteString.Char8 as BS
serverThread s i = forever $ do
fr <- recv s []
threadDelay 1000000
frBack <- bstrToFrame (BS.pack $ "Hi it's thread #" ++ (show i))
reply s fr frBack []
rpc = withSocket (connect "tcp:7723" defaultOpts)
(\client -> do
fr <- bstrToFrame "Whoa there"
send client fr []
print =<< frameToBstr =<< recv client []
)
main = do
nitroRuntimeStart
bound <- bind "tcp://*:7723" defaultOpts
mapM_ (\i -> void $ forkIO $ serverThread bound i) [1..2]
client <- connect "tcp:7723" defaultOpts
mapM_ (\_ -> rpc) [1..5]
close bound
close client
|
10bcc687c58f2092b20e464dae7961a6220036ad2831461d67be06d79be12e5d | lehins/massiv | ManifestSpec.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MonoLocalBinds #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeApplications #
module Test.Massiv.Array.ManifestSpec (spec) where
import Data.ByteString as S
import Data.ByteString.Builder as S
import Data.ByteString.Lazy as SL
import Data.Massiv.Array as A
import Data.Word (Word8)
import Test.Massiv.Core
ByteString
prop_toFromByteString
:: (Show (Vector r Word8), Eq (Vector r Word8), Load r Ix1 Word8) => Vector r Word8 -> Property
prop_toFromByteString arr = arr === fromByteString (getComp arr) (toByteString arr)
prop_castToFromByteString :: Vector S Word8 -> Property
prop_castToFromByteString arr = arr === castFromByteString (getComp arr) (castToByteString arr)
prop_fromToByteString :: Comp -> [Word8] -> Property
prop_fromToByteString comp ls = bs === toByteString (fromByteString comp bs :: Vector P Word8)
where
bs = S.pack ls
prop_toBuilder :: Array P Ix1 Word8 -> Property
prop_toBuilder arr = bs === SL.toStrict (S.toLazyByteString (toBuilder S.word8 arr))
where
bs = toByteString arr
conversionSpec :: Spec
conversionSpec =
describe "ByteString" $ do
it "castTo/TromByteString" $ property prop_castToFromByteString
it "to/from ByteString P" $ property (prop_toFromByteString @P)
it "to/from ByteString S" $ property (prop_toFromByteString @S)
it "from/to ByteString" $ property prop_fromToByteString
it "toBuilder" $ property prop_toBuilder
spec :: Spec
spec = describe "Conversion" conversionSpec
| null | https://raw.githubusercontent.com/lehins/massiv/67a920d4403f210d0bfdad1acc4bec208d80a588/massiv-test/tests/Test/Massiv/Array/ManifestSpec.hs | haskell | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MonoLocalBinds #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeApplications #
module Test.Massiv.Array.ManifestSpec (spec) where
import Data.ByteString as S
import Data.ByteString.Builder as S
import Data.ByteString.Lazy as SL
import Data.Massiv.Array as A
import Data.Word (Word8)
import Test.Massiv.Core
ByteString
prop_toFromByteString
:: (Show (Vector r Word8), Eq (Vector r Word8), Load r Ix1 Word8) => Vector r Word8 -> Property
prop_toFromByteString arr = arr === fromByteString (getComp arr) (toByteString arr)
prop_castToFromByteString :: Vector S Word8 -> Property
prop_castToFromByteString arr = arr === castFromByteString (getComp arr) (castToByteString arr)
prop_fromToByteString :: Comp -> [Word8] -> Property
prop_fromToByteString comp ls = bs === toByteString (fromByteString comp bs :: Vector P Word8)
where
bs = S.pack ls
prop_toBuilder :: Array P Ix1 Word8 -> Property
prop_toBuilder arr = bs === SL.toStrict (S.toLazyByteString (toBuilder S.word8 arr))
where
bs = toByteString arr
conversionSpec :: Spec
conversionSpec =
describe "ByteString" $ do
it "castTo/TromByteString" $ property prop_castToFromByteString
it "to/from ByteString P" $ property (prop_toFromByteString @P)
it "to/from ByteString S" $ property (prop_toFromByteString @S)
it "from/to ByteString" $ property prop_fromToByteString
it "toBuilder" $ property prop_toBuilder
spec :: Spec
spec = describe "Conversion" conversionSpec
| |
ee4c42c3a2eebf5843acb693858a9d6b103085fb1aa15cf33cbd493fdc9a5d3b | softwarelanguageslab/maf | R5RS_various_infinite-3-1.scm | ; Changes:
* removed : 0
* added : 2
* swaps : 0
; * negated predicates: 0
; * swapped branches: 0
; * calls to id fun: 0
((lambda (x) (x x)) (lambda (y) (<change> () (display (y y))) (<change> () (y y)) (y y))) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_various_infinite-3-1.scm | scheme | Changes:
* negated predicates: 0
* swapped branches: 0
* calls to id fun: 0 | * removed : 0
* added : 2
* swaps : 0
((lambda (x) (x x)) (lambda (y) (<change> () (display (y y))) (<change> () (y y)) (y y))) |
76d3512574095bbc51c6da88bee74fd328bcc538a1d0e39aa1d1a51c71e95e8c | cram2/cram | coupling.lisp | Regression test COUPLING for GSLL , automatically generated
;;
Copyright 2009
Distributed under the terms of the GNU General Public License
;;
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;;
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with this program. If not, see </>.
(in-package :gsl)
(LISP-UNIT:DEFINE-TEST COUPLING
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.7071067811865475d0 3.14018491736755d-16)
(MULTIPLE-VALUE-LIST (COUPLING-3J 0 1 1 0 1 -1)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.408248290463863d0 5.438959822042073d-16)
(MULTIPLE-VALUE-LIST (COUPLING-6J 1 1 2 0 2 1)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.1388888888888889d0 6.638400825147663d-16)
(MULTIPLE-VALUE-LIST (COUPLING-9J 1 1 2 1 2 1 2 1 1))))
| null | https://raw.githubusercontent.com/cram2/cram/dcb73031ee944d04215bbff9e98b9e8c210ef6c5/cram_3rdparty/gsll/src/tests/coupling.lisp | lisp |
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>. | Regression test COUPLING for GSLL , automatically generated
Copyright 2009
Distributed under the terms of the GNU General Public License
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(in-package :gsl)
(LISP-UNIT:DEFINE-TEST COUPLING
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.7071067811865475d0 3.14018491736755d-16)
(MULTIPLE-VALUE-LIST (COUPLING-3J 0 1 1 0 1 -1)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.408248290463863d0 5.438959822042073d-16)
(MULTIPLE-VALUE-LIST (COUPLING-6J 1 1 2 0 2 1)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.1388888888888889d0 6.638400825147663d-16)
(MULTIPLE-VALUE-LIST (COUPLING-9J 1 1 2 1 2 1 2 1 1))))
|
8de7002bcaa27d0e7f96252240380d55ec5dd5068c29aed7358a9823c46eb86a | maoe/lifted-async | Safe.hs | {-# LANGUAGE ConstraintKinds #-}
# LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
|
Module : Control . Concurrent . Async . Lifted . Safe
Copyright : Copyright ( C ) 2012 - 2018 Mitsutoshi Aoe
License : BSD - style ( see the file LICENSE )
Maintainer : < >
Stability : experimental
This is a safe variant of @Control . Concurrent . Async . Lifted@.
This module assumes your monad stack to satisfy @'StM ' m a ~ a@ so you ca n't
mess up monadic effects . If your monad stack is stateful , use
@Control . Concurrent . Async . Lifted@ with special care .
Module : Control.Concurrent.Async.Lifted.Safe
Copyright : Copyright (C) 2012-2018 Mitsutoshi Aoe
License : BSD-style (see the file LICENSE)
Maintainer : Mitsutoshi Aoe <>
Stability : experimental
This is a safe variant of @Control.Concurrent.Async.Lifted@.
This module assumes your monad stack to satisfy @'StM' m a ~ a@ so you can't
mess up monadic effects. If your monad stack is stateful, use
@Control.Concurrent.Async.Lifted@ with special care.
-}
module Control.Concurrent.Async.Lifted.Safe
(
-- * Asynchronous actions
A.Async
, Pure
, Forall
-- ** Spawning
, async, asyncBound, asyncOn, asyncWithUnmask, asyncOnWithUnmask
-- ** Spawning with automatic 'cancel'ation
, withAsync, withAsyncBound, withAsyncOn
, withAsyncWithUnmask, withAsyncOnWithUnmask
-- ** Quering 'Async's
, wait, poll, waitCatch
, cancel
, uninterruptibleCancel
, cancelWith
, A.asyncThreadId
, A.AsyncCancelled(..)
* * STM operations
, A.waitSTM, A.pollSTM, A.waitCatchSTM
-- ** Waiting for multiple 'Async's
, waitAny, waitAnyCatch, waitAnyCancel, waitAnyCatchCancel
, waitEither, waitEitherCatch, waitEitherCancel, waitEitherCatchCancel
, waitEither_
, waitBoth
* * Waiting for multiple ' Async 's in STM
, A.waitAnySTM
, A.waitAnyCatchSTM
, A.waitEitherSTM
, A.waitEitherCatchSTM
, A.waitEitherSTM_
, A.waitBothSTM
-- ** Linking
, Unsafe.link, Unsafe.link2
, A.ExceptionInLinkedThread(..)
-- * Convenient utilities
, race, race_, concurrently, concurrently_
, mapConcurrently, mapConcurrently_
, forConcurrently, forConcurrently_
, replicateConcurrently, replicateConcurrently_
, Concurrently(..)
, A.compareAsyncs
)
where
import Control.Applicative
import Control.Concurrent (threadDelay)
import Control.Monad
import Data.Foldable (fold)
import Control.Concurrent.Async (Async)
import Control.Exception.Lifted (SomeException, Exception)
import Control.Monad.Base (MonadBase(..))
import Control.Monad.Trans.Control hiding (restoreM)
import Data.Constraint ((\\), (:-))
import Data.Constraint.Forall (Forall, inst)
import qualified Control.Concurrent.Async as A
import qualified Control.Concurrent.Async.Lifted as Unsafe
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 710
import Data.Foldable
import Data.Traversable
#endif
#if !MIN_VERSION_base(4, 8, 0)
import Data.Monoid (Monoid(mappend, mempty))
#elif MIN_VERSION_base(4, 9, 0) && !MIN_VERSION_base(4, 13, 0)
import Data.Semigroup (Semigroup((<>)))
#endif
-- | Generalized version of 'A.async'.
async
:: forall m a. (MonadBaseControl IO m, Forall (Pure m))
=> m a -> m (Async a)
async = Unsafe.async
\\ (inst :: Forall (Pure m) :- Pure m a)
| Generalized version of ' A.asyncBound ' .
asyncBound
:: forall m a. (MonadBaseControl IO m, Forall (Pure m))
=> m a -> m (Async a)
asyncBound = Unsafe.asyncBound
\\ (inst :: Forall (Pure m) :- Pure m a)
| Generalized version of ' ' .
asyncOn
:: forall m a. (MonadBaseControl IO m, Forall (Pure m))
=> Int -> m a -> m (Async a)
asyncOn cpu m = Unsafe.asyncOn cpu m
\\ (inst :: Forall (Pure m) :- Pure m a)
| Generalized version of ' A.asyncWithUnmask ' .
asyncWithUnmask
:: forall m a. (MonadBaseControl IO m, Forall (Pure m))
=> ((forall b. m b -> m b) -> m a)
-> m (Async a)
asyncWithUnmask restore = Unsafe.asyncWithUnmask restore
\\ (inst :: Forall (Pure m) :- Pure m a)
| Generalized version of ' A.asyncOnWithUnmask ' .
asyncOnWithUnmask
:: forall m a. (MonadBaseControl IO m, Forall (Pure m))
=> Int
-> ((forall b. m b -> m b) -> m a)
-> m (Async a)
asyncOnWithUnmask cpu restore = Unsafe.asyncOnWithUnmask cpu restore
\\ (inst :: Forall (Pure m) :- Pure m a)
-- | Generalized version of 'A.withAsync'.
withAsync
:: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
=> m a
-> (Async a -> m b)
-> m b
withAsync = Unsafe.withAsync
\\ (inst :: Forall (Pure m) :- Pure m a)
| Generalized version of ' A.withAsyncBound ' .
withAsyncBound
:: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
=> m a
-> (Async a -> m b)
-> m b
withAsyncBound = Unsafe.withAsyncBound
\\ (inst :: Forall (Pure m) :- Pure m a)
| Generalized version of ' A.withAsyncOn ' .
withAsyncOn
:: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
=> Int
-> m a
-> (Async a -> m b)
-> m b
withAsyncOn = Unsafe.withAsyncOn
\\ (inst :: Forall (Pure m) :- Pure m a)
-- | Generalized version of 'A.withAsyncWithUnmask'.
withAsyncWithUnmask
:: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
=> ((forall c. m c -> m c) -> m a)
-> (Async a -> m b)
-> m b
withAsyncWithUnmask restore = Unsafe.withAsyncWithUnmask restore
\\ (inst :: Forall (Pure m) :- Pure m a)
| Generalized version of ' A.withAsyncOnWithUnmask ' .
withAsyncOnWithUnmask
:: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
=> Int
-> ((forall c. m c -> m c) -> m a)
-> (Async a -> m b)
-> m b
withAsyncOnWithUnmask cpu restore = Unsafe.withAsyncOnWithUnmask cpu restore
\\ (inst :: Forall (Pure m) :- Pure m a)
-- | Generalized version of 'A.wait'.
wait
:: forall m a. (MonadBase IO m, Forall (Pure m))
=> Async a -> m a
wait = liftBase . A.wait
\\ (inst :: Forall (Pure m) :- Pure m a)
-- | Generalized version of 'A.poll'.
poll
:: forall m a. (MonadBase IO m, Forall (Pure m))
=> Async a
-> m (Maybe (Either SomeException a))
poll = liftBase . A.poll
\\ (inst :: Forall (Pure m) :- Pure m a)
| Generalized version of .
waitCatch
:: forall m a. (MonadBase IO m, Forall (Pure m))
=> Async a
-> m (Either SomeException a)
waitCatch = liftBase . A.waitCatch
\\ (inst :: Forall (Pure m) :- Pure m a)
-- | Generalized version of 'A.cancel'.
cancel :: MonadBase IO m => Async a -> m ()
cancel = Unsafe.cancel
-- | Generalized version of 'A.cancelWith'.
cancelWith :: (MonadBase IO m, Exception e) => Async a -> e -> m ()
cancelWith = Unsafe.cancelWith
-- | Generalized version of 'A.uninterruptibleCancel'.
uninterruptibleCancel :: MonadBase IO m => Async a -> m ()
uninterruptibleCancel = Unsafe.uninterruptibleCancel
-- | Generalized version of 'A.waitAny'.
waitAny
:: forall m a. (MonadBase IO m, Forall (Pure m))
=> [Async a] -> m (Async a, a)
waitAny = liftBase . A.waitAny
\\ (inst :: Forall (Pure m) :- Pure m a)
-- | Generalized version of 'A.waitAnyCatch'.
waitAnyCatch
:: forall m a. (MonadBase IO m, Forall (Pure m))
=> [Async a]
-> m (Async a, Either SomeException a)
waitAnyCatch = liftBase . A.waitAnyCatch
\\ (inst :: Forall (Pure m) :- Pure m a)
| Generalized version of ' A.waitAnyCancel ' .
waitAnyCancel
:: forall m a. (MonadBase IO m, Forall (Pure m))
=> [Async a]
-> m (Async a, a)
waitAnyCancel = liftBase . A.waitAnyCancel
\\ (inst :: Forall (Pure m) :- Pure m a)
| Generalized version of ' A.waitAnyCatchCancel ' .
waitAnyCatchCancel
:: forall m a. (MonadBase IO m, Forall (Pure m))
=> [Async a]
-> m (Async a, Either SomeException a)
waitAnyCatchCancel = liftBase . A.waitAnyCatchCancel
\\ (inst :: Forall (Pure m) :- Pure m a)
-- | Generalized version of 'A.waitEither'.
waitEither
:: forall m a b. (MonadBase IO m, Forall (Pure m))
=> Async a
-> Async b
-> m (Either a b)
waitEither = (liftBase .) . A.waitEither
\\ (inst :: Forall (Pure m) :- Pure m a)
\\ (inst :: Forall (Pure m) :- Pure m b)
-- | Generalized version of 'A.waitEitherCatch'.
waitEitherCatch
:: forall m a b. (MonadBase IO m, Forall (Pure m))
=> Async a
-> Async b
-> m (Either (Either SomeException a) (Either SomeException b))
waitEitherCatch = (liftBase .) . A.waitEitherCatch
\\ (inst :: Forall (Pure m) :- Pure m a)
\\ (inst :: Forall (Pure m) :- Pure m b)
-- | Generalized version of 'A.waitEitherCancel'.
waitEitherCancel
:: forall m a b. (MonadBase IO m, Forall (Pure m))
=> Async a
-> Async b
-> m (Either a b)
waitEitherCancel = (liftBase .) . A.waitEitherCancel
\\ (inst :: Forall (Pure m) :- Pure m a)
\\ (inst :: Forall (Pure m) :- Pure m b)
-- | Generalized version of 'A.waitEitherCatchCancel'.
waitEitherCatchCancel
:: forall m a b. (MonadBase IO m, Forall (Pure m))
=> Async a
-> Async b
-> m (Either (Either SomeException a) (Either SomeException b))
waitEitherCatchCancel = (liftBase .) . A.waitEitherCatchCancel
\\ (inst :: Forall (Pure m) :- Pure m a)
\\ (inst :: Forall (Pure m) :- Pure m b)
| Generalized version of ' A.waitEither _ '
waitEither_ :: MonadBase IO m => Async a -> Async b -> m ()
waitEither_ = Unsafe.waitEither_
-- | Generalized version of 'A.waitBoth'.
waitBoth
:: forall m a b. (MonadBase IO m, Forall (Pure m))
=> Async a
-> Async b
-> m (a, b)
waitBoth = (liftBase .) . A.waitBoth
\\ (inst :: Forall (Pure m) :- Pure m a)
\\ (inst :: Forall (Pure m) :- Pure m b)
-- | Generalized version of 'A.race'.
race
:: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
=> m a -> m b -> m (Either a b)
race = liftBaseOp2_ A.race
-- | Generalized version of 'A.race_'.
race_
:: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
=> m a -> m b -> m ()
race_ = liftBaseOp2_ A.race_
-- | Generalized version of 'A.concurrently'.
concurrently
:: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
=> m a -> m b -> m (a, b)
concurrently = liftBaseOp2_ A.concurrently
-- | Generalized version of 'A.concurrently_'.
concurrently_
:: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
=> m a -> m b -> m ()
concurrently_ = liftBaseOp2_ A.concurrently_
-- | Similar to 'A.liftBaseOp_' but takes a binary function
-- and leverages @'StM' m a ~ a@.
liftBaseOp2_
:: forall base m a b c. (MonadBaseControl base m, Forall (Pure m))
=> (base a -> base b -> base c)
-> m a -> m b -> m c
liftBaseOp2_ f left right = liftBaseWith $ \run -> f
(run left \\ (inst :: Forall (Pure m) :- Pure m a))
(run right \\ (inst :: Forall (Pure m) :- Pure m b))
-- | Generalized version of 'A.mapConcurrently'.
mapConcurrently
:: (Traversable t, MonadBaseControl IO m, Forall (Pure m))
=> (a -> m b)
-> t a
-> m (t b)
mapConcurrently f = runConcurrently . traverse (Concurrently . f)
-- | Generalized version of 'A.mapConcurrently_'.
mapConcurrently_
:: (Foldable t, MonadBaseControl IO m, Forall (Pure m))
=> (a -> m b)
-> t a
-> m ()
mapConcurrently_ f = runConcurrently . foldMap (Concurrently . void . f)
-- | Generalized version of 'A.forConcurrently'.
forConcurrently
:: (Traversable t, MonadBaseControl IO m, Forall (Pure m))
=> t a
-> (a -> m b)
-> m (t b)
forConcurrently = flip mapConcurrently
-- | Generalized version of 'A.forConcurrently_'.
forConcurrently_
:: (Foldable t, MonadBaseControl IO m, Forall (Pure m))
=> t a
-> (a -> m b)
-> m ()
forConcurrently_ = flip mapConcurrently_
-- | Generalized version of 'A.replicateConcurrently'.
replicateConcurrently
:: (MonadBaseControl IO m, Forall (Pure m))
=> Int
-> m a
-> m [a]
replicateConcurrently n =
runConcurrently . sequenceA . replicate n . Concurrently
-- | Generalized version of 'A.replicateConcurrently_'.
replicateConcurrently_
:: (MonadBaseControl IO m, Forall (Pure m))
=> Int
-> m a
-> m ()
replicateConcurrently_ n =
runConcurrently . fold . replicate n . Concurrently . void
-- | Generalized version of 'A.Concurrently'.
--
-- A value of type @'Concurrently' m a@ is an IO-based operation that can be
-- composed with other 'Concurrently' values, using the 'Applicative' and
-- 'Alternative' instances.
--
Calling ' ' on a value of type @'Concurrently ' m a@ will
execute the IO - based lifted operations it contains concurrently , before
-- delivering the result of type 'a'.
--
-- For example
--
-- @
( page1 , , page3 ) < - ' runConcurrently ' $ ( , , )
-- '<$>' 'Concurrently' (getURL "url1")
-- '<*>' 'Concurrently' (getURL "url2")
-- '<*>' 'Concurrently' (getURL "url3")
-- @
data Concurrently m a where
Concurrently
:: Forall (Pure m) => { runConcurrently :: m a } -> Concurrently m a
-- | Most of the functions in this module have @'Forall' ('Pure' m)@ in their
-- constraints, which means they require the monad 'm' satisfies
-- @'StM' m a ~ a@ for all 'a'.
class StM m a ~ a => Pure m a
instance StM m a ~ a => Pure m a
instance Functor m => Functor (Concurrently m) where
fmap f (Concurrently a) = Concurrently $ f <$> a
instance (MonadBaseControl IO m, Forall (Pure m)) =>
Applicative (Concurrently m) where
pure = Concurrently . pure
Concurrently (fs :: m (a -> b)) <*> Concurrently as =
Concurrently (uncurry ($) <$> concurrently fs as)
\\ (inst :: Forall (Pure m) :- Pure m a)
\\ (inst :: Forall (Pure m) :- Pure m (a -> b))
instance (MonadBaseControl IO m, Forall (Pure m)) =>
Alternative (Concurrently m) where
empty = Concurrently $ liftBaseWith $ \_ -> forever $ threadDelay maxBound
Concurrently (as :: m a) <|> Concurrently bs =
Concurrently (either id id <$> race as bs)
\\ (inst :: Forall (Pure m) :- Pure m a)
\\ (inst :: Forall (Pure m) :- Pure m b)
#if MIN_VERSION_base(4, 9, 0)
instance (MonadBaseControl IO m, Semigroup a, Forall (Pure m)) =>
Semigroup (Concurrently m a) where
(<>) = liftA2 (<>)
instance (MonadBaseControl IO m, Semigroup a, Monoid a, Forall (Pure m)) =>
Monoid (Concurrently m a) where
mempty = pure mempty
mappend = (<>)
#else
instance (MonadBaseControl IO m, Monoid a, Forall (Pure m)) =>
Monoid (Concurrently m a) where
mempty = pure mempty
mappend = liftA2 mappend
#endif
| null | https://raw.githubusercontent.com/maoe/lifted-async/922f5469acb09d66e4dbb996167f963cc811d52a/src/Control/Concurrent/Async/Lifted/Safe.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE GADTs #
# LANGUAGE RankNTypes #
* Asynchronous actions
** Spawning
** Spawning with automatic 'cancel'ation
** Quering 'Async's
** Waiting for multiple 'Async's
** Linking
* Convenient utilities
| Generalized version of 'A.async'.
| Generalized version of 'A.withAsync'.
| Generalized version of 'A.withAsyncWithUnmask'.
| Generalized version of 'A.wait'.
| Generalized version of 'A.poll'.
| Generalized version of 'A.cancel'.
| Generalized version of 'A.cancelWith'.
| Generalized version of 'A.uninterruptibleCancel'.
{- Haddock fragments recovered from this record's "comments" column (they
   document the corresponding definitions below):

   Generalized versions of: 'A.waitAny', 'A.waitAnyCatch', 'A.waitEither',
   'A.waitEitherCatch', 'A.waitEitherCancel', 'A.waitEitherCatchCancel',
   'A.waitBoth', 'A.race', 'A.race_', 'A.concurrently', 'A.concurrently_',
   'A.mapConcurrently', 'A.mapConcurrently_', 'A.forConcurrently',
   'A.forConcurrently_', 'A.replicateConcurrently',
   'A.replicateConcurrently_', 'A.Concurrently'.

   Similar to 'A.liftBaseOp_' but takes a binary function
   and leverages @StM m a ~ a@.

   A value of type @Concurrently m a@ is an IO-based operation that can be
   composed with other Concurrently values, using the Applicative and
   Alternative instances, delivering the result of type a.
   For example

   @
     '<$>' 'Concurrently' (getURL "url1")
     '<*>' 'Concurrently' (getURL "url2")
     '<*>' 'Concurrently' (getURL "url3")
   @

   Most of the functions in this module have @Forall (Pure m)@ in their
   constraints, which means they require the monad @m@ satisfies
   @StM m a ~ a@ for all @a@.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{- |
Module      : Control.Concurrent.Async.Lifted.Safe
Copyright   : Copyright (C) 2012-2018 Mitsutoshi Aoe
License     : BSD-style (see the file LICENSE)
Maintainer  : Mitsutoshi Aoe <>
Stability   : experimental

This is a safe variant of @Control.Concurrent.Async.Lifted@.

This module assumes your monad stack to satisfy @'StM' m a ~ a@ so you can't
mess up monadic effects. If your monad stack is stateful, use
@Control.Concurrent.Async.Lifted@ with special care.
-}
module Control.Concurrent.Async.Lifted.Safe
(
A.Async
, Pure
, Forall
, async, asyncBound, asyncOn, asyncWithUnmask, asyncOnWithUnmask
, withAsync, withAsyncBound, withAsyncOn
, withAsyncWithUnmask, withAsyncOnWithUnmask
, wait, poll, waitCatch
, cancel
, uninterruptibleCancel
, cancelWith
, A.asyncThreadId
, A.AsyncCancelled(..)
  -- ** STM operations
, A.waitSTM, A.pollSTM, A.waitCatchSTM
, waitAny, waitAnyCatch, waitAnyCancel, waitAnyCatchCancel
, waitEither, waitEitherCatch, waitEitherCancel, waitEitherCatchCancel
, waitEither_
, waitBoth
  -- ** Waiting for multiple 'Async's in STM
, A.waitAnySTM
, A.waitAnyCatchSTM
, A.waitEitherSTM
, A.waitEitherCatchSTM
, A.waitEitherSTM_
, A.waitBothSTM
, Unsafe.link, Unsafe.link2
, A.ExceptionInLinkedThread(..)
, race, race_, concurrently, concurrently_
, mapConcurrently, mapConcurrently_
, forConcurrently, forConcurrently_
, replicateConcurrently, replicateConcurrently_
, Concurrently(..)
, A.compareAsyncs
)
where
import Control.Applicative
import Control.Concurrent (threadDelay)
import Control.Monad
import Data.Foldable (fold)
import Control.Concurrent.Async (Async)
import Control.Exception.Lifted (SomeException, Exception)
import Control.Monad.Base (MonadBase(..))
import Control.Monad.Trans.Control hiding (restoreM)
import Data.Constraint ((\\), (:-))
import Data.Constraint.Forall (Forall, inst)
import qualified Control.Concurrent.Async as A
import qualified Control.Concurrent.Async.Lifted as Unsafe
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 710
import Data.Foldable
import Data.Traversable
#endif
#if !MIN_VERSION_base(4, 8, 0)
import Data.Monoid (Monoid(mappend, mempty))
#elif MIN_VERSION_base(4, 9, 0) && !MIN_VERSION_base(4, 13, 0)
import Data.Semigroup (Semigroup((<>)))
#endif
-- | Generalized version of 'A.async'.
async
  :: forall m a. (MonadBaseControl IO m, Forall (Pure m))
  => m a -> m (Async a)
-- The '\\ inst' discharges the per-type 'Pure m a' constraint from the
-- quantified 'Forall (Pure m)' evidence (same pattern everywhere below).
async = Unsafe.async
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.asyncBound'.
asyncBound
  :: forall m a. (MonadBaseControl IO m, Forall (Pure m))
  => m a -> m (Async a)
asyncBound = Unsafe.asyncBound
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.asyncOn'.
asyncOn
  :: forall m a. (MonadBaseControl IO m, Forall (Pure m))
  => Int -> m a -> m (Async a)
asyncOn cpu m = Unsafe.asyncOn cpu m
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.asyncWithUnmask'.
asyncWithUnmask
  :: forall m a. (MonadBaseControl IO m, Forall (Pure m))
  => ((forall b. m b -> m b) -> m a)
  -> m (Async a)
asyncWithUnmask restore = Unsafe.asyncWithUnmask restore
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.asyncOnWithUnmask'.
asyncOnWithUnmask
  :: forall m a. (MonadBaseControl IO m, Forall (Pure m))
  => Int
  -> ((forall b. m b -> m b) -> m a)
  -> m (Async a)
asyncOnWithUnmask cpu restore = Unsafe.asyncOnWithUnmask cpu restore
  \\ (inst :: Forall (Pure m) :- Pure m a)
-- | Generalized version of 'A.withAsync'.
withAsync
  :: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
  => m a
  -> (Async a -> m b)
  -> m b
withAsync = Unsafe.withAsync
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.withAsyncBound'.
withAsyncBound
  :: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
  => m a
  -> (Async a -> m b)
  -> m b
withAsyncBound = Unsafe.withAsyncBound
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.withAsyncOn'.
withAsyncOn
  :: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
  => Int
  -> m a
  -> (Async a -> m b)
  -> m b
withAsyncOn = Unsafe.withAsyncOn
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.withAsyncWithUnmask'.
withAsyncWithUnmask
  :: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
  => ((forall c. m c -> m c) -> m a)
  -> (Async a -> m b)
  -> m b
withAsyncWithUnmask restore = Unsafe.withAsyncWithUnmask restore
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.withAsyncOnWithUnmask'.
withAsyncOnWithUnmask
  :: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
  => Int
  -> ((forall c. m c -> m c) -> m a)
  -> (Async a -> m b)
  -> m b
withAsyncOnWithUnmask cpu restore = Unsafe.withAsyncOnWithUnmask cpu restore
  \\ (inst :: Forall (Pure m) :- Pure m a)
-- | Generalized version of 'A.wait'.
wait
  :: forall m a. (MonadBase IO m, Forall (Pure m))
  => Async a -> m a
wait = liftBase . A.wait
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.poll'.
poll
  :: forall m a. (MonadBase IO m, Forall (Pure m))
  => Async a
  -> m (Maybe (Either SomeException a))
poll = liftBase . A.poll
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.waitCatch'.
waitCatch
  :: forall m a. (MonadBase IO m, Forall (Pure m))
  => Async a
  -> m (Either SomeException a)
waitCatch = liftBase . A.waitCatch
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.cancel'.
cancel :: MonadBase IO m => Async a -> m ()
cancel = Unsafe.cancel

-- | Generalized version of 'A.cancelWith'.
cancelWith :: (MonadBase IO m, Exception e) => Async a -> e -> m ()
cancelWith = Unsafe.cancelWith

-- | Generalized version of 'A.uninterruptibleCancel'.
uninterruptibleCancel :: MonadBase IO m => Async a -> m ()
uninterruptibleCancel = Unsafe.uninterruptibleCancel
-- | Generalized version of 'A.waitAny'.
waitAny
  :: forall m a. (MonadBase IO m, Forall (Pure m))
  => [Async a] -> m (Async a, a)
waitAny = liftBase . A.waitAny
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.waitAnyCatch'.
waitAnyCatch
  :: forall m a. (MonadBase IO m, Forall (Pure m))
  => [Async a]
  -> m (Async a, Either SomeException a)
waitAnyCatch = liftBase . A.waitAnyCatch
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.waitAnyCancel'.
waitAnyCancel
  :: forall m a. (MonadBase IO m, Forall (Pure m))
  => [Async a]
  -> m (Async a, a)
waitAnyCancel = liftBase . A.waitAnyCancel
  \\ (inst :: Forall (Pure m) :- Pure m a)

-- | Generalized version of 'A.waitAnyCatchCancel'.
waitAnyCatchCancel
  :: forall m a. (MonadBase IO m, Forall (Pure m))
  => [Async a]
  -> m (Async a, Either SomeException a)
waitAnyCatchCancel = liftBase . A.waitAnyCatchCancel
  \\ (inst :: Forall (Pure m) :- Pure m a)
-- | Generalized version of 'A.waitEither'.
waitEither
  :: forall m a b. (MonadBase IO m, Forall (Pure m))
  => Async a
  -> Async b
  -> m (Either a b)
waitEither = (liftBase .) . A.waitEither
  \\ (inst :: Forall (Pure m) :- Pure m a)
  \\ (inst :: Forall (Pure m) :- Pure m b)

-- | Generalized version of 'A.waitEitherCatch'.
waitEitherCatch
  :: forall m a b. (MonadBase IO m, Forall (Pure m))
  => Async a
  -> Async b
  -> m (Either (Either SomeException a) (Either SomeException b))
waitEitherCatch = (liftBase .) . A.waitEitherCatch
  \\ (inst :: Forall (Pure m) :- Pure m a)
  \\ (inst :: Forall (Pure m) :- Pure m b)

-- | Generalized version of 'A.waitEitherCancel'.
waitEitherCancel
  :: forall m a b. (MonadBase IO m, Forall (Pure m))
  => Async a
  -> Async b
  -> m (Either a b)
waitEitherCancel = (liftBase .) . A.waitEitherCancel
  \\ (inst :: Forall (Pure m) :- Pure m a)
  \\ (inst :: Forall (Pure m) :- Pure m b)

-- | Generalized version of 'A.waitEitherCatchCancel'.
waitEitherCatchCancel
  :: forall m a b. (MonadBase IO m, Forall (Pure m))
  => Async a
  -> Async b
  -> m (Either (Either SomeException a) (Either SomeException b))
waitEitherCatchCancel = (liftBase .) . A.waitEitherCatchCancel
  \\ (inst :: Forall (Pure m) :- Pure m a)
  \\ (inst :: Forall (Pure m) :- Pure m b)

-- | Generalized version of 'A.waitEither_'.
waitEither_ :: MonadBase IO m => Async a -> Async b -> m ()
waitEither_ = Unsafe.waitEither_

-- | Generalized version of 'A.waitBoth'.
waitBoth
  :: forall m a b. (MonadBase IO m, Forall (Pure m))
  => Async a
  -> Async b
  -> m (a, b)
waitBoth = (liftBase .) . A.waitBoth
  \\ (inst :: Forall (Pure m) :- Pure m a)
  \\ (inst :: Forall (Pure m) :- Pure m b)
-- | Generalized version of 'A.race'.
race
  :: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
  => m a -> m b -> m (Either a b)
race = liftBaseOp2_ A.race

-- | Generalized version of 'A.race_'.
race_
  :: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
  => m a -> m b -> m ()
race_ = liftBaseOp2_ A.race_

-- | Generalized version of 'A.concurrently'.
concurrently
  :: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
  => m a -> m b -> m (a, b)
concurrently = liftBaseOp2_ A.concurrently

-- | Generalized version of 'A.concurrently_'.
concurrently_
  :: forall m a b. (MonadBaseControl IO m, Forall (Pure m))
  => m a -> m b -> m ()
concurrently_ = liftBaseOp2_ A.concurrently_

-- | Similar to 'A.liftBaseOp_' but takes a binary function
-- and leverages @'StM' m a ~ a@.
liftBaseOp2_
  :: forall base m a b c. (MonadBaseControl base m, Forall (Pure m))
  => (base a -> base b -> base c)
  -> m a -> m b -> m c
-- 'run' lowers each action to the base monad; the '\\ inst' applications
-- discharge the per-type 'Pure' constraints so @StM m x ~ x@ is in scope.
liftBaseOp2_ f left right = liftBaseWith $ \run -> f
  (run left \\ (inst :: Forall (Pure m) :- Pure m a))
  (run right \\ (inst :: Forall (Pure m) :- Pure m b))
-- | Generalized version of 'A.mapConcurrently'.
mapConcurrently
  :: (Traversable t, MonadBaseControl IO m, Forall (Pure m))
  => (a -> m b)
  -> t a
  -> m (t b)
mapConcurrently f = runConcurrently . traverse (Concurrently . f)

-- | Generalized version of 'A.mapConcurrently_'.
mapConcurrently_
  :: (Foldable t, MonadBaseControl IO m, Forall (Pure m))
  => (a -> m b)
  -> t a
  -> m ()
-- 'void' makes every action :: m (), so 'foldMap' can combine them with the
-- Monoid instance of @Concurrently m ()@.
mapConcurrently_ f = runConcurrently . foldMap (Concurrently . void . f)

-- | Generalized version of 'A.forConcurrently'.
forConcurrently
  :: (Traversable t, MonadBaseControl IO m, Forall (Pure m))
  => t a
  -> (a -> m b)
  -> m (t b)
forConcurrently = flip mapConcurrently

-- | Generalized version of 'A.forConcurrently_'.
forConcurrently_
  :: (Foldable t, MonadBaseControl IO m, Forall (Pure m))
  => t a
  -> (a -> m b)
  -> m ()
forConcurrently_ = flip mapConcurrently_

-- | Generalized version of 'A.replicateConcurrently'.
replicateConcurrently
  :: (MonadBaseControl IO m, Forall (Pure m))
  => Int
  -> m a
  -> m [a]
replicateConcurrently n =
  runConcurrently . sequenceA . replicate n . Concurrently

-- | Generalized version of 'A.replicateConcurrently_'.
replicateConcurrently_
  :: (MonadBaseControl IO m, Forall (Pure m))
  => Int
  -> m a
  -> m ()
replicateConcurrently_ n =
  runConcurrently . fold . replicate n . Concurrently . void
{- |
Generalized version of 'A.Concurrently'.

A value of type @'Concurrently' m a@ is an IO-based operation that can be
composed with other 'Concurrently' values, using the 'Applicative' and
'Alternative' instances.

Calling 'runConcurrently' on a value of type @'Concurrently' m a@ will
execute the IO-based lifted operations it contains concurrently, before
delivering the result of type @a@.

For example

@
  (page1, page2, page3) <- 'runConcurrently' $ (,,)
    '<$>' 'Concurrently' (getURL "url1")
    '<*>' 'Concurrently' (getURL "url2")
    '<*>' 'Concurrently' (getURL "url3")
@
-}
data Concurrently m a where
  Concurrently
    :: Forall (Pure m) => { runConcurrently :: m a } -> Concurrently m a

-- | @'Pure' m a@ holds exactly when @'StM' m a ~ a@; @'Forall' ('Pure' m)@
-- states this for every result type @a@.
class StM m a ~ a => Pure m a
instance StM m a ~ a => Pure m a

instance Functor m => Functor (Concurrently m) where
  fmap f (Concurrently a) = Concurrently $ f <$> a

instance (MonadBaseControl IO m, Forall (Pure m)) =>
  Applicative (Concurrently m) where
    pure = Concurrently . pure
    -- '<*>' runs both sides with 'concurrently' and applies the result.
    Concurrently (fs :: m (a -> b)) <*> Concurrently as =
      Concurrently (uncurry ($) <$> concurrently fs as)
        \\ (inst :: Forall (Pure m) :- Pure m a)
        \\ (inst :: Forall (Pure m) :- Pure m (a -> b))

instance (MonadBaseControl IO m, Forall (Pure m)) =>
  Alternative (Concurrently m) where
    -- 'empty' never delivers: it sleeps (effectively) forever.
    empty = Concurrently $ liftBaseWith $ \_ -> forever $ threadDelay maxBound
    -- '<|>' races both sides and keeps whichever finishes first.
    Concurrently (as :: m a) <|> Concurrently bs =
      Concurrently (either id id <$> race as bs)
        \\ (inst :: Forall (Pure m) :- Pure m a)
        \\ (inst :: Forall (Pure m) :- Pure m b)
#if MIN_VERSION_base(4, 9, 0)
-- | '<>' runs both actions concurrently and combines their results.
instance (MonadBaseControl IO m, Semigroup a, Forall (Pure m)) =>
  Semigroup (Concurrently m a) where
    (<>) = liftA2 (<>)
-- | 'mempty' does no work; 'mappend' delegates to the 'Semigroup'.
instance (MonadBaseControl IO m, Semigroup a, Monoid a, Forall (Pure m)) =>
  Monoid (Concurrently m a) where
    mempty = pure mempty
    mappend = (<>)
#else
-- | Pre-Semigroup base: 'mappend' combines results concurrently.
instance (MonadBaseControl IO m, Monoid a, Forall (Pure m)) =>
  Monoid (Concurrently m a) where
    mempty = pure mempty
    mappend = liftA2 mappend
#endif
|
ef682c6c536ff8a4ec34e60384b97017650c2c2aaf97f5e8e396444c7116d0df | facebook/infer | ObjCDispatchModels.ml |
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)
open! IStd
(* Names of the libdispatch entry points that receive special modelling;
   membership is tested by [is_model] below. *)
let dispatch_models =
  [ "_dispatch_once"
  ; "dispatch_async"
  ; "dispatch_after"
  ; "dispatch_group_async"
  ; "dispatch_barrier_async"
  ; "dispatch_group_notify" ]
let is_model proc_name = List.mem dispatch_models ~equal:String.equal (Procname.to_string proc_name)
(* Scan the actual parameters for the first Objective-C block closure and
   return its procedure name, the closure expression, and an (empty) list of
   arguments; [None] when no such parameter exists. *)
let get_dispatch_closure_opt actual_params =
  List.find_map actual_params ~f:(fun (exp, _) ->
      match exp with
      | Exp.Closure c when Procname.is_objc_block c.name ->
          (* We assume that for these modelled functions, the block passed as parameter doesn't
             have arguments, so we only pass the captured variables. *)
          Some (c.name, exp, [])
      | _ ->
          None )
| null | https://raw.githubusercontent.com/facebook/infer/2e3b33f21214653742967e5e1ab3235cc6d43e41/infer/src/IR/ObjCDispatchModels.ml | ocaml | We assume that for these modelled functions, the block passed as parameter doesn't
have arguments, so we only pass the captured variables. |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
let dispatch_models =
[ "_dispatch_once"
; "dispatch_async"
; "dispatch_after"
; "dispatch_group_async"
; "dispatch_barrier_async"
; "dispatch_group_notify" ]
let is_model proc_name = List.mem dispatch_models ~equal:String.equal (Procname.to_string proc_name)
let get_dispatch_closure_opt actual_params =
List.find_map actual_params ~f:(fun (exp, _) ->
match exp with
| Exp.Closure c when Procname.is_objc_block c.name ->
Some (c.name, exp, [])
| _ ->
None )
|
f08b12cbaf8d400e381e44bac921a41ebd318aa2129996ea3d92b3acd34d9db4 | evilbinary/scheme-lib | logical.scm | ;;;; "logical.scm", bit access and operations for integers for Scheme
Copyright ( C ) 1991 , 1993 , 2001 , 2003 , 2005
;
;Permission to copy this software, to modify it, to redistribute it,
;to distribute modified versions, and to use it for any purpose is
;granted, subject to the following restrictions and understandings.
;
1 . Any copy made of this software must include this copyright notice
;in full.
;
2 . I have made no warranty or representation that the operation of
;this software will be error-free, and I am under no obligation to
;provide any services, by way of maintenance, update, or otherwise.
;
3 . In conjunction with products arising from the use of this
;material, there shall be no use of my name in any advertising,
;promotional, or sales literature without prior written consent in
;each case.
;; 16x16 lookup table: entry (i, j) is the bitwise XOR of nibbles i and j.
(define logical:boole-xor
  '#(#(0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15)
     #(1 0 3 2 5 4 7 6 9 8 11 10 13 12 15 14)
     #(2 3 0 1 6 7 4 5 10 11 8 9 14 15 12 13)
     #(3 2 1 0 7 6 5 4 11 10 9 8 15 14 13 12)
     #(4 5 6 7 0 1 2 3 12 13 14 15 8 9 10 11)
     #(5 4 7 6 1 0 3 2 13 12 15 14 9 8 11 10)
     #(6 7 4 5 2 3 0 1 14 15 12 13 10 11 8 9)
     #(7 6 5 4 3 2 1 0 15 14 13 12 11 10 9 8)
     #(8 9 10 11 12 13 14 15 0 1 2 3 4 5 6 7)
     #(9 8 11 10 13 12 15 14 1 0 3 2 5 4 7 6)
     #(10 11 8 9 14 15 12 13 2 3 0 1 6 7 4 5)
     #(11 10 9 8 15 14 13 12 3 2 1 0 7 6 5 4)
     #(12 13 14 15 8 9 10 11 4 5 6 7 0 1 2 3)
     #(13 12 15 14 9 8 11 10 5 4 7 6 1 0 3 2)
     #(14 15 12 13 10 11 8 9 6 7 4 5 2 3 0 1)
     #(15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 0)))
;; 16x16 lookup table: entry (i, j) is the bitwise AND of nibbles i and j.
(define logical:boole-and
  '#(#(0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0)
     #(0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1)
     #(0 0 2 2 0 0 2 2 0 0 2 2 0 0 2 2)
     #(0 1 2 3 0 1 2 3 0 1 2 3 0 1 2 3)
     #(0 0 0 0 4 4 4 4 0 0 0 0 4 4 4 4)
     #(0 1 0 1 4 5 4 5 0 1 0 1 4 5 4 5)
     #(0 0 2 2 4 4 6 6 0 0 2 2 4 4 6 6)
     #(0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7)
     #(0 0 0 0 0 0 0 0 8 8 8 8 8 8 8 8)
     #(0 1 0 1 0 1 0 1 8 9 8 9 8 9 8 9)
     #(0 0 2 2 0 0 2 2 8 8 10 10 8 8 10 10)
     #(0 1 2 3 0 1 2 3 8 9 10 11 8 9 10 11)
     #(0 0 0 0 4 4 4 4 8 8 8 8 12 12 12 12)
     #(0 1 0 1 4 5 4 5 8 9 8 9 12 13 12 13)
     #(0 0 2 2 4 4 6 6 8 8 10 10 12 12 14 14)
     #(0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15)))
;; Arithmetic shift right by four bits (one nibble), rounding toward
;; negative infinity, so it is correct for negative integers too.
(define (logical:ash-4 x)
  (cond ((negative? x) (- (quotient (+ x 1) 16) 1))
        (else (quotient x 16))))
;; Lift a binary nibble-recursive operator OP4 (called as
;; (op4 accumulated next scale partial)) into an n-ary procedure that folds
;; it over its arguments, starting from the identity IDENT.
(define (logical:reduce op4 ident)
  (lambda args
    (do ((res ident (op4 res (car rgs) 1 0))
         (rgs args (cdr rgs)))
        ((null? rgs) res))))
;@
;; N-ary bitwise AND.  Works one nibble (4 bits) at a time via the
;; logical:boole-and table; identity is -1 (all ones).  The (= n1 n2)
;; clause terminates recursion for both all-zero and all-one tails, which
;; makes the table approach work for negative (two's-complement) numbers.
(define logand
  (letrec
      ((lgand
        (lambda (n2 n1 scl acc)
          (cond ((= n1 n2) (+ acc (* scl n1)))
                ((zero? n2) acc)
                ((zero? n1) acc)
                (else (lgand (logical:ash-4 n2)
                             (logical:ash-4 n1)
                             (* 16 scl)
                             (+ (* (vector-ref (vector-ref logical:boole-and
                                                           (modulo n1 16))
                                               (modulo n2 16))
                                   scl)
                                acc)))))))
    (logical:reduce lgand -1)))
;@
;; N-ary bitwise inclusive OR.  OR is computed from the AND table via
;; De Morgan: a|b = 15 - ((15-a) & (15-b)) per nibble.  Identity is 0.
(define logior
  (letrec
      ((lgior
        (lambda (n2 n1 scl acc)
          (cond ((= n1 n2) (+ acc (* scl n1)))
                ((zero? n2) (+ acc (* scl n1)))
                ((zero? n1) (+ acc (* scl n2)))
                (else (lgior (logical:ash-4 n2)
                             (logical:ash-4 n1)
                             (* 16 scl)
                             (+ (* (- 15 (vector-ref
                                          (vector-ref logical:boole-and
                                                      (- 15 (modulo n1 16)))
                                          (- 15 (modulo n2 16))))
                                   scl)
                                acc)))))))
    (logical:reduce lgior 0)))
;@
;; N-ary bitwise exclusive OR, nibble-wise via logical:boole-xor.
;; Equal arguments cancel to zero bits, hence the (= n1 n2) => acc clause.
(define logxor
  (letrec
      ((lgxor
        (lambda (n2 n1 scl acc)
          (cond ((= n1 n2) acc)
                ((zero? n2) (+ acc (* scl n1)))
                ((zero? n1) (+ acc (* scl n2)))
                (else (lgxor (logical:ash-4 n2)
                             (logical:ash-4 n1)
                             (* 16 scl)
                             (+ (* (vector-ref (vector-ref logical:boole-xor
                                                           (modulo n1 16))
                                               (modulo n2 16))
                                   scl)
                                acc)))))))
    (logical:reduce lgxor 0)))
;@
(define (lognot n) (- -1 n))
;@
;; True when N1 and N2 have at least one set bit in common.
(define (logtest n1 n2)
  (if (zero? (logand n1 n2)) #f #t))
;@
;; True when bit INDEX (counting from the least significant bit) of N is set.
(define (logbit? index n)
  (logtest n (expt 2 index)))
;@
;; Return TO with bit INDEX set (bool = #t) or cleared (bool = #f).
(define (copy-bit index to bool)
  (if bool
      (logior to (arithmetic-shift 1 index))
      (logand to (lognot (arithmetic-shift 1 index)))))
;@
;; Bitwise merge: take bits of N0 where MASK is one, bits of N1 elsewhere.
(define (bitwise-if mask n0 n1)
  (logior (logand mask n0)
          (logand (lognot mask) n1)))
;@
;; Extract bits START (inclusive) .. END (exclusive) of N, right-justified.
(define (bit-field n start end)
  (logand (lognot (ash -1 (- end start)))
          (arithmetic-shift n (- start))))
;@
;; Replace bits START .. END of TO with the low (END-START) bits of FROM.
(define (copy-bit-field to from start end)
  (bitwise-if (arithmetic-shift (lognot (ash -1 (- end start))) start)
              (arithmetic-shift from start)
              to))
;@
;; Rotate the field of bits START .. END of N left by COUNT positions
;; (COUNT is taken modulo the field width); bits outside the field are
;; left unchanged.
(define (rotate-bit-field n count start end)
  (define width (- end start))
  (set! count (modulo count width))
  (let ((mask (lognot (ash -1 width))))
    ;; zn is the field itself, right-justified.
    (define zn (logand mask (arithmetic-shift n (- start))))
    (logior (arithmetic-shift
             (logior (logand mask (arithmetic-shift zn count))
                     (arithmetic-shift zn (- count width)))
             start)
            (logand (lognot (ash mask start)) n))))
;@
;; Shift N left by COUNT bits; a negative COUNT shifts right, rounding
;; toward negative infinity (a true arithmetic shift).
(define (arithmetic-shift n count)
  (cond ((negative? count)
         (let ((divisor (expt 2 (- count))))
           (if (negative? n)
               (- (quotient (+ n 1) divisor) 1)
               (quotient n divisor))))
        (else (* n (expt 2 count)))))
;@
;; Number of bits needed to represent N in two's complement, excluding the
;; sign bit; processes four bits per recursive step.
(define integer-length
  (letrec ((intlen (lambda (n tot)
                     (case n
                       ((0 -1) (+ 0 tot))
                       ((1 -2) (+ 1 tot))
                       ((2 3 -3 -4) (+ 2 tot))
                       ((4 5 6 7 -5 -6 -7 -8) (+ 3 tot))
                       (else (intlen (logical:ash-4 n) (+ 4 tot)))))))
    (lambda (n) (intlen n 0))))
;@
;; Population count via a 16-entry nibble table; for negative N the result
;; is (lognot (count of the complement's one-bits)), per SRFI-60.
(define bitwise-bit-count
  (letrec ((logcnt (lambda (n tot)
                     (if (zero? n)
                         tot
                         (logcnt (quotient n 16)
                                 (+ (vector-ref
                                     '#(0 1 1 2 1 2 2 3 1 2 2 3 2 3 3 4)
                                     (modulo n 16))
                                    tot))))))
    (lambda (n)
      (cond ((negative? n) (lognot (logcnt (lognot n) 0)))
            ((positive? n) (logcnt n 0))
            (else 0)))))
;@
;; Count of one-bits for non-negative N, of zero-bits for negative N.
(define (logcount n)
  (cond ((negative? n) (bitwise-bit-count (lognot n)))
        (else (bitwise-bit-count n))))
;@
;; Index of the least significant one-bit of N (-1 when N is zero);
;; (logand n (- n)) isolates that bit.
(define (log2-binary-factors n)
  (+ -1 (integer-length (logand n (- n)))))
;; Helper: reverse the low K bits of N; negative N is handled by reversing
;; its complement and complementing the result.
(define (bit-reverse k n)
  (do ((m (if (negative? n) (lognot n) n) (arithmetic-shift m -1))
       (k (+ -1 k) (+ -1 k))
       (rvs 0 (logior (arithmetic-shift rvs 1) (logand 1 m))))
      ((negative? k) (if (negative? n) (lognot rvs) rvs))))
;@
;; Reverse the order of bits START .. END of N, leaving other bits alone.
(define (reverse-bit-field n start end)
  (define width (- end start))
  (let ((mask (lognot (ash -1 width))))
    (define zn (logand mask (arithmetic-shift n (- start))))
    (logior (arithmetic-shift (bit-reverse width zn) start)
            (logand (lognot (ash mask start)) n))))
;@
;; Booleans for the bits of non-negative K, most significant first; an
;; optional LEN fixes the result length (truncating or zero-padding).
(define (integer->list k . len)
  (if (negative? k) (slib:error 'integer->list 'negative? k))
  (if (null? len)
      (do ((k k (arithmetic-shift k -1))
           (lst '() (cons (odd? k) lst)))
          ((<= k 0) lst))
      (do ((idx (+ -1 (car len)) (+ -1 idx))
           (k k (arithmetic-shift k -1))
           (lst '() (cons (odd? k) lst)))
          ((negative? idx) lst))))
;@
;; Interpret a list of booleans, most significant bit first, as a
;; non-negative integer (#t = 1, #f = 0).
(define (list->integer bools)
  (let loop ((bs bools) (acc 0))
    (if (null? bs)
        acc
        (loop (cdr bs) (+ (* 2 acc) (if (car bs) 1 0))))))
;; Variadic convenience wrapper around list->integer.
(define (booleans->integer . bools)
  (list->integer bools))
;;;;@ SRFI-60 aliases
;; Re-export the procedures above under their SRFI-60 names.
(define ash arithmetic-shift)
(define bitwise-ior logior)
(define bitwise-xor logxor)
(define bitwise-and logand)
(define bitwise-not lognot)
(define bit-count logcount)
(define bit-set? logbit?)
(define any-bits-set? logtest)
(define first-set-bit log2-binary-factors)
(define bitwise-merge bitwise-if)
(provide 'srfi-60)
;;; Legacy
;;(define (logical:rotate k count len) (rotate-bit-field k count 0 len))
;;(define (logical:ones deg) (lognot (ash -1 deg)))
;;(define integer-expt expt) ; legacy name
| null | https://raw.githubusercontent.com/evilbinary/scheme-lib/6df491c1f616929caa4e6569fa44e04df7a356a7/packages/slib/logical.scm | scheme | "logical.scm", bit access and operations for integers for Scheme
Permission to copy this software, to modify it, to redistribute it,
to distribute modified versions, and to use it for any purpose is
granted, subject to the following restrictions and understandings.
in full.
this software will be error-free, and I am under no obligation to
provide any services, by way of maintenance, update, or otherwise.
material, there shall be no use of my name in any advertising,
promotional, or sales literature without prior written consent in
each case.
@
@
@
@
@
@
@
@
@
@
@
@
@
@
@
@
@
@
@
@ SRFI-60 aliases
Legacy
(define (logical:rotate k count len) (rotate-bit-field k count 0 len))
(define (logical:ones deg) (lognot (ash -1 deg)))
(define integer-expt expt) ; legacy name | Copyright ( C ) 1991 , 1993 , 2001 , 2003 , 2005
1 . Any copy made of this software must include this copyright notice
2 . I have made no warranty or representation that the operation of
3 . In conjunction with products arising from the use of this
(define logical:boole-xor
'#(#(0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15)
#(1 0 3 2 5 4 7 6 9 8 11 10 13 12 15 14)
#(2 3 0 1 6 7 4 5 10 11 8 9 14 15 12 13)
#(3 2 1 0 7 6 5 4 11 10 9 8 15 14 13 12)
#(4 5 6 7 0 1 2 3 12 13 14 15 8 9 10 11)
#(5 4 7 6 1 0 3 2 13 12 15 14 9 8 11 10)
#(6 7 4 5 2 3 0 1 14 15 12 13 10 11 8 9)
#(7 6 5 4 3 2 1 0 15 14 13 12 11 10 9 8)
#(8 9 10 11 12 13 14 15 0 1 2 3 4 5 6 7)
#(9 8 11 10 13 12 15 14 1 0 3 2 5 4 7 6)
#(10 11 8 9 14 15 12 13 2 3 0 1 6 7 4 5)
#(11 10 9 8 15 14 13 12 3 2 1 0 7 6 5 4)
#(12 13 14 15 8 9 10 11 4 5 6 7 0 1 2 3)
#(13 12 15 14 9 8 11 10 5 4 7 6 1 0 3 2)
#(14 15 12 13 10 11 8 9 6 7 4 5 2 3 0 1)
#(15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 0)))
(define logical:boole-and
'#(#(0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0)
#(0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1)
#(0 0 2 2 0 0 2 2 0 0 2 2 0 0 2 2)
#(0 1 2 3 0 1 2 3 0 1 2 3 0 1 2 3)
#(0 0 0 0 4 4 4 4 0 0 0 0 4 4 4 4)
#(0 1 0 1 4 5 4 5 0 1 0 1 4 5 4 5)
#(0 0 2 2 4 4 6 6 0 0 2 2 4 4 6 6)
#(0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7)
#(0 0 0 0 0 0 0 0 8 8 8 8 8 8 8 8)
#(0 1 0 1 0 1 0 1 8 9 8 9 8 9 8 9)
#(0 0 2 2 0 0 2 2 8 8 10 10 8 8 10 10)
#(0 1 2 3 0 1 2 3 8 9 10 11 8 9 10 11)
#(0 0 0 0 4 4 4 4 8 8 8 8 12 12 12 12)
#(0 1 0 1 4 5 4 5 8 9 8 9 12 13 12 13)
#(0 0 2 2 4 4 6 6 8 8 10 10 12 12 14 14)
#(0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15)))
(define (logical:ash-4 x)
(if (negative? x)
(+ -1 (quotient (+ 1 x) 16))
(quotient x 16)))
(define (logical:reduce op4 ident)
(lambda args
(do ((res ident (op4 res (car rgs) 1 0))
(rgs args (cdr rgs)))
((null? rgs) res))))
(define logand
(letrec
((lgand
(lambda (n2 n1 scl acc)
(cond ((= n1 n2) (+ acc (* scl n1)))
((zero? n2) acc)
((zero? n1) acc)
(else (lgand (logical:ash-4 n2)
(logical:ash-4 n1)
(* 16 scl)
(+ (* (vector-ref (vector-ref logical:boole-and
(modulo n1 16))
(modulo n2 16))
scl)
acc)))))))
(logical:reduce lgand -1)))
(define logior
(letrec
((lgior
(lambda (n2 n1 scl acc)
(cond ((= n1 n2) (+ acc (* scl n1)))
((zero? n2) (+ acc (* scl n1)))
((zero? n1) (+ acc (* scl n2)))
(else (lgior (logical:ash-4 n2)
(logical:ash-4 n1)
(* 16 scl)
(+ (* (- 15 (vector-ref
(vector-ref logical:boole-and
(- 15 (modulo n1 16)))
(- 15 (modulo n2 16))))
scl)
acc)))))))
(logical:reduce lgior 0)))
(define logxor
(letrec
((lgxor
(lambda (n2 n1 scl acc)
(cond ((= n1 n2) acc)
((zero? n2) (+ acc (* scl n1)))
((zero? n1) (+ acc (* scl n2)))
(else (lgxor (logical:ash-4 n2)
(logical:ash-4 n1)
(* 16 scl)
(+ (* (vector-ref (vector-ref logical:boole-xor
(modulo n1 16))
(modulo n2 16))
scl)
acc)))))))
(logical:reduce lgxor 0)))
(define (lognot n) (- -1 n))
(define (logtest n1 n2)
(not (zero? (logand n1 n2))))
(define (logbit? index n)
(logtest (expt 2 index) n))
(define (copy-bit index to bool)
(if bool
(logior to (arithmetic-shift 1 index))
(logand to (lognot (arithmetic-shift 1 index)))))
(define (bitwise-if mask n0 n1)
(logior (logand mask n0)
(logand (lognot mask) n1)))
(define (bit-field n start end)
(logand (lognot (ash -1 (- end start)))
(arithmetic-shift n (- start))))
(define (copy-bit-field to from start end)
(bitwise-if (arithmetic-shift (lognot (ash -1 (- end start))) start)
(arithmetic-shift from start)
to))
(define (rotate-bit-field n count start end)
(define width (- end start))
(set! count (modulo count width))
(let ((mask (lognot (ash -1 width))))
(define zn (logand mask (arithmetic-shift n (- start))))
(logior (arithmetic-shift
(logior (logand mask (arithmetic-shift zn count))
(arithmetic-shift zn (- count width)))
start)
(logand (lognot (ash mask start)) n))))
(define (arithmetic-shift n count)
(if (negative? count)
(let ((k (expt 2 (- count))))
(if (negative? n)
(+ -1 (quotient (+ 1 n) k))
(quotient n k)))
(* (expt 2 count) n)))
(define integer-length
(letrec ((intlen (lambda (n tot)
(case n
((0 -1) (+ 0 tot))
((1 -2) (+ 1 tot))
((2 3 -3 -4) (+ 2 tot))
((4 5 6 7 -5 -6 -7 -8) (+ 3 tot))
(else (intlen (logical:ash-4 n) (+ 4 tot)))))))
(lambda (n) (intlen n 0))))
(define bitwise-bit-count
(letrec ((logcnt (lambda (n tot)
(if (zero? n)
tot
(logcnt (quotient n 16)
(+ (vector-ref
'#(0 1 1 2 1 2 2 3 1 2 2 3 2 3 3 4)
(modulo n 16))
tot))))))
(lambda (n)
(cond ((negative? n) (lognot (logcnt (lognot n) 0)))
((positive? n) (logcnt n 0))
(else 0)))))
(define (logcount n)
(cond ((negative? n) (bitwise-bit-count (lognot n)))
(else (bitwise-bit-count n))))
(define (log2-binary-factors n)
(+ -1 (integer-length (logand n (- n)))))
(define (bit-reverse k n)
(do ((m (if (negative? n) (lognot n) n) (arithmetic-shift m -1))
(k (+ -1 k) (+ -1 k))
(rvs 0 (logior (arithmetic-shift rvs 1) (logand 1 m))))
((negative? k) (if (negative? n) (lognot rvs) rvs))))
(define (reverse-bit-field n start end)
(define width (- end start))
(let ((mask (lognot (ash -1 width))))
(define zn (logand mask (arithmetic-shift n (- start))))
(logior (arithmetic-shift (bit-reverse width zn) start)
(logand (lognot (ash mask start)) n))))
(define (integer->list k . len)
(if (negative? k) (slib:error 'integer->list 'negative? k))
(if (null? len)
(do ((k k (arithmetic-shift k -1))
(lst '() (cons (odd? k) lst)))
((<= k 0) lst))
(do ((idx (+ -1 (car len)) (+ -1 idx))
(k k (arithmetic-shift k -1))
(lst '() (cons (odd? k) lst)))
((negative? idx) lst))))
(define (list->integer bools)
(do ((bs bools (cdr bs))
(acc 0 (+ acc acc (if (car bs) 1 0))))
((null? bs) acc)))
(define (booleans->integer . bools)
(list->integer bools))
(define ash arithmetic-shift)
(define bitwise-ior logior)
(define bitwise-xor logxor)
(define bitwise-and logand)
(define bitwise-not lognot)
(define bit-count logcount)
(define bit-set? logbit?)
(define any-bits-set? logtest)
(define first-set-bit log2-binary-factors)
(define bitwise-merge bitwise-if)
(provide 'srfi-60)
|
6cfb4ed4081862efe84b55b79b0d9afa9233add5dd74ff3c4fb2a34db1ec6314 | kosmikus/multirec | TH.hs | {-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Generics.MultiRec.TH
Copyright : ( c ) 2008 - -2010 Universiteit Utrecht
-- License : BSD3
--
Maintainer :
-- Stability : experimental
-- Portability : non-portable
--
This module contains Template Haskell code that can be used to
-- automatically generate the boilerplate code for the multirec
-- library. The constructor information can be generated per datatype,
-- the rest per family of datatypes.
--
-----------------------------------------------------------------------------
module Generics.MultiRec.TH
( deriveAll,
deriveConstructors,
deriveFamily, deriveSystem,
derivePF,
deriveEl,
deriveFam,
deriveEqS
) where
import Generics.MultiRec.Base
import Language.Haskell.TH hiding (Fixity())
import Control.Applicative
import Control.Monad
import Data.Foldable (foldrM)
import Data.Maybe (fromJust)
-- | Given the name of the family index GADT, derive everything:
-- per-constructor datatypes with 'Constructor' instances, the 'PF'
-- type instance, and the 'El', 'Fam' and 'EqS' instances.
deriveAll :: Name -> Q [Dec]
deriveAll n =
  do
    info <- reify n
    -- runIO (print info)
    -- NOTE(review): 'init' drops the final type variable of the index
    -- GADT; this assumes the index always has at least one type variable
    -- (so 'init' never sees an empty list) -- TODO confirm.
    let ps = init (extractParameters info)
    -- runIO (print $ ps)
    -- runIO (print $ extractConstructorNames ps info)
    let nps = map (\ (n, ps) -> (remakeName n, ps)) (extractConstructorNames ps info)
    let ns = map fst nps
    -- runIO (print nps)
    cs <- deriveConstructors ns
    pf <- derivePFInstance n ps nps
    el <- deriveEl n ps nps
    fam <- deriveFam n ps ns
    eq <- deriveEqS n ps ns
    return $ cs ++ pf ++ el ++ fam ++ eq
-- | Given a list of datatype names, derive datatypes and
-- instances of class 'Constructor'. Not needed if 'deriveAll'
-- is used.
deriveConstructors :: [Name] -> Q [Dec]
-- One 'constrInstance' per datatype; each yields several declarations,
-- hence the 'concat'.
deriveConstructors =
  liftM concat . mapM constrInstance
-- | Compatibility. Use 'deriveAll' instead.
--
-- Given the name of the index GADT, the names of the
-- types in the family, and the name (as string) for the
-- pattern functor to derive, generate the 'Ix' and 'PF'
-- instances. /IMPORTANT/: It is assumed that the constructors
-- of the GADT have the same names as the datatypes in the
-- family.
{-# DEPRECATED deriveFamily "Use deriveAll instead." #-}
deriveFamily :: Name -> [Name] -> String -> Q [Dec]
-- The legacy interface has no type parameters, hence the empty
-- parameter lists passed to the El/Fam/EqS derivations.
deriveFamily n ns pfn =
  do
    pf <- derivePF pfn ns
    el <- deriveEl n [] (zip ns (repeat []))
    fam <- deriveFam n [] ns
    eq <- deriveEqS n [] (map remakeName ns)
    return $ pf ++ el ++ fam ++ eq
-- | Compatibility. Use 'deriveAll' instead.
{-# DEPRECATED deriveSystem "Use deriveFamily instead" #-}
deriveSystem :: Name -> [Name] -> String -> Q [Dec]
deriveSystem = deriveFamily
-- | Derive only the 'PF' instance. Not needed if 'deriveAll'
-- is used.
derivePF :: String -> [Name] -> Q [Dec]
-- Builds a type synonym @type <pfn> = PF t1 :+: PF t2 :+: ...@.
-- 'foldr1' assumes a non-empty family.
derivePF pfn ns =
  return <$>
    tySynD (mkName pfn) [] (foldr1 sum (map (pfType ns []) (zip ns (repeat []))))
  where
    -- Local 'sum' (shadows Prelude's): right-nested (:+:) of two functors.
    sum :: Q Type -> Q Type -> Q Type
    sum a b = conT ''(:+:) `appT` a `appT` b
-- | Derive the @type instance PF (T p1 ... pn) = ...@ declaration for the
-- index GADT @n@ applied to its parameters @ps@, summing the pattern
-- functors of all family members in @nps@.
derivePFInstance :: Name -> [Name] -> [(Name, [Name])] -> Q [Dec]
derivePFInstance n ps nps = return <$> myTySynInst
  where
    -- Right-nested (:+:) of two pattern functors (shadows Prelude's sum).
    sum :: Q Type -> Q Type -> Q Type
    sum a b = conT ''(:+:) `appT` a `appT` b
    tys = [foldl appT (conT n) (map varT ps)]
    ty = foldr1 sum (map (pfType (map fst nps) ps) nps)
    -- Template Haskell changed the tySynInstD interface in GHC 7.8.
#if __GLASGOW_HASKELL__ > 706
    myTySynInst = tySynInstD ''PF (tySynEqn tys ty)
#else
    myTySynInst = tySynInstD ''PF tys ty
#endif
-- | Derive only the 'El' instances. Not needed if 'deriveAll'
-- is used.
deriveEl :: Name -> [Name] -> [(Name, [Name])] -> Q [Dec]
-- One 'El' instance per family member.
deriveEl s ps ns =
  mapM (elInstance s ps) ns
-- | Derive only the 'Fam' instance. Not needed if 'deriveAll'
-- is used.
deriveFam :: Name -> [Name] -> [Name] -> Q [Dec]
-- Generates one 'from' and one 'to' clause group per family member,
-- indexed by its position in the sum.
deriveFam s ps ns =
  do
    fcs <- liftM concat $ zipWithM (mkFrom ns (length ns)) [0..] ns
    tcs <- liftM concat $ zipWithM (mkTo ns (length ns)) [0..] ns
    return <$>
      instanceD (cxt []) (conT ''Fam `appT` (foldl appT (conT s) (map varT ps)))
        [funD 'from fcs, funD 'to tcs]
-- | Derive only the 'EqS' instance. Not needed if 'deriveAll'
-- is used.
deriveEqS :: Name -> [Name] -> [Name] -> Q [Dec]
-- 'eqS' succeeds (with 'Refl') only on two occurrences of the same index
-- constructor; any mismatched pair falls through to the 'Nothing' clause.
deriveEqS s ps ns =
  return <$>
    instanceD (cxt []) (conT ''EqS `appT` (foldl appT (conT s) (map varT ps)))
      [funD 'eqS (trues ++ falses)]
  where
    trueClause n = clause [conP n [], conP n []] (normalB (conE 'Just `appE` conE 'Refl)) []
    falseClause = clause [wildP, wildP] (normalB (conE 'Nothing)) []
    trues = map trueClause ns
    -- With a single constructor the wildcard clause would be redundant
    -- (and trigger an overlap warning), so it is omitted.
    falses = if length trues == 1 then [] else [falseClause]
-- | Process the reified info of the index GADT, and extract
-- its constructor names, which are also the names of the datatypes
-- that are part of the family.
extractConstructorNames :: [Name] -> Info -> [(Name, [Name])]
-- template-haskell 2.11 added an extra field to DataD (deriving clauses).
#if MIN_VERSION_template_haskell(2,11,0)
extractConstructorNames ps (TyConI (DataD _ _ _ _ cs _)) = concatMap extractFrom cs
#else
extractConstructorNames ps (TyConI (DataD _ _ _ cs _)) = concatMap extractFrom cs
#endif
  where
    -- Constructor name(s) paired with the type parameters they mention.
    extractFrom :: Con -> [(Name, [Name])]
    -- NOTE: the lambda's 'ps' deliberately shadows the outer 'ps'.
    extractFrom (ForallC _ eqs c) = map (\ (n, ps) -> (n, ps ++ concatMap extractEq eqs)) (extractFrom c)
    extractFrom (InfixC _ n _) = [(n, [])]
    extractFrom (RecC n _) = [(n, [])]
    extractFrom (NormalC n []) = [(n, [])]
#if MIN_VERSION_template_haskell(2,11,0)
    extractFrom (GadtC ns _ t) = map (\ n -> (n, extractType t)) ns
#endif
    extractFrom _ = []
    -- Parameters (restricted to the known 'ps') occurring in an equality
    -- constraint; the constraint representation changed in GHC 7.10.
    extractEq :: Pred -> [Name]
#if __GLASGOW_HASKELL__ > 708
    extractEq (EqualityT `AppT` t1 `AppT` t2) =
#else
    extractEq (EqualP t1 t2) =
#endif
      filter (\ p -> p `elem` ps) (extractArgs t1 ++ extractArgs t2)
    extractEq _ = []
    -- Type-variable arguments of a (possibly nested) type application.
    extractArgs :: Type -> [Name]
    extractArgs (AppT x (VarT n)) = extractArgs x ++ [n]
    extractArgs (VarT n) = [n]
    extractArgs _ = []
    -- For a GADT result type, map the constructor's variable arguments
    -- back to the declared parameter names by position.
    extractType :: Type -> [Name]
    extractType (AppT a1 a2) = combine (extractVars a1) (extractVars a2)
      where
        combine :: [Name] -> [Name] -> [Name]
        combine vs1 vs2 =
          let
            -- 'fromJust' assumes every variable in vs2 also occurs in vs1
            -- (i.e. the result type only mentions declared parameters).
            table = zip vs1 ps
          in
            map (fromJust . flip lookup table) vs2
    extractType _ = []
    extractVars :: Type -> [Name]
    extractVars (AppT t (VarT v)) = extractVars t ++ [v]
    extractVars (AppT t _) = extractVars t
    extractVars _ = []
-- Anything other than a plain data declaration yields no constructors.
extractConstructorNames _ _ = []
-- | Process the reified info of the index GADT, and extract
-- its type parameters.
extractParameters :: Info -> [Name]
#if MIN_VERSION_template_haskell(2,11,0)
extractParameters (TyConI (DataD _ _ ns _ _ _)) = concatMap extractFromBndr ns
#else
extractParameters (TyConI (DataD _ _ ns _ _)) = concatMap extractFromBndr ns
#endif
extractParameters (TyConI (TySynD _ ns _)) = concatMap extractFromBndr ns
extractParameters _ = []
extractFromBndr :: TyVarBndr -> [Name]
extractFromBndr (PlainTV n) = [n]
extractFromBndr (KindedTV n _) = [n]
-- | Turn a record-constructor into a normal constructor by just
-- removing all the field names.
stripRecordNames :: Con -> Con
stripRecordNames (RecC n fields) =
  -- Keep each field's strictness and type, drop its selector name.
  NormalC n [ (s, t) | (_, s, t) <- fields ]
stripRecordNames other = other
-- | Takes the name of a datatype (element of the family).
-- By reifying the datatype, we obtain its constructors.
-- For each constructor, we then generate a constructor-specific
-- datatype, and an instance of the 'Constructor' class.
constrInstance :: Name -> Q [Dec]
constrInstance n =
do
i <- reify n
-- runIO (print i)
let cs = case i of
#if MIN_VERSION_template_haskell(2,11,0)
TyConI (DataD _ _ _ _ cs _) -> cs
#else
TyConI (DataD _ _ _ cs _) -> cs
#endif
_ -> []
ds <- mapM mkData cs
is <- mapM mkInstance cs
return $ ds ++ is
-- | Given a constructor, create an empty datatype of
-- the same name.
mkData :: Con -> Q Dec
mkData (NormalC n _) =
#if MIN_VERSION_template_haskell(2,12,0)
dataD (cxt []) (remakeName n) [] Nothing [] []
#elif MIN_VERSION_template_haskell(2,11,0)
dataD (cxt []) (remakeName n) [] Nothing [] (cxt [])
#else
dataD (cxt []) (remakeName n) [] [] []
#endif
mkData r@(RecC _ _) =
mkData (stripRecordNames r)
mkData (InfixC t1 n t2) =
mkData (NormalC n [t1,t2])
mkData (ForallC _ _ c) =
mkData c
fixity :: Fixity -> ExpQ
fixity Prefix = conE 'Prefix
fixity (Infix a n) = conE 'Infix `appE` assoc a `appE` [| n |]
assoc :: Associativity -> ExpQ
assoc LeftAssociative = conE 'LeftAssociative
assoc RightAssociative = conE 'RightAssociative
assoc NotAssociative = conE 'NotAssociative
-- | Given a constructor, create an instance of the 'Constructor'
-- class for the datatype associated with the constructor.
mkInstance :: Con -> Q Dec
mkInstance (NormalC n _) =
instanceD (cxt []) (appT (conT ''Constructor) (conT $ remakeName n))
[funD 'conName [clause [wildP] (normalB (stringE (nameBase n))) []]]
mkInstance r@(RecC _ _) =
mkInstance (stripRecordNames r)
mkInstance (ForallC _ _ c) =
mkInstance c
mkInstance (InfixC t1 n t2) =
do
#if MIN_VERSION_template_haskell(2,11,0)
i <- reifyFixity n
let fi = case i of
Just f -> convertFixity f
Nothing -> Prefix
#else
i <- reify n
let fi = case i of
DataConI _ _ _ f -> convertFixity f
_ -> Prefix
#endif
instanceD (cxt []) (appT (conT ''Constructor) (conT $ remakeName n))
[funD 'conName [clause [wildP] (normalB (stringE (nameBase n))) []],
funD 'conFixity [clause [wildP] (normalB (fixity fi)) []]]
where
convertFixity (Fixity n d) = Infix (convertDirection d) n
convertDirection InfixL = LeftAssociative
convertDirection InfixR = RightAssociative
convertDirection InfixN = NotAssociative
-- | Takes all the names of datatypes belonging to the family, and
-- a particular of these names. Produces the right hand side of the 'PF'
-- type family instance for this family.
-- | Produce the right-hand side of the 'PF' instance for one family
-- member: the tagged sum of its constructors, or a constant for a
-- type synonym.
pfType :: [Name] -> [Name] -> (Name, [Name]) -> Q Type
pfType ns ps (n, rs) =
  do
    i <- reify n
    let qs = extractParameters i
    -- runIO $ putStrLn $ "processing " ++ show n
    let b = case i of
              -- datatypes are nested binary sums of their constructors
#if MIN_VERSION_template_haskell(2,11,0)
              TyConI (DataD _ _ _ _ cs _) ->
#else
              TyConI (DataD _ _ _ cs _) ->
#endif
                foldr1 sum (map (pfCon ns (zip qs rs)) cs)
              -- type synonyms are always treated as constants
              TyConI (TySynD t _ _) ->
                conT ''K `appT` foldl appT (conT t) (map varT rs)
              _ -> error "unknown construct"
    -- Tag the structure with the datatype it represents, using ':>:'.
    appT (appT (conT ''(:>:)) b) (foldl appT (conT $ remakeName n) (map varT rs))
  where
    -- Local 'sum' (shadows Prelude.sum): binary sum of two functors.
    sum :: Q Type -> Q Type -> Q Type
    sum a b = conT ''(:+:) `appT` a `appT` b
-- | Takes all the names of datatypes belonging to the family, and
-- a particular name of a constructor of one of the datatypes. Creates
-- the product structure for this constructor.
pfCon :: [Name] -> [(Name, Name)] -> Con -> Q Type
pfCon ns ps r@(RecC _ _) =
pfCon ns ps (stripRecordNames r)
pfCon ns ps (InfixC t1 n t2) =
pfCon ns ps (NormalC n [t1,t2])
pfCon ns ps (ForallC _ _ c) =
pfCon ns ps c
pfCon ns ps (NormalC n []) =
-- a constructor without arguments is represented using 'U'
appT (appT (conT ''C) (conT $ remakeName n)) (conT ''U)
pfCon ns ps (NormalC n fs) =
-- a constructor with arguments is a nested binary product
appT (appT (conT ''C) (conT $ remakeName n))
(foldr1 prod (map (pfField ns ps . snd) fs))
where
prod :: Q Type -> Q Type -> Q Type
prod a b = conT ''(:*:) `appT` a `appT` b
-- | Takes all the names of datatypes belonging to the family, and
-- a particular type (that occurs as a field in one of these
-- datatypes). Produces the structure for this type. We have to
-- distinguish between recursive calls, compositions, and constants.
--
-- TODO: We currently treat all applications as compositions. However,
-- we can argue that applications should be treated as compositions only
-- if the entire construct cannot be treated as a constant.
-- | Compute the pattern-functor structure for a single constructor
-- field.  Recursive occurrences of family members become 'I',
-- tuples become nested products, other applications become
-- compositions ':.:', and everything else is a constant 'K'.
pfField :: [Name] -> [(Name, Name)] -> Type -> Q Type
pfField ns ps t@(ConT n)
  | remakeName n `elem` ns = conT ''I `appT` return t
pfField ns ps t
  | ConT n : a <- unApp t, remakeName n `elem` ns
  = conT ''I `appT` (foldl appT (conT n) (map rename a))
  where
    -- Map the datatype's own parameters to the family's parameters.
    rename (VarT n)
      | Just p <- lookup n ps = varT p
    rename t = return t
pfField ns ps t@(AppT f a)
  -- Tuples become nested binary products terminated by 'U'.
  | TupleT n : ts <- unApp t = foldrM (\ s t -> conT ''(:*:) `appT` pfField ns ps s `appT` return t) (ConT ''U) ts
  -- All other applications are treated as compositions.
  | otherwise = conT ''(:.:) `appT` return f `appT` pfField ns ps a
-- Disabled debug clause restored as a comment (its marker was lost);
-- type variables fall through to the constant case below.
-- pfField ns ps t@(VarT n) =
--   runIO (print (ps, n)) >>
pfField ns ps t = conT ''K `appT` return t
-- | Flatten a left-nested type application into its head followed by
-- all argument types, e.g. @F a b@ becomes @[F, a, b]@.
unApp :: Type -> [Type]
unApp = go []
  where
    go args (AppT f a) = go (a : args) f
    go args t          = t : args
-- | Generate the 'El' instance witnessing that one datatype (applied
-- to its parameters) is an element of the family index.
elInstance :: Name -> [Name] -> (Name, [Name]) -> Q Dec
elInstance s ps (n, qs) =
  do
    -- runIO (print (ps, qs))
    instanceD (cxt [])
      (conT ''El `appT` (foldl appT (conT s) (map varT ps)) `appT` (foldl appT (conT n) (map varT qs)))
      [mkProof n]
-- | Build the clauses of 'from' for the @i@-th of @m@ datatypes in
-- the family: one clause per constructor for a datatype, or a single
-- 'K'-wrapping clause for a type synonym.
mkFrom :: [Name] -> Int -> Int -> Name -> Q [Q Clause]
mkFrom ns m i n =
  do
    -- runIO $ putStrLn $ "processing " ++ show n
    let wrapE e = lrE m i (conE 'Tag `appE` e)
    -- Renamed from the original's 'i', which shadowed the index bound above.
    info <- reify n
    let dn = remakeName n
    let b = case info of
#if MIN_VERSION_template_haskell(2,11,0)
              TyConI (DataD _ _ _ _ cs _) ->
#else
              TyConI (DataD _ _ _ cs _) ->
#endif
                zipWith (fromCon wrapE ns dn (length cs)) [0..] cs
              TyConI (TySynD t _ _) ->
                [clause [conP dn [], varP (field 0)] (normalB (wrapE $ conE 'K `appE` varE (field 0))) []]
              _ -> error "unknown construct"
    return b
-- | Build the clauses of 'to' for the @i@-th of @m@ datatypes in the
-- family; the mirror image of 'mkFrom'.
mkTo :: [Name] -> Int -> Int -> Name -> Q [Q Clause]
mkTo ns m i n =
  do
    -- runIO $ putStrLn $ "processing " ++ show n
    let wrapP p = lrP m i (conP 'Tag [p])
    -- Renamed from the original's 'i', which shadowed the index bound above.
    info <- reify n
    let dn = remakeName n
    let b = case info of
#if MIN_VERSION_template_haskell(2,11,0)
              TyConI (DataD _ _ _ _ cs _) ->
#else
              TyConI (DataD _ _ _ cs _) ->
#endif
                zipWith (toCon wrapP ns dn (length cs)) [0..] cs
              TyConI (TySynD t _ _) ->
                [clause [conP dn [], wrapP $ conP 'K [varP (field 0)]] (normalB $ varE (field 0)) []]
              _ -> error "unknown construct"
    return b
-- | Implement 'proof' as the index-GADT constructor named after the
-- given datatype.
mkProof :: Name -> Q Dec
mkProof dt = funD 'proof [clause [] body []]
  where
    body = normalB (conE (remakeName dt))
fromCon :: (Q Exp -> Q Exp) -> [Name] -> Name -> Int -> Int -> Con -> Q Clause
fromCon wrap ns n m i (NormalC cn []) =
clause
[conP n [], conP cn []]
(normalB $ wrap $ lrE m i $ conE 'C `appE` (conE 'U)) []
fromCon wrap ns n m i (NormalC cn fs) =
-- runIO (putStrLn ("constructor " ++ show ix)) >>
clause
[conP n [], conP cn (map (varP . field) [0..length fs - 1])]
(normalB $ wrap $ lrE m i $ conE 'C `appE` foldr1 prod (zipWith (fromField ns) [0..] (map snd fs))) []
where
prod x y = conE '(:*:) `appE` x `appE` y
fromCon wrap ns n m i r@(RecC _ _) =
fromCon wrap ns n m i (stripRecordNames r)
fromCon wrap ns n m i (InfixC t1 cn t2) =
fromCon wrap ns n m i (NormalC cn [t1,t2])
fromCon wrap ns n m i (ForallC _ _ c) =
fromCon wrap ns n m i c
toCon :: (Q Pat -> Q Pat) -> [Name] -> Name -> Int -> Int -> Con -> Q Clause
toCon wrap ns n m i (NormalC cn []) =
clause
[conP n [], wrap $ lrP m i $ conP 'C [conP 'U []]]
(normalB $ conE cn) []
toCon wrap ns n m i (NormalC cn fs) =
-- runIO (putStrLn ("constructor " ++ show ix)) >>
clause
[conP n [], wrap $ lrP m i $ conP 'C [foldr1 prod (map (varP . field) [0..length fs - 1])]]
(normalB $ foldl appE (conE cn) (zipWith (toField ns) [0..] (map snd fs))) []
where
prod x y = conP '(:*:) [x,y]
toCon wrap ns n m i r@(RecC _ _) =
toCon wrap ns n m i (stripRecordNames r)
toCon wrap ns n m i (InfixC t1 cn t2) =
toCon wrap ns n m i (NormalC cn [t1,t2])
toCon wrap ns n m i (ForallC _ _ c) =
toCon wrap ns n m i c
fromField :: [Name] -> Int -> Type -> Q Exp
fromField ns nr t = [| $(fromFieldFun ns t) $(varE (field nr)) |]
fromFieldFun :: [Name] -> Type -> Q Exp
fromFieldFun ns t@(ConT n)
| remakeName n `elem` ns = [| I . I0 |]
fromFieldFun ns t
| ConT n : a <- unApp t, remakeName n `elem` ns
= [| I . I0 |]
fromFieldFun ns t@(AppT f a)
| TupleT n : ts <- unApp t = mapM (newName . ("x" ++) . show) [1..n] >>= \ vs ->
lam1E (tupP (varP <$> vs)) $
foldrM (\ (v, t) x -> conE '(:*:) `appE` (fromFieldFun ns t `appE` varE v) `appE` return x) (ConE 'U) (zip vs ts)
| otherwise = [| D . fmap $(fromFieldFun ns a) |]
fromFieldFun ns t = [| K |]
toField :: [Name] -> Int -> Type -> Q Exp
toField ns nr t = [| $(toFieldFun ns t) $(varE (field nr)) |]
toFieldFun :: [Name] -> Type -> Q Exp
toFieldFun ns t@(ConT n)
| remakeName n `elem` ns = [| unI0 . unI |]
toFieldFun ns t
| ConT n : a <- unApp t, remakeName n `elem` ns
= [| unI0 . unI |]
toFieldFun ns t@(AppT f a)
| TupleT n : ts <- unApp t = mapM (newName . ("x" ++) . show) [1..n] >>= \ vs ->
lam1E (foldr (\ v p -> conP '(:*:) [varP v, p]) (conP 'U []) vs) $
tupE (zipWith (\ v t -> toFieldFun ns t `appE` varE v) vs ts)
| otherwise = [| fmap $(toFieldFun ns a) . unD |]
toFieldFun ns t = [| unK |]
-- | The variable name used for the @ix@-th constructor field in
-- generated clauses.
field :: Int -> Name
field ix = mkName ("f" ++ show ix)
-- | Wrap a pattern so it selects alternative @i@ out of @m@ in a
-- nested binary sum: a chain of 'R's ending in an 'L' (or no wrapper
-- at all when the sum has a single alternative).
lrP :: Int -> Int -> (Q Pat -> Q Pat)
lrP 1 0 p = p
lrP m 0 p = conP 'L [p]
lrP m i p = conP 'R [lrP (m-1) (i-1) p]
-- | Expression-level counterpart of 'lrP': inject an expression as
-- alternative @i@ out of @m@ in a nested binary sum.
lrE :: Int -> Int -> (Q Exp -> Q Exp)
lrE 1 0 e = e
lrE m 0 e = conE 'L `appE` e
lrE m i e = conE 'R `appE` lrE (m-1) (i-1) e
-- Should we, under certain circumstances, maintain the module name?
-- | Rebuild a name from its base string, discarding any module
-- qualification so generated code refers to the local definitions.
remakeName :: Name -> Name
remakeName = mkName . nameBase
| null | https://raw.githubusercontent.com/kosmikus/multirec/10ea901f0e1067bbe3632b1fab34103c83eeff0f/src/Generics/MultiRec/TH.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE KindSignatures #
# LANGUAGE CPP #
---------------------------------------------------------------------------
|
Module : Generics.MultiRec.TH
License : BSD3
Stability : experimental
Portability : non-portable
automatically generate the boilerplate code for the multirec
library. The constructor information can be generated per datatype,
the rest per family of datatypes.
---------------------------------------------------------------------------
runIO (print info)
| Given a list of datatype names, derive datatypes and
instances of class 'Constructor'. Not needed if 'deriveAll'
is used.
| Compatibility. Use 'deriveAll' instead.
types in the family, and the name (as string) for the
pattern functor to derive, generate the 'Ix' and 'PF'
instances. /IMPORTANT/: It is assumed that the constructors
family.
# DEPRECATED deriveFamily "Use deriveAll instead." #
| Compatibility. Use 'deriveAll' instead.
# DEPRECATED deriveSystem "Use deriveFamily instead" #
| Derive only the 'PF' instance. Not needed if 'deriveAll'
is used.
| Derive only the 'El' instances. Not needed if 'deriveAll'
is used.
| Derive only the 'Fam' instance. Not needed if 'deriveAll'
is used.
is used.
its constructor names, which are also the names of the datatypes
that are part of the family.
its type parameters.
| Turn a record-constructor into a normal constructor by just
removing all the field names.
| Takes the name of a datatype (element of the family).
By reifying the datatype, we obtain its constructors.
For each constructor, we then generate a constructor-specific
datatype, and an instance of the 'Constructor' class.
runIO (print i)
| Given a constructor, create an empty datatype of
the same name.
| Given a constructor, create an instance of the 'Constructor'
class for the datatype associated with the constructor.
| Takes all the names of datatypes belonging to the family, and
a particular of these names. Produces the right hand side of the 'PF'
type family instance for this family.
datatypes are nested binary sums of their constructors
type synonyms are always treated as constants
| Takes all the names of datatypes belonging to the family, and
the product structure for this constructor.
a constructor without arguments is represented using 'U'
a constructor with arguments is a nested binary product
| Takes all the names of datatypes belonging to the family, and
datatypes). Produces the structure for this type. We have to
distinguish between recursive calls, compositions, and constants.
TODO: We currently treat all applications as compositions. However,
we can argue that applications should be treated as compositions only
if the entire construct cannot be treated as a constant.
runIO (putStrLn ("constructor " ++ show ix)) >>
runIO (putStrLn ("constructor " ++ show ix)) >>
Should we, under certain circumstances, maintain the module name? | # LANGUAGE TemplateHaskell #
# LANGUAGE PatternGuards #
Copyright : ( c ) 2008 - -2010 Universiteit Utrecht
Maintainer :
This module contains Template Haskell code that can be used to
module Generics.MultiRec.TH
( deriveAll,
deriveConstructors,
deriveFamily, deriveSystem,
derivePF,
deriveEl,
deriveFam,
deriveEqS
) where
import Generics.MultiRec.Base
import Language.Haskell.TH hiding (Fixity())
import Control.Applicative
import Control.Monad
import Data.Foldable (foldrM)
import Data.Maybe (fromJust)
-- | Given the name of the family index GADT, derive everything.
deriveAll :: Name -> Q [Dec]
deriveAll n =
  do
    info <- reify n
    -- Drop the final parameter of the index GADT: it is the index
    -- itself, not a parameter of the family.  Guarded so that a
    -- parameterless reification does not crash on 'init'.
    let ps = case extractParameters info of
               [] -> []
               xs -> init xs
    -- runIO (print $ ps)
    -- runIO (print $ extractConstructorNames ps info)
    let nps = map (\ (n, ps) -> (remakeName n, ps)) (extractConstructorNames ps info)
    let ns = map fst nps
    -- runIO (print nps)
    cs <- deriveConstructors ns
    pf <- derivePFInstance n ps nps
    el <- deriveEl n ps nps
    fam <- deriveFam n ps ns
    eq <- deriveEqS n ps ns
    return $ cs ++ pf ++ el ++ fam ++ eq
deriveConstructors :: [Name] -> Q [Dec]
deriveConstructors =
liftM concat . mapM constrInstance
-- | Given the name of the index GADT, the names of the
-- datatypes in the family (the GADT's constructors share their names).
deriveFamily :: Name -> [Name] -> String -> Q [Dec]
deriveFamily n ns pfn =
do
pf <- derivePF pfn ns
el <- deriveEl n [] (zip ns (repeat []))
fam <- deriveFam n [] ns
eq <- deriveEqS n [] (map remakeName ns)
return $ pf ++ el ++ fam ++ eq
deriveSystem :: Name -> [Name] -> String -> Q [Dec]
deriveSystem = deriveFamily
derivePF :: String -> [Name] -> Q [Dec]
derivePF pfn ns =
return <$>
tySynD (mkName pfn) [] (foldr1 sum (map (pfType ns []) (zip ns (repeat []))))
where
sum :: Q Type -> Q Type -> Q Type
sum a b = conT ''(:+:) `appT` a `appT` b
derivePFInstance :: Name -> [Name] -> [(Name, [Name])] -> Q [Dec]
derivePFInstance n ps nps = return <$> myTySynInst
where
sum :: Q Type -> Q Type -> Q Type
sum a b = conT ''(:+:) `appT` a `appT` b
tys = [foldl appT (conT n) (map varT ps)]
ty = foldr1 sum (map (pfType (map fst nps) ps) nps)
#if __GLASGOW_HASKELL__ > 706
myTySynInst = tySynInstD ''PF (tySynEqn tys ty)
#else
myTySynInst = tySynInstD ''PF tys ty
#endif
deriveEl :: Name -> [Name] -> [(Name, [Name])] -> Q [Dec]
deriveEl s ps ns =
mapM (elInstance s ps) ns
deriveFam :: Name -> [Name] -> [Name] -> Q [Dec]
deriveFam s ps ns =
do
fcs <- liftM concat $ zipWithM (mkFrom ns (length ns)) [0..] ns
tcs <- liftM concat $ zipWithM (mkTo ns (length ns)) [0..] ns
return <$>
instanceD (cxt []) (conT ''Fam `appT` (foldl appT (conT s) (map varT ps)))
[funD 'from fcs, funD 'to tcs]
-- | Derive only the 'EqS' instance. Not needed if 'deriveAll' is used.
deriveEqS :: Name -> [Name] -> [Name] -> Q [Dec]
deriveEqS s ps ns =
return <$>
instanceD (cxt []) (conT ''EqS `appT` (foldl appT (conT s) (map varT ps)))
[funD 'eqS (trues ++ falses)]
where
trueClause n = clause [conP n [], conP n []] (normalB (conE 'Just `appE` conE 'Refl)) []
falseClause = clause [wildP, wildP] (normalB (conE 'Nothing)) []
trues = map trueClause ns
falses = if length trues == 1 then [] else [falseClause]
-- | Process the reified info of the index GADT, and extract its constructor names.
extractConstructorNames :: [Name] -> Info -> [(Name, [Name])]
#if MIN_VERSION_template_haskell(2,11,0)
extractConstructorNames ps (TyConI (DataD _ _ _ _ cs _)) = concatMap extractFrom cs
#else
extractConstructorNames ps (TyConI (DataD _ _ _ cs _)) = concatMap extractFrom cs
#endif
where
extractFrom :: Con -> [(Name, [Name])]
extractFrom (ForallC _ eqs c) = map (\ (n, ps) -> (n, ps ++ concatMap extractEq eqs)) (extractFrom c)
extractFrom (InfixC _ n _) = [(n, [])]
extractFrom (RecC n _) = [(n, [])]
extractFrom (NormalC n []) = [(n, [])]
#if MIN_VERSION_template_haskell(2,11,0)
extractFrom (GadtC ns _ t) = map (\ n -> (n, extractType t)) ns
#endif
extractFrom _ = []
extractEq :: Pred -> [Name]
#if __GLASGOW_HASKELL__ > 708
extractEq (EqualityT `AppT` t1 `AppT` t2) =
#else
extractEq (EqualP t1 t2) =
#endif
filter (\ p -> p `elem` ps) (extractArgs t1 ++ extractArgs t2)
extractEq _ = []
extractArgs :: Type -> [Name]
extractArgs (AppT x (VarT n)) = extractArgs x ++ [n]
extractArgs (VarT n) = [n]
extractArgs _ = []
extractType :: Type -> [Name]
extractType (AppT a1 a2) = combine (extractVars a1) (extractVars a2)
where
combine :: [Name] -> [Name] -> [Name]
combine vs1 vs2 =
let
table = zip vs1 ps
in
map (fromJust . flip lookup table) vs2
extractType _ = []
extractVars :: Type -> [Name]
extractVars (AppT t (VarT v)) = extractVars t ++ [v]
extractVars (AppT t _) = extractVars t
extractVars _ = []
extractConstructorNames _ _ = []
-- | Process the reified info of the index GADT, and extract its type parameters.
extractParameters :: Info -> [Name]
#if MIN_VERSION_template_haskell(2,11,0)
extractParameters (TyConI (DataD _ _ ns _ _ _)) = concatMap extractFromBndr ns
#else
extractParameters (TyConI (DataD _ _ ns _ _)) = concatMap extractFromBndr ns
#endif
extractParameters (TyConI (TySynD _ ns _)) = concatMap extractFromBndr ns
extractParameters _ = []
extractFromBndr :: TyVarBndr -> [Name]
extractFromBndr (PlainTV n) = [n]
extractFromBndr (KindedTV n _) = [n]
stripRecordNames :: Con -> Con
stripRecordNames (RecC n f) =
NormalC n (map (\(_, s, t) -> (s, t)) f)
stripRecordNames c = c
constrInstance :: Name -> Q [Dec]
constrInstance n =
do
i <- reify n
let cs = case i of
#if MIN_VERSION_template_haskell(2,11,0)
TyConI (DataD _ _ _ _ cs _) -> cs
#else
TyConI (DataD _ _ _ cs _) -> cs
#endif
_ -> []
ds <- mapM mkData cs
is <- mapM mkInstance cs
return $ ds ++ is
mkData :: Con -> Q Dec
mkData (NormalC n _) =
#if MIN_VERSION_template_haskell(2,12,0)
dataD (cxt []) (remakeName n) [] Nothing [] []
#elif MIN_VERSION_template_haskell(2,11,0)
dataD (cxt []) (remakeName n) [] Nothing [] (cxt [])
#else
dataD (cxt []) (remakeName n) [] [] []
#endif
mkData r@(RecC _ _) =
mkData (stripRecordNames r)
mkData (InfixC t1 n t2) =
mkData (NormalC n [t1,t2])
mkData (ForallC _ _ c) =
mkData c
fixity :: Fixity -> ExpQ
fixity Prefix = conE 'Prefix
fixity (Infix a n) = conE 'Infix `appE` assoc a `appE` [| n |]
assoc :: Associativity -> ExpQ
assoc LeftAssociative = conE 'LeftAssociative
assoc RightAssociative = conE 'RightAssociative
assoc NotAssociative = conE 'NotAssociative
mkInstance :: Con -> Q Dec
mkInstance (NormalC n _) =
instanceD (cxt []) (appT (conT ''Constructor) (conT $ remakeName n))
[funD 'conName [clause [wildP] (normalB (stringE (nameBase n))) []]]
mkInstance r@(RecC _ _) =
mkInstance (stripRecordNames r)
mkInstance (ForallC _ _ c) =
mkInstance c
mkInstance (InfixC t1 n t2) =
do
#if MIN_VERSION_template_haskell(2,11,0)
i <- reifyFixity n
let fi = case i of
Just f -> convertFixity f
Nothing -> Prefix
#else
i <- reify n
let fi = case i of
DataConI _ _ _ f -> convertFixity f
_ -> Prefix
#endif
instanceD (cxt []) (appT (conT ''Constructor) (conT $ remakeName n))
[funD 'conName [clause [wildP] (normalB (stringE (nameBase n))) []],
funD 'conFixity [clause [wildP] (normalB (fixity fi)) []]]
where
convertFixity (Fixity n d) = Infix (convertDirection d) n
convertDirection InfixL = LeftAssociative
convertDirection InfixR = RightAssociative
convertDirection InfixN = NotAssociative
-- | Produce the right-hand side of the 'PF' instance for one family
-- member: the tagged sum of its constructors, or a constant for a
-- type synonym.
pfType :: [Name] -> [Name] -> (Name, [Name]) -> Q Type
pfType ns ps (n, rs) =
  do
    i <- reify n
    let qs = extractParameters i
    -- runIO $ putStrLn $ "processing " ++ show n
    let b = case i of
              -- datatypes are nested binary sums of their constructors
#if MIN_VERSION_template_haskell(2,11,0)
              TyConI (DataD _ _ _ _ cs _) ->
#else
              TyConI (DataD _ _ _ cs _) ->
#endif
                foldr1 sum (map (pfCon ns (zip qs rs)) cs)
              -- type synonyms are always treated as constants
              TyConI (TySynD t _ _) ->
                conT ''K `appT` foldl appT (conT t) (map varT rs)
              _ -> error "unknown construct"
    -- Tag the structure with the datatype it represents, using ':>:'.
    appT (appT (conT ''(:>:)) b) (foldl appT (conT $ remakeName n) (map varT rs))
  where
    -- Local 'sum' (shadows Prelude.sum): binary sum of two functors.
    sum :: Q Type -> Q Type -> Q Type
    sum a b = conT ''(:+:) `appT` a `appT` b
-- a particular name of a constructor of one of the datatypes. Creates
-- the product structure for this constructor.
pfCon :: [Name] -> [(Name, Name)] -> Con -> Q Type
pfCon ns ps r@(RecC _ _) =
pfCon ns ps (stripRecordNames r)
pfCon ns ps (InfixC t1 n t2) =
pfCon ns ps (NormalC n [t1,t2])
pfCon ns ps (ForallC _ _ c) =
pfCon ns ps c
pfCon ns ps (NormalC n []) =
appT (appT (conT ''C) (conT $ remakeName n)) (conT ''U)
pfCon ns ps (NormalC n fs) =
appT (appT (conT ''C) (conT $ remakeName n))
(foldr1 prod (map (pfField ns ps . snd) fs))
where
prod :: Q Type -> Q Type -> Q Type
prod a b = conT ''(:*:) `appT` a `appT` b
-- a particular type (that occurs as a field in one of these
-- datatypes). Produces the structure for this type.
-- | Compute the pattern-functor structure for a single constructor
-- field.  Recursive occurrences of family members become 'I',
-- tuples become nested products, other applications become
-- compositions ':.:', and everything else is a constant 'K'.
pfField :: [Name] -> [(Name, Name)] -> Type -> Q Type
pfField ns ps t@(ConT n)
  | remakeName n `elem` ns = conT ''I `appT` return t
pfField ns ps t
  | ConT n : a <- unApp t, remakeName n `elem` ns
  = conT ''I `appT` (foldl appT (conT n) (map rename a))
  where
    -- Map the datatype's own parameters to the family's parameters.
    rename (VarT n)
      | Just p <- lookup n ps = varT p
    rename t = return t
pfField ns ps t@(AppT f a)
  -- Tuples become nested binary products terminated by 'U'.
  | TupleT n : ts <- unApp t = foldrM (\ s t -> conT ''(:*:) `appT` pfField ns ps s `appT` return t) (ConT ''U) ts
  -- All other applications are treated as compositions.
  | otherwise = conT ''(:.:) `appT` return f `appT` pfField ns ps a
-- Disabled debug clause restored as a comment (its marker was lost);
-- type variables fall through to the constant case below.
-- pfField ns ps t@(VarT n) =
--   runIO (print (ps, n)) >>
pfField ns ps t = conT ''K `appT` return t
unApp :: Type -> [Type]
unApp (AppT f a) = unApp f ++ [a]
unApp t = [t]
-- | Generate the 'El' instance witnessing that one datatype (applied
-- to its parameters) is an element of the family index.
elInstance :: Name -> [Name] -> (Name, [Name]) -> Q Dec
elInstance s ps (n, qs) =
  do
    -- runIO (print (ps, qs))
    instanceD (cxt [])
      (conT ''El `appT` (foldl appT (conT s) (map varT ps)) `appT` (foldl appT (conT n) (map varT qs)))
      [mkProof n]
-- | Build the clauses of 'from' for the @i@-th of @m@ datatypes in
-- the family: one clause per constructor for a datatype, or a single
-- 'K'-wrapping clause for a type synonym.
mkFrom :: [Name] -> Int -> Int -> Name -> Q [Q Clause]
mkFrom ns m i n =
  do
    -- runIO $ putStrLn $ "processing " ++ show n
    let wrapE e = lrE m i (conE 'Tag `appE` e)
    -- Renamed from the original's 'i', which shadowed the index bound above.
    info <- reify n
    let dn = remakeName n
    let b = case info of
#if MIN_VERSION_template_haskell(2,11,0)
              TyConI (DataD _ _ _ _ cs _) ->
#else
              TyConI (DataD _ _ _ cs _) ->
#endif
                zipWith (fromCon wrapE ns dn (length cs)) [0..] cs
              TyConI (TySynD t _ _) ->
                [clause [conP dn [], varP (field 0)] (normalB (wrapE $ conE 'K `appE` varE (field 0))) []]
              _ -> error "unknown construct"
    return b
-- | Build the clauses of 'to' for the @i@-th of @m@ datatypes in the
-- family; the mirror image of 'mkFrom'.
mkTo :: [Name] -> Int -> Int -> Name -> Q [Q Clause]
mkTo ns m i n =
  do
    -- runIO $ putStrLn $ "processing " ++ show n
    let wrapP p = lrP m i (conP 'Tag [p])
    -- Renamed from the original's 'i', which shadowed the index bound above.
    info <- reify n
    let dn = remakeName n
    let b = case info of
#if MIN_VERSION_template_haskell(2,11,0)
              TyConI (DataD _ _ _ _ cs _) ->
#else
              TyConI (DataD _ _ _ cs _) ->
#endif
                zipWith (toCon wrapP ns dn (length cs)) [0..] cs
              TyConI (TySynD t _ _) ->
                [clause [conP dn [], wrapP $ conP 'K [varP (field 0)]] (normalB $ varE (field 0)) []]
              _ -> error "unknown construct"
    return b
mkProof :: Name -> Q Dec
mkProof n =
funD 'proof [clause [] (normalB (conE (remakeName n))) []]
fromCon :: (Q Exp -> Q Exp) -> [Name] -> Name -> Int -> Int -> Con -> Q Clause
fromCon wrap ns n m i (NormalC cn []) =
clause
[conP n [], conP cn []]
(normalB $ wrap $ lrE m i $ conE 'C `appE` (conE 'U)) []
fromCon wrap ns n m i (NormalC cn fs) =
clause
[conP n [], conP cn (map (varP . field) [0..length fs - 1])]
(normalB $ wrap $ lrE m i $ conE 'C `appE` foldr1 prod (zipWith (fromField ns) [0..] (map snd fs))) []
where
prod x y = conE '(:*:) `appE` x `appE` y
fromCon wrap ns n m i r@(RecC _ _) =
fromCon wrap ns n m i (stripRecordNames r)
fromCon wrap ns n m i (InfixC t1 cn t2) =
fromCon wrap ns n m i (NormalC cn [t1,t2])
fromCon wrap ns n m i (ForallC _ _ c) =
fromCon wrap ns n m i c
toCon :: (Q Pat -> Q Pat) -> [Name] -> Name -> Int -> Int -> Con -> Q Clause
toCon wrap ns n m i (NormalC cn []) =
clause
[conP n [], wrap $ lrP m i $ conP 'C [conP 'U []]]
(normalB $ conE cn) []
toCon wrap ns n m i (NormalC cn fs) =
clause
[conP n [], wrap $ lrP m i $ conP 'C [foldr1 prod (map (varP . field) [0..length fs - 1])]]
(normalB $ foldl appE (conE cn) (zipWith (toField ns) [0..] (map snd fs))) []
where
prod x y = conP '(:*:) [x,y]
toCon wrap ns n m i r@(RecC _ _) =
toCon wrap ns n m i (stripRecordNames r)
toCon wrap ns n m i (InfixC t1 cn t2) =
toCon wrap ns n m i (NormalC cn [t1,t2])
toCon wrap ns n m i (ForallC _ _ c) =
toCon wrap ns n m i c
fromField :: [Name] -> Int -> Type -> Q Exp
fromField ns nr t = [| $(fromFieldFun ns t) $(varE (field nr)) |]
fromFieldFun :: [Name] -> Type -> Q Exp
fromFieldFun ns t@(ConT n)
| remakeName n `elem` ns = [| I . I0 |]
fromFieldFun ns t
| ConT n : a <- unApp t, remakeName n `elem` ns
= [| I . I0 |]
fromFieldFun ns t@(AppT f a)
| TupleT n : ts <- unApp t = mapM (newName . ("x" ++) . show) [1..n] >>= \ vs ->
lam1E (tupP (varP <$> vs)) $
foldrM (\ (v, t) x -> conE '(:*:) `appE` (fromFieldFun ns t `appE` varE v) `appE` return x) (ConE 'U) (zip vs ts)
| otherwise = [| D . fmap $(fromFieldFun ns a) |]
fromFieldFun ns t = [| K |]
toField :: [Name] -> Int -> Type -> Q Exp
toField ns nr t = [| $(toFieldFun ns t) $(varE (field nr)) |]
toFieldFun :: [Name] -> Type -> Q Exp
toFieldFun ns t@(ConT n)
| remakeName n `elem` ns = [| unI0 . unI |]
toFieldFun ns t
| ConT n : a <- unApp t, remakeName n `elem` ns
= [| unI0 . unI |]
toFieldFun ns t@(AppT f a)
| TupleT n : ts <- unApp t = mapM (newName . ("x" ++) . show) [1..n] >>= \ vs ->
lam1E (foldr (\ v p -> conP '(:*:) [varP v, p]) (conP 'U []) vs) $
tupE (zipWith (\ v t -> toFieldFun ns t `appE` varE v) vs ts)
| otherwise = [| fmap $(toFieldFun ns a) . unD |]
toFieldFun ns t = [| unK |]
field :: Int -> Name
field n = mkName $ "f" ++ show n
lrP :: Int -> Int -> (Q Pat -> Q Pat)
lrP 1 0 p = p
lrP m 0 p = conP 'L [p]
lrP m i p = conP 'R [lrP (m-1) (i-1) p]
lrE :: Int -> Int -> (Q Exp -> Q Exp)
lrE 1 0 e = e
lrE m 0 e = conE 'L `appE` e
lrE m i e = conE 'R `appE` lrE (m-1) (i-1) e
remakeName :: Name -> Name
remakeName n = mkName (nameBase n)
|
396113644da268adc5a07d1ac9ed9fec362b59a14f03bc7a3b74b7b78395b3fd | gator1/jepsen | project.clj | (defproject jepsen.galera "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:main jepsen.galera.test
:dependencies [[org.clojure/clojure "1.8.0"]
[jepsen "0.1.5"]
[honeysql "0.6.1"]
[org.clojure/java.jdbc "0.4.1"]
[org.mariadb.jdbc/mariadb-java-client "1.2.0"]])
| null | https://raw.githubusercontent.com/gator1/jepsen/1932cbd72cbc1f6c2a27abe0fe347ea989f0cfbb/galera/project.clj | clojure | (defproject jepsen.galera "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:main jepsen.galera.test
:dependencies [[org.clojure/clojure "1.8.0"]
[jepsen "0.1.5"]
[honeysql "0.6.1"]
[org.clojure/java.jdbc "0.4.1"]
[org.mariadb.jdbc/mariadb-java-client "1.2.0"]])
| |
d4d6fd3f4c10ac0120fae95c6482dc4ebc6c485c4c56d63529ad894777f466ae | scarvalhojr/haskellbook | section22.5.hs |
newtype Reader r a = Reader { runReader:: r -> a }
ask :: Reader a a
ask = Reader id
| null | https://raw.githubusercontent.com/scarvalhojr/haskellbook/6016a5a78da3fc4a29f5ea68b239563895c448d5/chapter22/section22.5.hs | haskell |
newtype Reader r a = Reader { runReader:: r -> a }
ask :: Reader a a
ask = Reader id
| |
027f35d7d9a6e447a015613b25373eb41e99559ce583805c93deb91a842529af | offby1/rudybot | reloadable.rkt | #lang racket/base
;; based on handin-server/private/reloadable
(require syntax/moddep)
(provide reload-module)
(define (reload-module modspec path [notifier #f])
;; the path argument is not needed (could use resolve-module-path here), but
;; its always known when this function is called
(let* ([name ((current-module-name-resolver) modspec #f #f #t)])
(when notifier (notifier "(re)loading module from ~a" modspec))
(parameterize ([current-module-declare-name name]
[compile-enforce-module-constants #f])
;; only notify, it's fine to reset the file timer, since there's no point
;; in attempting to reload it yet again until it is edited.
(with-handlers ([exn?
(lambda (e)
(notifier "error, module not reloaded (~a)"
(exn-message e))
(notifier "~a~%" (continuation-mark-set->context (exn-continuation-marks e))))])
(namespace-require '(only scheme module #%top-interaction))
(load/use-compiled path)))))
;; pulls out a value from a module, reloading the module if its source file was
;; modified
(provide auto-reload-value)
(define module-times (make-hash))
(define (auto-reload-value modspec valname)
(let* ([path (resolve-module-path modspec #f)]
[last (hash-ref module-times path #f)]
[cur (file-or-directory-modify-seconds path)])
(unless (equal? cur last)
(hash-set! module-times path cur)
(reload-module modspec path))
(dynamic-require modspec valname)))
poll at most once every two seconds
;; pulls out a procedure from a module, and returns a wrapped procedure that
;; automatically reloads the module if the file was changed whenever the
;; procedure is used
(provide auto-reload-procedure)
(define (auto-reload-procedure
modspec procname #:notifier [notifier #f] #:on-reload [on-reload #f])
(let ([path (resolve-module-path modspec #f)] [date #f] [proc #f] [poll #f])
(define (reload)
(unless (and proc (< (- (current-inexact-milliseconds) poll) poll-freq))
(set! poll (current-inexact-milliseconds))
(let ([cur (file-or-directory-modify-seconds path)])
(unless (equal? cur date)
(when on-reload (on-reload))
(set! date cur)
(reload-module modspec path notifier)
(set! proc (dynamic-require modspec procname))))))
(reload)
(lambda xs (reload) (apply proc xs))))
| null | https://raw.githubusercontent.com/offby1/rudybot/74773ce9c1224813ee963f4d5d8a7748197f6963/reloadable.rkt | racket | based on handin-server/private/reloadable
the path argument is not needed (could use resolve-module-path here), but
its always known when this function is called
only notify, it's fine to reset the file timer, since there's no point
in attempting to reload it yet again until it is edited.
pulls out a value from a module, reloading the module if its source file was
modified
pulls out a procedure from a module, and returns a wrapped procedure that
automatically reloads the module if the file was changed whenever the
procedure is used | #lang racket/base
(require syntax/moddep)
(provide reload-module)
(define (reload-module modspec path [notifier #f])
(let* ([name ((current-module-name-resolver) modspec #f #f #t)])
(when notifier (notifier "(re)loading module from ~a" modspec))
(parameterize ([current-module-declare-name name]
[compile-enforce-module-constants #f])
(with-handlers ([exn?
(lambda (e)
(notifier "error, module not reloaded (~a)"
(exn-message e))
(notifier "~a~%" (continuation-mark-set->context (exn-continuation-marks e))))])
(namespace-require '(only scheme module #%top-interaction))
(load/use-compiled path)))))
(provide auto-reload-value)
(define module-times (make-hash))
(define (auto-reload-value modspec valname)
(let* ([path (resolve-module-path modspec #f)]
[last (hash-ref module-times path #f)]
[cur (file-or-directory-modify-seconds path)])
(unless (equal? cur last)
(hash-set! module-times path cur)
(reload-module modspec path))
(dynamic-require modspec valname)))
poll at most once every two seconds
(provide auto-reload-procedure)
(define (auto-reload-procedure
modspec procname #:notifier [notifier #f] #:on-reload [on-reload #f])
(let ([path (resolve-module-path modspec #f)] [date #f] [proc #f] [poll #f])
(define (reload)
(unless (and proc (< (- (current-inexact-milliseconds) poll) poll-freq))
(set! poll (current-inexact-milliseconds))
(let ([cur (file-or-directory-modify-seconds path)])
(unless (equal? cur date)
(when on-reload (on-reload))
(set! date cur)
(reload-module modspec path notifier)
(set! proc (dynamic-require modspec procname))))))
(reload)
(lambda xs (reload) (apply proc xs))))
|
13a37951d8e990c9b110cfda0ee685ea868ee7b4a814231455144b0360fadb16 | yrashk/erlang | snmpc_mib_to_hrl.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
-module(snmpc_mib_to_hrl).
-include_lib("stdlib/include/erl_compile.hrl").
-include("snmp_types.hrl").
%% External exports
-export([convert/1, compile/3]).
%%-----------------------------------------------------------------
%% Func: convert/1
: MibName = string ( ) without extension .
Purpose : Produce a .hrl file with oid for tables and variables ,
%% column numbers for columns and values for enums.
Writes only the first occurence of a name . Prints a
%% warning if a duplicate name is found.
%% Returns: ok | {error, Reason}
%% Note: The Mib must be compiled.
%%-----------------------------------------------------------------
convert(MibName) ->
MibFile = MibName ++ ".bin",
HrlFile = MibName ++ ".hrl",
put(verbosity, trace),
convert(MibFile, HrlFile, MibName).
convert(MibFile, HrlFile, MibName) ->
t("convert -> entry with"
"~n MibFile: ~s"
"~n HrlFile: ~s"
"~n MibName: ~s", [MibFile, HrlFile, MibName]),
case snmpc_misc:read_mib(MibFile) of
{ok, #mib{asn1_types = Types, mes = MEs, traps = Traps}} ->
d("mib successfully read"),
resolve(Types, MEs, Traps, HrlFile,
filename:basename(MibName)),
ok;
{error, Reason} ->
i("failed reading mib: "
"~n Reason: ~p", [Reason]),
{error, Reason}
end.
resolve(Types, MEs, Traps, HrlFile, MibName) ->
t("resolve -> entry"),
case file:open(HrlFile, [write]) of
{ok, Fd} ->
insert_header(Fd),
insert_begin(Fd, MibName),
insert_notifs(Traps, Fd),
insert_oids(MEs, Fd),
insert_range(MEs, Fd),
insert_enums(Types, MEs, Fd),
insert_defvals(MEs, Fd),
insert_end(Fd),
file:close(Fd),
l("~s written", [HrlFile]);
{error, Reason} ->
i("failed opening output file: "
"~n Reason: ~p", [Reason]),
{error, Reason}
end.
insert_header(Fd) ->
d("insert file header"),
io:format(Fd, "%%% This file was automatically generated by "
"snmpc_mib_to_hrl version ~s~n", [?version]),
{Y,Mo,D} = date(),
{H,Mi,S} = time(),
io:format(Fd, "%%% Date: ~2.2.0w-~s-~w::~2.2.0w:~2.2.0w:~2.2.0w~n",
[D,month(Mo),Y,H,Mi,S]).
insert_begin(Fd, MibName) ->
d("insert file begin"),
io:format(Fd,
"-ifndef('~s').~n"
"-define('~s', true).~n", [MibName, MibName]).
insert_end(Fd) ->
d("insert file end"),
io:format(Fd, "-endif.~n", []).
insert_oids(MEs, Fd) ->
d("insert oids"),
io:format(Fd, "~n%% Oids~n", []),
insert_oids2(MEs, Fd),
io:format(Fd, "~n", []).
insert_oids2([#me{imported = true} | T], Fd) ->
insert_oids2(T, Fd);
insert_oids2([#me{entrytype = table_column, oid = Oid, aliasname = Name} | T],
Fd) ->
t("insert oid [table column]: ~p - ~w", [Name, Oid]),
io:format(Fd, "-define(~w, ~w).~n", [Name, lists:last(Oid)]),
insert_oids2(T, Fd);
insert_oids2([#me{entrytype = variable, oid = Oid, aliasname = Name} | T],
Fd) ->
t("insert oid [variable]: ~p - ~w", [Name, Oid]),
io:format(Fd, "-define(~w, ~w).~n", [Name, Oid]),
io:format(Fd, "-define(~w, ~w).~n", [merge_atoms(Name, instance),
Oid ++ [0]]),
insert_oids2(T, Fd);
insert_oids2([#me{oid = Oid, aliasname = Name} | T], Fd) ->
t("insert oid: ~p - ~w", [Name, Oid]),
io:format(Fd, "~n-define(~w, ~w).~n", [Name, Oid]),
insert_oids2(T, Fd);
insert_oids2([], _Fd) ->
ok.
insert_notifs(Traps, Fd) ->
d("insert notifications"),
Notifs = [Notif || Notif <- Traps, is_record(Notif, notification)],
case Notifs of
[] ->
ok;
_ ->
io:format(Fd, "~n%% Notifications~n", []),
insert_notifs2(Notifs, Fd)
end.
insert_notifs2([], _Fd) ->
ok;
insert_notifs2([#notification{trapname = Name, oid = Oid}|T], Fd) ->
t("insert notification ~p - ~w", [Name, Oid]),
io:format(Fd, "-define(~w, ~w).~n", [Name, Oid]),
insert_notifs2(T, Fd).
%%-----------------------------------------------------------------
%% There's nothing strange with this function! Enums can be
defined in types and in mibentries ; therefore , we first call
%% ins_types and then ins_mes to insert enums from different places.
%%-----------------------------------------------------------------
insert_enums(Types, MEs, Fd) ->
d("insert enums"),
T = ins_types(Types, Fd, []),
ins_mes(MEs, T, Fd).
Insert all types , but not the imported . Ret the names of inserted
%% types.
ins_types([#asn1_type{aliasname = Name,
assocList = Alist,
imported = false} | T],
Fd, Res)
when list(Alist) ->
case lists:keysearch(enums, 1, Alist) of
{value, {enums, Enums}} when Enums /= [] ->
case Enums of
[] -> ins_types(T, Fd, Res);
NewEnums ->
io:format(Fd, "~n%% Definitions from ~w~n", [Name]),
ins_enums(NewEnums, Name, Fd),
ins_types(T, Fd, [Name | Res])
end;
_ -> ins_types(T, Fd, Res)
end;
ins_types([_ | T], Fd, Res) ->
ins_types(T, Fd, Res);
ins_types([], _Fd, Res) -> Res.
ins_mes([#me{entrytype = internal} | T], Types, Fd) ->
ins_mes(T, Types, Fd);
ins_mes([#me{entrytype = table} | T], Types, Fd) ->
ins_mes(T, Types, Fd);
ins_mes([#me{aliasname = Name,
asn1_type = #asn1_type{assocList = Alist,
aliasname = Aname},
imported = false} | T],
Types, Fd)
when list(Alist) ->
case lists:keysearch(enums, 1, Alist) of
{value, {enums, Enums}} when Enums /= [] ->
case Enums of
[] -> ins_mes(T, Types, Fd);
NewEnums ->
%% Now, check if the type is already inserted
%% (by ins_types).
case lists:member(Aname, Types) of
false ->
io:format(Fd, "~n%% Enum definitions from ~w~n",
[Name]),
ins_enums(NewEnums, Name, Fd),
ins_mes(T, Types, Fd);
_ -> ins_mes(T, Types, Fd)
end
end;
_ -> ins_mes(T, Types, Fd)
end;
ins_mes([_ | T], Types, Fd) ->
ins_mes(T, Types, Fd);
ins_mes([], _Types, _Fd) -> ok.
ins_enums([{Name, Val} | T], Origin, Fd) ->
EnumName = merge_atoms(Origin, Name),
io:format(Fd, "-define(~w, ~w).~n", [EnumName, Val]),
ins_enums(T, Origin, Fd);
ins_enums([], _Origin, _Fd) ->
ok.
%%----------------------------------------------------------------------
%% Solves the problem with placing '' around some atoms.
You ca n't write two atoms using .
%%----------------------------------------------------------------------
merge_atoms(TypeOrigin, Name) ->
list_to_atom(lists:append([atom_to_list(TypeOrigin), "_",
atom_to_list(Name)])).
insert_defvals(Mes, Fd) ->
d("insert default values"),
io:format(Fd, "~n%% Default values~n", []),
insert_defvals2(Mes, Fd),
io:format(Fd, "~n", []).
insert_defvals2([#me{imported = true} | T], Fd) ->
insert_defvals2(T, Fd);
insert_defvals2([#me{entrytype = table_column, assocList = Alist,
aliasname = Name} | T],
Fd) ->
case snmpc_misc:assq(defval, Alist) of
{value, Val} ->
Atom = merge_atoms('default', Name),
io:format(Fd, "-define(~w, ~w).~n", [Atom, Val]);
_ -> ok
end,
insert_defvals2(T, Fd);
insert_defvals2([#me{entrytype = variable, assocList = Alist, aliasname = Name}
| T],
Fd) ->
case snmpc_misc:assq(variable_info, Alist) of
{value, VarInfo} ->
case VarInfo#variable_info.defval of
undefined -> ok;
Val ->
Atom = merge_atoms('default', Name),
io:format(Fd, "-define(~w, ~w).~n", [Atom, Val])
end;
_ -> ok
end,
insert_defvals2(T, Fd);
insert_defvals2([_ | T], Fd) ->
insert_defvals2(T, Fd);
insert_defvals2([], _Fd) -> ok.
insert_range(Mes, Fd) ->
d("insert range"),
io:format(Fd, "~n%% Range values~n", []),
insert_range2(Mes, Fd),
io:format(Fd, "~n", []).
insert_range2([#me{imported = true} | T], Fd)->
insert_range2(T,Fd);
insert_range2([#me{asn1_type=#asn1_type{bertype='OCTET STRING',lo=Low,hi=High},aliasname=Name}|T],Fd)->
case Low==undefined of
true->
insert_range2(T,Fd);
false->
AtomLow = merge_atoms('low', Name),
AtomHigh = merge_atoms('high', Name),
io:format(Fd,"-define(~w, ~w).~n",[AtomLow,Low]),
io:format(Fd,"-define(~w, ~w).~n",[AtomHigh,High]),
insert_range2(T,Fd)
end;
insert_range2([#me{asn1_type=#asn1_type{bertype='Unsigned32',lo=Low,hi=High},aliasname=Name}|T],Fd)->
AtomLow = merge_atoms('low', Name),
AtomHigh = merge_atoms('high', Name),
io:format(Fd,"-define(~w, ~w).~n",[AtomLow,Low]),
io:format(Fd,"-define(~w, ~w).~n",[AtomHigh,High]),
insert_range2(T,Fd);
insert_range2([#me{asn1_type=#asn1_type{bertype='Counter32',lo=Low,hi=High},aliasname=Name}|T],Fd)->
AtomLow = merge_atoms('low', Name),
AtomHigh = merge_atoms('high', Name),
io:format(Fd,"-define(~w, ~w).~n",[AtomLow,Low]),
io:format(Fd,"-define(~w, ~w).~n",[AtomHigh,High]),
insert_range2(T,Fd);
insert_range2([#me{asn1_type=#asn1_type{bertype='INTEGER',lo=Low,hi=High},aliasname=Name}|T],Fd)->
case Low==undefined of
true->
insert_range2(T,Fd);
false->
AtomLow = merge_atoms('low', Name),
AtomHigh = merge_atoms('high', Name),
io:format(Fd,"-define(~w, ~w).~n",[AtomLow,Low]),
io:format(Fd,"-define(~w, ~w).~n",[AtomHigh,High]),
insert_range2(T,Fd)
end;
insert_range2([_|T],Fd) ->
insert_range2(T,Fd);
insert_range2([],_Fd) ->
ok.
month(1) -> "Jan";
month(2) -> "Feb";
month(3) -> "Mar";
month(4) -> "Apr";
month(5) -> "May";
month(6) -> "Jun";
month(7) -> "Jul";
month(8) -> "Aug";
month(9) -> "Sep";
month(10) -> "Oct";
month(11) -> "Nov";
month(12) -> "Dec".
%%%-----------------------------------------------------------------
Interface for erl_compile .
%%%-----------------------------------------------------------------
%% Opts#options.specific
compile(Input, Output, Opts) ->
set_verbosity(Opts),
set_filename(Input),
t("compile -> entry with"
"~n Input: ~s"
"~n Output: ~s"
"~n Opts: ~p", [Input, Output, Opts]),
case convert(Input++".bin", Output++".hrl", Input) of
ok ->
ok;
{error, Reason} ->
io:format("~p", [Reason]),
error
end.
set_verbosity(#options{verbose = Verbose, specific = Spec}) ->
set_verbosity(Verbose, Spec).
set_verbosity(Verbose, Spec) ->
Verbosity =
case lists:keysearch(verbosity, 1, Spec) of
{value, {verbosity, V}} ->
case (catch snmpc_lib:vvalidate(V)) of
ok ->
case Verbose of
true ->
case V of
silence ->
log;
info ->
log;
_ ->
V
end;
_ ->
V
end;
_ ->
case Verbose of
true ->
log;
false ->
silence
end
end;
false ->
case Verbose of
true ->
log;
false ->
silence
end
end,
put(verbosity, Verbosity).
set_filename(Filename) ->
Rootname = filename:rootname(Filename),
Basename = filename:basename(Rootname ++ ".mib"),
put(filename, Basename).
i(F, A) ->
snmpc_lib:i(F, A).
l(F, A) ->
snmpc_lib:l(F, A).
d(F) ->
d(F, []).
d(F, A) ->
snmpc_lib:d(F, A).
t(F) ->
t(F, []).
t(F, A) ->
snmpc_lib:t(F, A).
| null | https://raw.githubusercontent.com/yrashk/erlang/e1282325ed75e52a98d58f5bd9fb0fa27896173f/lib/snmp/src/compile/snmpc_mib_to_hrl.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
External exports
-----------------------------------------------------------------
Func: convert/1
column numbers for columns and values for enums.
warning if a duplicate name is found.
Returns: ok | {error, Reason}
Note: The Mib must be compiled.
-----------------------------------------------------------------
-----------------------------------------------------------------
There's nothing strange with this function! Enums can be
ins_types and then ins_mes to insert enums from different places.
-----------------------------------------------------------------
types.
Now, check if the type is already inserted
(by ins_types).
----------------------------------------------------------------------
Solves the problem with placing '' around some atoms.
----------------------------------------------------------------------
-----------------------------------------------------------------
-----------------------------------------------------------------
Opts#options.specific | Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(snmpc_mib_to_hrl).
-include_lib("stdlib/include/erl_compile.hrl").
-include("snmp_types.hrl").
-export([convert/1, compile/3]).
: MibName = string ( ) without extension .
Purpose : Produce a .hrl file with oid for tables and variables ,
Writes only the first occurence of a name . Prints a
convert(MibName) ->
MibFile = MibName ++ ".bin",
HrlFile = MibName ++ ".hrl",
put(verbosity, trace),
convert(MibFile, HrlFile, MibName).
convert(MibFile, HrlFile, MibName) ->
t("convert -> entry with"
"~n MibFile: ~s"
"~n HrlFile: ~s"
"~n MibName: ~s", [MibFile, HrlFile, MibName]),
case snmpc_misc:read_mib(MibFile) of
{ok, #mib{asn1_types = Types, mes = MEs, traps = Traps}} ->
d("mib successfully read"),
resolve(Types, MEs, Traps, HrlFile,
filename:basename(MibName)),
ok;
{error, Reason} ->
i("failed reading mib: "
"~n Reason: ~p", [Reason]),
{error, Reason}
end.
resolve(Types, MEs, Traps, HrlFile, MibName) ->
t("resolve -> entry"),
case file:open(HrlFile, [write]) of
{ok, Fd} ->
insert_header(Fd),
insert_begin(Fd, MibName),
insert_notifs(Traps, Fd),
insert_oids(MEs, Fd),
insert_range(MEs, Fd),
insert_enums(Types, MEs, Fd),
insert_defvals(MEs, Fd),
insert_end(Fd),
file:close(Fd),
l("~s written", [HrlFile]);
{error, Reason} ->
i("failed opening output file: "
"~n Reason: ~p", [Reason]),
{error, Reason}
end.
insert_header(Fd) ->
d("insert file header"),
io:format(Fd, "%%% This file was automatically generated by "
"snmpc_mib_to_hrl version ~s~n", [?version]),
{Y,Mo,D} = date(),
{H,Mi,S} = time(),
io:format(Fd, "%%% Date: ~2.2.0w-~s-~w::~2.2.0w:~2.2.0w:~2.2.0w~n",
[D,month(Mo),Y,H,Mi,S]).
insert_begin(Fd, MibName) ->
d("insert file begin"),
io:format(Fd,
"-ifndef('~s').~n"
"-define('~s', true).~n", [MibName, MibName]).
insert_end(Fd) ->
d("insert file end"),
io:format(Fd, "-endif.~n", []).
insert_oids(MEs, Fd) ->
d("insert oids"),
io:format(Fd, "~n%% Oids~n", []),
insert_oids2(MEs, Fd),
io:format(Fd, "~n", []).
insert_oids2([#me{imported = true} | T], Fd) ->
insert_oids2(T, Fd);
insert_oids2([#me{entrytype = table_column, oid = Oid, aliasname = Name} | T],
Fd) ->
t("insert oid [table column]: ~p - ~w", [Name, Oid]),
io:format(Fd, "-define(~w, ~w).~n", [Name, lists:last(Oid)]),
insert_oids2(T, Fd);
insert_oids2([#me{entrytype = variable, oid = Oid, aliasname = Name} | T],
Fd) ->
t("insert oid [variable]: ~p - ~w", [Name, Oid]),
io:format(Fd, "-define(~w, ~w).~n", [Name, Oid]),
io:format(Fd, "-define(~w, ~w).~n", [merge_atoms(Name, instance),
Oid ++ [0]]),
insert_oids2(T, Fd);
insert_oids2([#me{oid = Oid, aliasname = Name} | T], Fd) ->
t("insert oid: ~p - ~w", [Name, Oid]),
io:format(Fd, "~n-define(~w, ~w).~n", [Name, Oid]),
insert_oids2(T, Fd);
insert_oids2([], _Fd) ->
ok.
insert_notifs(Traps, Fd) ->
d("insert notifications"),
Notifs = [Notif || Notif <- Traps, is_record(Notif, notification)],
case Notifs of
[] ->
ok;
_ ->
io:format(Fd, "~n%% Notifications~n", []),
insert_notifs2(Notifs, Fd)
end.
insert_notifs2([], _Fd) ->
ok;
insert_notifs2([#notification{trapname = Name, oid = Oid}|T], Fd) ->
t("insert notification ~p - ~w", [Name, Oid]),
io:format(Fd, "-define(~w, ~w).~n", [Name, Oid]),
insert_notifs2(T, Fd).
defined in types and in mibentries ; therefore , we first call
insert_enums(Types, MEs, Fd) ->
d("insert enums"),
T = ins_types(Types, Fd, []),
ins_mes(MEs, T, Fd).
Insert all types , but not the imported . Ret the names of inserted
ins_types([#asn1_type{aliasname = Name,
assocList = Alist,
imported = false} | T],
Fd, Res)
when list(Alist) ->
case lists:keysearch(enums, 1, Alist) of
{value, {enums, Enums}} when Enums /= [] ->
case Enums of
[] -> ins_types(T, Fd, Res);
NewEnums ->
io:format(Fd, "~n%% Definitions from ~w~n", [Name]),
ins_enums(NewEnums, Name, Fd),
ins_types(T, Fd, [Name | Res])
end;
_ -> ins_types(T, Fd, Res)
end;
ins_types([_ | T], Fd, Res) ->
ins_types(T, Fd, Res);
ins_types([], _Fd, Res) -> Res.
ins_mes([#me{entrytype = internal} | T], Types, Fd) ->
ins_mes(T, Types, Fd);
ins_mes([#me{entrytype = table} | T], Types, Fd) ->
ins_mes(T, Types, Fd);
ins_mes([#me{aliasname = Name,
asn1_type = #asn1_type{assocList = Alist,
aliasname = Aname},
imported = false} | T],
Types, Fd)
when list(Alist) ->
case lists:keysearch(enums, 1, Alist) of
{value, {enums, Enums}} when Enums /= [] ->
case Enums of
[] -> ins_mes(T, Types, Fd);
NewEnums ->
case lists:member(Aname, Types) of
false ->
io:format(Fd, "~n%% Enum definitions from ~w~n",
[Name]),
ins_enums(NewEnums, Name, Fd),
ins_mes(T, Types, Fd);
_ -> ins_mes(T, Types, Fd)
end
end;
_ -> ins_mes(T, Types, Fd)
end;
ins_mes([_ | T], Types, Fd) ->
ins_mes(T, Types, Fd);
ins_mes([], _Types, _Fd) -> ok.
ins_enums([{Name, Val} | T], Origin, Fd) ->
EnumName = merge_atoms(Origin, Name),
io:format(Fd, "-define(~w, ~w).~n", [EnumName, Val]),
ins_enums(T, Origin, Fd);
ins_enums([], _Origin, _Fd) ->
ok.
You ca n't write two atoms using .
merge_atoms(TypeOrigin, Name) ->
list_to_atom(lists:append([atom_to_list(TypeOrigin), "_",
atom_to_list(Name)])).
insert_defvals(Mes, Fd) ->
d("insert default values"),
io:format(Fd, "~n%% Default values~n", []),
insert_defvals2(Mes, Fd),
io:format(Fd, "~n", []).
insert_defvals2([#me{imported = true} | T], Fd) ->
insert_defvals2(T, Fd);
insert_defvals2([#me{entrytype = table_column, assocList = Alist,
aliasname = Name} | T],
Fd) ->
case snmpc_misc:assq(defval, Alist) of
{value, Val} ->
Atom = merge_atoms('default', Name),
io:format(Fd, "-define(~w, ~w).~n", [Atom, Val]);
_ -> ok
end,
insert_defvals2(T, Fd);
insert_defvals2([#me{entrytype = variable, assocList = Alist, aliasname = Name}
| T],
Fd) ->
case snmpc_misc:assq(variable_info, Alist) of
{value, VarInfo} ->
case VarInfo#variable_info.defval of
undefined -> ok;
Val ->
Atom = merge_atoms('default', Name),
io:format(Fd, "-define(~w, ~w).~n", [Atom, Val])
end;
_ -> ok
end,
insert_defvals2(T, Fd);
insert_defvals2([_ | T], Fd) ->
insert_defvals2(T, Fd);
insert_defvals2([], _Fd) -> ok.
insert_range(Mes, Fd) ->
d("insert range"),
io:format(Fd, "~n%% Range values~n", []),
insert_range2(Mes, Fd),
io:format(Fd, "~n", []).
insert_range2([#me{imported = true} | T], Fd)->
insert_range2(T,Fd);
insert_range2([#me{asn1_type=#asn1_type{bertype='OCTET STRING',lo=Low,hi=High},aliasname=Name}|T],Fd)->
case Low==undefined of
true->
insert_range2(T,Fd);
false->
AtomLow = merge_atoms('low', Name),
AtomHigh = merge_atoms('high', Name),
io:format(Fd,"-define(~w, ~w).~n",[AtomLow,Low]),
io:format(Fd,"-define(~w, ~w).~n",[AtomHigh,High]),
insert_range2(T,Fd)
end;
insert_range2([#me{asn1_type=#asn1_type{bertype='Unsigned32',lo=Low,hi=High},aliasname=Name}|T],Fd)->
AtomLow = merge_atoms('low', Name),
AtomHigh = merge_atoms('high', Name),
io:format(Fd,"-define(~w, ~w).~n",[AtomLow,Low]),
io:format(Fd,"-define(~w, ~w).~n",[AtomHigh,High]),
insert_range2(T,Fd);
insert_range2([#me{asn1_type=#asn1_type{bertype='Counter32',lo=Low,hi=High},aliasname=Name}|T],Fd)->
AtomLow = merge_atoms('low', Name),
AtomHigh = merge_atoms('high', Name),
io:format(Fd,"-define(~w, ~w).~n",[AtomLow,Low]),
io:format(Fd,"-define(~w, ~w).~n",[AtomHigh,High]),
insert_range2(T,Fd);
insert_range2([#me{asn1_type=#asn1_type{bertype='INTEGER',lo=Low,hi=High},aliasname=Name}|T],Fd)->
case Low==undefined of
true->
insert_range2(T,Fd);
false->
AtomLow = merge_atoms('low', Name),
AtomHigh = merge_atoms('high', Name),
io:format(Fd,"-define(~w, ~w).~n",[AtomLow,Low]),
io:format(Fd,"-define(~w, ~w).~n",[AtomHigh,High]),
insert_range2(T,Fd)
end;
insert_range2([_|T],Fd) ->
insert_range2(T,Fd);
insert_range2([],_Fd) ->
ok.
month(1) -> "Jan";
month(2) -> "Feb";
month(3) -> "Mar";
month(4) -> "Apr";
month(5) -> "May";
month(6) -> "Jun";
month(7) -> "Jul";
month(8) -> "Aug";
month(9) -> "Sep";
month(10) -> "Oct";
month(11) -> "Nov";
month(12) -> "Dec".
Interface for erl_compile .
compile(Input, Output, Opts) ->
set_verbosity(Opts),
set_filename(Input),
t("compile -> entry with"
"~n Input: ~s"
"~n Output: ~s"
"~n Opts: ~p", [Input, Output, Opts]),
case convert(Input++".bin", Output++".hrl", Input) of
ok ->
ok;
{error, Reason} ->
io:format("~p", [Reason]),
error
end.
set_verbosity(#options{verbose = Verbose, specific = Spec}) ->
set_verbosity(Verbose, Spec).
set_verbosity(Verbose, Spec) ->
Verbosity =
case lists:keysearch(verbosity, 1, Spec) of
{value, {verbosity, V}} ->
case (catch snmpc_lib:vvalidate(V)) of
ok ->
case Verbose of
true ->
case V of
silence ->
log;
info ->
log;
_ ->
V
end;
_ ->
V
end;
_ ->
case Verbose of
true ->
log;
false ->
silence
end
end;
false ->
case Verbose of
true ->
log;
false ->
silence
end
end,
put(verbosity, Verbosity).
set_filename(Filename) ->
Rootname = filename:rootname(Filename),
Basename = filename:basename(Rootname ++ ".mib"),
put(filename, Basename).
i(F, A) ->
snmpc_lib:i(F, A).
l(F, A) ->
snmpc_lib:l(F, A).
d(F) ->
d(F, []).
d(F, A) ->
snmpc_lib:d(F, A).
t(F) ->
t(F, []).
t(F, A) ->
snmpc_lib:t(F, A).
|
493b7a7ac7ec0ad3a39acd376342d33d2dd2b8a87775b1eb802070bcd3708b60 | camlp5/camlp5 | camlp5_top.ml | (* camlp5r *)
(* camlp5_top.ml,v *)
Copyright ( c ) INRIA 2007 - 2017
#load "pa_macro.cmo";
#load "q_MLast.cmo";
open Parsetree;
open Lexing;
open Versdep;
open Camlp5_top_funs;
Toploop.parse_toplevel_phrase.val := wrapped_toplevel_phrase ;
Toploop.parse_use_file.val :=
wrap use_file (fun lb -> lb.lex_curr_pos - lb.lex_start_pos)
;
Pcaml.warning.val :=
fun loc txt ->
Toploop.print_warning (Ast2pt.mkloc loc) Format.err_formatter
(Warnings.Preprocessor txt)
;
| null | https://raw.githubusercontent.com/camlp5/camlp5/9e8155f8ae5a584bbb4ad96d10d6fec63ed8204c/top/camlp5_top.ml | ocaml | camlp5r
camlp5_top.ml,v | Copyright ( c ) INRIA 2007 - 2017
#load "pa_macro.cmo";
#load "q_MLast.cmo";
open Parsetree;
open Lexing;
open Versdep;
open Camlp5_top_funs;
Toploop.parse_toplevel_phrase.val := wrapped_toplevel_phrase ;
Toploop.parse_use_file.val :=
wrap use_file (fun lb -> lb.lex_curr_pos - lb.lex_start_pos)
;
Pcaml.warning.val :=
fun loc txt ->
Toploop.print_warning (Ast2pt.mkloc loc) Format.err_formatter
(Warnings.Preprocessor txt)
;
|
6f60624322388d2850f296da15a14307046809971c5be1133871289f8ac785f0 | fp-works/2019-winter-Haskell-school | LogAnalysisSpec.hs | import Test.Hspec
import Log
import LogAnalysis
main :: IO ()
main =
hspec $ do
let infoLog = "I 29 la la la"
let warningLog = "W 823 warning message"
let errorLog = "E 2 562 help help"
let unknownLog = "This is an unknown log"
let log1 = LogMessage Info 1 "log 1"
let log2 = LogMessage Warning 2 "log 2"
let log3 = LogMessage (Error 2) 3 "log 3"
let log4 = LogMessage (Error 3) 4 "log 4"
let log5 = LogMessage Warning 5 "log 5"
let logErr51 = LogMessage (Error 51) 7 "log 51"
let logErr53 = LogMessage (Error 53) 9 "log 53"
let notlog = Unknown "this is not a log"
let oneNodeMessageTree = Node Leaf log2 Leaf
let twoNodesMessageTree1 = Node (Node Leaf log1 Leaf) log2 Leaf
let twoNodesMessageTree2 = Node Leaf log2 (Node Leaf log3 Leaf)
describe "LogAnalysis tests" $
Exercise 1 --
do
it "should parse correct log messages" $ do
parseMessage infoLog `shouldBe` (LogMessage Info 29 "la la la")
parseMessage warningLog `shouldBe`
(LogMessage Warning 823 "warning message")
parseMessage errorLog `shouldBe` (LogMessage (Error 2) 562 "help help")
parseMessage unknownLog `shouldBe` (Unknown "This is an unknown log")
Exercise 2 --
it "should return original message tree when entering a unknown log" $ do
insert notlog oneNodeMessageTree `shouldBe` (oneNodeMessageTree)
it "should return correct message tree with initial Leaf message tree" $ do
insert log1 Leaf `shouldBe` (Node Leaf log1 Leaf)
it
"should correct message tree with a log with earlier timestamp given one node message tree" $ do
insert log1 oneNodeMessageTree `shouldBe`
(Node (Node Leaf log1 Leaf) log2 Leaf)
it
"should correct message tree with a log with later timestamp given one node message tree" $ do
insert log3 oneNodeMessageTree `shouldBe`
(Node Leaf log2 (Node Leaf log3 Leaf))
it
"should correct message tree with a log with later timestamp given two nodes message tree 1" $ do
insert log4 twoNodesMessageTree1 `shouldBe`
(Node (Node Leaf log1 Leaf) log2 (Node Leaf log4 Leaf))
it
"should correct message tree with a log with later timestamp given two nodes message tree 2" $ do
insert log4 twoNodesMessageTree2 `shouldBe`
(Node Leaf log2 (Node Leaf log3 (Node Leaf log4 Leaf)))
-- Excercise 3 --
it "should return a sorted message tree" $ do
let testLogMessageList = [log2, log3, log1, log5, log4]
build testLogMessageList `shouldBe`
(Node
(Node Leaf log1 (Node (Node Leaf log2 Leaf) log3 Leaf))
log4
(Node Leaf log5 Leaf))
it "should return a sorted message tree with unknown log" $ do
let testLogMessageListWithUnKnownLog =
[log2, log3, log1, notlog, log5, log4]
build testLogMessageListWithUnKnownLog `shouldBe`
(Node
(Node Leaf log1 (Node (Node Leaf log2 Leaf) log3 Leaf))
log4
(Node Leaf log5 Leaf))
Exercise 4 --
it "should return a sorted log message list" $ do
let testMessageTree =
Node
(Node Leaf log1 (Node (Node Leaf log2 Leaf) log3 Leaf))
log4
(Node Leaf log5 Leaf)
inOrder testMessageTree `shouldBe` ([log1, log2, log3, log4, log5])
Exercise 5 --
it
"should return correct error message in a string list from a unsorted log message list" $ do
let testLogMessageList = [log4, log3, logErr53, log5, logErr51, log2]
whatWentWrong testLogMessageList `shouldBe` (["log 51", "log 53"])
| null | https://raw.githubusercontent.com/fp-works/2019-winter-Haskell-school/823b67f019b9e7bc0d3be36711c0cc7da4eba7d2/cis194/week2/tien/homework2/test/LogAnalysisSpec.hs | haskell |
Excercise 3 --
| import Test.Hspec
import Log
import LogAnalysis
main :: IO ()
main =
hspec $ do
let infoLog = "I 29 la la la"
let warningLog = "W 823 warning message"
let errorLog = "E 2 562 help help"
let unknownLog = "This is an unknown log"
let log1 = LogMessage Info 1 "log 1"
let log2 = LogMessage Warning 2 "log 2"
let log3 = LogMessage (Error 2) 3 "log 3"
let log4 = LogMessage (Error 3) 4 "log 4"
let log5 = LogMessage Warning 5 "log 5"
let logErr51 = LogMessage (Error 51) 7 "log 51"
let logErr53 = LogMessage (Error 53) 9 "log 53"
let notlog = Unknown "this is not a log"
let oneNodeMessageTree = Node Leaf log2 Leaf
let twoNodesMessageTree1 = Node (Node Leaf log1 Leaf) log2 Leaf
let twoNodesMessageTree2 = Node Leaf log2 (Node Leaf log3 Leaf)
describe "LogAnalysis tests" $
do
it "should parse correct log messages" $ do
parseMessage infoLog `shouldBe` (LogMessage Info 29 "la la la")
parseMessage warningLog `shouldBe`
(LogMessage Warning 823 "warning message")
parseMessage errorLog `shouldBe` (LogMessage (Error 2) 562 "help help")
parseMessage unknownLog `shouldBe` (Unknown "This is an unknown log")
it "should return original message tree when entering a unknown log" $ do
insert notlog oneNodeMessageTree `shouldBe` (oneNodeMessageTree)
it "should return correct message tree with initial Leaf message tree" $ do
insert log1 Leaf `shouldBe` (Node Leaf log1 Leaf)
it
"should correct message tree with a log with earlier timestamp given one node message tree" $ do
insert log1 oneNodeMessageTree `shouldBe`
(Node (Node Leaf log1 Leaf) log2 Leaf)
it
"should correct message tree with a log with later timestamp given one node message tree" $ do
insert log3 oneNodeMessageTree `shouldBe`
(Node Leaf log2 (Node Leaf log3 Leaf))
it
"should correct message tree with a log with later timestamp given two nodes message tree 1" $ do
insert log4 twoNodesMessageTree1 `shouldBe`
(Node (Node Leaf log1 Leaf) log2 (Node Leaf log4 Leaf))
it
"should correct message tree with a log with later timestamp given two nodes message tree 2" $ do
insert log4 twoNodesMessageTree2 `shouldBe`
(Node Leaf log2 (Node Leaf log3 (Node Leaf log4 Leaf)))
it "should return a sorted message tree" $ do
let testLogMessageList = [log2, log3, log1, log5, log4]
build testLogMessageList `shouldBe`
(Node
(Node Leaf log1 (Node (Node Leaf log2 Leaf) log3 Leaf))
log4
(Node Leaf log5 Leaf))
it "should return a sorted message tree with unknown log" $ do
let testLogMessageListWithUnKnownLog =
[log2, log3, log1, notlog, log5, log4]
build testLogMessageListWithUnKnownLog `shouldBe`
(Node
(Node Leaf log1 (Node (Node Leaf log2 Leaf) log3 Leaf))
log4
(Node Leaf log5 Leaf))
it "should return a sorted log message list" $ do
let testMessageTree =
Node
(Node Leaf log1 (Node (Node Leaf log2 Leaf) log3 Leaf))
log4
(Node Leaf log5 Leaf)
inOrder testMessageTree `shouldBe` ([log1, log2, log3, log4, log5])
it
"should return correct error message in a string list from a unsorted log message list" $ do
let testLogMessageList = [log4, log3, logErr53, log5, logErr51, log2]
whatWentWrong testLogMessageList `shouldBe` (["log 51", "log 53"])
|
ccd56196517898ea58b5e86bd55e64a18d5edab7fea9d73374564c2d7e8c5965 | Kappa-Dev/KappaTools | rule_modes.ml | type arity = Usual | Unary | Unary_refinement
type direction = Direct | Op
module RuleModeIdS:
SetMap.S with type elt = int * arity * direction
=
SetMap.Make
(struct
type t = int * arity * direction
let compare = compare
let print _ _ = ()
end)
module RuleModeIdSet = RuleModeIdS.Set
module RuleModeS:
SetMap.S with type elt = arity * direction
=
SetMap.Make
(struct
type t = arity * direction
let compare = compare
let print _ _ = ()
end)
module RuleModeMap = RuleModeS.Map
let sum_map add map1 map2 =
snd
(RuleModeMap.monadic_fold2 () ()
(fun () () key a1 a2 map ->
(),RuleModeMap.add
key (add a1 a2) map)
(fun () () key a1 map ->
(),RuleModeMap.add key a1 map)
(fun () () _ _ map -> (),map)
map1
map2
map2)
| null | https://raw.githubusercontent.com/Kappa-Dev/KappaTools/eef2337e8688018eda47ccc838aea809cae68de7/core/symmetries/rule_modes.ml | ocaml | type arity = Usual | Unary | Unary_refinement
type direction = Direct | Op
module RuleModeIdS:
SetMap.S with type elt = int * arity * direction
=
SetMap.Make
(struct
type t = int * arity * direction
let compare = compare
let print _ _ = ()
end)
module RuleModeIdSet = RuleModeIdS.Set
module RuleModeS:
SetMap.S with type elt = arity * direction
=
SetMap.Make
(struct
type t = arity * direction
let compare = compare
let print _ _ = ()
end)
module RuleModeMap = RuleModeS.Map
let sum_map add map1 map2 =
snd
(RuleModeMap.monadic_fold2 () ()
(fun () () key a1 a2 map ->
(),RuleModeMap.add
key (add a1 a2) map)
(fun () () key a1 map ->
(),RuleModeMap.add key a1 map)
(fun () () _ _ map -> (),map)
map1
map2
map2)
| |
ffffeddecf3f30075eaa4f9e40403dab4b38f23e2d98508476d9ab8a86b544d2 | hatashiro/line | Types.hs | {-|
This module is to define aliases commonly used in other modules.
-}
module Line.Messaging.Common.Types (
-- * General types
ID,
URL,
-- * LINE API types
ChannelSecret,
ChannelAccessToken,
) where
import qualified Data.Text as T
-- | A type alias to specify an identifier of something.
type ID = T.Text
-- | A type alias to specify a URL.
type URL = T.Text
-- | A type alias to specify a channel secret. About issueing and using the
-- channel secret, please refer to corresponding LINE documentations.
type ChannelSecret = T.Text
-- | A type alias to specify a channel access token. About issueing and using
-- the channel access token, please refer to corresponding LINE documentations.
type ChannelAccessToken = T.Text
| null | https://raw.githubusercontent.com/hatashiro/line/3b5ddb0b98e5937d86157308512c1c96cd49c86a/src/Line/Messaging/Common/Types.hs | haskell | |
This module is to define aliases commonly used in other modules.
* General types
* LINE API types
| A type alias to specify an identifier of something.
| A type alias to specify a URL.
| A type alias to specify a channel secret. About issueing and using the
channel secret, please refer to corresponding LINE documentations.
| A type alias to specify a channel access token. About issueing and using
the channel access token, please refer to corresponding LINE documentations. |
module Line.Messaging.Common.Types (
ID,
URL,
ChannelSecret,
ChannelAccessToken,
) where
import qualified Data.Text as T
type ID = T.Text
type URL = T.Text
type ChannelSecret = T.Text
type ChannelAccessToken = T.Text
|
ae26f2a8bfc1e8f4cdf5145b74bda32b9c47c667b39cf34abd6390b37dd51a04 | argp/bap | check.ml | (**************************************************************************)
(* *)
: a generic graph library for OCaml
Copyright ( C ) 2004 - 2007
, and
(* *)
(* This software is free software; you can redistribute it and/or *)
modify it under the terms of the GNU Library General Public
License version 2 , with the special exception on linking
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
open Graph
module Int = struct
type t = int
let compare = compare
let hash = Hashtbl.hash
let equal = (=)
let default = 0
end
module W = struct
type label = int
type t = int
let weight x = x
let zero = 0
let add = (+)
let compare = compare
end
(********************************************)
Generic functions
(********************************************)
module Generic = struct
Generic tests for imperative graphs
module Make
(G : Sig.I with type V.label = int)
(V : sig val v: int val e: int end) =
struct
module O = Oper.I(G)
let test_mirror g =
if G.is_directed then begin (* TODO: remove *)
let g' = O.mirror g in
assert (G.nb_vertex g = G.nb_vertex g');
G.iter_edges (fun v1 v2 -> assert (G.mem_edge g' v2 v1)) g;
G.iter_edges (fun v1 v2 -> assert (G.mem_edge g v2 v1)) g';
()
end
let g = G.create ()
let () =
let v1 = G.V.create 1 in
let v2 = G.V.create 2 in
let v3 = G.V.create 3 in
test_mirror g;
G.add_edge g v1 v2;
G.add_edge g v1 v3;
G.add_edge g v2 v1;
G.add_edge g v2 v2;
G.add_edge g v2 v2;
test_mirror g;
assert (G.nb_vertex g = V.v && G.nb_edges g = V.e);
G.remove_vertex g v1;
assert (G.nb_vertex g = 2 && G.nb_edges g = 1);
G.remove_vertex g v2;
assert (G.nb_vertex g = 1 && G.nb_edges g = 0);
test_mirror g;
G.clear g;
assert (G.nb_vertex g = 0 && G.nb_edges g = 0)
end
let () =
let module A = Make
(Imperative.Digraph.ConcreteLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
let module A = Make
(Imperative.Graph.ConcreteLabeled(Int)(Int))
(struct let v = 3 let e = 3 end)
in
let module A = Make
(Imperative.Digraph.AbstractLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
let module A = Make
(Imperative.Graph.AbstractLabeled(Int)(Int))
(struct let v = 3 let e = 3 end)
in
let module A = Make
(Imperative.Digraph.Concrete(Int))
(struct let v = 3 let e = 4 end)
in
let module A = Make
(Imperative.Graph.Concrete(Int))
(struct let v = 3 let e = 3 end)
in
let module A = Make
(Imperative.Digraph.Abstract(Int))
(struct let v = 3 let e = 4 end)
in
let module A = Make
(Imperative.Graph.Abstract(Int))
(struct let v = 3 let e = 3 end)
in
let module A = Make
(Imperative.Digraph.ConcreteBidirectional(Int))
(struct let v = 3 let e = 4 end)
in
let module A = Make
(Imperative.Digraph.ConcreteBidirectionalLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
()
Generic tests for persistent graphs
module MakeP
(G : Sig.P with type V.label = int)
(V : sig val v: int val e: int end) =
struct
module O = Oper.P(G)
let test_mirror g =
let g' = O.mirror g in
assert (G.nb_vertex g = G.nb_vertex g')
let () =
let g = G.empty in
let v1 = G.V.create 1 in
let v2 = G.V.create 2 in
let v3 = G.V.create 3 in
test_mirror g;
let g = G.add_edge g v1 v2 in
let g = G.add_edge g v1 v3 in
let g = G.add_edge g v2 v1 in
let g = G.add_edge g v2 v2 in
let g = G.add_edge g v2 v2 in
test_mirror g;
assert (G.nb_vertex g = V.v && G.nb_edges g = V.e);
let g = G.remove_vertex g v1 in
assert (G.nb_vertex g = 2 && G.nb_edges g = 1);
let g = G.remove_vertex g v2 in
assert (G.nb_vertex g = 1 && G.nb_edges g = 0);
test_mirror g
end
let () =
let module A = MakeP
(Persistent.Digraph.ConcreteLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
let module A = MakeP
(Persistent.Graph.ConcreteLabeled(Int)(Int))
(struct let v = 3 let e = 3 end)
in
let module A = MakeP
(Persistent.Digraph.AbstractLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
let module A = MakeP
(Persistent.Graph.AbstractLabeled(Int)(Int))
(struct let v = 3 let e = 3 end)
in
let module A = MakeP
(Persistent.Digraph.Concrete(Int))
(struct let v = 3 let e = 4 end)
in
let module A = MakeP
(Persistent.Graph.Concrete(Int))
(struct let v = 3 let e = 3 end)
in
let module A = MakeP
(Persistent.Digraph.Abstract(Int))
(struct let v = 3 let e = 4 end)
in
let module A = MakeP
(Persistent.Graph.Abstract(Int))
(struct let v = 3 let e = 3 end)
in
let module A = MakeP
(Persistent.Digraph.ConcreteBidirectional(Int))
(struct let v = 3 let e = 4 end)
in
let module A = MakeP
(Persistent.Digraph.ConcreteBidirectionalLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
()
(* find_edge *)
module Make2
(G : Sig.I
with type V.t = int and type E.label = int and type E.t = int * int * int)
=
struct
let g = G.create ()
let test_exn v1 v2 =
assert (G.find_all_edges g v1 v2 = []);
try
let _ = G.find_edge g v1 v2 in
assert false
with Not_found ->
()
let () =
let e1 = 1, 0, 2 in
let e2 = 1, 1, 3 in
let e2' = 1, 2, 3 in
let e3 = 2, 2, 1 in
G.add_edge_e g e1;
G.add_edge_e g e2;
G.add_edge_e g e2';
G.add_edge_e g e3;
G.add_edge_e g e3;
assert (G.find_edge g 1 2 = e1);
assert (List.length (G.find_all_edges g 1 3) = 2);
test_exn 2 3;
test_exn 2 4;
test_exn 5 2;
G.remove_vertex g 2;
assert (G.nb_vertex g = 2 && G.nb_edges g = 2)
end
let () =
let module D = Make2(Imperative.Digraph.ConcreteLabeled(Int)(Int)) in
D.test_exn 3 1;
let module G = Imperative.Graph.ConcreteLabeled(Int)(Int) in
let module G2 = Make2(G) in
assert (G.find_edge G2.g 3 1 = (3, 1, 1))
end
(********************************************)
Dijkstra
(********************************************)
module Dijkstra = struct
module TestDijkstra
(G : Sig.G with type V.label = int and type E.label = int)
(B : Builder.S with module G = G) =
struct
let g = B.empty ()
let v1 = G.V.create 1
let g = B.add_vertex g v1
let v2 = G.V.create 2
let g = B.add_vertex g v2
let v3 = G.V.create 3
let g = B.add_vertex g v3
let v4 = G.V.create 4
let g = B.add_vertex g v4
let v5 = G.V.create 5
let g = B.add_vertex g v5
let g = B.add_edge_e g (G.E.create v1 10 v2)
let g = B.add_edge_e g (G.E.create v2 50 v3)
let g = B.add_edge_e g (G.E.create v1 30 v4)
let g = B.add_edge_e g (G.E.create v1 100 v5)
let g = B.add_edge_e g (G.E.create v3 10 v5)
let g = B.add_edge_e g (G.E.create v4 20 v3)
let g = B.add_edge_e g (G.E.create v4 60 v5)
module Dij = Path.Dijkstra(G)(W)
module Dfs = Traverse.Dfs(G)
let test g i j w l =
let p,w' = Dij.shortest_path g i j in
assert (w' = w && List.length p = l)
let test_not_found g i j =
try let _ = Dij.shortest_path g i j in assert false with Not_found -> ()
let () = test g v1 v5 60 3
let () = test g v1 v1 0 0
let () = if G.is_directed then test_not_found g v5 v1
let () = assert (not (Dfs.has_cycle g))
let gc = B.add_edge_e g (G.E.create v5 10 v1)
let v6 = G.V.create 6
let gc = B.add_vertex gc v6
let () = if G.is_directed then test gc v1 v5 60 3
let () = test gc v5 v1 10 1
let () = test_not_found gc v1 v6
let () = assert (Dfs.has_cycle gc)
end
Dijkstra on Persistent Directed Labeled Graphs
module G = Persistent.Digraph.ConcreteLabeled(Int)(Int)
module Test1 = TestDijkstra(G)(Builder.P(G))
Dijkstra on Persistent Directed Abstract Labeled Graphs
module G2 = Persistent.Digraph.AbstractLabeled(Int)(Int)
module Test2 = TestDijkstra(G2)(Builder.P(G2))
Dijkstra on Imperative Hashed Directed Labeled Graphs
module G3 = Imperative.Digraph.ConcreteLabeled(Int)(Int)
module Test3 = TestDijkstra(G3)(Builder.I(G3))
end
(********************************************)
Traversal
(********************************************)
module Traversal = struct
module G = Imperative.Digraph.AbstractLabeled(Int)(Int)
module Dfs = Traverse.Dfs(G)
module Mark = Traverse.Mark(G)
let g = G.create ()
let newv g = let v = G.V.create 0 in G.add_vertex g v; v
let v1 = newv g
let v2 = newv g
let v3 = newv g
let v4 = newv g
let v5 = newv g
let add_edge g v1 l v2 = G.add_edge_e g (G.E.create v1 l v2)
let () =
add_edge g v1 10 v2;
add_edge g v2 50 v3;
add_edge g v1 30 v4;
add_edge g v1 100 v5;
add_edge g v3 10 v5;
add_edge g v4 20 v3;
add_edge g v4 60 v5
let () = assert (not (Mark.has_cycle g) && not (Dfs.has_cycle g))
let v6 = newv g
let () = assert (not (Mark.has_cycle g) && not (Dfs.has_cycle g))
let () = add_edge g v5 10 v1
let () = assert (Mark.has_cycle g && Dfs.has_cycle g)
debug dfs / Cormen p 479
let g = G.create ()
let newv i = let v = G.V.create i in G.add_vertex g v; v
let u = newv 1
let v = newv 2
let w = newv 3
let x = newv 4
let y = newv 5
let z = newv 6
let edge a b = add_edge g a 0 b
let () =
edge u v; edge u x;
edge v y;
edge w y; edge w z;
edge x v;
edge y x;
edge z z
open Format
let pre v = printf "pre %d@." (G.V.label v)
let post v = printf "post %d@." (G.V.label v)
let () = printf "iter:@."; Dfs.iter_component ~pre ~post g w
let () = printf "prefix:@."; Dfs.prefix_component pre g w
let () =
printf "step:@.";
let rec visit it =
let v = Dfs.get it in
printf "visit %d@." (G.V.label v);
visit (Dfs.step it)
in
try visit (Dfs.start g) with Exit -> ()
end
(********************************************)
Ford - Fulkerson and
(********************************************)
module FF_Goldberg = struct
module G = Persistent.Digraph.ConcreteLabeled(Int)(Int)
let add_edge g v1 l v2 = G.add_edge_e g (G.E.create v1 l v2)
let g = G.empty
let g = add_edge g 1 16 2
let g = add_edge g 1 13 3
let g = add_edge g 2 10 3
let g = add_edge g 3 4 2
let g = add_edge g 2 12 4
let g = add_edge g 4 9 3
let g = add_edge g 3 14 5
let g = add_edge g 5 7 4
let g = add_edge g 4 20 6
let g = add_edge g 5 4 6
module F = struct
type label = int
type t = int
let max_capacity x = x
let min_capacity _ = 0
let flow _ = 0
let add = (+)
let sub = (-)
let compare = compare
let zero = 0
end
module FF = Flow.Ford_Fulkerson(G)(F)
module Gold = Flow.Goldberg(G)(F)
let () =
assert (snd (FF.maxflow g 1 6) = 23);
assert (snd (Gold.maxflow g 1 6) = 23);
assert (snd (FF.maxflow g 1 1) = 0);
assert (snd (Gold.maxflow g 1 1) = 0)
module G2 =
Persistent.Digraph.ConcreteLabeled
(Int)
(struct include Util.OTProduct(Int)(Int) let default = 0, 0 end)
let add_edge g v1 l v2 = G2.add_edge_e g (G2.E.create v1 l v2)
let g = G2.empty
let g = add_edge g 1 (1, 1) 2
let g = add_edge g 1 (3, 0) 3
let g = add_edge g 2 (1, 1) 3
let g = add_edge g 3 (1, 0) 2
let g = add_edge g 2 (3, 0) 4
let g = add_edge g 3 (1, 1) 4
module F2 = struct
type label = int * int
type t = int
let max_capacity = fst
let min_capacity _ = 0
let flow = snd
let add = (+)
let sub = (-)
let compare = compare
let zero = 0
end
module FF2 = Flow.Ford_Fulkerson(G2)(F2)
module Gold2 = Flow.Goldberg(G2)(F2)
let () =
assert (snd (FF2.maxflow g 1 4) = 2);
assert (snd (Gold2.maxflow g 1 4) = 2)
end
(********************************************)
(* Neighbourhood *)
(********************************************)
module Neighbourhood = struct
module G = Graph.Imperative.Graph.Concrete(Int)
open G
let g = create ()
let add = add_edge g
let () =
add 1 2;
add 1 3;
add 1 4;
add 2 5;
add 3 5;
add 4 5;
add 5 6
module N = Oper.Neighbourhood(G)
module V = N.Vertex_Set
let s2 = V.add 1 (V.singleton 5)
let () = assert (V.equal (N.set_from_vertex g 2) s2)
let s25 = V.add 1 (V.add 3 (V.add 4 (V.singleton 6)))
let () = assert (V.equal (N.set_from_vertices g [ 2; 5 ]) s25)
end
(********************************************)
(* Minimal seperators *)
(********************************************)
module Minsep = struct
module P = struct
module G = Graph.Persistent.Graph.Concrete(Int)
open G
let g = empty
let g = add_edge g 1 2
let g = add_edge g 1 3
let g = add_edge g 1 4
let g = add_edge g 2 5
let g = add_edge g 3 5
let g = add_edge g 4 5
let g = add_edge g 5 6
module M = Minsep.P(G)
module S = M.Vertex_Set
module VS = M.VSetset
let s5 = S.singleton 5
let s15 = S.add 1 s5
let s234 = S.add 2 (S.add 3 (S.singleton 4))
let bigs = VS.add s5 (VS.add s15 (VS.singleton s234))
let () = assert (VS.equal (M.set_of_allminsep g) bigs)
end
module I = struct
module G = Graph.Imperative.Graph.Abstract(struct type t = unit end)
open G
let g = create ()
let v1 = V.create ()
let v2 = V.create ()
let v3 = V.create ()
let v4 = V.create ()
let v5 = V.create ()
let v6 = V.create ()
let add = add_edge g
let () =
add v1 v2;
add v1 v3;
add v1 v4;
add v2 v5;
add v3 v5;
add v4 v5;
add v5 v6
module M = Minsep.I(G)
module S = M.Vertex_Set
module VS = M.VSetset
let s5 = S.singleton v5
let s15 = S.add v1 s5
let s234 = S.add v2 (S.add v3 (S.singleton v4))
let bigs = VS.add s5 (VS.add s15 (VS.singleton s234))
let () =
let _ = G.copy g in
assert (VS.equal (M.set_of_allminsep g) bigs)
end
end
(********************************************)
(* Checking signature *)
(********************************************)
(* check that signature [Sig_pack.S] (which is manually expanded) does not
forget anything *)
module type RightSigPack = sig
include Sig.IM with type V.label = int and type E.label = int
val find_vertex : t -> int -> V.t
include Oper.S with type g = t
module Dfs : sig
val iter : ?pre:(V.t -> unit) ->
?post:(V.t -> unit) -> t -> unit
val prefix : (V.t -> unit) -> t -> unit
val postfix : (V.t -> unit) -> t -> unit
val iter_component :
?pre:(V.t -> unit) ->
?post:(V.t -> unit) -> t -> V.t -> unit
val prefix_component : (V.t -> unit) -> t -> V.t -> unit
val postfix_component : (V.t -> unit) -> t -> V.t -> unit
val has_cycle : t -> bool
end
module Bfs : sig
val iter : (V.t -> unit) -> t -> unit
val iter_component : (V.t -> unit) -> t -> V.t -> unit
end
module Marking : sig
val dfs : t -> unit
val has_cycle : t -> bool
end
module Classic : sig
val divisors : int -> t
val de_bruijn : int -> t
val vertex_only : int -> t
val full : ?self:bool -> int -> t
end
module Rand : sig
val graph : ?loops:bool -> v:int -> e:int -> unit -> t
val labeled :
(V.t -> V.t -> E.label) ->
?loops:bool -> v:int -> e:int -> unit -> t
end
module Components : sig
val scc : t -> int*(V.t -> int)
val scc_array : t -> V.t list array
val scc_list : t -> V.t list list
end
val shortest_path : t -> V.t -> V.t -> E.t list * int
val ford_fulkerson : t -> V.t -> V.t -> (E.t -> int) * int
val goldberg : t -> V.t -> V.t -> (E.t -> int) * int
val dot_output : t -> string -> unit
end
module TestSigPack : RightSigPack = struct
include Pack.Digraph
type g = t
end
(*
Local Variables:
compile-command: "make -C .. check"
End:
*)
| null | https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/ocamlgraph/tests/check.ml | ocaml | ************************************************************************
This software is free software; you can redistribute it and/or
described in file LICENSE.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
************************************************************************
******************************************
******************************************
TODO: remove
find_edge
******************************************
******************************************
******************************************
******************************************
******************************************
******************************************
******************************************
Neighbourhood
******************************************
******************************************
Minimal seperators
******************************************
******************************************
Checking signature
******************************************
check that signature [Sig_pack.S] (which is manually expanded) does not
forget anything
Local Variables:
compile-command: "make -C .. check"
End:
| : a generic graph library for OCaml
Copyright ( C ) 2004 - 2007
, and
modify it under the terms of the GNU Library General Public
License version 2 , with the special exception on linking
open Graph
module Int = struct
type t = int
let compare = compare
let hash = Hashtbl.hash
let equal = (=)
let default = 0
end
module W = struct
type label = int
type t = int
let weight x = x
let zero = 0
let add = (+)
let compare = compare
end
Generic functions
module Generic = struct
Generic tests for imperative graphs
module Make
(G : Sig.I with type V.label = int)
(V : sig val v: int val e: int end) =
struct
module O = Oper.I(G)
let test_mirror g =
let g' = O.mirror g in
assert (G.nb_vertex g = G.nb_vertex g');
G.iter_edges (fun v1 v2 -> assert (G.mem_edge g' v2 v1)) g;
G.iter_edges (fun v1 v2 -> assert (G.mem_edge g v2 v1)) g';
()
end
let g = G.create ()
let () =
let v1 = G.V.create 1 in
let v2 = G.V.create 2 in
let v3 = G.V.create 3 in
test_mirror g;
G.add_edge g v1 v2;
G.add_edge g v1 v3;
G.add_edge g v2 v1;
G.add_edge g v2 v2;
G.add_edge g v2 v2;
test_mirror g;
assert (G.nb_vertex g = V.v && G.nb_edges g = V.e);
G.remove_vertex g v1;
assert (G.nb_vertex g = 2 && G.nb_edges g = 1);
G.remove_vertex g v2;
assert (G.nb_vertex g = 1 && G.nb_edges g = 0);
test_mirror g;
G.clear g;
assert (G.nb_vertex g = 0 && G.nb_edges g = 0)
end
let () =
let module A = Make
(Imperative.Digraph.ConcreteLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
let module A = Make
(Imperative.Graph.ConcreteLabeled(Int)(Int))
(struct let v = 3 let e = 3 end)
in
let module A = Make
(Imperative.Digraph.AbstractLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
let module A = Make
(Imperative.Graph.AbstractLabeled(Int)(Int))
(struct let v = 3 let e = 3 end)
in
let module A = Make
(Imperative.Digraph.Concrete(Int))
(struct let v = 3 let e = 4 end)
in
let module A = Make
(Imperative.Graph.Concrete(Int))
(struct let v = 3 let e = 3 end)
in
let module A = Make
(Imperative.Digraph.Abstract(Int))
(struct let v = 3 let e = 4 end)
in
let module A = Make
(Imperative.Graph.Abstract(Int))
(struct let v = 3 let e = 3 end)
in
let module A = Make
(Imperative.Digraph.ConcreteBidirectional(Int))
(struct let v = 3 let e = 4 end)
in
let module A = Make
(Imperative.Digraph.ConcreteBidirectionalLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
()
Generic tests for persistent graphs
module MakeP
(G : Sig.P with type V.label = int)
(V : sig val v: int val e: int end) =
struct
module O = Oper.P(G)
let test_mirror g =
let g' = O.mirror g in
assert (G.nb_vertex g = G.nb_vertex g')
let () =
let g = G.empty in
let v1 = G.V.create 1 in
let v2 = G.V.create 2 in
let v3 = G.V.create 3 in
test_mirror g;
let g = G.add_edge g v1 v2 in
let g = G.add_edge g v1 v3 in
let g = G.add_edge g v2 v1 in
let g = G.add_edge g v2 v2 in
let g = G.add_edge g v2 v2 in
test_mirror g;
assert (G.nb_vertex g = V.v && G.nb_edges g = V.e);
let g = G.remove_vertex g v1 in
assert (G.nb_vertex g = 2 && G.nb_edges g = 1);
let g = G.remove_vertex g v2 in
assert (G.nb_vertex g = 1 && G.nb_edges g = 0);
test_mirror g
end
let () =
let module A = MakeP
(Persistent.Digraph.ConcreteLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
let module A = MakeP
(Persistent.Graph.ConcreteLabeled(Int)(Int))
(struct let v = 3 let e = 3 end)
in
let module A = MakeP
(Persistent.Digraph.AbstractLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
let module A = MakeP
(Persistent.Graph.AbstractLabeled(Int)(Int))
(struct let v = 3 let e = 3 end)
in
let module A = MakeP
(Persistent.Digraph.Concrete(Int))
(struct let v = 3 let e = 4 end)
in
let module A = MakeP
(Persistent.Graph.Concrete(Int))
(struct let v = 3 let e = 3 end)
in
let module A = MakeP
(Persistent.Digraph.Abstract(Int))
(struct let v = 3 let e = 4 end)
in
let module A = MakeP
(Persistent.Graph.Abstract(Int))
(struct let v = 3 let e = 3 end)
in
let module A = MakeP
(Persistent.Digraph.ConcreteBidirectional(Int))
(struct let v = 3 let e = 4 end)
in
let module A = MakeP
(Persistent.Digraph.ConcreteBidirectionalLabeled(Int)(Int))
(struct let v = 3 let e = 4 end)
in
()
module Make2
(G : Sig.I
with type V.t = int and type E.label = int and type E.t = int * int * int)
=
struct
let g = G.create ()
let test_exn v1 v2 =
assert (G.find_all_edges g v1 v2 = []);
try
let _ = G.find_edge g v1 v2 in
assert false
with Not_found ->
()
let () =
let e1 = 1, 0, 2 in
let e2 = 1, 1, 3 in
let e2' = 1, 2, 3 in
let e3 = 2, 2, 1 in
G.add_edge_e g e1;
G.add_edge_e g e2;
G.add_edge_e g e2';
G.add_edge_e g e3;
G.add_edge_e g e3;
assert (G.find_edge g 1 2 = e1);
assert (List.length (G.find_all_edges g 1 3) = 2);
test_exn 2 3;
test_exn 2 4;
test_exn 5 2;
G.remove_vertex g 2;
assert (G.nb_vertex g = 2 && G.nb_edges g = 2)
end
let () =
let module D = Make2(Imperative.Digraph.ConcreteLabeled(Int)(Int)) in
D.test_exn 3 1;
let module G = Imperative.Graph.ConcreteLabeled(Int)(Int) in
let module G2 = Make2(G) in
assert (G.find_edge G2.g 3 1 = (3, 1, 1))
end
Dijkstra
module Dijkstra = struct
module TestDijkstra
(G : Sig.G with type V.label = int and type E.label = int)
(B : Builder.S with module G = G) =
struct
let g = B.empty ()
let v1 = G.V.create 1
let g = B.add_vertex g v1
let v2 = G.V.create 2
let g = B.add_vertex g v2
let v3 = G.V.create 3
let g = B.add_vertex g v3
let v4 = G.V.create 4
let g = B.add_vertex g v4
let v5 = G.V.create 5
let g = B.add_vertex g v5
let g = B.add_edge_e g (G.E.create v1 10 v2)
let g = B.add_edge_e g (G.E.create v2 50 v3)
let g = B.add_edge_e g (G.E.create v1 30 v4)
let g = B.add_edge_e g (G.E.create v1 100 v5)
let g = B.add_edge_e g (G.E.create v3 10 v5)
let g = B.add_edge_e g (G.E.create v4 20 v3)
let g = B.add_edge_e g (G.E.create v4 60 v5)
module Dij = Path.Dijkstra(G)(W)
module Dfs = Traverse.Dfs(G)
let test g i j w l =
let p,w' = Dij.shortest_path g i j in
assert (w' = w && List.length p = l)
let test_not_found g i j =
try let _ = Dij.shortest_path g i j in assert false with Not_found -> ()
let () = test g v1 v5 60 3
let () = test g v1 v1 0 0
let () = if G.is_directed then test_not_found g v5 v1
let () = assert (not (Dfs.has_cycle g))
let gc = B.add_edge_e g (G.E.create v5 10 v1)
let v6 = G.V.create 6
let gc = B.add_vertex gc v6
let () = if G.is_directed then test gc v1 v5 60 3
let () = test gc v5 v1 10 1
let () = test_not_found gc v1 v6
let () = assert (Dfs.has_cycle gc)
end
Dijkstra on Persistent Directed Labeled Graphs
module G = Persistent.Digraph.ConcreteLabeled(Int)(Int)
module Test1 = TestDijkstra(G)(Builder.P(G))
Dijkstra on Persistent Directed Abstract Labeled Graphs
module G2 = Persistent.Digraph.AbstractLabeled(Int)(Int)
module Test2 = TestDijkstra(G2)(Builder.P(G2))
Dijkstra on Imperative Hashed Directed Labeled Graphs
module G3 = Imperative.Digraph.ConcreteLabeled(Int)(Int)
module Test3 = TestDijkstra(G3)(Builder.I(G3))
end
Traversal
module Traversal = struct
module G = Imperative.Digraph.AbstractLabeled(Int)(Int)
module Dfs = Traverse.Dfs(G)
module Mark = Traverse.Mark(G)
let g = G.create ()
let newv g = let v = G.V.create 0 in G.add_vertex g v; v
let v1 = newv g
let v2 = newv g
let v3 = newv g
let v4 = newv g
let v5 = newv g
let add_edge g v1 l v2 = G.add_edge_e g (G.E.create v1 l v2)
let () =
add_edge g v1 10 v2;
add_edge g v2 50 v3;
add_edge g v1 30 v4;
add_edge g v1 100 v5;
add_edge g v3 10 v5;
add_edge g v4 20 v3;
add_edge g v4 60 v5
let () = assert (not (Mark.has_cycle g) && not (Dfs.has_cycle g))
let v6 = newv g
let () = assert (not (Mark.has_cycle g) && not (Dfs.has_cycle g))
let () = add_edge g v5 10 v1
let () = assert (Mark.has_cycle g && Dfs.has_cycle g)
debug dfs / Cormen p 479
let g = G.create ()
let newv i = let v = G.V.create i in G.add_vertex g v; v
let u = newv 1
let v = newv 2
let w = newv 3
let x = newv 4
let y = newv 5
let z = newv 6
let edge a b = add_edge g a 0 b
let () =
edge u v; edge u x;
edge v y;
edge w y; edge w z;
edge x v;
edge y x;
edge z z
open Format
let pre v = printf "pre %d@." (G.V.label v)
let post v = printf "post %d@." (G.V.label v)
let () = printf "iter:@."; Dfs.iter_component ~pre ~post g w
let () = printf "prefix:@."; Dfs.prefix_component pre g w
let () =
printf "step:@.";
let rec visit it =
let v = Dfs.get it in
printf "visit %d@." (G.V.label v);
visit (Dfs.step it)
in
try visit (Dfs.start g) with Exit -> ()
end
Ford - Fulkerson and
module FF_Goldberg = struct
module G = Persistent.Digraph.ConcreteLabeled(Int)(Int)
let add_edge g v1 l v2 = G.add_edge_e g (G.E.create v1 l v2)
let g = G.empty
let g = add_edge g 1 16 2
let g = add_edge g 1 13 3
let g = add_edge g 2 10 3
let g = add_edge g 3 4 2
let g = add_edge g 2 12 4
let g = add_edge g 4 9 3
let g = add_edge g 3 14 5
let g = add_edge g 5 7 4
let g = add_edge g 4 20 6
let g = add_edge g 5 4 6
module F = struct
type label = int
type t = int
let max_capacity x = x
let min_capacity _ = 0
let flow _ = 0
let add = (+)
let sub = (-)
let compare = compare
let zero = 0
end
module FF = Flow.Ford_Fulkerson(G)(F)
module Gold = Flow.Goldberg(G)(F)
let () =
assert (snd (FF.maxflow g 1 6) = 23);
assert (snd (Gold.maxflow g 1 6) = 23);
assert (snd (FF.maxflow g 1 1) = 0);
assert (snd (Gold.maxflow g 1 1) = 0)
module G2 =
Persistent.Digraph.ConcreteLabeled
(Int)
(struct include Util.OTProduct(Int)(Int) let default = 0, 0 end)
let add_edge g v1 l v2 = G2.add_edge_e g (G2.E.create v1 l v2)
let g = G2.empty
let g = add_edge g 1 (1, 1) 2
let g = add_edge g 1 (3, 0) 3
let g = add_edge g 2 (1, 1) 3
let g = add_edge g 3 (1, 0) 2
let g = add_edge g 2 (3, 0) 4
let g = add_edge g 3 (1, 1) 4
module F2 = struct
type label = int * int
type t = int
let max_capacity = fst
let min_capacity _ = 0
let flow = snd
let add = (+)
let sub = (-)
let compare = compare
let zero = 0
end
module FF2 = Flow.Ford_Fulkerson(G2)(F2)
module Gold2 = Flow.Goldberg(G2)(F2)
let () =
assert (snd (FF2.maxflow g 1 4) = 2);
assert (snd (Gold2.maxflow g 1 4) = 2)
end
module Neighbourhood = struct
module G = Graph.Imperative.Graph.Concrete(Int)
open G
let g = create ()
let add = add_edge g
let () =
add 1 2;
add 1 3;
add 1 4;
add 2 5;
add 3 5;
add 4 5;
add 5 6
module N = Oper.Neighbourhood(G)
module V = N.Vertex_Set
let s2 = V.add 1 (V.singleton 5)
let () = assert (V.equal (N.set_from_vertex g 2) s2)
let s25 = V.add 1 (V.add 3 (V.add 4 (V.singleton 6)))
let () = assert (V.equal (N.set_from_vertices g [ 2; 5 ]) s25)
end
module Minsep = struct
module P = struct
module G = Graph.Persistent.Graph.Concrete(Int)
open G
let g = empty
let g = add_edge g 1 2
let g = add_edge g 1 3
let g = add_edge g 1 4
let g = add_edge g 2 5
let g = add_edge g 3 5
let g = add_edge g 4 5
let g = add_edge g 5 6
module M = Minsep.P(G)
module S = M.Vertex_Set
module VS = M.VSetset
let s5 = S.singleton 5
let s15 = S.add 1 s5
let s234 = S.add 2 (S.add 3 (S.singleton 4))
let bigs = VS.add s5 (VS.add s15 (VS.singleton s234))
let () = assert (VS.equal (M.set_of_allminsep g) bigs)
end
module I = struct
module G = Graph.Imperative.Graph.Abstract(struct type t = unit end)
open G
let g = create ()
let v1 = V.create ()
let v2 = V.create ()
let v3 = V.create ()
let v4 = V.create ()
let v5 = V.create ()
let v6 = V.create ()
let add = add_edge g
let () =
add v1 v2;
add v1 v3;
add v1 v4;
add v2 v5;
add v3 v5;
add v4 v5;
add v5 v6
module M = Minsep.I(G)
module S = M.Vertex_Set
module VS = M.VSetset
let s5 = S.singleton v5
let s15 = S.add v1 s5
let s234 = S.add v2 (S.add v3 (S.singleton v4))
let bigs = VS.add s5 (VS.add s15 (VS.singleton s234))
let () =
let _ = G.copy g in
assert (VS.equal (M.set_of_allminsep g) bigs)
end
end
module type RightSigPack = sig
include Sig.IM with type V.label = int and type E.label = int
val find_vertex : t -> int -> V.t
include Oper.S with type g = t
module Dfs : sig
val iter : ?pre:(V.t -> unit) ->
?post:(V.t -> unit) -> t -> unit
val prefix : (V.t -> unit) -> t -> unit
val postfix : (V.t -> unit) -> t -> unit
val iter_component :
?pre:(V.t -> unit) ->
?post:(V.t -> unit) -> t -> V.t -> unit
val prefix_component : (V.t -> unit) -> t -> V.t -> unit
val postfix_component : (V.t -> unit) -> t -> V.t -> unit
val has_cycle : t -> bool
end
module Bfs : sig
val iter : (V.t -> unit) -> t -> unit
val iter_component : (V.t -> unit) -> t -> V.t -> unit
end
module Marking : sig
val dfs : t -> unit
val has_cycle : t -> bool
end
module Classic : sig
val divisors : int -> t
val de_bruijn : int -> t
val vertex_only : int -> t
val full : ?self:bool -> int -> t
end
module Rand : sig
val graph : ?loops:bool -> v:int -> e:int -> unit -> t
val labeled :
(V.t -> V.t -> E.label) ->
?loops:bool -> v:int -> e:int -> unit -> t
end
module Components : sig
val scc : t -> int*(V.t -> int)
val scc_array : t -> V.t list array
val scc_list : t -> V.t list list
end
val shortest_path : t -> V.t -> V.t -> E.t list * int
val ford_fulkerson : t -> V.t -> V.t -> (E.t -> int) * int
val goldberg : t -> V.t -> V.t -> (E.t -> int) * int
val dot_output : t -> string -> unit
end
module TestSigPack : RightSigPack = struct
include Pack.Digraph
type g = t
end
|
53050c5553534fea39ca2b8282bb7ca88630dd7ed04f4889304eba0f70a55983 | shentufoundation/deepsea | test2.ml | open Sast
let _ =
let lexbuf = Lexing.from_channel stdin in
let program = Parser.program Scanner.token lexbuf in
let sprogram = Semant.check program in
print_endline (string_of_sprogram sprogram)
| null | https://raw.githubusercontent.com/shentufoundation/deepsea/970576a97c8992655ed2f173f576502d73b827e1/src/OpenSC/src/test2.ml | ocaml | open Sast
let _ =
let lexbuf = Lexing.from_channel stdin in
let program = Parser.program Scanner.token lexbuf in
let sprogram = Semant.check program in
print_endline (string_of_sprogram sprogram)
| |
9c59b97be7a64f1bde0e45f758afcba1f8d13ef380c513a0c4753f420b473b36 | brownplt/LambdaS5 | fixpoint.ml | (**************************************************************************)
(* *)
: a generic graph library for OCaml
Copyright ( C ) 2004 - 2010
, and
(* *)
(* This software is free software; you can redistribute it and/or *)
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
Copyright ( c ) 2010 - 2011 Technische Universitaet Muenchen
* < >
* All rights reserved .
* Markus W. Weissmann <>
* All rights reserved. *)
maximum fixpoint point calculation with the work list algorithm ;
to implement a concrete analysis , implement a module that satisfies
the Rules signature . Such a module in the Analysis functor gives a
complete analysis / optimization module that works on a CFG .
to implement a concrete analysis, implement a module that satisfies
the Rules signature. Such a module in the Analysis functor gives a
complete analysis/optimization module that works on a CFG.
*)
type direction = Forward | Backward
module type Analysis = sig
type data
type label
type vertex
type cfg
val direction : direction
val join : data -> data -> data
val equal : data -> data -> bool
val analyze : label -> data -> data
end
(** Minimal graph signature for work list algorithm *)
module type G = sig
type t
module V : Sig.COMPARABLE
module E : sig
type t
type label
val label : t -> label
val dst : t -> V.t
val src : t -> V.t
end
val fold_vertex : (V.t -> 'a -> 'a) -> t -> 'a -> 'a
val succ_e : t -> V.t -> E.t list
val pred_e : t -> V.t -> E.t list
val succ : t -> V.t -> V.t list
val pred : t -> V.t -> V.t list
end
module Make
(G : G)
(A : Analysis with type cfg = G.t with type label = G.E.label
with type vertex = G.V.t) =
struct
module M = Map.Make(G.V)
module N = Set.Make(G.V)
let analyze initial cfg =
let (nodes, data) =
G.fold_vertex
(fun vertex (n, m) ->
(N.add vertex n, M.add vertex (initial vertex) m))
cfg (N.empty, M.empty)
in
(* generate an associative map to quickly find the incoming
* (outgoing) edges of a node during the anaysis store a pair of
* a partially applied analysis function and the corresponding
* 'partner' node *)
let nodemap : ((A.data -> A.data) * G.V.t) list M.t =
let add = match A.direction with
| Forward ->
(fun n ->
let preds = G.pred_e cfg n in
List.map
(fun edge -> (A.analyze (G.E.label edge), G.E.src edge))
preds)
| Backward ->
(fun n ->
let succs = G.succ_e cfg n in
List.map
(fun edge -> (A.analyze (G.E.label edge), G.E.dst edge))
succs)
in
G.fold_vertex (fun vertex m -> M.add vertex (add vertex) m) cfg M.empty
in
let rec worklist (data : A.data M.t) (wl : N.t) =
(* 'meet' an arbitrary number of data-sets *)
let meet ~default = function
| [] -> default
| [x] -> x
| x::xs -> List.fold_left (fun a b -> A.join a b) x xs
in
analyze one node , creating a new data - set and node - worklist
as necessary
as necessary *)
let analyze_node analysis n d wl =
match analysis d n with
| None -> (d, wl)
| Some d' -> (d', N.add n wl)
in
try
(* get some node from the node-set -- this will eventually trigger
an exception *)
let n = N.choose wl in
(* remove the chosen node from the set *)
let wl = N.remove n wl in
let (f, ns) = match A.direction with
analyze all INCOMING edges of all SUCCESSOR nodes of the
node to be processed
node to be processed *)
| Forward ->
process one node : analyze all it 's incoming edges
and merge the resulting data ;
if the result is different to the previously stored data
for this node , return a new tuple , else None
and merge the resulting data;
if the result is different to the previously stored data
for this node, return a new tuple, else None *)
let new_node_data (data : A.data M.t) node =
let edges = M.find node nodemap in
let analysis =
List.map
(fun (f, src) -> f (M.find src data)) edges
in
let node_data = M.find node data in
let node_data' = meet ~default:node_data analysis in
if A.equal node_data node_data' then None
else Some (M.add node node_data' data)
in
(new_node_data, G.succ cfg n)
(* analyze all OUTGOING edges of all PREDECESSOR nodes
of the node to be processed *)
| Backward ->
let new_node_data (data : A.data M.t) node =
let edges = M.find node nodemap in
let analysis =
List.map
(fun (f, dst) -> f (M.find dst data)) edges
in
let node_data = M.find node data in
let node_data' = meet ~default:node_data analysis in
if A.equal node_data node_data' then None
else Some (M.add node node_data' data)
in
(new_node_data, G.pred cfg n)
in
(* analyze all successor nodes by analyzing all of their
predecessor edges *)
let (data, wl) =
List.fold_left (fun (d, wl) n -> analyze_node f n d wl)
(data, wl) ns
in
(* do a recursive call: the recursion will eventually end with a
* Not_found exception when no nodes are left in the work list *)
worklist data wl
with Not_found -> data
in
let data = worklist data nodes in
(fun n -> M.find n data)
end
| null | https://raw.githubusercontent.com/brownplt/LambdaS5/f0bf5c7baf1daa4ead4e398ba7d430bedb7de9cf/src/ocamlgraph-1.8.1/src/fixpoint.ml | ocaml | ************************************************************************
This software is free software; you can redistribute it and/or
described in file LICENSE.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
************************************************************************
* Minimal graph signature for work list algorithm
generate an associative map to quickly find the incoming
* (outgoing) edges of a node during the anaysis store a pair of
* a partially applied analysis function and the corresponding
* 'partner' node
'meet' an arbitrary number of data-sets
get some node from the node-set -- this will eventually trigger
an exception
remove the chosen node from the set
analyze all OUTGOING edges of all PREDECESSOR nodes
of the node to be processed
analyze all successor nodes by analyzing all of their
predecessor edges
do a recursive call: the recursion will eventually end with a
* Not_found exception when no nodes are left in the work list | : a generic graph library for OCaml
Copyright ( C ) 2004 - 2010
, and
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
Copyright ( c ) 2010 - 2011 Technische Universitaet Muenchen
* < >
* All rights reserved .
* Markus W. Weissmann <>
* All rights reserved. *)
maximum fixpoint point calculation with the work list algorithm ;
to implement a concrete analysis , implement a module that satisfies
the Rules signature . Such a module in the Analysis functor gives a
complete analysis / optimization module that works on a CFG .
to implement a concrete analysis, implement a module that satisfies
the Rules signature. Such a module in the Analysis functor gives a
complete analysis/optimization module that works on a CFG.
*)
type direction = Forward | Backward
module type Analysis = sig
type data
type label
type vertex
type cfg
val direction : direction
val join : data -> data -> data
val equal : data -> data -> bool
val analyze : label -> data -> data
end
module type G = sig
type t
module V : Sig.COMPARABLE
module E : sig
type t
type label
val label : t -> label
val dst : t -> V.t
val src : t -> V.t
end
val fold_vertex : (V.t -> 'a -> 'a) -> t -> 'a -> 'a
val succ_e : t -> V.t -> E.t list
val pred_e : t -> V.t -> E.t list
val succ : t -> V.t -> V.t list
val pred : t -> V.t -> V.t list
end
module Make
(G : G)
(A : Analysis with type cfg = G.t with type label = G.E.label
with type vertex = G.V.t) =
struct
module M = Map.Make(G.V)
module N = Set.Make(G.V)
let analyze initial cfg =
let (nodes, data) =
G.fold_vertex
(fun vertex (n, m) ->
(N.add vertex n, M.add vertex (initial vertex) m))
cfg (N.empty, M.empty)
in
let nodemap : ((A.data -> A.data) * G.V.t) list M.t =
let add = match A.direction with
| Forward ->
(fun n ->
let preds = G.pred_e cfg n in
List.map
(fun edge -> (A.analyze (G.E.label edge), G.E.src edge))
preds)
| Backward ->
(fun n ->
let succs = G.succ_e cfg n in
List.map
(fun edge -> (A.analyze (G.E.label edge), G.E.dst edge))
succs)
in
G.fold_vertex (fun vertex m -> M.add vertex (add vertex) m) cfg M.empty
in
let rec worklist (data : A.data M.t) (wl : N.t) =
let meet ~default = function
| [] -> default
| [x] -> x
| x::xs -> List.fold_left (fun a b -> A.join a b) x xs
in
analyze one node , creating a new data - set and node - worklist
as necessary
as necessary *)
let analyze_node analysis n d wl =
match analysis d n with
| None -> (d, wl)
| Some d' -> (d', N.add n wl)
in
try
let n = N.choose wl in
let wl = N.remove n wl in
let (f, ns) = match A.direction with
analyze all INCOMING edges of all SUCCESSOR nodes of the
node to be processed
node to be processed *)
| Forward ->
process one node : analyze all it 's incoming edges
and merge the resulting data ;
if the result is different to the previously stored data
for this node , return a new tuple , else None
and merge the resulting data;
if the result is different to the previously stored data
for this node, return a new tuple, else None *)
let new_node_data (data : A.data M.t) node =
let edges = M.find node nodemap in
let analysis =
List.map
(fun (f, src) -> f (M.find src data)) edges
in
let node_data = M.find node data in
let node_data' = meet ~default:node_data analysis in
if A.equal node_data node_data' then None
else Some (M.add node node_data' data)
in
(new_node_data, G.succ cfg n)
| Backward ->
let new_node_data (data : A.data M.t) node =
let edges = M.find node nodemap in
let analysis =
List.map
(fun (f, dst) -> f (M.find dst data)) edges
in
let node_data = M.find node data in
let node_data' = meet ~default:node_data analysis in
if A.equal node_data node_data' then None
else Some (M.add node node_data' data)
in
(new_node_data, G.pred cfg n)
in
let (data, wl) =
List.fold_left (fun (d, wl) n -> analyze_node f n d wl)
(data, wl) ns
in
worklist data wl
with Not_found -> data
in
let data = worklist data nodes in
(fun n -> M.find n data)
end
|
38755bff62a8f26857de4f554c56f81a478a90e81fb7106a9b9aab66c303c671 | Decentralized-Pictures/T4L3NT | client_baking_blocks.mli | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Protocol
open Alpha_context
type block_info = {
hash : Block_hash.t;
chain_id : Chain_id.t;
predecessor : Block_hash.t;
fitness : Bytes.t list;
timestamp : Time.Protocol.t;
protocol : Protocol_hash.t;
next_protocol : Protocol_hash.t;
proto_level : int;
level : Raw_level.t;
context : Context_hash.t;
}
val info :
#Protocol_client_context.rpc_context ->
?chain:Chain_services.chain ->
Block_services.block ->
block_info tzresult Lwt.t
val monitor_valid_blocks :
#Protocol_client_context.rpc_context ->
?chains:Chain_services.chain list ->
?protocols:Protocol_hash.t list ->
next_protocols:Protocol_hash.t list option ->
unit ->
block_info tzresult Lwt_stream.t tzresult Lwt.t
val monitor_heads :
#Protocol_client_context.rpc_context ->
next_protocols:Protocol_hash.t list option ->
Chain_services.chain ->
block_info tzresult Lwt_stream.t tzresult Lwt.t
val blocks_from_current_cycle :
#Protocol_client_context.rpc_context ->
?chain:Chain_services.chain ->
Block_services.block ->
?offset:int32 ->
unit ->
Block_hash.t list tzresult Lwt.t
| null | https://raw.githubusercontent.com/Decentralized-Pictures/T4L3NT/6d4d3edb2d73575384282ad5a633518cba3d29e3/src/proto_alpha/lib_delegate/client_baking_blocks.mli | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*************************************************************************** | Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Protocol
open Alpha_context
type block_info = {
hash : Block_hash.t;
chain_id : Chain_id.t;
predecessor : Block_hash.t;
fitness : Bytes.t list;
timestamp : Time.Protocol.t;
protocol : Protocol_hash.t;
next_protocol : Protocol_hash.t;
proto_level : int;
level : Raw_level.t;
context : Context_hash.t;
}
val info :
#Protocol_client_context.rpc_context ->
?chain:Chain_services.chain ->
Block_services.block ->
block_info tzresult Lwt.t
val monitor_valid_blocks :
#Protocol_client_context.rpc_context ->
?chains:Chain_services.chain list ->
?protocols:Protocol_hash.t list ->
next_protocols:Protocol_hash.t list option ->
unit ->
block_info tzresult Lwt_stream.t tzresult Lwt.t
val monitor_heads :
#Protocol_client_context.rpc_context ->
next_protocols:Protocol_hash.t list option ->
Chain_services.chain ->
block_info tzresult Lwt_stream.t tzresult Lwt.t
val blocks_from_current_cycle :
#Protocol_client_context.rpc_context ->
?chain:Chain_services.chain ->
Block_services.block ->
?offset:int32 ->
unit ->
Block_hash.t list tzresult Lwt.t
|
e1aa8eb535a1cb83f0a8eef0c497d1bff532a402235e02343ad426bacfe5f2bc | janestreet/ecaml | marker.mli | * A [ Marker.t ] specifies a position in a buffer relative to the surrounding text . A
marker changes its offset from the beginning of the buffer automatically whenever text
is inserted or deleted , so that it stays with the two characters on either side of it .
[ ( Info - goto - node " ( " ) ]
marker changes its offset from the beginning of the buffer automatically whenever text
is inserted or deleted, so that it stays with the two characters on either side of it.
[(Info-goto-node "(elisp)Markers")] *)
open! Core
open! Import
include Value.Subtype
* When you insert text directly at the place where a marker points , there are two
possible ways to relocate that marker : it can point before the inserted text , or point
after it . You can specify which one a given marker should do by setting its
" insertion type " . [ ( Info - goto - node " ( elisp)Marker Insertion Types " ) ] .
possible ways to relocate that marker: it can point before the inserted text, or point
after it. You can specify which one a given marker should do by setting its
"insertion type". [(Info-goto-node "(elisp)Marker Insertion Types")]. *)
module Insertion_type : sig
type t =
| After_inserted_text
| Before_inserted_text
[@@deriving sexp_of]
end
(** [(describe-function 'marker-buffer)]
[(Info-goto-node "(elisp)Information from Markers")] *)
val buffer : t -> Buffer.t option
(** [(describe-function 'marker-insertion-type)]
[(Info-goto-node "(elisp)Marker Insertion Types")] *)
val insertion_type : t -> Insertion_type.t
(** [(describe-function 'marker-position)]
[(Info-goto-node "(elisp)Information from Markers")] *)
val position : t -> Position.t option
* [ ( describe - function ' make - marker ) ]
[ ( Info - goto - node " ( elisp)Creating Markers " ) ]
[(Info-goto-node "(elisp)Creating Markers")] *)
val create : unit -> t
* [ ( describe - function ' copy - marker ) ]
[ ( Info - goto - node " ( elisp)Creating Markers " ) ]
[(Info-goto-node "(elisp)Creating Markers")] *)
val copy : t -> t
(** [(describe-function 'set-marker-insertion-type)]
[(Info-goto-node "(elisp)Marker Insertion Types")] *)
val set_insertion_type : t -> Insertion_type.t -> unit
(** [(describe-function 'set-marker)]
[(Info-goto-node "(elisp)Moving Markers")] *)
val set : t -> Buffer.t -> Position.t -> unit
| null | https://raw.githubusercontent.com/janestreet/ecaml/7c16e5720ee1da04e0757cf185a074debf9088df/src/marker.mli | ocaml | * [(describe-function 'marker-buffer)]
[(Info-goto-node "(elisp)Information from Markers")]
* [(describe-function 'marker-insertion-type)]
[(Info-goto-node "(elisp)Marker Insertion Types")]
* [(describe-function 'marker-position)]
[(Info-goto-node "(elisp)Information from Markers")]
* [(describe-function 'set-marker-insertion-type)]
[(Info-goto-node "(elisp)Marker Insertion Types")]
* [(describe-function 'set-marker)]
[(Info-goto-node "(elisp)Moving Markers")] | * A [ Marker.t ] specifies a position in a buffer relative to the surrounding text . A
marker changes its offset from the beginning of the buffer automatically whenever text
is inserted or deleted , so that it stays with the two characters on either side of it .
[ ( Info - goto - node " ( " ) ]
marker changes its offset from the beginning of the buffer automatically whenever text
is inserted or deleted, so that it stays with the two characters on either side of it.
[(Info-goto-node "(elisp)Markers")] *)
open! Core
open! Import
include Value.Subtype
* When you insert text directly at the place where a marker points , there are two
possible ways to relocate that marker : it can point before the inserted text , or point
after it . You can specify which one a given marker should do by setting its
" insertion type " . [ ( Info - goto - node " ( elisp)Marker Insertion Types " ) ] .
possible ways to relocate that marker: it can point before the inserted text, or point
after it. You can specify which one a given marker should do by setting its
"insertion type". [(Info-goto-node "(elisp)Marker Insertion Types")]. *)
module Insertion_type : sig
type t =
| After_inserted_text
| Before_inserted_text
[@@deriving sexp_of]
end
val buffer : t -> Buffer.t option
val insertion_type : t -> Insertion_type.t
val position : t -> Position.t option
* [ ( describe - function ' make - marker ) ]
[ ( Info - goto - node " ( elisp)Creating Markers " ) ]
[(Info-goto-node "(elisp)Creating Markers")] *)
val create : unit -> t
* [ ( describe - function ' copy - marker ) ]
[ ( Info - goto - node " ( elisp)Creating Markers " ) ]
[(Info-goto-node "(elisp)Creating Markers")] *)
val copy : t -> t
val set_insertion_type : t -> Insertion_type.t -> unit
val set : t -> Buffer.t -> Position.t -> unit
|
7fac928a3f398aabd1e3af12abc2b31137f5cf1933608416743d5fe482b1d69e | degree9/enterprise | client.clj | (ns degree9.env.client)
(defmacro define [name & [default]]
`('goog-define ~name ~default))
| null | https://raw.githubusercontent.com/degree9/enterprise/65737c347e513d0a0bf94f2d4374935c7270185d/src/degree9/env/client.clj | clojure | (ns degree9.env.client)
(defmacro define [name & [default]]
`('goog-define ~name ~default))
| |
13dbf9078678661eb27ab089bf68deb934ccda5673972d0ccf8940c0c2438254 | S8A/htdp-exercises | ex502.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex502) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
[ NEList - of 1String ] - > [ NEList - of 1String ]
; creates a palindrome from s0
(define (mirror s0)
(append (all-but-last s0)
(list (last s0))
(reverse (all-but-last s0))))
(check-expect (mirror (explode "abc")) (explode "abcba"))
[ NEList - of X ] - > X
; extracts the last item from the list
(define (last l)
(cond
[(empty? (rest l)) (first l)]
[(cons? (rest l)) (last (rest l))]))
[ NEList - of X ] - > [ List - of X ]
; removes the last item from the list
(define (all-but-last l)
(cond
[(empty? (rest l)) '()]
[(cons? (rest l)) (cons (first l) (all-but-last (rest l)))]))
[ NEList - of X ] - > [ NEList - of X ]
; creates a palindrome from s0
(define (palindrome s0)
[ NEList - of X ] [ List - of X ] - > [ NEList - of X ]
; creates a palindrome from s and a
; accumulator a is the reverse of s0 without the items
; in s
(define (palindrome/a s a)
(cond
[(empty? (rest s)) (append s0 a)]
[(cons? (rest s)) (palindrome/a (rest s) (cons (first s) a))])))
(palindrome/a s0 '())))
(check-expect (palindrome (explode "abc")) (explode "abcba"))
| null | https://raw.githubusercontent.com/S8A/htdp-exercises/578e49834a9513f29ef81b7589b28081c5e0b69f/ex502.rkt | racket | about the language level of this file in a form that our tools can easily process.
creates a palindrome from s0
extracts the last item from the list
removes the last item from the list
creates a palindrome from s0
creates a palindrome from s and a
accumulator a is the reverse of s0 without the items
in s | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex502) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
[ NEList - of 1String ] - > [ NEList - of 1String ]
(define (mirror s0)
(append (all-but-last s0)
(list (last s0))
(reverse (all-but-last s0))))
(check-expect (mirror (explode "abc")) (explode "abcba"))
[ NEList - of X ] - > X
(define (last l)
(cond
[(empty? (rest l)) (first l)]
[(cons? (rest l)) (last (rest l))]))
[ NEList - of X ] - > [ List - of X ]
(define (all-but-last l)
(cond
[(empty? (rest l)) '()]
[(cons? (rest l)) (cons (first l) (all-but-last (rest l)))]))
[ NEList - of X ] - > [ NEList - of X ]
(define (palindrome s0)
[ NEList - of X ] [ List - of X ] - > [ NEList - of X ]
(define (palindrome/a s a)
(cond
[(empty? (rest s)) (append s0 a)]
[(cons? (rest s)) (palindrome/a (rest s) (cons (first s) a))])))
(palindrome/a s0 '())))
(check-expect (palindrome (explode "abc")) (explode "abcba"))
|
2e8d46afffa73542e15e9d8ef9a999495e702baf57a3b3c2c70d84edb0da5bae | babashka/nbb | core.cljs | (ns print-cli-args.core
(:require [clojure.string :as str]))
(def cmd-line-args (not-empty (js->clj (.slice js/process.argv 2))))
(println "Your command line arguments:"
(or (some->> cmd-line-args (str/join " "))
"None"))
| null | https://raw.githubusercontent.com/babashka/nbb/2ce6c0a4673342e92637fb28b432c6bc801fc5db/doc/publish/print-cli-args/src/print_cli_args/core.cljs | clojure | (ns print-cli-args.core
(:require [clojure.string :as str]))
(def cmd-line-args (not-empty (js->clj (.slice js/process.argv 2))))
(println "Your command line arguments:"
(or (some->> cmd-line-args (str/join " "))
"None"))
| |
429648a8464fdd85569b0279db64bb05066a4aeb5d2e660215ede016b7fc212c | exercism/haskell | ArmstrongNumbers.hs | module ArmstrongNumbers (armstrong) where
armstrong :: Integral a => a -> Bool
armstrong = error "You need to implement this function."
| null | https://raw.githubusercontent.com/exercism/haskell/2b98084efc7d5ab098975c462f7977ee19c2fd29/exercises/practice/armstrong-numbers/src/ArmstrongNumbers.hs | haskell | module ArmstrongNumbers (armstrong) where
armstrong :: Integral a => a -> Bool
armstrong = error "You need to implement this function."
| |
44a7737c12c789b2a7a23038f9b9c8f9853623a513c3841fba7ed2ba32380ca3 | silky/quipper | And_rev.hs | This file is part of Quipper . Copyright ( C ) 2011 - 2016 . Please see the
-- file COPYRIGHT for a list of authors, copyright holders, licensing,
-- and other details. All rights reserved.
--
-- ======================================================================
import Quipper
and_gate :: (Qubit, Qubit) -> Circ (Qubit)
and_gate (a, b) = do
c <- qinit False
qnot_at c `controlled` [a, b]
return c
and_list :: [Qubit] -> Circ Qubit
and_list [] = do
c <- qinit True
return c
and_list [q] = do
return q
and_list (q:t) = do
d <- and_list t
e <- and_gate (d, q)
return e
and_rev :: ([Qubit], Qubit) -> Circ ([Qubit], Qubit)
and_rev = classical_to_reversible and_list
main =
print_generic Preview and_rev (replicate 10 qubit, qubit)
| null | https://raw.githubusercontent.com/silky/quipper/1ef6d031984923d8b7ded1c14f05db0995791633/tests/And_rev.hs | haskell | file COPYRIGHT for a list of authors, copyright holders, licensing,
and other details. All rights reserved.
====================================================================== | This file is part of Quipper . Copyright ( C ) 2011 - 2016 . Please see the
import Quipper
and_gate :: (Qubit, Qubit) -> Circ (Qubit)
and_gate (a, b) = do
c <- qinit False
qnot_at c `controlled` [a, b]
return c
and_list :: [Qubit] -> Circ Qubit
and_list [] = do
c <- qinit True
return c
and_list [q] = do
return q
and_list (q:t) = do
d <- and_list t
e <- and_gate (d, q)
return e
and_rev :: ([Qubit], Qubit) -> Circ ([Qubit], Qubit)
and_rev = classical_to_reversible and_list
main =
print_generic Preview and_rev (replicate 10 qubit, qubit)
|
9a1f41b230d4d72f115161bb97ddae31600517983fe06ff4c5b6669cdc5f624c | MastodonC/kixi.datastore | s3.clj | (ns kixi.datastore.filestore.s3
(:require [amazonica.core :as aws]
[amazonica.aws.s3 :as s3]
[amazonica.aws.s3transfer :as s3t]
[byte-streams :as bs]
[clojure.core.async :as async :refer [go]]
[com.stuartsierra.component :as component]
[kixi.datastore.filestore :as fs :refer [FileStore FileStoreUploadCache]]
[kixi.datastore.filestore.command-handler :as ch]
[kixi.datastore.filestore.event-handler :as eh]
[kixi.datastore.filestore.upload :as up]
[kixi.datastore.time :as t]
[kixi.comms :as c]
[taoensso.timbre :as log :refer [error]])
(:import [com.amazonaws.services.s3.model AmazonS3Exception GeneratePresignedUrlRequest PartETag CompleteMultipartUploadRequest]))
(defn ensure-bucket
[creds bucket]
(when-not (s3/does-bucket-exist creds bucket)
(let [rules [{:max-age-seconds 3000 :allowed-origins ["*"] :allowed-methods [:PUT] :exposed-headers ["etag"] :allowed-headers ["*"]}
{:max-age-seconds 3000 :allowed-origins ["*"] :allowed-methods [:GET] :allowed-headers ["*"]}]]
(s3/create-bucket creds bucket)
(s3/set-bucket-cross-origin-configuration creds bucket {:rules rules}))))
(defn init-multi-part-upload-creator
[creds bucket]
(fn [id part-ranges]
(let [{:keys [upload-id] :as initate-resp}
(s3/initiate-multipart-upload creds :bucket-name bucket :key id)
client (com.amazonaws.services.s3.AmazonS3ClientBuilder/defaultClient)
links (vec (map-indexed
(fn [i p]
(assoc p :url
24hrs
req (doto (GeneratePresignedUrlRequest. bucket id com.amazonaws.HttpMethod/PUT)
(.setExpiration (.toDate exp))
(.addRequestParameter "partNumber" (str (inc i)))
(.addRequestParameter "uploadId" upload-id))]
(str (.generatePresignedUrl client req))))) part-ranges))]
{:upload-id upload-id
:upload-parts links})))
(defn complete-multi-part-upload-creator
[creds bucket]
(fn [id etags upload]
(try (let [upload-id (::up/id upload)
req (CompleteMultipartUploadRequest. bucket id upload-id (map-indexed #(PartETag. (inc %1) %2) etags))
client (com.amazonaws.services.s3.AmazonS3ClientBuilder/defaultClient)]
(.completeMultipartUpload client req)
[true nil nil])
(catch com.amazonaws.services.s3.model.AmazonS3Exception e
(cond
(clojure.string/starts-with? (.getMessage e) "Your proposed upload is smaller than the minimum allowed size")
[false :data-too-small (.getMessage e)]
(clojure.string/starts-with? (.getMessage e) "One or more of the specified parts could not be found")
[false :file-missing (.getMessage e)]
:else
(throw e))))))
(defn create-link
[creds bucket]
(fn [id]
(str
(s3/generate-presigned-url
creds
:bucket-name bucket
:key id
:expiration (t/minutes-from-now 30)
:method "PUT"))))
(defn object-size
[creds bucket id]
(try
(when-let [meta (s3/get-object-metadata creds bucket id)]
(:instance-length meta))
(catch AmazonS3Exception e
nil)))
(def unallowed-chars #"[^\p{Digit}\p{IsAlphabetic}]")
(def multi-hyphens #"-{2,}")
(def hyphen (clojure.string/re-quote-replacement "-"))
(defn clean
[s]
(-> s
(clojure.string/replace unallowed-chars hyphen)
(clojure.string/replace multi-hyphens hyphen)))
(defn sanitize-filename
[f-name]
(let [extension-dex (clojure.string/last-index-of f-name ".")
extension (subs f-name (inc extension-dex))]
(-> f-name
(subs 0 extension-dex)
clean
(str "." (clean extension)))))
(defn create-dload-link
[creds bucket id file-name expiry]
(let [header-overrides (com.amazonaws.services.s3.model.ResponseHeaderOverrides.)]
(when file-name
(.setContentDisposition header-overrides (str "attachment; filename=" (sanitize-filename file-name))))
(-> (s3/generate-presigned-url creds
:bucket-name bucket
:key id
:expiration expiry
:method "GET"
:response-headers header-overrides)
str)))
(defn complete-small-file-upload-creator
[creds bucket]
(fn [id part-ids _]
(if (s3/does-object-exist creds bucket id)
[true nil nil]
[false :file-missing nil])))
(defrecord S3
[communications filestore-upload-cache logging region endpoint access-key secret-key link-expiration-mins bucket client-options
creds]
FileStore
(exists [this id]
(s3/does-object-exist creds bucket id))
(size [this id]
(object-size creds bucket id))
(retrieve [this id]
(when (s3/does-object-exist creds bucket id)
(:object-content (s3/get-object creds bucket id))))
(create-link [this id file-name]
(when (s3/does-object-exist creds bucket id)
(create-dload-link creds bucket id file-name
(t/minutes-from-now link-expiration-mins))))
component/Lifecycle
(start [component]
(if-not creds
(let [c (merge {:endpoint endpoint}
(when secret-key
{:secret-key secret-key})
(when access-key
{:access-key access-key}))]
(log/info "Starting S3 FileStore - bucket:" bucket)
(ensure-bucket c bucket)
;; LEGACY
(c/attach-command-handler!
communications
:kixi.datastore/filestore
:kixi.datastore.filestore/create-upload-link
"1.0.0" (ch/create-upload-cmd-handler (create-link c bucket)))
;; NEW
(c/attach-validating-command-handler!
communications
:kixi.datastore/filestore-multi-part
:kixi.datastore.filestore/initiate-file-upload
"1.0.0" (ch/create-initiate-file-upload-cmd-handler
(create-link c bucket)
(init-multi-part-upload-creator c bucket)
filestore-upload-cache))
(c/attach-validating-command-handler!
communications
:kixi.datastore/filestore-multi-part-completed
:kixi.datastore.filestore/complete-file-upload
"1.0.0" (ch/create-complete-file-upload-cmd-handler
(complete-small-file-upload-creator c bucket)
(complete-multi-part-upload-creator c bucket)
filestore-upload-cache))
(c/attach-validating-event-handler!
communications
:kixi.datastore/filestore-file-upload-completed
:kixi.datastore.filestore/file-upload-completed
"1.0.0" (eh/create-file-upload-completed-event-handler
filestore-upload-cache))
(c/attach-validating-event-handler!
communications
:kixi.datastore/filestore-file-upload-failed
:kixi.datastore.filestore/file-upload-failed
"1.0.0" (eh/create-file-upload-failed-or-rejected-event-handler
filestore-upload-cache))
(c/attach-validating-event-handler!
communications
:kixi.datastore/filestore-file-upload-rejected
:kixi.datastore.filestore/file-upload-rejected
"1.0.0" (eh/create-file-upload-failed-or-rejected-event-handler
filestore-upload-cache))
(assoc component
:creds
c))
component))
(stop [component]
(log/info "Stopping S3 FileStore")
(if creds
(dissoc component
:creds)
component)))
| null | https://raw.githubusercontent.com/MastodonC/kixi.datastore/f33bba4b1fdd8c56cc7ac0f559ffe35254c9ca99/src/kixi/datastore/filestore/s3.clj | clojure | LEGACY
NEW | (ns kixi.datastore.filestore.s3
(:require [amazonica.core :as aws]
[amazonica.aws.s3 :as s3]
[amazonica.aws.s3transfer :as s3t]
[byte-streams :as bs]
[clojure.core.async :as async :refer [go]]
[com.stuartsierra.component :as component]
[kixi.datastore.filestore :as fs :refer [FileStore FileStoreUploadCache]]
[kixi.datastore.filestore.command-handler :as ch]
[kixi.datastore.filestore.event-handler :as eh]
[kixi.datastore.filestore.upload :as up]
[kixi.datastore.time :as t]
[kixi.comms :as c]
[taoensso.timbre :as log :refer [error]])
(:import [com.amazonaws.services.s3.model AmazonS3Exception GeneratePresignedUrlRequest PartETag CompleteMultipartUploadRequest]))
(defn ensure-bucket
[creds bucket]
(when-not (s3/does-bucket-exist creds bucket)
(let [rules [{:max-age-seconds 3000 :allowed-origins ["*"] :allowed-methods [:PUT] :exposed-headers ["etag"] :allowed-headers ["*"]}
{:max-age-seconds 3000 :allowed-origins ["*"] :allowed-methods [:GET] :allowed-headers ["*"]}]]
(s3/create-bucket creds bucket)
(s3/set-bucket-cross-origin-configuration creds bucket {:rules rules}))))
(defn init-multi-part-upload-creator
[creds bucket]
(fn [id part-ranges]
(let [{:keys [upload-id] :as initate-resp}
(s3/initiate-multipart-upload creds :bucket-name bucket :key id)
client (com.amazonaws.services.s3.AmazonS3ClientBuilder/defaultClient)
links (vec (map-indexed
(fn [i p]
(assoc p :url
24hrs
req (doto (GeneratePresignedUrlRequest. bucket id com.amazonaws.HttpMethod/PUT)
(.setExpiration (.toDate exp))
(.addRequestParameter "partNumber" (str (inc i)))
(.addRequestParameter "uploadId" upload-id))]
(str (.generatePresignedUrl client req))))) part-ranges))]
{:upload-id upload-id
:upload-parts links})))
(defn complete-multi-part-upload-creator
[creds bucket]
(fn [id etags upload]
(try (let [upload-id (::up/id upload)
req (CompleteMultipartUploadRequest. bucket id upload-id (map-indexed #(PartETag. (inc %1) %2) etags))
client (com.amazonaws.services.s3.AmazonS3ClientBuilder/defaultClient)]
(.completeMultipartUpload client req)
[true nil nil])
(catch com.amazonaws.services.s3.model.AmazonS3Exception e
(cond
(clojure.string/starts-with? (.getMessage e) "Your proposed upload is smaller than the minimum allowed size")
[false :data-too-small (.getMessage e)]
(clojure.string/starts-with? (.getMessage e) "One or more of the specified parts could not be found")
[false :file-missing (.getMessage e)]
:else
(throw e))))))
(defn create-link
[creds bucket]
(fn [id]
(str
(s3/generate-presigned-url
creds
:bucket-name bucket
:key id
:expiration (t/minutes-from-now 30)
:method "PUT"))))
(defn object-size
[creds bucket id]
(try
(when-let [meta (s3/get-object-metadata creds bucket id)]
(:instance-length meta))
(catch AmazonS3Exception e
nil)))
(def unallowed-chars #"[^\p{Digit}\p{IsAlphabetic}]")
(def multi-hyphens #"-{2,}")
(def hyphen (clojure.string/re-quote-replacement "-"))
(defn clean
[s]
(-> s
(clojure.string/replace unallowed-chars hyphen)
(clojure.string/replace multi-hyphens hyphen)))
(defn sanitize-filename
[f-name]
(let [extension-dex (clojure.string/last-index-of f-name ".")
extension (subs f-name (inc extension-dex))]
(-> f-name
(subs 0 extension-dex)
clean
(str "." (clean extension)))))
(defn create-dload-link
[creds bucket id file-name expiry]
(let [header-overrides (com.amazonaws.services.s3.model.ResponseHeaderOverrides.)]
(when file-name
(.setContentDisposition header-overrides (str "attachment; filename=" (sanitize-filename file-name))))
(-> (s3/generate-presigned-url creds
:bucket-name bucket
:key id
:expiration expiry
:method "GET"
:response-headers header-overrides)
str)))
(defn complete-small-file-upload-creator
[creds bucket]
(fn [id part-ids _]
(if (s3/does-object-exist creds bucket id)
[true nil nil]
[false :file-missing nil])))
(defrecord S3
[communications filestore-upload-cache logging region endpoint access-key secret-key link-expiration-mins bucket client-options
creds]
FileStore
(exists [this id]
(s3/does-object-exist creds bucket id))
(size [this id]
(object-size creds bucket id))
(retrieve [this id]
(when (s3/does-object-exist creds bucket id)
(:object-content (s3/get-object creds bucket id))))
(create-link [this id file-name]
(when (s3/does-object-exist creds bucket id)
(create-dload-link creds bucket id file-name
(t/minutes-from-now link-expiration-mins))))
component/Lifecycle
(start [component]
(if-not creds
(let [c (merge {:endpoint endpoint}
(when secret-key
{:secret-key secret-key})
(when access-key
{:access-key access-key}))]
(log/info "Starting S3 FileStore - bucket:" bucket)
(ensure-bucket c bucket)
(c/attach-command-handler!
communications
:kixi.datastore/filestore
:kixi.datastore.filestore/create-upload-link
"1.0.0" (ch/create-upload-cmd-handler (create-link c bucket)))
(c/attach-validating-command-handler!
communications
:kixi.datastore/filestore-multi-part
:kixi.datastore.filestore/initiate-file-upload
"1.0.0" (ch/create-initiate-file-upload-cmd-handler
(create-link c bucket)
(init-multi-part-upload-creator c bucket)
filestore-upload-cache))
(c/attach-validating-command-handler!
communications
:kixi.datastore/filestore-multi-part-completed
:kixi.datastore.filestore/complete-file-upload
"1.0.0" (ch/create-complete-file-upload-cmd-handler
(complete-small-file-upload-creator c bucket)
(complete-multi-part-upload-creator c bucket)
filestore-upload-cache))
(c/attach-validating-event-handler!
communications
:kixi.datastore/filestore-file-upload-completed
:kixi.datastore.filestore/file-upload-completed
"1.0.0" (eh/create-file-upload-completed-event-handler
filestore-upload-cache))
(c/attach-validating-event-handler!
communications
:kixi.datastore/filestore-file-upload-failed
:kixi.datastore.filestore/file-upload-failed
"1.0.0" (eh/create-file-upload-failed-or-rejected-event-handler
filestore-upload-cache))
(c/attach-validating-event-handler!
communications
:kixi.datastore/filestore-file-upload-rejected
:kixi.datastore.filestore/file-upload-rejected
"1.0.0" (eh/create-file-upload-failed-or-rejected-event-handler
filestore-upload-cache))
(assoc component
:creds
c))
component))
(stop [component]
(log/info "Stopping S3 FileStore")
(if creds
(dissoc component
:creds)
component)))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.