_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
41e5ca8c897ebc1ccd80b941bfbd62654a5014892b07d523a418d139082a622c | bobzhang/fan | pprecordtyp.ml | open Camlp4.PreCast
let _loc = Loc.mk "?"
let base base fields ty =
let fields = List.fold_right (fun field acc ->
let c = <:ctyp< $lid:field$ : $uid:field$.record >> in
<:ctyp< $c$ ; $acc$ >>) fields <:ctyp< >>
in
<:module_binding< $uid:base$ :
sig type record = {
key : $ty$;
$fields$
} end = struct
type record = {
key : $ty$;
$fields$
} end
>>
module CleanAst = Camlp4.Struct.CleanAst.Make(Ast)
let _ =
let b = base "b" ["f1"; "f2"] <:ctyp< int >> in
Camlp4.PreCast.Printers.OCaml.print_implem
((new CleanAst.clean_ast)#str_item
<:str_item< module rec $b$ >>)
| null | https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/todoml/test/fixtures/pprecordtyp.ml | ocaml | open Camlp4.PreCast
let _loc = Loc.mk "?"
let base base fields ty =
let fields = List.fold_right (fun field acc ->
let c = <:ctyp< $lid:field$ : $uid:field$.record >> in
<:ctyp< $c$ ; $acc$ >>) fields <:ctyp< >>
in
<:module_binding< $uid:base$ :
sig type record = {
key : $ty$;
$fields$
} end = struct
type record = {
key : $ty$;
$fields$
} end
>>
module CleanAst = Camlp4.Struct.CleanAst.Make(Ast)
let _ =
let b = base "b" ["f1"; "f2"] <:ctyp< int >> in
Camlp4.PreCast.Printers.OCaml.print_implem
((new CleanAst.clean_ast)#str_item
<:str_item< module rec $b$ >>)
| |
e7854313913f2592443c8a7b7ef2c7f4623f2cb98b36a5fd2d63da67f026f259 | etiago/dlink-camera-api | core_test.clj | (ns dlink-camera-api.core-test
(:require [clojure.test :refer :all]
[dlink-camera-api.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 1 1))))
| null | https://raw.githubusercontent.com/etiago/dlink-camera-api/d5b35a8e346cd385be833bbd6085064a998f3395/test/dlink_camera_api/core_test.clj | clojure | (ns dlink-camera-api.core-test
(:require [clojure.test :refer :all]
[dlink-camera-api.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 1 1))))
| |
7b3816910e86e6357fd575fd9adc30e8e14c1f6fb7cfa54271a5d6c3dc2ee8af | pyr/clj-statsd | clj_statsd.clj | (ns clj-statsd
"Send metrics to statsd."
(:require [clojure.string :as str])
(:import [java.util Random])
(:import [java.net DatagramPacket DatagramSocket InetAddress]))
(def
^{:doc "Atom holding the socket configuration"}
cfg
(atom nil))
(def
^{:doc "Agent holding the datagram socket"}
sockagt
(agent nil))
(defn setup
"Initialize configuration"
[host port & opts]
(send sockagt #(or % (DatagramSocket.)))
(swap! cfg #(or % (merge {:random (Random.)
:host (InetAddress/getByName host)
:port (if (integer? port) port (Integer/parseInt port))}
(apply hash-map opts)))))
(defn- send-packet
"Send a packet to the socket. Expected to be used through the `sockagt` agent"
[^DatagramSocket socket ^DatagramPacket packet]
(try
(doto socket (.send packet))
(catch Exception _
socket)))
(defn format-tags
[tags]
(when (seq tags)
(str "|#" (str/join "," (map name tags)))))
(defn format-stat
^String [prefix content tags]
(str prefix content (format-tags tags)))
(defn send-stat
"Send a raw metric over the network."
[prefix content tags]
(let [fmt (format-stat prefix content tags)]
(when-let [packet (try
(DatagramPacket.
^"[B" (.getBytes fmt)
^Integer (count fmt)
^InetAddress (:host @cfg)
^Integer (:port @cfg))
(catch Exception _
nil))]
(send sockagt send-packet packet))))
(defn publish
"Send a metric over the network, based on the provided sampling rate.
This should be a fully formatted statsd metric line."
[^String content rate tags]
(cond
(nil? @cfg)
nil
(>= rate 1.0)
(send-stat (:prefix @cfg) content tags)
(<= (.nextDouble ^Random (:random @cfg)) rate)
(send-stat (:prefix @cfg) (format "%s|@%f" content rate) tags)
:else
nil))
(defn increment
"Increment a counter at specified rate, defaults to a one increment
with a 1.0 rate"
([k] (increment k 1 1.0 []))
([k v] (increment k v 1.0 []))
([k v rate] (increment k v rate []))
([k v rate tags] (publish (str (name k) ":" v "|c") rate tags)))
(defn round-millis
"Given a numeric value of milliseconds, convert it to an integer value of
milliseconds by rounding to the nearest millisecond if necessary."
[v]
(cond (integer? v) v
(number? v) (Math/round (double v))
:else 0))
(defn timing
"Time an event at specified rate, defaults to 1.0 rate"
([k v] (timing k v 1.0))
([k v rate] (timing k v rate []))
([k v rate tags] (publish (str (name k) ":" (round-millis v) "|ms") rate tags)))
(defn decrement
"Decrement a counter at specified rate, defaults to a one decrement
with a 1.0 rate"
([k] (increment k -1 1.0))
([k v] (increment k (* -1 v) 1.0))
([k v rate] (increment k (* -1 v) rate))
([k v rate tags] (increment k (* -1 v) rate tags)))
(defn- prepare-gauge-for-modify
"Get the correct absolute value for this gauge"
[v]
(if (neg? v)
(if (float? v) (Math/abs (double v)) (Math/abs (long v)))
v))
(defn modify-gauge
"Increment or decrement the value of a previously sent gauge"
([k v] (modify-gauge k v 1.0 []))
([k v rate] (modify-gauge k v rate []))
([k v rate tags]
(publish (str (name k) ":" (if (neg? v) "-" "+")
(prepare-gauge-for-modify v) "|g") rate tags)))
(defn- sanitize-gauge
"Ensure gauge value can be sent on the wire"
[v]
(when (neg? v)
(throw (IllegalArgumentException. (str "bad value for gauge: " v))))
v)
(defn gauge
"Send an arbitrary value."
([k v] (gauge k v 1.0 []))
([k v rate] (gauge k v rate []))
([k v rate tags] (publish (str (name k) ":" v "|g") rate tags))
([k v rate tags {:keys [change]}]
(if (true? change)
(modify-gauge k v rate tags)
(publish (str (name k) ":" (sanitize-gauge v) "|g") rate tags))))
(defn unique
"Send an event, unique occurences of which per flush interval
will be counted by the statsd server. We have no rate call
signature here because that wouldn't make much sense."
([k v] (publish (str (name k) ":" v "|s") 1.0 []))
([k v tags] (publish (str (name k) ":" v "|s") 1.0 tags)))
(defn with-timing-fn
"Helper function for the timing macros. Time the execution of f, a function
of no args, and then call timing with the other args."
[f k rate tags]
(let [start (System/nanoTime)]
(try
(f)
(finally
(timing k (/ (- (System/nanoTime) start) 1e6) rate tags)))))
(defmacro with-tagged-timing
"Time the execution of the provided code, with sampling and tags."
[k rate tags & body]
`(with-timing-fn (fn [] ~@body) ~k ~rate ~tags))
(defmacro with-sampled-timing
"Time the execution of the provided code, with sampling."
[k rate & body]
`(with-timing-fn (fn [] ~@body) ~k ~rate []))
(defmacro with-timing
"Time the execution of the provided code."
[k & body]
`(with-timing-fn (fn [] ~@body) ~k 1.0 []))
| null | https://raw.githubusercontent.com/pyr/clj-statsd/0d9e13de75951bf67b8abf952a5610aaf431b2ba/src/clj_statsd.clj | clojure | (ns clj-statsd
"Send metrics to statsd."
(:require [clojure.string :as str])
(:import [java.util Random])
(:import [java.net DatagramPacket DatagramSocket InetAddress]))
(def
^{:doc "Atom holding the socket configuration"}
cfg
(atom nil))
(def
^{:doc "Agent holding the datagram socket"}
sockagt
(agent nil))
(defn setup
"Initialize configuration"
[host port & opts]
(send sockagt #(or % (DatagramSocket.)))
(swap! cfg #(or % (merge {:random (Random.)
:host (InetAddress/getByName host)
:port (if (integer? port) port (Integer/parseInt port))}
(apply hash-map opts)))))
(defn- send-packet
"Send a packet to the socket. Expected to be used through the `sockagt` agent"
[^DatagramSocket socket ^DatagramPacket packet]
(try
(doto socket (.send packet))
(catch Exception _
socket)))
(defn format-tags
[tags]
(when (seq tags)
(str "|#" (str/join "," (map name tags)))))
(defn format-stat
^String [prefix content tags]
(str prefix content (format-tags tags)))
(defn send-stat
"Send a raw metric over the network."
[prefix content tags]
(let [fmt (format-stat prefix content tags)]
(when-let [packet (try
(DatagramPacket.
^"[B" (.getBytes fmt)
^Integer (count fmt)
^InetAddress (:host @cfg)
^Integer (:port @cfg))
(catch Exception _
nil))]
(send sockagt send-packet packet))))
(defn publish
"Send a metric over the network, based on the provided sampling rate.
This should be a fully formatted statsd metric line."
[^String content rate tags]
(cond
(nil? @cfg)
nil
(>= rate 1.0)
(send-stat (:prefix @cfg) content tags)
(<= (.nextDouble ^Random (:random @cfg)) rate)
(send-stat (:prefix @cfg) (format "%s|@%f" content rate) tags)
:else
nil))
(defn increment
"Increment a counter at specified rate, defaults to a one increment
with a 1.0 rate"
([k] (increment k 1 1.0 []))
([k v] (increment k v 1.0 []))
([k v rate] (increment k v rate []))
([k v rate tags] (publish (str (name k) ":" v "|c") rate tags)))
(defn round-millis
"Given a numeric value of milliseconds, convert it to an integer value of
milliseconds by rounding to the nearest millisecond if necessary."
[v]
(cond (integer? v) v
(number? v) (Math/round (double v))
:else 0))
(defn timing
"Time an event at specified rate, defaults to 1.0 rate"
([k v] (timing k v 1.0))
([k v rate] (timing k v rate []))
([k v rate tags] (publish (str (name k) ":" (round-millis v) "|ms") rate tags)))
(defn decrement
"Decrement a counter at specified rate, defaults to a one decrement
with a 1.0 rate"
([k] (increment k -1 1.0))
([k v] (increment k (* -1 v) 1.0))
([k v rate] (increment k (* -1 v) rate))
([k v rate tags] (increment k (* -1 v) rate tags)))
(defn- prepare-gauge-for-modify
"Get the correct absolute value for this gauge"
[v]
(if (neg? v)
(if (float? v) (Math/abs (double v)) (Math/abs (long v)))
v))
(defn modify-gauge
"Increment or decrement the value of a previously sent gauge"
([k v] (modify-gauge k v 1.0 []))
([k v rate] (modify-gauge k v rate []))
([k v rate tags]
(publish (str (name k) ":" (if (neg? v) "-" "+")
(prepare-gauge-for-modify v) "|g") rate tags)))
(defn- sanitize-gauge
"Ensure gauge value can be sent on the wire"
[v]
(when (neg? v)
(throw (IllegalArgumentException. (str "bad value for gauge: " v))))
v)
(defn gauge
"Send an arbitrary value."
([k v] (gauge k v 1.0 []))
([k v rate] (gauge k v rate []))
([k v rate tags] (publish (str (name k) ":" v "|g") rate tags))
([k v rate tags {:keys [change]}]
(if (true? change)
(modify-gauge k v rate tags)
(publish (str (name k) ":" (sanitize-gauge v) "|g") rate tags))))
(defn unique
"Send an event, unique occurences of which per flush interval
will be counted by the statsd server. We have no rate call
signature here because that wouldn't make much sense."
([k v] (publish (str (name k) ":" v "|s") 1.0 []))
([k v tags] (publish (str (name k) ":" v "|s") 1.0 tags)))
(defn with-timing-fn
"Helper function for the timing macros. Time the execution of f, a function
of no args, and then call timing with the other args."
[f k rate tags]
(let [start (System/nanoTime)]
(try
(f)
(finally
(timing k (/ (- (System/nanoTime) start) 1e6) rate tags)))))
(defmacro with-tagged-timing
"Time the execution of the provided code, with sampling and tags."
[k rate tags & body]
`(with-timing-fn (fn [] ~@body) ~k ~rate ~tags))
(defmacro with-sampled-timing
"Time the execution of the provided code, with sampling."
[k rate & body]
`(with-timing-fn (fn [] ~@body) ~k ~rate []))
(defmacro with-timing
"Time the execution of the provided code."
[k & body]
`(with-timing-fn (fn [] ~@body) ~k 1.0 []))
| |
947a5538aeca8eaccd768c4ac2b190601e90baca7dbc8de5455c91f1a75b2645 | CarlosMChica/HaskellBook | fixIt.hs | module Sing where
fstString:: [Char] -> [Char]
fstString x = x ++ " in the rain"
sndString:: [Char] -> [Char]
sndString x = x ++ " over the rainbow"
sing = if x > y then fstString x else sndString y
where x = "Singin"
y = "Somewhere"
main :: IO()
main = do
print (1 + 2)
putStrLn "10"
print (negate (-1))
print ((+) 0 blah)
where blah = negate 1
| null | https://raw.githubusercontent.com/CarlosMChica/HaskellBook/86f82cf36cd00003b1a1aebf264e4b5d606ddfad/chapter5/fixIt.hs | haskell | module Sing where
fstString:: [Char] -> [Char]
fstString x = x ++ " in the rain"
sndString:: [Char] -> [Char]
sndString x = x ++ " over the rainbow"
sing = if x > y then fstString x else sndString y
where x = "Singin"
y = "Somewhere"
main :: IO()
main = do
print (1 + 2)
putStrLn "10"
print (negate (-1))
print ((+) 0 blah)
where blah = negate 1
| |
a1e987cfe269bb0fada7d7239d4f8960459dbe363e43992b6ed13e33dd5f24b6 | danieljharvey/mimsa | ParserSpec.hs | {-# LANGUAGE OverloadedStrings #-}
module Test.Parser.ParserSpec (spec) where
import Calc
import Data.Foldable (traverse_)
import Data.Functor
import qualified Data.Text as T
import Test.Hspec
int :: (Monoid ann) => Integer -> Expr ann
int = EPrim mempty . PInt
bool :: (Monoid ann) => Bool -> Expr ann
bool = EPrim mempty . PBool
spec :: Spec
spec = do
describe "ParserSpec" $ do
describe "Type" $ do
let strings =
[ ("Boolean", TPrim () TBool),
("Integer", TPrim () TInt)
]
traverse_
( \(str, expr) -> it (T.unpack str) $ do
case parseTypeAndFormatError str of
Right parsedExp -> parsedExp $> () `shouldBe` expr
Left e -> error (T.unpack e)
)
strings
describe "Expr" $ do
let strings =
[ ("-1", int (-1)),
("1 + 2", EInfix () OpAdd (int 1) (int 2)),
("True", EPrim () (PBool True)),
("False", EPrim () (PBool False)),
( "1 + 2 + 3",
EInfix
()
OpAdd
( EInfix
()
OpAdd
(int 1)
(int 2)
)
(int 3)
),
("1 == 2", EInfix () OpEquals (int 1) (int 2)),
("if True then 1 else 2", EIf () (bool True) (int 1) (int 2))
]
traverse_
( \(str, expr) -> it (T.unpack str) $ do
case parseExprAndFormatError str of
Right parsedExp -> parsedExp $> () `shouldBe` expr
Left e -> error (T.unpack e)
)
strings
describe "Expr with Annotation" $ do
it "Parses an infix operation with annotations" $ do
parseExprAndFormatError "20 + 22"
`shouldBe` Right
( EInfix
(Location 0 7)
OpAdd
(EPrim (Location 0 2) (PInt 20))
(EPrim (Location 5 7) (PInt 22))
)
| null | https://raw.githubusercontent.com/danieljharvey/mimsa/c5bcab3e9f961216cea540edb95a8a66db773980/llvm-calc2/test/Test/Parser/ParserSpec.hs | haskell | # LANGUAGE OverloadedStrings # |
module Test.Parser.ParserSpec (spec) where
import Calc
import Data.Foldable (traverse_)
import Data.Functor
import qualified Data.Text as T
import Test.Hspec
int :: (Monoid ann) => Integer -> Expr ann
int = EPrim mempty . PInt
bool :: (Monoid ann) => Bool -> Expr ann
bool = EPrim mempty . PBool
spec :: Spec
spec = do
describe "ParserSpec" $ do
describe "Type" $ do
let strings =
[ ("Boolean", TPrim () TBool),
("Integer", TPrim () TInt)
]
traverse_
( \(str, expr) -> it (T.unpack str) $ do
case parseTypeAndFormatError str of
Right parsedExp -> parsedExp $> () `shouldBe` expr
Left e -> error (T.unpack e)
)
strings
describe "Expr" $ do
let strings =
[ ("-1", int (-1)),
("1 + 2", EInfix () OpAdd (int 1) (int 2)),
("True", EPrim () (PBool True)),
("False", EPrim () (PBool False)),
( "1 + 2 + 3",
EInfix
()
OpAdd
( EInfix
()
OpAdd
(int 1)
(int 2)
)
(int 3)
),
("1 == 2", EInfix () OpEquals (int 1) (int 2)),
("if True then 1 else 2", EIf () (bool True) (int 1) (int 2))
]
traverse_
( \(str, expr) -> it (T.unpack str) $ do
case parseExprAndFormatError str of
Right parsedExp -> parsedExp $> () `shouldBe` expr
Left e -> error (T.unpack e)
)
strings
describe "Expr with Annotation" $ do
it "Parses an infix operation with annotations" $ do
parseExprAndFormatError "20 + 22"
`shouldBe` Right
( EInfix
(Location 0 7)
OpAdd
(EPrim (Location 0 2) (PInt 20))
(EPrim (Location 5 7) (PInt 22))
)
|
1b198215fd1ea06e383e57ca23f5f7bb4fbbb5452db06f7101d641a723f22a36 | myuon/minilight | SpecMain.hs | module Main.SpecMain where
import MiniLight
import Test.Tasty.Hspec hiding (Failure, Success)
spec_main :: Spec
spec_main = do
describe "main" $ do
it "should start and quit" $ do
() <- runLightTWith (defLightConfig { headless = True }) $ do
runMiniloop defConfig (return ()) (\_ -> quit)
() `shouldBe` ()
| null | https://raw.githubusercontent.com/myuon/minilight/3c6a4f6b9ed15c97c7de02a281c31152442fb4d7/test/Main/SpecMain.hs | haskell | module Main.SpecMain where
import MiniLight
import Test.Tasty.Hspec hiding (Failure, Success)
spec_main :: Spec
spec_main = do
describe "main" $ do
it "should start and quit" $ do
() <- runLightTWith (defLightConfig { headless = True }) $ do
runMiniloop defConfig (return ()) (\_ -> quit)
() `shouldBe` ()
| |
c9ea9ceca478f195fb4a6b6bdce2e51516a4360989fb13d6a6b66b0b0914dcc0 | kraison/vivace-graph | utilities.lisp | (in-package #:vivace-graph)
(defmacro logger (level msg &rest args)
"Syslogger"
`(funcall #'sb-posix:syslog (gethash ',level *syslog-priorities*) ,msg ,@args))
(defun ip-to-string (ip)
(format nil "~A.~A.~A.~A" (aref ip 0) (aref ip 1) (aref ip 2) (aref ip 3)))
(defgeneric less-than (x y)
(:documentation "Generic less-than operator. Allows comparison of apples and oranges.")
(:method ((x symbol) (y symbol)) (string< (symbol-name x) (symbol-name y)))
(:method ((x symbol) (y string)) (string< (symbol-name x) y))
(:method ((x symbol) (y number)) (string< (symbol-name x) (write-to-string y)))
(:method ((x symbol) (y uuid:uuid)) (string< (symbol-name x) (uuid:print-bytes nil y)))
(:method ((x number) (y number)) (< x y))
(:method ((x number) (y symbol)) (string< (write-to-string x) (symbol-name y)))
(:method ((x number) (y string)) (string< (write-to-string x) y))
(:method ((x number) (y uuid:uuid)) (string< (write-to-string x) (uuid:print-bytes nil y)))
(:method ((x string) (y string)) (string< x y))
(:method ((x string) (y symbol)) (string< x (symbol-name y)))
(:method ((x string) (y number)) (string< x (write-to-string y)))
(:method ((x string) (y uuid:uuid)) (string< x (uuid:print-bytes nil y)))
(:method ((x timestamp) (y timestamp)) (timestamp< x y))
(:method ((x number) (y timestamp)) (< (timestamp-to-universal x) y))
(:method ((x timestamp) (y number)) (< x (timestamp-to-universal y)))
(:method ((x uuid:uuid) (y uuid:uuid))
(string< (uuid:print-bytes nil x) (uuid:print-bytes nil y)))
(:method ((x uuid:uuid) (y string)) (string< (uuid:print-bytes nil x) y))
(:method ((x uuid:uuid) (y symbol)) (string< (uuid:print-bytes nil x) (symbol-name y)))
(:method ((x uuid:uuid) (y number)) (string< (uuid:print-bytes nil x) (write-to-string y))))
(defgeneric greater-than (x y)
(:documentation "Generic greater-than operator. Allows comparison of apples and oranges.")
(:method ((x symbol) (y symbol)) (string> (symbol-name x) (symbol-name y)))
(:method ((x symbol) (y string)) (string> (symbol-name x) y))
(:method ((x symbol) (y number)) (string> (symbol-name x) (write-to-string y)))
(:method ((x symbol) (y uuid:uuid)) (string> (symbol-name x) (uuid:print-bytes nil y)))
(:method ((x number) (y number)) (> x y))
(:method ((x number) (y symbol)) (string> (write-to-string x) (symbol-name y)))
(:method ((x number) (y string)) (string> (write-to-string x) y))
(:method ((x number) (y uuid:uuid)) (string> (write-to-string x) (uuid:print-bytes nil y)))
(:method ((x string) (y string)) (string> x y))
(:method ((x string) (y symbol)) (string> x (symbol-name y)))
(:method ((x string) (y number)) (string> x (write-to-string y)))
(:method ((x string) (y uuid:uuid)) (string> x (uuid:print-bytes nil y)))
(:method ((x timestamp) (y timestamp)) (timestamp> x y))
(:method ((x number) (y timestamp)) (> (timestamp-to-universal x) y))
(:method ((x timestamp) (y number)) (> x (timestamp-to-universal y)))
(:method ((x uuid:uuid) (y uuid:uuid))
(string> (uuid:print-bytes nil x) (uuid:print-bytes nil y)))
(:method ((x uuid:uuid) (y string)) (string> (uuid:print-bytes nil x) y))
(:method ((x uuid:uuid) (y symbol)) (string> (uuid:print-bytes nil x) (symbol-name y)))
(:method ((x uuid:uuid) (y number)) (string> (uuid:print-bytes nil x) (write-to-string y))))
(defun uri? (string)
(cl-ppcre:scan "^(https?|ftp)\:\/\/[a-zA-Z0-9\-\.]+\.[a-zA-Z]{2,3}(\/.*)?$" string))
(defun make-slot-key (id slot)
(format nil "~A~A~A" id #\Nul slot))
;; Make compare-and-swap shorter to call
(defmacro cas (place old new)
`(sb-ext:compare-and-swap ,place ,old ,new))
;; String split without regexes.
(defun split (string &optional (ws '(#\Space #\Tab)) max)
"Split STRING along whitespace as defined by the sequence WS.
Whitespace which causes a split is elided from the result. The whole
string will be split, unless MAX is provided, in which case the
string will be split into MAX tokens at most, the last one
containing the whole rest of the given STRING, if any."
(flet ((is-ws (char) (find char ws)))
(nreverse
(let ((list nil) (start 0) (words 0) end)
(loop
(when (and max (>= words (1- max)))
(return (cons (subseq string start) list)))
(setf end (position-if #'is-ws string :start start))
(push (subseq string start end) list)
(incf words)
(unless end (return list))
(setf start (1+ end)))))))
(defun print-hash (ht)
"Dump the k-v pairs of a hash table to stdout."
(maphash #'(lambda (k v) (format t "~A: ~A~%" k v)) ht))
Plists
(defun get-prop (plist prop)
"Return the value of a property in a property list."
(cond ((null plist) nil)
((eql (car plist) prop)
(cadr plist))
(t (get-prop (cddr plist) prop))))
utilities
(defun rest2 (x)
"The rest of a list after the first TWO elements."
(rest (rest x)))
(defun continue-p ()
"Ask user if we should continue looking for solutions."
(case (read-char)
(#\; t)
(#\. nil)
(#\newline (continue-p))
(otherwise
(format t " Type ; to see more or . to stop")
(continue-p))))
(defun length=1 (list)
"Is this a list of exactly one element?"
(and (consp list) (null (cdr list))))
(defun proper-listp (x)
"Is x a proper (non-dotted) list?"
(or (null x)
(and (consp x) (proper-listp (rest x)))))
(defun new-interned-symbol (&rest args)
"Concatenate symbols or strings to form an interned symbol"
(intern (format nil "~{~a~}" args)))
(defun new-symbol (&rest args)
"Concatenate symbols or strings to form an uninterned symbol"
(make-symbol (format nil "~{~a~}" args)))
(defun find-all (item sequence &rest keyword-args
&key (test #'eql) test-not &allow-other-keys)
"Find all those elements of sequence that match item,
according to the keywords. Doesn't alter sequence."
(if test-not
(apply #'remove item sequence
:test-not (complement test-not) keyword-args)
(apply #'remove item sequence
:test (complement test) keyword-args)))
(defun find-anywhere (item tree)
"Does item occur anywhere in tree? If so, return it."
(cond ((eql item tree) tree)
((atom tree) nil)
((find-anywhere item (first tree)))
((find-anywhere item (rest tree)))))
(defun find-if-anywhere (predicate tree)
"Does predicate apply to any atom in the tree?"
(if (atom tree)
(funcall predicate tree)
(or (find-if-anywhere predicate (first tree))
(find-if-anywhere predicate (rest tree)))))
(defun unique-find-anywhere-if (predicate tree &optional found-so-far)
"return a list of leaves of tree satisfying predicate, with duplicates removed."
(if (atom tree)
(if (funcall predicate tree)
(adjoin tree found-so-far)
found-so-far)
(unique-find-anywhere-if
predicate
(first tree)
(unique-find-anywhere-if predicate (rest tree) found-so-far))))
(defun reuse-cons (x y x-y)
"Return (cons x y), or reuse x-y if it is equal to (cons x y)"
(if (and (eql x (car x-y)) (eql y (cdr x-y)))
x-y
(cons x y)))
Borrowed from On by
(defmacro while (test &rest body)
`(loop until (not ,test) do
,@body))
(defmacro aif (test-form then-form &optional else-form)
`(let ((it ,test-form))
(if it ,then-form ,else-form)))
(defmacro aif2 (test &optional then else)
(let ((win (gensym)))
`(multiple-value-bind (it ,win) ,test
(if (or it ,win) ,then ,else))))
(define-modify-macro conc1f (obj)
(lambda (place obj)
(nconc place (list obj))))
(defmacro with-gensyms (syms &body body)
`(let ,(mapcar #'(lambda (s)
`(,s (gensym)))
syms)
,@body))
(defun flatten (x)
(labels ((rec (x acc)
(cond ((null x) acc)
((atom x) (cons x acc))
(t (rec (car x) (rec (cdr x) acc))))))
(rec x nil)))
(defmacro acond2 (&rest clauses)
(if (null clauses)
nil
(let ((cl1 (car clauses))
(val (gensym))
(win (gensym)))
`(multiple-value-bind (,val ,win) ,(car cl1)
(if (or ,val ,win)
(let ((it ,val)) ,@(cdr cl1))
(acond2 ,@(cdr clauses)))))))
| null | https://raw.githubusercontent.com/kraison/vivace-graph/6b5b5eca3e2613e48846da326ecf36cd9dcd7ceb/utilities.lisp | lisp | Make compare-and-swap shorter to call
String split without regexes.
t) | (in-package #:vivace-graph)
(defmacro logger (level msg &rest args)
"Syslogger"
`(funcall #'sb-posix:syslog (gethash ',level *syslog-priorities*) ,msg ,@args))
(defun ip-to-string (ip)
(format nil "~A.~A.~A.~A" (aref ip 0) (aref ip 1) (aref ip 2) (aref ip 3)))
(defgeneric less-than (x y)
(:documentation "Generic less-than operator. Allows comparison of apples and oranges.")
(:method ((x symbol) (y symbol)) (string< (symbol-name x) (symbol-name y)))
(:method ((x symbol) (y string)) (string< (symbol-name x) y))
(:method ((x symbol) (y number)) (string< (symbol-name x) (write-to-string y)))
(:method ((x symbol) (y uuid:uuid)) (string< (symbol-name x) (uuid:print-bytes nil y)))
(:method ((x number) (y number)) (< x y))
(:method ((x number) (y symbol)) (string< (write-to-string x) (symbol-name y)))
(:method ((x number) (y string)) (string< (write-to-string x) y))
(:method ((x number) (y uuid:uuid)) (string< (write-to-string x) (uuid:print-bytes nil y)))
(:method ((x string) (y string)) (string< x y))
(:method ((x string) (y symbol)) (string< x (symbol-name y)))
(:method ((x string) (y number)) (string< x (write-to-string y)))
(:method ((x string) (y uuid:uuid)) (string< x (uuid:print-bytes nil y)))
(:method ((x timestamp) (y timestamp)) (timestamp< x y))
(:method ((x number) (y timestamp)) (< (timestamp-to-universal x) y))
(:method ((x timestamp) (y number)) (< x (timestamp-to-universal y)))
(:method ((x uuid:uuid) (y uuid:uuid))
(string< (uuid:print-bytes nil x) (uuid:print-bytes nil y)))
(:method ((x uuid:uuid) (y string)) (string< (uuid:print-bytes nil x) y))
(:method ((x uuid:uuid) (y symbol)) (string< (uuid:print-bytes nil x) (symbol-name y)))
(:method ((x uuid:uuid) (y number)) (string< (uuid:print-bytes nil x) (write-to-string y))))
(defgeneric greater-than (x y)
(:documentation "Generic greater-than operator. Allows comparison of apples and oranges.")
(:method ((x symbol) (y symbol)) (string> (symbol-name x) (symbol-name y)))
(:method ((x symbol) (y string)) (string> (symbol-name x) y))
(:method ((x symbol) (y number)) (string> (symbol-name x) (write-to-string y)))
(:method ((x symbol) (y uuid:uuid)) (string> (symbol-name x) (uuid:print-bytes nil y)))
(:method ((x number) (y number)) (> x y))
(:method ((x number) (y symbol)) (string> (write-to-string x) (symbol-name y)))
(:method ((x number) (y string)) (string> (write-to-string x) y))
(:method ((x number) (y uuid:uuid)) (string> (write-to-string x) (uuid:print-bytes nil y)))
(:method ((x string) (y string)) (string> x y))
(:method ((x string) (y symbol)) (string> x (symbol-name y)))
(:method ((x string) (y number)) (string> x (write-to-string y)))
(:method ((x string) (y uuid:uuid)) (string> x (uuid:print-bytes nil y)))
(:method ((x timestamp) (y timestamp)) (timestamp> x y))
(:method ((x number) (y timestamp)) (> (timestamp-to-universal x) y))
(:method ((x timestamp) (y number)) (> x (timestamp-to-universal y)))
(:method ((x uuid:uuid) (y uuid:uuid))
(string> (uuid:print-bytes nil x) (uuid:print-bytes nil y)))
(:method ((x uuid:uuid) (y string)) (string> (uuid:print-bytes nil x) y))
(:method ((x uuid:uuid) (y symbol)) (string> (uuid:print-bytes nil x) (symbol-name y)))
(:method ((x uuid:uuid) (y number)) (string> (uuid:print-bytes nil x) (write-to-string y))))
(defun uri? (string)
(cl-ppcre:scan "^(https?|ftp)\:\/\/[a-zA-Z0-9\-\.]+\.[a-zA-Z]{2,3}(\/.*)?$" string))
(defun make-slot-key (id slot)
(format nil "~A~A~A" id #\Nul slot))
(defmacro cas (place old new)
`(sb-ext:compare-and-swap ,place ,old ,new))
(defun split (string &optional (ws '(#\Space #\Tab)) max)
"Split STRING along whitespace as defined by the sequence WS.
Whitespace which causes a split is elided from the result. The whole
string will be split, unless MAX is provided, in which case the
string will be split into MAX tokens at most, the last one
containing the whole rest of the given STRING, if any."
(flet ((is-ws (char) (find char ws)))
(nreverse
(let ((list nil) (start 0) (words 0) end)
(loop
(when (and max (>= words (1- max)))
(return (cons (subseq string start) list)))
(setf end (position-if #'is-ws string :start start))
(push (subseq string start end) list)
(incf words)
(unless end (return list))
(setf start (1+ end)))))))
(defun print-hash (ht)
"Dump the k-v pairs of a hash table to stdout."
(maphash #'(lambda (k v) (format t "~A: ~A~%" k v)) ht))
Plists
(defun get-prop (plist prop)
"Return the value of a property in a property list."
(cond ((null plist) nil)
((eql (car plist) prop)
(cadr plist))
(t (get-prop (cddr plist) prop))))
utilities
(defun rest2 (x)
"The rest of a list after the first TWO elements."
(rest (rest x)))
(defun continue-p ()
"Ask user if we should continue looking for solutions."
(case (read-char)
(#\. nil)
(#\newline (continue-p))
(otherwise
(format t " Type ; to see more or . to stop")
(continue-p))))
(defun length=1 (list)
"Is this a list of exactly one element?"
(and (consp list) (null (cdr list))))
(defun proper-listp (x)
"Is x a proper (non-dotted) list?"
(or (null x)
(and (consp x) (proper-listp (rest x)))))
(defun new-interned-symbol (&rest args)
"Concatenate symbols or strings to form an interned symbol"
(intern (format nil "~{~a~}" args)))
(defun new-symbol (&rest args)
"Concatenate symbols or strings to form an uninterned symbol"
(make-symbol (format nil "~{~a~}" args)))
(defun find-all (item sequence &rest keyword-args
&key (test #'eql) test-not &allow-other-keys)
"Find all those elements of sequence that match item,
according to the keywords. Doesn't alter sequence."
(if test-not
(apply #'remove item sequence
:test-not (complement test-not) keyword-args)
(apply #'remove item sequence
:test (complement test) keyword-args)))
(defun find-anywhere (item tree)
"Does item occur anywhere in tree? If so, return it."
(cond ((eql item tree) tree)
((atom tree) nil)
((find-anywhere item (first tree)))
((find-anywhere item (rest tree)))))
(defun find-if-anywhere (predicate tree)
"Does predicate apply to any atom in the tree?"
(if (atom tree)
(funcall predicate tree)
(or (find-if-anywhere predicate (first tree))
(find-if-anywhere predicate (rest tree)))))
(defun unique-find-anywhere-if (predicate tree &optional found-so-far)
"return a list of leaves of tree satisfying predicate, with duplicates removed."
(if (atom tree)
(if (funcall predicate tree)
(adjoin tree found-so-far)
found-so-far)
(unique-find-anywhere-if
predicate
(first tree)
(unique-find-anywhere-if predicate (rest tree) found-so-far))))
(defun reuse-cons (x y x-y)
"Return (cons x y), or reuse x-y if it is equal to (cons x y)"
(if (and (eql x (car x-y)) (eql y (cdr x-y)))
x-y
(cons x y)))
Borrowed from On by
(defmacro while (test &rest body)
`(loop until (not ,test) do
,@body))
(defmacro aif (test-form then-form &optional else-form)
`(let ((it ,test-form))
(if it ,then-form ,else-form)))
(defmacro aif2 (test &optional then else)
(let ((win (gensym)))
`(multiple-value-bind (it ,win) ,test
(if (or it ,win) ,then ,else))))
(define-modify-macro conc1f (obj)
(lambda (place obj)
(nconc place (list obj))))
(defmacro with-gensyms (syms &body body)
`(let ,(mapcar #'(lambda (s)
`(,s (gensym)))
syms)
,@body))
(defun flatten (x)
(labels ((rec (x acc)
(cond ((null x) acc)
((atom x) (cons x acc))
(t (rec (car x) (rec (cdr x) acc))))))
(rec x nil)))
(defmacro acond2 (&rest clauses)
(if (null clauses)
nil
(let ((cl1 (car clauses))
(val (gensym))
(win (gensym)))
`(multiple-value-bind (,val ,win) ,(car cl1)
(if (or ,val ,win)
(let ((it ,val)) ,@(cdr cl1))
(acond2 ,@(cdr clauses)))))))
|
ccd26664d34798ed8373239546b897bdab31e3c5a3ddab3efd73e79afa0171b6 | juji-io/editscript | project.clj | (defproject juji/editscript "0.6.2"
:description "A diff library for Clojure/ClojureScript data structures"
:url "-io/editscript"
:lein-release {:deploy-via :clojars}
:deploy-repositories [["clojars" {:url ""
:username :env/clojars_username
:password :env/clojars_password
:sign-releases false}]]
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.11.1"]]
:plugins [[lein-cljsbuild "1.1.7"]
[lein-doo "0.1.10"]]
:doo {:build "node"
:paths {:karma "./node_modules/karma/bin/karma"}
:karma {:config {"browserDisconnectTimeout" 30000
"browserNoActivityTimeout" 90000}}}
:clean-targets ^{:protect false} [:target-path "out" "target"]
:cljsbuild {:builds
{:dev
{:source-paths ["src" "test" "dev"]
:compiler {:output-to "target/editscript.js"
:output-dir "target"
:optimizations :none
:source-map true
:cache-analysis true
:checked-arrays :warn
:parallel-build true}}
:node
{:source-paths ["src" "test"]
:compiler {:output-to "out/node/editscript.js"
:output-dir "out/node"
:optimizations :advanced
:main "editscript.test"
:source-map "out/node/editscript.js.map"
:target :nodejs
:cache-analysis true
:checked-arrays :warn
:parallel-build true}}
:browser
{:source-paths ["src" "test"]
:compiler {:output-to "out/browser/editscript.js"
:output-dir "out/browser"
:optimizations :advanced
:main "editscript.test"
:source-map "out/browser/editscript.js.map"
:cache-analysis true
:checked-arrays :warn
:parallel-build true}}}}
:profiles {:deploy
{:aot [#"editscript\.*"]
:jvm-opts ["-Dclojure.compiler.direct-linking=true"] }
:dev
{:dependencies [[org.clojure/clojurescript "1.11.60"
:exclusions [org.clojure/core.rrb-vector]]
;;see -cljsbuild/issues/469
[quantum/org.clojure.core.rrb-vector "0.0.12"]
[criterium "0.4.6"]
[doo "0.1.11"]
[org.clojure/test.check "1.1.1"]
[ cider / piggieback " 0.5.2 " ]
]
:source-paths ["src" "test" "dev"]
;; :repl-options {:nrepl-middleware [cider.piggieback/wrap-cljs-repl]}
}})
| null | https://raw.githubusercontent.com/juji-io/editscript/661502c19229cbae296896d4519a40eb848255c9/project.clj | clojure | see -cljsbuild/issues/469
:repl-options {:nrepl-middleware [cider.piggieback/wrap-cljs-repl]} | (defproject juji/editscript "0.6.2"
:description "A diff library for Clojure/ClojureScript data structures"
:url "-io/editscript"
:lein-release {:deploy-via :clojars}
:deploy-repositories [["clojars" {:url ""
:username :env/clojars_username
:password :env/clojars_password
:sign-releases false}]]
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.11.1"]]
:plugins [[lein-cljsbuild "1.1.7"]
[lein-doo "0.1.10"]]
:doo {:build "node"
:paths {:karma "./node_modules/karma/bin/karma"}
:karma {:config {"browserDisconnectTimeout" 30000
"browserNoActivityTimeout" 90000}}}
:clean-targets ^{:protect false} [:target-path "out" "target"]
:cljsbuild {:builds
{:dev
{:source-paths ["src" "test" "dev"]
:compiler {:output-to "target/editscript.js"
:output-dir "target"
:optimizations :none
:source-map true
:cache-analysis true
:checked-arrays :warn
:parallel-build true}}
:node
{:source-paths ["src" "test"]
:compiler {:output-to "out/node/editscript.js"
:output-dir "out/node"
:optimizations :advanced
:main "editscript.test"
:source-map "out/node/editscript.js.map"
:target :nodejs
:cache-analysis true
:checked-arrays :warn
:parallel-build true}}
:browser
{:source-paths ["src" "test"]
:compiler {:output-to "out/browser/editscript.js"
:output-dir "out/browser"
:optimizations :advanced
:main "editscript.test"
:source-map "out/browser/editscript.js.map"
:cache-analysis true
:checked-arrays :warn
:parallel-build true}}}}
:profiles {:deploy
{:aot [#"editscript\.*"]
:jvm-opts ["-Dclojure.compiler.direct-linking=true"] }
:dev
{:dependencies [[org.clojure/clojurescript "1.11.60"
:exclusions [org.clojure/core.rrb-vector]]
[quantum/org.clojure.core.rrb-vector "0.0.12"]
[criterium "0.4.6"]
[doo "0.1.11"]
[org.clojure/test.check "1.1.1"]
[ cider / piggieback " 0.5.2 " ]
]
:source-paths ["src" "test" "dev"]
}})
|
6f7e35b22a200cad89bbaef787c1bbb3998021090eea8257c7f712dac4d127c9 | tejasbubane/haskell-book-code | Game.hs | module Game where
import System.Exit (exitSuccess)
import Data.List (intersperse)
import Data.Maybe (isJust)
import Control.Monad (forever)
data Puzzle = Puzzle String [Maybe Char] [Char]
instance Show Puzzle where
show (Puzzle _ discovered guessed) =
(intersperse ' ' $ fmap renderPuzzleChar discovered)
++ " Guessed so far: " ++ guessed
-- Take puzzle word and turn it into list of Nothing
freshPuzzle :: String -> Puzzle
freshPuzzle str =
Puzzle str (map (\x -> const Nothing x) str) []
-- check if guessed character is in puzzle
charInWord :: Puzzle -> Char -> Bool
charInWord (Puzzle str _ _) char = char `elem` str
-- check if guessed character is already guessed
alreadyGuessed :: Puzzle -> Char -> Bool
alreadyGuessed (Puzzle _ _ guessed) char =
char `elem` guessed
renderPuzzleChar :: Maybe Char -> Char
renderPuzzleChar Nothing = '_'
renderPuzzleChar (Just s) = s
-- success case - don't count guess
fillInCharacter :: Puzzle -> Char -> Puzzle
fillInCharacter (Puzzle word filledInSoFar s) c =
Puzzle word newFilledInSoFar s
where zipper guessed wordChar guessChar =
if wordChar == guessed
then Just wordChar
else guessChar
newFilledInSoFar = zipWith (zipper c) word filledInSoFar
-- failure case - count the guess
countGuess :: Puzzle -> Char -> Puzzle
countGuess (Puzzle word filledInSoFar s) c =
Puzzle word filledInSoFar (c:s)
handleGuess :: Puzzle -> Char -> IO Puzzle
handleGuess puzzle guess = do
putStrLn $ "Your guess was: " ++ [guess]
case (charInWord puzzle guess
, alreadyGuessed puzzle guess) of
(_, True) -> do
putStrLn "You already guessed that\
\ character, pick something else!"
return puzzle
(True, _) -> do
putStrLn "This character was in the word,\
\ filling in the word accordingly"
return (fillInCharacter puzzle guess)
(False, _) -> do
putStrLn "This character wasn't in\
\ the word, try again."
return (countGuess puzzle guess)
gameOver :: Puzzle -> IO ()
gameOver (Puzzle wordToGuess _ guessed) =
if (length guessed) > 7 then
do putStrLn "You lose!"
putStrLn $ "The word was: " ++ wordToGuess
exitSuccess
else return ()
gameWin :: Puzzle -> IO ()
gameWin (Puzzle _ filledInSoFar _) =
if all isJust filledInSoFar then
do putStrLn "You win!"
exitSuccess
else return ()
runGame :: Puzzle -> IO ()
runGame puzzle = forever $ do
gameOver puzzle
gameWin puzzle
putStrLn $ "Current puzzle is: " ++ show puzzle
putStr "Guess a letter: "
guess <- getLine
case guess of
[c] -> handleGuess puzzle c >>= runGame
_ -> putStrLn "Your guess must be a single character"
| null | https://raw.githubusercontent.com/tejasbubane/haskell-book-code/deaac8ab4db0ae8692d0278826528bb8a746ed82/ch-13/hangman/src/Game.hs | haskell | Take puzzle word and turn it into list of Nothing
check if guessed character is in puzzle
check if guessed character is already guessed
success case - don't count guess
failure case - count the guess | module Game where
import System.Exit (exitSuccess)
import Data.List (intersperse)
import Data.Maybe (isJust)
import Control.Monad (forever)
data Puzzle = Puzzle String [Maybe Char] [Char]
instance Show Puzzle where
show (Puzzle _ discovered guessed) =
(intersperse ' ' $ fmap renderPuzzleChar discovered)
++ " Guessed so far: " ++ guessed
freshPuzzle :: String -> Puzzle
freshPuzzle str =
Puzzle str (map (\x -> const Nothing x) str) []
charInWord :: Puzzle -> Char -> Bool
charInWord (Puzzle str _ _) char = char `elem` str
alreadyGuessed :: Puzzle -> Char -> Bool
alreadyGuessed (Puzzle _ _ guessed) char =
char `elem` guessed
renderPuzzleChar :: Maybe Char -> Char
renderPuzzleChar Nothing = '_'
renderPuzzleChar (Just s) = s
fillInCharacter :: Puzzle -> Char -> Puzzle
fillInCharacter (Puzzle word filledInSoFar s) c =
Puzzle word newFilledInSoFar s
where zipper guessed wordChar guessChar =
if wordChar == guessed
then Just wordChar
else guessChar
newFilledInSoFar = zipWith (zipper c) word filledInSoFar
countGuess :: Puzzle -> Char -> Puzzle
countGuess (Puzzle word filledInSoFar s) c =
Puzzle word filledInSoFar (c:s)
handleGuess :: Puzzle -> Char -> IO Puzzle
handleGuess puzzle guess = do
putStrLn $ "Your guess was: " ++ [guess]
case (charInWord puzzle guess
, alreadyGuessed puzzle guess) of
(_, True) -> do
putStrLn "You already guessed that\
\ character, pick something else!"
return puzzle
(True, _) -> do
putStrLn "This character was in the word,\
\ filling in the word accordingly"
return (fillInCharacter puzzle guess)
(False, _) -> do
putStrLn "This character wasn't in\
\ the word, try again."
return (countGuess puzzle guess)
gameOver :: Puzzle -> IO ()
gameOver (Puzzle wordToGuess _ guessed) =
if (length guessed) > 7 then
do putStrLn "You lose!"
putStrLn $ "The word was: " ++ wordToGuess
exitSuccess
else return ()
gameWin :: Puzzle -> IO ()
gameWin (Puzzle _ filledInSoFar _) =
if all isJust filledInSoFar then
do putStrLn "You win!"
exitSuccess
else return ()
runGame :: Puzzle -> IO ()
runGame puzzle = forever $ do
gameOver puzzle
gameWin puzzle
putStrLn $ "Current puzzle is: " ++ show puzzle
putStr "Guess a letter: "
guess <- getLine
case guess of
[c] -> handleGuess puzzle c >>= runGame
_ -> putStrLn "Your guess must be a single character"
|
31b1a79628e44a89e38de7184b14a70ae2f02f98caf8c99468f8045bd443b00c | panda-planner-dev/ipc2020-domains | p-13.lisp | (defproblem problem domain
(
(In_City FrauenStrassePost Ulm)
(In_City MuenchnerStrassePost Muenchen)
(At_Vehicle LKW_Ulm FrauenStrassePost)
(At_Vehicle LKW_Muenchen HauptbahnhofMuenchen)
(At_Vehicle Eisenbahnwagen HauptbahnhofUlm)
(Serves HauptbahnhofUlm Ulm)
(Serves HauptbahnhofMuenchen Muenchen)
(Available HauptbahnhofUlm)
(Available HauptbahnhofMuenchen)
(At_Vehicle Lokomotive HauptbahnhofUlm)
(Connects James_Franck_Ring FrauenStrassePost HauptbahnhofUlm)
(Connects UlmMuenchenRailRoute HauptbahnhofUlm HauptbahnhofMuenchen)
(Connects BlumenStrasse HauptbahnhofMuenchen MuenchnerStrassePost)
(Available James_Franck_Ring)
(Available UlmMuenchenRailRoute)
(Available BlumenStrasse)
(Available LKW_Ulm)
(Available Lokomotive)
(Available LKW_Muenchen)
(PV_Compatible Stuehle LKW_Ulm)
(PV_Compatible Stuehle Eisenbahnwagen)
(PV_Compatible Stuehle LKW_Muenchen)
(RV_Compatible James_Franck_Ring LKW_Ulm)
(RV_Compatible UlmMuenchenRailRoute Lokomotive)
(RV_Compatible BlumenStrasse LKW_Muenchen)
(At_Package Stuehle FrauenStrassePost)
(type_City Muenchen)
(type_City Ulm)
(type_City_Location FrauenStrassePost)
(type_City_Location HauptbahnhofMuenchen)
(type_City_Location HauptbahnhofUlm)
(type_City_Location MuenchnerStrassePost)
(type_Equipment_Position Eisenbahnwagen)
(type_Equipment_Position FrauenStrassePost)
(type_Equipment_Position HauptbahnhofMuenchen)
(type_Equipment_Position HauptbahnhofUlm)
(type_Equipment_Position LKW_Muenchen)
(type_Equipment_Position LKW_Ulm)
(type_Equipment_Position Lokomotive)
(type_Equipment_Position Muenchen)
(type_Equipment_Position MuenchnerStrassePost)
(type_Equipment_Position Ulm)
(type_Local_Road_Route BlumenStrasse)
(type_Local_Road_Route James_Franck_Ring)
(type_Location FrauenStrassePost)
(type_Location HauptbahnhofMuenchen)
(type_Location HauptbahnhofUlm)
(type_Location Muenchen)
(type_Location MuenchnerStrassePost)
(type_Location Ulm)
(type_Not_TCenter FrauenStrassePost)
(type_Not_TCenter MuenchnerStrassePost)
(type_Object Eisenbahnwagen)
(type_Object LKW_Muenchen)
(type_Object LKW_Ulm)
(type_Object Lokomotive)
(type_Object Stuehle)
(type_Package Stuehle)
(type_Package_Storage_Position Eisenbahnwagen)
(type_Package_Storage_Position FrauenStrassePost)
(type_Package_Storage_Position HauptbahnhofMuenchen)
(type_Package_Storage_Position HauptbahnhofUlm)
(type_Package_Storage_Position LKW_Muenchen)
(type_Package_Storage_Position LKW_Ulm)
(type_Package_Storage_Position Lokomotive)
(type_Package_Storage_Position Muenchen)
(type_Package_Storage_Position MuenchnerStrassePost)
(type_Package_Storage_Position Ulm)
(type_Parcels Stuehle)
(type_Physical Eisenbahnwagen)
(type_Physical LKW_Muenchen)
(type_Physical LKW_Ulm)
(type_Physical Stuehle)
(type_Post_Office FrauenStrassePost)
(type_Post_Office MuenchnerStrassePost)
(type_Rail_Route UlmMuenchenRailRoute)
(type_Regular Eisenbahnwagen)
(type_Regular LKW_Muenchen)
(type_Regular LKW_Ulm)
(type_Regular Stuehle)
(type_Regular_Traincar Eisenbahnwagen)
(type_Regular_Truck LKW_Muenchen)
(type_Regular_Truck LKW_Ulm)
(type_Regular_Vehicle Eisenbahnwagen)
(type_Regular_Vehicle LKW_Muenchen)
(type_Regular_Vehicle LKW_Ulm)
(type_Road_Route BlumenStrasse)
(type_Road_Route James_Franck_Ring)
(type_Route BlumenStrasse)
(type_Route James_Franck_Ring)
(type_Route UlmMuenchenRailRoute)
(type_TCenter HauptbahnhofMuenchen)
(type_TCenter HauptbahnhofUlm)
(type_Thing BlumenStrasse)
(type_Thing Eisenbahnwagen)
(type_Thing FrauenStrassePost)
(type_Thing HauptbahnhofMuenchen)
(type_Thing HauptbahnhofUlm)
(type_Thing James_Franck_Ring)
(type_Thing LKW_Muenchen)
(type_Thing LKW_Ulm)
(type_Thing Lokomotive)
(type_Thing Muenchen)
(type_Thing MuenchnerStrassePost)
(type_Thing Stuehle)
(type_Thing Ulm)
(type_Thing UlmMuenchenRailRoute)
(type_Train Lokomotive)
(type_Train_Station HauptbahnhofMuenchen)
(type_Train_Station HauptbahnhofUlm)
(type_Traincar Eisenbahnwagen)
(type_Truck LKW_Muenchen)
(type_Truck LKW_Ulm)
(type_Vehicle Eisenbahnwagen)
(type_Vehicle LKW_Muenchen)
(type_Vehicle LKW_Ulm)
(type_Vehicle Lokomotive)
(type_Vehicle_Position Eisenbahnwagen)
(type_Vehicle_Position FrauenStrassePost)
(type_Vehicle_Position HauptbahnhofMuenchen)
(type_Vehicle_Position HauptbahnhofUlm)
(type_Vehicle_Position Muenchen)
(type_Vehicle_Position MuenchnerStrassePost)
(type_Vehicle_Position Ulm)
(type_sort_for_BlumenStrasse BlumenStrasse)
(type_sort_for_Eisenbahnwagen Eisenbahnwagen)
(type_sort_for_FrauenStrassePost FrauenStrassePost)
(type_sort_for_HauptbahnhofMuenchen HauptbahnhofMuenchen)
(type_sort_for_HauptbahnhofUlm HauptbahnhofUlm)
(type_sort_for_James_Franck_Ring James_Franck_Ring)
(type_sort_for_LKW_Muenchen LKW_Muenchen)
(type_sort_for_LKW_Ulm LKW_Ulm)
(type_sort_for_Lokomotive Lokomotive)
(type_sort_for_Muenchen Muenchen)
(type_sort_for_MuenchnerStrassePost MuenchnerStrassePost)
(type_sort_for_Stuehle Stuehle)
(type_sort_for_Ulm Ulm)
(type_sort_for_UlmMuenchenRailRoute UlmMuenchenRailRoute)
)
((__top))
)
| null | https://raw.githubusercontent.com/panda-planner-dev/ipc2020-domains/9adb54325d3df35907adc7115fcc65f0ce5953cc/partial-order/UM-Translog/other/SHOP2/p-13.lisp | lisp | (defproblem problem domain
(
(In_City FrauenStrassePost Ulm)
(In_City MuenchnerStrassePost Muenchen)
(At_Vehicle LKW_Ulm FrauenStrassePost)
(At_Vehicle LKW_Muenchen HauptbahnhofMuenchen)
(At_Vehicle Eisenbahnwagen HauptbahnhofUlm)
(Serves HauptbahnhofUlm Ulm)
(Serves HauptbahnhofMuenchen Muenchen)
(Available HauptbahnhofUlm)
(Available HauptbahnhofMuenchen)
(At_Vehicle Lokomotive HauptbahnhofUlm)
(Connects James_Franck_Ring FrauenStrassePost HauptbahnhofUlm)
(Connects UlmMuenchenRailRoute HauptbahnhofUlm HauptbahnhofMuenchen)
(Connects BlumenStrasse HauptbahnhofMuenchen MuenchnerStrassePost)
(Available James_Franck_Ring)
(Available UlmMuenchenRailRoute)
(Available BlumenStrasse)
(Available LKW_Ulm)
(Available Lokomotive)
(Available LKW_Muenchen)
(PV_Compatible Stuehle LKW_Ulm)
(PV_Compatible Stuehle Eisenbahnwagen)
(PV_Compatible Stuehle LKW_Muenchen)
(RV_Compatible James_Franck_Ring LKW_Ulm)
(RV_Compatible UlmMuenchenRailRoute Lokomotive)
(RV_Compatible BlumenStrasse LKW_Muenchen)
(At_Package Stuehle FrauenStrassePost)
(type_City Muenchen)
(type_City Ulm)
(type_City_Location FrauenStrassePost)
(type_City_Location HauptbahnhofMuenchen)
(type_City_Location HauptbahnhofUlm)
(type_City_Location MuenchnerStrassePost)
(type_Equipment_Position Eisenbahnwagen)
(type_Equipment_Position FrauenStrassePost)
(type_Equipment_Position HauptbahnhofMuenchen)
(type_Equipment_Position HauptbahnhofUlm)
(type_Equipment_Position LKW_Muenchen)
(type_Equipment_Position LKW_Ulm)
(type_Equipment_Position Lokomotive)
(type_Equipment_Position Muenchen)
(type_Equipment_Position MuenchnerStrassePost)
(type_Equipment_Position Ulm)
(type_Local_Road_Route BlumenStrasse)
(type_Local_Road_Route James_Franck_Ring)
(type_Location FrauenStrassePost)
(type_Location HauptbahnhofMuenchen)
(type_Location HauptbahnhofUlm)
(type_Location Muenchen)
(type_Location MuenchnerStrassePost)
(type_Location Ulm)
(type_Not_TCenter FrauenStrassePost)
(type_Not_TCenter MuenchnerStrassePost)
(type_Object Eisenbahnwagen)
(type_Object LKW_Muenchen)
(type_Object LKW_Ulm)
(type_Object Lokomotive)
(type_Object Stuehle)
(type_Package Stuehle)
(type_Package_Storage_Position Eisenbahnwagen)
(type_Package_Storage_Position FrauenStrassePost)
(type_Package_Storage_Position HauptbahnhofMuenchen)
(type_Package_Storage_Position HauptbahnhofUlm)
(type_Package_Storage_Position LKW_Muenchen)
(type_Package_Storage_Position LKW_Ulm)
(type_Package_Storage_Position Lokomotive)
(type_Package_Storage_Position Muenchen)
(type_Package_Storage_Position MuenchnerStrassePost)
(type_Package_Storage_Position Ulm)
(type_Parcels Stuehle)
(type_Physical Eisenbahnwagen)
(type_Physical LKW_Muenchen)
(type_Physical LKW_Ulm)
(type_Physical Stuehle)
(type_Post_Office FrauenStrassePost)
(type_Post_Office MuenchnerStrassePost)
(type_Rail_Route UlmMuenchenRailRoute)
(type_Regular Eisenbahnwagen)
(type_Regular LKW_Muenchen)
(type_Regular LKW_Ulm)
(type_Regular Stuehle)
(type_Regular_Traincar Eisenbahnwagen)
(type_Regular_Truck LKW_Muenchen)
(type_Regular_Truck LKW_Ulm)
(type_Regular_Vehicle Eisenbahnwagen)
(type_Regular_Vehicle LKW_Muenchen)
(type_Regular_Vehicle LKW_Ulm)
(type_Road_Route BlumenStrasse)
(type_Road_Route James_Franck_Ring)
(type_Route BlumenStrasse)
(type_Route James_Franck_Ring)
(type_Route UlmMuenchenRailRoute)
(type_TCenter HauptbahnhofMuenchen)
(type_TCenter HauptbahnhofUlm)
(type_Thing BlumenStrasse)
(type_Thing Eisenbahnwagen)
(type_Thing FrauenStrassePost)
(type_Thing HauptbahnhofMuenchen)
(type_Thing HauptbahnhofUlm)
(type_Thing James_Franck_Ring)
(type_Thing LKW_Muenchen)
(type_Thing LKW_Ulm)
(type_Thing Lokomotive)
(type_Thing Muenchen)
(type_Thing MuenchnerStrassePost)
(type_Thing Stuehle)
(type_Thing Ulm)
(type_Thing UlmMuenchenRailRoute)
(type_Train Lokomotive)
(type_Train_Station HauptbahnhofMuenchen)
(type_Train_Station HauptbahnhofUlm)
(type_Traincar Eisenbahnwagen)
(type_Truck LKW_Muenchen)
(type_Truck LKW_Ulm)
(type_Vehicle Eisenbahnwagen)
(type_Vehicle LKW_Muenchen)
(type_Vehicle LKW_Ulm)
(type_Vehicle Lokomotive)
(type_Vehicle_Position Eisenbahnwagen)
(type_Vehicle_Position FrauenStrassePost)
(type_Vehicle_Position HauptbahnhofMuenchen)
(type_Vehicle_Position HauptbahnhofUlm)
(type_Vehicle_Position Muenchen)
(type_Vehicle_Position MuenchnerStrassePost)
(type_Vehicle_Position Ulm)
(type_sort_for_BlumenStrasse BlumenStrasse)
(type_sort_for_Eisenbahnwagen Eisenbahnwagen)
(type_sort_for_FrauenStrassePost FrauenStrassePost)
(type_sort_for_HauptbahnhofMuenchen HauptbahnhofMuenchen)
(type_sort_for_HauptbahnhofUlm HauptbahnhofUlm)
(type_sort_for_James_Franck_Ring James_Franck_Ring)
(type_sort_for_LKW_Muenchen LKW_Muenchen)
(type_sort_for_LKW_Ulm LKW_Ulm)
(type_sort_for_Lokomotive Lokomotive)
(type_sort_for_Muenchen Muenchen)
(type_sort_for_MuenchnerStrassePost MuenchnerStrassePost)
(type_sort_for_Stuehle Stuehle)
(type_sort_for_Ulm Ulm)
(type_sort_for_UlmMuenchenRailRoute UlmMuenchenRailRoute)
)
((__top))
)
| |
73d1aefb4fd995b4078919c5bde9165e7b306191e88801c1aebe9ecae0803855 | CLowcay/hgbc | Port.hs | # LANGUAGE RecordWildCards #
module Machine.GBC.Primitive.Port
( Port,
new,
newWithReadAction,
newWithReadMask,
alwaysUpdate,
neverUpdate,
read,
readDirect,
write,
writeDirect,
)
where
import Control.Monad.IO.Class (MonadIO (..))
import Data.Bits (Bits (..))
import Data.Word (Word8)
import Machine.GBC.Primitive.UnboxedRef
import Prelude hiding (read)
-- | A port is like an IORef but with a custom handler for writes.
data Port = Port
{ portWriteMask :: !Word8,
portValue :: !(UnboxedRef Word8),
portRead :: !(Word8 -> IO Word8),
portNotify :: !(Word8 -> Word8 -> IO Word8)
}
-- | Create a new port.
new ::
-- | Initial value.
Word8 ->
| Write mask . 1 indicates that the bit is writable .
Word8 ->
| Action to handle writes . Paramters are - > newValue - > valueToWrite .
(Word8 -> Word8 -> IO Word8) ->
IO Port
new value0 portWriteMask portNotify = do
portValue <- newUnboxedRef value0
let portRead = pure
pure Port {..}
-- | Create a new port with a custom action to run when reading.
newWithReadAction ::
-- | Initial value.
Word8 ->
| Write mask . 1 indicates that the bit is writable .
Word8 ->
-- | Action to perform on reads.
(Word8 -> IO Word8) ->
-- | Action to perform on writes.
(Word8 -> Word8 -> IO Word8) ->
IO Port
newWithReadAction value0 portWriteMask portRead portNotify = do
portValue <- newUnboxedRef value0
pure Port {..}
-- | Create a new port.
newWithReadMask ::
-- | Initial value.
Word8 ->
| Read mask . 1 indicates that the bit will always read as 1 .
Word8 ->
| Write mask . 1 indicates that the bit is writable .
Word8 ->
| Action to handle writes . Paramters are - > newValue - > valueToWrite .
(Word8 -> Word8 -> IO Word8) ->
IO Port
newWithReadMask value0 portReadMask portWriteMask portNotify = do
portValue <- newUnboxedRef value0
let portRead x = pure (x .|. portReadMask)
pure Port {..}
# INLINEABLE alwaysUpdate #
alwaysUpdate :: Applicative f => a -> b -> f b
alwaysUpdate _ = pure
# INLINEABLE neverUpdate #
neverUpdate :: Applicative f => a -> b -> f a
neverUpdate = const . pure
-- | Read from the port
# INLINE read #
read :: MonadIO m => Port -> m Word8
read Port {..} = liftIO . portRead =<< readUnboxedRef portValue
-- | Read from the port directly skipping the read mask.
# INLINE readDirect #
readDirect :: MonadIO m => Port -> m Word8
readDirect Port {..} = readUnboxedRef portValue
-- | Write to the port and notify any listeners.
# INLINE write #
write :: MonadIO m => Port -> Word8 -> m ()
write Port {..} newValue = do
oldValue <- readUnboxedRef portValue
newValue' <-
liftIO
(portNotify oldValue ((oldValue .&. complement portWriteMask) .|. newValue .&. portWriteMask))
writeUnboxedRef portValue newValue'
-- | Write the value of the port directly without any checks or notifications.
# INLINE writeDirect #
writeDirect :: MonadIO m => Port -> Word8 -> m ()
writeDirect Port {..} v = liftIO (writeUnboxedRef portValue v)
| null | https://raw.githubusercontent.com/CLowcay/hgbc/76a8cf91f3c3b160eadf019bc8fc75ef07601c2f/core/src/Machine/GBC/Primitive/Port.hs | haskell | | A port is like an IORef but with a custom handler for writes.
| Create a new port.
| Initial value.
| Create a new port with a custom action to run when reading.
| Initial value.
| Action to perform on reads.
| Action to perform on writes.
| Create a new port.
| Initial value.
| Read from the port
| Read from the port directly skipping the read mask.
| Write to the port and notify any listeners.
| Write the value of the port directly without any checks or notifications. | # LANGUAGE RecordWildCards #
module Machine.GBC.Primitive.Port
( Port,
new,
newWithReadAction,
newWithReadMask,
alwaysUpdate,
neverUpdate,
read,
readDirect,
write,
writeDirect,
)
where
import Control.Monad.IO.Class (MonadIO (..))
import Data.Bits (Bits (..))
import Data.Word (Word8)
import Machine.GBC.Primitive.UnboxedRef
import Prelude hiding (read)
data Port = Port
{ portWriteMask :: !Word8,
portValue :: !(UnboxedRef Word8),
portRead :: !(Word8 -> IO Word8),
portNotify :: !(Word8 -> Word8 -> IO Word8)
}
new ::
Word8 ->
| Write mask . 1 indicates that the bit is writable .
Word8 ->
| Action to handle writes . Paramters are - > newValue - > valueToWrite .
(Word8 -> Word8 -> IO Word8) ->
IO Port
new value0 portWriteMask portNotify = do
portValue <- newUnboxedRef value0
let portRead = pure
pure Port {..}
newWithReadAction ::
Word8 ->
| Write mask . 1 indicates that the bit is writable .
Word8 ->
(Word8 -> IO Word8) ->
(Word8 -> Word8 -> IO Word8) ->
IO Port
newWithReadAction value0 portWriteMask portRead portNotify = do
portValue <- newUnboxedRef value0
pure Port {..}
newWithReadMask ::
Word8 ->
| Read mask . 1 indicates that the bit will always read as 1 .
Word8 ->
| Write mask . 1 indicates that the bit is writable .
Word8 ->
| Action to handle writes . Paramters are - > newValue - > valueToWrite .
(Word8 -> Word8 -> IO Word8) ->
IO Port
newWithReadMask value0 portReadMask portWriteMask portNotify = do
portValue <- newUnboxedRef value0
let portRead x = pure (x .|. portReadMask)
pure Port {..}
# INLINEABLE alwaysUpdate #
alwaysUpdate :: Applicative f => a -> b -> f b
alwaysUpdate _ = pure
# INLINEABLE neverUpdate #
neverUpdate :: Applicative f => a -> b -> f a
neverUpdate = const . pure
# INLINE read #
read :: MonadIO m => Port -> m Word8
read Port {..} = liftIO . portRead =<< readUnboxedRef portValue
# INLINE readDirect #
readDirect :: MonadIO m => Port -> m Word8
readDirect Port {..} = readUnboxedRef portValue
# INLINE write #
write :: MonadIO m => Port -> Word8 -> m ()
write Port {..} newValue = do
oldValue <- readUnboxedRef portValue
newValue' <-
liftIO
(portNotify oldValue ((oldValue .&. complement portWriteMask) .|. newValue .&. portWriteMask))
writeUnboxedRef portValue newValue'
# INLINE writeDirect #
writeDirect :: MonadIO m => Port -> Word8 -> m ()
writeDirect Port {..} v = liftIO (writeUnboxedRef portValue v)
|
ab3775b240db3b8505a7a4d55a1717ddd18ad33394132650e2aa5b6efc7f6311 | Martoon-00/toy-compiler | ExpSpec.hs | # LANGUAGE OverloadedLists #
# LANGUAGE TemplateHaskell #
module Test.Examples.ExpSpec
( spec
) where
import Control.Category (id, (.))
import Control.Lens ((&))
import Data.Bits (xor, (.&.), (.|.))
import Data.Monoid ((<>))
import Prelude hiding (id, (.))
import Test.Hspec (Spec, describe, it)
import Test.QuickCheck (Large (..), Property, counterexample)
import Test.Arbitrary ()
import Test.Execution (describeExecWays, (>-*->), (~*~))
import Toy.Base
import Toy.Execution (ExecWay (..), defCompileX86, translateLang)
import Toy.Exp
import qualified Toy.Lang as L
spec :: Spec
spec = do
let ways =
[ Ex id
, Ex translateLang
, Ex $ defCompileX86 . translateLang
]
describe "expressions" $ do
describeExecWays ways $ \way -> do
describe "arithmetic" $ do
it "plus (uni)" $
uniopTest way (+ 5) (+ 5)
it "minus (uni)" $
uniopTest way (subtract 1) (subtract 1)
it "div (uni)" $
uniopTest way (quot 6) (6 /:)
it "two variables" $
binopTest way const const
it "plus" $
binopTest way (+) (+)
it "complex" $
complexArithTest way
describe "boolean" $ do
it "and" $
binopTest way (asToBool (&&)) (&&:)
it "or" $
binopTest way (asToBool (||)) (||:)
it "xor" $
binopTest way xor (^:)
it "bitwise and" $
binopTest way (.&.) (&:)
it "bitwise or" $
binopTest way (.|.) (|:)
describe "comparisons" $ do
it "<" $
binopTest way (binResToBool (<)) (<:)
it "==" $
binopTest way (binResToBool (==)) (==:)
it "complex" $
boolTest way
describe "misc" $ do
it "large" $
largeTest way
uniopTest
:: ExecWay L.Stmt
-> (Value -> Value)
-> (Exp -> Exp)
-> Property
uniopTest way f1 f2 =
let sample = L.readS "a" <> L.writeS (f2 "a")
in way & sample ~*~ f1
binopTest
:: ExecWay L.Stmt
-> (Value -> Value -> Value)
-> (Exp -> Exp -> Exp)
-> Property
binopTest way f1 f2 = head
[ counterexample "plain" $
way & sample ~*~ f1
, counterexample "large" $
way & sample ~*~ \(Large a) (Large b) -> f1 a b
]
where
sample = L.readS "a" <> L.readS "b" <> L.writeS ("a" `f2` "b")
complexArithTest :: ExecWay L.Stmt -> Property
complexArithTest = sample ~*~ fun
where
sample = mconcat
[ L.readS "a"
, L.readS "b"
, L.readS "c"
, L.writeS $ "a" +: "b" *: 10 -: "c" %: 2
]
fun :: Value -> Value -> Value -> Value
fun a b c = a + b * 10 - (c `rem` 2)
boolTest :: ExecWay L.Stmt -> Property
boolTest = sample ~*~ fun
where
sample = mconcat
[ L.readS "a"
, L.readS "b"
, L.readS "c"
, L.readS "d"
, L.readS "e"
, L.writeS $ "a" ==: "b" &&: "c" <=: "d" ^: "e"
]
fun :: Value -> Value -> Value -> Value -> Value -> Value
fun a b c d e = if (a == b) && (c <= xor d e) then 1 else 0
largeTest :: ExecWay L.Stmt -> Property
largeTest = sample & [] >-*-> [55]
where
sample = L.writeS $ foldr (+) 0 (ValueE <$> [1..10] :: [Exp])
| null | https://raw.githubusercontent.com/Martoon-00/toy-compiler/a325d56c367bbb673608d283197fcd51cf5960fa/test/Test/Examples/ExpSpec.hs | haskell | # LANGUAGE OverloadedLists #
# LANGUAGE TemplateHaskell #
module Test.Examples.ExpSpec
( spec
) where
import Control.Category (id, (.))
import Control.Lens ((&))
import Data.Bits (xor, (.&.), (.|.))
import Data.Monoid ((<>))
import Prelude hiding (id, (.))
import Test.Hspec (Spec, describe, it)
import Test.QuickCheck (Large (..), Property, counterexample)
import Test.Arbitrary ()
import Test.Execution (describeExecWays, (>-*->), (~*~))
import Toy.Base
import Toy.Execution (ExecWay (..), defCompileX86, translateLang)
import Toy.Exp
import qualified Toy.Lang as L
spec :: Spec
spec = do
let ways =
[ Ex id
, Ex translateLang
, Ex $ defCompileX86 . translateLang
]
describe "expressions" $ do
describeExecWays ways $ \way -> do
describe "arithmetic" $ do
it "plus (uni)" $
uniopTest way (+ 5) (+ 5)
it "minus (uni)" $
uniopTest way (subtract 1) (subtract 1)
it "div (uni)" $
uniopTest way (quot 6) (6 /:)
it "two variables" $
binopTest way const const
it "plus" $
binopTest way (+) (+)
it "complex" $
complexArithTest way
describe "boolean" $ do
it "and" $
binopTest way (asToBool (&&)) (&&:)
it "or" $
binopTest way (asToBool (||)) (||:)
it "xor" $
binopTest way xor (^:)
it "bitwise and" $
binopTest way (.&.) (&:)
it "bitwise or" $
binopTest way (.|.) (|:)
describe "comparisons" $ do
it "<" $
binopTest way (binResToBool (<)) (<:)
it "==" $
binopTest way (binResToBool (==)) (==:)
it "complex" $
boolTest way
describe "misc" $ do
it "large" $
largeTest way
uniopTest
:: ExecWay L.Stmt
-> (Value -> Value)
-> (Exp -> Exp)
-> Property
uniopTest way f1 f2 =
let sample = L.readS "a" <> L.writeS (f2 "a")
in way & sample ~*~ f1
binopTest
:: ExecWay L.Stmt
-> (Value -> Value -> Value)
-> (Exp -> Exp -> Exp)
-> Property
binopTest way f1 f2 = head
[ counterexample "plain" $
way & sample ~*~ f1
, counterexample "large" $
way & sample ~*~ \(Large a) (Large b) -> f1 a b
]
where
sample = L.readS "a" <> L.readS "b" <> L.writeS ("a" `f2` "b")
complexArithTest :: ExecWay L.Stmt -> Property
complexArithTest = sample ~*~ fun
where
sample = mconcat
[ L.readS "a"
, L.readS "b"
, L.readS "c"
, L.writeS $ "a" +: "b" *: 10 -: "c" %: 2
]
fun :: Value -> Value -> Value -> Value
fun a b c = a + b * 10 - (c `rem` 2)
boolTest :: ExecWay L.Stmt -> Property
boolTest = sample ~*~ fun
where
sample = mconcat
[ L.readS "a"
, L.readS "b"
, L.readS "c"
, L.readS "d"
, L.readS "e"
, L.writeS $ "a" ==: "b" &&: "c" <=: "d" ^: "e"
]
fun :: Value -> Value -> Value -> Value -> Value -> Value
fun a b c d e = if (a == b) && (c <= xor d e) then 1 else 0
largeTest :: ExecWay L.Stmt -> Property
largeTest = sample & [] >-*-> [55]
where
sample = L.writeS $ foldr (+) 0 (ValueE <$> [1..10] :: [Exp])
| |
5d2b8d0e16931e80eef20483be477d8de29ec3ca17a61a6c40067fa6d190fc6e | FMNSSun/Burlesque | main.hs | # LANGUAGE FlexibleContexts #
import Burlesque.Parser
import Burlesque.Types
import Burlesque.Eval
import Burlesque.Display
import System.Environment
import System.IO
import System.Console.Haskeline
import System.Console.Haskeline.Completion
import Data.List
import qualified Data.Map as M
-- | Read the Burlesque prelude source; 'runProgram' and
-- 'runProgramNoStdin' prepend it to every program they evaluate.
loadPrelude :: IO String
loadPrelude = readFile "Prelude.blsq"

-- | Run program @p@ with @stdin@ pushed on the stack as the initial value
-- and @file@ exposed to the program as the ____FILE variable.  Returns the
-- displayable (non-hidden) portion of the final stack, one entry per line.
runProgram :: String -> String -> String -> IO String
runProgram p stdin file = do
  p' <- loadPrelude
  result <- execStateT (eval (runParserWithString parseBlsq (p'++p))) ([BlsqStr stdin],[], M.fromList [(BlsqStr "____FILE", BlsqStr file)])
  return . unlines . map toDisplay . filter notHidden . fst' $ result

-- | Like 'runProgram' but with an empty initial stack (no stdin value).
runProgramNoStdin :: String -> String -> IO String
runProgramNoStdin p file = do
  p' <- loadPrelude
  result <- execStateT (eval (runParserWithString parseBlsq (p'++p))) ([],[], M.fromList [(BlsqStr "____FILE", BlsqStr file)])
  return . unlines . map toDisplay . filter notHidden . fst' $ result

-- | Alias for the interactive shell (same action as the --shell branch of
-- 'main'); not referenced elsewhere in this file.
runTheFreakingShell = runInputT settings burlesqueShell
-- | Command-line driver.  Dispatches on argv; every evaluation mode goes
-- through 'runProgram' / 'runProgramNoStdin', which prepend the prelude
-- themselves.
main = do
  args <- getArgs
  case args of
    ["--file",file] -> do
      prog <- readFile file
      cin <- getContents
      cout <- runProgram prog cin file
      putStr cout
    ["--file-no-stdin",file] -> do
      prog <- readFile file
      cout <- runProgramNoStdin prog file
      putStr cout
    ["--no-stdin",prog] -> do
      cout <- runProgramNoStdin prog ""
      putStr cout
    ["--shell"] -> runInputT settings burlesqueShell
    ["--version"] -> putStrLn "burlesque v1.6.9!"
    ["--stdin",prog] -> do
      cin <- getContents
      -- BUG FIX: this branch previously loaded the prelude and passed
      -- (p' ++ prog) to 'runProgram', which prepends the prelude again —
      -- so every prelude definition was parsed and evaluated twice.
      -- Pass the raw program instead, like the other branches do.
      cout <- runProgram prog cin ""
      putStr cout
    _ -> do putStrLn $ "Invalid usage"
            putStrLn " --file <path> Read code from file (incl. STDIN)"
            putStrLn " --file-no-stdin <path> Read code from file (excl. STDIN)"
            putStrLn " --no-stdin <code> Read code from argv (excl. STDIN)"
            putStrLn " --shell Start in shell mode"
            putStrLn " --version Print version info"
            putStrLn " --compile <path> Pseudo-compile file to haskell code"
            putStrLn " --stdin <code> Read code from argv (incl. STDIN)"
            putStrLn ""
            putStrLn "\tBurlesque\tRoman Muentener, 2012"
-- | Haskeline settings: tab-complete whitespace-delimited words via
-- 'search', keep no persistent history file, auto-record entered lines.
settings :: Settings IO
settings = Settings {
    complete = completeWord Nothing " \t" $ return . search,
    historyFile = Nothing,
    autoAddHistory = True
  }
search s = map simpleCompletion . filter (s `isPrefixOf`) $ map fst builtins
-- | Interactive REPL loop: read a line, run it as a Burlesque program with
-- an empty stack, print the result, repeat.  The literal input "exit!"
-- quits; EOF/interrupt (Nothing) aborts.
burlesqueShell = do
  line <- getInputLine "blsq ) "
  case line of
    Nothing -> outputStrLn "* Abort..." >> return ()
    Just "exit!" -> outputStrLn "* Exit!" >> return()
    Just q -> do cout <- lift $ runProgramNoStdin q ""
                 outputStr cout
                 burlesqueShell
| null | https://raw.githubusercontent.com/FMNSSun/Burlesque/1753bdf0186027f3920ab2bd95098f76afef3154/main.hs | haskell | # LANGUAGE FlexibleContexts #
import Burlesque.Parser
import Burlesque.Types
import Burlesque.Eval
import Burlesque.Display
import System.Environment
import System.IO
import System.Console.Haskeline
import System.Console.Haskeline.Completion
import Data.List
import qualified Data.Map as M
loadPrelude :: IO String
loadPrelude = readFile "Prelude.blsq"
runProgram :: String -> String -> String -> IO String
runProgram p stdin file = do
p' <- loadPrelude
result <- execStateT (eval (runParserWithString parseBlsq (p'++p))) ([BlsqStr stdin],[], M.fromList [(BlsqStr "____FILE", BlsqStr file)])
return . unlines . map toDisplay . filter notHidden . fst' $ result
runProgramNoStdin :: String -> String -> IO String
runProgramNoStdin p file = do
p' <- loadPrelude
result <- execStateT (eval (runParserWithString parseBlsq (p'++p))) ([],[], M.fromList [(BlsqStr "____FILE", BlsqStr file)])
return . unlines . map toDisplay . filter notHidden . fst' $ result
runTheFreakingShell = runInputT settings burlesqueShell
main = do
args <- getArgs
case args of
["--file",file] -> do
prog <- readFile file
cin <- getContents
cout <- runProgram prog cin file
putStr cout
["--file-no-stdin",file] -> do
prog <- readFile file
cout <- runProgramNoStdin prog file
putStr cout
["--no-stdin",prog] -> do
cout <- runProgramNoStdin prog ""
putStr cout
["--shell"] -> runInputT settings burlesqueShell
["--version"] -> putStrLn "burlesque v1.6.9!"
["--stdin",prog] -> do
cin <- getContents
p' <- loadPrelude
cout <- runProgram (p'++prog) cin ""
putStr cout
_ -> do putStrLn $ "Invalid usage"
putStrLn " --file <path> Read code from file (incl. STDIN)"
putStrLn " --file-no-stdin <path> Read code from file (excl. STDIN)"
putStrLn " --no-stdin <code> Read code from argv (excl. STDIN)"
putStrLn " --shell Start in shell mode"
putStrLn " --version Print version info"
putStrLn " --compile <path> Pseudo-compile file to haskell code"
putStrLn " --stdin <code> Read code from argv (incl. STDIN)"
putStrLn ""
putStrLn "\tBurlesque\tRoman Muentener, 2012"
settings :: Settings IO
settings = Settings {
complete = completeWord Nothing " \t" $ return . search,
historyFile = Nothing,
autoAddHistory = True
}
search s = map simpleCompletion . filter (s `isPrefixOf`) $ map fst builtins
burlesqueShell = do
line <- getInputLine "blsq ) "
case line of
Nothing -> outputStrLn "* Abort..." >> return ()
Just "exit!" -> outputStrLn "* Exit!" >> return()
Just q -> do cout <- lift $ runProgramNoStdin q ""
outputStr cout
burlesqueShell
| |
f7af93da14bd1ebd85f6f38ead81da34a6cd1261da57ed55a80d2b843c999150 | dmjio/hackernews | Test.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
# OPTIONS_GHC -fno - warn - orphans #
module Main where
import Data.Aeson
import Data.Either (isRight)
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import Test.Hspec (it, hspec, describe, shouldSatisfy, shouldBe)
import Test.QuickCheck
import Test.QuickCheck.Instances ()
import Web.HackerNews
-- | Test entry point: JSON round-trip properties for the API types plus
-- live-endpoint smoke tests (each must return a Right value).
main :: IO ()
main = do
  mgr <- newManager tlsManagerSettings
  let fetchesRight name action =
        it name $ (`shouldSatisfy` isRight) =<< action
  hspec $
    describe "HackerNews API tests" $ do
      -- encode/decode must be mutually inverse for each payload type
      it "should round trip Updates JSON" $ property $ \(x :: Updates) ->
        Just x == decode (encode x)
      it "should round trip Item JSON" $ property $ \(x :: Item) ->
        Just x == decode (encode x)
      it "should round trip User JSON" $ property $ \(x :: User) ->
        Just x == decode (encode x)
      fetchesRight "should retrieve item" (getItem mgr (ItemId 1000))
      -- item id 0 does not exist; the error must be NotFound specifically
      it "should return NotFound " $ do
        Left e <- getItem mgr (ItemId 0)
        e `shouldBe` NotFound
      fetchesRight "should retrieve user"         (getUser mgr (UserId "dmjio"))
      fetchesRight "should retrieve max item"     (getMaxItem mgr)
      fetchesRight "should retrieve top stories"  (getTopStories mgr)
      fetchesRight "should retrieve new stories"  (getNewStories mgr)
      fetchesRight "should retrieve best stories" (getBestStories mgr)
      fetchesRight "should retrieve ask stories"  (getAskStories mgr)
      fetchesRight "should retrieve show stories" (getShowStories mgr)
      fetchesRight "should retrieve job stories"  (getJobStories mgr)
      fetchesRight "should retrieve updates"      (getUpdates mgr)
| null | https://raw.githubusercontent.com/dmjio/hackernews/ebf350317cf832fbfcff3c1a54f3a2b0df1f9510/ghc-tests/Test.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE ScopedTypeVariables # | # OPTIONS_GHC -fno - warn - orphans #
module Main where
import Data.Aeson
import Data.Either (isRight)
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import Test.Hspec (it, hspec, describe, shouldSatisfy, shouldBe)
import Test.QuickCheck
import Test.QuickCheck.Instances ()
import Web.HackerNews
main :: IO ()
main = do
mgr <- newManager tlsManagerSettings
hspec $ do
describe "HackerNews API tests" $ do
it "should round trip Updates JSON" $ property $ \(x :: Updates) ->
Just x == decode (encode x)
it "should round trip Item JSON" $ property $ \(x :: Item) ->
Just x == decode (encode x)
it "should round trip User JSON" $ property $ \(x :: User) ->
Just x == decode (encode x)
it "should retrieve item" $ do
(`shouldSatisfy` isRight) =<< getItem mgr (ItemId 1000)
it "should return NotFound " $ do
Left x <- getItem mgr (ItemId 0)
x `shouldBe` NotFound
it "should retrieve user" $ do
(`shouldSatisfy` isRight) =<< getUser mgr (UserId "dmjio")
it "should retrieve max item" $ do
(`shouldSatisfy` isRight) =<< getMaxItem mgr
it "should retrieve top stories" $ do
(`shouldSatisfy` isRight) =<< getTopStories mgr
it "should retrieve new stories" $ do
(`shouldSatisfy` isRight) =<< getNewStories mgr
it "should retrieve best stories" $ do
(`shouldSatisfy` isRight) =<< getBestStories mgr
it "should retrieve ask stories" $ do
(`shouldSatisfy` isRight) =<< getAskStories mgr
it "should retrieve show stories" $ do
(`shouldSatisfy` isRight) =<< getShowStories mgr
it "should retrieve job stories" $ do
(`shouldSatisfy` isRight) =<< getJobStories mgr
it "should retrieve updates" $ do
(`shouldSatisfy` isRight) =<< getUpdates mgr
|
3412cced2c4f799f3dce257cc36e73f8f0e4d0956ff9e7cbf05fa22e6e890ad4 | walmartlabs/clojure-game-geek | server.clj | (ns my.clojure-game-geek.server
(:require [com.stuartsierra.component :as component]
[com.walmartlabs.lacinia.pedestal2 :as lp]
[io.pedestal.http :as http]))
;; Pedestal HTTP server component.
;; Fields: schema-provider — a component exposing the compiled Lacinia
;; schema under :schema; server — the running Pedestal server (nil when
;; stopped); port — the port passed to lacinia-pedestal's default service.
;; start builds the default service from the schema and boots it;
;; stop shuts the server down and clears the :server field.
(defrecord Server [schema-provider server port]
  component/Lifecycle
  (start [this]
    (assoc this :server (-> schema-provider
                            :schema
                            (lp/default-service {:port port})
                            http/create-server
                            http/start)))
  (stop [this]
    (http/stop server)
    (assoc this :server nil)))
(assoc this :server nil)))
| null | https://raw.githubusercontent.com/walmartlabs/clojure-game-geek/62b5fcf7b15e348eb7cda878097b40f4934f0f91/src/my/clojure_game_geek/server.clj | clojure | (ns my.clojure-game-geek.server
(:require [com.stuartsierra.component :as component]
[com.walmartlabs.lacinia.pedestal2 :as lp]
[io.pedestal.http :as http]))
(defrecord Server [schema-provider server port]
component/Lifecycle
(start [this]
(assoc this :server (-> schema-provider
:schema
(lp/default-service {:port port})
http/create-server
http/start)))
(stop [this]
(http/stop server)
(assoc this :server nil)))
| |
1a4cdc59a4a31b2ed0c024ec6d539c85282f51645b255d44c45cf8ce04d4ca33 | haskus/packages | JQuery.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
{-# LANGUAGE OverloadedStrings #-}
module Haskus.Web.JQuery
( jqueryHtmlHeader
)
where
import Haskus.Web.Html
import Data.Text
-- | Emit the script and stylesheet tags required by the jQuery widgets.
-- When the flag is True, the minimised bundle is referenced where one
-- exists; otherwise the regular build is used.
jqueryHtmlHeader :: Bool -> Html ()
jqueryHtmlHeader minimized = do
  let scriptTag :: Text -> Text -> Html ()
      scriptTag mini full =
        script_ [ src_ (if minimized then mini else full) ] (mempty :: Html ())
  mapM_ (uncurry scriptTag)
    [ -- jQuery core (TODO: add non-minimised jquery build)
      ("/script/jquery.min.js", "/script/jquery.min.js")
      -- jQuery UI: some widgets
    , ("/script/jquery-ui.min.js", "/script/jquery-ui.js")
      -- touch-screen support for jQuery UI (TODO: add non-minimised build)
    , ("/script/jquery-ui-touch.min.js", "/script/jquery-ui-touch.min.js")
      -- block the whole screen with a JS popup (TODO: minimised build)
    , ("/script/jquery-ui-block.js", "/script/jquery-ui-block.js")
      -- detect that images have finished loading
    , ("/script/jquery-imgload.min.js", "/script/jquery-imgload.js")
    ]
  link_ [ rel_ "stylesheet"
        , type_ "text/css"
        , href_ "/style/jquery-ui.css"
        ]
| null | https://raw.githubusercontent.com/haskus/packages/40ea6101cea84e2c1466bc55cdb22bed92f642a2/haskus-web/src/lib/Haskus/Web/JQuery.hs | haskell | # LANGUAGE OverloadedStrings #
| Add jquery headers
TODO: add normal jquery
JQuery UI: some widgets
TODO: add normal script
TODO: minimized script
ability to detect that images are loaded
From: | # LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
module Haskus.Web.JQuery
( jqueryHtmlHeader
)
where
import Haskus.Web.Html
import Data.Text
jqueryHtmlHeader :: Bool -> Html ()
jqueryHtmlHeader minimized = do
let
addScript :: Text -> Text -> Html ()
addScript mini normal =
script_ [ src_ (if minimized then mini else normal) ] (mempty :: Html ())
addScript
"/script/jquery.min.js"
addScript
"/script/jquery-ui.min.js"
"/script/jquery-ui.js"
allow jquery UI to support Touch Screens
addScript
"/script/jquery-ui-touch.min.js"
ability to block the whole screen with a JS popup
addScript
"/script/jquery-ui-block.js"
addScript
"/script/jquery-imgload.min.js"
"/script/jquery-imgload.js"
link_ [ rel_ "stylesheet"
, type_ "text/css"
, href_ "/style/jquery-ui.css"
]
|
8418927d451d563c6a7c81f53335264499acd8d6297dad35c27b58329b185f94 | haroldcarr/learn-haskell-coq-ml-etc | StrictX.hs | {-# LANGUAGE Strict #-}
# LANGUAGE StrictData #
# LANGUAGE TupleSections #
module Control.Monad.RWSIO.StrictX where
------------------------------------------------------------------------------
import Control.Monad.IO.Class
import Data.IORef
-- import Debug.Trace
------------------------------------------------------------------------------
# ANN module ( " HLint : ignore Reduce duplication " : : String ) #
------------------------------------------------------------------------------
-- | Shared environment: the read-only @r@ plus one 'IORef' holding the
-- writer log and the state together, so Writer/State updates are in-place.
type RWSRef r w s = (r, IORef (w, s))

-- | Reader/Writer/State over 'IO'.  Actions receive the 'RWSRef' and hand
-- it back; all Writer/State effects go through the 'IORef' inside it.
newtype RWSTIO r w s a = RWSTIO { unRWSTIO :: RWSRef r w s -> IO (a, RWSRef r w s) }

instance Functor (RWSTIO r w s) where
  fmap f m = RWSTIO $ \x -> do
    (a, ref) <- unRWSTIO m x
    pure (f a, ref)
  {-# INLINE fmap #-}

instance Monoid w => Applicative (RWSTIO r w s) where
  pure a = RWSTIO $ \x -> return (a, x)
  {-# INLINE pure #-}
  -- Effects run left-to-right; the environment returned by each action is
  -- discarded because it is always the same @x@ that was passed in.
  RWSTIO mf <*> RWSTIO mx = RWSTIO $ \x -> do
    (f, _) <- mf x
    (arg, _) <- mx x
    return (f arg, x)
  {-# INLINE (<*>) #-}

instance Monoid w => Monad (RWSTIO r w s) where
  return = pure
  {-# INLINE return #-}
  m >>= k = RWSTIO $ \x -> do
    (a, _) <- unRWSTIO m x
    (b, _) <- unRWSTIO (k a) x
    return (b, x)
  {-# INLINE (>>=) #-}
------------------------------------------------------------------------------
-- Reader

-- | Return the read-only environment.
ask :: RWSTIO r w s r
ask = RWSTIO $ \x@(r,_) -> return (r, x)
{-# INLINE ask #-}

-- | Return a projection of the read-only environment.
asks :: (r -> a) -> RWSTIO r w s a
asks f = RWSTIO $ \x@(r,_) -> return (f r, x)
{-# INLINE asks #-}

------------------------------------------------------------------------------
-- Writer

-- | Append @w@ on the right of the log held in the 'IORef'.
tell :: Monoid w => w -> RWSTIO r w s ()
tell w = RWSTIO $ \x@(_r, ref) -> do
  {- Earlier variant, kept for reference (modifyIORef' + trace):
  liftIO (modifyIORef' ref (\(w',s) -> ( trace ("\nwtell " ++ show w' ++ " " ++ show w)
                                         w'<>w
                                       , s)))
  -}
  (w', s) <- readIORef ref
  writeIORef ref (w' <> w, s)
  return ((), x)
{-# INLINE tell #-}

------------------------------------------------------------------------------
-- State

-- | Return the current state.
get :: RWSTIO r w s s
get = RWSTIO $ \x@(_,ref) -> do
  (_w, s) <- readIORef ref
  return (s, x)
{-# INLINE get #-}

-- | Return a projection of the current state.
gets :: (s -> a) -> RWSTIO r w s a
gets f = RWSTIO $ \x@(_,ref) -> do
  (_w, s) <- readIORef ref
  return (f s, x)
{-# INLINE gets #-}

-- | Replace the state, leaving the log untouched.
put :: s -> RWSTIO r w s ()
put s = RWSTIO $ \x@(_,ref) -> do
  -- Using modifyIORef' causes a space leak. The function given to modify isn't run until the end.
  -- trace "StateputliftIO" $ liftIO (modifyIORef' ref (\(w,_s) -> (trace "Stateput" w,s)))
  (w, _s) <- readIORef ref
  writeIORef ref (w, s)
  return ((), x)
{-# INLINE put #-}

-- | Apply a function to the state, leaving the log untouched.
modify :: (s -> s) -> RWSTIO r w s ()
modify f = RWSTIO $ \x@(_,ref) -> do
  (w, s) <- readIORef ref
  writeIORef ref (w, f s)
  return ((), x)
{-# INLINE modify #-}
------------------------------------------------------------------------------
-- | Allocate a fresh 'RWSRef' with an empty log and the given state.
initRWSTIO :: (MonadIO m, Monoid w) => r -> s -> m (RWSRef r w s)
initRWSTIO r s = (r,) <$> liftIO (newIORef (mempty, s))

-- | Reset both the log (to 'mempty') and the state (to @s@).
resetRWSTIO :: (MonadIO m, Monoid w) => RWSRef r w s -> s -> m ()
resetRWSTIO (_, ref) s =
  liftIO (writeIORef ref (mempty, s))

-- | Reset only the log, keeping the current state.
resetRWSTIOWriter :: (MonadIO m, Monoid w) => RWSRef r w s -> m ()
resetRWSTIOWriter (_, ref) =
  liftIO (modifyIORef' ref (\(_, s) -> (mempty, s)))

-- | Allocate a fresh environment and run one action in it.  Returns the
-- result, the final state, the accumulated log, and the environment so
-- that later calls can reuse it via 'runRWSTIO'.
runRWSTIO0
  :: (MonadIO m, Monoid w)
  => RWSTIO r w s a -> r -> s
  -> m (a, s, w, RWSRef r w s)
runRWSTIO0 act r s = do
  x@(_,ref) <- initRWSTIO r s
  liftIO $ do
    (a, _) <- unRWSTIO act x
    (w, s') <- readIORef ref
    pure (a, s', w, x)

-- | Typical usage: 'initRWSTIO' followed by one or more 'runRWSTIO'.
-- The log is cleared before the action runs; the state carries over
-- from the previous run.
runRWSTIO
  :: (MonadIO m, Monoid w)
  => RWSTIO r w s a -> RWSRef r w s
  -> m (a, s, w, RWSRef r w s)
runRWSTIO act x@(_,ref) =
  liftIO $ do
    resetRWSTIOWriter x
    (a, _) <- unRWSTIO act x
    (w, s') <- readIORef ref
    pure (a, s', w, x)

-- | Like 'runRWSTIO' but also resets the state to @s@ before running.
runRWSTIO'
  :: (MonadIO m, Monoid w)
  => RWSTIO r w s a -> s -> RWSRef r w s
  -> m (a, s, w, RWSRef r w s)
runRWSTIO' act s x@(_,ref) =
  liftIO $ do
    resetRWSTIO x s
    (a, _) <- unRWSTIO act x
    (w, s') <- readIORef ref
    pure (a, s', w, x)
| null | https://raw.githubusercontent.com/haroldcarr/learn-haskell-coq-ml-etc/b4e83ec7c7af730de688b7376497b9f49dc24a0e/haskell/topic/monads/2020-06-hc-reader-and-monad-write-state-io/src/Control/Monad/RWSIO/StrictX.hs | haskell | # LANGUAGE Strict #
----------------------------------------------------------------------------
import Debug.Trace
----------------------------------------------------------------------------
----------------------------------------------------------------------------
# INLINE (<*>) #
# INLINE (>>=) #
----------------------------------------------------------------------------
Reader
# INLINE asks #
----------------------------------------------------------------------------
Writer
liftIO (modifyIORef' ref (\(w',s) -> ( trace ("\nwtell " ++ show w' ++ " " ++ show w)
w'<>w
, s)))
# INLINE tell #
----------------------------------------------------------------------------
# INLINE get #
# INLINE gets #
Using modifyIORef' causes a space leak. The function given to modify isn't run until the end.
trace "StateputliftIO" $ liftIO (modifyIORef' ref (\(w,_s) -> (trace "Stateput" w,s)))
---------------------------------------------------------------------------- | # LANGUAGE StrictData #
# LANGUAGE TupleSections #
module Control.Monad.RWSIO.StrictX where
import Control.Monad.IO.Class
import Data.IORef
# ANN module ( " HLint : ignore Reduce duplication " : : String ) #
type RWSRef r w s = (r, IORef (w, s))
newtype RWSTIO r w s a = RWSTIO { unRWSTIO :: RWSRef r w s -> IO (a, RWSRef r w s) }
instance Functor (RWSTIO r w s) where
fmap f m = RWSTIO $ \x -> do
(a, ref) <- unRWSTIO m x
pure (f a, ref)
# INLINE fmap #
instance Monoid w => Applicative (RWSTIO r w s) where
pure a = RWSTIO $ \x -> return (a, x)
# INLINE pure #
RWSTIO mf <*> RWSTIO mx = RWSTIO $ \x -> do
(f, _) <- mf x
(arg, _) <- mx x
return (f arg, x)
instance Monoid w => Monad (RWSTIO r w s) where
return = pure
# INLINE return #
m >>= k = RWSTIO $ \x -> do
(a, _) <- unRWSTIO m x
(b, _) <- unRWSTIO (k a) x
return (b, x)
ask :: RWSTIO r w s r
ask = RWSTIO $ \x@(r,_) -> return (r, x)
# INLINE ask #
asks :: (r -> a) -> RWSTIO r w s a
asks f = RWSTIO $ \x@(r,_) -> return (f r, x)
tell :: Monoid w => w -> RWSTIO r w s ()
tell w = RWSTIO $ \x@(_r, ref) -> do
(w', s) <- readIORef ref
writeIORef ref (w' <> w, s)
return ((), x)
State
get :: RWSTIO r w s s
get = RWSTIO $ \x@(_,ref) -> do
(_w, s) <- readIORef ref
return (s, x)
gets :: (s -> a) -> RWSTIO r w s a
gets f = RWSTIO $ \x@(_,ref) -> do
(_w, s) <- readIORef ref
return (f s, x)
put :: s -> RWSTIO r w s ()
put s = RWSTIO $ \x@(_,ref) -> do
(w, _s) <- readIORef ref
writeIORef ref (w, s)
return ((), x)
# INLINE put #
modify :: (s -> s) -> RWSTIO r w s ()
modify f = RWSTIO $ \x@(_,ref) -> do
(w, s) <- readIORef ref
writeIORef ref (w, f s)
return ((), x)
# INLINE modify #
initRWSTIO :: (MonadIO m, Monoid w) => r -> s -> m (RWSRef r w s)
initRWSTIO r s = (r,) <$> liftIO (newIORef (mempty, s))
resetRWSTIO :: (MonadIO m, Monoid w) => RWSRef r w s -> s -> m ()
resetRWSTIO (_, ref) s =
liftIO (writeIORef ref (mempty, s))
resetRWSTIOWriter :: (MonadIO m, Monoid w) => RWSRef r w s -> m ()
resetRWSTIOWriter (_, ref) =
liftIO (modifyIORef' ref (\(_, s) -> (mempty, s)))
runRWSTIO0
:: (MonadIO m, Monoid w)
=> RWSTIO r w s a -> r -> s
-> m (a, s, w, RWSRef r w s)
runRWSTIO0 act r s = do
x@(_,ref) <- initRWSTIO r s
liftIO $ do
(a, _) <- unRWSTIO act x
(w, s') <- readIORef ref
pure (a, s', w, x)
| Typical usage : ' initRWSTIO ' followed by one or more ' runRWSTIO '
runRWSTIO
:: (MonadIO m, Monoid w)
=> RWSTIO r w s a -> RWSRef r w s
-> m (a, s, w, RWSRef r w s)
runRWSTIO act x@(_,ref) =
liftIO $ do
resetRWSTIOWriter x
(a, _) <- unRWSTIO act x
(w, s') <- readIORef ref
pure (a, s', w, x)
runRWSTIO'
:: (MonadIO m, Monoid w)
=> RWSTIO r w s a -> s -> RWSRef r w s
-> m (a, s, w, RWSRef r w s)
runRWSTIO' act s x@(_,ref) =
liftIO $ do
resetRWSTIO x s
(a, _) <- unRWSTIO act x
(w, s') <- readIORef ref
pure (a, s', w, x)
|
be0d76cdc300b8824606d5bff493b2ccbb0354cdac893d1d3a5fff23e31dda84 | neovimhaskell/nvim-hs | Classes.hs | {-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE ExistentialQuantification #
# LANGUAGE RecordWildCards #
{-# LANGUAGE OverloadedStrings #-}
|
Module : Neovim . Plugin . IPC.Classes
Description : Classes used for Inter Plugin Communication
Copyright : ( c )
License : Apache-2.0
Maintainer :
Stability : experimental
Portability : GHC
Module : Neovim.Plugin.IPC.Classes
Description : Classes used for Inter Plugin Communication
Copyright : (c) Sebastian Witte
License : Apache-2.0
Maintainer :
Stability : experimental
Portability : GHC
-}
module Neovim.Plugin.IPC.Classes (
SomeMessage (..),
Message (..),
FunctionCall (..),
Request (..),
Notification (..),
writeMessage,
readSomeMessage,
UTCTime,
getCurrentTime,
module Data.Int,
) where
import Neovim.Classes (
Generic,
Int64,
NFData (..),
Pretty (pretty),
deepseq,
(<+>),
)
import Neovim.Plugin.Classes (FunctionName, NeovimEventId)
import Data.Data (cast)
import Data.Int (Int64)
import Data.MessagePack (Object)
import Data.Time (UTCTime, formatTime, getCurrentTime)
import Data.Time.Locale.Compat (defaultTimeLocale)
import Prettyprinter (hardline, nest, viaShow)
import UnliftIO (
MonadIO (..),
MonadUnliftIO,
TMVar,
TQueue,
Typeable,
atomically,
evaluate,
readTQueue,
writeTQueue,
)
import Prelude
{- | Taken from xmonad and based on ideas in /An Extensible Dynamically-Typed
   Hierarchy of Exceptions/, Simon Marlow, 2006.

   User-extensible messages must be put into a value of this type, so that it
   can be sent to other plugins.
-}
data SomeMessage = forall msg. Message msg => SomeMessage msg
{- | This class allows type safe casting of 'SomeMessage' to an actual message.
   The cast is successful if the type you're expecting matches the type in the
   'SomeMessage' wrapper. This way, you can subscribe to an arbitrary message
   type withouth having to pattern match on the constructors. This also allows
   plugin authors to create their own message types without having to change the
   core code of /nvim-hs/.
-}
class (NFData message, Typeable message) => Message message where
    -- | Try to convert a given message to a value of the message type we are
    -- interested in. Will evaluate to 'Nothing' for any other type.
    fromMessage :: SomeMessage -> Maybe message
    fromMessage (SomeMessage message) = cast message
-- | Fully evaluate the message (via 'rnf') in the calling thread, then
-- enqueue it wrapped in 'SomeMessage'; forcing up front keeps exceptions
-- hidden in lazy fields on the writer's side rather than the reader's.
writeMessage :: (MonadUnliftIO m, Message message) => TQueue SomeMessage -> message -> m ()
writeMessage q message = liftIO $ do
    evaluate (rnf message)
    atomically $ writeTQueue q (SomeMessage message)
-- | Block until the next message arrives on the queue.
readSomeMessage :: MonadIO m => TQueue SomeMessage -> m SomeMessage
readSomeMessage = liftIO . atomically . readTQueue
-- | Haskell representation of supported Remote Procedure Call messages.
data FunctionCall
    = -- | Method name, parameters, result slot (Left error / Right value),
      -- timestamp of the call.
      FunctionCall FunctionName [Object] (TMVar (Either Object Object)) UTCTime
    deriving (Typeable, Generic)

instance NFData FunctionCall where
    -- The TMVar is only forced to WHNF ('seq'); the other fields are
    -- fully evaluated.
    rnf (FunctionCall f os v t) = f `deepseq` os `deepseq` v `seq` t `deepseq` ()

instance Message FunctionCall

instance Pretty FunctionCall where
    pretty (FunctionCall fname args _ t) =
        nest 2 $
            "Function call for:"
                <+> pretty fname
                <> hardline
                <> "Arguments:"
                <+> viaShow args
                <> hardline
                <> "Timestamp:"
                <+> (viaShow . formatTime defaultTimeLocale "%H:%M:%S (%q)") t
{- | A request is a data type containing the method to call, its arguments and
   an identifier used to map the result to the function that has been called.
-}
data Request = Request
    { -- | Name of the function to call.
      reqMethod :: FunctionName
    , -- | Identifier to map the result to a function call invocation.
      reqId :: !Int64
    , -- | Arguments for the function.
      reqArgs :: [Object]
    }
    deriving (Eq, Ord, Show, Typeable, Generic)

instance NFData Request

instance Message Request

instance Pretty Request where
    pretty Request{..} =
        nest 2 $
            "Request"
                <+> "#"
                <> pretty reqId
                <> hardline
                <> "Method:"
                <+> pretty reqMethod
                <> hardline
                <> "Arguments:"
                <+> viaShow reqArgs
{- | A notification is similar to a 'Request'. It essentially does the same
   thing, but the function is only called for its side effects. This type of
   message is sent by neovim if the caller there does not care about the result
   of the computation.
-}
data Notification = Notification
    { -- | Event name of the notification.
      notEvent :: NeovimEventId
    , -- | Arguments for the function.
      notArgs :: [Object]
    }
    deriving (Eq, Ord, Show, Typeable, Generic)

instance NFData Notification

instance Message Notification
instance Pretty Notification where
    pretty Notification{..} =
        nest 2 $
            "Notification"
                <> hardline
                <> "Event:"
                <+> pretty notEvent
                <> hardline
                <> "Arguments:"
                -- BUG FIX: the "Arguments:" label previously rendered
                -- 'notEvent' a second time; show the argument list instead.
                <+> viaShow notArgs
| null | https://raw.githubusercontent.com/neovimhaskell/nvim-hs/9d0a040c24f060da57e47e1b720b99ad32e6dfc7/src/Neovim/Plugin/IPC/Classes.hs | haskell | # LANGUAGE DeriveDataTypeable #
# LANGUAGE OverloadedStrings #
| Try to convert a given message to a value of the message type we are
interested in. Will evaluate to 'Nothing' for any other type.
| Method name, parameters, callback, timestamp
| A request is a data type containing the method to call, its arguments and
an identifier used to map the result to the function that has been called.
| Name of the function to call.
| Identifier to map the result to a function call invocation.
| Arguments for the function.
| Event name of the notification.
| Arguments for the function. | # LANGUAGE DeriveGeneric #
# LANGUAGE ExistentialQuantification #
# LANGUAGE RecordWildCards #
|
Module : Neovim . Plugin . IPC.Classes
Description : Classes used for Inter Plugin Communication
Copyright : ( c )
License : Apache-2.0
Maintainer :
Stability : experimental
Portability : GHC
Module : Neovim.Plugin.IPC.Classes
Description : Classes used for Inter Plugin Communication
Copyright : (c) Sebastian Witte
License : Apache-2.0
Maintainer :
Stability : experimental
Portability : GHC
-}
module Neovim.Plugin.IPC.Classes (
SomeMessage (..),
Message (..),
FunctionCall (..),
Request (..),
Notification (..),
writeMessage,
readSomeMessage,
UTCTime,
getCurrentTime,
module Data.Int,
) where
import Neovim.Classes (
Generic,
Int64,
NFData (..),
Pretty (pretty),
deepseq,
(<+>),
)
import Neovim.Plugin.Classes (FunctionName, NeovimEventId)
import Data.Data (cast)
import Data.Int (Int64)
import Data.MessagePack (Object)
import Data.Time (UTCTime, formatTime, getCurrentTime)
import Data.Time.Locale.Compat (defaultTimeLocale)
import Prettyprinter (hardline, nest, viaShow)
import UnliftIO (
MonadIO (..),
MonadUnliftIO,
TMVar,
TQueue,
Typeable,
atomically,
evaluate,
readTQueue,
writeTQueue,
)
import Prelude
| Taken from xmonad and based on ideas in /An Extensible Dynamically - Typed
Hierarchy of , , 2006 .
User - extensible messages must be put into a value of this type , so that it
can be sent to other plugins .
Hierarchy of Exceptions/, Simon Marlow, 2006.
User-extensible messages must be put into a value of this type, so that it
can be sent to other plugins.
-}
data SomeMessage = forall msg. Message msg => SomeMessage msg
| This class allows type safe casting of ' SomeMessage ' to an actual message .
The cast is successful if the type you 're expecting matches the type in the
' SomeMessage ' wrapper . This way , you can subscribe to an arbitrary message
type withouth having to pattern match on the constructors . This also allows
plugin authors to create their own message types without having to change the
core code of /nvim - hs/.
The cast is successful if the type you're expecting matches the type in the
'SomeMessage' wrapper. This way, you can subscribe to an arbitrary message
type withouth having to pattern match on the constructors. This also allows
plugin authors to create their own message types without having to change the
core code of /nvim-hs/.
-}
class (NFData message, Typeable message) => Message message where
fromMessage :: SomeMessage -> Maybe message
fromMessage (SomeMessage message) = cast message
writeMessage :: (MonadUnliftIO m, Message message) => TQueue SomeMessage -> message -> m ()
writeMessage q message = liftIO $ do
evaluate (rnf message)
atomically $ writeTQueue q (SomeMessage message)
readSomeMessage :: MonadIO m => TQueue SomeMessage -> m SomeMessage
readSomeMessage q = liftIO $ atomically (readTQueue q)
| Haskell representation of supported Remote Procedure Call messages .
data FunctionCall
FunctionCall FunctionName [Object] (TMVar (Either Object Object)) UTCTime
deriving (Typeable, Generic)
instance NFData FunctionCall where
rnf (FunctionCall f os v t) = f `deepseq` os `deepseq` v `seq` t `deepseq` ()
instance Message FunctionCall
instance Pretty FunctionCall where
pretty (FunctionCall fname args _ t) =
nest 2 $
"Function call for:"
<+> pretty fname
<> hardline
<> "Arguments:"
<+> viaShow args
<> hardline
<> "Timestamp:"
<+> (viaShow . formatTime defaultTimeLocale "%H:%M:%S (%q)") t
data Request = Request
reqMethod :: FunctionName
reqId :: !Int64
reqArgs :: [Object]
}
deriving (Eq, Ord, Show, Typeable, Generic)
instance NFData Request
instance Message Request
instance Pretty Request where
pretty Request{..} =
nest 2 $
"Request"
<+> "#"
<> pretty reqId
<> hardline
<> "Method:"
<+> pretty reqMethod
<> hardline
<> "Arguments:"
<+> viaShow reqArgs
| A notification is similar to a ' Request ' . It essentially does the same
thing , but the function is only called for its side effects . This type of
message is sent by neovim if the caller there does not care about the result
of the computation .
thing, but the function is only called for its side effects. This type of
message is sent by neovim if the caller there does not care about the result
of the computation.
-}
data Notification = Notification
notEvent :: NeovimEventId
notArgs :: [Object]
}
deriving (Eq, Ord, Show, Typeable, Generic)
instance NFData Notification
instance Message Notification
instance Pretty Notification where
pretty Notification{..} =
nest 2 $
"Notification"
<> hardline
<> "Event:"
<+> pretty notEvent
<> hardline
<> "Arguments:"
<+> viaShow notEvent
|
31e42ecf44abe903ba38f49e42c6f04e17b40d89860eec49098b7438fcaf5833 | wingo/fibers | sieve.scm | #!/usr/bin/env guile
# -*- scheme -*-
!#
(use-modules (ice-9 match)
(fibers)
(fibers channels))
;; Return a channel carrying the values of IN that are not divisible by P.
;; The filter runs forever in its own fiber (#:parallel? #t lets the
;; scheduler place it on another core).
(define (sieve p in)
  (let ((out (make-channel)))
    (spawn-fiber (lambda ()
                   (let lp ()
                     (let ((n (get-message in)))
                       (unless (zero? (modulo n p))
                         (put-message out n)))
                     (lp)))
                 #:parallel? #t)
    out))
;; Return a channel producing n, n+1, n+2, ... from a dedicated
;; (parallel) fiber that never terminates.
(define (integers-from n)
  (let ((out (make-channel)))
    (spawn-fiber
     (lambda ()
       (let loop ((i n))
         (put-message out i)
         (loop (1+ i))))
     #:parallel? #t)
    out))
;; Read and discard N messages from channel CH.
(define (take ch n)
  (do ((i 0 (1+ i)))
      ((= i n))
    (get-message ch)))
;; Return a channel of the primes in order: a concurrent sieve of
;; Eratosthenes that chains one `sieve` filter stage onto the integer
;; stream for every prime it emits.
(define (primes)
  (let ((out (make-channel)))
    (spawn-fiber (lambda ()
                   (let lp ((ch (integers-from 2)))
                     (let ((p (get-message ch)))
                       (put-message out p)
                       (lp (sieve p ch)))))
                 #:parallel? #t)
    out))
;; Entry point: `sieve.scm COUNT` computes (and discards) the first COUNT
;; primes inside a fibers scheduler — a concurrency benchmark.
(define (main args)
  (match args
    ((_ count)
     (let ((count (string->number count)))
       (run-fibers (lambda () (take (primes) count)))))))

;; Only auto-run when executed as a script, not when loaded into a REPL.
(when (batch-mode?) (main (program-arguments)))
| null | https://raw.githubusercontent.com/wingo/fibers/44d17e64e581fb281bae1390f74c762ecf089b58/benchmarks/sieve.scm | scheme | #!/usr/bin/env guile
# -*- scheme -*-
!#
(use-modules (ice-9 match)
(fibers)
(fibers channels))
(define (sieve p in)
(let ((out (make-channel)))
(spawn-fiber (lambda ()
(let lp ()
(let ((n (get-message in)))
(unless (zero? (modulo n p))
(put-message out n)))
(lp)))
#:parallel? #t)
out))
(define (integers-from n)
(let ((out (make-channel)))
(spawn-fiber (lambda ()
(let lp ((n n))
(put-message out n)
(lp (1+ n))))
#:parallel? #t)
out))
(define (take ch n)
(let lp ((n n))
(unless (zero? n)
(get-message ch)
(lp (1- n)))))
(define (primes)
(let ((out (make-channel)))
(spawn-fiber (lambda ()
(let lp ((ch (integers-from 2)))
(let ((p (get-message ch)))
(put-message out p)
(lp (sieve p ch)))))
#:parallel? #t)
out))
(define (main args)
(match args
((_ count)
(let ((count (string->number count)))
(run-fibers (lambda () (take (primes) count)))))))
(when (batch-mode?) (main (program-arguments)))
| |
9e2f7ef79851fb48f4c07b76436477208091ab1beff0190f31fdfb0a5d4f3e59 | camfort/camfort | Analysis.hs | |
Module : Camfort . Specification . Units . Analysis
Description : Helpers for units refactoring and analysis .
Copyright : ( c ) 2017 , , , , : Apache-2.0
Maintainer :
Stability : experimental
Module : Camfort.Specification.Units.Analysis
Description : Helpers for units refactoring and analysis.
Copyright : (c) 2017, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish
License : Apache-2.0
Maintainer :
Stability : experimental
-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE LambdaCase #
module Camfort.Specification.Units.Analysis
( UnitAnalysis
, compileUnits
, initInference
, runInference
, runUnitAnalysis
-- ** Helpers
, puName
, puSrcName
) where
import Camfort.Analysis
import Camfort.Analysis.Annotations (Annotation)
import Camfort.Analysis.CommentAnnotator (annotateComments)
import Camfort.Analysis.Logger (LogLevel(..))
import Camfort.Analysis.ModFile (withCombinedEnvironment)
import qualified Camfort.Specification.Units.Annotation as UA
import Camfort.Specification.Units.Environment
import Camfort.Specification.Units.InferenceBackend
import qualified Camfort.Specification.Units.InferenceBackendFlint as Flint
import qualified Camfort.Specification.Units.InferenceBackendSBV as BackendSBV
import Camfort.Specification.Units.ModFile
(genUnitsModFile, initializeModFiles, runCompileUnits)
import Camfort.Specification.Units.Monad
import Camfort.Specification.Units.MonadTypes
import Camfort.Specification.Units.Parser (unitParser)
import qualified Camfort.Specification.Units.Parser.Types as P
import Control.Lens ((^?), _1)
import Control.Monad
import Control.Monad.Reader
import Control.Monad.State.Strict
import Control.Monad.Writer.Lazy
import qualified Data.Array as A
import Data.Data (Data)
import Data.Generics.Uniplate.Operations
import qualified Data.IntMap.Strict as IM
import Data.List (nub, intercalate)
import qualified Data.Map.Strict as M
import Data.Maybe (isJust, fromMaybe, mapMaybe)
import qualified Data.Set as S
import Data.Text (Text)
import qualified Language.Fortran.AST as F
import Language.Fortran.Analysis (constExp, varName, srcName)
import qualified Language.Fortran.Analysis as FA
import qualified Language.Fortran.Analysis.SemanticTypes as FAS
import qualified Language.Fortran.Analysis.BBlocks as FAB
import qualified Language.Fortran.Analysis.DataFlow as FAD
import Language.Fortran.AST.Literal.Real (readRealLit, parseRealLit)
import Language.Fortran.Util.ModFile
import qualified Numeric.LinearAlgebra as H -- for debugging
import Prelude hiding (mod)
-- | Prepare to run an inference function.
initInference :: UnitSolver ()
initInference = do
  pf <- getProgramFile
  -- Parse unit annotations found in comments and link to their
  -- corresponding statements in the AST.
  let (linkedPF, _) =
        runWriter $ annotateComments unitParser
        (\srcSpan err -> tell $ "Error " ++ show srcSpan ++ ": " ++ show err) pf
  modifyProgramFile $ const linkedPF

  -- The following insert* functions examine the AST and insert
  -- mappings into the tables stored in the UnitState.

  -- First, find all given unit annotations and insert them into our
  -- mappings. Also obtain all unit alias definitions.
  insertGivenUnits

  -- For function or subroutine parameters (or return variables) that
  -- are not given explicit units, give them a parametric polymorphic
  -- unit.
  insertParametricUnits

  -- Any other variables get assigned a unique undetermined unit named
  -- after the variable. This assumes that all variables have unique
  -- names, which the renaming module already has assured.
  insertUndeterminedUnits

  -- Now take the information that we have gathered and annotate the
  -- variable expressions within the AST with it.
  annotateAllVariables

  -- Annotate the literals within the program based upon the
  -- Literals-mode option.
  annotateLiterals

  -- With the variable expressions annotated, we now propagate the
  -- information throughout the AST, giving units to as many
  -- expressions as possible, and also constraints wherever
  -- appropriate.
  propagateUnits

  -- Gather up all of the constraints that we identified in the AST.
  -- These constraints will include parametric polymorphic units that
  -- have not yet been instantiated into their particular uses.
  abstractCons <- extractConstraints
  dumpConsM "***abstractCons" abstractCons

  -- Eliminate all parametric polymorphic units by copying them for
  -- each specific use cases and substituting a unique call-site
  -- identifier that distinguishes each use-case from the others.
  cons <- applyTemplates abstractCons
  dumpConsM "***concreteCons" cons

  -- Remove any traces of CommentAnnotator, since the annotations can
  -- cause generic operations traversing the AST to get confused.
  modifyProgramFile UA.cleanLinks

  modifyConstraints (const cons)

  debugLogging
-- | Run a 'UnitSolver' analysis within a 'UnitAnalysis'.
runInference :: UnitSolver a -> UnitAnalysis (a, UnitState)
runInference solver = do
  pfOriginal <- asks unitProgramFile
  modFiles   <- lift analysisModFiles
  -- annotate, rename/combine environments, then run the dataflow
  -- passes needed by the units analysis (basic blocks, parameter
  -- variables, constant expressions)
  let (pfRenamed, _, _) =
        withCombinedEnvironment modFiles (fmap UA.mkUnitAnnotation pfOriginal)
      paramVars  = combinedParamVarMap modFiles
      pfAnalysed = FAD.analyseConstExps
                 . FAD.analyseParameterVars paramVars
                 . FAB.analyseBBlocks
                 $ pfRenamed
  runUnitSolver pfAnalysed $ do
    initializeModFiles
    initInference
    solver
--------------------------------------------------
-- | Seek out any parameters to functions or subroutines that do not
-- already have units, and insert parametric units for them into the
map of variables to UnitInfo .
insertParametricUnits :: UnitSolver ()
insertParametricUnits = do
  pf <- getProgramFile
  forM_ (universeBi pf :: [F.ProgramUnit UA]) $ \ pu -> do
    let fname = (puName pu, puSrcName pu)
    forM_ (indexedParams pu) $ \ (i, param) ->
      -- keep any unit already recorded for the parameter; otherwise
      -- record a parametric unit keyed by function name and position
      modifyVarUnitMap $ M.insertWith (curry snd) param (UnitParamPosAbs (fname, i))
-- | Return the list of parameters paired with its positional index.
-- Position 0 denotes the function return value; dummy arguments start
-- at 1 for subroutines, and follow the return value for functions.
indexedParams :: F.ProgramUnit UA -> [(Int, VV)]
indexedParams pu
  -- function with a declared result variable but no parameter list
  | F.PUFunction _ _ _ _ _ Nothing (Just r) _ _ <- pu = [(0, toVV r)]
  -- function with neither: the function name itself is the result
  | F.PUFunction _ _ _ _ _ Nothing _ _ _ <- pu = [(0, (fname, sfname))]
  | F.PUFunction _ _ _ _ _ (Just paList) (Just r) _ _ <- pu = zip [0..] $ map toVV (r : F.aStrip paList)
  | F.PUFunction _ _ _ _ _ (Just paList) _ _ _ <- pu = zip [0..] $ (fname, sfname) : map toVV (F.aStrip paList)
  -- subroutines have no result, so parameters are numbered from 1
  | F.PUSubroutine _ _ _ _ (Just paList) _ _ <- pu = zip [1..] $ map toVV (F.aStrip paList)
  | otherwise = []
  where
    fname  = puName pu
    sfname = puSrcName pu
    -- pair the unique (renamed) name with the source name
    toVV e = (varName e, srcName e)
--------------------------------------------------
-- | Any remaining variables with unknown units are given unit UnitVar
-- with a unique name (in this case, taken from the unique name of the
-- variable as provided by the renamer), or UnitParamVarAbs if the
-- variables are inside of a function or subroutine.
insertUndeterminedUnits :: UnitSolver ()
insertUndeterminedUnits = do
  pf      <- getProgramFile
  declMap <- lift . lift $ M.union (extractDeclMap pf) . combinedDeclMap <$> analysisModFiles
  -- run the per-variable pass over each program unit's blocks; the
  -- traversal returns the (unchanged) unit, only the VarUnitMap
  -- side effects matter, so results are discarded
  mapM_ (modifyPUBlocksM (transformBiM (insertUndeterminedUnitVar declMap)))
        (universeBi pf :: [F.ProgramUnit UA])
-- Specifically handle variables
insertUndeterminedUnitVar :: DeclMap -> F.Expression UA -> UnitSolver (F.Expression UA)
insertUndeterminedUnitVar dmap expr = case expr of
  F.ExpValue _ _ (F.ValVariable _)
    | Just (FA.IDType { FA.idVType = Just sty }) <- FA.idType (F.getAnnotation expr)
    , isAcceptableType sty -> do
        -- key on (unique name, source name); a unit already present
        -- for this variable wins over the undetermined one
        let key = (varName expr, srcName expr)
        modifyVarUnitMap $ M.insertWith (curry snd) key (toUnitVar dmap key)
        pure expr
  _ -> pure expr
-- Choose UnitVar or UnitParamVarAbs depending upon how the variable was declared.
toUnitVar :: DeclMap -> VV -> UnitInfo
toUnitVar dmap (vname, sname) =
  case fst <$> M.lookup vname dmap of
    -- declared inside a named function or subroutine: parametric
    Just (DCFunction (F.Named fv, F.Named fs))   -> UnitParamVarAbs ((fv, fs), (vname, sname))
    Just (DCSubroutine (F.Named fv, F.Named fs)) -> UnitParamVarAbs ((fv, fs), (vname, sname))
    -- otherwise a plain undetermined unit named after the variable
    _                                            -> UnitVar (vname, sname)
-- Insert undetermined units annotations on the following types of variables.
isAcceptableType :: FAS.SemType -> Bool
isAcceptableType ty = case ty of
  FAS.TReal    _ -> True
  FAS.TComplex _ -> True
  FAS.TInteger _ -> True
  _              -> False
--------------------------------------------------
-- | Convert explicit polymorphic annotations such as (UnitName "'a")
-- into UnitParamEAPAbs with a 'context-unique-name' given by the
-- ProgramUnitName combined with the supplied unit name.
transformExplicitPolymorphism :: Maybe F.ProgramUnitName -> UnitInfo -> UnitInfo
transformExplicitPolymorphism mname info = case (mname, info) of
  -- a unit name beginning with a quote is an explicit polymorphic unit
  (Just (F.Named f), UnitName a@('\'':_)) -> UnitParamEAPAbs (a, f ++ "_" ++ a)
  _                                       -> info
-- | Any units provided by the programmer through comment annotations
-- will be incorporated into the VarUnitMap.
insertGivenUnits :: UnitSolver ()
insertGivenUnits = do
  pf <- getProgramFile
  mapM_ checkPU (universeBi pf)
  where
    -- Look through each Program Unit for the comments
    checkPU :: F.ProgramUnit UA -> UnitSolver ()
    checkPU (F.PUComment a _ _)
      -- Look at unit assignment between function return variable and spec.
      | Just (P.UnitAssignment (Just vars) unitsAST) <- mSpec
      , Just pu <- mPU = insertPUUnitAssigns (toUnitInfo unitsAST) pu vars
      -- Add a new unit alias.
      | Just (P.UnitAlias name unitsAST) <- mSpec = modifyUnitAliasMap (M.insert name (toUnitInfo unitsAST))
      | otherwise = pure ()
      where
        mSpec = UA.unitSpec (FA.prevAnnotation a)
        mPU   = UA.unitPU (FA.prevAnnotation a)
    -- Other type of ProgramUnit (e.g. one with a body of blocks)
    -- NOTE(review): the comment-block pattern below was lost in this
    -- copy of the file; restored as b@(F.BlComment {}) so that 'b' is
    -- bound -- confirm against upstream.
    checkPU pu = mapM_ (checkBlockComment getName) [ b | b@(F.BlComment {}) <- universeBi (F.programUnitBody pu) ]
      where
        getName = case pu of
          F.PUFunction {}   -> Just $ F.getName pu
          F.PUSubroutine {} -> Just $ F.getName pu
          _                 -> Nothing

    -- Look through each comment that has some kind of unit annotation within it.
    checkBlockComment :: Maybe F.ProgramUnitName -> F.Block UA -> UnitSolver ()
    checkBlockComment pname (F.BlComment a _ _)
      -- Look at unit assignment between variable and spec.
      | Just (P.UnitAssignment (Just vars) unitsAST) <- mSpec
      , Just b <- mBlock = insertBlockUnitAssigns pname (toUnitInfo unitsAST) b vars
      -- Add a new unit alias.
      | Just (P.UnitAlias name unitsAST) <- mSpec = modifyUnitAliasMap (M.insert name (toUnitInfo unitsAST))
      | otherwise = pure ()
      where
        mSpec  = UA.unitSpec (FA.prevAnnotation a)
        mBlock = UA.unitBlock (FA.prevAnnotation a)
    checkBlockComment _ _ = error "received non-comment in checkBlockComment"

    -- Figure out the unique names of the referenced variables and
    -- then insert unit info under each of those names.
    insertBlockUnitAssigns :: Maybe F.ProgramUnitName -> UnitInfo -> F.Block UA -> [String] -> UnitSolver ()
    insertBlockUnitAssigns pname info (F.BlStatement _ _ _ (F.StDeclaration _ _ _ _ decls)) varRealNames = do
      -- figure out the 'unique name' of the varRealName that was found in the comment
      -- FIXME: account for module renaming
      -- FIXME: might be more efficient to allow access to variable renaming environ at this program point
      let info' = transform (transformExplicitPolymorphism pname) info
      let m = M.fromList [ ((varName e, srcName e), info')
                         | e@(F.ExpValue _ _ (F.ValVariable _)) <- universeBi decls :: [F.Expression UA]
                         , varRealName <- varRealNames
                         , varRealName == srcName e ]
      modifyVarUnitMap $ M.unionWith const m
      modifyGivenVarSet . S.union . S.fromList . map fst . M.keys $ m
    insertBlockUnitAssigns _ _ _ _ = error "received non-statement/declaration in insertBlockUnitAssigns"

    -- Insert unit annotation for function return variable
    -- NOTE(review): when the annotation names a variable other than
    -- the return variable, the guard fails and control falls through
    -- to the error equation -- confirm that is intended.
    insertPUUnitAssigns :: UnitInfo -> F.ProgramUnit UA -> [String] -> UnitSolver ()
    insertPUUnitAssigns info pu@(F.PUFunction _ _ _ _ _ _ mret _ _) varRealNames
      | (retUniq, retSrc) <- case mret of Just ret -> (FA.varName ret, FA.srcName ret)
                                          Nothing  -> (puName pu, puSrcName pu)
      , retSrc `elem` varRealNames = do
          let pname = Just $ F.getName pu
          let info' = transform (transformExplicitPolymorphism pname) info
          let m = M.fromList [ ((retUniq, retSrc), info') ]
          modifyVarUnitMap $ M.unionWith const m
          modifyGivenVarSet . S.union . S.fromList . map fst . M.keys $ m
    insertPUUnitAssigns _ _ _ = error "received non-function in insertPUUnitAssigns"
--------------------------------------------------
-- | Take the unit information from the VarUnitMap and use it to
-- annotate every variable expression in the AST.
annotateAllVariables :: UnitSolver ()
annotateAllVariables = modifyProgramFileM $ \ pf -> do
varUnitMap <- getVarUnitMap
importedVariables <- getImportedVariables
let varUnitMap' = M.unionWith (curry snd) varUnitMap importedVariables
let annotateExp e@(F.ExpValue _ _ (F.ValVariable _))
| Just info <- M.lookup (varName e, srcName e) varUnitMap' = UA.setUnitInfo info e
-- may need to annotate intrinsics separately
annotateExp e = e
pure $ transformBi annotateExp pf
--------------------------------------------------
-- | Give units to literals based upon the rules of the Literals mode.
--
-- LitUnitless: All literals are unitless.
-- LitPoly:     All literals are polymorphic.
-- LitMixed:    The literal "0" or "0.0" is fully parametric polymorphic.
--              All other literals are monomorphic, possibly unitless.
annotateLiterals :: UnitSolver ()
-- Apply the per-program-unit literal pass over the whole file.
annotateLiterals = modifyProgramFileM (transformBiM annotateLiteralsPU)
-- | Annotate the literals within one program unit according to the
-- selected Literals mode.
annotateLiteralsPU :: F.ProgramUnit UA -> UnitSolver (F.ProgramUnit UA)
annotateLiteralsPU pu = do
  mode <- asks (uoLiterals . unitOpts)
  case mode of
    LitUnitless -> modifyPUBlocksM (transformBiM expUnitless) pu
    LitPoly     -> modifyPUBlocksM (transformBiM (withLiterals genParamLit)) pu
    LitMixed    -> modifyPUBlocksM (transformBiM expMixed) pu
  where
    -- Follow the LitMixed rules.
    expMixed e = case e of
      F.ExpValue _ _ (F.ValInteger i _)
        | read i == 0 -> withLiterals genParamLit e
        | otherwise   -> withLiterals genUnitLiteral e
      F.ExpValue _ _ (F.ValReal i _)
        | readRealLit i == 0.0 -> withLiterals genParamLit e
        | otherwise            -> withLiterals genUnitLiteral e
      F.ExpBinary a s op e1 e2
        | op `elem` [F.Multiplication, F.Division] -> case () of
            -- leave it alone if they're both constants
            _ | Just _ <- constExp (F.getAnnotation e1)
              , Just _ <- constExp (F.getAnnotation e2) -> pure e
            -- a constant multiplier is unitless
            _ | Just _ <- constExp (F.getAnnotation e1)
              , Just UnitLiteral{} <- UA.getUnitInfo e1 ->
                  pure $ F.ExpBinary a s op (UA.setUnitInfo UnitlessLit e1) e2
            -- a constant multiplier is unitless
              | Just _ <- constExp (F.getAnnotation e2)
              , Just UnitLiteral{} <- UA.getUnitInfo e2 ->
                  pure $ F.ExpBinary a s op e1 (UA.setUnitInfo UnitlessLit e2)
            _ -> pure e
      _ | Just _ <- constExp (F.getAnnotation e) -> case UA.getUnitInfo e of
            -- Treat constant expressions as if they were fresh
            -- literals, unless assigned units already.
            Just UnitLiteral{} -> genLit e
            Just UnitVar{}     -> genLit e
            _                  -> pure e
        | otherwise -> pure e

    -- Set all literals to unitless.
    expUnitless e
      | isLiteral e = pure $ UA.setUnitInfo UnitlessLit e
      | otherwise   = pure e

    -- Set all literals to the result of given monadic computation.
    withLiterals m e
      | isLiteral e = flip UA.setUnitInfo e <$> m
      | otherwise   = pure e

    -- isPolyCtxt = case pu of F.PUFunction {} -> True; F.PUSubroutine {} -> True; _ -> False

    -- Literal zero becomes parametric; any other literal gets a
    -- fresh monomorphic literal unit.
    genLit e
      | isLiteralZero e = withLiterals genParamLit e
      | otherwise       = withLiterals genUnitLiteral e
-- | Is it a literal, literally?
isLiteral :: F.Expression UA -> Bool
isLiteral expr = case expr of
  F.ExpValue _ _ F.ValReal{}    -> True
  F.ExpValue _ _ F.ValInteger{} -> True
  -- allow propagated constants to be interpreted as literals
  _                             -> isJust (constExp (F.getAnnotation expr))
-- | Is expression a literal and is it non-zero?
isLiteralNonZero :: F.Expression UA -> Bool
isLiteralNonZero expr = case expr of
  F.ExpValue _ _ (F.ValInteger i _) -> read i /= 0
  F.ExpValue _ _ (F.ValReal r _)    -> readRealLit r /= 0.0
  -- allow propagated constants to be interpreted as literals
  _ -> case constExp (F.getAnnotation expr) of
         Just (FA.ConstInt i)          -> i /= 0
         Just (FA.ConstUninterpInt s)  -> read s /= 0
         Just (FA.ConstUninterpReal s) -> readRealLit (parseRealLit s) /= 0.0
         _                             -> False
-- | Is the expression a literal (including propagated constants)
-- whose value is zero?
isLiteralZero :: F.Expression UA -> Bool
isLiteralZero x = isLiteral x && not (isLiteralNonZero x)
--------------------------------------------------
-- | Filter out redundant constraints.
cullRedundant :: Constraints -> Constraints
cullRedundant = nub . mapMaybe (\ con -> case con of
    -- drop trivially-satisfied equalities
    ConEq u1 u2 | u1 /= u2 -> Just con
    -- recursively cull conjunctions; fixed: the guard previously
    -- tested the pre-cull list 'cs', which let an all-redundant
    -- conjunction survive as 'ConConj []'
    ConConj cs | cs' <- cullRedundant cs, not (null cs') -> Just (ConConj cs')
    _ -> Nothing
  )
-- | Convert all parametric templates into actual uses, via substitution.
applyTemplates :: Constraints -> UnitSolver Constraints
-- postcondition: returned constraints lack all Parametric constructors
applyTemplates cons = do
  dumpConsM "applyTemplates" cons
  -- Get a list of the instances of parametric polymorphism from the constraints.
  let instances = nub [ (name, i) | UnitParamPosUse ((name, _), _, i) <- universeBi cons ]
  -- Also generate a list of 'dummy' instances to ensure that every
  -- 'toplevel' function and subroutine is thoroughly expanded and
  -- analysed, even if it is not used in the current ProgramFile. (It
  -- might be part of a library module, for instance).
  pf <- getProgramFile
  dummies <- forM (topLevelFuncsAndSubs pf) $ \ pu -> do
    ident <- freshId
    pure (puName pu, ident)

  logDebug' pf $ ("instances: " <> describeShow instances)
  logDebug' pf $ ("dummies: " <> describeShow dummies)

  importedVariables <- getImportedVariables
  -- Prepare constraints for all variables imported via StUse.
  let importedCons = [ ConEq (UnitVar vv) units | (vv, units) <- M.toList importedVariables ]
  -- Work through the instances, expanding their templates, and
  -- substituting the callId into the abstract parameters.
  concreteCons <- cullRedundant <$>
    liftM2 (++) (foldM (substInstance False []) importedCons instances)
                (foldM (substInstance True []) [] dummies)
  dumpConsM "applyTemplates: concreteCons" concreteCons
  -- Also include aliases in the final set of constraints, where
  -- aliases are implemented by simply asserting that they are equal
  -- to their definition.
  aliasMap <- getUnitAliasMap
  let aliases = [ ConEq (UnitAlias name) def | (name, def) <- M.toList aliasMap ]
  let transAlias (UnitName a) | a `M.member` aliasMap = UnitAlias a
      transAlias u = u
  dumpConsM "aliases" aliases
  pure . transformBi transAlias . cullRedundant $ cons ++ concreteCons ++ aliases
-- | Look up the Parametric templates for a given function or
-- subroutine, and do the substitutions. Process any additional
-- polymorphic calls that are uncovered, unless they are recursive
-- calls that have already been seen in the current call stack.
substInstance :: Bool -> [F.Name] -> Constraints -> (F.Name, Int) -> UnitSolver Constraints
substInstance isDummy callStack output (name, callId) = do
  tmap <- getTemplateMap

  -- Look up the templates associated with the given function or
  -- subroutine name. And then transform the templates by generating
  -- new callIds for any constraints created by function or subroutine
  -- calls contained within the templates.
  --
  -- The reason for this is because functions called by functions can
  -- be used in a parametric polymorphic way.

  -- npc <- nameParamConstraints name -- In case it is an imported function, use this.
  let npc = [] -- disabled for now
  template <- transformBiM callIdRemap $ npc `fromMaybe` M.lookup name tmap
  dumpConsM ("substInstance " ++ show isDummy ++ " " ++ show callStack ++ " " ++ show (name, callId) ++ " template lookup") template

  -- Reset the usCallIdRemap field so that it is ready for the next
  -- set of templates.
  modifyCallIdRemap (const IM.empty)

  -- If any new instances are discovered, also process them, unless recursive.
  let instances = nub [ (name', i) | UnitParamPosUse ((name', _), _, i) <- universeBi template ]
  template' <- if name `elem` callStack then
                 -- Detected recursion: we do not support polymorphic-unit recursion,
                 -- ergo all subsequent recursive calls are assumed to have the same
                 -- unit-assignments as the first call.
                 pure []
               else
                 foldM (substInstance False (name:callStack)) [] instances

  dumpConsM ("instantiating " ++ show (name, callId) ++ ": (output ++ template) is") (output ++ template)
  dumpConsM ("instantiating " ++ show (name, callId) ++ ": (template') is") template'

  -- Convert abstract parametric units into concrete ones.
  let output' = -- Do not instantiate explicitly annotated polymorphic
                -- variables from current context when looking at dummy (name, callId)
                (if isDummy then output ++ template
                 else instantiate callId (output ++ template)) ++
                -- Only instantiate explicitly annotated polymorphic
                -- variables from nested function/subroutine calls.
                instantiate callId template'
  dumpConsM ("final output for " ++ show (name, callId)) output'
  pure output'
-- | Generate constraints from a NameParamMap entry .
-- nameParamConstraints :: F.Name -> UnitSolver Constraints
-- nameParamConstraints fname = do
-- let filterForName (NPKParam (n, _) _) _ = n == fname
-- filterForName _ _ = False
--   nlst <- (M.toList . M.filterWithKey filterForName) <$> getNameParamMap
--   pure [ ConEq (UnitParamPosAbs (n, pos)) (foldUnits units) | (NPKParam n pos, units) <- nlst ]
-- | If given a usage of a parametric unit, rewrite the callId field
-- to follow an existing mapping in the usCallIdRemap state field, or
-- generate a new callId and add it to the usCallIdRemap state field.
callIdRemap :: UnitInfo -> UnitSolver UnitInfo
callIdRemap info = modifyCallIdRemapM $ \ idMap ->
  -- The four Use constructors shared the same lookup-or-fresh logic
  -- four times over; factored into one helper parameterised by the
  -- rebuilding function.
  let remap i rebuild = case IM.lookup i idMap of
        Just i' -> pure (rebuild i', idMap)
        Nothing -> do
          i' <- freshId
          pure (rebuild i', IM.insert i i' idMap)
  in case info of
       UnitParamPosUse (n, p, i) -> remap i (\ i' -> UnitParamPosUse (n, p, i'))
       UnitParamVarUse (n, v, i) -> remap i (\ i' -> UnitParamVarUse (n, v, i'))
       UnitParamLitUse (l, i)    -> remap i (\ i' -> UnitParamLitUse (l, i'))
       UnitParamEAPUse (v, i)    -> remap i (\ i' -> UnitParamEAPUse (v, i'))
       _                         -> pure (info, idMap)
-- | Convert a parametric template into a particular use.
instantiate :: Data a => Int -> a -> a
instantiate callId = transformBi subst
  where
    -- replace each abstract parametric unit with its call-site use
    subst (UnitParamPosAbs (name, position)) = UnitParamPosUse (name, position, callId)
    subst (UnitParamLitAbs litId)            = UnitParamLitUse (litId, callId)
    subst (UnitParamVarAbs (fname, vname))   = UnitParamVarUse (fname, vname, callId)
    subst (UnitParamEAPAbs vname)            = UnitParamEAPUse (vname, callId)
    subst other                              = other
-- | Return a list of ProgramUnits that might be considered 'toplevel'
-- in the ProgramFile, e.g., possible exports. These must be analysed
-- independently of whether they are actually used in the same file,
-- because other files might use them.
topLevelFuncsAndSubs :: F.ProgramFile a -> [F.ProgramUnit a]
topLevelFuncsAndSubs (F.ProgramFile _ pus) = topLevel =<< pus
  where
    -- descend into module and main-program CONTAINS sections
    topLevel (F.PUModule _ _ _ _ (Just contains)) = topLevel =<< contains
    topLevel (F.PUMain _ _ _ _ (Just contains))   = topLevel =<< contains
    -- restored: the as-patterns were lost in this copy, leaving
    -- 'f' and 's' unbound
    topLevel f@(F.PUFunction {})                  = pure f
    topLevel s@(F.PUSubroutine {})                = pure s
    topLevel _                                    = []
--------------------------------------------------
-- | Gather all constraints from the main blocks of the AST, as well as from the varUnitMap
extractConstraints :: UnitSolver Constraints
extractConstraints = do
  pf         <- getProgramFile
  dmap       <- lift . lift $ M.union (extractDeclMap pf) . combinedDeclMap <$> analysisModFiles
  varUnitMap <- getVarUnitMap
  -- equality constraints found in the main blocks, plus one
  -- constraint per variable binding it to its recorded unit
  let blockCons = [ con | b <- mainBlocks pf, con@ConEq{} <- universeBi b ]
      varCons   = [ ConEq (toUnitVar dmap v) u | (v, u) <- M.toList varUnitMap ]
  pure (blockCons ++ varCons)
-- | A list of blocks considered to be part of the 'main' program.
mainBlocks :: F.ProgramFile UA -> [F.Block UA]
mainBlocks pf = [ b | pu <- universeBi pf, b <- blocksOf pu ]
  where
    -- only main programs and module bodies count as 'main'
    blocksOf (F.PUMain _ _ _ bs _)   = bs
    blocksOf (F.PUModule _ _ _ bs _) = bs
    blocksOf _                       = []
--------------------------------------------------
-- | Propagate* functions: decorate the AST with constraints, given
-- that variables have all been annotated.
propagateUnits :: UnitSolver ()
-- precondition: all variables have already been annotated
-- Note: '<=<' composes right-to-left, so expressions are processed
-- first, then statements, do-specs, program units and interfaces;
-- inner unit info is thus available to the outer passes.
propagateUnits = modifyProgramFileM $ transformBiM propagateInterface <=<
                                      transformBiM propagatePU <=<
                                      transformBiM propagateDoSpec <=<
                                      transformBiM propagateStatement <=<
                                      transformBiM propagateExp
-- | Attach unit info and constraints to a single expression node,
-- assuming its sub-expressions have already been annotated.
propagateExp :: F.Expression UA -> UnitSolver (F.Expression UA)
propagateExp e = case e of
  F.ExpValue{} -> pure e -- all values should already be annotated
  -- multiplication multiplies units; division multiplies by the inverse
  F.ExpBinary _ _ F.Multiplication e1 e2 -> setF2 UnitMul (UA.getUnitInfo e1) (UA.getUnitInfo e2)
  F.ExpBinary _ _ F.Division e1 e2 -> setF2 UnitMul (UA.getUnitInfo e1) (flip UnitPow (-1) <$> UA.getUnitInfo e2)
  -- exponent must reduce to a constant expression
  F.ExpBinary _ _ F.Exponentiation e1 e2 -> setF2 UnitPow (UA.getUnitInfo e1) (constantExpression e2)
  -- additive/relational operators require both operands to have equal units
  F.ExpBinary _ _ o e1 e2 | isOp AddOp o -> setF2C ConEq (UA.getUnitInfo e1) (UA.getUnitInfo e2)
                          | isOp RelOp o -> setF2C ConEq (UA.getUnitInfo e1) (UA.getUnitInfo e2)
  F.ExpFunctionCall {} -> propagateFunctionCall e
  -- subscripting and unary operators preserve the operand's unit
  F.ExpSubscript _ _ e1 _ -> pure $ UA.maybeSetUnitInfo (UA.getUnitInfo e1) e
  F.ExpUnary _ _ _ e1 -> pure $ UA.maybeSetUnitInfo (UA.getUnitInfo e1) e
  F.ExpInitialisation{} -> pure e
  _ -> do
    logDebug' e $ "progagateExp: unhandled " <> describeShow e
    pure e
  where
    -- Shorter names for convenience functions.
    setF2 f u1 u2 = pure $ UA.maybeSetUnitInfoF2 f u1 u2 e
    -- Remember, not only set a constraint, but also give a unit!
    setF2C f u1 u2 = pure . UA.maybeSetUnitInfo u1 $ UA.maybeSetUnitConstraintF2 f u1 u2 e
propagateFunctionCall :: F.Expression UA -> UnitSolver (F.Expression UA)
propagateFunctionCall expr = case expr of
  F.ExpFunctionCall a s fn (F.AList a' s' args) -> do
    -- instantiate a call-site-specific template for this call
    (retUnit, args') <- callHelper fn args
    let intrinsicCons = intrinsicHelper retUnit fn args'
        rebuilt       = F.ExpFunctionCall a s fn (F.AList a' s' args')
    pure (UA.setConstraint (ConConj intrinsicCons) (UA.setUnitInfo retUnit rebuilt))
  _ -> error "received non-function-call in propagateFunctionCall"
propagateDoSpec :: F.DoSpecification UA -> UnitSolver (F.DoSpecification UA)
propagateDoSpec ast@(F.DoSpecification _ _ (F.StExpressionAssign _ _ e1 _) e2 m_e3) = do
  -- express constraints between the iteration variable, the bounding
  -- expressions and the step expression, or treat the step expression
  -- as a literal 1 if not specified.
  pure . maybe ast (flip UA.setConstraint ast) $ ConConj <$> mconcat [
    -- units(e1) ~ units(e2)
    (:[]) <$> liftM2 ConEq (UA.getUnitInfo e1) (UA.getUnitInfo e2)
    -- units(e1) ~ units(e3) or if e3 not specified then units(e1) ~ 1 in a polymorphic context
    , do u1 <- UA.getUnitInfo e1
         u3 <- (UA.getUnitInfo =<< m_e3) `mplus` if isMonomorphic u1 then mzero else pure UnitlessVar
         pure [ConEq u1 u3]
    -- units(e2) ~ units(e3) or if e3 not specified then units(e2) ~ 1 in a polymorphic context
    -- fixed: previously read the unit of e1 here, contradicting the
    -- stated intent and duplicating the group above
    , do u2 <- UA.getUnitInfo e2
         u3 <- (UA.getUnitInfo =<< m_e3) `mplus` if isMonomorphic u2 then mzero else pure UnitlessVar
         pure [ConEq u2 u3]
    ]
propagateDoSpec _ = error "propagateDoSpec: called on invalid DoSpec"
-- | Attach unit constraints to statements: assignments, subroutine
-- calls and declarations (via their initialising declarators).
propagateStatement :: F.Statement UA -> UnitSolver (F.Statement UA)
propagateStatement stmt = case stmt of
  -- LHS ~ RHS, with the literal special case applied
  F.StExpressionAssign _ _ e1 e2 -> literalAssignmentSpecialCase e1 e2 stmt
  -- subroutine call: build call-site-specific parametric constraints
  F.StCall a s sub (F.AList a' s' args) -> do
    (info, args') <- callHelper sub args
    let cons = intrinsicHelper info sub args'
    pure . UA.setConstraint (ConConj cons) $ F.StCall a s sub (F.AList a' s' args')
  -- declarations may carry initialisers, handled per declarator
  F.StDeclaration {} -> transformBiM propagateDeclarator stmt
  _ -> pure stmt
propagateDeclarator :: F.Declarator UA -> UnitSolver (F.Declarator UA)
-- only declarators with an initialising expression produce a constraint
propagateDeclarator d@(F.Declarator _ _ lhs _ _ (Just rhs)) = literalAssignmentSpecialCase lhs rhs d
propagateDeclarator d = pure d
-- Allow literal assignment to overload the non-polymorphic
-- unit-assignment of the non-zero literal.
literalAssignmentSpecialCase :: (F.Annotated f)
                             => F.Expression UA -> F.Expression UA
                             -> f UA -> UnitSolver (f UA)
literalAssignmentSpecialCase e1 e2 ast
  -- assigning literal zero imposes no constraint
  | isLiteralZero e2 = pure ast
  -- a non-zero literal assigned to a monomorphic LHS needs no constraint
  | isLiteral e2
  , Just u1 <- UA.getUnitInfo e1
  , Just UnitLiteral{} <- UA.getUnitInfo e2
  , isMonomorphic u1 = pure ast
  -- otherwise express the constraint between LHS and RHS of assignment.
  | otherwise = pure $ UA.maybeSetUnitConstraintF2 ConEq (UA.getUnitInfo e1) (UA.getUnitInfo e2) ast
-- Generic Interface template mapping will be same as first module procedure.
propagateInterface :: F.Block UA -> UnitSolver (F.Block UA)
propagateInterface b@(F.BlInterface _ _ (Just e) _ _ bs) = do
  let iname = varName e
  case [ varName e1 | F.StModuleProcedure _ _ (F.AList _ _ (e1:_)) <- universeBi bs :: [F.Statement UA] ] of
    mpname:_ -> do
      -- translate any instance of mpname into iname within the template
      let trans = transformBi (\ x -> if x == mpname then iname else x)
      -- copy (translated) template from first module procedure to interface
      modifyTemplateMap $ \ m -> fromMaybe m ((\ t -> M.insert iname (trans t) m) <$> M.lookup mpname m)
    _ ->
      pure ()
  pure b
propagateInterface b = pure b
propagatePU :: F.ProgramUnit UA -> UnitSolver (F.ProgramUnit UA)
propagatePU pu = do
  let name  = puName pu
  let sname = puSrcName pu
  let nn    = (name, sname)
  -- Constraints within the PU.
  -- NOTE(review): this binding was missing from this copy of the file
  -- (bodyCons was referenced below but never defined); reconstructed
  -- by gathering the equality constraints attached anywhere in the PU
  -- -- confirm against upstream.
  let bodyCons = [ con | con@(ConEq {}) <- universeBi pu ]
  varMap <- getVarUnitMap
  -- If any of the function/subroutine parameters was given an
  -- explicit unit annotation, then create a constraint between that
  -- explicit unit and the UnitParamPosAbs corresponding to the
  -- parameter. This way all other uses of the parameter get linked to
  -- the explicit unit annotation as well.
  givenCons <- forM (indexedParams pu) $ \ (i, param) ->
    case M.lookup param varMap of
      Just UnitParamPosAbs{} -> pure . ConEq (UnitParamVarAbs (nn, param)) $ UnitParamPosAbs (nn, i)
      Just u                 -> pure . ConEq u $ UnitParamPosAbs (nn, i)
      _                      -> pure . ConEq (UnitParamVarAbs (nn, param)) $ UnitParamPosAbs (nn, i)

  let cons = givenCons ++ bodyCons
  case pu of F.PUFunction {}   -> modifyTemplateMap (M.insert name cons)
             F.PUSubroutine {} -> modifyTemplateMap (M.insert name cons)
             _                 -> pure ()

  -- Set the unitInfo field of a function program unit to be the same
  -- as the unitInfo of its result.
  let pu' = case (pu, indexedParams pu) of
        (F.PUFunction {}, (0, res):_) -> UA.setUnitInfo (UnitParamPosAbs (nn, 0) `fromMaybe` M.lookup res varMap) pu
        _ -> pu

  pure (UA.setConstraint (ConConj cons) pu')
--------------------------------------------------
-- | Coalesce various function and subroutine call common code.
-- | Coalesce various function and subroutine call common code:
-- allocate a call-site identifier, constrain each annotated argument
-- to its parametric position, and build the return-position unit.
callHelper :: F.Expression UA -> [F.Argument UA] -> UnitSolver (UnitInfo, [F.Argument UA])
callHelper nexp args = do
  let name = (varName nexp, srcName nexp)
  let ctyp = FA.idCType =<< FA.idType (F.getAnnotation nexp)
  callId <- case ctyp of
    Just FA.CTExternal -> pure 0 -- if external with no further info then no polymorphism
    _                  -> freshId -- every call-site gets its own unique identifier
  let eachArg i arg@(F.Argument _ _ _ e)
        -- add site-specific parametric constraints to each argument
        | Just u <- UA.getUnitInfo e = UA.setConstraint (ConEq u (UnitParamPosUse (name, i, callId))) arg
        | otherwise = arg
  -- argument positions are numbered from 1; 0 is the return position
  let args' = zipWith eachArg [1..] args
  -- build a site-specific parametric unit for use on a return variable, if any
  let info = UnitParamPosUse (name, 0, callId)
  pure (info, args')
-- FIXME: use this function to create a list of constraints on intrinsic call-sites...
-- | Generate constraints for a call to an intrinsic: equate the return
-- value (position 0) and each argument of the call-site with the
-- corresponding entry from the 'intrinsicLookup' template, instantiated
-- for this particular callId.  Non-intrinsic (or unknown) expressions
-- yield no constraints.
intrinsicHelper :: Foldable t => UnitInfo -> F.Expression (FA.Analysis a) -> t b -> [Constraint]
intrinsicHelper (UnitParamPosUse (_, _, callId)) f@(F.ExpValue _ _ (F.ValIntrinsic _)) args
  | Just (retU, argUs) <- intrinsicLookup sname = zipWith eachArg [0..numArgs] (retU:argUs)
  where
    numArgs = length args
    sname   = srcName f
    vname   = varName f
    eachArg i u = ConEq (UnitParamPosUse ((vname, sname), i, callId)) (instantiate callId u)
intrinsicHelper _ _ _ = []
-- | Get info about intrinsics by source name 'sname', taking into
-- account the special case of those with arbitrary number of
-- arguments.
intrinsicLookup :: F.Name -> Maybe (UnitInfo, [UnitInfo])
intrinsicLookup sname = fmap adjust (M.lookup sname intrinsicUnits)
  where
    -- Variadic intrinsics store a single argument template; repeat it
    -- indefinitely so it covers however many arguments are supplied.
    adjust (retU, argUs)
      | sname `S.member` specialCaseArbitraryArgs = (retU, cycle argUs)
      | otherwise                                 = (retU, argUs)
-- | Generate a unique identifier for a literal encountered in the code.
genUnitLiteral :: UnitSolver UnitInfo
genUnitLiteral = do
  i <- freshId
  pure (UnitLiteral i)
-- | Generate a unique identifier for a polymorphic literal encountered in the code.
genParamLit :: UnitSolver UnitInfo
genParamLit = do
  i <- freshId
  pure (UnitParamLitAbs i)
-- Operate only on the blocks of a program unit, not the contained sub-programunits.
modifyPUBlocksM :: Monad m => ([F.Block a] -> m [F.Block a]) -> F.ProgramUnit a -> m (F.ProgramUnit a)
modifyPUBlocksM f pu = case pu of
  F.PUMain a s n b pus                  -> (\ b' -> F.PUMain a s n b' pus) <$> f b
  F.PUModule a s n b pus                -> (\ b' -> F.PUModule a s n b' pus) <$> f b
  F.PUSubroutine a s r n p b subs       -> (\ b' -> F.PUSubroutine a s r n p b' subs) <$> f b
  F.PUFunction a s r rec n p res b subs -> (\ b' -> F.PUFunction a s r rec n p res b' subs) <$> f b
  F.PUBlockData a s n b                 -> (\ b' -> F.PUBlockData a s n b') <$> f b
  F.PUComment {}                        -> pure pu -- no blocks: comments pass through unchanged
-- Fortran semantics for interpretation of constant expressions
-- involving numeric literals.

-- | A Fortran numeric constant: either floating-point or integral.
data FNum = FReal Double | FInt Integer

-- | Widen any 'FNum' to a Double.
fnumToDouble :: FNum -> Double
fnumToDouble (FReal x) = x
fnumToDouble (FInt x)  = fromIntegral x

-- | Arithmetic on 'FNum' following Fortran mixed-mode rules: if either
-- operand is real then the result is real, otherwise it is integral.
fAdd, fSub, fMul, fDiv, fPow :: FNum -> FNum -> FNum
fAdd (FReal x) fy      = FReal $ x + fnumToDouble fy
fAdd fx (FReal y)      = FReal $ fnumToDouble fx + y
fAdd (FInt x) (FInt y) = FInt $ x + y
fSub (FReal x) fy      = FReal $ x - fnumToDouble fy
fSub fx (FReal y)      = FReal $ fnumToDouble fx - y
fSub (FInt x) (FInt y) = FInt $ x - y
fMul (FReal x) fy      = FReal $ x * fnumToDouble fy
fMul fx (FReal y)      = FReal $ fnumToDouble fx * y
fMul (FInt x) (FInt y) = FInt $ x * y
fDiv (FReal x) fy      = FReal $ x / fnumToDouble fy
fDiv fx (FReal y)      = FReal $ fnumToDouble fx / y
fDiv (FInt x) (FInt y) = FInt $ x `quot` y -- Haskell quot truncates towards zero, like Fortran
fPow (FReal x) fy      = FReal $ x ** fnumToDouble fy
fPow fx (FReal y)      = FReal $ fnumToDouble fx ** y
fPow (FInt x) (FInt y)
  | y >= 0    = FInt $ x ^ y
  | otherwise = FReal $ fromIntegral x ^^ y
-- | Division over optional values, guarding explicitly against a zero
-- divisor (yields Nothing instead of an infinity or exception).
fDivMaybe :: Maybe FNum -> Maybe FNum -> Maybe FNum
fDivMaybe mx my = case my of
  Just y | fnumToDouble y == 0.0 -> Nothing
  _                              -> fDiv <$> mx <*> my
-- | Statically computes if the expression is a constant value.
constantExpression :: F.Expression a -> Maybe Double
constantExpression expr = fnumToDouble <$> go expr
  where
    -- Fold literal integers/reals and the basic arithmetic operators;
    -- anything else is treated as non-constant.
    go e = case e of
      F.ExpValue _ _ (F.ValInteger i _)      -> Just (FInt (read i))
      F.ExpValue _ _ (F.ValReal r _)         -> Just (FReal (readRealLit r))
      F.ExpBinary _ _ F.Addition e1 e2       -> fAdd <$> go e1 <*> go e2
      F.ExpBinary _ _ F.Subtraction e1 e2    -> fSub <$> go e1 <*> go e2
      F.ExpBinary _ _ F.Multiplication e1 e2 -> fMul <$> go e1 <*> go e2
      F.ExpBinary _ _ F.Division e1 e2       -> fDivMaybe (go e1) (go e2)
      F.ExpBinary _ _ F.Exponentiation e1 e2 -> fPow <$> go e1 <*> go e2
      -- FIXME: expand...
      _                                      -> Nothing
-- | Asks the question: is the operator within the given category?
isOp :: BinOpKind -> F.BinaryOp -> Bool
isOp cat op = binOpKind op == cat

-- | Coarse classification of Fortran binary operators.
data BinOpKind = AddOp | MulOp | DivOp | PowerOp | LogicOp | RelOp deriving Eq

-- | Classify a binary operator into its 'BinOpKind' category.
binOpKind :: F.BinaryOp -> BinOpKind
binOpKind op = case op of
  F.Addition       -> AddOp
  F.Subtraction    -> AddOp
  F.Multiplication -> MulOp
  F.Division       -> DivOp
  F.Exponentiation -> PowerOp
  F.Concatenation  -> AddOp
  F.GT             -> RelOp
  F.GTE            -> RelOp
  F.LT             -> RelOp
  F.LTE            -> RelOp
  F.EQ             -> RelOp
  F.NE             -> RelOp
  F.Or             -> LogicOp
  F.And            -> LogicOp
  F.XOr            -> LogicOp
  F.Equivalent     -> RelOp
  F.NotEquivalent  -> RelOp
  F.BinCustom _    -> RelOp
-- | Get information about imported variables coming from mod files.
getImportedVariables :: UnitSolver (M.Map VV UnitInfo)
getImportedVariables = do
  pf <- getProgramFile
  nmap <- getNameParamMap
  -- Translate a Use AST node into a pair mapping unique name to 'local' source name in this program file.
  let useToPair (F.UseID _ _ e)        = (varName e, srcName e)
      useToPair (F.UseRename _ _ e1 _) = (varName e1, srcName e1) -- (unique name, 'local' source name)
  -- A map of modules -> (maps of variables -> their unit info).
  let modnmaps = [ M.fromList (mapMaybe f (M.toList npkmap))
                 -- find all StUse statements and identify variables that need to be imported from nmap
                 | F.StUse _ _ e _ only alist <- universeBi pf :: [ F.Statement UA ]
                 , let mod = srcName e
                 , let uses = map useToPair (fromMaybe [] (F.aStrip <$> alist))
                 , Just npkmap <- [M.lookup (F.Named mod) nmap]
                 , let f (npk, ui) = case npk of
                         (NPKVariable (var, src))
                           -- import all variables from module -- apply any renames from uses
                           | only == F.Permissive -> Just (NPKVariable (var, src `fromMaybe` lookup var uses), ui)
                           -- only import variable mentioned in uses
                           | Just src' <- lookup var uses -> Just (NPKVariable (var, src'), ui)
                         _ -> Nothing
                 ]
  pure $ M.fromList [ (vv, foldUnits units) | (NPKVariable vv, units) <- M.toList (M.unions modnmaps) ]
--------------------------------------------------
-- | Log a debug-level message without attaching a source-span origin.
logDebugNoOrigin :: Text -> UnitSolver ()
logDebugNoOrigin msg = gets usProgramFile >>= \ pf -> logDebug' pf msg
-- | Dump a labelled set of constraints to the debug log, bracketed by
-- a dashed rule above and a caret rule below.
dumpConsM :: String -> Constraints -> UnitSolver ()
dumpConsM str cons = logDebugNoOrigin . describe . unlines $
    [replicate 50 '-', str ++ ":"] ++ map render cons ++ [replicate 50 '^']
  where
    render (ConEq u1 u2) = show (flattenUnits u1) ++ " === " ++ show (flattenUnits u2)
    render (ConConj cs)  = intercalate " && " (map render cs)
-- | Dump extensive information about the current solver state and
-- several alternative views of the constraint matrices to the debug
-- log.  Only intended for developing/debugging the solver itself.
debugLogging :: UnitSolver ()
debugLogging = do
  (logDebugNoOrigin . describe . unlines . map (\ (ConEq u1 u2) -> " ***AbsConstraint: " ++ show (flattenUnits u1) ++ " === " ++ show (flattenUnits u2))) =<< extractConstraints
  pf <- getProgramFile
  cons <- getConstraints
  vum <- getVarUnitMap
  logDebugNoOrigin . describe . unlines $ [ " " ++ show info ++ " :: " ++ n | ((n, _), info) <- M.toList vum ]
  logDebugNoOrigin ""
  uam <- getUnitAliasMap
  logDebugNoOrigin . describe . unlines $ [ " " ++ n ++ " = " ++ show info | (n, info) <- M.toList uam ]
  logDebugNoOrigin . describe . unlines $ map (\ (ConEq u1 u2) -> " ***Constraint: " ++ show (flattenUnits u1) ++ " === " ++ show (flattenUnits u2)) cons
  logDebugNoOrigin $ describeShow cons <> "\n"
  -- Per-program-unit templates for functions and subroutines.
  forM_ (universeBi pf) $ \ pu -> case pu of
    F.PUFunction {}
      | Just (ConConj con) <- UA.getConstraint pu ->
          logDebugNoOrigin . describe . unlines $ (puName pu ++ ":"):map (\ (ConEq u1 u2) -> " constraint: " ++ show (flattenUnits u1) ++ " === " ++ show (flattenUnits u2)) con
    F.PUSubroutine {}
      | Just (ConConj con) <- UA.getConstraint pu ->
          logDebugNoOrigin . describe . unlines $ (puName pu ++ ":"):map (\ (ConEq u1 u2) -> " constraint: " ++ show (flattenUnits u1) ++ " === " ++ show (flattenUnits u2)) con
    _ -> pure ()
  -- Matrix views of the constraint set.
  let (lhsM, rhsM, _, lhsColA, rhsColA) = constraintsToMatrices cons
  logDebugNoOrigin "--------------------------------------------------\nLHS Cols:"
  logDebugNoOrigin $ describeShow lhsColA
  logDebugNoOrigin "--------------------------------------------------\nRHS Cols:"
  logDebugNoOrigin $ describeShow rhsColA
  logDebugNoOrigin "--------------------------------------------------\nLHS M:"
  logDebugNoOrigin $ describeShow lhsM
  logDebugNoOrigin "--------------------------------------------------\nRHS M:"
  logDebugNoOrigin $ describeShow rhsM
  logDebugNoOrigin "--------------------------------------------------\nAUG M:"
  let augM = if H.rows rhsM == 0 || H.cols rhsM == 0 then lhsM else H.fromBlocks [[lhsM, rhsM]]
  logDebugNoOrigin $ describeShow augM
  logDebugNoOrigin "--------------------------------------------------\nSolved (hnf) M:"
  let hnfM = Flint.hnf augM
  logDebugNoOrigin $ describeShow hnfM
  logDebugNoOrigin "--------------------------------------------------\nSolved (normHNF) M:"
  let (solvedM, newColIndices) = Flint.normHNF augM
  logDebugNoOrigin . describeShow $ solvedM
  logDebugNoOrigin $ "newColIndices = " <> describeShow newColIndices
  logDebugNoOrigin "--------------------------------------------------\nLHS Cols with newColIndices:"
  let lhsCols = A.elems lhsColA ++ map (lhsColA A.!) newColIndices
  logDebugNoOrigin $ describe . unlines . map show $ zip [(0::Int)..] lhsCols
  -- Disabled: garbled leftovers of older SVD-based debug output; the
  -- label lines were already commented out and the second value print
  -- had lost its argument entirely.
  -- logDebugNoOrigin "--------------------------------------------------\nSolved (SVD) M:"
  -- logDebugNoOrigin $ show ( H.linearSolveSVD lhsM rhsM )
  -- logDebugNoOrigin "--------------------------------------------------\nSingular Values:"
  -- logDebugNoOrigin $ show ( )
  logDebugNoOrigin "--------------------------------------------------"
  logDebugNoOrigin $ "Rank LHS: " <> describeShow (H.rank lhsM)
  logDebugNoOrigin "--------------------------------------------------"
  let augA = if H.rows rhsM == 0 || H.cols rhsM == 0 then lhsM else H.fromBlocks [[lhsM, rhsM]]
  logDebugNoOrigin $ "Rank Augmented: " <> describeShow (H.rank augA)
  logDebugNoOrigin "--------------------------------------------------\nGenUnitAssignments:"
  let unitAssignments = genUnitAssignments cons
  logDebugNoOrigin . describe . unlines $ map (\ (u1s, u2) -> " ***UnitAssignment: " ++ show u1s ++ " === " ++ show (flattenUnits u2) ++ "\n") unitAssignments
  logDebugNoOrigin "--------------------------------------------------"
  let unitAssignmentsSBV = BackendSBV.genUnitAssignments cons
  logDebugNoOrigin . describe . unlines $ map (\ (u1s, u2) -> " ***UnitAssignmentSBV: " ++ show u1s ++ " === " ++ show (flattenUnits u2)) unitAssignmentsSBV
  logDebugNoOrigin "--------------------------------------------------\nProvenance:"
  let (augM', p) = provenance augM
  logDebugNoOrigin . describeShow $ augM'
  logDebugNoOrigin . describeShow $ p
--------------------------------------------------
-- convenience
-- | The unique (renamed) name of a program unit, or "_nameless" if it has none.
puName :: F.ProgramUnit UA -> F.Name
puName pu = case FA.puName pu of
  F.Named n -> n
  _         -> "_nameless"
-- | The source-level name of a program unit, or "_nameless" if it has none.
puSrcName :: F.ProgramUnit UA -> F.Name
puSrcName pu = case FA.puSrcName pu of
  F.Named n -> n
  _         -> "_nameless"
--------------------------------------------------
-- | Intrinsics that take an arbitrary number of arguments.  Entry in
-- table 'intrinsicUnits' will contain a single item in the argument
-- list, corresponding to the template used for all arguments.
specialCaseArbitraryArgs :: S.Set F.Name
specialCaseArbitraryArgs = S.fromList (maxFamily ++ minFamily)
  where
    maxFamily = ["max", "max0", "amax1", "dmax1", "amax0", "max1"]
    minFamily = ["min", "min0", "amin1", "dmin1", "amin0", "min1"]
-- | Intrinsics table: name => (return-unit, parameter-units). See also 'specialCaseArbitraryArgs'.
-- Abbreviations: a' and b' are explicit-polymorphic unit parameters; u
-- is the unitless unit.  Variadic MAX/MIN-family intrinsics carry a
-- single argument template, which 'intrinsicLookup' cycles over all
-- supplied arguments.  The max-family entries (max0, amax1, dmax1,
-- amax0, max1) were previously missing despite being listed in
-- 'specialCaseArbitraryArgs'; they mirror their min-family duals.
intrinsicUnits :: M.Map F.Name (UnitInfo, [UnitInfo])
intrinsicUnits =
  M.fromList
    [ ("transfer", (b', [a', b']))
    , ("abs", (a', [a']))
    , ("iabs", (a', [a']))
    , ("dabs", (a', [a']))
    , ("cabs", (a', [a']))
    , ("aimag", (a', [a']))
    , ("aint", (a', [a']))
    , ("dint", (a', [a']))
    , ("anint", (a', [a']))
    , ("dnint", (a', [a']))
    , ("cmplx", (a', [a']))
    , ("conjg", (a', [a']))
    , ("dble", (a', [a']))
    , ("dim", (a', [a', a']))
    , ("idim", (a', [a', a']))
    , ("ddim", (a', [a', a']))
    , ("dprod", (a', [a']))
    , ("ceiling", (a', [a']))
    , ("floor", (a', [a']))
    , ("int", (a', [a']))
    , ("ifix", (a', [a']))
    , ("idint", (a', [a']))
    , ("maxval", (a', [a']))
    , ("minval", (a', [a']))
    , ("max", (a', [a']))   -- special case: arbitrary # of parameters
    , ("max0", (a', [a']))  -- special case: arbitrary # of parameters
    , ("amax1", (a', [a'])) -- special case: arbitrary # of parameters
    , ("dmax1", (a', [a'])) -- special case: arbitrary # of parameters
    , ("amax0", (a', [a'])) -- special case: arbitrary # of parameters
    , ("max1", (a', [a']))  -- special case: arbitrary # of parameters
    , ("min", (a', [a']))   -- special case: arbitrary # of parameters
    , ("min0", (a', [a']))  -- special case: arbitrary # of parameters
    , ("amin1", (a', [a'])) -- special case: arbitrary # of parameters
    , ("dmin1", (a', [a'])) -- special case: arbitrary # of parameters
    , ("amin0", (a', [a'])) -- special case: arbitrary # of parameters
    , ("min1", (a', [a']))  -- special case: arbitrary # of parameters
    , ("mod", (a', [a', b']))
    , ("modulo", (a', [a', b']))
    , ("amod", (a', [a', b']))
    , ("dmod", (a', [a', b']))
    , ("nint", (a', [a']))
    , ("real", (a', [a']))
    , ("float", (a', [a']))
    , ("sngl", (a', [a']))
    , ("sign", (a', [a', b']))
    , ("isign", (a', [a', b']))
    , ("dsign", (a', [a', b']))
    , ("present", (a', [u]))
    , ("sqrt", (a', [UnitPow a' 2]))
    , ("dsqrt", (a', [UnitPow a' 2]))
    , ("csqrt", (a', [UnitPow a' 2]))
    , ("exp", (u, [u]))
    , ("dexp", (u, [u]))
    , ("cexp", (u, [u]))
    , ("alog", (u, [u]))
    , ("dlog", (u, [u]))
    , ("clog", (u, [u]))
    , ("alog10", (u, [u]))
    , ("dlog10", (u, [u]))
    , ("sin", (u, [u]))
    , ("dsin", (u, [u]))
    , ("csin", (u, [u]))
    , ("cos", (u, [u]))
    , ("dcos", (u, [u]))
    , ("ccos", (u, [u]))
    , ("tan", (u, [u]))
    , ("dtan", (u, [u]))
    , ("asin", (u, [u]))
    , ("dasin", (u, [u]))
    , ("acos", (u, [u]))
    , ("dacos", (u, [u]))
    , ("atan", (u, [u]))
    , ("datan", (u, [u]))
    , ("atan2", (u, [a', a']))
    , ("datan2", (u, [a', a']))
    , ("sinh", (u, [u]))
    , ("dsinh", (u, [u]))
    , ("cosh", (u, [u]))
    , ("dcosh", (u, [u]))
    , ("tanh", (u, [u]))
    , ("dtanh", (u, [u]))
    , ("iand", (a', [a', a']))
    ]
  where
    a' = UnitParamEAPAbs ("'a", "'a")
    b' = UnitParamEAPAbs ("'b", "'b")
    u  = UnitlessVar
-- Others: reshape, merge need special handling
-- | Compile a program to a 'ModFile' containing units information.
compileUnits :: UnitOpts -> ModFiles -> F.ProgramFile Annotation -> IO ModFile
compileUnits uo mfs pf = do
  -- Re-analyse the program file under the combined environment of the
  -- given mod files; pf' carries the unit annotations and is used when
  -- generating the resulting mod file.
  let (pf', _, _) = withCombinedEnvironment mfs . fmap UA.mkUnitAnnotation $ pf
  -- NOTE(review): the solver is seeded with the original pf, not pf';
  -- 'runInference' recomputes the combined environment itself, so this
  -- looks intentional -- confirm before refactoring.
  let analysis = runReaderT (runInference runCompileUnits) $
        UnitEnv
        { unitOpts = uo
        , unitProgramFile = pf
        }
  report <- runAnalysisT (F.pfGetFilename pf) (logOutputNone True) LogError mfs analysis
  case report ^? arResult . _ARSuccess . _1 of
    Just cu -> return (genUnitsModFile pf' cu)
    Nothing -> fail "compileUnits: units analysis failed"
| null | https://raw.githubusercontent.com/camfort/camfort/861646ae5af61a41d1519049cfeda60ac82f3d98/src/Camfort/Specification/Units/Analysis.hs | haskell | # LANGUAGE OverloadedStrings #
** Helpers
for debugging
| Prepare to run an inference function.
mappings. Also obtain all unit alias definitions.
For function or subroutine parameters (or return variables) that
are not given explicit units, give them a parametric polymorphic
unit.
Any other variables get assigned a unique undetermined unit named
after the variable. This assumes that all variables have unique
names, which the renaming module already has assured.
Now take the information that we have gathered and annotate the
variable expressions within the AST with it.
Annotate the literals within the program based upon the
Literals-mode option.
With the variable expressions annotated, we now propagate the
expressions as possible, and also constraints wherever
appropriate.
These constraints will include parametric polymorphic units that
have not yet been instantiated into their particular uses.
Eliminate all parametric polymorphic units by copying them for
each specific use cases and substituting a unique call-site
identifier that distinguishes each use-case from the others.
Remove any traces of CommentAnnotator, since the annotations can
------------------------------------------------
| Seek out any parameters to functions or subroutines that do not
already have units, and insert parametric units for them into the
Insert a parametric unit if the variable does not already have a unit.
| Return the list of parameters paired with its positional index.
------------------------------------------------
with a unique name (in this case, taken from the unique name of the
variables are inside of a function or subroutine.
Specifically handle variables
Insert undetermined units annotations on the following types of variables.
------------------------------------------------
| Convert explicit polymorphic annotations such as (UnitName "'a")
ProgramUnitName combined with the supplied unit name.
| Any units provided by the programmer through comment annotations
will be incorporated into the VarUnitMap.
Look through each Program Unit for the comments
Look at unit assignment between function return variable and spec.
Add a new unit alias.
Look through each comment that has some kind of unit annotation within it.
Look at unit assignment between variable and spec.
Add a new unit alias.
Figure out the unique names of the referenced variables and
then insert unit info under each of those names.
figure out the 'unique name' of the varRealName that was found in the comment
FIXME: account for module renaming
FIXME: might be more efficient to allow access to variable renaming environ at this program point
Insert unit annotation for function return variable
------------------------------------------------
| Take the unit information from the VarUnitMap and use it to
annotate every variable expression in the AST.
may need to annotate intrinsics separately
------------------------------------------------
All other literals are monomorphic, possibly unitless.
leave it alone if they're both constants
a constant multiplier is unitless
a constant multiplier is unitless
Treat constant expressions as if they were fresh
literals, unless assigned units already.
Set all literals to unitless.
Set all literals to the result of given monadic computation.
| Is it a literal, literally?
allow propagated constants to be interpreted as literals
| Is expression a literal and is it non-zero?
allow propagated constants to be interpreted as literals
------------------------------------------------
| Filter out redundant constraints.
| Convert all parametric templates into actual uses, via substitution.
Get a list of the instances of parametric polymorphism from the constraints.
Also generate a list of 'dummy' instances to ensure that every
'toplevel' function and subroutine is thoroughly expanded and
might be part of a library module, for instance).
Work through the instances, expanding their templates, and
substituting the callId into the abstract parameters.
Also include aliases in the final set of constraints, where
aliases are implemented by simply asserting that they are equal
to their definition.
subroutine, and do the substitutions. Process any additional
polymorphic calls that are uncovered, unless they are recursive
calls that have already been seen in the current call stack.
Look up the templates associated with the given function or
subroutine name. And then transform the templates by generating
new callIds for any constraints created by function or subroutine
calls contained within the templates.
The reason for this is because functions called by functions can
be used in a parametric polymorphic way.
npc <- nameParamConstraints name -- In case it is an imported function, use this.
disabled for now
Reset the usCallIdRemap field so that it is ready for the next
set of templates.
If any new instances are discovered, also process them, unless recursive.
Detected recursion: we do not support polymorphic-unit recursion,
ergo all subsequent recursive calls are assumed to have the same
Convert abstract parametric units into concrete ones.
Do not instantiate explicitly annotated polymorphic
variables from current context when looking at dummy (name, callId)
Only instantiate explicitly annotated polymorphic
variables from nested function/subroutine calls.
| Generate constraints from a NameParamMap entry .
nameParamConstraints :: F.Name -> UnitSolver Constraints
nameParamConstraints fname = do
let filterForName (NPKParam (n, _) _) _ = n == fname
filterForName _ _ = False
| If given a usage of a parametric unit, rewrite the callId field
to follow an existing mapping in the usCallIdRemap state field, or
generate a new callId and add it to the usCallIdRemap state field.
| Convert a parametric template into a particular use.
independently of whether they are actually used in the same file,
because other files might use them.
------------------------------------------------
| A list of blocks considered to be part of the 'main' program.
------------------------------------------------
| Propagate* functions: decorate the AST with constraints, given
that variables have all been annotated.
precondition: all variables have already been annotated
all values should already be annotated
Shorter names for convenience functions.
Remember, not only set a constraint, but also give a unit!
express constraints between the iteration variable, the bounding
expressions and the step expression, or treat the step expression
as a literal 1 if not specified.
units(e1) ~ units(e2)
Allow literal assignment to overload the non-polymorphic
unit-assignment of the non-zero literal.
translate any instance of mpname into iname within the template
If any of the function/subroutine parameters was given an
explicit unit annotation, then create a constraint between that
parameter. This way all other uses of the parameter get linked to
the explicit unit annotation as well.
Set the unitInfo field of a function program unit to be the same
as the unitInfo of its result.
------------------------------------------------
| Coalesce various function and subroutine call common code.
if external with no further info then no polymorphism
every call-site gets its own unique identifier
add site-specific parametric constraints to each argument
build a site-specific parametric unit for use on a return variable, if any
| Get info about intrinsics by source name 'sname', taking into
account the special case of those with arbitrary number of
arguments.
| Generate a unique identifier for a literal encountered in the code.
| Generate a unique identifier for a polymorphic literal encountered in the code.
Operate only on the blocks of a program unit, not the contained sub-programunits.
no blocks
involving numeric literals.
| Statically computes if the expression is a constant value.
FIXME: expand...
| Asks the question: is the operator within the given category?
| Get information about imported variables coming from mod files.
Translate a Use AST node into a pair mapping unique name to 'local' source name in this program file.
(unique name, 'local' source name)
A map of modules -> (maps of variables -> their unit info).
import all variables from module -- apply any renames from uses
only import variable mentioned in uses
------------------------------------------------
logDebugNoOrigin "--------------------------------------------------\nSolved (SVD) M:"
logDebugNoOrigin "--------------------------------------------------\nSingular Values:"
------------------------------------------------
convenience
------------------------------------------------
| Intrinics that take arbitrary number of arguments. Entry in table
'intrinsicUnits' will contain a single item in the argument list,
corresponding to the template used for all arguments.
| Intrinsics table: name => (return-unit, parameter-units). See also 'specialCaseArbitraryArgs'.
special case: arbitrary # of parameters
special case: arbitrary # of parameters
special case: arbitrary # of parameters
special case: arbitrary # of parameters
special case: arbitrary # of parameters
special case: arbitrary # of parameters
special case: arbitrary # of parameters
Others: reshape, merge need special handling
| Compile a program to a 'ModFile' containing units information. | |
Module : Camfort . Specification . Units . Analysis
Description : Helpers for units refactoring and analysis .
Copyright : ( c ) 2017 , , , , : Apache-2.0
Maintainer :
Stability : experimental
Module : Camfort.Specification.Units.Analysis
Description : Helpers for units refactoring and analysis.
Copyright : (c) 2017, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish
License : Apache-2.0
Maintainer :
Stability : experimental
-}
# LANGUAGE LambdaCase #
module Camfort.Specification.Units.Analysis
( UnitAnalysis
, compileUnits
, initInference
, runInference
, runUnitAnalysis
, puName
, puSrcName
) where
import Camfort.Analysis
import Camfort.Analysis.Annotations (Annotation)
import Camfort.Analysis.CommentAnnotator (annotateComments)
import Camfort.Analysis.Logger (LogLevel(..))
import Camfort.Analysis.ModFile (withCombinedEnvironment)
import qualified Camfort.Specification.Units.Annotation as UA
import Camfort.Specification.Units.Environment
import Camfort.Specification.Units.InferenceBackend
import qualified Camfort.Specification.Units.InferenceBackendFlint as Flint
import qualified Camfort.Specification.Units.InferenceBackendSBV as BackendSBV
import Camfort.Specification.Units.ModFile
(genUnitsModFile, initializeModFiles, runCompileUnits)
import Camfort.Specification.Units.Monad
import Camfort.Specification.Units.MonadTypes
import Camfort.Specification.Units.Parser (unitParser)
import qualified Camfort.Specification.Units.Parser.Types as P
import Control.Lens ((^?), _1)
import Control.Monad
import Control.Monad.Reader
import Control.Monad.State.Strict
import Control.Monad.Writer.Lazy
import qualified Data.Array as A
import Data.Data (Data)
import Data.Generics.Uniplate.Operations
import qualified Data.IntMap.Strict as IM
import Data.List (nub, intercalate)
import qualified Data.Map.Strict as M
import Data.Maybe (isJust, fromMaybe, mapMaybe)
import qualified Data.Set as S
import Data.Text (Text)
import qualified Language.Fortran.AST as F
import Language.Fortran.Analysis (constExp, varName, srcName)
import qualified Language.Fortran.Analysis as FA
import qualified Language.Fortran.Analysis.SemanticTypes as FAS
import qualified Language.Fortran.Analysis.BBlocks as FAB
import qualified Language.Fortran.Analysis.DataFlow as FAD
import Language.Fortran.AST.Literal.Real (readRealLit, parseRealLit)
import Language.Fortran.Util.ModFile
import Prelude hiding (mod)
-- | Prepare to run an inference function.
initInference :: UnitSolver ()
initInference = do
  pf <- getProgramFile

  -- Parse unit annotations found in comments and link to their
  -- corresponding statements in the AST.
  let (linkedPF, _) =
        runWriter $ annotateComments unitParser
        (\srcSpan err -> tell $ "Error " ++ show srcSpan ++ ": " ++ show err) pf
  modifyProgramFile $ const linkedPF

  -- The following insert* functions examine the AST and insert
  -- mappings into the tables stored in the UnitState.

  -- First, find all given unit annotations and insert them into our
  -- mappings.  Also obtain all unit alias definitions.
  insertGivenUnits

  -- For function or subroutine parameters (or return variables) that
  -- are not given explicit units, give them a parametric polymorphic
  -- unit.
  insertParametricUnits

  -- Any other variables get assigned a unique undetermined unit named
  -- after the variable.  This assumes that all variables have unique
  -- names, which the renaming module already has assured.
  insertUndeterminedUnits

  -- Now take the information that we have gathered and annotate the
  -- variable expressions within the AST with it.
  annotateAllVariables

  -- Annotate the literals within the program based upon the
  -- Literals-mode option.
  annotateLiterals

  -- With the variable expressions annotated, we now propagate the
  -- information throughout the AST, giving units to as many
  -- expressions as possible, and also constraints wherever
  -- appropriate.
  propagateUnits

  -- Gather up all of the constraints that we identified in the AST.
  -- These constraints will include parametric polymorphic units that
  -- have not yet been instantiated into their particular uses.
  abstractCons <- extractConstraints
  dumpConsM "***abstractCons" abstractCons

  -- Eliminate all parametric polymorphic units by copying them for
  -- each specific use cases and substituting a unique call-site
  -- identifier that distinguishes each use-case from the others.
  cons <- applyTemplates abstractCons
  dumpConsM "***concreteCons" cons

  -- Remove any traces of CommentAnnotator, since the annotations can
  -- cause generic operations traversing the AST to get confused.
  modifyProgramFile UA.cleanLinks

  modifyConstraints (const cons)

  debugLogging
| Run a ' UnitSolver ' analysis within a ' UnitsAnalysis ' .
runInference :: UnitSolver a -> UnitAnalysis (a, UnitState)
runInference solver = do
pf <- asks unitProgramFile
mfs <- lift analysisModFiles
let (pf', _, _) = withCombinedEnvironment mfs . fmap UA.mkUnitAnnotation $ pf
let pvm = combinedParamVarMap mfs
let pf'' = FAD.analyseConstExps . FAD.analyseParameterVars pvm . FAB.analyseBBlocks $ pf'
runUnitSolver pf'' $ do
initializeModFiles
initInference
solver
-- | Seek out any parameters to functions or subroutines that do not
-- already have units, and insert parametric units for them into the
-- map of variables to UnitInfo.
insertParametricUnits :: UnitSolver ()
insertParametricUnits = getProgramFile >>= (mapM_ paramPU . universeBi)
  where
    paramPU pu =
      forM_ (indexedParams pu) $ \ (i, param) ->
        -- Insert a parametric unit if the variable does not already have a unit.
        modifyVarUnitMap $ M.insertWith (curry snd) param (UnitParamPosAbs (fname, i))
      where
        fname = (puName pu, puSrcName pu)
-- | Return the list of parameters paired with its positional index.
-- Position 0 is reserved for the function result (either the declared
-- result variable or the function's own name); explicit parameters of
-- functions follow from position 1 onwards, and subroutine parameters
-- are numbered from 1 (subroutines have no result).
indexedParams :: F.ProgramUnit UA -> [(Int, VV)]
indexedParams pu
  | F.PUFunction _ _ _ _ _ Nothing (Just r) _ _       <- pu = [(0, toVV r)]
  | F.PUFunction _ _ _ _ _ Nothing _ _ _              <- pu = [(0, (fname, sfname))]
  | F.PUFunction _ _ _ _ _ (Just paList) (Just r) _ _ <- pu = zip [0..] $ map toVV (r : F.aStrip paList)
  | F.PUFunction _ _ _ _ _ (Just paList) _ _ _        <- pu = zip [0..] $ (fname, sfname) : map toVV (F.aStrip paList)
  | F.PUSubroutine _ _ _ _ (Just paList) _ _          <- pu = zip [1..] $ map toVV (F.aStrip paList)
  | otherwise                                               = []
  where
    fname  = puName pu
    sfname = puSrcName pu
    toVV e = (varName e, srcName e)
-- | Any remaining variables with unknown units are given unit UnitVar
-- with a unique name (in this case, taken from the unique name of the
-- variable as provided by the renamer), or UnitParamVarAbs if the
-- variables are inside of a function or subroutine.
insertUndeterminedUnits :: UnitSolver ()
insertUndeterminedUnits = do
  pf <- getProgramFile
  dmap <- lift . lift $ M.union (extractDeclMap pf) . combinedDeclMap <$> analysisModFiles
  forM_ (universeBi pf :: [F.ProgramUnit UA]) $ \ pu ->
    modifyPUBlocksM (transformBiM (insertUndeterminedUnitVar dmap)) pu
-- Insert an undetermined unit annotation on a variable expression of
-- an acceptable (numeric) type; all other expressions pass through
-- unchanged.
insertUndeterminedUnitVar :: DeclMap -> F.Expression UA -> UnitSolver (F.Expression UA)
insertUndeterminedUnitVar dmap v@(F.ExpValue _ _ (F.ValVariable _))
  | Just (FA.IDType { FA.idVType = Just sty }) <- FA.idType (F.getAnnotation v)
  , isAcceptableType sty = do
      let vname = varName v
      let sname = srcName v
      let unit = toUnitVar dmap (vname, sname)
      -- M.insertWith (curry snd) keeps any existing entry, so a unit
      -- already recorded for this variable is never overwritten.
      modifyVarUnitMap $ M.insertWith (curry snd) (varName v, srcName v) unit
      pure v
insertUndeterminedUnitVar _ e = pure e
Choose UnitVar or UnitParamVarAbs depending upon how the variable was declared .
toUnitVar :: DeclMap -> VV -> UnitInfo
toUnitVar dmap (vname, sname) = unit
where
unit = case fst <$> M.lookup vname dmap of
Just (DCFunction (F.Named fvname, F.Named fsname)) -> UnitParamVarAbs ((fvname, fsname), (vname, sname))
Just (DCSubroutine (F.Named fvname, F.Named fsname)) -> UnitParamVarAbs ((fvname, fsname), (vname, sname))
_ -> UnitVar (vname, sname)
isAcceptableType :: FAS.SemType -> Bool
isAcceptableType = \case
FAS.TReal _ -> True
FAS.TComplex _ -> True
FAS.TInteger _ -> True
_ -> False
into UnitParamEAPAbs with a ' context - unique - name ' given by the
transformExplicitPolymorphism :: Maybe F.ProgramUnitName -> UnitInfo -> UnitInfo
transformExplicitPolymorphism (Just (F.Named f)) (UnitName a@('\'':_)) = UnitParamEAPAbs (a, f ++ "_" ++ a)
transformExplicitPolymorphism _ u = u
insertGivenUnits :: UnitSolver ()
insertGivenUnits = do
pf <- getProgramFile
mapM_ checkPU (universeBi pf)
where
checkPU :: F.ProgramUnit UA -> UnitSolver ()
checkPU (F.PUComment a _ _)
| Just (P.UnitAssignment (Just vars) unitsAST) <- mSpec
, Just pu <- mPU = insertPUUnitAssigns (toUnitInfo unitsAST) pu vars
| Just (P.UnitAlias name unitsAST) <- mSpec = modifyUnitAliasMap (M.insert name (toUnitInfo unitsAST))
| otherwise = pure ()
where
mSpec = UA.unitSpec (FA.prevAnnotation a)
mPU = UA.unitPU (FA.prevAnnotation a)
Other type of ProgramUnit ( e.g. one with a body of blocks )
checkPU pu = mapM_ (checkBlockComment getName) [ b | {} <- universeBi (F.programUnitBody pu) ]
where
getName = case pu of
F.PUFunction {} -> Just $ F.getName pu
F.PUSubroutine {} -> Just $ F.getName pu
_ -> Nothing
checkBlockComment :: Maybe F.ProgramUnitName -> F.Block UA -> UnitSolver ()
checkBlockComment pname (F.BlComment a _ _)
| Just (P.UnitAssignment (Just vars) unitsAST) <- mSpec
, Just b <- mBlock = insertBlockUnitAssigns pname (toUnitInfo unitsAST) b vars
| Just (P.UnitAlias name unitsAST) <- mSpec = modifyUnitAliasMap (M.insert name (toUnitInfo unitsAST))
| otherwise = pure ()
where
mSpec = UA.unitSpec (FA.prevAnnotation a)
mBlock = UA.unitBlock (FA.prevAnnotation a)
checkBlockComment _ _ = error "received non-comment in checkBlockComment"
insertBlockUnitAssigns :: Maybe F.ProgramUnitName -> UnitInfo -> F.Block UA -> [String] -> UnitSolver ()
insertBlockUnitAssigns pname info (F.BlStatement _ _ _ (F.StDeclaration _ _ _ _ decls)) varRealNames = do
let info' = transform (transformExplicitPolymorphism pname) info
let m = M.fromList [ ((varName e, srcName e), info')
| e@(F.ExpValue _ _ (F.ValVariable _)) <- universeBi decls :: [F.Expression UA]
, varRealName <- varRealNames
, varRealName == srcName e ]
modifyVarUnitMap $ M.unionWith const m
modifyGivenVarSet . S.union . S.fromList . map fst . M.keys $ m
insertBlockUnitAssigns _ _ _ _ = error "received non-statement/declaration in insertBlockUnitAssigns"
insertPUUnitAssigns :: UnitInfo -> F.ProgramUnit UA -> [String] -> UnitSolver ()
insertPUUnitAssigns info pu@(F.PUFunction _ _ _ _ _ _ mret _ _) varRealNames
| (retUniq, retSrc) <- case mret of Just ret -> (FA.varName ret, FA.srcName ret)
Nothing -> (puName pu, puSrcName pu)
, retSrc `elem` varRealNames = do
let pname = Just $ F.getName pu
let info' = transform (transformExplicitPolymorphism pname) info
let m = M.fromList [ ((retUniq, retSrc), info') ]
modifyVarUnitMap $ M.unionWith const m
modifyGivenVarSet . S.union . S.fromList . map fst . M.keys $ m
insertPUUnitAssigns _ _ _ = error "received non-function in insertPUUnitAssigns"
annotateAllVariables :: UnitSolver ()
annotateAllVariables = modifyProgramFileM $ \ pf -> do
varUnitMap <- getVarUnitMap
importedVariables <- getImportedVariables
let varUnitMap' = M.unionWith (curry snd) varUnitMap importedVariables
let annotateExp e@(F.ExpValue _ _ (F.ValVariable _))
| Just info <- M.lookup (varName e, srcName e) varUnitMap' = UA.setUnitInfo info e
annotateExp e = e
pure $ transformBi annotateExp pf
| Give units to literals based upon the rules of the Literals mode .
LitUnitless : All literals are unitless .
LitPoly : All literals are polymorphic .
LitMixed : The literal " 0 " or " 0.0 " is fully parametric polymorphic .
annotateLiterals :: UnitSolver ()
annotateLiterals = modifyProgramFileM (transformBiM annotateLiteralsPU)
annotateLiteralsPU :: F.ProgramUnit UA -> UnitSolver (F.ProgramUnit UA)
annotateLiteralsPU pu = do
mode <- asks (uoLiterals . unitOpts)
case mode of
LitUnitless -> modifyPUBlocksM (transformBiM expUnitless) pu
LitPoly -> modifyPUBlocksM (transformBiM (withLiterals genParamLit)) pu
LitMixed -> modifyPUBlocksM (transformBiM expMixed) pu
where
Follow the LitMixed rules .
expMixed e = case e of
F.ExpValue _ _ (F.ValInteger i _)
| read i == 0 -> withLiterals genParamLit e
| otherwise -> withLiterals genUnitLiteral e
F.ExpValue _ _ (F.ValReal i _)
| readRealLit i == 0.0 -> withLiterals genParamLit e
| otherwise -> withLiterals genUnitLiteral e
F.ExpBinary a s op e1 e2
| op `elem` [F.Multiplication, F.Division] -> case () of
_ | Just _ <- constExp (F.getAnnotation e1)
, Just _ <- constExp (F.getAnnotation e2) -> pure e
_ | Just _ <- constExp (F.getAnnotation e1)
, Just UnitLiteral{} <- UA.getUnitInfo e1 ->
pure $ F.ExpBinary a s op (UA.setUnitInfo UnitlessLit e1) e2
| Just _ <- constExp (F.getAnnotation e2)
, Just UnitLiteral{} <- UA.getUnitInfo e2 ->
pure $ F.ExpBinary a s op e1 (UA.setUnitInfo UnitlessLit e2)
_ -> pure e
_ | Just _ <- constExp (F.getAnnotation e) -> case UA.getUnitInfo e of
Just UnitLiteral{} -> genLit e
Just UnitVar{} -> genLit e
_ -> pure e
| otherwise -> pure e
expUnitless e
| isLiteral e = pure $ UA.setUnitInfo UnitlessLit e
| otherwise = pure e
withLiterals m e
| isLiteral e = flip UA.setUnitInfo e <$> m
| otherwise = pure e
isPolyCtxt = case of F.PUFunction { } - > True ; F.PUSubroutine { } - > True ; _ - > False
genLit e
| isLiteralZero e = withLiterals genParamLit e
| otherwise = withLiterals genUnitLiteral e
isLiteral :: F.Expression UA -> Bool
isLiteral (F.ExpValue _ _ F.ValReal{}) = True
isLiteral (F.ExpValue _ _ F.ValInteger{}) = True
isLiteral e = isJust (constExp (F.getAnnotation e))
isLiteralNonZero :: F.Expression UA -> Bool
isLiteralNonZero (F.ExpValue _ _ (F.ValInteger i _)) = read i /= 0
isLiteralNonZero (F.ExpValue _ _ (F.ValReal i _)) = readRealLit i /= 0.0
isLiteralNonZero e = case constExp (F.getAnnotation e) of
Just (FA.ConstInt i) -> i /= 0
Just (FA.ConstUninterpInt s) -> read s /= 0
Just (FA.ConstUninterpReal s) -> readRealLit (parseRealLit s) /= 0.0
_ -> False
isLiteralZero :: F.Expression UA -> Bool
isLiteralZero x = isLiteral x && not (isLiteralNonZero x)
cullRedundant :: Constraints -> Constraints
cullRedundant = nub . mapMaybe ( \ con -> case con of
ConEq u1 u2 | u1 /= u2 -> Just con
ConConj cs | cs' <- cullRedundant cs, not (null cs) -> Just (ConConj cs')
_ -> Nothing
)
applyTemplates :: Constraints -> UnitSolver Constraints
postcondition : returned constraints lack all Parametric constructors
applyTemplates cons = do
dumpConsM "applyTemplates" cons
let instances = nub [ (name, i) | UnitParamPosUse ((name, _), _, i) <- universeBi cons ]
analysed , even if it is not used in the current ProgramFile . ( It
pf <- getProgramFile
dummies <- forM (topLevelFuncsAndSubs pf) $ \ pu -> do
ident <- freshId
pure (puName pu, ident)
logDebug' pf $ ("instances: " <> describeShow instances)
logDebug' pf $ ("dummies: " <> describeShow dummies)
importedVariables <- getImportedVariables
Prepare constraints for all variables imported via StUse .
let importedCons = [ ConEq (UnitVar vv) units | (vv, units) <- M.toList importedVariables ]
concreteCons <- cullRedundant <$>
liftM2 (++) (foldM (substInstance False []) importedCons instances)
(foldM (substInstance True []) [] dummies)
dumpConsM "applyTemplates: concreteCons" concreteCons
aliasMap <- getUnitAliasMap
let aliases = [ ConEq (UnitAlias name) def | (name, def) <- M.toList aliasMap ]
let transAlias (UnitName a) | a `M.member` aliasMap = UnitAlias a
transAlias u = u
dumpConsM "aliases" aliases
pure . transformBi transAlias . cullRedundant $ cons ++ concreteCons ++ aliases
| Look up the Parametric templates for a given function or
substInstance :: Bool -> [F.Name] -> Constraints -> (F.Name, Int) -> UnitSolver Constraints
substInstance isDummy callStack output (name, callId) = do
tmap <- getTemplateMap
template <- transformBiM callIdRemap $ npc `fromMaybe` M.lookup name tmap
dumpConsM ("substInstance " ++ show isDummy ++ " " ++ show callStack ++ " " ++ show (name, callId) ++ " template lookup") template
modifyCallIdRemap (const IM.empty)
let instances = nub [ (name', i) | UnitParamPosUse ((name', _), _, i) <- universeBi template ]
template' <- if name `elem` callStack then
unit - assignments as the first call .
pure []
else
foldM (substInstance False (name:callStack)) [] instances
dumpConsM ("instantiating " ++ show (name, callId) ++ ": (output ++ template) is") (output ++ template)
dumpConsM ("instantiating " ++ show (name, callId) ++ ": (template') is") template'
(if isDummy then output ++ template
else instantiate callId (output ++ template)) ++
instantiate callId template'
dumpConsM ("final output for " ++ show (name, callId)) output'
pure output'
M.toList . M.filterWithKey filterForName ) < $ > getNameParamMap
pure [ ConEq ( UnitParamPosAbs ( n , pos ) ) ( foldUnits units ) | ( NPKParam n pos , units ) < - nlst ]
callIdRemap :: UnitInfo -> UnitSolver UnitInfo
callIdRemap info = modifyCallIdRemapM $ \ idMap -> case info of
UnitParamPosUse (n, p, i)
| Just i' <- IM.lookup i idMap -> pure (UnitParamPosUse (n, p, i'), idMap)
| otherwise -> freshId >>= \ i' ->
pure (UnitParamPosUse (n, p, i'), IM.insert i i' idMap)
UnitParamVarUse (n, v, i)
| Just i' <- IM.lookup i idMap -> pure (UnitParamVarUse (n, v, i'), idMap)
| otherwise -> freshId >>= \ i' ->
pure (UnitParamVarUse (n, v, i'), IM.insert i i' idMap)
UnitParamLitUse (l, i)
| Just i' <- IM.lookup i idMap -> pure (UnitParamLitUse (l, i'), idMap)
| otherwise -> freshId >>= \ i' ->
pure (UnitParamLitUse (l, i'), IM.insert i i' idMap)
UnitParamEAPUse (v, i)
| Just i' <- IM.lookup i idMap -> pure (UnitParamEAPUse (v, i'), idMap)
| otherwise -> freshId >>= \ i' ->
pure (UnitParamEAPUse (v, i'), IM.insert i i' idMap)
_ -> pure (info, idMap)
instantiate :: Data a => Int -> a -> a
instantiate callId = transformBi $ \ info -> case info of
UnitParamPosAbs (name, position) -> UnitParamPosUse (name, position, callId)
UnitParamLitAbs litId -> UnitParamLitUse (litId, callId)
UnitParamVarAbs (fname, vname) -> UnitParamVarUse (fname, vname, callId)
UnitParamEAPAbs vname -> UnitParamEAPUse (vname, callId)
_ -> info
| Return a list of ProgramUnits that might be considered ' toplevel '
in the ProgramFile , e.g. , possible exports . These must be analysed
topLevelFuncsAndSubs :: F.ProgramFile a -> [F.ProgramUnit a]
topLevelFuncsAndSubs (F.ProgramFile _ pus) = topLevel =<< pus
where
topLevel (F.PUModule _ _ _ _ (Just contains)) = topLevel =<< contains
topLevel (F.PUMain _ _ _ _ (Just contains)) = topLevel =<< contains
topLevel {} = pure f
topLevel {} = pure s
topLevel _ = []
| Gather all constraints from the main blocks of the AST , as well as from the varUnitMap
extractConstraints :: UnitSolver Constraints
extractConstraints = do
pf <- getProgramFile
dmap <- lift . lift $ M.union (extractDeclMap pf) . combinedDeclMap <$> analysisModFiles
varUnitMap <- getVarUnitMap
pure $ [ con | b <- mainBlocks pf, con@ConEq{} <- universeBi b ] ++
[ ConEq (toUnitVar dmap v) u | (v, u) <- M.toList varUnitMap ]
mainBlocks :: F.ProgramFile UA -> [F.Block UA]
mainBlocks = concatMap getBlocks . universeBi
where
getBlocks (F.PUMain _ _ _ bs _) = bs
getBlocks (F.PUModule _ _ _ bs _) = bs
getBlocks _ = []
propagateUnits :: UnitSolver ()
propagateUnits = modifyProgramFileM $ transformBiM propagateInterface <=<
transformBiM propagatePU <=<
transformBiM propagateDoSpec <=<
transformBiM propagateStatement <=<
transformBiM propagateExp
propagateExp :: F.Expression UA -> UnitSolver (F.Expression UA)
propagateExp e = case e of
F.ExpBinary _ _ F.Multiplication e1 e2 -> setF2 UnitMul (UA.getUnitInfo e1) (UA.getUnitInfo e2)
F.ExpBinary _ _ F.Division e1 e2 -> setF2 UnitMul (UA.getUnitInfo e1) (flip UnitPow (-1) <$> UA.getUnitInfo e2)
F.ExpBinary _ _ F.Exponentiation e1 e2 -> setF2 UnitPow (UA.getUnitInfo e1) (constantExpression e2)
F.ExpBinary _ _ o e1 e2 | isOp AddOp o -> setF2C ConEq (UA.getUnitInfo e1) (UA.getUnitInfo e2)
| isOp RelOp o -> setF2C ConEq (UA.getUnitInfo e1) (UA.getUnitInfo e2)
F.ExpFunctionCall {} -> propagateFunctionCall e
F.ExpSubscript _ _ e1 _ -> pure $ UA.maybeSetUnitInfo (UA.getUnitInfo e1) e
F.ExpUnary _ _ _ e1 -> pure $ UA.maybeSetUnitInfo (UA.getUnitInfo e1) e
F.ExpInitialisation{} -> pure e
_ -> do
logDebug' e $ "progagateExp: unhandled " <> describeShow e
pure e
where
setF2 f u1 u2 = pure $ UA.maybeSetUnitInfoF2 f u1 u2 e
setF2C f u1 u2 = pure . UA.maybeSetUnitInfo u1 $ UA.maybeSetUnitConstraintF2 f u1 u2 e
propagateFunctionCall :: F.Expression UA -> UnitSolver (F.Expression UA)
propagateFunctionCall (F.ExpFunctionCall a s f (F.AList a' s' args)) = do
(info, args') <- callHelper f args
let cons = intrinsicHelper info f args'
pure . UA.setConstraint (ConConj cons) . UA.setUnitInfo info $ F.ExpFunctionCall a s f (F.AList a' s' args')
propagateFunctionCall _ = error "received non-function-call in propagateFunctionCall"
propagateDoSpec :: F.DoSpecification UA -> UnitSolver (F.DoSpecification UA)
propagateDoSpec ast@(F.DoSpecification _ _ (F.StExpressionAssign _ _ e1 _) e2 m_e3) = do
pure . maybe ast (flip UA.setConstraint ast) $ ConConj <$> mconcat [
(:[]) <$> liftM2 ConEq (UA.getUnitInfo e1) (UA.getUnitInfo e2)
units(e1 ) ~ units(e3 ) or if e3 not specified then units(e1 ) ~ 1 in a polymorphic context
, do u1 <- UA.getUnitInfo e1
u3 <- (UA.getUnitInfo =<< m_e3) `mplus` if isMonomorphic u1 then mzero else pure UnitlessVar
pure [ConEq u1 u3]
units(e2 ) ~ units(e3 ) or if e3 not specified then units(e2 ) ~ 1 in a polymorphic context
, do u2 <- UA.getUnitInfo e1
u3 <- (UA.getUnitInfo =<< m_e3) `mplus` if isMonomorphic u2 then mzero else pure UnitlessVar
pure [ConEq u2 u3]
]
propagateDoSpec _ = error "propagateDoSpec: called on invalid DoSpec"
propagateStatement :: F.Statement UA -> UnitSolver (F.Statement UA)
propagateStatement stmt = case stmt of
F.StExpressionAssign _ _ e1 e2 -> literalAssignmentSpecialCase e1 e2 stmt
F.StCall a s sub (F.AList a' s' args) -> do
(info, args') <- callHelper sub args
let cons = intrinsicHelper info sub args'
pure . UA.setConstraint (ConConj cons) $ F.StCall a s sub (F.AList a' s' args')
F.StDeclaration {} -> transformBiM propagateDeclarator stmt
_ -> pure stmt
propagateDeclarator :: F.Declarator UA -> UnitSolver (F.Declarator UA)
propagateDeclarator decl = case decl of
F.Declarator _ _ e1 _ _ (Just e2) -> literalAssignmentSpecialCase e1 e2 decl
_ -> pure decl
literalAssignmentSpecialCase :: (F.Annotated f)
=> F.Expression UA -> F.Expression UA
-> f UA -> UnitSolver (f UA)
literalAssignmentSpecialCase e1 e2 ast
| isLiteralZero e2 = pure ast
| isLiteral e2
, Just u1 <- UA.getUnitInfo e1
, Just UnitLiteral{} <- UA.getUnitInfo e2
, isMonomorphic u1 = pure ast
otherwise express the constraint between LHS and RHS of assignment .
| otherwise = pure $ UA.maybeSetUnitConstraintF2 ConEq (UA.getUnitInfo e1) (UA.getUnitInfo e2) ast
Generic Interface template mapping will be same as first module procedure .
propagateInterface :: F.Block UA -> UnitSolver (F.Block UA)
propagateInterface b@(F.BlInterface _ _ (Just e) _ _ bs) = do
let iname = varName e
case [ varName e1 | F.StModuleProcedure _ _ (F.AList _ _ (e1:_)) <- universeBi bs :: [F.Statement UA] ] of
mpname:_ -> do
let trans = transformBi (\ x -> if x == mpname then iname else x)
copy ( translated ) template from first module procedure to interface
modifyTemplateMap $ \ m -> fromMaybe m ((\ t -> M.insert iname (trans t) m) <$> M.lookup mpname m)
_ ->
pure ()
pure b
propagateInterface b = pure b
propagatePU :: F.ProgramUnit UA -> UnitSolver (F.ProgramUnit UA)
propagatePU pu = do
let name = puName pu
let sname = puSrcName pu
let nn = (name, sname)
Constraints within the PU .
varMap <- getVarUnitMap
explicit unit and the UnitParamPosAbs corresponding to the
givenCons <- forM (indexedParams pu) $ \ (i, param) ->
case M.lookup param varMap of
Just UnitParamPosAbs{} -> pure . ConEq (UnitParamVarAbs (nn, param)) $ UnitParamPosAbs (nn, i)
Just u -> pure . ConEq u $ UnitParamPosAbs (nn, i)
_ -> pure . ConEq (UnitParamVarAbs (nn, param)) $ UnitParamPosAbs (nn, i)
let cons = givenCons ++ bodyCons
case pu of F.PUFunction {} -> modifyTemplateMap (M.insert name cons)
F.PUSubroutine {} -> modifyTemplateMap (M.insert name cons)
_ -> pure ()
let pu' = case (pu, indexedParams pu) of
(F.PUFunction {}, (0, res):_) -> UA.setUnitInfo (UnitParamPosAbs (nn, 0) `fromMaybe` M.lookup res varMap) pu
_ -> pu
pure (UA.setConstraint (ConConj cons) pu')
callHelper :: F.Expression UA -> [F.Argument UA] -> UnitSolver (UnitInfo, [F.Argument UA])
callHelper nexp args = do
let name = (varName nexp, srcName nexp)
let ctyp = FA.idCType =<< FA.idType (F.getAnnotation nexp)
callId <- case ctyp of
let eachArg i arg@(F.Argument _ _ _ e)
| Just u <- UA.getUnitInfo e = UA.setConstraint (ConEq u (UnitParamPosUse (name, i, callId))) arg
| otherwise = arg
let args' = zipWith eachArg [1..] args
let info = UnitParamPosUse (name, 0, callId)
pure (info, args')
FIXME : use this function to create a list of constraints on intrinsic call - sites ...
intrinsicHelper :: Foldable t => UnitInfo -> F.Expression (FA.Analysis a) -> t b -> [Constraint]
intrinsicHelper (UnitParamPosUse (_, _, callId)) f@(F.ExpValue _ _ (F.ValIntrinsic _)) args
| Just (retU, argUs) <- intrinsicLookup sname = zipWith eachArg [0..numArgs] (retU:argUs)
where
numArgs = length args
sname = srcName f
vname = varName f
eachArg i u = ConEq (UnitParamPosUse ((vname, sname), i, callId)) (instantiate callId u)
intrinsicHelper _ _ _ = []
intrinsicLookup :: F.Name -> Maybe (UnitInfo, [UnitInfo])
intrinsicLookup sname = do
(retU, argUs) <- M.lookup sname intrinsicUnits
return (retU, if sname `S.member` specialCaseArbitraryArgs then cycle argUs else argUs)
genUnitLiteral :: UnitSolver UnitInfo
genUnitLiteral = UnitLiteral <$> freshId
genParamLit :: UnitSolver UnitInfo
genParamLit = UnitParamLitAbs <$> freshId
modifyPUBlocksM :: Monad m => ([F.Block a] -> m [F.Block a]) -> F.ProgramUnit a -> m (F.ProgramUnit a)
modifyPUBlocksM f pu = case pu of
F.PUMain a s n b pus -> flip fmap (f b) $ \ b' -> F.PUMain a s n b' pus
F.PUModule a s n b pus -> flip fmap (f b) $ \ b' -> F.PUModule a s n b' pus
F.PUSubroutine a s r n p b subs -> flip fmap (f b) $ \ b' -> F.PUSubroutine a s r n p b' subs
F.PUFunction a s r rec n p res b subs -> flip fmap (f b) $ \ b' -> F.PUFunction a s r rec n p res b' subs
F.PUBlockData a s n b -> flip fmap (f b) $ \ b' -> F.PUBlockData a s n b'
Fortran semantics for interpretation of constant expressions
data FNum = FReal Double | FInt Integer
fnumToDouble :: FNum -> Double
fnumToDouble (FReal x) = x
fnumToDouble (FInt x) = fromIntegral x
fAdd, fSub, fMul, fDiv, fPow :: FNum -> FNum -> FNum
fAdd (FReal x) fy = FReal $ x + fnumToDouble fy
fAdd fx (FReal y) = FReal $ fnumToDouble fx + y
fAdd (FInt x) (FInt y) = FInt $ x + y
fSub (FReal x) fy = FReal $ x - fnumToDouble fy
fSub fx (FReal y) = FReal $ fnumToDouble fx - y
fSub (FInt x) (FInt y) = FInt $ x - y
fMul (FReal x) fy = FReal $ x * fnumToDouble fy
fMul fx (FReal y) = FReal $ fnumToDouble fx * y
fMul (FInt x) (FInt y) = FInt $ x * y
fDiv (FReal x) fy = FReal $ x / fnumToDouble fy
fDiv fx (FReal y) = FReal $ fnumToDouble fx / y
Haskell quot truncates towards zero , like Fortran
fPow (FReal x) fy = FReal $ x ** fnumToDouble fy
fPow fx (FReal y) = FReal $ fnumToDouble fx ** y
fPow (FInt x) (FInt y)
| y >= 0 = FInt $ x ^ y
| otherwise = FReal $ fromIntegral x ^^ y
fDivMaybe :: Maybe FNum -> Maybe FNum -> Maybe FNum
fDivMaybe mx my
| Just y <- my,
fnumToDouble y == 0.0 = Nothing
| otherwise = liftM2 fDiv mx my
constantExpression :: F.Expression a -> Maybe Double
constantExpression expr = fnumToDouble <$> ce expr
where
ce e = case e of
(F.ExpValue _ _ (F.ValInteger i _)) -> Just $ FInt $ read i
(F.ExpValue _ _ (F.ValReal r _)) -> Just $ FReal $ readRealLit r
(F.ExpBinary _ _ F.Addition e1 e2) -> liftM2 fAdd (ce e1) (ce e2)
(F.ExpBinary _ _ F.Subtraction e1 e2) -> liftM2 fSub (ce e1) (ce e2)
(F.ExpBinary _ _ F.Multiplication e1 e2) -> liftM2 fMul (ce e1) (ce e2)
(F.ExpBinary _ _ F.Division e1 e2) -> fDivMaybe (ce e1) (ce e2)
(F.ExpBinary _ _ F.Exponentiation e1 e2) -> liftM2 fPow (ce e1) (ce e2)
_ -> Nothing
isOp :: BinOpKind -> F.BinaryOp -> Bool
isOp cat = (== cat) . binOpKind
data BinOpKind = AddOp | MulOp | DivOp | PowerOp | LogicOp | RelOp deriving Eq
binOpKind :: F.BinaryOp -> BinOpKind
binOpKind F.Addition = AddOp
binOpKind F.Subtraction = AddOp
binOpKind F.Multiplication = MulOp
binOpKind F.Division = DivOp
binOpKind F.Exponentiation = PowerOp
binOpKind F.Concatenation = AddOp
binOpKind F.GT = RelOp
binOpKind F.GTE = RelOp
binOpKind F.LT = RelOp
binOpKind F.LTE = RelOp
binOpKind F.EQ = RelOp
binOpKind F.NE = RelOp
binOpKind F.Or = LogicOp
binOpKind F.And = LogicOp
binOpKind F.XOr = LogicOp
binOpKind F.Equivalent = RelOp
binOpKind F.NotEquivalent = RelOp
binOpKind (F.BinCustom _) = RelOp
getImportedVariables :: UnitSolver (M.Map VV UnitInfo)
getImportedVariables = do
pf <- getProgramFile
nmap <- getNameParamMap
let useToPair (F.UseID _ _ e) = (varName e, srcName e)
let modnmaps = [ M.fromList (mapMaybe f (M.toList npkmap))
find all StUse statements and identify variables that need to be imported from nmap
| F.StUse _ _ e _ only alist <- universeBi pf :: [ F.Statement UA ]
, let mod = srcName e
, let uses = map useToPair (fromMaybe [] (F.aStrip <$> alist))
, Just npkmap <- [M.lookup (F.Named mod) nmap]
, let f (npk, ui) = case npk of
(NPKVariable (var, src))
| only == F.Permissive -> Just (NPKVariable (var, src `fromMaybe` lookup var uses), ui)
| Just src' <- lookup var uses -> Just (NPKVariable (var, src'), ui)
_ -> Nothing
]
pure $ M.fromList [ (vv, foldUnits units) | (NPKVariable vv, units) <- M.toList (M.unions modnmaps) ]
logDebugNoOrigin :: Text -> UnitSolver ()
logDebugNoOrigin msg = do
pf <- gets usProgramFile
logDebug' pf msg
dumpConsM :: String -> Constraints -> UnitSolver ()
dumpConsM str = logDebugNoOrigin . describe . unlines . ([replicate 50 '-', str ++ ":"]++) . (++[replicate 50 '^']) . map f
where
f (ConEq u1 u2) = show (flattenUnits u1) ++ " === " ++ show (flattenUnits u2)
f (ConConj cons) = intercalate " && " (map f cons)
debugLogging :: UnitSolver ()
debugLogging = do
(logDebugNoOrigin . describe . unlines . map (\ (ConEq u1 u2) -> " ***AbsConstraint: " ++ show (flattenUnits u1) ++ " === " ++ show (flattenUnits u2))) =<< extractConstraints
pf <- getProgramFile
cons <- getConstraints
vum <- getVarUnitMap
logDebugNoOrigin . describe . unlines $ [ " " ++ show info ++ " :: " ++ n | ((n, _), info) <- M.toList vum ]
logDebugNoOrigin ""
uam <- getUnitAliasMap
logDebugNoOrigin . describe . unlines $ [ " " ++ n ++ " = " ++ show info | (n, info) <- M.toList uam ]
logDebugNoOrigin . describe . unlines $ map (\ (ConEq u1 u2) -> " ***Constraint: " ++ show (flattenUnits u1) ++ " === " ++ show (flattenUnits u2)) cons
logDebugNoOrigin $ describeShow cons <> "\n"
forM_ (universeBi pf) $ \ pu -> case pu of
F.PUFunction {}
| Just (ConConj con) <- UA.getConstraint pu ->
logDebugNoOrigin . describe . unlines $ (puName pu ++ ":"):map (\ (ConEq u1 u2) -> " constraint: " ++ show (flattenUnits u1) ++ " === " ++ show (flattenUnits u2)) con
F.PUSubroutine {}
| Just (ConConj con) <- UA.getConstraint pu ->
logDebugNoOrigin . describe . unlines $ (puName pu ++ ":"):map (\ (ConEq u1 u2) -> " constraint: " ++ show (flattenUnits u1) ++ " === " ++ show (flattenUnits u2)) con
_ -> pure ()
let (lhsM, rhsM, _, lhsColA, rhsColA) = constraintsToMatrices cons
logDebugNoOrigin "--------------------------------------------------\nLHS Cols:"
logDebugNoOrigin $ describeShow lhsColA
logDebugNoOrigin "--------------------------------------------------\nRHS Cols:"
logDebugNoOrigin $ describeShow rhsColA
logDebugNoOrigin "--------------------------------------------------\nLHS M:"
logDebugNoOrigin $ describeShow lhsM
logDebugNoOrigin "--------------------------------------------------\nRHS M:"
logDebugNoOrigin $ describeShow rhsM
logDebugNoOrigin "--------------------------------------------------\nAUG M:"
let augM = if H.rows rhsM == 0 || H.cols rhsM == 0 then lhsM else H.fromBlocks [[lhsM, rhsM]]
logDebugNoOrigin $ describeShow augM
logDebugNoOrigin "--------------------------------------------------\nSolved (hnf) M:"
let hnfM = Flint.hnf augM
logDebugNoOrigin $ describeShow hnfM
logDebugNoOrigin "--------------------------------------------------\nSolved (normHNF) M:"
let (solvedM, newColIndices) = Flint.normHNF augM
logDebugNoOrigin . describeShow $ solvedM
logDebugNoOrigin $ "newColIndices = " <> describeShow newColIndices
logDebugNoOrigin "--------------------------------------------------\nLHS Cols with newColIndices:"
let lhsCols = A.elems lhsColA ++ map (lhsColA A.!) newColIndices
logDebugNoOrigin $ describe . unlines . map show $ zip [(0::Int)..] lhsCols
logDebugNoOrigin $ show ( H.linearSolveSVD lhsM rhsM )
logDebugNoOrigin $ show ( )
logDebugNoOrigin "--------------------------------------------------"
logDebugNoOrigin $ "Rank LHS: " <> describeShow (H.rank lhsM)
logDebugNoOrigin "--------------------------------------------------"
let augA = if H.rows rhsM == 0 || H.cols rhsM == 0 then lhsM else H.fromBlocks [[lhsM, rhsM]]
logDebugNoOrigin $ "Rank Augmented: " <> describeShow (H.rank augA)
logDebugNoOrigin "--------------------------------------------------\nGenUnitAssignments:"
let unitAssignments = genUnitAssignments cons
logDebugNoOrigin . describe . unlines $ map (\ (u1s, u2) -> " ***UnitAssignment: " ++ show u1s ++ " === " ++ show (flattenUnits u2) ++ "\n") unitAssignments
logDebugNoOrigin "--------------------------------------------------"
let unitAssignmentsSBV = BackendSBV.genUnitAssignments cons
logDebugNoOrigin . describe . unlines $ map (\ (u1s, u2) -> " ***UnitAssignmentSBV: " ++ show u1s ++ " === " ++ show (flattenUnits u2)) unitAssignmentsSBV
logDebugNoOrigin "--------------------------------------------------\nProvenance:"
let (augM', p) = provenance augM
logDebugNoOrigin . describeShow $ augM'
logDebugNoOrigin . describeShow $ p
puName :: F.ProgramUnit UA -> F.Name
puName pu
| F.Named n <- FA.puName pu = n
| otherwise = "_nameless"
puSrcName :: F.ProgramUnit UA -> F.Name
puSrcName pu
| F.Named n <- FA.puSrcName pu = n
| otherwise = "_nameless"
specialCaseArbitraryArgs :: S.Set F.Name
specialCaseArbitraryArgs = S.fromList [ "max", "max0", "amax1", "dmax1", "amax0", "max1"
, "min", "min0", "amin1", "dmin1", "amin0", "min1" ]
intrinsicUnits :: M.Map F.Name (UnitInfo, [UnitInfo])
intrinsicUnits =
M.fromList
[ ("transfer", (UnitParamEAPAbs ("'b", "'b"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'b", "'b")]))
, ("abs", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("iabs", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("dabs", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("cabs", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("aimag", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("aint", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("dint", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("anint", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("dnint", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("cmplx", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("conjg", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("dble", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("dim", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'a", "'a")]))
, ("idim", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'a", "'a")]))
, ("ddim", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'a", "'a")]))
, ("dprod", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("ceiling", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("floor", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("int", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("ifix", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("idint", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("maxval", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("minval", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("mod", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'b", "'b")]))
, ("modulo", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'b", "'b")]))
, ("amod", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'b", "'b")]))
, ("dmod", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'b", "'b")]))
, ("nint", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("real", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("float", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("sngl", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a")]))
, ("sign", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'b", "'b")]))
, ("isign", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'b", "'b")]))
, ("dsign", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'b", "'b")]))
, ("present", (UnitParamEAPAbs ("'a", "'a"), [UnitlessVar]))
, ("sqrt", (UnitParamEAPAbs ("'a", "'a"), [UnitPow (UnitParamEAPAbs ("'a", "'a")) 2]))
, ("dsqrt", (UnitParamEAPAbs ("'a", "'a"), [UnitPow (UnitParamEAPAbs ("'a", "'a")) 2]))
, ("csqrt", (UnitParamEAPAbs ("'a", "'a"), [UnitPow (UnitParamEAPAbs ("'a", "'a")) 2]))
, ("exp", (UnitlessVar, [UnitlessVar]))
, ("dexp", (UnitlessVar, [UnitlessVar]))
, ("cexp", (UnitlessVar, [UnitlessVar]))
, ("alog", (UnitlessVar, [UnitlessVar]))
, ("dlog", (UnitlessVar, [UnitlessVar]))
, ("clog", (UnitlessVar, [UnitlessVar]))
, ("alog10", (UnitlessVar, [UnitlessVar]))
, ("dlog10", (UnitlessVar, [UnitlessVar]))
, ("sin", (UnitlessVar, [UnitlessVar]))
, ("dsin", (UnitlessVar, [UnitlessVar]))
, ("csin", (UnitlessVar, [UnitlessVar]))
, ("cos", (UnitlessVar, [UnitlessVar]))
, ("dcos", (UnitlessVar, [UnitlessVar]))
, ("ccos", (UnitlessVar, [UnitlessVar]))
, ("tan", (UnitlessVar, [UnitlessVar]))
, ("dtan", (UnitlessVar, [UnitlessVar]))
, ("asin", (UnitlessVar, [UnitlessVar]))
, ("dasin", (UnitlessVar, [UnitlessVar]))
, ("acos", (UnitlessVar, [UnitlessVar]))
, ("dacos", (UnitlessVar, [UnitlessVar]))
, ("atan", (UnitlessVar, [UnitlessVar]))
, ("datan", (UnitlessVar, [UnitlessVar]))
, ("atan2", (UnitlessVar, [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'a", "'a")]))
, ("datan2", (UnitlessVar, [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'a", "'a")]))
, ("sinh", (UnitlessVar, [UnitlessVar]))
, ("dsinh", (UnitlessVar, [UnitlessVar]))
, ("cosh", (UnitlessVar, [UnitlessVar]))
, ("dcosh", (UnitlessVar, [UnitlessVar]))
, ("tanh", (UnitlessVar, [UnitlessVar]))
, ("dtanh", (UnitlessVar, [UnitlessVar]))
, ("iand", (UnitParamEAPAbs ("'a", "'a"), [UnitParamEAPAbs ("'a", "'a"), UnitParamEAPAbs ("'a", "'a")]))
]
compileUnits :: UnitOpts -> ModFiles -> F.ProgramFile Annotation -> IO ModFile
compileUnits uo mfs pf = do
let (pf', _, _) = withCombinedEnvironment mfs . fmap UA.mkUnitAnnotation $ pf
let analysis = runReaderT (runInference runCompileUnits) $
UnitEnv
{ unitOpts = uo
, unitProgramFile = pf
}
report <- runAnalysisT (F.pfGetFilename pf) (logOutputNone True) LogError mfs analysis
case report ^? arResult . _ARSuccess . _1 of
Just cu -> return (genUnitsModFile pf' cu)
Nothing -> fail "compileUnits: units analysis failed"
|
086512d84fbc7346756bd6f42fce79a09804751015cc39678ee8b8c015de2e33 | pirapira/coq2rust | assumptions.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Util
open Names
open Term
* A few declarations for the " Print Assumption " command
@author spiwack
@author spiwack *)
type context_object =
| Variable of Id.t (** A section variable or a Let definition *)
| Axiom of constant (** An axiom or a constant. *)
| Opaque of constant (** An opaque constant. *)
| Transparent of constant (** A transparent constant *)
(** AssumptionSet.t is a set of [assumption] *)
module ContextObjectSet : Set.S with type elt = context_object
module ContextObjectMap : Map.ExtS
with type key = context_object and module Set := ContextObjectSet
(** collects all the assumptions (optionally including opaque definitions)
on which a term relies (together with their type) *)
val assumptions :
?add_opaque:bool -> ?add_transparent:bool -> transparent_state -> constr ->
Term.types ContextObjectMap.t
| null | https://raw.githubusercontent.com/pirapira/coq2rust/22e8aaefc723bfb324ca2001b2b8e51fcc923543/library/assumptions.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* A section variable or a Let definition
* An axiom or a constant.
* An opaque constant.
* A transparent constant
* AssumptionSet.t is a set of [assumption]
* collects all the assumptions (optionally including opaque definitions)
on which a term relies (together with their type) | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Util
open Names
open Term
* A few declarations for the " Print Assumption " command
@author spiwack
@author spiwack *)
type context_object =
module ContextObjectSet : Set.S with type elt = context_object
module ContextObjectMap : Map.ExtS
with type key = context_object and module Set := ContextObjectSet
val assumptions :
?add_opaque:bool -> ?add_transparent:bool -> transparent_state -> constr ->
Term.types ContextObjectMap.t
|
75d82ef2ea8cb0c867867ef90cdd4a07dff2aaa53d2ad466ce8b30f22ac8d2e1 | esl/erlang-web | e_mod_gen.erl | The contents of this file are subject to the Erlang Web Public License ,
Version 1.0 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
Erlang Web Public License along with this software . If not , it can be
%% retrieved via the world wide web at -consulting.com/.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
The Initial Developer of the Original Code is Erlang Training & Consulting
Ltd. Portions created by Erlang Training & Consulting Ltd are Copyright 2008 ,
Erlang Training & Consulting Ltd. All Rights Reserved .
%%%-------------------------------------------------------------------
%%% File : e_mod_gen.erl
@author < >
%%% @doc Generic mod for the web servers.
%%% It deals with all generic parts of requests handling.
%%% @type controller_response() = template | {redirect, URL :: string} |
%%% {content, html, HTML :: string()} | {content, text, Text :: string()} |
%%% {json, Term :: term()} | {template, Template :: string()} |
%%% {custom, Custom :: term()} | {headers, Headers :: list(tuple()), Res :: controller_response()} |
%%% {error, Code :: integer()}
%%% @see e_mod_inets
%%% @see e_mod_yaws
%%% @end
%%%-------------------------------------------------------------------
-module(e_mod_gen).
-export([handle_request/1]).
-export([template/3, template_file/1, error_page/2, error_page/3]).
-export([restore_session/1, bind_session/1]).
-export([controller/3]).
-export([sanitize_file_name/1, parse_url/1]).
-include_lib("eptic/include/eptic.hrl").
-type(controller_response() :: template |
{redirect, string()} |
{content, html, iolist()} |
{content, text, iolist()} |
{content, xml, iolist()} |
{content, pdf, iolist()} |
{json, term()} |
{template, string()} |
{custom, term()} |
%should be controller_response(), but recursive types are not
%supported.
{headers, list(tuple()), ControllerResponse::any()} |
{error, integer()}).
%%
handle_request(URL : : string ( ) ) - > HTML | list({status , Code : : integer ( ) } , HTML ) |
{ ret_view , Ret : : controller_response ( ) , View : : string ( ) } |
%% enoent
%% HTML = {html, Html :: string()}
%% @doc Dispatches the request and calls the right module/expands the template.
%% The returning value is interpreted by the server callback module.
%%
-spec(handle_request/1 :: (string()) -> {html, string()} |
list(tuple()) |
{ret_view, controller_response(), string()} |
enoent).
handle_request("/app/" ++ URL) ->
e_dict:fset("__path", URL),
e_logger:log({?MODULE, {old_path_type, URL}}),
case parse_url(URL) of
{view, View} -> view(View);
{error, R} -> error_page(501, URL, R);
{Mod, Fun, View} -> controller(Mod, Fun, View)
end;
handle_request(URL) ->
case e_dispatcher:dispatch(URL) of
invalid_url -> enoent;
{view, View} -> view(View);
{error, Code, Path} -> error_page(Code, Path);
{Mod, Fun, View} -> controller(Mod, Fun, View)
end.
%%
template(Path : : string ( ) , [ ] , Transform : : atom ( ) ) - >
list({status , } , { html , HTML : : string ( ) } ) | { html , HTML : : string ( ) }
%% @doc Expands the template from the given <i>Path</i>.
%% The requested template is read from disk cache and properly expanded
%% using <i>Transform</i>:process_xml function (by default {@link //wpart/wpart_xs:process_xml/1}).
%% @see //wpart/wpart_xs:process_xml/1
%% @see e_cache:read_file/1
%%
-spec(template/3 :: (string(), nil(), atom()) -> list(tuple()) | {html, string()}).
template(Template, [], Transform) ->
case e_cache:read_file(Template) of
{error, Error} ->
error_page(404, Template, {e_cache_error, Error});
E ->
{html, apply(Transform, process_xml, [E])}
end.
%%
error_page(ErrorCode : : integer ( ) , URL : : string ( ) ) - >
list({status , } , { html , HTML : : string ( ) } )
%% @doc Generates the error site for given error code.
If error code is 404 , then calls the { @link error_page/3 } with
%% reason <i>not_found</i>.
%% In other cases, the reason is empty.
%% @see error_page/3
%% @see e_dispatcher:error_page/1
%%
-spec(error_page/2 :: (integer(), string()) -> list(tuple())).
error_page(404, Path) ->
error_page(404, Path, not_found);
error_page(ErrorCode, Path) ->
error_page(ErrorCode, Path, "").
%%
error_page(ErrorCode : : integer ( ) , URL : : string ( ) , Reason : : term ( ) ) - >
list({status , } , { html , HTML : : string ( ) } )
@doc Generates the error site with the reason < i > > for given error code .
%% If the <i>debug_mode</i> flag is set to true, then the error page will always contain
%% the detailed information about the error.
%% Otherwise, the proper error page will be returned.
%% @see e_dispatcher:error_page/1
%% @see e_conf:debug_mode/0
%%
-spec(error_page/3 :: (integer(), string(), term()) -> list(tuple())).
error_page(ErrorCode, Path, Reason) ->
error_logger:error_msg("~p module, error_page, error code: ~p~n"
"path: ~p~n"
"reason: ~p ~n", [?MODULE, ErrorCode, Path, Reason]),
case e_dispatcher:error_page(ErrorCode) of
not_found ->
[{status, ErrorCode},
{html, ?ERR(io_lib:print({not_found, Path}))}];
TplPath ->
case e_conf:debug_mode() of
true ->
[{status, ErrorCode},
{html, ?ERR(io_lib:print({Reason, Path}))}];
false ->
Filled =
apply(e_conf:template_expander(),
process_xml,
[e_cache:read_file(TplPath)]),
error_logger:error_msg("~p module, error: ~p: ~p~n",
[?MODULE, ErrorCode, Path]),
[{status, ErrorCode},
{html, Filled}]
end
end.
%%
%% @spec bind_session(Cookie :: term()) -> no_session | Cookie
@doc Binds the session to the given < i > Cookie</i > .
%% If the session kept in request dictionary is empty, then it deletes it
%% from internal state and returns <i>no_session</i>.
%% Otherwise returns the given cookie.
%%
-spec(bind_session/1 :: (Cookie) -> no_session | Cookie).
bind_session(Cookie) ->
case {e_session:get_session(Cookie), e_dict:fget("session")} of
{{ok,""},[]} ->
no_session;
{_,[]} ->
e_session:delete_session(Cookie),
no_session;
{_,Session} ->
e_session:update_session(Cookie,Session),
Cookie
end.
%%
: : term ( ) ) - > true
@doc Restores the session with the given < i > Cookie</i > .
If the < i > Cookie</i > is empty , it sets the request dictionary
%% session variable to empty list.
%% Otherwise, it loads the previous state of session from
%% the internal data storage.
%%
-spec(restore_session/1 :: (term()) -> true).
restore_session("") ->
e_dict:fset("session", []);
restore_session(Cookie) ->
{ok, Session} = e_session:get_session(Cookie),
e_dict:fset("session", Session).
%%
: : string ( ) ) - > FullPath : : string ( )
%% @doc Returns the full path to the requested template.
%% The returned path is sanitized and prefixed with the template root
%% directory.
%% @see e_conf:template_root/0
%%
-spec(template_file/1 :: (string()) -> string()).
template_file(View) ->
filename:join([
e_conf:template_root(),
sanitize_file_name(View)
]).
%% @hidden
-spec(parse_url/1 :: (string()) -> {view, string()} | {atom(), atom(), string()} | {error, invalid_url}).
parse_url(Url) ->
case string:tokens(Url, "/") of
["view"|View] ->
{view, filename:join(View)};
[Mod, Fun | View] when length(View) /= 0 ->
{list_to_existing_atom(Mod),
list_to_existing_atom(Fun),
filename:join(View)};
[Mod, Fun] ->
{list_to_existing_atom(Mod),
list_to_existing_atom(Fun),
[]};
_ ->
{error, invalid_url}
end.
-spec(view/1 :: (string()) -> list(tuple()) | {html, string()}).
view(View) ->
template(template_file(View), [],
e_conf:template_expander()).
-spec(controller/3 :: (atom(), atom(), string()) -> {ret_view, controller_response(), string()}).
controller(Mod, Fun, View) ->
eptic:fset("__controller", Mod),
Funs = Mod:module_info(exports),
case (lists:member({dataflow,1},Funs) andalso lists:member({error,2},Funs)) of
true ->
e_logger:log({?MODULE, {entering_dataflow_for, {Mod, Fun}}}),
Answ = apply(Mod, dataflow, [Fun]),
controller_handler(Answ, {Mod,Fun,View});
false ->
case lists:member({validate, 1}, Funs) of
true ->
e_logger:log({?MODULE, {skipping_dataflow, entering_validate, {Mod, Fun}}}),
{ok, ValidArgs} = apply(Mod, validate, [Fun]),
Ret = apply(Mod, Fun, ValidArgs),
e_logger:log({?MODULE, {controller_response, Ret}}),
{ret_view, Ret, View};
false ->
e_logger:log({?MODULE, {skipping_dataflow_and_validate, entering_directly, {Mod, Fun}}}),
Ret = apply(Mod, Fun, [get_dataflow_initial_args()]),
e_logger:log({?MODULE, {controller_response, Ret}}),
{ret_view, Ret, View}
end
end.
-spec(controller_handler/2 :: ({list(atom()), list(atom())} | list(atom()), {atom(), atom(), string()}) ->
{ret_view, controller_response(), string()}).
controller_handler({Before, After}, {Mod,Fun,View}) ->
e_logger:log({?MODULE, {dataflow_before, Before}}),
e_logger:log({?MODULE, {dataflow_after, After}}),
InitialArgs = get_dataflow_initial_args(),
e_logger:log({?MODULE, {dataflow_dispatcher_args, InitialArgs}}),
Ret = case dataflow(Mod, Fun, Before, InitialArgs) of
{ok, Args} ->
RetVal = apply(Mod,Fun,[Args]),
dataflow(Mod,Fun,After,[]),
RetVal;
{error, Val} ->
Val
end,
e_logger:log({?MODULE, {controller_response, Ret}}),
{ret_view, Ret, View};
controller_handler(Before, {Mod,Fun,View}) ->
e_logger:log({?MODULE, {dataflow_before, Before}}),
InitialArgs = get_dataflow_initial_args(),
e_logger:log({?MODULE, {dataflow_dispatcher_args, InitialArgs}}),
Ret = case dataflow(Mod, Fun, Before, InitialArgs) of
{ok, Args} ->
apply(Mod,Fun,[Args]);
{error, Val} ->
Val
end,
e_logger:log({?MODULE, {controller_response, Ret}}),
{ret_view, Ret, View}.
-spec(get_dataflow_initial_args/0 :: () -> list(tuple())).
get_dataflow_initial_args() ->
case eptic:fget("__dispatcher_params") of
undefined ->
[];
Val ->
Val
end.
-spec(dataflow/4 :: (atom(), atom(), list(atom()), term()) -> {ok, term()} | {error, controller_response()}).
dataflow(_Mod, _Fun, [], Args) ->
{ok, Args};
dataflow(Mod, Fun, [H|T], Args) ->
case apply(Mod, H, [Fun, Args]) of
{ok, Args1} ->
dataflow(Mod, Fun, T, Args1);
{error, Reason} ->
{error, apply(Mod, error, [Fun, Reason])}
end.
%% @hidden
-spec(sanitize_file_name/1 :: (string()) -> string()).
sanitize_file_name([$.,$.|T]) ->
sanitize_file_name([$.|T]);
sanitize_file_name([H|T]) ->
case lists:member(H, " &;'`{}!\\?<>\"()$") of
true ->
sanitize_file_name(T);
false ->
[H|sanitize_file_name(T)]
end;
sanitize_file_name([]) ->
[].
| null | https://raw.githubusercontent.com/esl/erlang-web/2e5c2c9725465fc5b522250c305a9d553b3b8243/lib/eptic-1.4.1/src/e_mod_gen.erl | erlang | compliance with the License. You should have received a copy of the
retrieved via the world wide web at -consulting.com/.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-------------------------------------------------------------------
File : e_mod_gen.erl
@doc Generic mod for the web servers.
It deals with all generic parts of requests handling.
@type controller_response() = template | {redirect, URL :: string} |
{content, html, HTML :: string()} | {content, text, Text :: string()} |
{json, Term :: term()} | {template, Template :: string()} |
{custom, Custom :: term()} | {headers, Headers :: list(tuple()), Res :: controller_response()} |
{error, Code :: integer()}
@see e_mod_inets
@see e_mod_yaws
@end
-------------------------------------------------------------------
should be controller_response(), but recursive types are not
supported.
enoent
HTML = {html, Html :: string()}
@doc Dispatches the request and calls the right module/expands the template.
The returning value is interpreted by the server callback module.
@doc Expands the template from the given <i>Path</i>.
The requested template is read from disk cache and properly expanded
using <i>Transform</i>:process_xml function (by default {@link //wpart/wpart_xs:process_xml/1}).
@see //wpart/wpart_xs:process_xml/1
@see e_cache:read_file/1
@doc Generates the error site for given error code.
reason <i>not_found</i>.
In other cases, the reason is empty.
@see error_page/3
@see e_dispatcher:error_page/1
If the <i>debug_mode</i> flag is set to true, then the error page will always contain
the detailed information about the error.
Otherwise, the proper error page will be returned.
@see e_dispatcher:error_page/1
@see e_conf:debug_mode/0
@spec bind_session(Cookie :: term()) -> no_session | Cookie
If the session kept in request dictionary is empty, then it deletes it
from internal state and returns <i>no_session</i>.
Otherwise returns the given cookie.
session variable to empty list.
Otherwise, it loads the previous state of session from
the internal data storage.
@doc Returns the full path to the requested template.
The returned path is sanitized and prefixed with the template root
directory.
@see e_conf:template_root/0
@hidden
@hidden | The contents of this file are subject to the Erlang Web Public License ,
Version 1.0 , ( the " License " ) ; you may not use this file except in
Erlang Web Public License along with this software . If not , it can be
Software distributed under the License is distributed on an " AS IS "
The Initial Developer of the Original Code is Erlang Training & Consulting
Ltd. Portions created by Erlang Training & Consulting Ltd are Copyright 2008 ,
Erlang Training & Consulting Ltd. All Rights Reserved .
@author < >
-module(e_mod_gen).
-export([handle_request/1]).
-export([template/3, template_file/1, error_page/2, error_page/3]).
-export([restore_session/1, bind_session/1]).
-export([controller/3]).
-export([sanitize_file_name/1, parse_url/1]).
-include_lib("eptic/include/eptic.hrl").
-type(controller_response() :: template |
{redirect, string()} |
{content, html, iolist()} |
{content, text, iolist()} |
{content, xml, iolist()} |
{content, pdf, iolist()} |
{json, term()} |
{template, string()} |
{custom, term()} |
{headers, list(tuple()), ControllerResponse::any()} |
{error, integer()}).
handle_request(URL : : string ( ) ) - > HTML | list({status , Code : : integer ( ) } , HTML ) |
{ ret_view , Ret : : controller_response ( ) , View : : string ( ) } |
-spec(handle_request/1 :: (string()) -> {html, string()} |
list(tuple()) |
{ret_view, controller_response(), string()} |
enoent).
handle_request("/app/" ++ URL) ->
e_dict:fset("__path", URL),
e_logger:log({?MODULE, {old_path_type, URL}}),
case parse_url(URL) of
{view, View} -> view(View);
{error, R} -> error_page(501, URL, R);
{Mod, Fun, View} -> controller(Mod, Fun, View)
end;
handle_request(URL) ->
case e_dispatcher:dispatch(URL) of
invalid_url -> enoent;
{view, View} -> view(View);
{error, Code, Path} -> error_page(Code, Path);
{Mod, Fun, View} -> controller(Mod, Fun, View)
end.
template(Path : : string ( ) , [ ] , Transform : : atom ( ) ) - >
list({status , } , { html , HTML : : string ( ) } ) | { html , HTML : : string ( ) }
-spec(template/3 :: (string(), nil(), atom()) -> list(tuple()) | {html, string()}).
template(Template, [], Transform) ->
case e_cache:read_file(Template) of
{error, Error} ->
error_page(404, Template, {e_cache_error, Error});
E ->
{html, apply(Transform, process_xml, [E])}
end.
error_page(ErrorCode : : integer ( ) , URL : : string ( ) ) - >
list({status , } , { html , HTML : : string ( ) } )
If error code is 404 , then calls the { @link error_page/3 } with
-spec(error_page/2 :: (integer(), string()) -> list(tuple())).
error_page(404, Path) ->
error_page(404, Path, not_found);
error_page(ErrorCode, Path) ->
error_page(ErrorCode, Path, "").
error_page(ErrorCode : : integer ( ) , URL : : string ( ) , Reason : : term ( ) ) - >
list({status , } , { html , HTML : : string ( ) } )
@doc Generates the error site with the reason < i > > for given error code .
-spec(error_page/3 :: (integer(), string(), term()) -> list(tuple())).
error_page(ErrorCode, Path, Reason) ->
error_logger:error_msg("~p module, error_page, error code: ~p~n"
"path: ~p~n"
"reason: ~p ~n", [?MODULE, ErrorCode, Path, Reason]),
case e_dispatcher:error_page(ErrorCode) of
not_found ->
[{status, ErrorCode},
{html, ?ERR(io_lib:print({not_found, Path}))}];
TplPath ->
case e_conf:debug_mode() of
true ->
[{status, ErrorCode},
{html, ?ERR(io_lib:print({Reason, Path}))}];
false ->
Filled =
apply(e_conf:template_expander(),
process_xml,
[e_cache:read_file(TplPath)]),
error_logger:error_msg("~p module, error: ~p: ~p~n",
[?MODULE, ErrorCode, Path]),
[{status, ErrorCode},
{html, Filled}]
end
end.
@doc Binds the session to the given < i > Cookie</i > .
-spec(bind_session/1 :: (Cookie) -> no_session | Cookie).
bind_session(Cookie) ->
case {e_session:get_session(Cookie), e_dict:fget("session")} of
{{ok,""},[]} ->
no_session;
{_,[]} ->
e_session:delete_session(Cookie),
no_session;
{_,Session} ->
e_session:update_session(Cookie,Session),
Cookie
end.
: : term ( ) ) - > true
@doc Restores the session with the given < i > Cookie</i > .
If the < i > Cookie</i > is empty , it sets the request dictionary
-spec(restore_session/1 :: (term()) -> true).
restore_session("") ->
e_dict:fset("session", []);
restore_session(Cookie) ->
{ok, Session} = e_session:get_session(Cookie),
e_dict:fset("session", Session).
: : string ( ) ) - > FullPath : : string ( )
-spec(template_file/1 :: (string()) -> string()).
template_file(View) ->
filename:join([
e_conf:template_root(),
sanitize_file_name(View)
]).
-spec(parse_url/1 :: (string()) -> {view, string()} | {atom(), atom(), string()} | {error, invalid_url}).
parse_url(Url) ->
case string:tokens(Url, "/") of
["view"|View] ->
{view, filename:join(View)};
[Mod, Fun | View] when length(View) /= 0 ->
{list_to_existing_atom(Mod),
list_to_existing_atom(Fun),
filename:join(View)};
[Mod, Fun] ->
{list_to_existing_atom(Mod),
list_to_existing_atom(Fun),
[]};
_ ->
{error, invalid_url}
end.
-spec(view/1 :: (string()) -> list(tuple()) | {html, string()}).
view(View) ->
template(template_file(View), [],
e_conf:template_expander()).
-spec(controller/3 :: (atom(), atom(), string()) -> {ret_view, controller_response(), string()}).
controller(Mod, Fun, View) ->
eptic:fset("__controller", Mod),
Funs = Mod:module_info(exports),
case (lists:member({dataflow,1},Funs) andalso lists:member({error,2},Funs)) of
true ->
e_logger:log({?MODULE, {entering_dataflow_for, {Mod, Fun}}}),
Answ = apply(Mod, dataflow, [Fun]),
controller_handler(Answ, {Mod,Fun,View});
false ->
case lists:member({validate, 1}, Funs) of
true ->
e_logger:log({?MODULE, {skipping_dataflow, entering_validate, {Mod, Fun}}}),
{ok, ValidArgs} = apply(Mod, validate, [Fun]),
Ret = apply(Mod, Fun, ValidArgs),
e_logger:log({?MODULE, {controller_response, Ret}}),
{ret_view, Ret, View};
false ->
e_logger:log({?MODULE, {skipping_dataflow_and_validate, entering_directly, {Mod, Fun}}}),
Ret = apply(Mod, Fun, [get_dataflow_initial_args()]),
e_logger:log({?MODULE, {controller_response, Ret}}),
{ret_view, Ret, View}
end
end.
-spec(controller_handler/2 :: ({list(atom()), list(atom())} | list(atom()), {atom(), atom(), string()}) ->
{ret_view, controller_response(), string()}).
controller_handler({Before, After}, {Mod,Fun,View}) ->
e_logger:log({?MODULE, {dataflow_before, Before}}),
e_logger:log({?MODULE, {dataflow_after, After}}),
InitialArgs = get_dataflow_initial_args(),
e_logger:log({?MODULE, {dataflow_dispatcher_args, InitialArgs}}),
Ret = case dataflow(Mod, Fun, Before, InitialArgs) of
{ok, Args} ->
RetVal = apply(Mod,Fun,[Args]),
dataflow(Mod,Fun,After,[]),
RetVal;
{error, Val} ->
Val
end,
e_logger:log({?MODULE, {controller_response, Ret}}),
{ret_view, Ret, View};
controller_handler(Before, {Mod,Fun,View}) ->
e_logger:log({?MODULE, {dataflow_before, Before}}),
InitialArgs = get_dataflow_initial_args(),
e_logger:log({?MODULE, {dataflow_dispatcher_args, InitialArgs}}),
Ret = case dataflow(Mod, Fun, Before, InitialArgs) of
{ok, Args} ->
apply(Mod,Fun,[Args]);
{error, Val} ->
Val
end,
e_logger:log({?MODULE, {controller_response, Ret}}),
{ret_view, Ret, View}.
-spec(get_dataflow_initial_args/0 :: () -> list(tuple())).
get_dataflow_initial_args() ->
case eptic:fget("__dispatcher_params") of
undefined ->
[];
Val ->
Val
end.
-spec(dataflow/4 :: (atom(), atom(), list(atom()), term()) -> {ok, term()} | {error, controller_response()}).
dataflow(_Mod, _Fun, [], Args) ->
{ok, Args};
dataflow(Mod, Fun, [H|T], Args) ->
case apply(Mod, H, [Fun, Args]) of
{ok, Args1} ->
dataflow(Mod, Fun, T, Args1);
{error, Reason} ->
{error, apply(Mod, error, [Fun, Reason])}
end.
-spec(sanitize_file_name/1 :: (string()) -> string()).
sanitize_file_name([$.,$.|T]) ->
sanitize_file_name([$.|T]);
sanitize_file_name([H|T]) ->
case lists:member(H, " &;'`{}!\\?<>\"()$") of
true ->
sanitize_file_name(T);
false ->
[H|sanitize_file_name(T)]
end;
sanitize_file_name([]) ->
[].
|
3d3a1e1be2d068ef47a35c711af6ed77c4f9f820e55530d963ef970a5b88017a | kupl/FixML | sub27.ml |
type exp =
| V of var
| P of var * exp
| C of exp * exp
and var = string
let rec check : exp -> bool
= fun exp ->
match exp with
| V _ -> false
| C(_,_) -> false
| P(x, V y) -> if x = y then true else false
| P(x, P (y, z)) -> if x = y && check (P(x,z)) then true else false
| P(x, C (y, z)) -> check (P(x,y)) && check (P(x,z))
| null | https://raw.githubusercontent.com/kupl/FixML/0a032a733d68cd8ccc8b1034d2908cd43b241fce/benchmarks/wellformedness/wellformedness/submissions/sub27.ml | ocaml |
type exp =
| V of var
| P of var * exp
| C of exp * exp
and var = string
let rec check : exp -> bool
= fun exp ->
match exp with
| V _ -> false
| C(_,_) -> false
| P(x, V y) -> if x = y then true else false
| P(x, P (y, z)) -> if x = y && check (P(x,z)) then true else false
| P(x, C (y, z)) -> check (P(x,y)) && check (P(x,z))
| |
ce0b8c9ed399feb97280fc799ae252a49d49e3351bb92250bcd41baca4b92afd | kendroe/CoqRewriter | lib_coq.ml | The contrib name is used to locate errors when loading
let contrib_name = "advancedRewrite"
* Getting ( primitive Coq terms ) from existing Coq
libraries .
- [ find_reference ] is located in { v interp / coqlib.ml v } and return a global reference to the name " dir.s " ( it must be used lazily ) .
- [ constr_of_global ] is located in { v library / libnames.ml v } and turn a
global reference into a constr .
libraries.
- [find_reference] is located in {v interp/coqlib.ml v} and return a global reference to the name "dir.s" (it must be used lazily).
- [constr_of_global] is located in {v library/libnames.ml v} and turn a
global reference into a constr.
*)
let find_constant contrib dir s =
Universes.constr_of_global (Coqlib.find_reference contrib dir s)
let init_constant dir s = find_constant contrib_name dir s
(** [decomp_term] returns a user view of a constr, as defined in {v
kernel/term.ml v}. *)
let decomp_term (c : Term.constr) =
Term.kind_of_term ( Term.strip_outer_cast c )
Term.kind_of_term c (** ? **)
let lapp c v = Term.mkApp (Lazy.force c, v)
module Env = struct
module ConstrHashed = struct
type t = Term.constr
let equal = Term.eq_constr
let hash = Term.hash_constr
end
module ConstrHashtbl = Hashtbl.Make (ConstrHashed)
type t = (int ConstrHashtbl.t * int ref)
let add (env : t) (t : Term.constr ) =
try ConstrHashtbl.find (fst env) t
with
| Not_found ->
let i = !(snd env) in
ConstrHashtbl.add (fst env) t i ; incr (snd env); i
let empty () = (ConstrHashtbl.create 16, ref 0)
let to_list (env,_) =
ConstrHashtbl.fold (fun constr key acc -> ( constr) :: acc) env []
end
module Nat = struct
let path = ["Coq" ; "Init"; "Datatypes"]
let typ = lazy (init_constant path "nat")
let _S = lazy (init_constant path "S")
let _O = lazy (init_constant path "O")
A coq nat from an int
let of_int n =
let rec aux n =
begin match n with
| n when n < 0 -> assert false
| 0 -> Lazy.force _O
| n -> Term.mkApp
(
(Lazy.force _S
), [| aux (n-1)|]
)
end
in
aux n
end
(** Lists from the standard library*)
module List = struct
let path = ["Coq"; "Lists"; "List"]
let typ = lazy (init_constant path "list")
let nil = lazy (init_constant path "nil")
let cons = lazy (init_constant path "cons")
let cons ty h t =
Term.mkApp (Lazy.force cons, [| ty; h ; t |])
let nil ty =
(Term.mkApp (Lazy.force nil, [| ty |]))
let rec of_list ty = function
| [] -> nil ty
| t::q -> cons ty t (of_list ty q)
let type_of_list ty =
Term.mkApp (Lazy.force typ, [|ty|])
end
| null | https://raw.githubusercontent.com/kendroe/CoqRewriter/ddf5dc2ea51105d5a2dc87c99f0d364cf2b8ebf5/plugin/src/lib_coq.ml | ocaml | * [decomp_term] returns a user view of a constr, as defined in {v
kernel/term.ml v}.
* ? *
* Lists from the standard library | The contrib name is used to locate errors when loading
let contrib_name = "advancedRewrite"
* Getting ( primitive Coq terms ) from existing Coq
libraries .
- [ find_reference ] is located in { v interp / coqlib.ml v } and return a global reference to the name " dir.s " ( it must be used lazily ) .
- [ constr_of_global ] is located in { v library / libnames.ml v } and turn a
global reference into a constr .
libraries.
- [find_reference] is located in {v interp/coqlib.ml v} and return a global reference to the name "dir.s" (it must be used lazily).
- [constr_of_global] is located in {v library/libnames.ml v} and turn a
global reference into a constr.
*)
let find_constant contrib dir s =
Universes.constr_of_global (Coqlib.find_reference contrib dir s)
let init_constant dir s = find_constant contrib_name dir s
let decomp_term (c : Term.constr) =
Term.kind_of_term ( Term.strip_outer_cast c )
let lapp c v = Term.mkApp (Lazy.force c, v)
module Env = struct
module ConstrHashed = struct
type t = Term.constr
let equal = Term.eq_constr
let hash = Term.hash_constr
end
module ConstrHashtbl = Hashtbl.Make (ConstrHashed)
type t = (int ConstrHashtbl.t * int ref)
let add (env : t) (t : Term.constr ) =
try ConstrHashtbl.find (fst env) t
with
| Not_found ->
let i = !(snd env) in
ConstrHashtbl.add (fst env) t i ; incr (snd env); i
let empty () = (ConstrHashtbl.create 16, ref 0)
let to_list (env,_) =
ConstrHashtbl.fold (fun constr key acc -> ( constr) :: acc) env []
end
module Nat = struct
let path = ["Coq" ; "Init"; "Datatypes"]
let typ = lazy (init_constant path "nat")
let _S = lazy (init_constant path "S")
let _O = lazy (init_constant path "O")
A coq nat from an int
let of_int n =
let rec aux n =
begin match n with
| n when n < 0 -> assert false
| 0 -> Lazy.force _O
| n -> Term.mkApp
(
(Lazy.force _S
), [| aux (n-1)|]
)
end
in
aux n
end
module List = struct
let path = ["Coq"; "Lists"; "List"]
let typ = lazy (init_constant path "list")
let nil = lazy (init_constant path "nil")
let cons = lazy (init_constant path "cons")
let cons ty h t =
Term.mkApp (Lazy.force cons, [| ty; h ; t |])
let nil ty =
(Term.mkApp (Lazy.force nil, [| ty |]))
let rec of_list ty = function
| [] -> nil ty
| t::q -> cons ty t (of_list ty q)
let type_of_list ty =
Term.mkApp (Lazy.force typ, [|ty|])
end
|
7b091094945f9df6f355234628f4ba4d6fe128c09cb013eda9da8a668a58536e | jrh13/hol-light | preterm.ml | (* ========================================================================= *)
Preterms and pretypes ; typechecking ; translation to types and terms .
(* *)
, University of Cambridge Computer Laboratory
(* *)
( c ) Copyright , University of Cambridge 1998
( c ) Copyright , 1998 - 2007
( c ) Copyright , 2012
( c ) Copyright , 2012
(* ========================================================================= *)
needs "printer.ml";;
(* ------------------------------------------------------------------------- *)
Flag to say whether to treat varstruct " \const . bod " as variable .
(* ------------------------------------------------------------------------- *)
let ignore_constant_varstruct = ref true;;
(* ------------------------------------------------------------------------- *)
(* Flags controlling the treatment of invented type variables in quotations. *)
(* It can be treated as an error, result in a warning, or neither of those. *)
(* ------------------------------------------------------------------------- *)
let type_invention_warning = ref true;;
let type_invention_error = ref false;;
(* ------------------------------------------------------------------------- *)
(* Implicit types or type schemes for non-constants. *)
(* ------------------------------------------------------------------------- *)
let the_implicit_types = ref ([]:(string*hol_type)list);;
(* ------------------------------------------------------------------------- *)
(* Overloading and interface mapping. *)
(* ------------------------------------------------------------------------- *)
let make_overloadable s gty =
if can (assoc s) (!the_overload_skeletons)
then if assoc s (!the_overload_skeletons) = gty then ()
else failwith "make_overloadable: differs from existing skeleton"
else the_overload_skeletons := (s,gty)::(!the_overload_skeletons);;
let remove_interface sym =
let interface = filter ((<>)sym o fst) (!the_interface) in
the_interface := interface;;
let reduce_interface (sym,tm) =
let namty = try dest_const tm with Failure _ -> dest_var tm in
the_interface := filter ((<>) (sym,namty)) (!the_interface);;
let override_interface (sym,tm) =
let namty = try dest_const tm with Failure _ -> dest_var tm in
let interface = filter ((<>)sym o fst) (!the_interface) in
the_interface := (sym,namty)::interface;;
let overload_interface (sym,tm) =
let gty = try assoc sym (!the_overload_skeletons) with Failure _ ->
failwith ("symbol \""^sym^"\" is not overloadable") in
let (name,ty) as namty = try dest_const tm with Failure _ -> dest_var tm in
if not (can (type_match gty ty) [])
then failwith "Not an instance of type skeleton" else
let interface = filter ((<>) (sym,namty)) (!the_interface) in
the_interface := (sym,namty)::interface;;
let prioritize_overload ty =
do_list
(fun (s,gty) ->
try let _,(n,t) = find
(fun (s',(n,t)) -> s' = s && mem ty (map fst (type_match gty t [])))
(!the_interface) in
overload_interface(s,mk_var(n,t))
with Failure _ -> ())
(!the_overload_skeletons);;
(* ------------------------------------------------------------------------- *)
(* Type abbreviations. *)
(* ------------------------------------------------------------------------- *)
let new_type_abbrev,remove_type_abbrev,type_abbrevs =
let the_type_abbreviations = ref ([]:(string*hol_type)list) in
let remove_type_abbrev s =
the_type_abbreviations :=
filter (fun (s',_) -> s' <> s) (!the_type_abbreviations) in
let new_type_abbrev(s,ty) =
(remove_type_abbrev s;
the_type_abbreviations := merge(<) [s,ty] (!the_type_abbreviations)) in
let type_abbrevs() = !the_type_abbreviations in
new_type_abbrev,remove_type_abbrev,type_abbrevs;;
(* ------------------------------------------------------------------------- *)
(* Handle constant hiding. *)
(* ------------------------------------------------------------------------- *)
let hide_constant,unhide_constant,is_hidden =
let hcs = ref ([]:string list) in
let hide_constant c = hcs := union [c] (!hcs)
and unhide_constant c = hcs := subtract (!hcs) [c]
and is_hidden c = mem c (!hcs) in
hide_constant,unhide_constant,is_hidden;;
(* ------------------------------------------------------------------------- *)
(* The type of pretypes. *)
(* ------------------------------------------------------------------------- *)
type pretype = Utv of string (* User type variable *)
| Ptycon of string * pretype list (* Type constructor *)
| Stv of int;; (* System type variable *)
(* ------------------------------------------------------------------------- *)
(* Dummy pretype for the parser to stick in before a proper typing pass. *)
(* ------------------------------------------------------------------------- *)
let dpty = Ptycon("",[]);;
(* ------------------------------------------------------------------------- *)
(* Convert type to pretype. *)
(* ------------------------------------------------------------------------- *)
let rec pretype_of_type ty =
match ty with
Tyvar s -> Utv s
| Tyapp(con,args) -> Ptycon(con,map pretype_of_type args);;
(* ------------------------------------------------------------------------- *)
(* Preterm syntax. *)
(* ------------------------------------------------------------------------- *)
type preterm = Varp of string * pretype (* Variable - v *)
| Constp of string * pretype (* Constant - c *)
| Combp of preterm * preterm (* Combination - f x *)
Lambda - abstraction - \x . t
| Typing of preterm * pretype;; (* Type constraint - t : ty *)
(* ------------------------------------------------------------------------- *)
(* Convert term to preterm. *)
(* ------------------------------------------------------------------------- *)
let rec preterm_of_term tm =
try let n,ty = dest_var tm in
Varp(n,pretype_of_type ty)
with Failure _ -> try
let n,ty = dest_const tm in
Constp(n,pretype_of_type ty)
with Failure _ -> try
let v,bod = dest_abs tm in
Absp(preterm_of_term v,preterm_of_term bod)
with Failure _ ->
let l,r = dest_comb tm in
Combp(preterm_of_term l,preterm_of_term r);;
(* ------------------------------------------------------------------------- *)
Main pretype->type , preterm->term and retypechecking functions .
(* ------------------------------------------------------------------------- *)
let type_of_pretype,term_of_preterm,retypecheck =
let tyv_num = ref 0 in
let new_type_var() = let n = !tyv_num in (tyv_num := n + 1; Stv(n)) in
let pmk_cv(s,pty) =
if can get_const_type s then Constp(s,pty)
else Varp(s,pty) in
let pmk_numeral =
let num_pty = Ptycon("num",[]) in
let NUMERAL = Constp("NUMERAL",Ptycon("fun",[num_pty; num_pty]))
and BIT0 = Constp("BIT0",Ptycon("fun",[num_pty; num_pty]))
and BIT1 = Constp("BIT1",Ptycon("fun",[num_pty; num_pty]))
and t_0 = Constp("_0",num_pty) in
let rec pmk_numeral(n) =
if n =/ num_0 then t_0 else
let m = quo_num n (num_2) and b = mod_num n (num_2) in
let op = if b =/ num_0 then BIT0 else BIT1 in
Combp(op,pmk_numeral(m)) in
fun n -> Combp(NUMERAL,pmk_numeral n) in
(* ----------------------------------------------------------------------- *)
Pretype substitution for a pretype resulting from translation of type .
(* ----------------------------------------------------------------------- *)
let rec pretype_subst th ty =
match ty with
Ptycon(tycon,args) -> Ptycon(tycon,map (pretype_subst th) args)
| Utv v -> rev_assocd ty th ty
| _ -> failwith "pretype_subst: Unexpected form of pretype" in
(* ----------------------------------------------------------------------- *)
(* Convert type to pretype with new Stvs for all type variables. *)
(* ----------------------------------------------------------------------- *)
let pretype_instance ty =
let gty = pretype_of_type ty
and tyvs = map pretype_of_type (tyvars ty) in
let subs = map (fun tv -> new_type_var(),tv) tyvs in
pretype_subst subs gty in
(* ----------------------------------------------------------------------- *)
(* Get a new instance of a constant's generic type modulo interface. *)
(* ----------------------------------------------------------------------- *)
let get_generic_type cname =
match filter ((=) cname o fst) (!the_interface) with
[_,(c,ty)] -> ty
| _::_::_ -> assoc cname (!the_overload_skeletons)
| [] -> get_const_type cname in
(* ----------------------------------------------------------------------- *)
(* Get the implicit generic type of a variable. *)
(* ----------------------------------------------------------------------- *)
let get_var_type vname =
assoc vname !the_implicit_types in
(* ----------------------------------------------------------------------- *)
(* Unravel unifications and apply them to a type. *)
(* ----------------------------------------------------------------------- *)
let rec solve env pty =
match pty with
Ptycon(f,args) -> Ptycon(f,map (solve env) args)
| Stv(i) -> if defined env i then solve env (apply env i) else pty
| _ -> pty in
(* ----------------------------------------------------------------------- *)
(* Functions for display of preterms and pretypes, by converting them *)
(* to terms and types then re-using standard printing functions. *)
(* ----------------------------------------------------------------------- *)
let free_stvs =
let rec free_stvs = function
|Stv n -> [n]
|Utv _ -> []
|Ptycon(_,args) -> flat (map free_stvs args)
in
setify o free_stvs
in
let string_of_pretype stvs =
let rec type_of_pretype' ns = function
|Stv n -> mk_vartype (if mem n ns then "?" ^ string_of_int n else "_")
|Utv v -> mk_vartype v
|Ptycon(con,args) -> mk_type(con,map (type_of_pretype' ns) args)
in
string_of_type o type_of_pretype' stvs
in
let string_of_preterm =
let rec untyped_t_of_pt = function
|Varp(s,pty) -> mk_var(s,aty)
|Constp(s,pty) -> mk_mconst(s,get_const_type s)
|Combp(l,r) -> mk_comb(untyped_t_of_pt l,untyped_t_of_pt r)
|Absp(v,bod) -> mk_gabs(untyped_t_of_pt v,untyped_t_of_pt bod)
|Typing(ptm,pty) -> untyped_t_of_pt ptm
in
string_of_term o untyped_t_of_pt
in
let string_of_ty_error env = function
|None ->
"unify: types cannot be unified "
^ "(you should not see this message, please report)"
|Some(t,ty1,ty2) ->
let ty1 = solve env ty1 and ty2 = solve env ty2 in
let sty1 = string_of_pretype (free_stvs ty2) ty1 in
let sty2 = string_of_pretype (free_stvs ty1) ty2 in
let default_msg s =
" " ^ s ^ " cannot have type " ^ sty1 ^ " and " ^ sty2
^ " simultaneously"
in
match t with
|Constp(s,_) ->
" " ^ s ^ " has type " ^ string_of_type (get_const_type s) ^ ", "
^ "it cannot be used with type " ^ sty2
|Varp(s,_) -> default_msg s
|t -> default_msg (string_of_preterm t)
in
(* ----------------------------------------------------------------------- *)
(* Unification of types *)
(* ----------------------------------------------------------------------- *)
let rec istrivial ptm env x = function
|Stv y ->
y = x || defined env y && istrivial ptm env x (apply env y)
|Ptycon(f,args) when exists (istrivial ptm env x) args ->
failwith (string_of_ty_error env ptm)
|(Ptycon _ | Utv _) -> false
in
let unify ptm env ty1 ty2 =
let rec unify env = function
|[] -> env
|(ty1,ty2,_)::oth when ty1 = ty2 -> unify env oth
|(Ptycon(f,fargs),Ptycon(g,gargs),ptm)::oth ->
if f = g && length fargs = length gargs
then unify env (map2 (fun x y -> x,y,ptm) fargs gargs @ oth)
else failwith (string_of_ty_error env ptm)
|(Stv x,t,ptm)::oth ->
if defined env x then unify env ((apply env x,t,ptm)::oth)
else unify (if istrivial ptm env x t then env else (x|->t) env) oth
|(t,Stv x,ptm)::oth -> unify env ((Stv x,t,ptm)::oth)
|(_,_,ptm)::oth -> failwith (string_of_ty_error env ptm)
in
unify env [ty1,ty2,match ptm with None -> None | Some t -> Some(t,ty1,ty2)]
in
(* ----------------------------------------------------------------------- *)
(* Attempt to attach a given type to a term, performing unifications. *)
(* ----------------------------------------------------------------------- *)
let rec typify ty (ptm,venv,uenv) =
match ptm with
|Varp(s,_) when can (assoc s) venv ->
let ty' = assoc s venv in
Varp(s,ty'),[],unify (Some ptm) uenv ty' ty
|Varp(s,_) when can num_of_string s ->
let t = pmk_numeral(num_of_string s) in
let ty' = Ptycon("num",[]) in
t,[],unify (Some ptm) uenv ty' ty
|Varp(s,_) ->
warn (s <> "" && isnum s) "Non-numeral begins with a digit";
if not(is_hidden s) && can get_generic_type s then
let pty = pretype_instance(get_generic_type s) in
let ptm = Constp(s,pty) in
ptm,[],unify (Some ptm) uenv pty ty
else
let ptm = Varp(s,ty) in
if not(can get_var_type s) then ptm,[s,ty],uenv
else
let pty = pretype_instance(get_var_type s) in
ptm,[s,ty],unify (Some ptm) uenv pty ty
|Combp(f,x) ->
let ty'' = new_type_var() in
let ty' = Ptycon("fun",[ty'';ty]) in
let f',venv1,uenv1 = typify ty' (f,venv,uenv) in
let x',venv2,uenv2 = typify ty'' (x,venv1@venv,uenv1) in
Combp(f',x'),(venv1@venv2),uenv2
|Typing(tm,pty) -> typify ty (tm,venv,unify (Some tm) uenv ty pty)
|Absp(v,bod) ->
let ty',ty'' =
match ty with
|Ptycon("fun",[ty';ty'']) -> ty',ty''
|_ -> new_type_var(),new_type_var()
in
let ty''' = Ptycon("fun",[ty';ty'']) in
let uenv0 = unify (Some ptm) uenv ty''' ty in
let v',venv1,uenv1 =
let v',venv1,uenv1 = typify ty' (v,[],uenv0) in
match v' with
|Constp(s,_) when !ignore_constant_varstruct ->
Varp(s,ty'),[s,ty'],uenv0
|_ -> v',venv1,uenv1
in
let bod',venv2,uenv2 = typify ty'' (bod,venv1@venv,uenv1) in
Absp(v',bod'),venv2,uenv2
|_ -> failwith "typify: unexpected constant at this stage"
in
(* ----------------------------------------------------------------------- *)
(* Further specialize type constraints by resolving overloadings. *)
(* ----------------------------------------------------------------------- *)
let rec resolve_interface ptm cont env =
match ptm with
Combp(f,x) -> resolve_interface f (resolve_interface x cont) env
| Absp(v,bod) -> resolve_interface v (resolve_interface bod cont) env
| Varp(_,_) -> cont env
| Constp(s,ty) ->
let maps = filter (fun (s',_) -> s' = s) (!the_interface) in
if maps = [] then cont env else
tryfind (fun (_,(_,ty')) ->
let ty' = pretype_instance ty' in
cont(unify (Some ptm) env ty' ty)) maps
in
(* ----------------------------------------------------------------------- *)
(* Hence apply throughout a preterm. *)
(* ----------------------------------------------------------------------- *)
let rec solve_preterm env ptm =
match ptm with
Varp(s,ty) -> Varp(s,solve env ty)
| Combp(f,x) -> Combp(solve_preterm env f,solve_preterm env x)
| Absp(v,bod) -> Absp(solve_preterm env v,solve_preterm env bod)
| Constp(s,ty) -> let tys = solve env ty in
try let _,(c',_) = find
(fun (s',(c',ty')) ->
s = s' && can (unify None env (pretype_instance ty')) ty)
(!the_interface) in
pmk_cv(c',tys)
with Failure _ -> Constp(s,tys)
in
(* ----------------------------------------------------------------------- *)
(* Flag to indicate that Stvs were translated to real type variables. *)
(* ----------------------------------------------------------------------- *)
let stvs_translated = ref false in
(* ----------------------------------------------------------------------- *)
(* Pretype <-> type conversion; -> flags system type variable translation. *)
(* ----------------------------------------------------------------------- *)
let rec type_of_pretype ty =
match ty with
Stv n -> stvs_translated := true;
let s = "?"^(string_of_int n) in
mk_vartype(s)
| Utv(v) -> mk_vartype(v)
| Ptycon(con,args) -> mk_type(con,map type_of_pretype args) in
(* ----------------------------------------------------------------------- *)
(* Maps preterms to terms. *)
(* ----------------------------------------------------------------------- *)
let term_of_preterm =
let rec term_of_preterm ptm =
match ptm with
Varp(s,pty) -> mk_var(s,type_of_pretype pty)
| Constp(s,pty) -> mk_mconst(s,type_of_pretype pty)
| Combp(l,r) -> mk_comb(term_of_preterm l,term_of_preterm r)
| Absp(v,bod) -> mk_gabs(term_of_preterm v,term_of_preterm bod)
| Typing(ptm,pty) -> term_of_preterm ptm in
let report_type_invention () =
if !stvs_translated then
if !type_invention_error
then failwith "typechecking error (cannot infer type of variables)"
else warn !type_invention_warning "inventing type variables" in
fun ptm -> stvs_translated := false;
let tm = term_of_preterm ptm in
report_type_invention (); tm in
(* ----------------------------------------------------------------------- *)
(* Overall typechecker: initial typecheck plus overload resolution pass. *)
(* ----------------------------------------------------------------------- *)
let retypecheck venv ptm =
let ty = new_type_var() in
let ptm',_,env =
try typify ty (ptm,venv,undefined)
with Failure e -> failwith
("typechecking error (initial type assignment):" ^ e) in
let env' =
try resolve_interface ptm' (fun e -> e) env
with Failure _ -> failwith "typechecking error (overload resolution)" in
let ptm'' = solve_preterm env' ptm' in
ptm'' in
type_of_pretype,term_of_preterm,retypecheck;;
| null | https://raw.githubusercontent.com/jrh13/hol-light/d125b0ae73e546a63ed458a7891f4e14ae0409e2/preterm.ml | ocaml | =========================================================================
=========================================================================
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Flags controlling the treatment of invented type variables in quotations.
It can be treated as an error, result in a warning, or neither of those.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Implicit types or type schemes for non-constants.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Overloading and interface mapping.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Type abbreviations.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Handle constant hiding.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
The type of pretypes.
-------------------------------------------------------------------------
User type variable
Type constructor
System type variable
-------------------------------------------------------------------------
Dummy pretype for the parser to stick in before a proper typing pass.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Convert type to pretype.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Preterm syntax.
-------------------------------------------------------------------------
Variable - v
Constant - c
Combination - f x
Type constraint - t : ty
-------------------------------------------------------------------------
Convert term to preterm.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-----------------------------------------------------------------------
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Convert type to pretype with new Stvs for all type variables.
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Get a new instance of a constant's generic type modulo interface.
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Get the implicit generic type of a variable.
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Unravel unifications and apply them to a type.
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Functions for display of preterms and pretypes, by converting them
to terms and types then re-using standard printing functions.
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Unification of types
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Attempt to attach a given type to a term, performing unifications.
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Further specialize type constraints by resolving overloadings.
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Hence apply throughout a preterm.
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Flag to indicate that Stvs were translated to real type variables.
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Pretype <-> type conversion; -> flags system type variable translation.
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Maps preterms to terms.
-----------------------------------------------------------------------
-----------------------------------------------------------------------
Overall typechecker: initial typecheck plus overload resolution pass.
----------------------------------------------------------------------- | Preterms and pretypes ; typechecking ; translation to types and terms .
, University of Cambridge Computer Laboratory
( c ) Copyright , University of Cambridge 1998
( c ) Copyright , 1998 - 2007
( c ) Copyright , 2012
( c ) Copyright , 2012
needs "printer.ml";;
Flag to say whether to treat varstruct " \const . bod " as variable .
let ignore_constant_varstruct = ref true;;
let type_invention_warning = ref true;;
let type_invention_error = ref false;;
let the_implicit_types = ref ([]:(string*hol_type)list);;
let make_overloadable s gty =
if can (assoc s) (!the_overload_skeletons)
then if assoc s (!the_overload_skeletons) = gty then ()
else failwith "make_overloadable: differs from existing skeleton"
else the_overload_skeletons := (s,gty)::(!the_overload_skeletons);;
let remove_interface sym =
let interface = filter ((<>)sym o fst) (!the_interface) in
the_interface := interface;;
let reduce_interface (sym,tm) =
let namty = try dest_const tm with Failure _ -> dest_var tm in
the_interface := filter ((<>) (sym,namty)) (!the_interface);;
let override_interface (sym,tm) =
let namty = try dest_const tm with Failure _ -> dest_var tm in
let interface = filter ((<>)sym o fst) (!the_interface) in
the_interface := (sym,namty)::interface;;
let overload_interface (sym,tm) =
let gty = try assoc sym (!the_overload_skeletons) with Failure _ ->
failwith ("symbol \""^sym^"\" is not overloadable") in
let (name,ty) as namty = try dest_const tm with Failure _ -> dest_var tm in
if not (can (type_match gty ty) [])
then failwith "Not an instance of type skeleton" else
let interface = filter ((<>) (sym,namty)) (!the_interface) in
the_interface := (sym,namty)::interface;;
let prioritize_overload ty =
do_list
(fun (s,gty) ->
try let _,(n,t) = find
(fun (s',(n,t)) -> s' = s && mem ty (map fst (type_match gty t [])))
(!the_interface) in
overload_interface(s,mk_var(n,t))
with Failure _ -> ())
(!the_overload_skeletons);;
let new_type_abbrev,remove_type_abbrev,type_abbrevs =
let the_type_abbreviations = ref ([]:(string*hol_type)list) in
let remove_type_abbrev s =
the_type_abbreviations :=
filter (fun (s',_) -> s' <> s) (!the_type_abbreviations) in
let new_type_abbrev(s,ty) =
(remove_type_abbrev s;
the_type_abbreviations := merge(<) [s,ty] (!the_type_abbreviations)) in
let type_abbrevs() = !the_type_abbreviations in
new_type_abbrev,remove_type_abbrev,type_abbrevs;;
let hide_constant,unhide_constant,is_hidden =
let hcs = ref ([]:string list) in
let hide_constant c = hcs := union [c] (!hcs)
and unhide_constant c = hcs := subtract (!hcs) [c]
and is_hidden c = mem c (!hcs) in
hide_constant,unhide_constant,is_hidden;;
let dpty = Ptycon("",[]);;
let rec pretype_of_type ty =
match ty with
Tyvar s -> Utv s
| Tyapp(con,args) -> Ptycon(con,map pretype_of_type args);;
Lambda - abstraction - \x . t
let rec preterm_of_term tm =
try let n,ty = dest_var tm in
Varp(n,pretype_of_type ty)
with Failure _ -> try
let n,ty = dest_const tm in
Constp(n,pretype_of_type ty)
with Failure _ -> try
let v,bod = dest_abs tm in
Absp(preterm_of_term v,preterm_of_term bod)
with Failure _ ->
let l,r = dest_comb tm in
Combp(preterm_of_term l,preterm_of_term r);;
Main pretype->type , preterm->term and retypechecking functions .
let type_of_pretype,term_of_preterm,retypecheck =
let tyv_num = ref 0 in
let new_type_var() = let n = !tyv_num in (tyv_num := n + 1; Stv(n)) in
let pmk_cv(s,pty) =
if can get_const_type s then Constp(s,pty)
else Varp(s,pty) in
let pmk_numeral =
let num_pty = Ptycon("num",[]) in
let NUMERAL = Constp("NUMERAL",Ptycon("fun",[num_pty; num_pty]))
and BIT0 = Constp("BIT0",Ptycon("fun",[num_pty; num_pty]))
and BIT1 = Constp("BIT1",Ptycon("fun",[num_pty; num_pty]))
and t_0 = Constp("_0",num_pty) in
let rec pmk_numeral(n) =
if n =/ num_0 then t_0 else
let m = quo_num n (num_2) and b = mod_num n (num_2) in
let op = if b =/ num_0 then BIT0 else BIT1 in
Combp(op,pmk_numeral(m)) in
fun n -> Combp(NUMERAL,pmk_numeral n) in
Pretype substitution for a pretype resulting from translation of type .
let rec pretype_subst th ty =
match ty with
Ptycon(tycon,args) -> Ptycon(tycon,map (pretype_subst th) args)
| Utv v -> rev_assocd ty th ty
| _ -> failwith "pretype_subst: Unexpected form of pretype" in
let pretype_instance ty =
let gty = pretype_of_type ty
and tyvs = map pretype_of_type (tyvars ty) in
let subs = map (fun tv -> new_type_var(),tv) tyvs in
pretype_subst subs gty in
let get_generic_type cname =
match filter ((=) cname o fst) (!the_interface) with
[_,(c,ty)] -> ty
| _::_::_ -> assoc cname (!the_overload_skeletons)
| [] -> get_const_type cname in
let get_var_type vname =
assoc vname !the_implicit_types in
let rec solve env pty =
match pty with
Ptycon(f,args) -> Ptycon(f,map (solve env) args)
| Stv(i) -> if defined env i then solve env (apply env i) else pty
| _ -> pty in
let free_stvs =
let rec free_stvs = function
|Stv n -> [n]
|Utv _ -> []
|Ptycon(_,args) -> flat (map free_stvs args)
in
setify o free_stvs
in
let string_of_pretype stvs =
let rec type_of_pretype' ns = function
|Stv n -> mk_vartype (if mem n ns then "?" ^ string_of_int n else "_")
|Utv v -> mk_vartype v
|Ptycon(con,args) -> mk_type(con,map (type_of_pretype' ns) args)
in
string_of_type o type_of_pretype' stvs
in
let string_of_preterm =
let rec untyped_t_of_pt = function
|Varp(s,pty) -> mk_var(s,aty)
|Constp(s,pty) -> mk_mconst(s,get_const_type s)
|Combp(l,r) -> mk_comb(untyped_t_of_pt l,untyped_t_of_pt r)
|Absp(v,bod) -> mk_gabs(untyped_t_of_pt v,untyped_t_of_pt bod)
|Typing(ptm,pty) -> untyped_t_of_pt ptm
in
string_of_term o untyped_t_of_pt
in
let string_of_ty_error env = function
|None ->
"unify: types cannot be unified "
^ "(you should not see this message, please report)"
|Some(t,ty1,ty2) ->
let ty1 = solve env ty1 and ty2 = solve env ty2 in
let sty1 = string_of_pretype (free_stvs ty2) ty1 in
let sty2 = string_of_pretype (free_stvs ty1) ty2 in
let default_msg s =
" " ^ s ^ " cannot have type " ^ sty1 ^ " and " ^ sty2
^ " simultaneously"
in
match t with
|Constp(s,_) ->
" " ^ s ^ " has type " ^ string_of_type (get_const_type s) ^ ", "
^ "it cannot be used with type " ^ sty2
|Varp(s,_) -> default_msg s
|t -> default_msg (string_of_preterm t)
in
let rec istrivial ptm env x = function
|Stv y ->
y = x || defined env y && istrivial ptm env x (apply env y)
|Ptycon(f,args) when exists (istrivial ptm env x) args ->
failwith (string_of_ty_error env ptm)
|(Ptycon _ | Utv _) -> false
in
let unify ptm env ty1 ty2 =
let rec unify env = function
|[] -> env
|(ty1,ty2,_)::oth when ty1 = ty2 -> unify env oth
|(Ptycon(f,fargs),Ptycon(g,gargs),ptm)::oth ->
if f = g && length fargs = length gargs
then unify env (map2 (fun x y -> x,y,ptm) fargs gargs @ oth)
else failwith (string_of_ty_error env ptm)
|(Stv x,t,ptm)::oth ->
if defined env x then unify env ((apply env x,t,ptm)::oth)
else unify (if istrivial ptm env x t then env else (x|->t) env) oth
|(t,Stv x,ptm)::oth -> unify env ((Stv x,t,ptm)::oth)
|(_,_,ptm)::oth -> failwith (string_of_ty_error env ptm)
in
unify env [ty1,ty2,match ptm with None -> None | Some t -> Some(t,ty1,ty2)]
in
let rec typify ty (ptm,venv,uenv) =
match ptm with
|Varp(s,_) when can (assoc s) venv ->
let ty' = assoc s venv in
Varp(s,ty'),[],unify (Some ptm) uenv ty' ty
|Varp(s,_) when can num_of_string s ->
let t = pmk_numeral(num_of_string s) in
let ty' = Ptycon("num",[]) in
t,[],unify (Some ptm) uenv ty' ty
|Varp(s,_) ->
warn (s <> "" && isnum s) "Non-numeral begins with a digit";
if not(is_hidden s) && can get_generic_type s then
let pty = pretype_instance(get_generic_type s) in
let ptm = Constp(s,pty) in
ptm,[],unify (Some ptm) uenv pty ty
else
let ptm = Varp(s,ty) in
if not(can get_var_type s) then ptm,[s,ty],uenv
else
let pty = pretype_instance(get_var_type s) in
ptm,[s,ty],unify (Some ptm) uenv pty ty
|Combp(f,x) ->
let ty'' = new_type_var() in
let ty' = Ptycon("fun",[ty'';ty]) in
let f',venv1,uenv1 = typify ty' (f,venv,uenv) in
let x',venv2,uenv2 = typify ty'' (x,venv1@venv,uenv1) in
Combp(f',x'),(venv1@venv2),uenv2
|Typing(tm,pty) -> typify ty (tm,venv,unify (Some tm) uenv ty pty)
|Absp(v,bod) ->
let ty',ty'' =
match ty with
|Ptycon("fun",[ty';ty'']) -> ty',ty''
|_ -> new_type_var(),new_type_var()
in
let ty''' = Ptycon("fun",[ty';ty'']) in
let uenv0 = unify (Some ptm) uenv ty''' ty in
let v',venv1,uenv1 =
let v',venv1,uenv1 = typify ty' (v,[],uenv0) in
match v' with
|Constp(s,_) when !ignore_constant_varstruct ->
Varp(s,ty'),[s,ty'],uenv0
|_ -> v',venv1,uenv1
in
let bod',venv2,uenv2 = typify ty'' (bod,venv1@venv,uenv1) in
Absp(v',bod'),venv2,uenv2
|_ -> failwith "typify: unexpected constant at this stage"
in
let rec resolve_interface ptm cont env =
match ptm with
Combp(f,x) -> resolve_interface f (resolve_interface x cont) env
| Absp(v,bod) -> resolve_interface v (resolve_interface bod cont) env
| Varp(_,_) -> cont env
| Constp(s,ty) ->
let maps = filter (fun (s',_) -> s' = s) (!the_interface) in
if maps = [] then cont env else
tryfind (fun (_,(_,ty')) ->
let ty' = pretype_instance ty' in
cont(unify (Some ptm) env ty' ty)) maps
in
let rec solve_preterm env ptm =
match ptm with
Varp(s,ty) -> Varp(s,solve env ty)
| Combp(f,x) -> Combp(solve_preterm env f,solve_preterm env x)
| Absp(v,bod) -> Absp(solve_preterm env v,solve_preterm env bod)
| Constp(s,ty) -> let tys = solve env ty in
try let _,(c',_) = find
(fun (s',(c',ty')) ->
s = s' && can (unify None env (pretype_instance ty')) ty)
(!the_interface) in
pmk_cv(c',tys)
with Failure _ -> Constp(s,tys)
in
let stvs_translated = ref false in
let rec type_of_pretype ty =
match ty with
Stv n -> stvs_translated := true;
let s = "?"^(string_of_int n) in
mk_vartype(s)
| Utv(v) -> mk_vartype(v)
| Ptycon(con,args) -> mk_type(con,map type_of_pretype args) in
let term_of_preterm =
let rec term_of_preterm ptm =
match ptm with
Varp(s,pty) -> mk_var(s,type_of_pretype pty)
| Constp(s,pty) -> mk_mconst(s,type_of_pretype pty)
| Combp(l,r) -> mk_comb(term_of_preterm l,term_of_preterm r)
| Absp(v,bod) -> mk_gabs(term_of_preterm v,term_of_preterm bod)
| Typing(ptm,pty) -> term_of_preterm ptm in
let report_type_invention () =
if !stvs_translated then
if !type_invention_error
then failwith "typechecking error (cannot infer type of variables)"
else warn !type_invention_warning "inventing type variables" in
fun ptm -> stvs_translated := false;
let tm = term_of_preterm ptm in
report_type_invention (); tm in
let retypecheck venv ptm =
let ty = new_type_var() in
let ptm',_,env =
try typify ty (ptm,venv,undefined)
with Failure e -> failwith
("typechecking error (initial type assignment):" ^ e) in
let env' =
try resolve_interface ptm' (fun e -> e) env
with Failure _ -> failwith "typechecking error (overload resolution)" in
let ptm'' = solve_preterm env' ptm' in
ptm'' in
type_of_pretype,term_of_preterm,retypecheck;;
|
67de76cc6abec9c6020032504cd3b2e4f1155dd4b510063b1053061e5e1835e4 | shayan-najd/NativeMetaprogramming | tcrun043.hs | # LANGUAGE GADTs , TypeFamilies , ConstraintKinds #
import GHC.Exts ( Constraint )
type Showish = Show
f :: (Showish a) => a -> String
f x = show x ++ show x
data T = T
data F = F
data GADT a where
Tish :: GADT T
Fish :: GADT F
type family Indexed a b :: Constraint
type instance Indexed T b = Show b
type instance Indexed F b = Num b
g :: (Indexed a b) => GADT a -> b -> Either String b
g Tish x = Left (show x)
g Fish x = Right (x + 1)
type TwoConstraints a = (Show a, Num a)
-- We'll NOINLINE h so that we test the code generation for
-- constraint tuples
# NOINLINE h #
h :: TwoConstraints a => a -> String
h x = show (x + 1)
main :: IO ()
main = do
print $ f 9
print $ f True
print $ g Tish 10
print $ g Tish False
print $ g Fish 11
print $ g Fish 12.0
print $ h 13
print $ h 14.0
| null | https://raw.githubusercontent.com/shayan-najd/NativeMetaprogramming/24e5f85990642d3f0b0044be4327b8f52fce2ba3/testsuite/tests/typecheck/should_run/tcrun043.hs | haskell | We'll NOINLINE h so that we test the code generation for
constraint tuples | # LANGUAGE GADTs , TypeFamilies , ConstraintKinds #
import GHC.Exts ( Constraint )
type Showish = Show
f :: (Showish a) => a -> String
f x = show x ++ show x
data T = T
data F = F
data GADT a where
Tish :: GADT T
Fish :: GADT F
type family Indexed a b :: Constraint
type instance Indexed T b = Show b
type instance Indexed F b = Num b
g :: (Indexed a b) => GADT a -> b -> Either String b
g Tish x = Left (show x)
g Fish x = Right (x + 1)
type TwoConstraints a = (Show a, Num a)
# NOINLINE h #
h :: TwoConstraints a => a -> String
h x = show (x + 1)
main :: IO ()
main = do
print $ f 9
print $ f True
print $ g Tish 10
print $ g Tish False
print $ g Fish 11
print $ g Fish 12.0
print $ h 13
print $ h 14.0
|
87894e903a03c872fd08d581f23186b21a08464e8d425c814cbefd33c7c6893c | simonmar/parconc-examples | rsa2.hs | --
Derived from a program believed to be originally written by
Launchbury , and incorporating the RSA algorithm which is in the
-- public domain.
--
import System.Environment
import Control.Parallel.Strategies
import Data.List
import qualified Data.ByteString.Lazy.Char8 as B
import Data.ByteString.Lazy.Char8 (ByteString)
import ByteStringCompat
main = do
[cmd,f] <- getArgs
text <- case f of
"-" -> B.getContents
_ -> B.readFile f
case cmd of
"encrypt" -> B.putStr (encrypt n e text)
"decrypt" -> B.putStr (decrypt n d text)
example keys , created by makeKey below
n, d, e :: Integer
(n,d,e) = (3539517541822645630044332546732747854710141643130106075585179940882036712515975698104695392573887034788933523673604280427152984392565826058380509963039612419361429882234327760449752708861159361414595229,121492527803044541056704751360974487724009957507650761043424679483464778334890045929773805597614290949,216244483337223224019000724904989828660716358310562600433314577442746058361727768326718965949745599136958260211917551718034992348233259083876505235987999070191048638795502931877693189179113255689722281)
encrypt, decrypt :: Integer -> Integer -> ByteString -> ByteString
-- <<encrypt
encrypt n e = B.unlines
< 1 >
. map (B.pack . show . power e n . code)
. chunk (size n)
-- >>
decrypt n d = B.concat
. map (B.pack . decode . power d n)
. integers
. B.lines
integers :: [ByteString] -> [Integer]
integers bs = [ i | Just (i,_) <- map B.readInteger bs ]
------ Converting between Strings and Integers -----------
code :: ByteString -> Integer
code = B.foldl' accum 0
where accum x y = (128 * x) + fromIntegral (fromEnum y)
decode :: Integer -> String
decode n = reverse (expand n)
where expand 0 = []
expand x = toEnum (fromIntegral (x `mod` 128)) : expand (x `div` 128)
chunk :: Int -> ByteString -> [ByteString]
chunk n xs | B.null xs = []
chunk n xs = as : chunk n bs
where (as,bs) = B.splitAt (fromIntegral n) xs
size :: Integer -> Int
size n = (length (show n) * 47) `div` 100 -- log_128 10 = 0.4745
------- Constructing keys -------------------------
makeKeys :: Integer -> Integer -> (Integer, Integer, Integer)
makeKeys r s = (p*q, d, invert ((p-1)*(q-1)) d)
where p = nextPrime r
q = nextPrime s
d = nextPrime (p+q+1)
nextPrime :: Integer -> Integer
nextPrime a = head (filter prime [odd,odd+2..])
where odd | even a = a+1
| True = a
prime p = and [power (p-1) p x == 1 | x <- [3,5,7]]
invert :: Integer -> Integer -> Integer
invert n a = if e<0 then e+n else e
where e=iter n 0 a 1
iter :: Integer -> Integer -> Integer -> Integer -> Integer
iter g v 0 w = v
iter g v h w = iter h w (g `mod` h) (v - (g `div` h)*w)
------- Fast exponentiation, mod m -----------------
power :: Integer -> Integer -> Integer -> Integer
power 0 m x = 1
power n m x | even n = sqr (power (n `div` 2) m x) `mod` m
| True = (x * power (n-1) m x) `mod` m
sqr :: Integer -> Integer
sqr x = x * x
| null | https://raw.githubusercontent.com/simonmar/parconc-examples/840a3f508f9bb6e03961e1b90311a1edd945adba/rsa2.hs | haskell |
public domain.
<<encrypt
>>
---- Converting between Strings and Integers -----------
log_128 10 = 0.4745
----- Constructing keys -------------------------
----- Fast exponentiation, mod m ----------------- | Derived from a program believed to be originally written by
Launchbury , and incorporating the RSA algorithm which is in the
import System.Environment
import Control.Parallel.Strategies
import Data.List
import qualified Data.ByteString.Lazy.Char8 as B
import Data.ByteString.Lazy.Char8 (ByteString)
import ByteStringCompat
main = do
[cmd,f] <- getArgs
text <- case f of
"-" -> B.getContents
_ -> B.readFile f
case cmd of
"encrypt" -> B.putStr (encrypt n e text)
"decrypt" -> B.putStr (decrypt n d text)
example keys , created by makeKey below
n, d, e :: Integer
(n,d,e) = (3539517541822645630044332546732747854710141643130106075585179940882036712515975698104695392573887034788933523673604280427152984392565826058380509963039612419361429882234327760449752708861159361414595229,121492527803044541056704751360974487724009957507650761043424679483464778334890045929773805597614290949,216244483337223224019000724904989828660716358310562600433314577442746058361727768326718965949745599136958260211917551718034992348233259083876505235987999070191048638795502931877693189179113255689722281)
encrypt, decrypt :: Integer -> Integer -> ByteString -> ByteString
encrypt n e = B.unlines
< 1 >
. map (B.pack . show . power e n . code)
. chunk (size n)
decrypt n d = B.concat
. map (B.pack . decode . power d n)
. integers
. B.lines
integers :: [ByteString] -> [Integer]
integers bs = [ i | Just (i,_) <- map B.readInteger bs ]
code :: ByteString -> Integer
code = B.foldl' accum 0
where accum x y = (128 * x) + fromIntegral (fromEnum y)
decode :: Integer -> String
decode n = reverse (expand n)
where expand 0 = []
expand x = toEnum (fromIntegral (x `mod` 128)) : expand (x `div` 128)
chunk :: Int -> ByteString -> [ByteString]
chunk n xs | B.null xs = []
chunk n xs = as : chunk n bs
where (as,bs) = B.splitAt (fromIntegral n) xs
size :: Integer -> Int
makeKeys :: Integer -> Integer -> (Integer, Integer, Integer)
makeKeys r s = (p*q, d, invert ((p-1)*(q-1)) d)
where p = nextPrime r
q = nextPrime s
d = nextPrime (p+q+1)
nextPrime :: Integer -> Integer
nextPrime a = head (filter prime [odd,odd+2..])
where odd | even a = a+1
| True = a
prime p = and [power (p-1) p x == 1 | x <- [3,5,7]]
invert :: Integer -> Integer -> Integer
invert n a = if e<0 then e+n else e
where e=iter n 0 a 1
iter :: Integer -> Integer -> Integer -> Integer -> Integer
iter g v 0 w = v
iter g v h w = iter h w (g `mod` h) (v - (g `div` h)*w)
power :: Integer -> Integer -> Integer -> Integer
power 0 m x = 1
power n m x | even n = sqr (power (n `div` 2) m x) `mod` m
| True = (x * power (n-1) m x) `mod` m
sqr :: Integer -> Integer
sqr x = x * x
|
18f1cc9ef81c4247a51733072a5cd7ff8078df8f1a17ab52f01f326aa796fc9b | erlang/otp | megaco_messenger_misc.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2003 - 2023 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%%----------------------------------------------------------------------
%% Purpose: Misc functions used both from the megaco_messenger module
%% and the megaco_ack_sender module.
%%
%%----------------------------------------------------------------------
-module(megaco_messenger_misc).
%% Application internal export
-export([encode_body/3,
encode_trans_request/2,
encode_trans_reply/2,
encode_actions/3,
send_body/3,
send_message/3,
transform_transaction_reply/2
]).
%% Test functions
-export([compose_message/3, encode_message/2]).
-include_lib("megaco/include/megaco.hrl").
-include("megaco_message_internal.hrl").
-include_lib("megaco/src/app/megaco_internal.hrl").
-define(MSG_HDR_SZ, 128). % This is just a guess...
-ifdef(MEGACO_TEST_CODE).
-define(SIM(Other,Where),
fun(Afun,Bfun) ->
Kfun = {?MODULE,Bfun},
case (catch ets:lookup(megaco_test_data, Kfun)) of
[{Kfun,Cfun}] ->
Cfun(Afun);
_ ->
Afun
end
end(Other,Where)).
-define(TC_AWAIT_SEND_EVENT(SendFunction),
case megaco_tc_controller:lookup(send_function) of
{value, {Tag, Pid}} when is_pid(Pid) ->
Pid ! {Tag, self(), SendFuncion},
receive
{Tag, Pid} ->
ok
end;
_ ->
ok
end).
-else.
-define(SIM(Other,Where),Other).
-define(TC_AWAIT_SEND_EVENT(_),ok).
-endif.
%%----------------------------------------------------------------------
%% Encode the transaction request
%%----------------------------------------------------------------------
encode_trans_request(CD, TR) when is_record(TR, 'TransactionRequest') ->
?report_debug(CD, "encode trans request", [TR]),
Trans = {transactionRequest, TR},
encode_transaction(CD, Trans).
encode_trans_reply(#conn_data{segment_send = SegSend,
max_pdu_size = Max,
protocol_version = V} = CD, Reply)
when (SegSend == infinity) or (is_integer(SegSend) and (SegSend > 0)) and
is_integer(V) and (V >= 3) and
is_integer(Max) and (Max >= ?MSG_HDR_SZ) ->
(catch encode_segmented_trans_reply(CD, Reply));
encode_trans_reply(CD, TR) when is_record(TR, megaco_transaction_reply) ->
?report_debug(CD, "encode trans reply", [TR]),
Trans = {transactionReply, transform_transaction_reply(CD, TR)},
encode_transaction(CD, Trans);
encode_trans_reply(CD, TR) when is_tuple(TR) and
(element(1, TR) == 'TransactionReply') ->
?report_debug(CD, "encode trans reply", [TR]),
Trans = {transactionReply, TR},
encode_transaction(CD, Trans).
encode_segmented_trans_reply(#conn_data{max_pdu_size = Max} = CD, Rep) ->
#megaco_transaction_reply{transactionResult = Res1} = Rep,
case Res1 of
{actionReplies, AR} when is_list(AR) andalso (length(AR) >= 1) ->
case encode_action_replies(CD, AR) of
{Size, EncodedARs} when Size =< (Max - ?MSG_HDR_SZ) ->
?report_debug(CD, "action replies encoded size ok",
[Size, Max]),
%% No need to segment message: within size limit
Res2 = {actionReplies, EncodedARs},
TR = Rep#megaco_transaction_reply{transactionResult = Res2},
TR2 = transform_transaction_reply(CD, TR),
Trans = {transactionReply, TR2},
encode_transaction(CD, Trans);
{Size, EncodecARs} ->
?report_debug(CD,
"action replies encoded size to large - "
"segment",
[Size, Max]),
%% Over size limit, so go segment the message
encode_segments(CD, Rep, EncodecARs)
end;
_ ->
TR = transform_transaction_reply(CD, Rep),
Trans = {transactionReply, TR},
encode_transaction(CD, Trans)
end.
encode_segments(CD, Reply, EncodecARs) ->
encode_segments(CD, Reply, EncodecARs, 1, []).
encode_segments(CD, Reply, [EncodedAR], SN, EncodedSegs) ->
Bin = encode_segment(CD, Reply, EncodedAR, SN, 'NULL'),
{ok, lists:reverse([{SN, Bin}|EncodedSegs])};
encode_segments(CD, Reply, [EncodedAR|EncodedARs], SN, EncodedSegs) ->
Bin = encode_segment(CD, Reply, EncodedAR, SN, asn1_NOVALUE),
encode_segments(CD, Reply, EncodedARs, SN + 1, [{SN, Bin}|EncodedSegs]).
encode_segment(CD, Reply, EncodedAR, SN, SC) ->
Res = {actionReplies, [EncodedAR]},
TR0 = Reply#megaco_transaction_reply{transactionResult = Res,
segmentNumber = SN,
segmentationComplete = SC},
TR = transform_transaction_reply(CD, TR0),
Trans = {transactionReply, TR},
case encode_transaction(CD, Trans) of
{ok, Bin} ->
Bin;
Error ->
throw(Error)
end.
encode_transaction(#conn_data{protocol_version = V,
encoding_mod = EM,
encoding_config = EC} = CD, Trans) ->
case (catch EM:encode_transaction(EC, V, Trans)) of
{ok, Bin} ->
?SIM({ok, Bin}, encode_trans);
{'EXIT', {undef, _}} ->
{error, not_implemented};
{error, not_implemented} = Error1 ->
Error1;
{error, Reason} ->
incNumErrors(CD#conn_data.conn_handle),
{error, {EM, encode_transaction, [EC, V, Trans], Reason}};
Error2 ->
incNumErrors(CD#conn_data.conn_handle),
{error, {EM, encode_transaction, [EC, V, Trans], Error2}}
end.
%%----------------------------------------------------------------------
%% Encode the action request's
%%----------------------------------------------------------------------
encode_actions(#conn_data{protocol_version = V} = CD, TraceLabel, ARs) ->
?report_debug(CD, TraceLabel, [ARs]),
%% Encode the actions
EM = CD#conn_data.encoding_mod,
EC = CD#conn_data.encoding_config,
case (catch EM:encode_action_requests(EC, V, ARs)) of
{ok, Bin} when is_binary(Bin) ->
?SIM({ok, Bin}, encode_actions);
{'EXIT', {undef, _}} ->
incNumErrors(CD#conn_data.conn_handle),
Reason = not_implemented,
{error, {EM, encode_action_requests, [EC, ARs], Reason}};
{error, Reason} ->
incNumErrors(CD#conn_data.conn_handle),
{error, {EM, encode_action_requests, [EC, ARs], Reason}};
Error ->
incNumErrors(CD#conn_data.conn_handle),
{error, {EM, encode_action_requests, [EC, ARs], Error}}
end.
%%----------------------------------------------------------------------
%% Encode the action reply's
%%----------------------------------------------------------------------
encode_action_replies(CD, AR) ->
encode_action_replies(CD, AR, 0, []).
encode_action_replies(_, [], Size, Acc) ->
{Size, lists:reverse(Acc)};
encode_action_replies(#conn_data{protocol_version = V,
encoding_mod = Mod,
encoding_config = Conf} = CD,
[AR|ARs], Size, Acc) ->
case (catch Mod:encode_action_reply(Conf, V, AR)) of
{ok, Bin} when is_binary(Bin) ->
encode_action_replies(CD, ARs, Size + byte_size(Bin), [Bin|Acc]);
{'EXIT', {undef, _}} ->
throw({error, not_implemented});
{error, not_implemented} = Error1 ->
throw(Error1);
{error, Reason} ->
incNumErrors(CD#conn_data.conn_handle),
throw({error, {Mod, encode_action_reply, [Conf, AR], Reason}});
Error ->
incNumErrors(CD#conn_data.conn_handle),
throw({error, {Mod, encode_action_reply, [Conf, AR], Error}})
end.
%%----------------------------------------------------------------------
%% Encode the message body
%%----------------------------------------------------------------------
encode_body(#conn_data{protocol_version = V} = ConnData,
TraceLabel, Body) ->
%% Create the message envelope
MegaMsg = compose_message(ConnData, V, Body),
?report_debug(ConnData, TraceLabel, [MegaMsg]),
%% Encode the message
EM = ConnData#conn_data.encoding_mod,
EC = ConnData#conn_data.encoding_config,
case (catch EM:encode_message(EC, V, MegaMsg)) of
{ok, Bin} when is_binary(Bin) ->
?SIM({ok, Bin}, encode_body);
{error, Reason} ->
incNumErrors(ConnData#conn_data.conn_handle),
{error, {EM, [EC, MegaMsg], Reason}};
Error ->
incNumErrors(ConnData#conn_data.conn_handle),
{error, {EM, [EC, MegaMsg], Error}}
end.
%%----------------------------------------------------------------------
%% Compose and encode a message
%%----------------------------------------------------------------------
compose_message(#conn_data{conn_handle = CH,
auth_data = MsgAuth}, V, Body) ->
LocalMid = CH#megaco_conn_handle.local_mid,
Msg = #'Message'{version = V,
mId = LocalMid,
messageBody = Body},
: Compute ?
mess = Msg},
MegaMsg.
encode_message(#conn_data{protocol_version = Version,
encoding_mod = EncodingMod,
encoding_config = EncodingConfig}, MegaMsg) ->
(catch EncodingMod:encode_message(EncodingConfig, Version, MegaMsg)).
%%----------------------------------------------------------------------
%% Send the message body
%%----------------------------------------------------------------------
send_body(ConnData, TraceLabel, Body) ->
case encode_body(ConnData, TraceLabel, Body) of
{ok, Bin} ->
send_message(ConnData, false, Bin);
{error, Reason} ->
{error, Reason}
end.
%%----------------------------------------------------------------------
%% Send the (encoded) message
%%----------------------------------------------------------------------
send_message(#conn_data{resend_indication = flag} = ConnData,
Resend, Bin) ->
do_send_message(ConnData, send_message, Bin, [Resend]);
send_message(#conn_data{resend_indication = true} = ConnData,
true, Bin) ->
do_send_message(ConnData, resend_message, Bin, []);
send_message(ConnData, _Resend, Bin) ->
do_send_message(ConnData, send_message, Bin, []).
do_send_message(ConnData, SendFunc, Bin, Extra) ->
%% Send the message
#conn_data{send_mod = SendMod,
send_handle = SendHandle} = ConnData,
?TC_AWAIT_SEND_EVENT(SendFunc),
?report_trace(ConnData, "send bytes", [{bytes, Bin},
{send_func, SendFunc}]),
Args = [SendHandle, Bin | Extra],
case (catch apply(SendMod, SendFunc, Args)) of
ok ->
?SIM({ok, Bin}, send_message);
{cancel, Reason} ->
?report_trace(ConnData, "<CANCEL> send_message callback",
[{bytes, Bin}, {cancel, Reason}]),
{error, {send_message_cancelled, Reason}};
{error, Reason} ->
incNumErrors(ConnData#conn_data.conn_handle),
?report_important(ConnData, "<ERROR> send_message callback",
[{bytes, Bin}, {error, Reason}]),
error_msg("failed (error) sending message [using ~w] (~p):"
"~n~w", [SendFunc, SendHandle, Reason]),
{error, {send_message_failed, Reason}};
{'EXIT', Reason} = Error ->
incNumErrors(ConnData#conn_data.conn_handle),
?report_important(ConnData, "<ERROR> send_message callback",
[{bytes, Bin}, {exit, Reason}]),
error_msg("failed (exit) sending message [using ~w] (~p):"
"~n~w", [SendFunc, SendHandle, Reason]),
{error, {send_message_failed, Error}};
Reason ->
incNumErrors(ConnData#conn_data.conn_handle),
?report_important(ConnData, "<ERROR> send_message callback",
[{bytes, Bin}, {error, Reason}]),
error_msg("failed sending message [using ~w] on (~p): "
"~n~w", [SendFunc, SendHandle, Reason]),
{error, {send_message_failed, Reason}}
end.
%%%-----------------------------------------------------------------
%%% Misc internal util functions
%%%-----------------------------------------------------------------
transform_transaction_reply(#conn_data{protocol_version = V}, TR)
when is_integer(V) and (V >= 3) ->
#megaco_transaction_reply{transactionId = TransId,
immAckRequired = IAR,
transactionResult = TransRes,
segmentNumber = SegNo,
segmentationComplete = SegComplete} = TR,
{'TransactionReply', TransId, IAR, TransRes, SegNo, SegComplete};
transform_transaction_reply(_, TR) ->
#megaco_transaction_reply{transactionId = TransId,
immAckRequired = IAR,
transactionResult = TransRes} = TR,
{'TransactionReply', TransId, IAR, TransRes}.
%%-----------------------------------------------------------------
%% Func: error_msg/2
%% Description: Send an error message
%%-----------------------------------------------------------------
error_msg(F, A) ->
?megaco_error(F, A).
%%-----------------------------------------------------------------
%% Func: incNumErrors/0, incNumErrors/1, incNumTimerRecovery/1
%% Description: SNMP counter increment functions
%%-----------------------------------------------------------------
incNumErrors(CH) ->
incNum({CH, medGwyGatewayNumErrors}).
incNum(Cnt) ->
case (catch ets:update_counter(megaco_stats, Cnt, 1)) of
{'EXIT', {badarg, _R}} ->
ets:insert(megaco_stats, {Cnt, 1});
Old ->
Old
end.
p(F , A ) - >
%% print(now(), F, A).
print(Ts , F , A ) - >
%% io:format("*** [~s] ~p ***"
%% "~n " ++ F ++ "~n",
[ format_timestamp(Ts ) , self ( ) | A ] ) .
%% format_timestamp(Now) ->
{ _ N1 , _ N2 , N3 } = Now ,
%% {Date, Time} = calendar:now_to_datetime(Now),
{ YYYY , , DD } = Date ,
{ Hour , , Sec } = Time ,
%% FormatDate =
%% io_lib:format("~.4w:~.2.0w:~.2.0w ~.2.0w:~.2.0w:~.2.0w 4~w",
[ YYYY , , DD , Hour , , Sec , round(N3/1000 ) ] ) ,
%% lists:flatten(FormatDate).
| null | https://raw.githubusercontent.com/erlang/otp/2b397d7e5580480dc32fa9751db95f4b89ff029e/lib/megaco/src/engine/megaco_messenger_misc.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
----------------------------------------------------------------------
Purpose: Misc functions used both from the megaco_messenger module
and the megaco_ack_sender module.
----------------------------------------------------------------------
Application internal export
Test functions
This is just a guess...
----------------------------------------------------------------------
Encode the transaction request
----------------------------------------------------------------------
No need to segment message: within size limit
Over size limit, so go segment the message
----------------------------------------------------------------------
Encode the action request's
----------------------------------------------------------------------
Encode the actions
----------------------------------------------------------------------
Encode the action reply's
----------------------------------------------------------------------
----------------------------------------------------------------------
Encode the message body
----------------------------------------------------------------------
Create the message envelope
Encode the message
----------------------------------------------------------------------
Compose and encode a message
----------------------------------------------------------------------
----------------------------------------------------------------------
Send the message body
----------------------------------------------------------------------
----------------------------------------------------------------------
Send the (encoded) message
----------------------------------------------------------------------
Send the message
-----------------------------------------------------------------
Misc internal util functions
-----------------------------------------------------------------
-----------------------------------------------------------------
Func: error_msg/2
Description: Send an error message
-----------------------------------------------------------------
-----------------------------------------------------------------
Func: incNumErrors/0, incNumErrors/1, incNumTimerRecovery/1
Description: SNMP counter increment functions
-----------------------------------------------------------------
print(now(), F, A).
io:format("*** [~s] ~p ***"
"~n " ++ F ++ "~n",
format_timestamp(Now) ->
{Date, Time} = calendar:now_to_datetime(Now),
FormatDate =
io_lib:format("~.4w:~.2.0w:~.2.0w ~.2.0w:~.2.0w:~.2.0w 4~w",
lists:flatten(FormatDate). | Copyright Ericsson AB 2003 - 2023 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(megaco_messenger_misc).
-export([encode_body/3,
encode_trans_request/2,
encode_trans_reply/2,
encode_actions/3,
send_body/3,
send_message/3,
transform_transaction_reply/2
]).
-export([compose_message/3, encode_message/2]).
-include_lib("megaco/include/megaco.hrl").
-include("megaco_message_internal.hrl").
-include_lib("megaco/src/app/megaco_internal.hrl").
-ifdef(MEGACO_TEST_CODE).
-define(SIM(Other,Where),
fun(Afun,Bfun) ->
Kfun = {?MODULE,Bfun},
case (catch ets:lookup(megaco_test_data, Kfun)) of
[{Kfun,Cfun}] ->
Cfun(Afun);
_ ->
Afun
end
end(Other,Where)).
-define(TC_AWAIT_SEND_EVENT(SendFunction),
case megaco_tc_controller:lookup(send_function) of
{value, {Tag, Pid}} when is_pid(Pid) ->
Pid ! {Tag, self(), SendFuncion},
receive
{Tag, Pid} ->
ok
end;
_ ->
ok
end).
-else.
-define(SIM(Other,Where),Other).
-define(TC_AWAIT_SEND_EVENT(_),ok).
-endif.
encode_trans_request(CD, TR) when is_record(TR, 'TransactionRequest') ->
?report_debug(CD, "encode trans request", [TR]),
Trans = {transactionRequest, TR},
encode_transaction(CD, Trans).
encode_trans_reply(#conn_data{segment_send = SegSend,
max_pdu_size = Max,
protocol_version = V} = CD, Reply)
when (SegSend == infinity) or (is_integer(SegSend) and (SegSend > 0)) and
is_integer(V) and (V >= 3) and
is_integer(Max) and (Max >= ?MSG_HDR_SZ) ->
(catch encode_segmented_trans_reply(CD, Reply));
encode_trans_reply(CD, TR) when is_record(TR, megaco_transaction_reply) ->
?report_debug(CD, "encode trans reply", [TR]),
Trans = {transactionReply, transform_transaction_reply(CD, TR)},
encode_transaction(CD, Trans);
encode_trans_reply(CD, TR) when is_tuple(TR) and
(element(1, TR) == 'TransactionReply') ->
?report_debug(CD, "encode trans reply", [TR]),
Trans = {transactionReply, TR},
encode_transaction(CD, Trans).
encode_segmented_trans_reply(#conn_data{max_pdu_size = Max} = CD, Rep) ->
#megaco_transaction_reply{transactionResult = Res1} = Rep,
case Res1 of
{actionReplies, AR} when is_list(AR) andalso (length(AR) >= 1) ->
case encode_action_replies(CD, AR) of
{Size, EncodedARs} when Size =< (Max - ?MSG_HDR_SZ) ->
?report_debug(CD, "action replies encoded size ok",
[Size, Max]),
Res2 = {actionReplies, EncodedARs},
TR = Rep#megaco_transaction_reply{transactionResult = Res2},
TR2 = transform_transaction_reply(CD, TR),
Trans = {transactionReply, TR2},
encode_transaction(CD, Trans);
{Size, EncodecARs} ->
?report_debug(CD,
"action replies encoded size to large - "
"segment",
[Size, Max]),
encode_segments(CD, Rep, EncodecARs)
end;
_ ->
TR = transform_transaction_reply(CD, Rep),
Trans = {transactionReply, TR},
encode_transaction(CD, Trans)
end.
encode_segments(CD, Reply, EncodecARs) ->
encode_segments(CD, Reply, EncodecARs, 1, []).
encode_segments(CD, Reply, [EncodedAR], SN, EncodedSegs) ->
Bin = encode_segment(CD, Reply, EncodedAR, SN, 'NULL'),
{ok, lists:reverse([{SN, Bin}|EncodedSegs])};
encode_segments(CD, Reply, [EncodedAR|EncodedARs], SN, EncodedSegs) ->
Bin = encode_segment(CD, Reply, EncodedAR, SN, asn1_NOVALUE),
encode_segments(CD, Reply, EncodedARs, SN + 1, [{SN, Bin}|EncodedSegs]).
encode_segment(CD, Reply, EncodedAR, SN, SC) ->
Res = {actionReplies, [EncodedAR]},
TR0 = Reply#megaco_transaction_reply{transactionResult = Res,
segmentNumber = SN,
segmentationComplete = SC},
TR = transform_transaction_reply(CD, TR0),
Trans = {transactionReply, TR},
case encode_transaction(CD, Trans) of
{ok, Bin} ->
Bin;
Error ->
throw(Error)
end.
encode_transaction(#conn_data{protocol_version = V,
encoding_mod = EM,
encoding_config = EC} = CD, Trans) ->
case (catch EM:encode_transaction(EC, V, Trans)) of
{ok, Bin} ->
?SIM({ok, Bin}, encode_trans);
{'EXIT', {undef, _}} ->
{error, not_implemented};
{error, not_implemented} = Error1 ->
Error1;
{error, Reason} ->
incNumErrors(CD#conn_data.conn_handle),
{error, {EM, encode_transaction, [EC, V, Trans], Reason}};
Error2 ->
incNumErrors(CD#conn_data.conn_handle),
{error, {EM, encode_transaction, [EC, V, Trans], Error2}}
end.
encode_actions(#conn_data{protocol_version = V} = CD, TraceLabel, ARs) ->
?report_debug(CD, TraceLabel, [ARs]),
EM = CD#conn_data.encoding_mod,
EC = CD#conn_data.encoding_config,
case (catch EM:encode_action_requests(EC, V, ARs)) of
{ok, Bin} when is_binary(Bin) ->
?SIM({ok, Bin}, encode_actions);
{'EXIT', {undef, _}} ->
incNumErrors(CD#conn_data.conn_handle),
Reason = not_implemented,
{error, {EM, encode_action_requests, [EC, ARs], Reason}};
{error, Reason} ->
incNumErrors(CD#conn_data.conn_handle),
{error, {EM, encode_action_requests, [EC, ARs], Reason}};
Error ->
incNumErrors(CD#conn_data.conn_handle),
{error, {EM, encode_action_requests, [EC, ARs], Error}}
end.
encode_action_replies(CD, AR) ->
encode_action_replies(CD, AR, 0, []).
encode_action_replies(_, [], Size, Acc) ->
{Size, lists:reverse(Acc)};
encode_action_replies(#conn_data{protocol_version = V,
encoding_mod = Mod,
encoding_config = Conf} = CD,
[AR|ARs], Size, Acc) ->
case (catch Mod:encode_action_reply(Conf, V, AR)) of
{ok, Bin} when is_binary(Bin) ->
encode_action_replies(CD, ARs, Size + byte_size(Bin), [Bin|Acc]);
{'EXIT', {undef, _}} ->
throw({error, not_implemented});
{error, not_implemented} = Error1 ->
throw(Error1);
{error, Reason} ->
incNumErrors(CD#conn_data.conn_handle),
throw({error, {Mod, encode_action_reply, [Conf, AR], Reason}});
Error ->
incNumErrors(CD#conn_data.conn_handle),
throw({error, {Mod, encode_action_reply, [Conf, AR], Error}})
end.
encode_body(#conn_data{protocol_version = V} = ConnData,
TraceLabel, Body) ->
MegaMsg = compose_message(ConnData, V, Body),
?report_debug(ConnData, TraceLabel, [MegaMsg]),
EM = ConnData#conn_data.encoding_mod,
EC = ConnData#conn_data.encoding_config,
case (catch EM:encode_message(EC, V, MegaMsg)) of
{ok, Bin} when is_binary(Bin) ->
?SIM({ok, Bin}, encode_body);
{error, Reason} ->
incNumErrors(ConnData#conn_data.conn_handle),
{error, {EM, [EC, MegaMsg], Reason}};
Error ->
incNumErrors(ConnData#conn_data.conn_handle),
{error, {EM, [EC, MegaMsg], Error}}
end.
compose_message(#conn_data{conn_handle = CH,
auth_data = MsgAuth}, V, Body) ->
LocalMid = CH#megaco_conn_handle.local_mid,
Msg = #'Message'{version = V,
mId = LocalMid,
messageBody = Body},
: Compute ?
mess = Msg},
MegaMsg.
encode_message(#conn_data{protocol_version = Version,
encoding_mod = EncodingMod,
encoding_config = EncodingConfig}, MegaMsg) ->
(catch EncodingMod:encode_message(EncodingConfig, Version, MegaMsg)).
send_body(ConnData, TraceLabel, Body) ->
case encode_body(ConnData, TraceLabel, Body) of
{ok, Bin} ->
send_message(ConnData, false, Bin);
{error, Reason} ->
{error, Reason}
end.
send_message(#conn_data{resend_indication = flag} = ConnData,
Resend, Bin) ->
do_send_message(ConnData, send_message, Bin, [Resend]);
send_message(#conn_data{resend_indication = true} = ConnData,
true, Bin) ->
do_send_message(ConnData, resend_message, Bin, []);
send_message(ConnData, _Resend, Bin) ->
do_send_message(ConnData, send_message, Bin, []).
do_send_message(ConnData, SendFunc, Bin, Extra) ->
#conn_data{send_mod = SendMod,
send_handle = SendHandle} = ConnData,
?TC_AWAIT_SEND_EVENT(SendFunc),
?report_trace(ConnData, "send bytes", [{bytes, Bin},
{send_func, SendFunc}]),
Args = [SendHandle, Bin | Extra],
case (catch apply(SendMod, SendFunc, Args)) of
ok ->
?SIM({ok, Bin}, send_message);
{cancel, Reason} ->
?report_trace(ConnData, "<CANCEL> send_message callback",
[{bytes, Bin}, {cancel, Reason}]),
{error, {send_message_cancelled, Reason}};
{error, Reason} ->
incNumErrors(ConnData#conn_data.conn_handle),
?report_important(ConnData, "<ERROR> send_message callback",
[{bytes, Bin}, {error, Reason}]),
error_msg("failed (error) sending message [using ~w] (~p):"
"~n~w", [SendFunc, SendHandle, Reason]),
{error, {send_message_failed, Reason}};
{'EXIT', Reason} = Error ->
incNumErrors(ConnData#conn_data.conn_handle),
?report_important(ConnData, "<ERROR> send_message callback",
[{bytes, Bin}, {exit, Reason}]),
error_msg("failed (exit) sending message [using ~w] (~p):"
"~n~w", [SendFunc, SendHandle, Reason]),
{error, {send_message_failed, Error}};
Reason ->
incNumErrors(ConnData#conn_data.conn_handle),
?report_important(ConnData, "<ERROR> send_message callback",
[{bytes, Bin}, {error, Reason}]),
error_msg("failed sending message [using ~w] on (~p): "
"~n~w", [SendFunc, SendHandle, Reason]),
{error, {send_message_failed, Reason}}
end.
%% transform_transaction_reply(ConnData, TR) -> tuple()
%%
%% Convert a #megaco_transaction_reply{} record into the tuple form that
%% the encoders expect. From protocol version 3 the 'TransactionReply'
%% tuple carries the segmentation fields; older versions use the short
%% form. 'andalso' short-circuits, so the comparison is only reached for
%% integer versions (the previous 'and' evaluated both operands).
transform_transaction_reply(#conn_data{protocol_version = V}, TR)
when is_integer(V) andalso (V >= 3) ->
#megaco_transaction_reply{transactionId = TransId,
immAckRequired = IAR,
transactionResult = TransRes,
segmentNumber = SegNo,
segmentationComplete = SegComplete} = TR,
{'TransactionReply', TransId, IAR, TransRes, SegNo, SegComplete};
transform_transaction_reply(_, TR) ->
#megaco_transaction_reply{transactionId = TransId,
immAckRequired = IAR,
transactionResult = TransRes} = TR,
{'TransactionReply', TransId, IAR, TransRes}.
%% error_msg(Format, Args) -> ok
%%
%% Emit an error report. NOTE(review): ?megaco_error is a logging macro
%% from the included megaco header; the actual sink depends on build
%% configuration -- confirm there.
error_msg(F, A) ->
?megaco_error(F, A).
%% incNumErrors(ConnHandle) -> integer() | true
%%
%% Bump the per-connection error counter kept in the megaco_stats ETS
%% table (the key pairs the connection handle with the counter name).
incNumErrors(CH) ->
incNum({CH, medGwyGatewayNumErrors}).
%% incNum(Key) -> integer() | true
%%
%% Increment the counter stored under Key in the megaco_stats ETS table
%% and return the new value. The very first increment of a fresh key
%% makes ets:update_counter/3 exit with badarg; that case seeds the
%% counter with 1 instead (ets:insert/2 returns true).
incNum(Key) ->
case (catch ets:update_counter(megaco_stats, Key, 1)) of
{'EXIT', {badarg, _Reason}} ->
ets:insert(megaco_stats, {Key, 1});
Other ->
Other
end.
p(F , A ) - >
print(Ts , F , A ) - >
[ format_timestamp(Ts ) , self ( ) | A ] ) .
{ _ N1 , _ N2 , N3 } = Now ,
{ YYYY , , DD } = Date ,
{ Hour , , Sec } = Time ,
[ YYYY , , DD , Hour , , Sec , round(N3/1000 ) ] ) ,
|
aae6a20eda620c582aef93d710c424cf32bb8d9dd8c31577126021bc623432cc | danielmiladinov/joy-of-clojure | defining_control_structures.clj | ;; Defining Control Structures
;; ---------------------------------------------------------------------------------------------------------------------
Most control structures in Clojure are implemented via macros , so they provide a nice starting point for learning how
macros can be useful . can be built with or without using syntax - quote , so we 'll show examples of each .
In languages lacking macros , such as for example , the definition of control structures relies on the use of
;; higher-order functions such as we showed earlier. Although this fact in no way limits the ability to create control
structures in Haskell , the approach that take to the problem is different . The most obvious advantage of macros
;; over higher-order functions is that the former manipulate compile-time forms, transforming them into runtime forms.
;; This allows your programs to be written in ways natural to your problem domain, while still maintaining runtime
efficiency . Clojure already provides a rich set of control structures , including but not limited to doseq , while , if ,
;; if-let, and do, but in this section you'll write a few others.
;; Defining control structures without syntax-quote
;; ------------------------------------------------
;; Because the arguments to defmacro aren't evaluated before being passed to the macro, they can be viewed as pure data
;; structures and manipulated and analyzed as such. Because of this, amazing things can be done on the raw forms
;; supplied to macros even in the absence of unquoting.
;; Imagine a macro named do-until that executes all of its clauses evaluating as true until it gets one that is falsey:
(do-until
(even? 2) (println "Even")
(odd? 3) (println "Odd")
(zero? 1) (println "You never see me")
:lollipop (println "Truthy thing"))
; Even
; Odd
;;=> nil
A good example of this type of macro is Clojure 's core macro cond , which with some minor modifications can be made to
;; behave differently:
(defmacro do-until [& clauses]
(when clauses ; When there are clauses
(list 'clojure.core/when (first clauses) ; ... build up a list of each paired clause
(if (next clauses)
(second clauses)
(throw (IllegalArgumentException. "do-until requires an even number of forms")))
(cons 'do-until (nnext clauses))))) ; ... recursively
The first expansion of do - until illustrates how this macro operates :
(macroexpand-1 '(do-until true (prn 1) false (prn 2)))
= > ( clojure.core/when true ( prn 1 ) ( do - until false ( prn 2 ) ) )
;; do-until recursively expands into a series of when calls, which themselves expand into a series of if expressions
;; (because when is a macro defined in terms of the built-in if):
(require '[clojure.walk :as walk])
(walk/macroexpand-all '(do-until true (prn 1) false (prn 2)))
= > ( if true ( do ( prn 1 ) ( if false ( do ( prn 2 ) nil ) ) ) )
(do-until true (prn 1) false (prn 2))
1
;;=> nil
;; You could write out the nested if structure manually and achieve the same result, but the beauty of macros lies in
;; the fact that they can do so on your behalf while presenting a lightweight and intuitive form. In cases where
;; do-until can be used, it removes the need to write and maintain superfluous boilerplate code. This idea can be
;; extended to macros in general and their propensity to reduce unneeded boilerplate for a large category of
circumstances , as you desire . One thing to note about do - until is that it 's meant to be used only for side effects ,
;; because it's designed to always return nil. Macros starting with do tend to act the same way.
;; Defining control structures using syntax-quote and unquoting
;; ------------------------------------------------------------
;; Not all control structures are as simple as do-until. Sometimes you'll want to selectively evaluate macro arguments,
structures , or substructures . In this section , we 'll explore one such macro named unless , implemented using unquote
;; and unquote-splice.
;; Ruby provides a control structure named unless that reverses the sense of a when statement, executing the body of a
;; block when a given condition evaluates to false:
(unless (even? 3) "Now we see it . . . ")
;;=> "Now we see it . . . "
(unless (even? 2) "Now we don't.")
;;=> nil
;; The maverick implementation of unless as demonstrated previously and as shown next is straightforward:
(defmacro unless [condition & body]
`(if (not ~condition) ; Unquote condition
(do ~@body))) ; Splice body
;; The body of the unless implementation uses syntax-quote, unquote, and unquote-splice. Syntax-quote allows the if form
;; to act as a template for the expression that any use of the macro becomes when expanded. The unquote and
;; splicing-unquote provide the "blanks" where the values for the parameters condition and body will be inserted.
;; You can see unless in action next:
(unless true (println "nope"))
;;=> nil
(unless false (println "yep!"))
;; yep!
;;=> nil
;; Because unless relies on the result of a condition for its operation, it's imperative that it evaluate the condition
part using unquote . If we did n't use unquote in our example , then instead of evaluating a function ( even ? 3 ) , it
;; would attempt to resolve a namespace var named condition that may not exist -- and if it did exist, it might be
arbitrarily at the time of the macro call . Some of the unintended consequences of this mistake are shown here :
(macroexpand `(if (not condition) "got it")) ; Missing ~
;;=> (if (clojure.core/not user/condition) "got it") ; Resolved to var
(eval `(if (not condition) "got it"))
;;=> java.lang.RuntimeException: No such var: user/condition ; Unbound var
(def condition false) ; Bound to var
(eval `(if (not condition) "got it")) ; Resolved to var
;;=> "got it"
;; Clearly this isn't the desired behavior. Instead, by unquoting the condition local, you ensure that the function call
;; is used instead. It's easy to forget to add an unquote to the body of a macro, and depending on the condition of your
;; runtime environment, the problem may not be immediately obvious.
| null | https://raw.githubusercontent.com/danielmiladinov/joy-of-clojure/cad7d1851e153beb12a2cd536eb467be12cb7a73/src/joy-of-clojure/chapter8/defining_control_structures.clj | clojure | Defining Control Structures
---------------------------------------------------------------------------------------------------------------------
higher-order functions such as we showed earlier. Although this fact in no way limits the ability to create control
over higher-order functions is that the former manipulate compile-time forms, transforming them into runtime forms.
This allows your programs to be written in ways natural to your problem domain, while still maintaining runtime
if-let, and do, but in this section you'll write a few others.
Defining control structures without syntax-quote
------------------------------------------------
Because the arguments to defmacro aren't evaluated before being passed to the macro, they can be viewed as pure data
structures and manipulated and analyzed as such. Because of this, amazing things can be done on the raw forms
supplied to macros even in the absence of unquoting.
Imagine a macro named do-until that executes all of its clauses evaluating as true until it gets one that is falsey:
Even
Odd
=> nil
behave differently:
When there are clauses
... build up a list of each paired clause
... recursively
do-until recursively expands into a series of when calls, which themselves expand into a series of if expressions
(because when is a macro defined in terms of the built-in if):
=> nil
You could write out the nested if structure manually and achieve the same result, but the beauty of macros lies in
the fact that they can do so on your behalf while presenting a lightweight and intuitive form. In cases where
do-until can be used, it removes the need to write and maintain superfluous boilerplate code. This idea can be
extended to macros in general and their propensity to reduce unneeded boilerplate for a large category of
because it's designed to always return nil. Macros starting with do tend to act the same way.
Defining control structures using syntax-quote and unquoting
------------------------------------------------------------
Not all control structures are as simple as do-until. Sometimes you'll want to selectively evaluate macro arguments,
and unquote-splice.
Ruby provides a control structure named unless that reverses the sense of a when statement, executing the body of a
block when a given condition evaluates to false:
=> "Now we see it . . . "
=> nil
The maverick implementation of unless as demonstrated previously and as shown next is straightforward:
Unquote condition
Splice body
The body of the unless implementation uses syntax-quote, unquote, and unquote-splice. Syntax-quote allows the if form
to act as a template for the expression that any use of the macro becomes when expanded. The unquote and
splicing-unquote provide the "blanks" where the values for the parameters condition and body will be inserted.
You can see unless in action next:
=> nil
yep!
=> nil
Because unless relies on the result of a condition for its operation, it's imperative that it evaluate the condition
would attempt to resolve a namespace var named condition that may not exist -- and if it did exist, it might be
Missing ~
=> (if (clojure.core/not user/condition) "got it") ; Resolved to var
=> java.lang.RuntimeException: No such var: user/condition ; Unbound var
Bound to var
Resolved to var
=> "got it"
Clearly this isn't the desired behavior. Instead, by unquoting the condition local, you ensure that the function call
is used instead. It's easy to forget to add an unquote to the body of a macro, and depending on the condition of your
runtime environment, the problem may not be immediately obvious. | Most control structures in Clojure are implemented via macros , so they provide a nice starting point for learning how
macros can be useful . can be built with or without using syntax - quote , so we 'll show examples of each .
In languages lacking macros , such as for example , the definition of control structures relies on the use of
structures in Haskell , the approach that take to the problem is different . The most obvious advantage of macros
efficiency . Clojure already provides a rich set of control structures , including but not limited to doseq , while , if ,
(do-until
(even? 2) (println "Even")
(odd? 3) (println "Odd")
(zero? 1) (println "You never see me")
:lollipop (println "Truthy thing"))
A good example of this type of macro is Clojure 's core macro cond , which with some minor modifications can be made to
(defmacro do-until [& clauses]
(if (next clauses)
(second clauses)
(throw (IllegalArgumentException. "do-until requires an even number of forms")))
The first expansion of do - until illustrates how this macro operates :
(macroexpand-1 '(do-until true (prn 1) false (prn 2)))
= > ( clojure.core/when true ( prn 1 ) ( do - until false ( prn 2 ) ) )
(require '[clojure.walk :as walk])
(walk/macroexpand-all '(do-until true (prn 1) false (prn 2)))
= > ( if true ( do ( prn 1 ) ( if false ( do ( prn 2 ) nil ) ) ) )
(do-until true (prn 1) false (prn 2))
1
circumstances , as you desire . One thing to note about do - until is that it 's meant to be used only for side effects ,
structures , or substructures . In this section , we 'll explore one such macro named unless , implemented using unquote
(unless (even? 3) "Now we see it . . . ")
(unless (even? 2) "Now we don't.")
(defmacro unless [condition & body]
(unless true (println "nope"))
(unless false (println "yep!"))
part using unquote . If we did n't use unquote in our example , then instead of evaluating a function ( even ? 3 ) , it
arbitrarily at the time of the macro call . Some of the unintended consequences of this mistake are shown here :
(eval `(if (not condition) "got it"))
|
1b510626e29d136aa136bc773e7da7aa309e2514663d006d3995c5a37ba145e6 | nikita-volkov/domain | Prelude.hs | module Domain.Prelude
(
module Exports,
showAsText,
)
where
-- base
-------------------------
import Control.Applicative as Exports hiding (WrappedArrow(..))
import Control.Arrow as Exports hiding (first, second)
import Control.Category as Exports
import Control.Concurrent as Exports
import Control.Exception as Exports
import Control.Monad as Exports hiding (fail, mapM_, sequence_, forM_, msum, mapM, sequence, forM)
import Control.Monad.IO.Class as Exports
import Control.Monad.Fail as Exports
import Control.Monad.Fix as Exports hiding (fix)
import Control.Monad.ST as Exports
import Data.Bifunctor as Exports
import Data.Bits as Exports
import Data.Bool as Exports
import Data.Char as Exports
import Data.Coerce as Exports
import Data.Complex as Exports
import Data.Data as Exports
import Data.Dynamic as Exports
import Data.Either as Exports
import Data.Fixed as Exports
import Data.Foldable as Exports hiding (toList)
import Data.Function as Exports hiding (id, (.))
import Data.Functor as Exports
import Data.Functor.Compose as Exports
import Data.Functor.Contravariant as Exports
import Data.Int as Exports
import Data.IORef as Exports
import Data.Ix as Exports
import Data.List as Exports hiding (sortOn, isSubsequenceOf, uncons, concat, foldr, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, find, maximumBy, minimumBy, mapAccumL, mapAccumR, foldl')
import Data.List.NonEmpty as Exports (NonEmpty(..))
import Data.Maybe as Exports
import Data.Monoid as Exports hiding (Alt)
import Data.Ord as Exports
import Data.Proxy as Exports
import Data.Ratio as Exports
import Data.STRef as Exports
import Data.String as Exports
import Data.Traversable as Exports
import Data.Tuple as Exports
import Data.Unique as Exports
import Data.Version as Exports
import Data.Void as Exports
import Data.Word as Exports
import Debug.Trace as Exports
import Foreign.ForeignPtr as Exports
import Foreign.Ptr as Exports
import Foreign.StablePtr as Exports
import Foreign.Storable as Exports
import GHC.Conc as Exports hiding (orElse, withMVar, threadWaitWriteSTM, threadWaitWrite, threadWaitReadSTM, threadWaitRead)
import GHC.Exts as Exports (IsList(..), lazy, inline, sortWith, groupWith)
import GHC.Generics as Exports (Generic)
import GHC.IO.Exception as Exports
import GHC.OverloadedLabels as Exports
import GHC.Records as Exports
import Numeric as Exports
import Prelude as Exports hiding (fail, concat, foldr, mapM_, sequence_, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, mapM, sequence, id, (.))
import System.Environment as Exports
import System.Exit as Exports
import System.IO as Exports (Handle, hClose)
import System.IO.Error as Exports
import System.IO.Unsafe as Exports
import System.Mem as Exports
import System.Mem.StableName as Exports
import System.Timeout as Exports
import Text.ParserCombinators.ReadP as Exports (ReadP, ReadS, readP_to_S, readS_to_P)
import Text.ParserCombinators.ReadPrec as Exports (ReadPrec, readPrec_to_P, readP_to_Prec, readPrec_to_S, readS_to_Prec)
import Text.Printf as Exports (printf, hPrintf)
import Text.Read as Exports (Read(..), readMaybe, readEither)
import Unsafe.Coerce as Exports
-- text
-------------------------
import Data.Text as Exports (Text)
-- bytestring
-------------------------
import Data.ByteString as Exports (ByteString)
-- hashable
-------------------------
import Data.Hashable as Exports (Hashable)
-- template-haskell
-------------------------
import Language.Haskell.TH.Syntax as Exports (Lift)
-- | Render any 'Show'able value as 'Text'.
showAsText :: Show a => a -> Text
showAsText = fromString . show
| null | https://raw.githubusercontent.com/nikita-volkov/domain/1f140a8981cc604c52ebd02f9f8f773345e766b8/library/Domain/Prelude.hs | haskell | base
-----------------------
text
-----------------------
bytestring
-----------------------
hashable
-----------------------
template-haskell
----------------------- | module Domain.Prelude
(
module Exports,
showAsText,
)
where
import Control.Applicative as Exports hiding (WrappedArrow(..))
import Control.Arrow as Exports hiding (first, second)
import Control.Category as Exports
import Control.Concurrent as Exports
import Control.Exception as Exports
import Control.Monad as Exports hiding (fail, mapM_, sequence_, forM_, msum, mapM, sequence, forM)
import Control.Monad.IO.Class as Exports
import Control.Monad.Fail as Exports
import Control.Monad.Fix as Exports hiding (fix)
import Control.Monad.ST as Exports
import Data.Bifunctor as Exports
import Data.Bits as Exports
import Data.Bool as Exports
import Data.Char as Exports
import Data.Coerce as Exports
import Data.Complex as Exports
import Data.Data as Exports
import Data.Dynamic as Exports
import Data.Either as Exports
import Data.Fixed as Exports
import Data.Foldable as Exports hiding (toList)
import Data.Function as Exports hiding (id, (.))
import Data.Functor as Exports
import Data.Functor.Compose as Exports
import Data.Functor.Contravariant as Exports
import Data.Int as Exports
import Data.IORef as Exports
import Data.Ix as Exports
import Data.List as Exports hiding (sortOn, isSubsequenceOf, uncons, concat, foldr, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, find, maximumBy, minimumBy, mapAccumL, mapAccumR, foldl')
import Data.List.NonEmpty as Exports (NonEmpty(..))
import Data.Maybe as Exports
import Data.Monoid as Exports hiding (Alt)
import Data.Ord as Exports
import Data.Proxy as Exports
import Data.Ratio as Exports
import Data.STRef as Exports
import Data.String as Exports
import Data.Traversable as Exports
import Data.Tuple as Exports
import Data.Unique as Exports
import Data.Version as Exports
import Data.Void as Exports
import Data.Word as Exports
import Debug.Trace as Exports
import Foreign.ForeignPtr as Exports
import Foreign.Ptr as Exports
import Foreign.StablePtr as Exports
import Foreign.Storable as Exports
import GHC.Conc as Exports hiding (orElse, withMVar, threadWaitWriteSTM, threadWaitWrite, threadWaitReadSTM, threadWaitRead)
import GHC.Exts as Exports (IsList(..), lazy, inline, sortWith, groupWith)
import GHC.Generics as Exports (Generic)
import GHC.IO.Exception as Exports
import GHC.OverloadedLabels as Exports
import GHC.Records as Exports
import Numeric as Exports
import Prelude as Exports hiding (fail, concat, foldr, mapM_, sequence_, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, mapM, sequence, id, (.))
import System.Environment as Exports
import System.Exit as Exports
import System.IO as Exports (Handle, hClose)
import System.IO.Error as Exports
import System.IO.Unsafe as Exports
import System.Mem as Exports
import System.Mem.StableName as Exports
import System.Timeout as Exports
import Text.ParserCombinators.ReadP as Exports (ReadP, ReadS, readP_to_S, readS_to_P)
import Text.ParserCombinators.ReadPrec as Exports (ReadPrec, readPrec_to_P, readP_to_Prec, readPrec_to_S, readS_to_Prec)
import Text.Printf as Exports (printf, hPrintf)
import Text.Read as Exports (Read(..), readMaybe, readEither)
import Unsafe.Coerce as Exports
import Data.Text as Exports (Text)
import Data.ByteString as Exports (ByteString)
import Data.Hashable as Exports (Hashable)
import Language.Haskell.TH.Syntax as Exports (Lift)
showAsText :: Show a => a -> Text
showAsText = show >>> fromString
|
cc56a28aa4ffc0e90cc65a389e424ea1f161445cdfedf62504515ece4b0efe75 | minoki/haskell-floating-point | MinMaxSpec.hs | module MinMaxSpec where
import Data.Coerce
import Data.Functor.Identity
import Data.Proxy
import Numeric.Floating.IEEE
import Numeric.Floating.IEEE.Internal
import Numeric.Floating.IEEE.NaN (RealFloatNaN(..))
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
import Util
default ()
isQuietNaN :: RealFloatNaN a => a -> Bool
isQuietNaN x = isNaN x && not (isSignaling x)
prop_minimum :: RealFloatNaN a => Proxy a -> (a -> a -> a) -> Property
prop_minimum _ m =
let sNaN = setPayloadSignaling 1
qNaN = setPayload 1
in conjoin
[ counterexample "(1,3)" $ m 1 3 `sameFloatP` 1
, counterexample "(1,-1)" $ m 1 (-1) `sameFloatP` (-1)
, counterexample "(0,0)" $ m 0 0 `sameFloatP` 0
, counterexample "(0,-0)" $ m 0 (-0) `sameFloatP` (-0)
, counterexample "(-0,0)" $ m (-0) 0 `sameFloatP` (-0)
, counterexample "(-0,-0)" $ m (-0) (-0) `sameFloatP` (-0)
, counterexample "(sNaN,sNaN)" $ isQuietNaN (m sNaN sNaN)
, counterexample "(sNaN,qNaN)" $ isQuietNaN (m sNaN qNaN)
, counterexample "(qNaN,sNaN)" $ isQuietNaN (m qNaN sNaN)
, counterexample "(qNaN,qNaN)" $ isQuietNaN (m qNaN qNaN)
, counterexample "(sNaN,1.0)" $ isQuietNaN (m sNaN 1.0)
, counterexample "(1.0,sNaN)" $ isQuietNaN (m 1.0 sNaN)
, counterexample "(qNaN,1.0)" $ isQuietNaN (m qNaN 1.0)
, counterexample "(1.0,qNaN)" $ isQuietNaN (m 1.0 qNaN)
]
prop_maximum :: RealFloatNaN a => Proxy a -> (a -> a -> a) -> Property
prop_maximum _ m =
let sNaN = setPayloadSignaling 1
qNaN = setPayload 1
in conjoin
[ counterexample "(1,3)" $ m 1 3 `sameFloatP` 3
, counterexample "(1,-1)" $ m 1 (-1) `sameFloatP` 1
, counterexample "(0,0)" $ m 0 0 `sameFloatP` 0
, counterexample "(0,-0)" $ m 0 (-0) `sameFloatP` 0
, counterexample "(-0,0)" $ m (-0) 0 `sameFloatP` 0
, counterexample "(-0,-0)" $ m (-0) (-0) `sameFloatP` (-0)
, counterexample "(sNaN,sNaN)" $ isQuietNaN (m sNaN sNaN)
, counterexample "(sNaN,qNaN)" $ isQuietNaN (m sNaN qNaN)
, counterexample "(qNaN,sNaN)" $ isQuietNaN (m qNaN sNaN)
, counterexample "(qNaN,qNaN)" $ isQuietNaN (m qNaN qNaN)
, counterexample "(sNaN,1.0)" $ isQuietNaN (m sNaN 1.0)
, counterexample "(1.0,sNaN)" $ isQuietNaN (m 1.0 sNaN)
, counterexample "(qNaN,1.0)" $ isQuietNaN (m qNaN 1.0)
, counterexample "(1.0,qNaN)" $ isQuietNaN (m 1.0 qNaN)
]
prop_minimumNumber :: RealFloatNaN a => Proxy a -> (a -> a -> a) -> Property
prop_minimumNumber _ m =
let sNaN = setPayloadSignaling 1
qNaN = setPayload 1
in conjoin
[ counterexample "(1,3)" $ m 1 3 `sameFloatP` 1
, counterexample "(1,-1)" $ m 1 (-1) `sameFloatP` (-1)
, counterexample "(0,0)" $ m 0 0 `sameFloatP` 0
, counterexample "(0,-0)" $ m 0 (-0) `sameFloatP` (-0)
, counterexample "(-0,0)" $ m (-0) 0 `sameFloatP` (-0)
, counterexample "(-0,-0)" $ m (-0) (-0) `sameFloatP` (-0)
, counterexample "(sNaN,sNaN)" $ isQuietNaN (m sNaN sNaN)
, counterexample "(sNaN,qNaN)" $ isQuietNaN (m sNaN qNaN)
, counterexample "(qNaN,sNaN)" $ isQuietNaN (m qNaN sNaN)
, counterexample "(qNaN,qNaN)" $ isQuietNaN (m qNaN qNaN)
, counterexample "(sNaN,1.0)" $ m sNaN 1.0 `sameFloatP` 1.0
, counterexample "(1.0,sNaN)" $ m 1.0 sNaN `sameFloatP` 1.0
, counterexample "(qNaN,1.0)" $ m qNaN 1.0 `sameFloatP` 1.0
, counterexample "(1.0,qNaN)" $ m 1.0 qNaN `sameFloatP` 1.0
]
prop_maximumNumber :: RealFloatNaN a => Proxy a -> (a -> a -> a) -> Property
prop_maximumNumber _ m =
let sNaN = setPayloadSignaling 1
qNaN = setPayload 1
in conjoin
[ counterexample "(1,3)" $ m 1 3 `sameFloatP` 3
, counterexample "(1,-1)" $ m 1 (-1) `sameFloatP` 1
, counterexample "(0,0)" $ m 0 0 `sameFloatP` 0
, counterexample "(0,-0)" $ m 0 (-0) `sameFloatP` 0
, counterexample "(-0,0)" $ m (-0) 0 `sameFloatP` 0
, counterexample "(-0,-0)" $ m (-0) (-0) `sameFloatP` (-0)
, counterexample "(sNaN,sNaN)" $ isQuietNaN (m sNaN sNaN)
, counterexample "(sNaN,qNaN)" $ isQuietNaN (m sNaN qNaN)
, counterexample "(qNaN,sNaN)" $ isQuietNaN (m qNaN sNaN)
, counterexample "(qNaN,qNaN)" $ isQuietNaN (m qNaN qNaN)
, counterexample "(sNaN,1.0)" $ m sNaN 1.0 `sameFloatP` 1.0
, counterexample "(1.0,sNaN)" $ m 1.0 sNaN `sameFloatP` 1.0
, counterexample "(qNaN,1.0)" $ m qNaN 1.0 `sameFloatP` 1.0
, counterexample "(1.0,qNaN)" $ m 1.0 qNaN `sameFloatP` 1.0
]
# NOINLINE spec #
spec :: Spec
spec = do
describe "Float" $ do
let proxy :: Proxy Float
proxy = Proxy
prop "minimum'" $ prop_minimum proxy minimum'
prop "minimum' (generic)" $ prop_minimum proxy (coerce (minimum' :: Identity Float -> Identity Float -> Identity Float))
prop "minimumFloat" $ prop_minimum proxy minimumFloat
prop "minimumNumber" $ prop_minimumNumber proxy minimumNumber
prop "minimumNumber (generic)" $ prop_minimumNumber proxy (coerce (minimumNumber :: Identity Float -> Identity Float -> Identity Float))
prop "minimumNumberFloat" $ prop_minimumNumber proxy minimumNumberFloat
prop "maximum'" $ prop_maximum proxy maximum'
prop "maximum' (generic)" $ prop_maximum proxy (coerce (maximum' :: Identity Float -> Identity Float -> Identity Float))
prop "maximumFloat" $ prop_maximum proxy maximumFloat
prop "maximumNumber" $ prop_maximumNumber proxy maximumNumber
prop "maximumNumber (generic)" $ prop_maximumNumber proxy (coerce (maximumNumber :: Identity Float -> Identity Float -> Identity Float))
prop "maximumNumberFloat" $ prop_maximumNumber proxy maximumNumberFloat
describe "Double" $ do
let proxy :: Proxy Double
proxy = Proxy
prop "minimum'" $ prop_minimum proxy minimum'
prop "minimum' (generic)" $ prop_minimum proxy (coerce (minimum' :: Identity Double -> Identity Double -> Identity Double))
prop "minimumDouble" $ prop_minimum proxy minimumDouble
prop "minimumNumber" $ prop_minimumNumber proxy minimumNumber
prop "minimumNumber (generic)" $ prop_minimumNumber proxy (coerce (minimumNumber :: Identity Double -> Identity Double -> Identity Double))
prop "minimumNumberDouble" $ prop_minimumNumber proxy minimumNumberDouble
prop "maximum'" $ prop_maximum proxy maximum'
prop "maximum' (generic)" $ prop_maximum proxy (coerce (maximum' :: Identity Double -> Identity Double -> Identity Double))
prop "maximumDouble" $ prop_maximum proxy maximumDouble
prop "maximumNumber" $ prop_maximumNumber proxy maximumNumber
prop "maximumNumber (generic)" $ prop_maximumNumber proxy (coerce (maximumNumber :: Identity Double -> Identity Double -> Identity Double))
prop "maximumNumberDouble" $ prop_maximumNumber proxy maximumNumberDouble
| null | https://raw.githubusercontent.com/minoki/haskell-floating-point/7d7bb31bb2b07c637a5eaeda92fc622566e9b141/fp-ieee/test/MinMaxSpec.hs | haskell | module MinMaxSpec where
import Data.Coerce
import Data.Functor.Identity
import Data.Proxy
import Numeric.Floating.IEEE
import Numeric.Floating.IEEE.Internal
import Numeric.Floating.IEEE.NaN (RealFloatNaN(..))
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
import Util
default ()
isQuietNaN :: RealFloatNaN a => a -> Bool
isQuietNaN x = isNaN x && not (isSignaling x)
prop_minimum :: RealFloatNaN a => Proxy a -> (a -> a -> a) -> Property
prop_minimum _ m =
let sNaN = setPayloadSignaling 1
qNaN = setPayload 1
in conjoin
[ counterexample "(1,3)" $ m 1 3 `sameFloatP` 1
, counterexample "(1,-1)" $ m 1 (-1) `sameFloatP` (-1)
, counterexample "(0,0)" $ m 0 0 `sameFloatP` 0
, counterexample "(0,-0)" $ m 0 (-0) `sameFloatP` (-0)
, counterexample "(-0,0)" $ m (-0) 0 `sameFloatP` (-0)
, counterexample "(-0,-0)" $ m (-0) (-0) `sameFloatP` (-0)
, counterexample "(sNaN,sNaN)" $ isQuietNaN (m sNaN sNaN)
, counterexample "(sNaN,qNaN)" $ isQuietNaN (m sNaN qNaN)
, counterexample "(qNaN,sNaN)" $ isQuietNaN (m qNaN sNaN)
, counterexample "(qNaN,qNaN)" $ isQuietNaN (m qNaN qNaN)
, counterexample "(sNaN,1.0)" $ isQuietNaN (m sNaN 1.0)
, counterexample "(1.0,sNaN)" $ isQuietNaN (m 1.0 sNaN)
, counterexample "(qNaN,1.0)" $ isQuietNaN (m qNaN 1.0)
, counterexample "(1.0,qNaN)" $ isQuietNaN (m 1.0 qNaN)
]
prop_maximum :: RealFloatNaN a => Proxy a -> (a -> a -> a) -> Property
prop_maximum _ m =
let sNaN = setPayloadSignaling 1
qNaN = setPayload 1
in conjoin
[ counterexample "(1,3)" $ m 1 3 `sameFloatP` 3
, counterexample "(1,-1)" $ m 1 (-1) `sameFloatP` 1
, counterexample "(0,0)" $ m 0 0 `sameFloatP` 0
, counterexample "(0,-0)" $ m 0 (-0) `sameFloatP` 0
, counterexample "(-0,0)" $ m (-0) 0 `sameFloatP` 0
, counterexample "(-0,-0)" $ m (-0) (-0) `sameFloatP` (-0)
, counterexample "(sNaN,sNaN)" $ isQuietNaN (m sNaN sNaN)
, counterexample "(sNaN,qNaN)" $ isQuietNaN (m sNaN qNaN)
, counterexample "(qNaN,sNaN)" $ isQuietNaN (m qNaN sNaN)
, counterexample "(qNaN,qNaN)" $ isQuietNaN (m qNaN qNaN)
, counterexample "(sNaN,1.0)" $ isQuietNaN (m sNaN 1.0)
, counterexample "(1.0,sNaN)" $ isQuietNaN (m 1.0 sNaN)
, counterexample "(qNaN,1.0)" $ isQuietNaN (m qNaN 1.0)
, counterexample "(1.0,qNaN)" $ isQuietNaN (m 1.0 qNaN)
]
prop_minimumNumber :: RealFloatNaN a => Proxy a -> (a -> a -> a) -> Property
prop_minimumNumber _ m =
let sNaN = setPayloadSignaling 1
qNaN = setPayload 1
in conjoin
[ counterexample "(1,3)" $ m 1 3 `sameFloatP` 1
, counterexample "(1,-1)" $ m 1 (-1) `sameFloatP` (-1)
, counterexample "(0,0)" $ m 0 0 `sameFloatP` 0
, counterexample "(0,-0)" $ m 0 (-0) `sameFloatP` (-0)
, counterexample "(-0,0)" $ m (-0) 0 `sameFloatP` (-0)
, counterexample "(-0,-0)" $ m (-0) (-0) `sameFloatP` (-0)
, counterexample "(sNaN,sNaN)" $ isQuietNaN (m sNaN sNaN)
, counterexample "(sNaN,qNaN)" $ isQuietNaN (m sNaN qNaN)
, counterexample "(qNaN,sNaN)" $ isQuietNaN (m qNaN sNaN)
, counterexample "(qNaN,qNaN)" $ isQuietNaN (m qNaN qNaN)
, counterexample "(sNaN,1.0)" $ m sNaN 1.0 `sameFloatP` 1.0
, counterexample "(1.0,sNaN)" $ m 1.0 sNaN `sameFloatP` 1.0
, counterexample "(qNaN,1.0)" $ m qNaN 1.0 `sameFloatP` 1.0
, counterexample "(1.0,qNaN)" $ m 1.0 qNaN `sameFloatP` 1.0
]
prop_maximumNumber :: RealFloatNaN a => Proxy a -> (a -> a -> a) -> Property
prop_maximumNumber _ m =
let sNaN = setPayloadSignaling 1
qNaN = setPayload 1
in conjoin
[ counterexample "(1,3)" $ m 1 3 `sameFloatP` 3
, counterexample "(1,-1)" $ m 1 (-1) `sameFloatP` 1
, counterexample "(0,0)" $ m 0 0 `sameFloatP` 0
, counterexample "(0,-0)" $ m 0 (-0) `sameFloatP` 0
, counterexample "(-0,0)" $ m (-0) 0 `sameFloatP` 0
, counterexample "(-0,-0)" $ m (-0) (-0) `sameFloatP` (-0)
, counterexample "(sNaN,sNaN)" $ isQuietNaN (m sNaN sNaN)
, counterexample "(sNaN,qNaN)" $ isQuietNaN (m sNaN qNaN)
, counterexample "(qNaN,sNaN)" $ isQuietNaN (m qNaN sNaN)
, counterexample "(qNaN,qNaN)" $ isQuietNaN (m qNaN qNaN)
, counterexample "(sNaN,1.0)" $ m sNaN 1.0 `sameFloatP` 1.0
, counterexample "(1.0,sNaN)" $ m 1.0 sNaN `sameFloatP` 1.0
, counterexample "(qNaN,1.0)" $ m qNaN 1.0 `sameFloatP` 1.0
, counterexample "(1.0,qNaN)" $ m 1.0 qNaN `sameFloatP` 1.0
]
# NOINLINE spec #
spec :: Spec
spec = do
describe "Float" $ do
let proxy :: Proxy Float
proxy = Proxy
prop "minimum'" $ prop_minimum proxy minimum'
prop "minimum' (generic)" $ prop_minimum proxy (coerce (minimum' :: Identity Float -> Identity Float -> Identity Float))
prop "minimumFloat" $ prop_minimum proxy minimumFloat
prop "minimumNumber" $ prop_minimumNumber proxy minimumNumber
prop "minimumNumber (generic)" $ prop_minimumNumber proxy (coerce (minimumNumber :: Identity Float -> Identity Float -> Identity Float))
prop "minimumNumberFloat" $ prop_minimumNumber proxy minimumNumberFloat
prop "maximum'" $ prop_maximum proxy maximum'
prop "maximum' (generic)" $ prop_maximum proxy (coerce (maximum' :: Identity Float -> Identity Float -> Identity Float))
prop "maximumFloat" $ prop_maximum proxy maximumFloat
prop "maximumNumber" $ prop_maximumNumber proxy maximumNumber
prop "maximumNumber (generic)" $ prop_maximumNumber proxy (coerce (maximumNumber :: Identity Float -> Identity Float -> Identity Float))
prop "maximumNumberFloat" $ prop_maximumNumber proxy maximumNumberFloat
describe "Double" $ do
let proxy :: Proxy Double
proxy = Proxy
prop "minimum'" $ prop_minimum proxy minimum'
prop "minimum' (generic)" $ prop_minimum proxy (coerce (minimum' :: Identity Double -> Identity Double -> Identity Double))
prop "minimumDouble" $ prop_minimum proxy minimumDouble
prop "minimumNumber" $ prop_minimumNumber proxy minimumNumber
prop "minimumNumber (generic)" $ prop_minimumNumber proxy (coerce (minimumNumber :: Identity Double -> Identity Double -> Identity Double))
prop "minimumNumberDouble" $ prop_minimumNumber proxy minimumNumberDouble
prop "maximum'" $ prop_maximum proxy maximum'
prop "maximum' (generic)" $ prop_maximum proxy (coerce (maximum' :: Identity Double -> Identity Double -> Identity Double))
prop "maximumDouble" $ prop_maximum proxy maximumDouble
prop "maximumNumber" $ prop_maximumNumber proxy maximumNumber
prop "maximumNumber (generic)" $ prop_maximumNumber proxy (coerce (maximumNumber :: Identity Double -> Identity Double -> Identity Double))
prop "maximumNumberDouble" $ prop_maximumNumber proxy maximumNumberDouble
| |
c586566ae656454a2c3be8442c2c1d905167b1588a65a9a5ebdb9008d9709dee | well-typed/generics-sop | Instances.hs | # LANGUAGE EmptyCase #
# LANGUAGE TemplateHaskell #
# OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_GHC -freduction - depth=100 #
# OPTIONS_GHC -fno - warn - deprecations #
| Instances for ' Generic ' and ' HasMetadata ' .
--
We define instances for datatypes from @generics - sop@ and
-- @base@ that are supported.
--
-- (There are only instances defined in this module, so the
-- documentation is empty.)
--
module Generics.SOP.Instances () where
GHC versions and base versions :
--
7.6.3 : 4.6.0.1
7.8.3 : 4.7.0.1
7.8.4 : 4.7.0.2
7.10.3 : 4.8.2.0
8.0.2 : 4.9.1.0
8.2.2 : 4.10.1.0
8.4.3 : 4.11.1.0
8.6.1 : 4.12.0.0
import Control.Exception
import Data.Char
import Data.Complex
import Data.Data
import Data.Fixed
import Data.Functor.Compose -- new
import qualified Data.Functor.Const -- new
import Data.Functor.Identity -- new
import Data.Functor.Product -- new
import Data.Functor.Sum -- new
import Data.List.NonEmpty -- new
import qualified Data.Monoid
import Data.Ord
import qualified Data.Semigroup -- new
import Data.Version
import Data.Void -- new
import Foreign.C.Error
import Foreign.C.Types
#if MIN_VERSION_base(4,11,0)
import GHC.ByteOrder -- new
#endif
import GHC.Conc -- new
import GHC.ExecutionStack -- new
import GHC.Exts -- new
import GHC.Events -- platform - specific , omitted
import GHC.Fingerprint -- new
import GHC.Float -- new
import qualified GHC.Generics -- new
import GHC.IO.Buffer -- new
import GHC.IO.Device -- new
import GHC.IO.Encoding -- new
import GHC.IO.Encoding.Failure -- new
import GHC.IO.Exception -- new
import GHC.IO.Handle -- new
import GHC.RTS.Flags -- new
import qualified GHC.Stack -- new
import GHC.StaticPtr -- new
import GHC.Stats -- new
import System.Console.GetOpt
import System.IO
import Text.Printf
import Text.Read.Lex
import Generics.SOP.BasicFunctors
import Generics.SOP.Classes
import Generics.SOP.TH
-- Types from Generics.SOP:
deriveGeneric ''I
deriveGeneric ''K
deriveGeneric ''(:.:)
deriveGeneric ''(-.->) -- new
-- Cannot derive instances for Sing
-- Cannot derive instances for Shape
Can not derive instances for NP , NS , POP , SOP
-- Cannot derive instances for metadata types
Types from the Prelude :
deriveGeneric ''Bool
deriveGeneric ''Ordering
deriveGeneric ''Maybe
deriveGeneric ''Either
deriveGeneric ''()
2
deriveGeneric ''(,,)
deriveGeneric ''(,,,)
5
deriveGeneric ''(,,,,,)
deriveGeneric ''(,,,,,,)
deriveGeneric ''(,,,,,,,)
deriveGeneric ''(,,,,,,,,)
10
deriveGeneric ''(,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,)
15
deriveGeneric ''(,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,)
20
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,,)
25
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,,,,,,,)
30
deriveGeneric ''[]
-- Other types from base:
-- From Control.Exception:
deriveGeneric ''IOException
deriveGeneric ''ArithException
deriveGeneric ''ArrayException
deriveGeneric ''AssertionFailed
deriveGeneric ''AsyncException
deriveGeneric ''NonTermination
deriveGeneric ''NestedAtomically
deriveGeneric ''BlockedIndefinitelyOnMVar
deriveGeneric ''BlockedIndefinitelyOnSTM
deriveGeneric ''AllocationLimitExceeded -- new
deriveGeneric ''Deadlock
deriveGeneric ''NoMethodError
deriveGeneric ''PatternMatchFail
deriveGeneric ''RecConError
deriveGeneric ''RecSelError
deriveGeneric ''RecUpdError
deriveGeneric ''ErrorCall
deriveGeneric ''TypeError -- new
deriveGeneric ''MaskingState
From Data . :
deriveGeneric ''GeneralCategory
-- From Data.Complex:
deriveGeneric ''Complex
-- From Data.Data:
deriveGeneric ''DataRep
deriveGeneric ''Fixity
deriveGeneric ''ConstrRep
-- From Data.Fixed:
deriveGeneric ''Fixed
deriveGeneric ''E0
deriveGeneric ''E1
deriveGeneric ''E2
deriveGeneric ''E3
deriveGeneric ''E6
deriveGeneric ''E9
deriveGeneric ''E12
-- From Data.Functor.Compose
deriveGeneric ''Compose -- new
-- From Data.Functor.Const
deriveGeneric ''Data.Functor.Const.Const -- new
-- From Data.Functor.Identity
deriveGeneric ''Identity -- new
-- From Data.Functor.Product
deriveGeneric ''Product -- new
-- From Data.Functor.Sum
deriveGeneric ''Sum -- new
From Data . List . NonEmpty
deriveGeneric ''NonEmpty -- new
-- From Data.Monoid:
deriveGeneric ''Data.Monoid.Dual
deriveGeneric ''Data.Monoid.Endo
deriveGeneric ''Data.Monoid.All
deriveGeneric ''Data.Monoid.Any
deriveGeneric ''Data.Monoid.Sum
deriveGeneric ''Data.Monoid.Product
deriveGeneric ''Data.Monoid.First
deriveGeneric ''Data.Monoid.Last
deriveGeneric ''Data.Monoid.Alt -- new
-- From Data.Ord:
deriveGeneric ''Down
-- From Data.Proxy:
deriveGeneric ''Proxy
-- From Data.Semigroup:
deriveGeneric ''Data.Semigroup.Min -- new
deriveGeneric ''Data.Semigroup.Max -- new
deriveGeneric ''Data.Semigroup.First -- new
deriveGeneric ''Data.Semigroup.Last -- new
deriveGeneric ''Data.Semigroup.WrappedMonoid -- new
#if !MIN_VERSION_base(4,16,0)
deriveGeneric ''Data.Semigroup.Option -- new
#endif
deriveGeneric ''Data.Semigroup.Arg -- new
-- From Data.Version:
deriveGeneric ''Version
-- From Data.Void:
deriveGeneric ''Void -- new
-- From Foreign.C.Error:
deriveGeneric ''Errno
-- From Foreign.C.Types:
deriveGeneric ''CChar
deriveGeneric ''CSChar
deriveGeneric ''CUChar
deriveGeneric ''CShort
deriveGeneric ''CUShort
deriveGeneric ''CInt
deriveGeneric ''CUInt
deriveGeneric ''CLong
deriveGeneric ''CULong
deriveGeneric ''CPtrdiff
deriveGeneric ''CSize
deriveGeneric ''CWchar
deriveGeneric ''CSigAtomic
deriveGeneric ''CLLong
deriveGeneric ''CULLong
deriveGeneric ''CIntPtr
deriveGeneric ''CUIntPtr
deriveGeneric ''CIntMax
deriveGeneric ''CUIntMax
deriveGeneric ''CClock
deriveGeneric ''CTime
deriveGeneric ''CUSeconds
deriveGeneric ''CSUSeconds
deriveGeneric ''CFloat
deriveGeneric ''CDouble
#if MIN_VERSION_base(4,11,0)
-- From GHC.ByteOrder:
deriveGeneric ''ByteOrder -- new
#endif
-- From GHC.Conc:
deriveGeneric ''ThreadStatus -- new
deriveGeneric ''BlockReason -- new
-- From GHC.ExecutionStack:
deriveGeneric ''Location -- new
deriveGeneric ''SrcLoc -- new
-- From GHC.Exts:
deriveGeneric ''RuntimeRep -- new
deriveGeneric ''VecCount -- new
deriveGeneric ''VecElem -- new
#if !MIN_VERSION_base(4,15,0)
deriveGeneric ''SpecConstrAnnotation -- new
#endif
-- From GHC.Generics:
deriveGeneric ''GHC.Generics.K1 -- new
deriveGeneric ''GHC.Generics.U1 -- new
deriveGeneric ''GHC.Generics.V1 -- new
deriveGeneric ''GHC.Generics.Par1 -- new
deriveGeneric ''GHC.Generics.M1 -- new
deriveGeneric ''GHC.Generics.R -- new
deriveGeneric ''GHC.Generics.S -- new
deriveGeneric ''GHC.Generics.D -- new
deriveGeneric ''GHC.Generics.C -- new
deriveGeneric ''(GHC.Generics.:*:) -- new
deriveGeneric ''(GHC.Generics.:+:) -- new
deriveGeneric ''(GHC.Generics.:.:) -- new
deriveGeneric ''GHC.Generics.Associativity -- new
deriveGeneric ''GHC.Generics.DecidedStrictness -- new
deriveGeneric ''GHC.Generics.SourceStrictness -- new
deriveGeneric ''GHC.Generics.SourceUnpackedness -- new
deriveGeneric ''GHC.Generics.Fixity -- new
-- From GHC.IO.Buffer:
deriveGeneric ''Buffer -- new
deriveGeneric ''BufferState -- new
-- From GHC.IO.Device:
deriveGeneric ''IODeviceType -- new
-- From GHC.IO.Encoding:
deriveGeneric ''BufferCodec -- new
deriveGeneric ''CodingProgress -- new
-- From GHC.IO.Encoding.Failure:
deriveGeneric ''CodingFailureMode -- new
-- From GHC.Fingerprint
deriveGeneric ''Fingerprint -- new
-- From GHC.Float
deriveGeneric ''FFFormat -- new
-- From GHC.IO.Exception:
#if MIN_VERSION_base(4,11,0)
deriveGeneric ''FixIOException -- new
deriveGeneric ''IOErrorType -- new
#endif
-- From GHC.IO.Handle:
deriveGeneric ''HandlePosn -- new
#if MIN_VERSION_base(4,10,0)
deriveGeneric ''LockMode -- new
#endif
-- From GHC.RTS.Flags:
deriveGeneric ''RTSFlags -- new
deriveGeneric ''GiveGCStats -- new
deriveGeneric ''GCFlags -- new
deriveGeneric ''ConcFlags -- new
deriveGeneric ''MiscFlags -- new
deriveGeneric ''DebugFlags -- new
deriveGeneric ''DoCostCentres -- new
deriveGeneric ''CCFlags -- new
deriveGeneric ''DoHeapProfile -- new
deriveGeneric ''ProfFlags -- new
deriveGeneric ''DoTrace -- new
deriveGeneric ''TraceFlags -- new
deriveGeneric ''TickyFlags -- new
#if MIN_VERSION_base(4,10,0)
deriveGeneric ''ParFlags -- new
#endif
-- From GHC.Stack:
deriveGeneric ''GHC.Stack.SrcLoc -- new
deriveGeneric ''GHC.Stack.CallStack -- new
-- From GHC.StaticPtr:
deriveGeneric ''StaticPtrInfo -- new
From :
#if MIN_VERSION_base(4,10,0)
deriveGeneric ''RTSStats -- new
deriveGeneric ''GCDetails -- new
#endif
#if !MIN_VERSION_base(4,11,0)
deriveGeneric ''GCStats -- new
#endif
From System . Console . :
deriveGeneric ''ArgOrder
deriveGeneric ''OptDescr
deriveGeneric ''ArgDescr
-- From System.Exit:
deriveGeneric ''ExitCode
-- From System.IO:
deriveGeneric ''IOMode
deriveGeneric ''BufferMode
deriveGeneric ''SeekMode
deriveGeneric ''Newline
deriveGeneric ''NewlineMode
-- From Text.Printf:
deriveGeneric ''FieldFormat
deriveGeneric ''FormatAdjustment
deriveGeneric ''FormatSign
deriveGeneric ''FormatParse
From Text . Read . :
deriveGeneric ''Lexeme
deriveGeneric ''Number
Abstract / primitive datatypes ( we do n't derive Generic for these ):
--
-- Ratio
Integer
ThreadId
-- Chan
MVar
-- QSem
-- QSemN
DataType
Dynamic
-- IORef
TypeRep
-- TyCon
-- TypeRepKey
KProxy -- not abstract , but intended for kind - level use
-- STRef
-- Unique
-- ForeignPtr
CFile
-- CFpos
-- CJmpBuf
-- Pool
-- Ptr
-- FunPtr
IntPtr
WordPtr
-- StablePtr
-- Char
-- Double
-- Float
-- Int
-- Int8
-- Int16
Int32
Int64
Word
-- Word8
-- Word16
-- Word64
-- IO
-- ST
-- (->)
RealWorld
-- Handle
HandlePosn
-- TextEncoding
-- StableName
-- Weak
-- ReadP
-- ReadPrec
STM
TVar
Natural
-- Event
EventManager
CostCentre
-- CostCentreStack
--
-- Datatypes we cannot currently handle:
--
SomeException
-- SomeAsyncException
-- Handler
-- Coercion
-- (:~:)
| null | https://raw.githubusercontent.com/well-typed/generics-sop/58d7f2eab3a4f603fee50db27b2ad2f224881c9f/generics-sop/src/Generics/SOP/Instances.hs | haskell |
@base@ that are supported.
(There are only instances defined in this module, so the
documentation is empty.)
new
new
new
new
new
new
new
new
new
new
new
new
platform - specific , omitted
new
new
new
new
new
new
new
new
new
new
new
new
new
Types from Generics.SOP:
new
Cannot derive instances for Sing
Cannot derive instances for Shape
Cannot derive instances for metadata types
Other types from base:
From Control.Exception:
new
new
From Data.Complex:
From Data.Data:
From Data.Fixed:
From Data.Functor.Compose
new
From Data.Functor.Const
new
From Data.Functor.Identity
new
From Data.Functor.Product
new
From Data.Functor.Sum
new
new
From Data.Monoid:
new
From Data.Ord:
From Data.Proxy:
From Data.Semigroup:
new
new
new
new
new
new
new
From Data.Version:
From Data.Void:
new
From Foreign.C.Error:
From Foreign.C.Types:
From GHC.ByteOrder:
new
From GHC.Conc:
new
new
From GHC.ExecutionStack:
new
new
From GHC.Exts:
new
new
new
new
From GHC.Generics:
new
new
new
new
new
new
new
new
new
new
new
new
new
new
new
new
new
From GHC.IO.Buffer:
new
new
From GHC.IO.Device:
new
From GHC.IO.Encoding:
new
new
From GHC.IO.Encoding.Failure:
new
From GHC.Fingerprint
new
From GHC.Float
new
From GHC.IO.Exception:
new
new
From GHC.IO.Handle:
new
new
From GHC.RTS.Flags:
new
new
new
new
new
new
new
new
new
new
new
new
new
new
From GHC.Stack:
new
new
From GHC.StaticPtr:
new
new
new
new
From System.Exit:
From System.IO:
From Text.Printf:
Ratio
Chan
QSem
QSemN
IORef
TyCon
TypeRepKey
not abstract , but intended for kind - level use
STRef
Unique
ForeignPtr
CFpos
CJmpBuf
Pool
Ptr
FunPtr
StablePtr
Char
Double
Float
Int
Int8
Int16
Word8
Word16
Word64
IO
ST
(->)
Handle
TextEncoding
StableName
Weak
ReadP
ReadPrec
Event
CostCentreStack
Datatypes we cannot currently handle:
SomeAsyncException
Handler
Coercion
(:~:) | # LANGUAGE EmptyCase #
# LANGUAGE TemplateHaskell #
# OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_GHC -freduction - depth=100 #
# OPTIONS_GHC -fno - warn - deprecations #
| Instances for ' Generic ' and ' HasMetadata ' .
We define instances for datatypes from @generics - sop@ and
module Generics.SOP.Instances () where
GHC versions and base versions :
7.6.3 : 4.6.0.1
7.8.3 : 4.7.0.1
7.8.4 : 4.7.0.2
7.10.3 : 4.8.2.0
8.0.2 : 4.9.1.0
8.2.2 : 4.10.1.0
8.4.3 : 4.11.1.0
8.6.1 : 4.12.0.0
import Control.Exception
import Data.Char
import Data.Complex
import Data.Data
import Data.Fixed
import qualified Data.Monoid
import Data.Ord
import Data.Version
import Foreign.C.Error
import Foreign.C.Types
#if MIN_VERSION_base(4,11,0)
#endif
import System.Console.GetOpt
import System.IO
import Text.Printf
import Text.Read.Lex
import Generics.SOP.BasicFunctors
import Generics.SOP.Classes
import Generics.SOP.TH
deriveGeneric ''I
deriveGeneric ''K
deriveGeneric ''(:.:)
Can not derive instances for NP , NS , POP , SOP
Types from the Prelude :
deriveGeneric ''Bool
deriveGeneric ''Ordering
deriveGeneric ''Maybe
deriveGeneric ''Either
deriveGeneric ''()
2
deriveGeneric ''(,,)
deriveGeneric ''(,,,)
5
deriveGeneric ''(,,,,,)
deriveGeneric ''(,,,,,,)
deriveGeneric ''(,,,,,,,)
deriveGeneric ''(,,,,,,,,)
10
deriveGeneric ''(,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,)
15
deriveGeneric ''(,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,)
20
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,,)
25
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,,,,,,)
deriveGeneric ''(,,,,,,,,,,,,,,,,,,,,,,,,,,,,)
30
deriveGeneric ''[]
deriveGeneric ''IOException
deriveGeneric ''ArithException
deriveGeneric ''ArrayException
deriveGeneric ''AssertionFailed
deriveGeneric ''AsyncException
deriveGeneric ''NonTermination
deriveGeneric ''NestedAtomically
deriveGeneric ''BlockedIndefinitelyOnMVar
deriveGeneric ''BlockedIndefinitelyOnSTM
deriveGeneric ''Deadlock
deriveGeneric ''NoMethodError
deriveGeneric ''PatternMatchFail
deriveGeneric ''RecConError
deriveGeneric ''RecSelError
deriveGeneric ''RecUpdError
deriveGeneric ''ErrorCall
deriveGeneric ''MaskingState
From Data . :
deriveGeneric ''GeneralCategory
deriveGeneric ''Complex
deriveGeneric ''DataRep
deriveGeneric ''Fixity
deriveGeneric ''ConstrRep
deriveGeneric ''Fixed
deriveGeneric ''E0
deriveGeneric ''E1
deriveGeneric ''E2
deriveGeneric ''E3
deriveGeneric ''E6
deriveGeneric ''E9
deriveGeneric ''E12
From Data . List . NonEmpty
deriveGeneric ''Data.Monoid.Dual
deriveGeneric ''Data.Monoid.Endo
deriveGeneric ''Data.Monoid.All
deriveGeneric ''Data.Monoid.Any
deriveGeneric ''Data.Monoid.Sum
deriveGeneric ''Data.Monoid.Product
deriveGeneric ''Data.Monoid.First
deriveGeneric ''Data.Monoid.Last
deriveGeneric ''Down
deriveGeneric ''Proxy
#if !MIN_VERSION_base(4,16,0)
#endif
deriveGeneric ''Version
deriveGeneric ''Errno
deriveGeneric ''CChar
deriveGeneric ''CSChar
deriveGeneric ''CUChar
deriveGeneric ''CShort
deriveGeneric ''CUShort
deriveGeneric ''CInt
deriveGeneric ''CUInt
deriveGeneric ''CLong
deriveGeneric ''CULong
deriveGeneric ''CPtrdiff
deriveGeneric ''CSize
deriveGeneric ''CWchar
deriveGeneric ''CSigAtomic
deriveGeneric ''CLLong
deriveGeneric ''CULLong
deriveGeneric ''CIntPtr
deriveGeneric ''CUIntPtr
deriveGeneric ''CIntMax
deriveGeneric ''CUIntMax
deriveGeneric ''CClock
deriveGeneric ''CTime
deriveGeneric ''CUSeconds
deriveGeneric ''CSUSeconds
deriveGeneric ''CFloat
deriveGeneric ''CDouble
#if MIN_VERSION_base(4,11,0)
#endif
#if !MIN_VERSION_base(4,15,0)
#endif
#if MIN_VERSION_base(4,11,0)
#endif
#if MIN_VERSION_base(4,10,0)
#endif
#if MIN_VERSION_base(4,10,0)
#endif
From :
#if MIN_VERSION_base(4,10,0)
#endif
#if !MIN_VERSION_base(4,11,0)
#endif
From System . Console . :
deriveGeneric ''ArgOrder
deriveGeneric ''OptDescr
deriveGeneric ''ArgDescr
deriveGeneric ''ExitCode
deriveGeneric ''IOMode
deriveGeneric ''BufferMode
deriveGeneric ''SeekMode
deriveGeneric ''Newline
deriveGeneric ''NewlineMode
deriveGeneric ''FieldFormat
deriveGeneric ''FormatAdjustment
deriveGeneric ''FormatSign
deriveGeneric ''FormatParse
From Text . Read . :
deriveGeneric ''Lexeme
deriveGeneric ''Number
Abstract / primitive datatypes ( we do n't derive Generic for these ):
Integer
ThreadId
MVar
DataType
Dynamic
TypeRep
CFile
IntPtr
WordPtr
Int32
Int64
Word
RealWorld
HandlePosn
STM
TVar
Natural
EventManager
CostCentre
SomeException
|
38dd4a2749e81ca47814c36c347e010c62dfa0813074ea9b6add8f44a0082db3 | unnohideyuki/bunny | sample275.hs | pseq :: a -> b -> b
pseq x y = y
main = putStrLn (undefined `pseq` "abcd")
| null | https://raw.githubusercontent.com/unnohideyuki/bunny/501856ff48f14b252b674585f25a2bf3801cb185/compiler/test/samples/sample275.hs | haskell | pseq :: a -> b -> b
pseq x y = y
main = putStrLn (undefined `pseq` "abcd")
| |
650e3377eb2194458970642ddc6df8139c093fc5e3d499314d4267dc8d34dc95 | finnishtransportagency/harja | laadunseuranta.cljs | (ns harja.tiedot.urakka.laadunseuranta
"Tämä nimiavaruus hallinnoi laadunseurantaa sekä laatupoikkeamia ja tarkastuksia"
(:require [reagent.core :refer [atom]]
[harja.loki :refer [log logt tarkkaile!]]
[cljs.core.async :refer [<!]]
[harja.loki :refer [log]]
[harja.tiedot.navigaatio :as nav]
[harja.tiedot.urakka :as u]
[harja.asiakas.kommunikaatio :as k])
(:require-macros [harja.atom :refer [reaction<!]]
[reagent.ratom :refer [reaction]]
[cljs.core.async.macros :refer [go]]))
(defonce laadunseurannassa? (atom false))
(defn hae-urakan-yllapitokohteet-lomakkeelle
"Hakee urakan ylläpitokohteet näytettäväksi laatupoikkeamalomakkeella."
[urakka-id sopimus-id]
(k/post! :urakan-yllapitokohteet-lomakkeelle
{:urakka-id urakka-id
:sopimus-id sopimus-id}))
(def urakan-yllapitokohteet-lomakkeelle
(reaction<! [urakka-id (:id @nav/valittu-urakka)
urakka-tyyppi (:tyyppi @nav/valittu-urakka)
[sopimus-id _] @u/valittu-sopimusnumero
laadunseurannassa? @laadunseurannassa?
yllapitokohdeurakka? @u/yllapitokohdeurakka?]
{:nil-kun-haku-kaynnissa? true}
(when (and yllapitokohdeurakka?
laadunseurannassa? urakka-id sopimus-id)
(hae-urakan-yllapitokohteet-lomakkeelle urakka-id sopimus-id))))
| null | https://raw.githubusercontent.com/finnishtransportagency/harja/488b1e096f0611e175221d74ba4f2ffed6bea8f1/src/cljs/harja/tiedot/urakka/laadunseuranta.cljs | clojure | (ns harja.tiedot.urakka.laadunseuranta
"Tämä nimiavaruus hallinnoi laadunseurantaa sekä laatupoikkeamia ja tarkastuksia"
(:require [reagent.core :refer [atom]]
[harja.loki :refer [log logt tarkkaile!]]
[cljs.core.async :refer [<!]]
[harja.loki :refer [log]]
[harja.tiedot.navigaatio :as nav]
[harja.tiedot.urakka :as u]
[harja.asiakas.kommunikaatio :as k])
(:require-macros [harja.atom :refer [reaction<!]]
[reagent.ratom :refer [reaction]]
[cljs.core.async.macros :refer [go]]))
(defonce laadunseurannassa? (atom false))
(defn hae-urakan-yllapitokohteet-lomakkeelle
"Hakee urakan ylläpitokohteet näytettäväksi laatupoikkeamalomakkeella."
[urakka-id sopimus-id]
(k/post! :urakan-yllapitokohteet-lomakkeelle
{:urakka-id urakka-id
:sopimus-id sopimus-id}))
(def urakan-yllapitokohteet-lomakkeelle
(reaction<! [urakka-id (:id @nav/valittu-urakka)
urakka-tyyppi (:tyyppi @nav/valittu-urakka)
[sopimus-id _] @u/valittu-sopimusnumero
laadunseurannassa? @laadunseurannassa?
yllapitokohdeurakka? @u/yllapitokohdeurakka?]
{:nil-kun-haku-kaynnissa? true}
(when (and yllapitokohdeurakka?
laadunseurannassa? urakka-id sopimus-id)
(hae-urakan-yllapitokohteet-lomakkeelle urakka-id sopimus-id))))
| |
78223a511ae5e5aed4ca74a48babbc1b3eb4a1659f3ff2e6c64b5cc7838a7227 | ocsigen/lwt | lwt_condition.mli | OCaml promise library
*
* Copyright ( c ) 2009 , Metaweb Technologies , Inc.
* All rights reserved .
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions
* are met :
* * Redistributions of source code must retain the above copyright
* notice , this list of conditions and the following disclaimer .
* * Redistributions in binary form must reproduce the above
* copyright notice , this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution .
*
* THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES ` ` AS IS '' AND ANY
* EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL METAWEB TECHNOLOGIES BE
* LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
* CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR
* BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
* IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE
* OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
*
* Copyright (c) 2009, Metaweb Technologies, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL METAWEB TECHNOLOGIES BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*)
(** Conditions *)
(** Condition variables to synchronize between threads. *)
type 'a t
(** Condition variable type. The type parameter denotes the type of
value propagated from notifier to waiter. *)
val create : unit -> 'a t
(** [create ()] creates a new condition variable. *)
val wait : ?mutex:Lwt_mutex.t -> 'a t -> 'a Lwt.t
(** [wait mutex condvar] will cause the current thread to block,
awaiting notification for a condition variable, [condvar]. If
provided, the [mutex] must have been previously locked (within
the scope of [Lwt_mutex.with_lock], for example) and is
temporarily unlocked until the condition is notified. Upon
notification, [mutex] is re-locked before [wait] returns and
the thread's activity is resumed. When the awaited condition
is notified, the value parameter passed to [signal] is
returned. *)
val signal : 'a t -> 'a -> unit
(** [signal condvar value] notifies that a condition is ready. A
single waiting thread will be awoken and will receive the
notification value which will be returned from [wait]. Note
that condition notification is not "sticky", i.e. if there is
no waiter when [signal] is called, the notification will be
missed and the value discarded. *)
val broadcast : 'a t -> 'a -> unit
(** [broadcast condvar value] notifies all waiting threads. Each
will be awoken in turn and will receive the same notification
value. *)
val broadcast_exn : 'a t -> exn -> unit
* [ broadcast_exn condvar exn ] fails all waiting threads with exception
[ exn ] .
@since 2.6.0
[exn].
@since 2.6.0 *)
| null | https://raw.githubusercontent.com/ocsigen/lwt/9943ba77a5508feaea5e1fb60b011db4179f9c61/src/core/lwt_condition.mli | ocaml | * Conditions
* Condition variables to synchronize between threads.
* Condition variable type. The type parameter denotes the type of
value propagated from notifier to waiter.
* [create ()] creates a new condition variable.
* [wait mutex condvar] will cause the current thread to block,
awaiting notification for a condition variable, [condvar]. If
provided, the [mutex] must have been previously locked (within
the scope of [Lwt_mutex.with_lock], for example) and is
temporarily unlocked until the condition is notified. Upon
notification, [mutex] is re-locked before [wait] returns and
the thread's activity is resumed. When the awaited condition
is notified, the value parameter passed to [signal] is
returned.
* [signal condvar value] notifies that a condition is ready. A
single waiting thread will be awoken and will receive the
notification value which will be returned from [wait]. Note
that condition notification is not "sticky", i.e. if there is
no waiter when [signal] is called, the notification will be
missed and the value discarded.
* [broadcast condvar value] notifies all waiting threads. Each
will be awoken in turn and will receive the same notification
value. | OCaml promise library
*
* Copyright ( c ) 2009 , Metaweb Technologies , Inc.
* All rights reserved .
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions
* are met :
* * Redistributions of source code must retain the above copyright
* notice , this list of conditions and the following disclaimer .
* * Redistributions in binary form must reproduce the above
* copyright notice , this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution .
*
* THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES ` ` AS IS '' AND ANY
* EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL METAWEB TECHNOLOGIES BE
* LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
* CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR
* BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
* IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE
* OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
*
* Copyright (c) 2009, Metaweb Technologies, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL METAWEB TECHNOLOGIES BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*)
type 'a t
val create : unit -> 'a t
val wait : ?mutex:Lwt_mutex.t -> 'a t -> 'a Lwt.t
val signal : 'a t -> 'a -> unit
val broadcast : 'a t -> 'a -> unit
val broadcast_exn : 'a t -> exn -> unit
* [ broadcast_exn condvar exn ] fails all waiting threads with exception
[ exn ] .
@since 2.6.0
[exn].
@since 2.6.0 *)
|
7196d8fc8f77c63aa0a11020b069f4f3182629a1f7c90b131c752ef35d16de6f | raehik/binrep | Tar.hs | module Binrep.Example.Tar where
import Binrep
import Binrep.Generic
import Binrep.Type.NullPadded
import Binrep.Type.AsciiNat
import GHC.Generics ( Generic )
import Data.Word ( Word8 )
import GHC.TypeNats
import Data.ByteString qualified as B
import FlatParse.Basic qualified as FP
type BS = B.ByteString
-- | The naturals in tars are sized octal ASCII digit strings that end with a
-- null byte (and may start with leading ASCII zeroes). The size includes the
-- terminating null, so you get @n-1@ digits. What a farce.
--
-- Don't use this constructor directly! The size must be checked to ensure it
-- fits.
newtype TarNat n = TarNat { getTarNat :: AsciiNat 8 }
deriving stock (Generic, Show, Eq)
instance KnownNat n => BLen (TarNat n) where
type CBLen (TarNat n) = n
| No need to check for underflow etc . as TarNat guarantees good sizing .
instance KnownNat n => Put (TarNat n) where
put (TarNat an) = put pfxNulls <> put an <> put @Word8 0x00
where
pfxNulls = B.replicate (fromIntegral pfxNullCount) 0x30
pfxNullCount = n - blen an - 1
n = typeNatToBLen @n
instance KnownNat n => Get (TarNat n) where
get = do
an <- FP.isolate (fromIntegral (n - 1)) get
get @Word8 >>= \case
0x00 -> pure $ TarNat an
w -> eBase $ EExpectedByte 0x00 w
where
n = typeNatToBLen @n
-- Partial header
data Tar = Tar
{ tarFileName :: NullPadded 100 BS
, tarFileMode :: TarNat 8
, tarFileUIDOwner :: TarNat 8
, tarFileUIDGroup :: TarNat 8
, tarFileFileSize :: TarNat 12
, tarFileLastMod :: TarNat 12
} deriving stock (Generic, Show, Eq)
instance BLen Tar where blen = blenGeneric cNoSum
instance Put Tar where put = putGeneric cNoSum
instance Get Tar where get = getGeneric cNoSum
| null | https://raw.githubusercontent.com/raehik/binrep/d7b7b0c7c3e0bebbba49701db530b98ac0b154c5/src/Binrep/Example/Tar.hs | haskell | | The naturals in tars are sized octal ASCII digit strings that end with a
null byte (and may start with leading ASCII zeroes). The size includes the
terminating null, so you get @n-1@ digits. What a farce.
Don't use this constructor directly! The size must be checked to ensure it
fits.
Partial header | module Binrep.Example.Tar where
import Binrep
import Binrep.Generic
import Binrep.Type.NullPadded
import Binrep.Type.AsciiNat
import GHC.Generics ( Generic )
import Data.Word ( Word8 )
import GHC.TypeNats
import Data.ByteString qualified as B
import FlatParse.Basic qualified as FP
type BS = B.ByteString
newtype TarNat n = TarNat { getTarNat :: AsciiNat 8 }
deriving stock (Generic, Show, Eq)
instance KnownNat n => BLen (TarNat n) where
type CBLen (TarNat n) = n
| No need to check for underflow etc . as TarNat guarantees good sizing .
instance KnownNat n => Put (TarNat n) where
put (TarNat an) = put pfxNulls <> put an <> put @Word8 0x00
where
pfxNulls = B.replicate (fromIntegral pfxNullCount) 0x30
pfxNullCount = n - blen an - 1
n = typeNatToBLen @n
instance KnownNat n => Get (TarNat n) where
get = do
an <- FP.isolate (fromIntegral (n - 1)) get
get @Word8 >>= \case
0x00 -> pure $ TarNat an
w -> eBase $ EExpectedByte 0x00 w
where
n = typeNatToBLen @n
data Tar = Tar
{ tarFileName :: NullPadded 100 BS
, tarFileMode :: TarNat 8
, tarFileUIDOwner :: TarNat 8
, tarFileUIDGroup :: TarNat 8
, tarFileFileSize :: TarNat 12
, tarFileLastMod :: TarNat 12
} deriving stock (Generic, Show, Eq)
instance BLen Tar where blen = blenGeneric cNoSum
instance Put Tar where put = putGeneric cNoSum
instance Get Tar where get = getGeneric cNoSum
|
05f5d93974b5ad64e9a3d6939b4e95c1b934ca628794f718d3e7be4bda63eb79 | GaloisInc/LIMA | OM1.hs | -- |
-- Module : OM1
Copyright : 2016
-- License : BSD3
--
-- Maintainer :
-- Stability : experimental
-- Portability : unknown
--
A specification for the distributed , fault tolerant system ) written
using
--
module OM1
( om1
)
where
import Control.Monad (forM, forM_)
import Data.Int
import Language.LIMA
import Language.LIMA.C (printProbe)
import Language.Sally
-- Parameters ----------------------------------------------------------
numRelays = 3
numRecvs = 3
relaySet = [0..numRelays-1]
recvSet = [0..numRecvs-1]
) Spec ------------------------------------------------------------
-- | Top level rule
om1 :: Atom ()
om1 = do
-- setup channels for communication between source, relays, and receivers
s2rs <- mapM newChannel [ tg "s2r" i | i <- relaySet ]
r2rs <- mapM (mapM newChannel) [ [ tg2 "r2r" i j | j <- recvSet ]
| i <- relaySet ]
votes <- mapM msgVar [ tg "vote" j | j <- recvSet ]
-- declare source node
source (map fst s2rs)
-- declare relay nodes
forM_ relaySet $ \ident ->
relay ident (snd (s2rs !! ident))
(map fst (r2rs !! ident))
-- declare receiver nodes
dones <- forM recvSet $ \ident ->
recv ident [ snd ((r2rs !! i) !! ident) | i <- relaySet ] (votes !! ident)
assert "agreement" $ imply (and_ (map value dones)) (all_ (\(v,w) -> value v ==. value w)
[ (v,w) | v <- votes, w <- votes ])
assert "validity" $ imply (and_ (map value dones)) (all_ (\v -> value v ==. goodMsg) votes)
observer
-- Source --------------------------------------------------------------
-- | Source node, a.k.a. "The General"
source :: [ChanInput] -- ^ output channels to broadcast on
-> Atom ()
source cs = atom "source" $ do
done <- bool "done" False
-- activation condition
cond $ not_ (value done)
-- behavior
done <== Const True
forM_ cs $ \c -> writeChannel c goodMsg
-- Relays --------------------------------------------------------------
| Relay node , a.k.a . a generic 0th round " Lieutenant "
relay :: Int -- ^ relay id
-> ChanOutput -- ^ channel from source
-> [ChanInput] -- ^ channels to receivers
-> Atom ()
relay ident inC outCs = atom (tg "relay" ident) $ do
done <- bool "done" False
msg <- msgVar (tg "relay_msg" ident)
-- activation condition:
-- we haven't stored a value yet and there is a message waiting
-- on the channel 'inC'
cond $ isMissing msg &&. fullChannel inC
-- behavior
m <- readChannel inC :: Atom (E MsgType)
msg <== m
done <== Const True
forM_ outCs $ \c -> writeChannel c m
-- Receivers -----------------------------------------------------------
| Receiver node , a.k.a . a generic 1st round " Lieutenant "
recv :: Int -- ^ receiver id
-> [ChanOutput] -- ^ channels from relays
-> V MsgType
-> Atom (V Bool)
recv ident inCs vote = atom (tg "recv" ident) $ do
done <- bool "done" False
buffer <- mapM msgVar [ tg (tg "buffer" ident) i | i <- relaySet ]
declare multiple " pollers " , one for each buffer location
forM_ relaySet $ \i ->
atom (tg2 "recv_poll" ident i) $ do
cond $ isMissing (buffer !! i) &&. fullChannel (inCs !! i)
b' <- readChannel (inCs !! i)
(buffer !! i) <== b'
-- declare a voter
atom (tg "recv_vote" ident) $ do
cond $ all_ (not_ . isMissing) buffer
vote <== computeVote (value <$> buffer)
done <== Const True
return done
| Boyer - Moore Fast Majority Vote
computeVote :: [E MsgType] -> E MsgType
computeVote = fst . foldr iter (missingMsgValueE, Const 0)
where
iter x (y, c) = ( mux (x ==. y) onTrue1 onFalse1
, mux (x ==. y) onTrue2 onFalse2)
where
-- rules:
if x = = . y , then ( y , c+1 )
else if c = = 0 , then ( x , 1 )
-- else (y, c-1)
onTrue1 = y
onTrue2 = c + Const 1
onFalse1 = mux (c ==. Const 0) x y
onFalse2 = mux (c ==. Const 0) (Const 1) (c - Const 1)
_ = c :: E Int64
-- | Synchronous observer node; current prints probe values to console at
-- phase 0. This node has no activation or behavior so its part in the model
-- is trivial.
observer :: Atom ()
observer = atom "observer" $ do
ps <- probes
mapM_ printProbe ps
-- Helper functions and definitions for Channels and Messages ----------
type MsgType = Int64
msgType = Int64
-- | Specially designated intended message to be send in the absense of faults
goodMsg :: E MsgType
goodMsg = Const 0
-- | Special message type value indicating "no message present"
missingMsgValue :: MsgType
missingMsgValue = 0
missingMsgValueE :: E MsgType
missingMsgValueE = Const 0
isMissing :: V MsgType -> E Bool
isMissing = (==. missingMsgValueE) . value
| Declare a new channel with ' missingMsgValue ' as its initial value
newChannel :: String -> Atom (ChanInput, ChanOutput)
newChannel = flip channel msgType
-- | Declare a variable of message type and add a probe for it to the
-- environment
msgVar :: Name -> Atom (V MsgType)
msgVar nm = do
v <- msgVar' nm
probe nm (value v)
return v
-- | Declare a message variable w/o adding a probe
msgVar' :: Name -> Atom (V MsgType)
msgVar' nm = int64 nm missingMsgValue
-- | Tag a name with an ID
tg :: Name -> Int -> Name
tg nm i = nm ++ "_" ++ show i
-- | Tag a name with a pair of IDs
tg2 :: Name -> Int -> Int -> Name
tg2 nm i j = nm ++ "_" ++ show i ++ "_" ++ show j
| null | https://raw.githubusercontent.com/GaloisInc/LIMA/8006bb52b2fb5d3264fe55ef8c9b7c89ab7f4630/case-studies/OM1/OM1.hs | haskell | |
Module : OM1
License : BSD3
Maintainer :
Stability : experimental
Portability : unknown
Parameters ----------------------------------------------------------
----------------------------------------------------------
| Top level rule
setup channels for communication between source, relays, and receivers
declare source node
declare relay nodes
declare receiver nodes
Source --------------------------------------------------------------
| Source node, a.k.a. "The General"
^ output channels to broadcast on
activation condition
behavior
Relays --------------------------------------------------------------
^ relay id
^ channel from source
^ channels to receivers
activation condition:
we haven't stored a value yet and there is a message waiting
on the channel 'inC'
behavior
Receivers -----------------------------------------------------------
^ receiver id
^ channels from relays
declare a voter
rules:
else (y, c-1)
| Synchronous observer node; current prints probe values to console at
phase 0. This node has no activation or behavior so its part in the model
is trivial.
Helper functions and definitions for Channels and Messages ----------
| Specially designated intended message to be send in the absense of faults
| Special message type value indicating "no message present"
| Declare a variable of message type and add a probe for it to the
environment
| Declare a message variable w/o adding a probe
| Tag a name with an ID
| Tag a name with a pair of IDs | Copyright : 2016
A specification for the distributed , fault tolerant system ) written
using
module OM1
( om1
)
where
import Control.Monad (forM, forM_)
import Data.Int
import Language.LIMA
import Language.LIMA.C (printProbe)
import Language.Sally
numRelays = 3
numRecvs = 3
relaySet = [0..numRelays-1]
recvSet = [0..numRecvs-1]
om1 :: Atom ()
om1 = do
s2rs <- mapM newChannel [ tg "s2r" i | i <- relaySet ]
r2rs <- mapM (mapM newChannel) [ [ tg2 "r2r" i j | j <- recvSet ]
| i <- relaySet ]
votes <- mapM msgVar [ tg "vote" j | j <- recvSet ]
source (map fst s2rs)
forM_ relaySet $ \ident ->
relay ident (snd (s2rs !! ident))
(map fst (r2rs !! ident))
dones <- forM recvSet $ \ident ->
recv ident [ snd ((r2rs !! i) !! ident) | i <- relaySet ] (votes !! ident)
assert "agreement" $ imply (and_ (map value dones)) (all_ (\(v,w) -> value v ==. value w)
[ (v,w) | v <- votes, w <- votes ])
assert "validity" $ imply (and_ (map value dones)) (all_ (\v -> value v ==. goodMsg) votes)
observer
-> Atom ()
source cs = atom "source" $ do
done <- bool "done" False
cond $ not_ (value done)
done <== Const True
forM_ cs $ \c -> writeChannel c goodMsg
| Relay node , a.k.a . a generic 0th round " Lieutenant "
-> Atom ()
relay ident inC outCs = atom (tg "relay" ident) $ do
done <- bool "done" False
msg <- msgVar (tg "relay_msg" ident)
cond $ isMissing msg &&. fullChannel inC
m <- readChannel inC :: Atom (E MsgType)
msg <== m
done <== Const True
forM_ outCs $ \c -> writeChannel c m
| Receiver node , a.k.a . a generic 1st round " Lieutenant "
-> V MsgType
-> Atom (V Bool)
recv ident inCs vote = atom (tg "recv" ident) $ do
done <- bool "done" False
buffer <- mapM msgVar [ tg (tg "buffer" ident) i | i <- relaySet ]
declare multiple " pollers " , one for each buffer location
forM_ relaySet $ \i ->
atom (tg2 "recv_poll" ident i) $ do
cond $ isMissing (buffer !! i) &&. fullChannel (inCs !! i)
b' <- readChannel (inCs !! i)
(buffer !! i) <== b'
atom (tg "recv_vote" ident) $ do
cond $ all_ (not_ . isMissing) buffer
vote <== computeVote (value <$> buffer)
done <== Const True
return done
| Boyer - Moore Fast Majority Vote
computeVote :: [E MsgType] -> E MsgType
computeVote = fst . foldr iter (missingMsgValueE, Const 0)
where
iter x (y, c) = ( mux (x ==. y) onTrue1 onFalse1
, mux (x ==. y) onTrue2 onFalse2)
where
if x = = . y , then ( y , c+1 )
else if c = = 0 , then ( x , 1 )
onTrue1 = y
onTrue2 = c + Const 1
onFalse1 = mux (c ==. Const 0) x y
onFalse2 = mux (c ==. Const 0) (Const 1) (c - Const 1)
_ = c :: E Int64
observer :: Atom ()
observer = atom "observer" $ do
ps <- probes
mapM_ printProbe ps
type MsgType = Int64
msgType = Int64
goodMsg :: E MsgType
goodMsg = Const 0
missingMsgValue :: MsgType
missingMsgValue = 0
missingMsgValueE :: E MsgType
missingMsgValueE = Const 0
isMissing :: V MsgType -> E Bool
isMissing = (==. missingMsgValueE) . value
| Declare a new channel with ' missingMsgValue ' as its initial value
newChannel :: String -> Atom (ChanInput, ChanOutput)
newChannel = flip channel msgType
msgVar :: Name -> Atom (V MsgType)
msgVar nm = do
v <- msgVar' nm
probe nm (value v)
return v
msgVar' :: Name -> Atom (V MsgType)
msgVar' nm = int64 nm missingMsgValue
tg :: Name -> Int -> Name
tg nm i = nm ++ "_" ++ show i
tg2 :: Name -> Int -> Int -> Name
tg2 nm i j = nm ++ "_" ++ show i ++ "_" ++ show j
|
d1eb404bd4feaa4b6f75a2b4bf857871406a962808dc9e8e76b53d34633bafcf | MaxOw/computational-geometry | General.hs | # Language MultiParamTypeClasses #
{-# Language FlexibleInstances #-}
{-# Language FlexibleContexts #-}
--------------------------------------------------------------------------------
-- |
-- Module : Geometry.Plane.General
Copyright : ( C ) 2017
-- License : BSD-style (see LICENSE)
Maintainer :
--
-- General representation of a plane. Plane in the General Form is Hession
Normal Form scaled by an arbitrary non - zero scalar .
--
--------------------------------------------------------------------------------
module Geometry.Plane.General
( Plane (..)
, Plane2, Plane3
, Plane2D, Plane3D
, MakePlane (..)
, unsafeMakePlane
, flipPlane
, collinear
, coincidence ,
, PlanesRelation (..), Incidence (..), Orientation (..)
, planesRelation
, isParallel
) where
import Protolude hiding (zipWith, zero)
import Data.Maybe (fromJust)
import qualified Data.List as List
import Linear
import Linear . Solve
import Linear.Affine (Point, (.-.))
import qualified Linear.Affine as Point
import Data.EqZero
-- | Internally Plane is represented as a pair (sN, sO) where N is a normal
-- vector of a plane O is the distance of that plane from the origin and s is an
arbitrary non - zero scalar .
data Plane v n = Plane
{ planeVector :: !(v n)
, planeLast :: !n
} deriving (Eq, Ord, Show)
type Plane2 = Plane V2
type Plane3 = Plane V3
type Plane2D = Plane V2 Double
type Plane3D = Plane V3 Double
instance (NFData (v n), NFData n) => NFData (Plane v n) where
rnf (Plane vs l) = rnf vs `seq` rnf l
-- | Flip plane orientation.
flipPlane :: (Functor v, Num n) => Plane v n -> Plane v n
flipPlane (Plane v n) = Plane (fmap negate v) (negate n)
class MakePlane v n where
-- | Make plane from vector of points. Returns Nothing if vectors between
-- points are linearly dependent
makePlane :: v (Point v n) -> Maybe (Plane v n)
instance (Num n, Eq n) => MakePlane V3 n where
makePlane (V3 p1 p2 p3)
| n == zero = Nothing
| otherwise = Just $ Plane n d
where
n = cross (p2 .-. p1) (p3 .-. p1)
d = negate $ dot n $ unPoint p1
-- | Assumes that points form a valid plane (i.e. vectors between all points are
-- linearly independent).
unsafeMakePlane :: MakePlane v n => v (Point v n) -> Plane v n
unsafeMakePlane = fromJust . makePlane
makePlane : : ( Applicative v , Solve v n , )
= > v ( Point v n ) - > Maybe ( Plane v n )
-- makePlane ps = Plane < $ > solve ups ( pure 1 ) < * > pure 1
makePlane ps = uncurry Plane < $ > solve ups ( pure 1 )
where
ups = fmap unPoint ps
-- | Assumes that points form a valid plane ( i.e. vectors between all points are
-- linearly independent ) .
unsafeMakePlane : : ( Applicative v , Solve v n , )
= > v ( Point v n ) - > Plane v n
-- unsafeMakePlane ps = Plane ( fromJust $ solve ups ( pure 1 ) ) 1
-- unsafeMakePlane ps = Plane v d
unsafeMakePlane ps = case solve ups ( pure 1 ) of
Just ( v , d ) - > Plane v d
Nothing - > error " Bla " -- . toS $ List.unlines $ map show ps
where
-- Just ( v , d ) = solve ups ( pure 1 )
ups = fmap unPoint ps
makePlane :: (Applicative v, Solve v n, Num n)
=> v (Point v n) -> Maybe (Plane v n)
-- makePlane ps = Plane <$> solve ups (pure 1) <*> pure 1
makePlane ps = uncurry Plane <$> solve ups (pure 1)
where
ups = fmap unPoint ps
-- | Assumes that points form a valid plane (i.e. vectors between all points are
-- linearly independent).
unsafeMakePlane :: (Applicative v, Solve v n, Num n)
=> v (Point v n) -> Plane v n
-- unsafeMakePlane ps = Plane (fromJust $ solve ups (pure 1)) 1
-- unsafeMakePlane ps = Plane v d
unsafeMakePlane ps = case solve ups (pure 1) of
Just (v, d) -> Plane v d
Nothing -> error "Bla" -- . toS $ List.unlines $ map show ps
where
-- Just (v, d) = solve ups (pure 1)
ups = fmap unPoint ps
-}
-- | Convert point to a vector.
unPoint :: Point v n -> v n
unPoint (Point.P x) = x
--------------------------------------------------------------------------------
| Test whether two vectors are collinear .
collinear :: (Foldable v, Num n, EqZero n) => v n -> v n -> Bool
collinear v w = all f $ combinations 2 $ zipWith (,) v w
where
f [(a, b), (c, d)] = eqZero $ a*d - b*c
f _ = False -- To silence exhaustiveness checker
-- | All n-combinations of a given list.
combinations :: Int -> [a] -> [[a]]
combinations k is
| k <= 0 = [ [] ]
| otherwise = [ x:r | x:xs <- tails is, r <- combinations (k-1) xs ]
-- | Zip two `Foldable` structures to a list with a given function.
zipWith :: Foldable f => (a -> b -> c) -> f a -> f b -> [c]
zipWith f a b = List.zipWith f (toList a) (toList b)
| Test co - incidence of two planes assuming collinearity .
coincidence :: (Foldable v, Num n, EqZero n) => Plane v n -> Plane v n -> Bool
coincidence (Plane v1 d1) (Plane v2 d2) = all f $ zipWith (,) v1 v2
where
f (x1, x2) = eqZero $ x1*d2 - x2*d1
| Test co - orientation of two assuming collinearity .
coorientation :: (Foldable v, Num n, Ord n, EqZero n)
=> Plane v n -> Plane v n -> Bool
coorientation (Plane v1 d1) (Plane v2 d2)
= all geqZero $ d1*d2 : zipWith (*) v1 v2
--------------------------------------------------------------------------------
data PlanesRelation = Parallel Incidence Orientation | Crossing deriving Show
data Incidence = CoIncident | NonIncident deriving Show
data Orientation = CoOriented | AntiOriented deriving Show
| Relate two planes on Parallelism , Incidence and Orientation .
planesRelation :: (Foldable v, Num n, Ord n, EqZero n)
=> Plane v n -> Plane v n -> PlanesRelation
planesRelation p1@(Plane v1 _) p2@(Plane v2 _)
| collinear v1 v2 = Parallel incidence orientation
| otherwise = Crossing
where
incidence = bool NonIncident CoIncident $ coincidence p1 p2
orientation = bool AntiOriented CoOriented $ coorientation p1 p2
isParallel :: (Foldable v, Num n, Ord n, EqZero n)
=> Plane v n -> Plane v n -> Bool
isParallel a b = case planesRelation a b of
Parallel _ _ -> True
Crossing -> False
| null | https://raw.githubusercontent.com/MaxOw/computational-geometry/20c93aa05b151b115250a18d1203fdf9a01f705e/src/Geometry/Plane/General.hs | haskell | # Language FlexibleInstances #
# Language FlexibleContexts #
------------------------------------------------------------------------------
|
Module : Geometry.Plane.General
License : BSD-style (see LICENSE)
General representation of a plane. Plane in the General Form is Hession
------------------------------------------------------------------------------
| Internally Plane is represented as a pair (sN, sO) where N is a normal
vector of a plane O is the distance of that plane from the origin and s is an
| Flip plane orientation.
| Make plane from vector of points. Returns Nothing if vectors between
points are linearly dependent
| Assumes that points form a valid plane (i.e. vectors between all points are
linearly independent).
makePlane ps = Plane < $ > solve ups ( pure 1 ) < * > pure 1
| Assumes that points form a valid plane ( i.e. vectors between all points are
linearly independent ) .
unsafeMakePlane ps = Plane ( fromJust $ solve ups ( pure 1 ) ) 1
unsafeMakePlane ps = Plane v d
. toS $ List.unlines $ map show ps
Just ( v , d ) = solve ups ( pure 1 )
makePlane ps = Plane <$> solve ups (pure 1) <*> pure 1
| Assumes that points form a valid plane (i.e. vectors between all points are
linearly independent).
unsafeMakePlane ps = Plane (fromJust $ solve ups (pure 1)) 1
unsafeMakePlane ps = Plane v d
. toS $ List.unlines $ map show ps
Just (v, d) = solve ups (pure 1)
| Convert point to a vector.
------------------------------------------------------------------------------
To silence exhaustiveness checker
| All n-combinations of a given list.
| Zip two `Foldable` structures to a list with a given function.
------------------------------------------------------------------------------ | # Language MultiParamTypeClasses #
Copyright : ( C ) 2017
Maintainer :
Normal Form scaled by an arbitrary non - zero scalar .
module Geometry.Plane.General
( Plane (..)
, Plane2, Plane3
, Plane2D, Plane3D
, MakePlane (..)
, unsafeMakePlane
, flipPlane
, collinear
, coincidence ,
, PlanesRelation (..), Incidence (..), Orientation (..)
, planesRelation
, isParallel
) where
import Protolude hiding (zipWith, zero)
import Data.Maybe (fromJust)
import qualified Data.List as List
import Linear
import Linear . Solve
import Linear.Affine (Point, (.-.))
import qualified Linear.Affine as Point
import Data.EqZero
arbitrary non - zero scalar .
data Plane v n = Plane
{ planeVector :: !(v n)
, planeLast :: !n
} deriving (Eq, Ord, Show)
type Plane2 = Plane V2
type Plane3 = Plane V3
type Plane2D = Plane V2 Double
type Plane3D = Plane V3 Double
instance (NFData (v n), NFData n) => NFData (Plane v n) where
rnf (Plane vs l) = rnf vs `seq` rnf l
flipPlane :: (Functor v, Num n) => Plane v n -> Plane v n
flipPlane (Plane v n) = Plane (fmap negate v) (negate n)
class MakePlane v n where
makePlane :: v (Point v n) -> Maybe (Plane v n)
instance (Num n, Eq n) => MakePlane V3 n where
makePlane (V3 p1 p2 p3)
| n == zero = Nothing
| otherwise = Just $ Plane n d
where
n = cross (p2 .-. p1) (p3 .-. p1)
d = negate $ dot n $ unPoint p1
unsafeMakePlane :: MakePlane v n => v (Point v n) -> Plane v n
unsafeMakePlane = fromJust . makePlane
makePlane : : ( Applicative v , Solve v n , )
= > v ( Point v n ) - > Maybe ( Plane v n )
makePlane ps = uncurry Plane < $ > solve ups ( pure 1 )
where
ups = fmap unPoint ps
unsafeMakePlane : : ( Applicative v , Solve v n , )
= > v ( Point v n ) - > Plane v n
unsafeMakePlane ps = case solve ups ( pure 1 ) of
Just ( v , d ) - > Plane v d
where
ups = fmap unPoint ps
makePlane :: (Applicative v, Solve v n, Num n)
=> v (Point v n) -> Maybe (Plane v n)
makePlane ps = uncurry Plane <$> solve ups (pure 1)
where
ups = fmap unPoint ps
unsafeMakePlane :: (Applicative v, Solve v n, Num n)
=> v (Point v n) -> Plane v n
unsafeMakePlane ps = case solve ups (pure 1) of
Just (v, d) -> Plane v d
where
ups = fmap unPoint ps
-}
unPoint :: Point v n -> v n
unPoint (Point.P x) = x
| Test whether two vectors are collinear .
collinear :: (Foldable v, Num n, EqZero n) => v n -> v n -> Bool
collinear v w = all f $ combinations 2 $ zipWith (,) v w
where
f [(a, b), (c, d)] = eqZero $ a*d - b*c
combinations :: Int -> [a] -> [[a]]
combinations k is
| k <= 0 = [ [] ]
| otherwise = [ x:r | x:xs <- tails is, r <- combinations (k-1) xs ]
zipWith :: Foldable f => (a -> b -> c) -> f a -> f b -> [c]
zipWith f a b = List.zipWith f (toList a) (toList b)
| Test co - incidence of two planes assuming collinearity .
coincidence :: (Foldable v, Num n, EqZero n) => Plane v n -> Plane v n -> Bool
coincidence (Plane v1 d1) (Plane v2 d2) = all f $ zipWith (,) v1 v2
where
f (x1, x2) = eqZero $ x1*d2 - x2*d1
| Test co - orientation of two assuming collinearity .
coorientation :: (Foldable v, Num n, Ord n, EqZero n)
=> Plane v n -> Plane v n -> Bool
coorientation (Plane v1 d1) (Plane v2 d2)
= all geqZero $ d1*d2 : zipWith (*) v1 v2
data PlanesRelation = Parallel Incidence Orientation | Crossing deriving Show
data Incidence = CoIncident | NonIncident deriving Show
data Orientation = CoOriented | AntiOriented deriving Show
| Relate two planes on Parallelism , Incidence and Orientation .
planesRelation :: (Foldable v, Num n, Ord n, EqZero n)
=> Plane v n -> Plane v n -> PlanesRelation
planesRelation p1@(Plane v1 _) p2@(Plane v2 _)
| collinear v1 v2 = Parallel incidence orientation
| otherwise = Crossing
where
incidence = bool NonIncident CoIncident $ coincidence p1 p2
orientation = bool AntiOriented CoOriented $ coorientation p1 p2
isParallel :: (Foldable v, Num n, Ord n, EqZero n)
=> Plane v n -> Plane v n -> Bool
isParallel a b = case planesRelation a b of
Parallel _ _ -> True
Crossing -> False
|
f531efab438af02152d06c703140614c4a1687e5fbc3d4f8550c728fc98a15cf | akvo/resumed | resumed.clj | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
;; file, You can obtain one at /
(ns org.akvo.resumed
(:require [clojure.core.cache :as cache]
[clojure.java.io :as io]
[clojure.string :as str])
(:import [java.io File FileOutputStream ByteArrayOutputStream]
java.util.UUID
javax.xml.bind.DatatypeConverter))
(def tus-headers
{"Tus-Resumable" "1.0.0"
"Tus-Version" "1.0.0"
"Tus-Extension" "creation"})
(defn gen-id []
(.replaceAll (str (UUID/randomUUID)) "-" ""))
(defn options-headers []
(select-keys tus-headers ["Tus-Version" "Tus-Extension" "Tus-Max-Size"]))
(defn get-header ^String [req header]
(get-in req [:headers (.toLowerCase ^String header)]))
(defn to-number
"Returns a numeric representation of a String.
Returns -1 on unparseable String, blank or nil"
[s]
(if (not (str/blank? s))
(try
(Long/valueOf ^String s)
(catch Exception _
-1))
-1))
(defmulti handle-request
(fn [req opts]
(:request-method req)))
(defmethod handle-request :default
[req opts]
{:status 400})
(defmethod handle-request :options
[req {:keys [max-upload-size]}]
{:status 204
:headers (assoc (options-headers) "Tus-Max-Size" (str max-upload-size))})
(defn id-from-url [url]
(last (str/split url #"/")))
(defmethod handle-request :head
[req {:keys [save-path upload-cache]}]
(let [upload-id (id-from-url (:uri req))]
(if-let [found (cache/lookup @upload-cache upload-id)]
{:status 200
:headers (assoc tus-headers
"Upload-Offset" (str (:offset found))
"Upload-Length" (str (:length found))
"Upload-Metadata" (:metadata found)
"Cache-Control" "no-cache")}
{:status 404
:body "Not Found"
:headers {"Cache-Control" "no-cache"}})))
(defn patch
[req {:keys [save-path upload-cache]}]
(let [id (id-from-url (:uri req))
found (cache/lookup @upload-cache id)]
(if found
(let [rlen (-> req (get-header "content-length") to-number)
off (-> req (get-header "upload-offset") to-number)
ct (get-header req "content-type")
tmp (ByteArrayOutputStream.)
_ (io/copy (:body req) tmp)
len (.size tmp)]
(cond
(not= "application/offset+octet-stream" ct) {:status 400
:body "Bad request: Content-Type must be application/offset+octet-stream"}
(not= (:offset found) off) {:status 409
:body "Conflict: Wrong Upload-Offset"}
(and (not= -1 rlen)
(not= rlen len)) {:status 400
:body "Bad request: Request body size doesn't match with Content-Length header"}
(or (> len (:length found))
(> (+ len (:offset found)) (:length found))) {:status 413
:body (format "Body size exceeds the %s bytes allowed"
(:length found))}
:else (with-open [fos (FileOutputStream. ^String (:file found) true)]
(.write fos (.toByteArray tmp))
(let [len (.size tmp)
new-uploads (swap! upload-cache update-in [id :offset] + len)]
{:status 204
:headers (assoc tus-headers
"Upload-Offset" (str (get-in new-uploads [id :offset])))}))))
{:status 404
:body "Not Found"})))
(defmethod handle-request :patch
[req opts]
(patch req opts))
(defn get-filename
"Returns a file name decoding a base64 string of
Upload-Metadata header.
Attribution: "
[s]
(when-not (str/blank? s)
(let [m (->> (str/split s #",")
(map #(str/split % #" "))
(into {}))]
(when-let [filename (get m "filename")]
(-> filename
(DatatypeConverter/parseBase64Binary)
(String.))))))
(defn host
"Returns the HOST for a given request
It attempts to honor: X-Forwared-Host, Origin, Host headers"
[req]
(or (get-header req "x-forwarded-host")
(some-> req (get-header "host") (.split ":") first)
(:server-name req)))
(def ^:const known-protocols #{"http" "https"})
(defn protocol
"Returns the protocol #{\"http\" \"https\"} for a given request"
[req]
(let [forwarded-proto (get-header req "x-forwarded-proto")]
(if (known-protocols forwarded-proto)
forwarded-proto
(name (:scheme req)))))
(def ^:const http-default-ports #{443 80})
(defn port
"Returns the port of the request,
Empty if port is 80, 443 as those are default ports
Empty for forwared requests (server behind a proxy)"
[req]
(let [forwarded (or (get-header req "x-forwarded-host")
(get-header req "x-forwarded-proto"))
fallback (str ":" (:server-port req))]
(if (or forwarded
(http-default-ports (:server-port req)))
""
(if-let [host (get-header req "host")]
(if (.contains host ":")
(re-find #":\d+" host)
fallback)
fallback))))
(defn location
"Get Location string from request"
[req]
(or (some-> req (get-header "origin") (str (:uri req)))
(format "%s" (protocol req) (host req) (port req) (:uri req))))
(defn post
[req {:keys [save-path upload-cache max-upload-size]}]
(let [len (-> req (get-header "upload-length") to-number)]
(cond
(neg? len) {:status 400
:body "Bad Request"}
(> len max-upload-size) {:status 413
:body "Request Entity Loo Large"}
:else (let [id (gen-id)
um (get-header req "upload-metadata")
fname (or (get-filename um) "file")
path (File. ^String save-path ^String id)
f (File. ^File path ^String fname)]
(.mkdirs path)
(.createNewFile f)
(swap! upload-cache assoc id {:offset 0
:file (.getAbsolutePath f)
:length len
:metadata um})
{:status 201
:headers {"Location" (str (location req) "/" id)
"Upload-Length" (str len)
"Upload-Metadata" um}}))))
(defmethod handle-request :post
[req opts]
(let [method-override (get-header req "x-http-method-override")]
(if (= method-override "PATCH")
(patch req opts)
(post req opts))))
(defn save-path [save-dir]
(str (or save-dir (System/getProperty "java.io.tmpdir")) "/resumed"))
(defn make-handler
"Returns a ring handler capable of responding to client requests from
a `tus` client. An optional map with configuration can be used
{:save-dir \"/path/to/save/dir\"} defaults to `java.io.tmpdir`"
[& [opts]]
(let [save-path (save-path (:save-dir opts))
upload-cache (atom (or (:upload-cache opts)
(cache/fifo-cache-factory {} :threshold 250)))
max-upload-size (* 1024
1024
(or (:max-upload-size opts) 50))]
(fn [req]
(handle-request req {:save-path save-path
:upload-cache upload-cache
:max-upload-size max-upload-size}))))
(defn file-for-upload
"Given a save-dir and a file upload url, it returns the java.io.File that was uploaded"
[save-dir uri]
(let [id (id-from-url uri)
upload-path (str (save-path save-dir) "/" id)]
(when-not (re-matches #"[a-zA-Z0-9-]+" id)
(throw (ex-info "Invalid file" {:filename id})))
(-> upload-path
io/file
.listFiles
first)))
| null | https://raw.githubusercontent.com/akvo/resumed/266acfa5bb52c9b484af19f0bcfbfacb60b97319/src/org/akvo/resumed.clj | clojure | file, You can obtain one at / | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
(ns org.akvo.resumed
(:require [clojure.core.cache :as cache]
[clojure.java.io :as io]
[clojure.string :as str])
(:import [java.io File FileOutputStream ByteArrayOutputStream]
java.util.UUID
javax.xml.bind.DatatypeConverter))
(def tus-headers
{"Tus-Resumable" "1.0.0"
"Tus-Version" "1.0.0"
"Tus-Extension" "creation"})
(defn gen-id []
(.replaceAll (str (UUID/randomUUID)) "-" ""))
(defn options-headers []
(select-keys tus-headers ["Tus-Version" "Tus-Extension" "Tus-Max-Size"]))
(defn get-header ^String [req header]
(get-in req [:headers (.toLowerCase ^String header)]))
(defn to-number
"Returns a numeric representation of a String.
Returns -1 on unparseable String, blank or nil"
[s]
(if (not (str/blank? s))
(try
(Long/valueOf ^String s)
(catch Exception _
-1))
-1))
(defmulti handle-request
(fn [req opts]
(:request-method req)))
(defmethod handle-request :default
[req opts]
{:status 400})
(defmethod handle-request :options
[req {:keys [max-upload-size]}]
{:status 204
:headers (assoc (options-headers) "Tus-Max-Size" (str max-upload-size))})
(defn id-from-url [url]
(last (str/split url #"/")))
(defmethod handle-request :head
[req {:keys [save-path upload-cache]}]
(let [upload-id (id-from-url (:uri req))]
(if-let [found (cache/lookup @upload-cache upload-id)]
{:status 200
:headers (assoc tus-headers
"Upload-Offset" (str (:offset found))
"Upload-Length" (str (:length found))
"Upload-Metadata" (:metadata found)
"Cache-Control" "no-cache")}
{:status 404
:body "Not Found"
:headers {"Cache-Control" "no-cache"}})))
(defn patch
  "Appends a chunk of uploaded bytes to an existing upload (tus core PATCH).
  Validates Content-Type, Upload-Offset and Content-Length before writing,
  then appends the body to the upload's backing file and advances the cached
  offset. Responds 404 (unknown id), 400 (bad type/length), 409 (wrong
  offset), 413 (chunk would exceed declared length) or 204 (success)."
  [req {:keys [save-path upload-cache]}]
  (let [id (id-from-url (:uri req))
        found (cache/lookup @upload-cache id)]
    (if found
      (let [rlen (-> req (get-header "content-length") to-number)
            off (-> req (get-header "upload-offset") to-number)
            ct (get-header req "content-type")
            ;; buffer the whole request body in memory so its actual size can
            ;; be validated before anything is written to disk
            tmp (ByteArrayOutputStream.)
            _ (io/copy (:body req) tmp)
            len (.size tmp)]
        (cond
          (not= "application/offset+octet-stream" ct) {:status 400
                                                       :body "Bad request: Content-Type must be application/offset+octet-stream"}
          (not= (:offset found) off) {:status 409
                                      :body "Conflict: Wrong Upload-Offset"}
          ;; rlen is -1 when Content-Length is absent/unparseable; only
          ;; cross-check it against the real body size when it was supplied
          (and (not= -1 rlen)
               (not= rlen len)) {:status 400
                                 :body "Bad request: Request body size doesn't match with Content-Length header"}
          (or (> len (:length found))
              (> (+ len (:offset found)) (:length found))) {:status 413
                                                            :body (format "Body size exceeds the %s bytes allowed"
                                                                          (:length found))}
          ;; append the chunk (FileOutputStream in append mode) and bump the
          ;; cached offset atomically via swap!
          :else (with-open [fos (FileOutputStream. ^String (:file found) true)]
                  (.write fos (.toByteArray tmp))
                  (let [len (.size tmp)
                        new-uploads (swap! upload-cache update-in [id :offset] + len)]
                    {:status 204
                     :headers (assoc tus-headers
                                     "Upload-Offset" (str (get-in new-uploads [id :offset])))}))))
      {:status 404
       :body "Not Found"})))
;; PATCH: delegate to `patch`, which performs the actual chunk append.
(defmethod handle-request :patch
  [req opts]
  (patch req opts))
(defn get-filename
  "Returns the file name decoded from a tus `Upload-Metadata` header value,
  i.e. a comma-separated list of `key base64value` pairs.
  Returns nil when s is blank or contains no `filename` entry.
  Attribution: "
  [s]
  (when-not (str/blank? s)
    (let [m (->> (str/split s #",")
                 (map #(str/split % #" "))
                 ;; tolerate value-less entries (e.g. boolean flags like
                 ;; \"is_confidential\") instead of blowing up in `into {}`
                 (keep (fn [[k v]] (when v [k v])))
                 (into {}))]
      (when-let [filename (get m "filename")]
        ;; java.util.Base64 replaces javax.xml.bind.DatatypeConverter, which
        ;; was removed from the JDK in Java 11.
        (String. (.decode (java.util.Base64/getDecoder) ^String filename))))))
(defn host
  "Returns the host name for req, honouring (in order) the X-Forwarded-Host
  header, the Host header (with any :port suffix stripped), and finally the
  ring :server-name."
  [req]
  (or (get-header req "x-forwarded-host")
      (when-let [h (get-header req "host")]
        (first (str/split h #":")))
      (:server-name req)))
;; Protocols we trust from a proxy's X-Forwarded-Proto header.
(def ^:const known-protocols #{"http" "https"})
(defn protocol
  "Returns the protocol #{\"http\" \"https\"} for a given request.
  A recognised X-Forwarded-Proto wins; otherwise the ring :scheme is used."
  [req]
  (let [forwarded (get-header req "x-forwarded-proto")]
    (if (contains? known-protocols forwarded)
      forwarded
      (name (:scheme req)))))
(def ^:const http-default-ports #{443 80})
(defn port
  "Returns the request's port as a \":port\" suffix string.
  Empty when the port is 80/443 (scheme defaults), or when the request was
  forwarded by a proxy (X-Forwarded-Host / X-Forwarded-Proto present), since
  the client-facing port is unknown in that case."
  [req]
  (let [forwarded (or (get-header req "x-forwarded-host")
                      (get-header req "x-forwarded-proto"))
        fallback (str ":" (:server-port req))]
    (if (or forwarded
            (http-default-ports (:server-port req)))
      ""
      ;; Prefer the explicit port from the Host header when present,
      ;; otherwise fall back to the ring :server-port.
      (if-let [host (get-header req "host")]
        (if (.contains host ":")
          (re-find #":\d+" host)
          fallback)
        fallback))))
(defn location
  "Returns the absolute URL of the upload resource for req. Prefers the
  Origin header (plus the request URI); otherwise rebuilds the URL from the
  request's protocol, host, port and URI."
  [req]
  (or (some-> req (get-header "origin") (str (:uri req)))
      ;; BUG FIX: the format string used to be just \"%s\"; java.lang.String
      ;; .format silently ignores surplus arguments, so the fallback yielded
      ;; only the protocol (e.g. \"http\") instead of \"http://host:port/uri\".
      (format "%s://%s%s%s" (protocol req) (host req) (port req) (:uri req))))
(defn post
  "Handles upload creation (tus creation extension): validates Upload-Length,
  allocates an id plus an empty backing file under save-path/<id>/, registers
  the upload in the cache, and answers 201 with its Location."
  [req {:keys [save-path upload-cache max-upload-size]}]
  (let [len (-> req (get-header "upload-length") to-number)]
    (cond
      ;; -1 signals a missing/unparseable Upload-Length header
      (neg? len) {:status 400
                  :body "Bad Request"}
      ;; typo fix: body previously read "Request Entity Loo Large"
      (> len max-upload-size) {:status 413
                               :body "Request Entity Too Large"}
      :else (let [id (gen-id)
                  um (get-header req "upload-metadata")
                  fname (or (get-filename um) "file")
                  path (File. ^String save-path ^String id)
                  f (File. ^File path ^String fname)]
              (.mkdirs path)
              (.createNewFile f)
              (swap! upload-cache assoc id {:offset 0
                                            :file (.getAbsolutePath f)
                                            :length len
                                            :metadata um})
              {:status 201
               :headers {"Location" (str (location req) "/" id)
                         "Upload-Length" (str len)
                         "Upload-Metadata" um}}))))
;; POST: upload creation. Clients (or proxies) that cannot issue PATCH may
;; tunnel it through POST via the X-HTTP-Method-Override header.
(defmethod handle-request :post
  [req opts]
  (let [method-override (get-header req "x-http-method-override")]
    (if (= method-override "PATCH")
      (patch req opts)
      (post req opts))))
(defn save-path
  "Returns the directory (as a string) under which uploads are stored:
  `<save-dir>/resumed`, with save-dir defaulting to java.io.tmpdir."
  [save-dir]
  (let [base (or save-dir (System/getProperty "java.io.tmpdir"))]
    (str base "/resumed")))
(defn make-handler
  "Returns a ring handler capable of responding to client requests from
   a `tus` client. An optional map with configuration can be used
   {:save-dir \"/path/to/save/dir\"} defaults to `java.io.tmpdir`"
  [& [opts]]
  (let [save-path (save-path (:save-dir opts))
        ;; FIFO cache of in-progress uploads:
        ;; id -> {:offset :file :length :metadata}
        upload-cache (atom (or (:upload-cache opts)
                               (cache/fifo-cache-factory {} :threshold 250)))
        ;; :max-upload-size is given in megabytes (default 50); stored in bytes
        max-upload-size (* 1024
                           1024
                           (or (:max-upload-size opts) 50))]
    (fn [req]
      (handle-request req {:save-path save-path
                           :upload-cache upload-cache
                           :max-upload-size max-upload-size}))))
(defn file-for-upload
  "Given a save-dir and a file upload url, it returns the java.io.File that
  was uploaded."
  [save-dir uri]
  (let [id (id-from-url uri)
        upload-path (str (save-path save-dir) "/" id)]
    ;; Only alphanumerics and dashes are accepted as ids: rejects separators
    ;; and dots, preventing path traversal outside the upload directory.
    (when-not (re-matches #"[a-zA-Z0-9-]+" id)
      (throw (ex-info "Invalid file" {:filename id})))
    ;; Each upload lives in its own directory named by id and holds a single
    ;; file (created by `post`), hence `first` of the directory listing.
    (-> upload-path
        io/file
        .listFiles
        first)))
|
70c2346a577bf1e693e29820dd658b068dda4d26b50e97a8419fd25b7e983e37 | jahfer/clj-activitypub | net.clj | (ns clj-activitypub.net
(:require [clj-activitypub.internal.thread-cache :as tc]
[clj-activitypub.internal.crypto :as crypto]
[clj-activitypub.internal.http-util :as http]
[clj-http.client :as client]
[clojure.set :refer [union]]
[clojure.string :as str]
[clojure.walk :refer [stringify-keys]]))
;; Header names that participate in the HTTP Signature, in signing order.
(def signature-headers ["(request-target)" "host" "date" "digest"])
(defn- str-for-signature
  "Builds the string-to-sign: one \"name: value\" line per signature header
  present in `headers` (names compared case-insensitively), newline-joined."
  [headers]
  (let [lowered (into {} (map (fn [[k v]] [(str/lower-case k) v])) headers)]
    (->> signature-headers
         (keep (fn [k]
                 (when (contains? lowered k)
                   (str k ": " (get lowered k)))))
         (str/join "\n"))))
(defn gen-signature-header
  "Generates a HTTP Signature string based on the provided map of headers.
  The result is a comma-separated list of quoted keyId/headers/signature
  fields; the signature is the base64-encoded crypto/sign of the
  string-to-sign built from `signature-headers`."
  [config headers]
  (let [{:keys [user-id private-key]} config
        string-to-sign (str-for-signature headers)
        signature (crypto/base64-encode (crypto/sign string-to-sign private-key))
        sig-header-keys {"keyId" user-id
                         "headers" (str/join " " signature-headers)
                         "signature" signature}]
    ;; emit key="value" pairs joined by commas
    (->> sig-header-keys
         (reduce-kv (fn [m k v]
                      (conj m (str k "=" "\"" v "\""))) [])
         (interpose ",")
         (apply str))))
(defn auth-headers
  "Given a config and request map of {:body ... :headers ...}, returns the
   original set of headers with Signature and Digest attributes appended. If
   Date is not in the original header set, it will also be appended."
  [config {:keys [body headers] :or {headers {}}}]
  (let [digest (http/digest body)
        headers (cond-> headers
                  (not (contains? headers "Date")) (assoc "Date" (http/date)))
        ;; headers' is only used to build the signature: it additionally
        ;; carries the pseudo-header (request-target) for a POST to /inbox.
        headers' (-> headers
                     (assoc "Digest" digest)
                     (assoc "(request-target)" "post /inbox"))]
    (assoc headers
           "Signature" (gen-signature-header config headers')
           "Digest" digest)))
;; Cache of fetched remote objects, keyed by URL (values are delays, so each
;; URL is fetched at most once).
(def ^:private object-cache (tc/make))
(defn reset-object-cache!
  "Removes all entries from the object cache, which is populated with results
   from [[fetch-objects!]] or [[fetch-user!]]."
  []
  (tc/reset object-cache))
;; ActivityStreams type predicates used to classify fetched objects.
(def actor-type? #{"Person" "Service" "Application"})
(def terminal-object-type? (union actor-type?))
(def collection-type? #{"OrderedCollection" "Collection"})
(def collection-page-type? #{"OrderedCollectionPage" "CollectionPage"})
(def any-collection-type? (union collection-type? collection-page-type?))
;; resolve!/lazy-resolve! are mutually recursive with the helpers below.
(declare resolve!)
(declare lazy-resolve!)
(defn- ensure-seq
  "Wraps x in a vector unless it is already sequential."
  [x]
  (cond-> x (not (sequential? x)) vector))
(defn- resolve-collection! [object]
  ;; Walks an ActivityPub Collection/CollectionPage: a Collection is entered
  ;; through its :first page; a page contributes its (ordered) items plus,
  ;; recursively, the :next page. The result may contain nested delays and
  ;; sequences; resolve! flattens and forces them.
  (let [type (:type object)]
    (condp some (if (coll? type) type [type])
      collection-type? (lazy-resolve! (:first object))
      collection-page-type? (let [items (or (:orderedItems object) (:items object))]
                              (cond-> []
                                items (concat (map lazy-resolve! items))
                                (:next object) (concat (ensure-seq (lazy-resolve! (:next object)))))))))
(defn- fetch-objects!
  ;; Memoized HTTP GET: the object cache stores one delay per URL, so the
  ;; request happens at most once; fetch failures resolve to nil (swallowed).
  ;; NOTE(review): the println is debug output on stdout — consider a logger.
  [remote-id]
  (tc/fetch object-cache remote-id
            #(delay (do
                      (println "Performing GET" remote-id)
                      (some-> (try (client/get remote-id http/GET-config)
                                   (catch Exception _ nil))
                              (:body)
                              (lazy-resolve!))))))
(defn lazy-resolve!
  "Resolves one step: URL strings are fetched (lazily, via the object cache),
  collection objects are expanded page by page, and anything already concrete
  (plain map, sequence or delay) is returned as-is; nil becomes []."
  [str-or-obj]
  (condp apply [str-or-obj]
    string? (fetch-objects! str-or-obj)
    map? (if (any-collection-type? (:type str-or-obj))
           (resolve-collection! str-or-obj)
           str-or-obj)
    sequential? str-or-obj
    delay? str-or-obj
    nil? []))
(defn resolve!
  "Fetches the resource(s) located at remote-id from a remote server. Results
   are returned as a collection. If URL points to an ActivityPub Collection,
   the links will be followed until a resolved object is found. Will return
   cached results if they exist in memory."
  [str-or-obj]
  (let [result (-> str-or-obj
                   (lazy-resolve!)
                   (ensure-seq))]
    ;; The lazy step yields a tree of delays and sequences; walk it with
    ;; tree-seq, forcing delays along the way, and keep only the concrete
    ;; leaf objects (everything that is neither a delay nor a sequence).
    (letfn [(branch? [x] (or (delay? x)
                             (sequential? x)))
            (children [x] (map force (ensure-seq x)))]
      (remove branch? (tree-seq branch? children result)))))
;; (def coll [:a :b (delay [:c :d])])
;; (remove #(-> % force coll?) (tree-seq #(-> % force coll?) #(-> % force seq) coll))
(defn fetch-actor!
  "Fetches the actor located at user-id from a remote server. If you wish to
   retrieve a list of objects, see [[fetch-objects!]]. Will return a cached
   result if it exists in memory."
  [user-id]
  (let [object (first (resolve! user-id))]
    ;; resolve! may yield any object type; only return it when it is an
    ;; actor (Person/Service/Application), otherwise nil.
    (when (actor-type? (:type object))
      object)))
(defn delivery-targets!
  "Returns the distinct inbox locations for the audience of the activity. This
   includes the :to, :cc, :audience, :target, :inReplyTo, :object, and :tag
   fields while also removing the author's own address. If the user's server
   supports a sharedInbox, that location is returned instead."
  [activity]
  (let [activity (stringify-keys activity)]
    (->> (map activity ["to" "cc" "audience" "target"
                        "inReplyTo" "object" "tag"])
         (flatten)
         ;; Each audience entry is either a URL string or an embedded object;
         ;; for objects, prefer its "actor", then "attributedTo", then (for
         ;; actor-typed objects) the object's own id.
         (map #(condp apply [%]
                 map? (or (get % "actor")
                          (get % "attributedTo")
                          (when (actor-type? (get % "type"))
                            (get % "id")))
                 string? %
                 nil))
         (remove nil?)
         (distinct)
         ;; never deliver back to the author
         (remove #(= % (get activity "actor")))
         (mapcat resolve!)
         ;; prefer the server-wide sharedInbox endpoint over per-actor inboxes
         (map #(or (get-in % [:endpoints :sharedInbox])
                   (get % :inbox)))
         (distinct))))
(:require [clj-activitypub.internal.thread-cache :as tc]
[clj-activitypub.internal.crypto :as crypto]
[clj-activitypub.internal.http-util :as http]
[clj-http.client :as client]
[clojure.set :refer [union]]
[clojure.string :as str]
[clojure.walk :refer [stringify-keys]]))
(def signature-headers ["(request-target)" "host" "date" "digest"])
(defn- str-for-signature [headers]
(let [headers-xf (reduce-kv
(fn [m k v]
(assoc m (str/lower-case k) v)) {} headers)]
(->> signature-headers
(select-keys headers-xf)
(reduce-kv (fn [coll k v] (conj coll (str k ": " v))) [])
(interpose "\n")
(apply str))))
(defn gen-signature-header
"Generates a HTTP Signature string based on the provided map of headers."
[config headers]
(let [{:keys [user-id private-key]} config
string-to-sign (str-for-signature headers)
signature (crypto/base64-encode (crypto/sign string-to-sign private-key))
sig-header-keys {"keyId" user-id
"headers" (str/join " " signature-headers)
"signature" signature}]
(->> sig-header-keys
(reduce-kv (fn [m k v]
(conj m (str k "=" "\"" v "\""))) [])
(interpose ",")
(apply str))))
(defn auth-headers
"Given a config and request map of {:body ... :headers ...}, returns the
original set of headers with Signature and Digest attributes appended. If
Date is not in the original header set, it will also be appended."
[config {:keys [body headers] :or {headers {}}}]
(let [digest (http/digest body)
headers (cond-> headers
(not (contains? headers "Date")) (assoc "Date" (http/date)))
headers' (-> headers
(assoc "Digest" digest)
(assoc "(request-target)" "post /inbox"))]
(assoc headers
"Signature" (gen-signature-header config headers')
"Digest" digest)))
(def ^:private object-cache (tc/make))
(defn reset-object-cache!
"Removes all entries from the object cache, which is populated with results
from [[fetch-objects!]] or [[fetch-user!]]."
[]
(tc/reset object-cache))
(def actor-type? #{"Person" "Service" "Application"})
(def terminal-object-type? (union actor-type?))
(def collection-type? #{"OrderedCollection" "Collection"})
(def collection-page-type? #{"OrderedCollectionPage" "CollectionPage"})
(def any-collection-type? (union collection-type? collection-page-type?))
(declare resolve!)
(declare lazy-resolve!)
(defn- ensure-seq [x]
(if (sequential? x) x [x]))
(defn- resolve-collection! [object]
(let [type (:type object)]
(condp some (if (coll? type) type [type])
collection-type? (lazy-resolve! (:first object))
collection-page-type? (let [items (or (:orderedItems object) (:items object))]
(cond-> []
items (concat (map lazy-resolve! items))
(:next object) (concat (ensure-seq (lazy-resolve! (:next object)))))))))
(defn- fetch-objects!
[remote-id]
(tc/fetch object-cache remote-id
#(delay (do
(println "Performing GET" remote-id)
(some-> (try (client/get remote-id http/GET-config)
(catch Exception _ nil))
(:body)
(lazy-resolve!))))))
(defn lazy-resolve!
[str-or-obj]
(condp apply [str-or-obj]
string? (fetch-objects! str-or-obj)
map? (if (any-collection-type? (:type str-or-obj))
(resolve-collection! str-or-obj)
str-or-obj)
sequential? str-or-obj
delay? str-or-obj
nil? []))
(defn resolve!
"Fetches the resource(s) located at remote-id from a remote server. Results
are returned as a collection. If URL points to an ActivityPub Collection,
the links will be followed until a resolved object is found. Will return
cached results if they exist in memory."
[str-or-obj]
(let [result (-> str-or-obj
(lazy-resolve!)
(ensure-seq))]
(letfn [(branch? [x] (or (delay? x)
(sequential? x)))
(children [x] (map force (ensure-seq x)))]
(remove branch? (tree-seq branch? children result)))))
( remove # ( - > % force coll ? ) ( tree - seq # ( - > % force coll ? ) # ( - > % force seq ) coll ) )
(defn fetch-actor!
"Fetches the actor located at user-id from a remote server. If you wish to
retrieve a list of objects, see [[fetch-objects!]]. Will return a cached
result if it exists in memory."
[user-id]
(let [object (first (resolve! user-id))]
(when (actor-type? (:type object))
object)))
(defn delivery-targets!
"Returns the distinct inbox locations for the audience of the activity. This
includes the :to, :cc, :audience, :target, :inReplyTo, :object, and :tag
fields while also removing the author's own address. If the user's server
supports a sharedInbox, that location is returned instead."
[activity]
(let [activity (stringify-keys activity)]
(->> (map activity ["to" "cc" "audience" "target"
"inReplyTo" "object" "tag"])
(flatten)
(map #(condp apply [%]
map? (or (get % "actor")
(get % "attributedTo")
(when (actor-type? (get % "type"))
(get % "id")))
string? %
nil))
(remove nil?)
(distinct)
(remove #(= % (get activity "actor")))
(mapcat resolve!)
(map #(or (get-in % [:endpoints :sharedInbox])
(get % :inbox)))
(distinct)))) |
60e3507d5ee5dd9c409fa89152e04dd01653ae5f7c60aa257e094feb2a48df39 | discus-lang/ddc | Prim.hs | {-# OPTIONS_HADDOCK hide #-}
module DDC.Core.Check.Judge.Type.Prim
( checkPrim
, shapeOfPrim)
where
import DDC.Core.Check.Judge.Type.Base
import qualified DDC.Type.Sum as Sum
-- | Type-check a primitive operator expression (XPrim).
--   The expression's type is the primitive's shape type, and referencing a
--   primitive is pure: the effect is the empty sum over kEffect.
checkPrim :: Checker a n
checkPrim !_table !ctx !_mode !_demand (XPrim a p)
 = do
        returnX a
                (\z -> XPrim z p)
                (shapeOfPrim p)
                (Sum.empty kEffect)
                ctx
-- Should be unreachable: the dispatcher only calls checkPrim on XPrim nodes.
checkPrim _ _ _ _ _
 = error "ddc-core.checkPrim: no match"
-- | Yield the type scheme of a primitive operator.
--   The tuple/record cases use parallel list comprehensions
--   ([ (l, t) | l <- ls | t <- tsParam ]) to pair each label with its
--   freshly quantified element type.
shapeOfPrim :: Prim -> Type n
shapeOfPrim p
 = case p of
        -- elaborate: polymorphic identity over an arbitrary data type.
        PElaborate
         -> tForall kData $ \tVal -> tVal
        -- tuple construction: one quantified type per label; the fields are
        -- taken as curried arguments, yielding the tuple type.
        PTuple ls
         -> tForalls (replicate (length ls) kData)
         $ \tsParam -> foldr tFun
                             (tTuple [ (l, t) | l <- ls | t <- tsParam])
                             tsParam
        -- record construction, analogous to tuples.
        PRecord ls
         -> tForalls (replicate (length ls) kData)
         $ \tsParam -> foldr tFun
                             (tRecord [ (l, t) | l <- ls | t <- tsParam ])
                             tsParam
        -- variant injection: field value in, variant object out.
        PVariant _l
         -> tForalls [kData, kData]
         $ \[tObj, tField] -> tField `tFun` tObj
        -- field projection: object in, field value out.
        PProject _l
         -> tForalls [kData, kData]
         $ \[tObj, tField] -> tObj `tFun` tField
| null | https://raw.githubusercontent.com/discus-lang/ddc/2baa1b4e2d43b6b02135257677671a83cb7384ac/src/s1/ddc-core/DDC/Core/Check/Judge/Type/Prim.hs | haskell | # OPTIONS_HADDOCK hide # | module DDC.Core.Check.Judge.Type.Prim
( checkPrim
, shapeOfPrim)
where
import DDC.Core.Check.Judge.Type.Base
import qualified DDC.Type.Sum as Sum
checkPrim :: Checker a n
checkPrim !_table !ctx !_mode !_demand (XPrim a p)
= do
returnX a
(\z -> XPrim z p)
(shapeOfPrim p)
(Sum.empty kEffect)
ctx
checkPrim _ _ _ _ _
= error "ddc-core.checkPrim: no match"
shapeOfPrim :: Prim -> Type n
shapeOfPrim p
= case p of
PElaborate
-> tForall kData $ \tVal -> tVal
PTuple ls
-> tForalls (replicate (length ls) kData)
$ \tsParam -> foldr tFun
(tTuple [ (l, t) | l <- ls | t <- tsParam])
tsParam
PRecord ls
-> tForalls (replicate (length ls) kData)
$ \tsParam -> foldr tFun
(tRecord [ (l, t) | l <- ls | t <- tsParam ])
tsParam
PVariant _l
-> tForalls [kData, kData]
$ \[tObj, tField] -> tField `tFun` tObj
PProject _l
-> tForalls [kData, kData]
$ \[tObj, tField] -> tObj `tFun` tField
|
8cec51538e9ee979b35690416d570f82178b4303cbdfe04f710a31ac9ea0da44 | ds-wizard/engine-backend | DocumentTemplateDraftDataDAO.hs | module Wizard.Database.DAO.DocumentTemplate.DocumentTemplateDraftDataDAO where
import Control.Monad.Reader (asks)
import Data.String (fromString)
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
import GHC.Int
import Wizard.Database.DAO.Common
import Wizard.Database.Mapping.DocumentTemplate.DocumentTemplateDraftData ()
import Wizard.Model.Context.AppContext
import Wizard.Model.Context.ContextLenses ()
import Wizard.Model.DocumentTemplate.DocumentTemplateDraftData
-- Table name used by the generic DAO helper functions below.
entityName = "document_template_draft_data"
-- Look up the draft data for one document template, scoped to the current
-- app (multi-tenant filter on app_uuid plus document_template_id).
findDraftDataById :: String -> AppContextM DocumentTemplateDraftData
-- NOTE(review): the parameter name shadows Prelude's 'id'.
findDraftDataById id = do
  appUuid <- asks currentAppUuid
  createFindEntityByFn entityName [appQueryUuid appUuid, ("document_template_id", id)]
-- Insert a new draft-data row; returns the number of affected rows.
insertDraftData :: DocumentTemplateDraftData -> AppContextM Int64
insertDraftData = createInsertFn entityName
-- Update an existing row, keyed by the record's own app_uuid and
-- document_template_id (parameters are the row fields followed by the two
-- WHERE-clause values).
updateDraftDataById :: DocumentTemplateDraftData -> AppContextM Int64
updateDraftDataById draftData = do
  -- NOTE(review): appUuid is bound but unused here; the WHERE clause uses
  -- draftData.appUuid instead — confirm this is intentional.
  appUuid <- asks currentAppUuid
  let sql =
        fromString
          "UPDATE document_template_draft_data SET document_template_id = ?, questionnaire_uuid = ?, format_uuid = ?, app_uuid = ?, created_at = ?, updated_at = ? WHERE app_uuid = ? AND document_template_id = ?"
  let params = toRow draftData ++ [toField draftData.appUuid, toField draftData.documentTemplateId]
  logQuery sql params
  let action conn = execute conn sql params
  runDB action
-- Delete every draft-data row (no app scoping).
deleteDraftDatas :: AppContextM Int64
deleteDraftDatas = createDeleteEntitiesFn entityName
-- Delete the draft data of one document template within the current app.
deleteDraftDataByDocumentTemplateId :: String -> AppContextM Int64
deleteDraftDataByDocumentTemplateId tmlId = do
  appUuid <- asks currentAppUuid
  createDeleteEntitiesByFn entityName [appQueryUuid appUuid, ("document_template_id", tmlId)]
| null | https://raw.githubusercontent.com/ds-wizard/engine-backend/d392b751192a646064305d3534c57becaa229f28/engine-wizard/src/Wizard/Database/DAO/DocumentTemplate/DocumentTemplateDraftDataDAO.hs | haskell | module Wizard.Database.DAO.DocumentTemplate.DocumentTemplateDraftDataDAO where
import Control.Monad.Reader (asks)
import Data.String (fromString)
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
import GHC.Int
import Wizard.Database.DAO.Common
import Wizard.Database.Mapping.DocumentTemplate.DocumentTemplateDraftData ()
import Wizard.Model.Context.AppContext
import Wizard.Model.Context.ContextLenses ()
import Wizard.Model.DocumentTemplate.DocumentTemplateDraftData
entityName = "document_template_draft_data"
findDraftDataById :: String -> AppContextM DocumentTemplateDraftData
findDraftDataById id = do
appUuid <- asks currentAppUuid
createFindEntityByFn entityName [appQueryUuid appUuid, ("document_template_id", id)]
insertDraftData :: DocumentTemplateDraftData -> AppContextM Int64
insertDraftData = createInsertFn entityName
updateDraftDataById :: DocumentTemplateDraftData -> AppContextM Int64
updateDraftDataById draftData = do
appUuid <- asks currentAppUuid
let sql =
fromString
"UPDATE document_template_draft_data SET document_template_id = ?, questionnaire_uuid = ?, format_uuid = ?, app_uuid = ?, created_at = ?, updated_at = ? WHERE app_uuid = ? AND document_template_id = ?"
let params = toRow draftData ++ [toField draftData.appUuid, toField draftData.documentTemplateId]
logQuery sql params
let action conn = execute conn sql params
runDB action
deleteDraftDatas :: AppContextM Int64
deleteDraftDatas = createDeleteEntitiesFn entityName
deleteDraftDataByDocumentTemplateId :: String -> AppContextM Int64
deleteDraftDataByDocumentTemplateId tmlId = do
appUuid <- asks currentAppUuid
createDeleteEntitiesByFn entityName [appQueryUuid appUuid, ("document_template_id", tmlId)]
| |
8095807a417486a5413eeb015ce9dc8fcbe340a42f1eb294e22248d1aa91d7c6 | rurban/clisp | clos-method1.lisp | Common Lisp Object System for CLISP : Methods
;;;; 21.8.1993 - 2004
;;;; 1998 - 2004, 2007, 2010, 2017
;;;; German comments translated into English: 2002-04-08
(in-package "CLOS")
;;; ---------------------------------------------------------------------------
(defparameter <method>
  (defclass method (standard-stablehash metaobject)
    ;; NOTE(review): the slot's opening "(($from-defgeneric" was lost in this
    ;; copy (extraction damage); reconstructed from the :accessor name below —
    ;; confirm against upstream clos-method1.lisp.
    (($from-defgeneric     ; flag, if this method comes from a DEFGENERIC
       :type boolean
       :accessor method-from-defgeneric))
    (:fixed-slot-locations t)
    (:generic-accessors nil)))
;; Shared initializer for METHOD metaobjects. Works both during bootstrap
;; (before CLOS is fully operational) and afterwards, hence the
;; *classes-finished* dispatch below.
(defun initialize-instance-<method> (method &rest args
                                     &key ((from-defgeneric from-defgeneric) nil)
                                          ((backpointer backpointer) nil backpointer-p)
                                     &allow-other-keys)
  (if *classes-finished*
    (apply #'%initialize-instance method args) ; == (call-next-method)
    ; Bootstrapping: Simulate the effect of #'%initialize-instance.
    (apply #'shared-initialize-<standard-stablehash> method 't args))
  ; Fill the slots.
  (setf (method-from-defgeneric method) from-defgeneric)
  ; Fill the backpointer. This is needed for NO-NEXT-METHOD to work: When
  ; CALL-NEXT-METHOD is called from within the method function without a next
  ; method being available, the method function must call NO-NEXT-METHOD with
  ; the method object as argument. But since the method function is called
  ; with the argument list and the remaining methods list as arguments, it
  ; cannot know about the method object to which it belongs. We solve this
  ; paradox by constructing a backpointer cons that the method function
  ; has access to and that points back to the method object after it has been
  ; initialized.
  (when backpointer-p
    (setf (car backpointer) method))
  method)
;;; ---------------------------------------------------------------------------
;; NOTE(review): the "(defparameter <standard-method>" wrapper line and the
;; slot openers for $specializers, $lambda-list and $documentation were lost
;; in this copy (extraction damage). They are reconstructed here from the
;; :accessor names and the trailing close-parens; confirm against upstream
;; clos-method1.lisp.
(defparameter <standard-method>     ; ABI
  (defclass standard-method (method)
    (($fast-function       ; the function with fast calling conventions, i.e.
                           ; argument list (&rest arguments) or
                           ; (next-methods-function &rest arguments), depending
                           ; on wants-next-method-p
       :type (or null function)
       :accessor std-method-fast-function)
     ($wants-next-method-p ; flag, if the NEXT-METHOD (as function with all
                           ; arguments) resp. NIL is to be passed as first
                           ; argument (= NIL for :BEFORE- and :AFTER-methods)
       :type boolean
       :accessor std-method-wants-next-method-p)
     ($function            ; the function with slow calling conventions, i.e.
                           ; argument list (arguments next-methods-list)
       :type (or null function)
       :accessor std-method-function)
     ($specializers        ; list of specializers, e.g. classes or
                           ; eql-specializers
       :type list
       :accessor std-method-specializers)
     ($qualifiers          ; list of non-NIL atoms, e.g. (:before)
       :type list
       :accessor std-method-qualifiers)
     ($lambda-list         ; lambda list without specializers
       :type list
       :accessor std-method-lambda-list)
     ($signature           ; signature struct (see functions.lisp)
       :type (simple-vector 6)
       :accessor std-method-signature)
     ($documentation       ; string or NIL
       :type (or string null)
       :accessor std-method-documentation)
     ($gf                  ; the generic function, which this method belongs to
                           ; (only for the purpose of CALL-NEXT-METHOD and
                           ; NO-NEXT-METHOD)
       :type (or null generic-function)
       :accessor std-method-generic-function))
    (:fixed-slot-locations t)
    (:generic-accessors nil)))
;; Note about the argument passing convention for methods:
;; 1) The MOP description of COMPUTE-EFFECTIVE-METHOD and MAKE-METHOD-LAMBDA
;;    says that a method function takes 2 arguments: the list of arguments (!)
;;    and the list of next methods. This is awfully inefficient, and useless
;;    (since MAKE-METHOD-LAMBDA is ill-designed anyway). Therefore here we
;;    pass to the function the arguments as-is, and the next methods as
;;    inserted first argument, if needed.
;; 2) Instead of the list of next methods, we pass an effective method that
;;    consists of these next methods. This is more efficient (saves a FUNCALL)
;;    for the simple case of a single applicable method, but is less
;;    efficient (a FUNCALL instead of just a CAR) for longer lists of methods.
;; 3) We don't explicitly pass the generic function to the method during the
;;    invocation. However, for CALL-NEXT-METHOD, NO-NEXT-METHOD and
;;    the generic function must be known. So we have
;;    to store a generic function backpointer in the method.
;; Computes the signature struct (see functions.lisp) for a method's
;; lambda-list: number of required/optional parameters, presence of &rest and
;; &key, the keyword list, and &allow-other-keys. errfunc is called to report
;; malformed lambda-lists.
(defun method-lambda-list-to-signature (lambda-list errfunc)
  (multiple-value-bind (reqvars optvars optinits optsvars rest
                        keyp keywords keyvars keyinits keysvars
                        allowp auxvars auxinits)
      (analyze-lambdalist lambda-list errfunc)
    (declare (ignore optinits optsvars keyvars keyinits keysvars
                     auxvars auxinits))
    (make-signature
      :req-num (length reqvars) :opt-num (length optvars)
      ;; a &key method also accepts a variable suffix, hence rest-p
      :rest-p (or keyp (not (eql rest 0))) :keys-p keyp
      :keywords keywords :allow-p allowp)))
;; Initializer for STANDARD-METHOD instances: validates all initargs
;; (qualifiers, lambda-list, specializers, function/fast-function,
;; documentation), derives/checks the signature, then fills the slots.
;; FIX: the "; Check the specializers." comment below had lost its ";" marker
;; in this copy, leaving bare symbols inside the function body; restored.
(defun initialize-instance-<standard-method> (method &rest args
                                              &key (qualifiers '())
                                                   (lambda-list nil lambda-list-p)
                                                   (specializers nil specializers-p)
                                                   (function nil function-p)
                                                   (documentation nil)
                                                   ((fast-function fast-function) nil fast-function-p)
                                                   ((wants-next-method-p wants-next-method-p) nil)
                                                   ((signature signature) nil signature-p)
                                                   ((gf gf) nil)
                                                   ((from-defgeneric from-defgeneric) nil)
                                                   ((backpointer backpointer) nil)
                                              &allow-other-keys)
  (declare (ignore from-defgeneric backpointer))
  (apply #'initialize-instance-<method> method args) ; == (call-next-method)
  ; Check the qualifiers.
  (unless (proper-list-p qualifiers)
    (error (TEXT "(~S ~S): The ~S argument should be a proper list, not ~S")
           'initialize-instance 'standard-method ':qualifiers qualifiers))
  (unless (notany #'listp qualifiers)
    (error (TEXT "(~S ~S): The qualifiers list should consist of non-NIL atoms, not ~S")
           'initialize-instance 'standard-method qualifiers))
  ; Check the lambda-list and compute the signature from it.
  (unless lambda-list-p
    (error (TEXT "(~S ~S): Missing ~S argument.")
           'initialize-instance 'standard-method ':lambda-list))
  (let ((sig (method-lambda-list-to-signature lambda-list
               #'(lambda (form detail errorstring &rest arguments)
                   (sys::lambda-list-error form detail
                     (TEXT "(~S ~S): Invalid ~S argument: ~?")
                     'initialize-instance 'standard-method ':lambda-list
                     errorstring arguments)))))
    ; Check the signature argument. It is optional; specifying it only has
    ; the purpose of saving memory allocation (by sharing the same signature
    ; for all reader methods and the same signature for all writer methods).
    (if signature-p
      (unless (equalp sig signature)
        (error (TEXT "(~S ~S): Lambda-list ~S and signature ~S are inconsistent.")
               'initialize-instance 'standard-method lambda-list signature))
      (setq signature sig)))
  ; Check the specializers.
  (unless specializers-p
    (error (TEXT "(~S ~S): Missing ~S argument.")
           'initialize-instance 'standard-method ':specializers))
  (unless (proper-list-p specializers)
    (error (TEXT "(~S ~S): The ~S argument should be a proper list, not ~S")
           'initialize-instance 'standard-method ':specializers specializers))
  (dolist (x specializers)
    (unless (or (defined-class-p x) (eql-specializer-p x))
      (if (typep x 'specializer)
        (error (TEXT "(~S ~S): The element ~S of the ~S argument is not yet defined.")
               'initialize-instance 'standard-method x ':specializers)
        (error (TEXT "(~S ~S): The element ~S of the ~S argument is not of type ~S.")
               'initialize-instance 'standard-method x ':specializers 'specializer))))
  (unless (= (length specializers) (sig-req-num signature))
    (error (TEXT "(~S ~S): The lambda list ~S has ~S required arguments, but the specializers list ~S has length ~S.")
           'initialize-instance 'standard-method lambda-list (sig-req-num signature)
           specializers (length specializers)))
  ; Check the function, fast-function and wants-next-method-p.
  (unless (or function-p fast-function-p)
    (error (TEXT "(~S ~S): Missing ~S argument.")
           'initialize-instance 'standard-method ':function))
  (when function-p
    (unless (functionp function)
      (error (TEXT "(~S ~S): The ~S argument should be a function, not ~S")
             'initialize-instance 'standard-method ':function function)))
  (when fast-function-p
    (unless (functionp fast-function)
      (error (TEXT "(~S ~S): The ~S argument should be a function, not ~S")
             'initialize-instance 'standard-method 'fast-function fast-function)))
  (unless (typep wants-next-method-p 'boolean)
    (error (TEXT "(~S ~S): The ~S argument should be a NIL or T, not ~S")
           'initialize-instance 'standard-method 'wants-next-method-p wants-next-method-p))
  (when function-p
    ;; :function overrides fast-function and wants-next-method-p, because it is
    ;; the standardized way (employed by user-defined method classes) to define
    ;; the behaviour of a method.
    (setq fast-function nil
          wants-next-method-p t))
  ; Check the documentation.
  (unless (or (null documentation) (stringp documentation))
    (error (TEXT "(~S ~S): The ~S argument should be a string or NIL, not ~S")
           'initialize-instance 'standard-method ':documentation documentation))
  ; Fill the slots.
  (setf (std-method-fast-function method) fast-function)
  (setf (std-method-wants-next-method-p method) wants-next-method-p)
  (setf (std-method-function method) function)
  (setf (std-method-specializers method) specializers)
  (setf (std-method-qualifiers method) qualifiers)
  (setf (std-method-lambda-list method) lambda-list)
  (setf (std-method-signature method) signature)
  (setf (std-method-documentation method) documentation)
  (setf (std-method-generic-function method) gf)
  method)
;; Bootstrap constructor: allocate and initialize a STANDARD-METHOD without
;; going through the (not yet available) generic MAKE-INSTANCE machinery.
(defun make-instance-<standard-method> (class &rest args
                                        &key &allow-other-keys)
  ;; class = <standard-method>
  ;; Don't add functionality here! This is a preliminary definition that is
  ;; replaced with #'make-instance later.
  (declare (ignore class))
  (let ((method (%allocate-instance <standard-method>)))
    (apply #'initialize-instance-<standard-method> method args)))
;; Prints a method as #<STANDARD-METHOD qualifiers (specializers)>, e.g.
;; #<STANDARD-METHOD :BEFORE (INTEGER)>, or #<STANDARD-METHOD :UNINITIALIZED>
;; when the qualifiers/specializers slots are still unbound.
(defun print-object-<standard-method> (method stream)
  (print-unreadable-object (method stream :type t)
    (if (and (not (eq (sys::%unbound) (std-method-qualifiers method)))
             (not (eq (sys::%unbound) (std-method-specializers method))))
      (progn
        (dolist (q (std-method-qualifiers method))
          (write q :stream stream)
          (write-char #\Space stream))
        (write (mapcar #'specializer-pretty (std-method-specializers method))
               :stream stream))
      (write :uninitialized :stream stream))))
;; Preliminary.
;; During bootstrapping, only <standard-method> instances are used.
;; NOTE(review): the defining line below was lost to extraction damage and has
;; been reconstructed; confirm the function name against upstream
;; clos-method1.lisp.
(predefun make-method-instance (class &rest args ; ABI
                                &key &allow-other-keys)
  (apply #'make-instance-<standard-method> class args))
;; Preliminary MOP accessors for methods; later replaced by generic functions.
(predefun method-function (method)
  (std-method-function-or-substitute method))
(predefun method-qualifiers (method)
  (std-method-qualifiers method))
(predefun method-lambda-list (method)
  (std-method-lambda-list method))
(predefun method-signature (method)
  (std-method-signature method))
(predefun method-specializers (method)
  (std-method-specializers method))
(predefun method-generic-function (method)
  (std-method-generic-function method))
(predefun (setf method-generic-function) (new-gf method)
  (setf (std-method-generic-function method) new-gf))
;;; ---------------------------------------------------------------------------
;; Common superclass of slot reader and writer methods; remembers which
;; direct slot definition the accessor method was generated for.
(defparameter <standard-accessor-method>
  (defclass standard-accessor-method (standard-method)
    (($slot-definition ; direct slot definition responsible for this method
       :type direct-slot-definition
       :accessor %accessor-method-slot-definition))
    (:fixed-slot-locations t)
    (:generic-accessors nil)))
;; Initializer for accessor methods: validates the mandatory :slot-definition
;; initarg on top of the STANDARD-METHOD initialization.
(defun initialize-instance-<standard-accessor-method> (method &rest args
                                                       &key (slot-definition nil slot-definition-p)
                                                       &allow-other-keys)
  (apply #'initialize-instance-<standard-method> method args) ; == (call-next-method)
  ; Check the slot-definition.
  (unless slot-definition-p
    (error (TEXT "(~S ~S): Missing ~S argument.")
           'initialize-instance 'standard-accessor-method ':slot-definition))
  (unless (typep slot-definition 'direct-slot-definition)
    (error (TEXT "(~S ~S): Argument ~S is not of type ~S.")
           'initialize-instance 'standard-accessor-method ':slot-definition
           'direct-slot-definition))
  ; Fill the slots.
  (setf (%accessor-method-slot-definition method) slot-definition)
  method)
;;; ---------------------------------------------------------------------------
;; Slot reader methods; no slots of their own beyond standard-accessor-method.
(defparameter <standard-reader-method>
  (defclass standard-reader-method (standard-accessor-method)
    ()
    (:fixed-slot-locations t)))
;; Bootstrap constructor for STANDARD-READER-METHOD instances.
(defun make-instance-<standard-reader-method> (class &rest args
                                               &key &allow-other-keys)
  ;; class = <standard-reader-method>
  ;; Don't add functionality here! This is a preliminary definition that is
  ;; replaced with #'make-instance later.
  (declare (ignore class))
  (let ((method (%allocate-instance <standard-reader-method>)))
    (apply #'initialize-instance-<standard-accessor-method> method args)))
;;; ---------------------------------------------------------------------------
;; Slot writer methods; no slots of their own beyond standard-accessor-method.
(defparameter <standard-writer-method>
  (defclass standard-writer-method (standard-accessor-method)
    ()
    (:fixed-slot-locations t)))
;; Bootstrap constructor for STANDARD-WRITER-METHOD instances.
(defun make-instance-<standard-writer-method> (class &rest args
                                               &key &allow-other-keys)
  ;; class = <standard-writer-method>
  ;; Don't add functionality here! This is a preliminary definition that is
  ;; replaced with #'make-instance later.
  (declare (ignore class))
  (let ((method (%allocate-instance <standard-writer-method>)))
    (apply #'initialize-instance-<standard-accessor-method> method args)))
;;; ---------------------------------------------------------------------------
| null | https://raw.githubusercontent.com/rurban/clisp/75ed2995ff8f5364bcc18727cde9438cca4e7c2c/src/clos-method1.lisp | lisp | ---------------------------------------------------------------------------
== (call-next-method)
Bootstrapping: Simulate the effect of #'%initialize-instance.
Fill the slots.
Fill the backpointer. This is needed for NO-NEXT-METHOD to work: When
CALL-NEXT-METHOD is called from within the method function without a next
method being available, the method function must call NO-NEXT-METHOD with
the method object as argument. But since the method function is called
with the argument list and the remaining methods list as arguments, it
cannot know about the method object to which it belongs. We solve this
paradox by constructing a backpointer cons that the method function
has access to and that points back to the method object after it has been
initialized.
---------------------------------------------------------------------------
the function with fast calling conventions, i.e.
argument list (&rest arguments) or
(next-methods-function &rest arguments), depending
on wants-next-method-p
flag, if the NEXT-METHOD (as function with all
argument (= NIL for :BEFORE- and :AFTER-methods)
the function with slow calling conventions, i.e.
argument list (arguments next-methods-list)
eql-specializers
list of non-NIL atoms, e.g. (:before)
signature struct (see functions.lisp)
the generic function, which this method belongs to
(only for the purpose of CALL-NEXT-METHOD and
NO-NEXT-METHOD)
Note about the argument passing convention for methods:
and the list of next methods. This is awfully inefficient, and useless
(since MAKE-METHOD-LAMBDA is ill-designed anyway). Therefore here we
pass to the function the arguments as-is, and the next methods as
consists of these next methods. This is more efficient (saves a FUNCALL)
for the simple case of a single applicable method, but is less
efficient (a FUNCALL instead of just a CAR) for longer lists of methods.
invocation. However, for CALL-NEXT-METHOD, NO-NEXT-METHOD and
to store a generic function backpointer in the method.
== (call-next-method)
Check the qualifiers.
Check the lambda-list and compute the signature from it.
Check the signature argument. It is optional; specifying it only has
the purpose of saving memory allocation (by sharing the same signature
for all reader methods and the same signature for all writer methods).
Check the function, fast-function and wants-next-method-p.
:function overrides fast-function and wants-next-method-p, because it is
the standardized way (employed by user-defined method classes) to define
the behaviour of a method.
Check the documentation.
Fill the slots.
class = <standard-method>
Don't add functionality here! This is a preliminary definition that is
replaced with #'make-instance later.
Preliminary.
During bootstrapping, only <standard-method> instances are used.
---------------------------------------------------------------------------
direct slot definition responsible for this method
== (call-next-method)
Check the slot-definition.
Fill the slots.
---------------------------------------------------------------------------
class = <standard-reader-method>
Don't add functionality here! This is a preliminary definition that is
replaced with #'make-instance later.
---------------------------------------------------------------------------
class = <standard-writer-method>
Don't add functionality here! This is a preliminary definition that is
replaced with #'make-instance later.
--------------------------------------------------------------------------- | Common Lisp Object System for CLISP : Methods
21.8.1993 - 2004
1998 - 2004 , 2007 , 2010 , 2017
German comments translated into English : 2002 - 04 - 08
(in-package "CLOS")
(defparameter <method>
(defclass method (standard-stablehash metaobject)
flag , if this method comes from a
:type boolean
:accessor method-from-defgeneric))
(:fixed-slot-locations t)
(:generic-accessors nil)))
;; Shared initialization for all <method> instances.
;; FROM-DEFGENERIC records whether the method stems from a DEFGENERIC form.
;; BACKPOINTER, when supplied, is a cons cell whose CAR is set to the method
;; object: the method function was created before the method object existed,
;; and reaches its own method through this cons (see the backpointer
;; discussion at the top of the file).
(defun initialize-instance-<method> (method &rest args
                                     &key ((from-defgeneric from-defgeneric) nil)
                                          ((backpointer backpointer) nil backpointer-p)
                                     &allow-other-keys)
  ;; During bootstrapping the stablehash machinery is not yet available;
  ;; initialize the hash slot only once the core classes are finished.
  (if *classes-finished*
    (apply #'shared-initialize-<standard-stablehash> method 't args))
  (setf (method-from-defgeneric method) from-defgeneric)
  ;; Patch the backpointer cons so the already-compiled method function can
  ;; refer back to this (now initialized) method object.
  (when backpointer-p
    (setf (car backpointer) method))
  method)
ABI
(defclass standard-method (method)
:type (or null function)
:accessor std-method-fast-function)
arguments ) resp . NIL is to be passed as first
:type boolean
:accessor std-method-wants-next-method-p)
:type (or null function)
:accessor std-method-function)
list of specializers , e.g. classes or
:type list
:accessor std-method-specializers)
:type list
:accessor std-method-qualifiers)
lambda list without specializers
:type list
:accessor std-method-lambda-list)
:type (simple-vector 6)
:accessor std-method-signature)
string or NIL
:type (or string null)
:accessor std-method-documentation)
:type (or null generic-function)
:accessor std-method-generic-function))
(:fixed-slot-locations t)
(:generic-accessors nil)))
1 ) The MOP description of COMPUTE - EFFECTIVE - METHOD and MAKE - METHOD - LAMBDA
says that a method function takes 2 arguments : the list of arguments ( ! )
inserted first argument , if needed .
2 ) Instead of the list of next methods , we pass an effective method that
3 ) We do n't explicitly pass the generic function to the method during the
the generic function must be known . So we have
;; Convert a method LAMBDA-LIST (without specializers) into a signature
;; structure (see functions.lisp).  ERRFUNC is called as
;; (form detail errorstring . arguments) when the lambda list is malformed.
;; Only the arities and the keyword names matter for dispatch; init forms,
;; supplied-p variables and &AUX bindings are ignored.
(defun method-lambda-list-to-signature (lambda-list errfunc)
  (multiple-value-bind (reqvars optvars optinits optsvars rest
                        keyp keywords keyvars keyinits keysvars
                        allowp auxvars auxinits)
      (analyze-lambdalist lambda-list errfunc)
    (declare (ignore optinits optsvars keyvars keyinits keysvars
                     auxvars auxinits))
    ;; rest is the &REST variable, or 0 if absent (per analyze-lambdalist);
    ;; &KEY implies a variable-length suffix, hence also rest-p.
    (make-signature
      :req-num (length reqvars) :opt-num (length optvars)
      :rest-p (or keyp (not (eql rest 0))) :keys-p keyp
      :keywords keywords :allow-p allowp)))
;; Preliminary initializer for <standard-method> instances, with full
;; argument validation.  The GF keyword stores the owning generic function
;; (needed by CALL-NEXT-METHOD and NO-NEXT-METHOD); :FUNCTION overrides
;; FAST-FUNCTION/WANTS-NEXT-METHOD-P, because :FUNCTION is the standardized
;; (MOP) way for user-defined method classes to define method behaviour.
(defun initialize-instance-<standard-method> (method &rest args
                                              &key (qualifiers '())
                                                   (lambda-list nil lambda-list-p)
                                                   (specializers nil specializers-p)
                                                   (function nil function-p)
                                                   (documentation nil)
                                                   ((fast-function fast-function) nil fast-function-p)
                                                   ((wants-next-method-p wants-next-method-p) nil)
                                                   ((signature signature) nil signature-p)
                                                   ((gf gf) nil)
                                                   ((from-defgeneric from-defgeneric) nil)
                                                   ((backpointer backpointer) nil)
                                              &allow-other-keys)
  (declare (ignore from-defgeneric backpointer))
  ;; Check the qualifiers: a proper list of non-NIL atoms, e.g. (:before).
  (unless (proper-list-p qualifiers)
    (error (TEXT "(~S ~S): The ~S argument should be a proper list, not ~S")
           'initialize-instance 'standard-method ':qualifiers qualifiers))
  (unless (notany #'listp qualifiers)
    (error (TEXT "(~S ~S): The qualifiers list should consist of non-NIL atoms, not ~S")
           'initialize-instance 'standard-method qualifiers))
  ;; Check the lambda-list and compute the signature from it.
  (unless lambda-list-p
    (error (TEXT "(~S ~S): Missing ~S argument.")
           'initialize-instance 'standard-method ':lambda-list))
  (let ((sig (method-lambda-list-to-signature lambda-list
               #'(lambda (form detail errorstring &rest arguments)
                   (sys::lambda-list-error form detail
                     (TEXT "(~S ~S): Invalid ~S argument: ~?")
                     'initialize-instance 'standard-method ':lambda-list
                     errorstring arguments)))))
    ;; The signature argument is optional; passing it only saves memory
    ;; allocation (e.g. sharing one signature among all reader methods).
    ;; If supplied, it must agree with the one derived from the lambda-list.
    (if signature-p
      (unless (equalp sig signature)
        (error (TEXT "(~S ~S): Lambda-list ~S and signature ~S are inconsistent.")
               'initialize-instance 'standard-method lambda-list signature))
      (setq signature sig)))
  ;; Check the specializers.
  (unless specializers-p
    (error (TEXT "(~S ~S): Missing ~S argument.")
           'initialize-instance 'standard-method ':specializers))
  (unless (proper-list-p specializers)
    (error (TEXT "(~S ~S): The ~S argument should be a proper list, not ~S")
           'initialize-instance 'standard-method ':specializers specializers))
  (dolist (x specializers)
    (unless (or (defined-class-p x) (eql-specializer-p x))
      (if (typep x 'specializer)
        (error (TEXT "(~S ~S): The element ~S of the ~S argument is not yet defined.")
               'initialize-instance 'standard-method x ':specializers)
        (error (TEXT "(~S ~S): The element ~S of the ~S argument is not of type ~S.")
               'initialize-instance 'standard-method x ':specializers 'specializer))))
  ;; One specializer per required argument.
  (unless (= (length specializers) (sig-req-num signature))
    (error (TEXT "(~S ~S): The lambda list ~S has ~S required arguments, but the specializers list ~S has length ~S.")
           'initialize-instance 'standard-method lambda-list (sig-req-num signature)
           specializers (length specializers)))
  ;; Check the function, fast-function and wants-next-method-p.
  (unless (or function-p fast-function-p)
    (error (TEXT "(~S ~S): Missing ~S argument.")
           'initialize-instance 'standard-method ':function))
  (when function-p
    (unless (functionp function)
      (error (TEXT "(~S ~S): The ~S argument should be a function, not ~S")
             'initialize-instance 'standard-method ':function function)))
  (when fast-function-p
    (unless (functionp fast-function)
      (error (TEXT "(~S ~S): The ~S argument should be a function, not ~S")
             'initialize-instance 'standard-method 'fast-function fast-function)))
  (unless (typep wants-next-method-p 'boolean)
    (error (TEXT "(~S ~S): The ~S argument should be a NIL or T, not ~S")
           'initialize-instance 'standard-method 'wants-next-method-p wants-next-method-p))
  ;; :FUNCTION wins over the fast calling convention.
  (when function-p
    (setq fast-function nil
          wants-next-method-p t))
  ;; Check the documentation.
  (unless (or (null documentation) (stringp documentation))
    (error (TEXT "(~S ~S): The ~S argument should be a string or NIL, not ~S")
           'initialize-instance 'standard-method ':documentation documentation))
  ;; Fill the slots.
  (setf (std-method-fast-function method) fast-function)
  (setf (std-method-wants-next-method-p method) wants-next-method-p)
  (setf (std-method-function method) function)
  (setf (std-method-specializers method) specializers)
  (setf (std-method-qualifiers method) qualifiers)
  (setf (std-method-lambda-list method) lambda-list)
  (setf (std-method-signature method) signature)
  (setf (std-method-documentation method) documentation)
  (setf (std-method-generic-function method) gf)
  method)
;; Preliminary constructor: allocate a raw <standard-method> instance and
;; run the preliminary initializer on it.  CLASS is accepted only for
;; signature compatibility with MAKE-INSTANCE and is ignored.
(defun make-instance-<standard-method> (class &rest args
                                        &key &allow-other-keys)
  (declare (ignore class))
  (apply #'initialize-instance-<standard-method>
         (%allocate-instance <standard-method>)
         args))
;; Print METHOD unreadably as #<STANDARD-METHOD qualifiers (specializers)>,
;; or as #<STANDARD-METHOD :UNINITIALIZED> while the qualifiers or
;; specializers slots are still unbound.
(defun print-object-<standard-method> (method stream)
  (print-unreadable-object (method stream :type t)
    (let ((qualifiers (std-method-qualifiers method))
          (specializers (std-method-specializers method)))
      (cond ((or (eq (sys::%unbound) qualifiers)
                 (eq (sys::%unbound) specializers))
             (write :uninitialized :stream stream))
            (t
             ;; Qualifiers first, each followed by a space, then the
             ;; prettified specializer list.
             (dolist (q qualifiers)
               (write q :stream stream)
               (write-char #\Space stream))
             (write (mapcar #'specializer-pretty specializers)
                    :stream stream))))))
ABI
&key &allow-other-keys)
(apply #'make-instance-<standard-method> class args))
;; Preliminary MOP accessors for methods.  These PREDEFUNs are plain
;; functions that are later superseded by generic functions of the same
;; names; they simply forward to the <standard-method> slot accessors.
(predefun method-function (method)
  (std-method-function-or-substitute method))
(predefun method-qualifiers (method)
  (std-method-qualifiers method))
(predefun method-lambda-list (method)
  (std-method-lambda-list method))
(predefun method-signature (method)
  (std-method-signature method))
(predefun method-specializers (method)
  (std-method-specializers method))
(predefun method-generic-function (method)
  (std-method-generic-function method))
(predefun (setf method-generic-function) (new-gf method)
  (setf (std-method-generic-function method) new-gf))
(defparameter <standard-accessor-method>
(defclass standard-accessor-method (standard-method)
:type direct-slot-definition
:accessor %accessor-method-slot-definition))
(:fixed-slot-locations t)
(:generic-accessors nil)))
;; Preliminary initializer for accessor (reader/writer) methods: validates
;; and stores the direct slot definition this method is responsible for.
;; NOTE(review): the slots inherited from <standard-method> are not filled
;; here -- confirm how/where callers initialize them.
(defun initialize-instance-<standard-accessor-method> (method &rest args
                                                       &key (slot-definition nil slot-definition-p)
                                                       &allow-other-keys)
  (unless slot-definition-p
    (error (TEXT "(~S ~S): Missing ~S argument.")
           'initialize-instance 'standard-accessor-method ':slot-definition))
  (unless (typep slot-definition 'direct-slot-definition)
    (error (TEXT "(~S ~S): Argument ~S is not of type ~S.")
           'initialize-instance 'standard-accessor-method ':slot-definition
           'direct-slot-definition))
  (setf (%accessor-method-slot-definition method) slot-definition)
  method)
;; class = <standard-reader-method>: accessor method generated for a slot
;; :reader/:accessor option.  Adds no slots of its own.
(defparameter <standard-reader-method>
  (defclass standard-reader-method (standard-accessor-method)
    ()
    (:fixed-slot-locations t)))
;; Preliminary constructor for <standard-reader-method> instances; CLASS is
;; ignored (kept only for MAKE-INSTANCE signature compatibility).
(defun make-instance-<standard-reader-method> (class &rest args
                                               &key &allow-other-keys)
  (declare (ignore class))
  (apply #'initialize-instance-<standard-accessor-method>
         (%allocate-instance <standard-reader-method>)
         args))
;; class = <standard-writer-method>: accessor method generated for a slot
;; :writer/:accessor option.  Adds no slots of its own.
(defparameter <standard-writer-method>
  (defclass standard-writer-method (standard-accessor-method)
    ()
    (:fixed-slot-locations t)))
;; Preliminary constructor for <standard-writer-method> instances; CLASS is
;; ignored (kept only for MAKE-INSTANCE signature compatibility).
(defun make-instance-<standard-writer-method> (class &rest args
                                               &key &allow-other-keys)
  (declare (ignore class))
  (apply #'initialize-instance-<standard-accessor-method>
         (%allocate-instance <standard-writer-method>)
         args))
|
b5c436fc68c4e2c96a515c14dcaab00d4a9b0ab67110b3206e805df2771e0a2e | verement/etamoo | Network.hs |
{-# LANGUAGE OverloadedStrings #-}
module MOO.Network (
Point(..)
, Listener(..)
, HostName
, PortNumber
, value2point
, point2value
, createListener
, listen
, unlisten
, shutdownListeners
) where
import Control.Applicative ((<$>))
import Control.Concurrent.STM (STM, TVar, atomically, modifyTVar, readTVarIO,
readTVar, writeTVar)
import Control.Exception (try, finally)
import Control.Monad (when)
import Data.Monoid ((<>))
import Data.Text (Text)
import System.IO.Error (isPermissionError)
import MOO.Connection (connectionHandler)
import MOO.Network.TCP (HostName, PortNumber, createTCPListener)
import MOO.Object
import {-# SOURCE #-} MOO.Task
import MOO.Types
import qualified Data.Map as M
import qualified Data.Text as T
-- | A network endpoint the server can listen on.  Currently only TCP,
-- with an optional bind address and a port number.
data Point = TCP (Maybe HostName) PortNumber
           deriving (Eq)

-- NOTE(review): ordering (and hence Map keying) considers only the port,
-- so two points differing only in bind address compare EQ even though (==)
-- distinguishes them -- presumably one listener per port; confirm.
instance Ord Point where
  TCP _ port1 `compare` TCP _ port2 = port1 `compare` port2
-- | A registered listener: the object handling connections, the canonical
-- point listened on, whether server messages are printed, and an IO action
-- that shuts the listener down.
data Listener = Listener {
    listenerObject :: ObjId
  , listenerPoint :: Point
  , listenerPrintMessages :: Bool
  , listenerCancel :: IO ()
  }

-- | Default listener template; the transport-specific creation code
-- overrides these fields (see 'createListener').
initListener = Listener {
    listenerObject = systemObject
  , listenerPoint = TCP Nothing 0
  , listenerPrintMessages = True
  , listenerCancel = return ()
  }
-- | Translate a MOO value naming a listen point into a 'Point'.  An
-- integer denotes a TCP port, bound at the server's configured bind
-- address; any other value raises E_TYPE.
value2point :: Value -> MOO Point
value2point value = getWorld >>= \world ->
  case value of
    Int port -> return (TCP (bindAddress world) (fromIntegral port))
    _        -> raise E_TYPE
-- | Render a 'Point' as the MOO value form used by listen()/unlisten():
-- the TCP port as an integer.
point2value :: Point -> Value
point2value (TCP _ port) = Int (fromIntegral port)
-- | Human-readable description of a 'Point' for server log messages.
point2text :: Point -> Text
point2text (TCP _ port) = "port " <> T.pack (show port)
-- | Start listening on POINT on behalf of OBJECT, register the listener in
-- the world's listener map, and log a LISTEN event.  The returned
-- listener's cancel action additionally logs an UNLISTEN event.
createListener :: TVar World -> ObjId -> Point -> Bool -> IO Listener
createListener world' object point printMessages = do
  let listener = initListener {
          listenerObject = object
        , listenerPoint = point
        , listenerPrintMessages = printMessages
        }
      handler = connectionHandler world' object printMessages
  -- The transport fills in the canonical point and the cancel action
  -- (canonicalization details live in MOO.Network.TCP -- e.g. presumably
  -- the actual port when 0 was requested; confirm there).
  listener <- case point of
    TCP{} -> createTCPListener listener handler
  world <- readTVarIO world'
  let canon = listenerPoint listener
      who = toText (Obj object)
      what = " listening on " <> point2text canon
      listening = "LISTEN: " <> who <> " now" <> what
      notListening = "UNLISTEN: " <> who <> " no longer" <> what
      logUnlisten = atomically $ writeLog world notListening
      -- Chain the UNLISTEN log onto the transport's own cancel action.
      listener' = listener { listenerCancel = listenerCancel listener
                                              `finally` logUnlisten }
  -- Log and register under the canonical point in one transaction.
  atomically $ do
    writeLog world listening
    modifyTVar world' $ \world -> world {
      listeners = M.insert canon listener' (listeners world) }
  return listener'
-- | Implement the listen() builtin: refuse a point that is already
-- listened on (E_INVARG), otherwise create the listener via 'requestIO'.
-- OS-level permission failures map to E_PERM, any other IOError to
-- E_QUOTA.  Returns the canonical point actually listened on.
listen :: ObjId -> Point -> Bool -> MOO Point
listen object point printMessages = do
  world <- getWorld
  when (point `M.member` listeners world) $ raise E_INVARG
  world' <- getWorld'
  result <- requestIO $ try $ createListener world' object point printMessages
  case result of
    Left err | isPermissionError err -> raise E_PERM
             | otherwise -> raise E_QUOTA
    Right listener -> return (listenerPoint listener)
-- | Implement the unlisten() builtin: remove the listener for POINT from
-- the world map and defer its cancel action via 'delayIO'; an unknown
-- point raises E_INVARG.
unlisten :: Point -> MOO ()
unlisten point = do
  world <- getWorld
  case point `M.lookup` listeners world of
    Just Listener { listenerCancel = cancelListener } -> do
      putWorld world { listeners = M.delete point (listeners world) }
      delayIO cancelListener
    Nothing -> raise E_INVARG
-- | Atomically remove every listener from the world, returning an IO
-- action that cancels each removed listener (to be run by the caller
-- outside the transaction).
shutdownListeners :: TVar World -> STM (IO ())
shutdownListeners world' = do
  world <- readTVar world'
  writeTVar world' world { listeners = M.empty }
  -- M.fold is deprecated in containers; mapM_ over M.elems sequences the
  -- same cancel actions in the same (ascending-key) order.
  return $ mapM_ listenerCancel $ M.elems (listeners world)
| null | https://raw.githubusercontent.com/verement/etamoo/af65e2581ab5d093c9490f43692ef89bafc2efc1/src/MOO/Network.hs | haskell | # LANGUAGE OverloadedStrings #
# SOURCE # |
module MOO.Network (
Point(..)
, Listener(..)
, HostName
, PortNumber
, value2point
, point2value
, createListener
, listen
, unlisten
, shutdownListeners
) where
import Control.Applicative ((<$>))
import Control.Concurrent.STM (STM, TVar, atomically, modifyTVar, readTVarIO,
readTVar, writeTVar)
import Control.Exception (try, finally)
import Control.Monad (when)
import Data.Monoid ((<>))
import Data.Text (Text)
import System.IO.Error (isPermissionError)
import MOO.Connection (connectionHandler)
import MOO.Network.TCP (HostName, PortNumber, createTCPListener)
import MOO.Object
import MOO.Types
import qualified Data.Map as M
import qualified Data.Text as T
data Point = TCP (Maybe HostName) PortNumber
deriving (Eq)
instance Ord Point where
TCP _ port1 `compare` TCP _ port2 = port1 `compare` port2
data Listener = Listener {
listenerObject :: ObjId
, listenerPoint :: Point
, listenerPrintMessages :: Bool
, listenerCancel :: IO ()
}
initListener = Listener {
listenerObject = systemObject
, listenerPoint = TCP Nothing 0
, listenerPrintMessages = True
, listenerCancel = return ()
}
value2point :: Value -> MOO Point
value2point value = do
world <- getWorld
case value of
Int port -> return $ TCP (bindAddress world) (fromIntegral port)
_ -> raise E_TYPE
-- | Render a 'Point' as the MOO value form used by listen()/unlisten():
-- the TCP port as an integer.
point2value :: Point -> Value
point2value (TCP _ port) = Int (fromIntegral port)
-- | Human-readable description of a 'Point' for server log messages.
point2text :: Point -> Text
point2text (TCP _ port) = "port " <> T.pack (show port)
createListener :: TVar World -> ObjId -> Point -> Bool -> IO Listener
createListener world' object point printMessages = do
let listener = initListener {
listenerObject = object
, listenerPoint = point
, listenerPrintMessages = printMessages
}
handler = connectionHandler world' object printMessages
listener <- case point of
TCP{} -> createTCPListener listener handler
world <- readTVarIO world'
let canon = listenerPoint listener
who = toText (Obj object)
what = " listening on " <> point2text canon
listening = "LISTEN: " <> who <> " now" <> what
notListening = "UNLISTEN: " <> who <> " no longer" <> what
logUnlisten = atomically $ writeLog world notListening
listener' = listener { listenerCancel = listenerCancel listener
`finally` logUnlisten }
atomically $ do
writeLog world listening
modifyTVar world' $ \world -> world {
listeners = M.insert canon listener' (listeners world) }
return listener'
listen :: ObjId -> Point -> Bool -> MOO Point
listen object point printMessages = do
world <- getWorld
when (point `M.member` listeners world) $ raise E_INVARG
world' <- getWorld'
result <- requestIO $ try $ createListener world' object point printMessages
case result of
Left err | isPermissionError err -> raise E_PERM
| otherwise -> raise E_QUOTA
Right listener -> return (listenerPoint listener)
unlisten :: Point -> MOO ()
unlisten point = do
world <- getWorld
case point `M.lookup` listeners world of
Just Listener { listenerCancel = cancelListener } -> do
putWorld world { listeners = M.delete point (listeners world) }
delayIO cancelListener
Nothing -> raise E_INVARG
-- | Atomically remove every listener from the world, returning an IO
-- action that cancels each removed listener (to be run by the caller
-- outside the transaction).
shutdownListeners :: TVar World -> STM (IO ())
shutdownListeners world' = do
  world <- readTVar world'
  writeTVar world' world { listeners = M.empty }
  -- M.fold is deprecated in containers; mapM_ over M.elems sequences the
  -- same cancel actions in the same (ascending-key) order.
  return $ mapM_ listenerCancel $ M.elems (listeners world)
|
cb3d573c98efc4727fd3222553f5ebc80fe2b3a049cca35541298a2e03427612 | rd--/hsc3 | decay.help.hs | -- decay ; as envelope
let n = pinkNoiseId 'α' ar + sinOsc ar 11000 0
s = impulse ar (xLine kr 1 50 20 RemoveSynth) 0.25
in decay s 0.05 * n
---- ; drawings
Sound.Sc3.Plot.plot_ugen1 0.05 (decay (impulse ar 1 0) 0.01)
| null | https://raw.githubusercontent.com/rd--/hsc3/024d45b6b5166e5cd3f0142fbf65aeb6ef642d46/Help/Ugen/decay.help.hs | haskell | decay ; as envelope
-- ; drawings | let n = pinkNoiseId 'α' ar + sinOsc ar 11000 0
s = impulse ar (xLine kr 1 50 20 RemoveSynth) 0.25
in decay s 0.05 * n
Sound.Sc3.Plot.plot_ugen1 0.05 (decay (impulse ar 1 0) 0.01)
|
d23ed5db902b3691a8c8ec76b3085c604d3bedaa5878a181963d7d6f4e9f8d62 | alesaccoia/festival_flinger | cmu_us_clb_lexicon.scm | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; ;;;
Carnegie Mellon University ; ; ;
and and ; ; ;
Copyright ( c ) 1998 - 2000 ; ; ;
All Rights Reserved . ; ; ;
;;; ;;;
;;; Permission is hereby granted, free of charge, to use and distribute ;;;
;;; this software and its documentation without restriction, including ;;;
;;; without limitation the rights to use, copy, modify, merge, publish, ;;;
;;; distribute, sublicense, and/or sell copies of this work, and to ;;;
;;; permit persons to whom this work is furnished to do so, subject to ;;;
;;; the following conditions: ;;;
1 . The code must retain the above copyright notice , this list of ; ; ;
;;; conditions and the following disclaimer. ;;;
2 . Any modifications must be clearly marked as such . ; ; ;
3 . Original authors ' names are not deleted . ; ; ;
4 . The authors ' names are not used to endorse or promote products ; ; ;
;;; derived from this software without specific prior written ;;;
;;; permission. ;;;
;;; ;;;
CARNEGIE MELLON UNIVERSITY AND THE CONTRIBUTORS TO THIS WORK ; ; ;
;;; DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;;
;;; ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;;
SHALL CARNEGIE MELLON UNIVERSITY NOR THE CONTRIBUTORS BE LIABLE ; ; ;
;;; FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;;
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , IN ; ; ;
;;; AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;;
;;; ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;;
;;; THIS SOFTWARE. ;;;
;;; ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
CMU lexicon for US English
;;;
;;; Load any necessary files here
(require 'postlex)
(setup_cmu_lex)
(define (cmu_us_clb::select_lexicon)
  "(cmu_us_clb::select_lexicon)
Set up the CMU lexicon for US English."
  (lex.select "cmu")
  ;; Post lexical rules: only the apostrophe-s check is installed.
  (set! postlex_rules_hooks (list postlex_apos_s_check))
  (set! postlex_vowel_reduce_cart_tree nil) ; no reduction
  )
(define (cmu_us_clb::reset_lexicon)
  "(cmu_us_clb::reset_lexicon)
Reset lexicon information."
  ;; No teardown is performed; simply return t.
  t
  )
(provide 'cmu_us_clb_lexicon)
| null | https://raw.githubusercontent.com/alesaccoia/festival_flinger/87345aad3a3230751a8ff479f74ba1676217accd/lib/voices/us/cmu_us_clb_cg/festvox/cmu_us_clb_lexicon.scm | scheme |
;;;
; ;
; ;
; ;
; ;
;;;
Permission is hereby granted, free of charge, to use and distribute ;;;
this software and its documentation without restriction, including ;;;
without limitation the rights to use, copy, modify, merge, publish, ;;;
distribute, sublicense, and/or sell copies of this work, and to ;;;
permit persons to whom this work is furnished to do so, subject to ;;;
the following conditions: ;;;
; ;
conditions and the following disclaimer. ;;;
; ;
; ;
; ;
derived from this software without specific prior written ;;;
permission. ;;;
;;;
; ;
DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;;
ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;;
; ;
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;;
; ;
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;;
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;;
THIS SOFTWARE. ;;;
;;;
Load any necessary files here
no reduction | CMU lexicon for US English
(require 'postlex)
(setup_cmu_lex)
(define (cmu_us_clb::select_lexicon)
"(cmu_us_clb::select_lexicon)
Set up the CMU lexicon for US English."
(lex.select "cmu")
Post lexical rules
(set! postlex_rules_hooks (list postlex_apos_s_check))
)
(define (cmu_us_clb::reset_lexicon)
"(cmu_us_clb::reset_lexicon)
Reset lexicon information."
t
)
(provide 'cmu_us_clb_lexicon)
|
ba06dc783f65cf0266f4a7dbf7d243b50deec605904c2f800823a2db27f8929a | utkarshkukreti/reaml | 3.ml | let[@reaml.component "Foo"] _foo () =
if true
then (
let[@reaml] _count, _setCount = Reaml.useState () in
())
else ()
| null | https://raw.githubusercontent.com/utkarshkukreti/reaml/5640a36293ba2a765171225deddb644f4d6c5d26/ppx/tests/failing/3.ml | ocaml | let[@reaml.component "Foo"] _foo () =
if true
then (
let[@reaml] _count, _setCount = Reaml.useState () in
())
else ()
| |
cd23ac5641b8e8cf686b64e23ff8b485d3c529142222c4d90c361edf2d559c64 | esl/MongooseIM | mongoose_graphql_token_admin_mutation.erl | -module(mongoose_graphql_token_admin_mutation).
-behaviour(mongoose_graphql).
-export([execute/4]).
-ignore_xref([execute/4]).
-include("../mongoose_graphql_types.hrl").
-import(mongoose_graphql_helper, [make_error/2]).
-type token_info() :: map().
%% GraphQL resolver entry point for the admin "token" mutation category.
%% Dispatches on the field name; both fields take a "user" argument that
%% the coercion layer supplies as a jid:jid() (see the specs below).
execute(_Ctx, token, <<"requestToken">>, #{<<"user">> := JID}) ->
    request_token(JID);
execute(_Ctx, token, <<"revokeToken">>, #{<<"user">> := JID}) ->
    revoke_token(JID).
-spec request_token(jid:jid()) -> {ok, token_info()} | {error, resolver_error()}.
%% Create a fresh auth token for the user, or map the backend error into a
%% resolver error annotated with the user's JID.
request_token(JID) ->
    case mod_auth_token_api:create_token(JID) of
        {ok, _} = Result -> Result;
        Error -> make_error(Error, #{user => JID})
    end.
-spec revoke_token(jid:jid()) -> {ok, string()} | {error, resolver_error()}.
%% Revoke the user's auth token, or map the backend error into a resolver
%% error annotated with the user's JID.
revoke_token(JID) ->
    case mod_auth_token_api:revoke_token_command(JID) of
        {ok, _} = Result -> Result;
        Error -> make_error(Error, #{user => JID})
    end.
| null | https://raw.githubusercontent.com/esl/MongooseIM/d5abdd699fbdd81f89d87341745b112e87a80b1e/src/graphql/admin/mongoose_graphql_token_admin_mutation.erl | erlang | -module(mongoose_graphql_token_admin_mutation).
-behaviour(mongoose_graphql).
-export([execute/4]).
-ignore_xref([execute/4]).
-include("../mongoose_graphql_types.hrl").
-import(mongoose_graphql_helper, [make_error/2]).
-type token_info() :: map().
execute(_Ctx, token, <<"requestToken">>, #{<<"user">> := JID}) ->
request_token(JID);
execute(_Ctx, token, <<"revokeToken">>, #{<<"user">> := JID}) ->
revoke_token(JID).
-spec request_token(jid:jid()) -> {ok, token_info()} | {error, resolver_error()}.
request_token(JID) ->
case mod_auth_token_api:create_token(JID) of
{ok, _} = Result -> Result;
Error -> make_error(Error, #{user => JID})
end.
-spec revoke_token(jid:jid()) -> {ok, string()} | {error, resolver_error()}.
revoke_token(JID) ->
case mod_auth_token_api:revoke_token_command(JID) of
{ok, _} = Result -> Result;
Error -> make_error(Error, #{user => JID})
end.
| |
6fe496ad5025e8cd0920973e5ec22a1a357ae045a202e4e7331997487de70f3b | zkat/chanl | futures.lisp | ;;;; -*- Mode: lisp; indent-tabs-mode: nil -*-
;;;;
;;;; ChanL example implementation of doing concurrency using futures instead of channels.
;;;;
Copyright © 2009 ,
;;;;
;;;; This file is derived from 'Eager Future'; see the file COPYRIGHT, in the top directory,
;;;; for the license information for that project.
;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package :chanl.examples)
This example is similar to Eager Future 's API .
;;; It demonstrates the value of channels as concurrency primitives.
;; A future is just a handle on a one-slot buffered channel; the result of
;; the computation (as a list of values) is cached in that channel.
(defstruct (future (:print-object (lambda (f s) (print-unreadable-object (f s :type t :identity t)))))
  (channel (make-instance 'buffered-channel :size 1) :read-only t))
;; Signaled by YIELD when the future's computation itself signaled a
;; condition: CAUSE is the original condition, FUTURE the failed future.
(define-condition execution-error (error)
  ((cause :initarg :cause :reader execution-error-cause)
   (future :initarg :future :reader execution-error-future))
  (:report (lambda (condition stream)
             (format stream "~A errored during execution.~%Cause: ~A"
                     (execution-error-future condition)
                     (execution-error-cause condition)))))
;; SENTINEL is an uninterned marker shared only by YIELD and FUTURE-CALL:
;; a result list whose CAR is SENTINEL carries a condition (in its CDR)
;; instead of values.
(let ((sentinel (make-symbol (format nil "The future has performed an illegal ~
                                          operation and will have to be shut down"))))
  (defun yield (future)
    "Yield the values returned by FUTURE. If FUTURE isn't ready to yield yet, block until it is."
    (let ((yielded-values (recv (future-channel future))))
      ;; Re-send so later YIELDs (from any thread) see the cached result;
      ;; the size-1 buffered channel thus holds the value permanently.
      (send (future-channel future) yielded-values)
      (if (eq sentinel (car yielded-values))
          (error (cdr yielded-values))
          (values-list yielded-values))))
  (defun future-call (function &key (initial-bindings *default-special-bindings*)
                      (name "Anonymous FUTURE"))
    "Executes FUNCTION in parallel and returns a future that will yield the return value of
that function. INITIAL-BINDINGS may be provided to create dynamic bindings inside the thread."
    (let ((future (make-future)))
      ;; The worker sends either the value list or (SENTINEL . condition);
      ;; YIELD re-signals the condition as an EXECUTION-ERROR.
      (pcall (lambda ()
               (send (future-channel future)
                     (handler-case
                         (multiple-value-list (funcall function))
                       (condition (cause)
                         (cons sentinel (make-condition 'execution-error
                                                        :cause cause :future future))))))
             :initial-bindings initial-bindings
             :name name)
      future))
  ) ; End sentinel closure
;; Splice the keyword arguments only when supplied, so that FUTURE-CALL's
;; own defaults (*DEFAULT-SPECIAL-BINDINGS*, "Anonymous FUTURE") apply.
(defmacro future-exec ((&key initial-bindings name) &body body)
  "Convenience macro that makes the lambda for you."
  `(future-call (lambda () ,@body)
                ,@(when initial-bindings `(:initial-bindings ,initial-bindings))
                ,@(when name `(:name ,name))))
;; Busy-polls the futures: a ready future's size-1 channel holds a value,
;; so a SEND on it would block.  The random shuffle avoids always favoring
;; the same future; see the inline notes about its shortcomings.
(defun future-select (&rest futures)
  "Blocks until one of the futures in FUTURES (a sequence) is ready to yield,
then returns that future."
  ;; This is an improvement. However, we should try to find some way of not "thrashing". - Adlai
  (setf futures (sort futures (lambda (a b) a b (zerop (random 2)))))
  ;; This is incorrect. SEND/RECV-BLOCKS-P should not be used outside of the internals. - syko
  (loop for future = (find-if 'send-blocks-p futures :key 'future-channel)
        when future return future))
(defmacro future-let ((&rest bindings) &body body)
  "Like LET, but each binding's forms are evaluated in parallel as a future.
Within BODY each bound symbol is a symbol-macro whose expansion YIELDs the
future, so every reference blocks until (and re-reads) the cached result."
  (loop for (symbol . forms) in bindings
        for future = (make-symbol (string symbol))
        collect `(,future (future-exec (:name "FUTURE-LET Worker") ,@forms)) into futures
        collect `(,symbol (yield ,future)) into variables
        finally (return `(let ,futures (symbol-macrolet ,variables ,@body)))))
;; EXAMPLES> (defparameter *future* (future-exec () 'success))
;; *FUTURE*
;; EXAMPLES> (yield *future*)
SUCCESS
EXAMPLES > ( yield ( future - select ( future - exec ( ) ( sleep 10 ) ' long )
( future - exec ( ) ( sleep 2 ) ' short ) ) )
;; SHORT
;; EXAMPLES> (defparameter *future* (future-exec () (error "OHNOES")))
;; *FUTURE*
;; EXAMPLES> (yield *future*)
;; ...
;; #<FUTURE #x14FFE71E> errored during execution.
;; Cause: OHNOES
;; [Condition of type EXECUTION-ERROR]
;; ...
;; Invoking restart: Return to SLIME's top level.
;; ; Evaluation aborted.
| null | https://raw.githubusercontent.com/zkat/chanl/3a0ad5b9ae31b3874ac91c541fd997123c2e03db/examples/futures.lisp | lisp | -*- Mode: lisp; indent-tabs-mode: nil -*-
ChanL example implementation of doing concurrency using futures instead of channels.
This file is derived from 'Eager Future'; see the file COPYRIGHT, in the top directory,
for the license information for that project.
It demonstrates the value of channels as concurrency primitives.
End sentinel closure
This is an improvement. However, we should try to find some way of not "thrashing". - Adlai
This is incorrect. SEND/RECV-BLOCKS-P should not be used outside of the internals. - syko
EXAMPLES> (defparameter *future* (future-exec () 'success))
*FUTURE*
EXAMPLES> (yield *future*)
SHORT
EXAMPLES> (defparameter *future* (future-exec () (error "OHNOES")))
*FUTURE*
EXAMPLES> (yield *future*)
...
#<FUTURE #x14FFE71E> errored during execution.
Cause: OHNOES
[Condition of type EXECUTION-ERROR]
...
Invoking restart: Return to SLIME's top level.
; Evaluation aborted. | Copyright © 2009 ,
(in-package :chanl.examples)
This example is similar to Eager Future 's API .
(defstruct (future (:print-object (lambda (f s) (print-unreadable-object (f s :type t :identity t)))))
(channel (make-instance 'buffered-channel :size 1) :read-only t))
(define-condition execution-error (error)
((cause :initarg :cause :reader execution-error-cause)
(future :initarg :future :reader execution-error-future))
(:report (lambda (condition stream)
(format stream "~A errored during execution.~%Cause: ~A"
(execution-error-future condition)
(execution-error-cause condition)))))
(let ((sentinel (make-symbol (format nil "The future has performed an illegal ~
operation and will have to be shut down"))))
(defun yield (future)
"Yield the values returned by FUTURE. If FUTURE isn't ready to yield yet, block until it is."
(let ((yielded-values (recv (future-channel future))))
(send (future-channel future) yielded-values)
(if (eq sentinel (car yielded-values))
(error (cdr yielded-values))
(values-list yielded-values))))
(defun future-call (function &key (initial-bindings *default-special-bindings*)
(name "Anonymous FUTURE"))
"Executes FUNCTION in parallel and returns a future that will yield the return value of
that function. INITIAL-BINDINGS may be provided to create dynamic bindings inside the thread."
(let ((future (make-future)))
(pcall (lambda ()
(send (future-channel future)
(handler-case
(multiple-value-list (funcall function))
(condition (cause)
(cons sentinel (make-condition 'execution-error
:cause cause :future future))))))
:initial-bindings initial-bindings
:name name)
future))
(defmacro future-exec ((&key initial-bindings name) &body body)
"Convenience macro that makes the lambda for you."
`(future-call (lambda () ,@body)
,@(when initial-bindings `(:initial-bindings ,initial-bindings))
,@(when name `(:name ,name))))
(defun future-select (&rest futures)
"Blocks until one of the futures in FUTURES (a sequence) is ready to yield,
then returns that future."
(setf futures (sort futures (lambda (a b) a b (zerop (random 2)))))
(loop for future = (find-if 'send-blocks-p futures :key 'future-channel)
when future return future))
(defmacro future-let ((&rest bindings) &body body)
(loop for (symbol . forms) in bindings
for future = (make-symbol (string symbol))
collect `(,future (future-exec (:name "FUTURE-LET Worker") ,@forms)) into futures
collect `(,symbol (yield ,future)) into variables
finally (return `(let ,futures (symbol-macrolet ,variables ,@body)))))
SUCCESS
EXAMPLES > ( yield ( future - select ( future - exec ( ) ( sleep 10 ) ' long )
( future - exec ( ) ( sleep 2 ) ' short ) ) )
|
3523e8161c98760184970c4299fa57d285aef1aa31098283eb70cf6b5a81f6de | antono/guix-debian | mit-krb5.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2012 , 2013 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages mit-krb5)
#:use-module (gnu packages)
#:use-module (gnu packages bison)
#:use-module (gnu packages perl)
#:use-module (gnu packages gcc)
#:use-module (guix licenses)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu))
;; MIT Kerberos 5 package.  The upstream "-signed.tar" tarball nests the
;; real source tarball, hence the custom 'unpack phase below.
;; NOTE(review): the uri prefix and home-page strings look truncated in
;; this copy of the file -- verify the full mirror URL and home page.
(define-public mit-krb5
  (package
    (name "mit-krb5")
    (version "1.11.3")
    (source (origin
             (method url-fetch)
             (uri (string-append "/"
                   (string-copy version 0 (string-rindex version #\.))
                   "/krb5-" version "-signed.tar"))
             (sha256 (base32
                      "1daiaxgkxcryqs37w28v4x1vajqmay4l144d1zd9c2d7jjxr9gcs"))))
    (build-system gnu-build-system)
    (native-inputs
     `(("patch/init-fix" ,(search-patch "mit-krb5-init-fix.patch"))
       ("bison" ,bison)
       ("perl" ,perl)
       ;; XXX: When built with GCC 4.8, the 'db_test' test program enters an
       ;; infinite loop. As a stopgap measure, build with GCC 4.7.
       ("gcc" ,gcc-4.7)))
    (arguments
     '(#:phases
       (alist-replace
        'unpack
        ;; Unpack the outer signed tarball, then the inner source tarball
        ;; (its name is derived from the outer file name), enter src/ and
        ;; apply the init-fix patch.
        (lambda* (#:key source #:allow-other-keys)
          (let ((inner
                 (substring source
                            (string-index-right source #\k)
                            (string-index-right source #\-))))
            (and (zero? (system* "tar" "xvf" source))
                 (zero? (system* "tar" "xvf" (string-append inner ".tar.gz")))
                 (chdir inner)
                 (chdir "src")
                 ;; XXX The current patch system does not support unusual
                 ;; source unpack methods, so we have to apply this patch in a
                 ;; non-standard way.
                 (zero? (system* "patch" "-p1" "--batch" "-i"
                                 (assoc-ref %build-inputs "patch/init-fix"))))))
        (alist-replace
         'check
         ;; Adjust test scripts for the build container, then run the
         ;; standard check phase.
         (lambda* (#:key inputs #:allow-other-keys #:rest args)
           (let ((perl (assoc-ref inputs "perl"))
                 (check (assoc-ref %standard-phases 'check)))
             ;; Use existing files and directories in the db2 test script.
             (substitute* "plugins/kdb/db2/libdb2/test/run.test"
               (("/bin/cat") (string-append perl "/bin/perl")))
             (substitute* "plugins/kdb/db2/libdb2/test/run.test"
               (("D/bin/sh") (string-append "D" (which "bash"))))
             (substitute* "plugins/kdb/db2/libdb2/test/run.test"
               (("bindir=/bin/.") (string-append "bindir=" perl "/bin")))
             ;; Avoid service names since /etc/services is unavailable.
             (substitute* "tests/resolve/Makefile"
               (("-p telnet") "-p 23"))
             (apply check args)))
         %standard-phases))))
    (synopsis "MIT Kerberos 5")
    (description
     "Massachusetts Institute of Technology implementation of Kerberos.
Kerberos is a network authentication protocol designed to provide strong
authentication for client/server applications by using secret-key cryptography.")
    (license (bsd-style "file"
                        "See NOTICE in the distribution."))
    (home-page "/")))
| null | https://raw.githubusercontent.com/antono/guix-debian/85ef443788f0788a62010a942973d4f7714d10b4/gnu/packages/mit-krb5.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
infinite loop. As a stopgap measure, build with GCC 4.7.
XXX The current patch system does not support unusual
source unpack methods, so we have to apply this patch in a
non-standard way.
use existing files and directories in test
avoid service names since /etc/services is unavailable | Copyright © 2012 , 2013 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages mit-krb5)
#:use-module (gnu packages)
#:use-module (gnu packages bison)
#:use-module (gnu packages perl)
#:use-module (gnu packages gcc)
#:use-module (guix licenses)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu))
(define-public mit-krb5
(package
(name "mit-krb5")
(version "1.11.3")
(source (origin
(method url-fetch)
(uri (string-append "/"
(string-copy version 0 (string-rindex version #\.))
"/krb5-" version "-signed.tar"))
(sha256 (base32
"1daiaxgkxcryqs37w28v4x1vajqmay4l144d1zd9c2d7jjxr9gcs"))))
(build-system gnu-build-system)
(native-inputs
`(("patch/init-fix" ,(search-patch "mit-krb5-init-fix.patch"))
("bison" ,bison)
("perl" ,perl)
XXX : When built with GCC 4.8 , the ' db_test ' test program enters an
("gcc" ,gcc-4.7)))
(arguments
'(#:phases
(alist-replace
'unpack
(lambda* (#:key source #:allow-other-keys)
(let ((inner
(substring source
(string-index-right source #\k)
(string-index-right source #\-))))
(and (zero? (system* "tar" "xvf" source))
(zero? (system* "tar" "xvf" (string-append inner ".tar.gz")))
(chdir inner)
(chdir "src")
(zero? (system* "patch" "-p1" "--batch" "-i"
(assoc-ref %build-inputs "patch/init-fix"))))))
(alist-replace
'check
(lambda* (#:key inputs #:allow-other-keys #:rest args)
(let ((perl (assoc-ref inputs "perl"))
(check (assoc-ref %standard-phases 'check)))
(substitute* "plugins/kdb/db2/libdb2/test/run.test"
(("/bin/cat") (string-append perl "/bin/perl")))
(substitute* "plugins/kdb/db2/libdb2/test/run.test"
(("D/bin/sh") (string-append "D" (which "bash"))))
(substitute* "plugins/kdb/db2/libdb2/test/run.test"
(("bindir=/bin/.") (string-append "bindir=" perl "/bin")))
(substitute* "tests/resolve/Makefile"
(("-p telnet") "-p 23"))
(apply check args)))
%standard-phases))))
(synopsis "MIT Kerberos 5")
(description
"Massachusetts Institute of Technology implementation of Kerberos.
Kerberos is a network authentication protocol designed to provide strong
authentication for client/server applications by using secret-key cryptography.")
(license (bsd-style "file"
"See NOTICE in the distribution."))
(home-page "/")))
|
9561af7a30fb81c764f40720510300145c21836d64af94a7f16ec82226ed9c9a | sras/servant-examples | GeneralAuthentication.hs | # LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeOperators #
# LANGUAGE TypeFamilies #
module GeneralAuthentication where
import Servant ( PlainText
, AuthProtect
, Get
, Context((:.), EmptyContext)
, Proxy(..)
, type (:>) -- Syntax for importing type operator
, type (:<|>)
, (:<|>)(..)
)
import Servant.Server (Handler, Server, Application, serveWithContext)
import Network.Wai.Handler.Warp (run)
import Network.Wai (Request)
import Servant.Server.Experimental.Auth (AuthHandler, AuthServerData, mkAuthHandler)
data User = User
lookupUser :: Request -> Handler User
lookupUser = undefined -- Actual authenticating function
authHandler :: AuthHandler Request User
authHandler = mkAuthHandler lookupUser
handlerName :: User -> Handler String
handlerName _ = return "sras"
handlerAge :: Handler String
handlerAge = return "30"
type instance AuthServerData (AuthProtect "Example Auth Realm") = User
type ServantType = AuthProtect "Example Auth Realm" :> "person" :> "name" :> Get '[PlainText] String
:<|> "person" :> "age" :> Get '[PlainText] String
server :: Server ServantType
server = handlerName :<|> handlerAge
app :: Application
app = serveWithContext (Proxy :: Proxy ServantType) ctx server
where
ctx = authHandler :. EmptyContext
mainFn :: IO ()
mainFn = run 4000 app
| null | https://raw.githubusercontent.com/sras/servant-examples/923b54a13e14a4c2a37a3633dc7e2fa8fe49adc6/src/GeneralAuthentication.hs | haskell | # LANGUAGE OverloadedStrings #
Syntax for importing type operator
Actual authenticating function | # LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeOperators #
# LANGUAGE TypeFamilies #
module GeneralAuthentication where
import Servant ( PlainText
, AuthProtect
, Get
, Context((:.), EmptyContext)
, Proxy(..)
, type (:<|>)
, (:<|>)(..)
)
import Servant.Server (Handler, Server, Application, serveWithContext)
import Network.Wai.Handler.Warp (run)
import Network.Wai (Request)
import Servant.Server.Experimental.Auth (AuthHandler, AuthServerData, mkAuthHandler)
data User = User
lookupUser :: Request -> Handler User
authHandler :: AuthHandler Request User
authHandler = mkAuthHandler lookupUser
handlerName :: User -> Handler String
handlerName _ = return "sras"
handlerAge :: Handler String
handlerAge = return "30"
type instance AuthServerData (AuthProtect "Example Auth Realm") = User
type ServantType = AuthProtect "Example Auth Realm" :> "person" :> "name" :> Get '[PlainText] String
:<|> "person" :> "age" :> Get '[PlainText] String
server :: Server ServantType
server = handlerName :<|> handlerAge
app :: Application
app = serveWithContext (Proxy :: Proxy ServantType) ctx server
where
ctx = authHandler :. EmptyContext
mainFn :: IO ()
mainFn = run 4000 app
|
57939158ce52241bbfe6b3cbcef75fe0181b92775966b7c14bcedf52b801283f | simonmar/parconc-examples | ByteStringCompat.hs | # LANGUAGE CPP #
module ByteStringCompat () where
import qualified Data.ByteString.Lazy.Char8 as B
import Data.ByteString.Lazy.Char8 (ByteString)
import Control.DeepSeq
#if !MIN_VERSION_bytestring(0,10,0)
instance NFData ByteString where
rnf x = B.length x `seq` ()
#endif
| null | https://raw.githubusercontent.com/simonmar/parconc-examples/840a3f508f9bb6e03961e1b90311a1edd945adba/ByteStringCompat.hs | haskell | # LANGUAGE CPP #
module ByteStringCompat () where
import qualified Data.ByteString.Lazy.Char8 as B
import Data.ByteString.Lazy.Char8 (ByteString)
import Control.DeepSeq
#if !MIN_VERSION_bytestring(0,10,0)
instance NFData ByteString where
rnf x = B.length x `seq` ()
#endif
| |
17098c80f5ab35eb980eafb1aa464b8944ac2e665f6b16309d46923016d947f1 | brendanhay/amazonka | ListUsers.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE TypeFamilies #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
-- |
Module : Amazonka . Transfer . ListUsers
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
--
-- Lists the users for a file transfer protocol-enabled server that you
specify by passing the @ServerId@ parameter .
--
-- This operation returns paginated results.
module Amazonka.Transfer.ListUsers
( -- * Creating a Request
ListUsers (..),
newListUsers,
-- * Request Lenses
listUsers_maxResults,
listUsers_nextToken,
listUsers_serverId,
-- * Destructuring the Response
ListUsersResponse (..),
newListUsersResponse,
-- * Response Lenses
listUsersResponse_nextToken,
listUsersResponse_httpStatus,
listUsersResponse_serverId,
listUsersResponse_users,
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import qualified Amazonka.Request as Request
import qualified Amazonka.Response as Response
import Amazonka.Transfer.Types
| /See:/ ' newListUsers ' smart constructor .
data ListUsers = ListUsers'
| Specifies the number of users to return as a response to the @ListUsers@
-- request.
maxResults :: Prelude.Maybe Prelude.Natural,
| When you can get additional results from the @ListUsers@ call , a
-- @NextToken@ parameter is returned in the output. You can then pass in a
-- subsequent command to the @NextToken@ parameter to continue listing
-- additional users.
nextToken :: Prelude.Maybe Prelude.Text,
-- | A system-assigned unique identifier for a server that has users assigned
-- to it.
serverId :: Prelude.Text
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
-- |
Create a value of ' ListUsers ' with all optional fields omitted .
--
Use < -lens generic - lens > or < optics > to modify other optional fields .
--
-- The following record fields are available, with the corresponding lenses provided
-- for backwards compatibility:
--
' maxResults ' , ' listUsers_maxResults ' - Specifies the number of users to return as a response to the @ListUsers@
-- request.
--
' ' , ' ' - When you can get additional results from the @ListUsers@ call , a
-- @NextToken@ parameter is returned in the output. You can then pass in a
-- subsequent command to the @NextToken@ parameter to continue listing
-- additional users.
--
-- 'serverId', 'listUsers_serverId' - A system-assigned unique identifier for a server that has users assigned
-- to it.
newListUsers ::
-- | 'serverId'
Prelude.Text ->
ListUsers
newListUsers pServerId_ =
ListUsers'
{ maxResults = Prelude.Nothing,
nextToken = Prelude.Nothing,
serverId = pServerId_
}
| Specifies the number of users to return as a response to the @ListUsers@
-- request.
listUsers_maxResults :: Lens.Lens' ListUsers (Prelude.Maybe Prelude.Natural)
listUsers_maxResults = Lens.lens (\ListUsers' {maxResults} -> maxResults) (\s@ListUsers' {} a -> s {maxResults = a} :: ListUsers)
| When you can get additional results from the @ListUsers@ call , a
-- @NextToken@ parameter is returned in the output. You can then pass in a
-- subsequent command to the @NextToken@ parameter to continue listing
-- additional users.
listUsers_nextToken :: Lens.Lens' ListUsers (Prelude.Maybe Prelude.Text)
listUsers_nextToken = Lens.lens (\ListUsers' {nextToken} -> nextToken) (\s@ListUsers' {} a -> s {nextToken = a} :: ListUsers)
-- | A system-assigned unique identifier for a server that has users assigned
-- to it.
listUsers_serverId :: Lens.Lens' ListUsers Prelude.Text
listUsers_serverId = Lens.lens (\ListUsers' {serverId} -> serverId) (\s@ListUsers' {} a -> s {serverId = a} :: ListUsers)
instance Core.AWSPager ListUsers where
page rq rs
| Core.stop
( rs
Lens.^? listUsersResponse_nextToken Prelude.. Lens._Just
) =
Prelude.Nothing
| Core.stop (rs Lens.^. listUsersResponse_users) =
Prelude.Nothing
| Prelude.otherwise =
Prelude.Just Prelude.$
rq
Prelude.& listUsers_nextToken
Lens..~ rs
Lens.^? listUsersResponse_nextToken Prelude.. Lens._Just
instance Core.AWSRequest ListUsers where
type AWSResponse ListUsers = ListUsersResponse
request overrides =
Request.postJSON (overrides defaultService)
response =
Response.receiveJSON
( \s h x ->
ListUsersResponse'
Prelude.<$> (x Data..?> "NextToken")
Prelude.<*> (Prelude.pure (Prelude.fromEnum s))
Prelude.<*> (x Data..:> "ServerId")
Prelude.<*> (x Data..?> "Users" Core..!@ Prelude.mempty)
)
instance Prelude.Hashable ListUsers where
hashWithSalt _salt ListUsers' {..} =
_salt `Prelude.hashWithSalt` maxResults
`Prelude.hashWithSalt` nextToken
`Prelude.hashWithSalt` serverId
instance Prelude.NFData ListUsers where
rnf ListUsers' {..} =
Prelude.rnf maxResults
`Prelude.seq` Prelude.rnf nextToken
`Prelude.seq` Prelude.rnf serverId
instance Data.ToHeaders ListUsers where
toHeaders =
Prelude.const
( Prelude.mconcat
[ "X-Amz-Target"
Data.=# ("TransferService.ListUsers" :: Prelude.ByteString),
"Content-Type"
Data.=# ( "application/x-amz-json-1.1" ::
Prelude.ByteString
)
]
)
instance Data.ToJSON ListUsers where
toJSON ListUsers' {..} =
Data.object
( Prelude.catMaybes
[ ("MaxResults" Data..=) Prelude.<$> maxResults,
("NextToken" Data..=) Prelude.<$> nextToken,
Prelude.Just ("ServerId" Data..= serverId)
]
)
instance Data.ToPath ListUsers where
toPath = Prelude.const "/"
instance Data.ToQuery ListUsers where
toQuery = Prelude.const Prelude.mempty
-- | /See:/ 'newListUsersResponse' smart constructor.
data ListUsersResponse = ListUsersResponse'
| When you can get additional results from the @ListUsers@ call , a
-- @NextToken@ parameter is returned in the output. You can then pass in a
-- subsequent command to the @NextToken@ parameter to continue listing
-- additional users.
nextToken :: Prelude.Maybe Prelude.Text,
-- | The response's http status code.
httpStatus :: Prelude.Int,
-- | A system-assigned unique identifier for a server that the users are
-- assigned to.
serverId :: Prelude.Text,
| Returns the user accounts and their properties for the @ServerId@ value
-- that you specify.
users :: [ListedUser]
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
-- |
-- Create a value of 'ListUsersResponse' with all optional fields omitted.
--
Use < -lens generic - lens > or < optics > to modify other optional fields .
--
-- The following record fields are available, with the corresponding lenses provided
-- for backwards compatibility:
--
' ' , ' listUsersResponse_nextToken ' - When you can get additional results from the @ListUsers@ call , a
-- @NextToken@ parameter is returned in the output. You can then pass in a
-- subsequent command to the @NextToken@ parameter to continue listing
-- additional users.
--
-- 'httpStatus', 'listUsersResponse_httpStatus' - The response's http status code.
--
-- 'serverId', 'listUsersResponse_serverId' - A system-assigned unique identifier for a server that the users are
-- assigned to.
--
' users ' , ' listUsersResponse_users ' - Returns the user accounts and their properties for the @ServerId@ value
-- that you specify.
newListUsersResponse ::
-- | 'httpStatus'
Prelude.Int ->
-- | 'serverId'
Prelude.Text ->
ListUsersResponse
newListUsersResponse pHttpStatus_ pServerId_ =
ListUsersResponse'
{ nextToken = Prelude.Nothing,
httpStatus = pHttpStatus_,
serverId = pServerId_,
users = Prelude.mempty
}
| When you can get additional results from the @ListUsers@ call , a
-- @NextToken@ parameter is returned in the output. You can then pass in a
-- subsequent command to the @NextToken@ parameter to continue listing
-- additional users.
listUsersResponse_nextToken :: Lens.Lens' ListUsersResponse (Prelude.Maybe Prelude.Text)
listUsersResponse_nextToken = Lens.lens (\ListUsersResponse' {nextToken} -> nextToken) (\s@ListUsersResponse' {} a -> s {nextToken = a} :: ListUsersResponse)
-- | The response's http status code.
listUsersResponse_httpStatus :: Lens.Lens' ListUsersResponse Prelude.Int
listUsersResponse_httpStatus = Lens.lens (\ListUsersResponse' {httpStatus} -> httpStatus) (\s@ListUsersResponse' {} a -> s {httpStatus = a} :: ListUsersResponse)
-- | A system-assigned unique identifier for a server that the users are
-- assigned to.
listUsersResponse_serverId :: Lens.Lens' ListUsersResponse Prelude.Text
listUsersResponse_serverId = Lens.lens (\ListUsersResponse' {serverId} -> serverId) (\s@ListUsersResponse' {} a -> s {serverId = a} :: ListUsersResponse)
| Returns the user accounts and their properties for the @ServerId@ value
-- that you specify.
listUsersResponse_users :: Lens.Lens' ListUsersResponse [ListedUser]
listUsersResponse_users = Lens.lens (\ListUsersResponse' {users} -> users) (\s@ListUsersResponse' {} a -> s {users = a} :: ListUsersResponse) Prelude.. Lens.coerced
instance Prelude.NFData ListUsersResponse where
rnf ListUsersResponse' {..} =
Prelude.rnf nextToken
`Prelude.seq` Prelude.rnf httpStatus
`Prelude.seq` Prelude.rnf serverId
`Prelude.seq` Prelude.rnf users
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-transfer/gen/Amazonka/Transfer/ListUsers.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated
Lists the users for a file transfer protocol-enabled server that you
This operation returns paginated results.
* Creating a Request
* Request Lenses
* Destructuring the Response
* Response Lenses
request.
@NextToken@ parameter is returned in the output. You can then pass in a
subsequent command to the @NextToken@ parameter to continue listing
additional users.
| A system-assigned unique identifier for a server that has users assigned
to it.
|
The following record fields are available, with the corresponding lenses provided
for backwards compatibility:
request.
@NextToken@ parameter is returned in the output. You can then pass in a
subsequent command to the @NextToken@ parameter to continue listing
additional users.
'serverId', 'listUsers_serverId' - A system-assigned unique identifier for a server that has users assigned
to it.
| 'serverId'
request.
@NextToken@ parameter is returned in the output. You can then pass in a
subsequent command to the @NextToken@ parameter to continue listing
additional users.
| A system-assigned unique identifier for a server that has users assigned
to it.
| /See:/ 'newListUsersResponse' smart constructor.
@NextToken@ parameter is returned in the output. You can then pass in a
subsequent command to the @NextToken@ parameter to continue listing
additional users.
| The response's http status code.
| A system-assigned unique identifier for a server that the users are
assigned to.
that you specify.
|
Create a value of 'ListUsersResponse' with all optional fields omitted.
The following record fields are available, with the corresponding lenses provided
for backwards compatibility:
@NextToken@ parameter is returned in the output. You can then pass in a
subsequent command to the @NextToken@ parameter to continue listing
additional users.
'httpStatus', 'listUsersResponse_httpStatus' - The response's http status code.
'serverId', 'listUsersResponse_serverId' - A system-assigned unique identifier for a server that the users are
assigned to.
that you specify.
| 'httpStatus'
| 'serverId'
@NextToken@ parameter is returned in the output. You can then pass in a
subsequent command to the @NextToken@ parameter to continue listing
additional users.
| The response's http status code.
| A system-assigned unique identifier for a server that the users are
assigned to.
that you specify. | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Module : Amazonka . Transfer . ListUsers
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
specify by passing the @ServerId@ parameter .
module Amazonka.Transfer.ListUsers
ListUsers (..),
newListUsers,
listUsers_maxResults,
listUsers_nextToken,
listUsers_serverId,
ListUsersResponse (..),
newListUsersResponse,
listUsersResponse_nextToken,
listUsersResponse_httpStatus,
listUsersResponse_serverId,
listUsersResponse_users,
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import qualified Amazonka.Request as Request
import qualified Amazonka.Response as Response
import Amazonka.Transfer.Types
| /See:/ ' newListUsers ' smart constructor .
data ListUsers = ListUsers'
| Specifies the number of users to return as a response to the @ListUsers@
maxResults :: Prelude.Maybe Prelude.Natural,
| When you can get additional results from the @ListUsers@ call , a
nextToken :: Prelude.Maybe Prelude.Text,
serverId :: Prelude.Text
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
Create a value of ' ListUsers ' with all optional fields omitted .
Use < -lens generic - lens > or < optics > to modify other optional fields .
' maxResults ' , ' listUsers_maxResults ' - Specifies the number of users to return as a response to the @ListUsers@
' ' , ' ' - When you can get additional results from the @ListUsers@ call , a
newListUsers ::
Prelude.Text ->
ListUsers
newListUsers pServerId_ =
ListUsers'
{ maxResults = Prelude.Nothing,
nextToken = Prelude.Nothing,
serverId = pServerId_
}
| Specifies the number of users to return as a response to the @ListUsers@
listUsers_maxResults :: Lens.Lens' ListUsers (Prelude.Maybe Prelude.Natural)
listUsers_maxResults = Lens.lens (\ListUsers' {maxResults} -> maxResults) (\s@ListUsers' {} a -> s {maxResults = a} :: ListUsers)
| When you can get additional results from the @ListUsers@ call , a
listUsers_nextToken :: Lens.Lens' ListUsers (Prelude.Maybe Prelude.Text)
listUsers_nextToken = Lens.lens (\ListUsers' {nextToken} -> nextToken) (\s@ListUsers' {} a -> s {nextToken = a} :: ListUsers)
listUsers_serverId :: Lens.Lens' ListUsers Prelude.Text
listUsers_serverId = Lens.lens (\ListUsers' {serverId} -> serverId) (\s@ListUsers' {} a -> s {serverId = a} :: ListUsers)
instance Core.AWSPager ListUsers where
page rq rs
| Core.stop
( rs
Lens.^? listUsersResponse_nextToken Prelude.. Lens._Just
) =
Prelude.Nothing
| Core.stop (rs Lens.^. listUsersResponse_users) =
Prelude.Nothing
| Prelude.otherwise =
Prelude.Just Prelude.$
rq
Prelude.& listUsers_nextToken
Lens..~ rs
Lens.^? listUsersResponse_nextToken Prelude.. Lens._Just
instance Core.AWSRequest ListUsers where
type AWSResponse ListUsers = ListUsersResponse
request overrides =
Request.postJSON (overrides defaultService)
response =
Response.receiveJSON
( \s h x ->
ListUsersResponse'
Prelude.<$> (x Data..?> "NextToken")
Prelude.<*> (Prelude.pure (Prelude.fromEnum s))
Prelude.<*> (x Data..:> "ServerId")
Prelude.<*> (x Data..?> "Users" Core..!@ Prelude.mempty)
)
instance Prelude.Hashable ListUsers where
hashWithSalt _salt ListUsers' {..} =
_salt `Prelude.hashWithSalt` maxResults
`Prelude.hashWithSalt` nextToken
`Prelude.hashWithSalt` serverId
instance Prelude.NFData ListUsers where
rnf ListUsers' {..} =
Prelude.rnf maxResults
`Prelude.seq` Prelude.rnf nextToken
`Prelude.seq` Prelude.rnf serverId
instance Data.ToHeaders ListUsers where
toHeaders =
Prelude.const
( Prelude.mconcat
[ "X-Amz-Target"
Data.=# ("TransferService.ListUsers" :: Prelude.ByteString),
"Content-Type"
Data.=# ( "application/x-amz-json-1.1" ::
Prelude.ByteString
)
]
)
instance Data.ToJSON ListUsers where
toJSON ListUsers' {..} =
Data.object
( Prelude.catMaybes
[ ("MaxResults" Data..=) Prelude.<$> maxResults,
("NextToken" Data..=) Prelude.<$> nextToken,
Prelude.Just ("ServerId" Data..= serverId)
]
)
instance Data.ToPath ListUsers where
toPath = Prelude.const "/"
instance Data.ToQuery ListUsers where
toQuery = Prelude.const Prelude.mempty
data ListUsersResponse = ListUsersResponse'
| When you can get additional results from the @ListUsers@ call , a
nextToken :: Prelude.Maybe Prelude.Text,
httpStatus :: Prelude.Int,
serverId :: Prelude.Text,
| Returns the user accounts and their properties for the @ServerId@ value
users :: [ListedUser]
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
Use < -lens generic - lens > or < optics > to modify other optional fields .
' ' , ' listUsersResponse_nextToken ' - When you can get additional results from the @ListUsers@ call , a
' users ' , ' listUsersResponse_users ' - Returns the user accounts and their properties for the @ServerId@ value
newListUsersResponse ::
Prelude.Int ->
Prelude.Text ->
ListUsersResponse
newListUsersResponse pHttpStatus_ pServerId_ =
ListUsersResponse'
{ nextToken = Prelude.Nothing,
httpStatus = pHttpStatus_,
serverId = pServerId_,
users = Prelude.mempty
}
| When you can get additional results from the @ListUsers@ call , a
listUsersResponse_nextToken :: Lens.Lens' ListUsersResponse (Prelude.Maybe Prelude.Text)
listUsersResponse_nextToken = Lens.lens (\ListUsersResponse' {nextToken} -> nextToken) (\s@ListUsersResponse' {} a -> s {nextToken = a} :: ListUsersResponse)
listUsersResponse_httpStatus :: Lens.Lens' ListUsersResponse Prelude.Int
listUsersResponse_httpStatus = Lens.lens (\ListUsersResponse' {httpStatus} -> httpStatus) (\s@ListUsersResponse' {} a -> s {httpStatus = a} :: ListUsersResponse)
listUsersResponse_serverId :: Lens.Lens' ListUsersResponse Prelude.Text
listUsersResponse_serverId = Lens.lens (\ListUsersResponse' {serverId} -> serverId) (\s@ListUsersResponse' {} a -> s {serverId = a} :: ListUsersResponse)
| Returns the user accounts and their properties for the @ServerId@ value
listUsersResponse_users :: Lens.Lens' ListUsersResponse [ListedUser]
listUsersResponse_users = Lens.lens (\ListUsersResponse' {users} -> users) (\s@ListUsersResponse' {} a -> s {users = a} :: ListUsersResponse) Prelude.. Lens.coerced
instance Prelude.NFData ListUsersResponse where
rnf ListUsersResponse' {..} =
Prelude.rnf nextToken
`Prelude.seq` Prelude.rnf httpStatus
`Prelude.seq` Prelude.rnf serverId
`Prelude.seq` Prelude.rnf users
|
8d9b53ca5f56b9e5d9f44e6f13de608a9c7d74569f191f7d20d193eb26536e43 | ftovagliari/ocamleditor | prepare_build.ml |
OCamlEditor
Copyright ( C ) 2010 - 2014
This file is part of OCamlEditor .
OCamlEditor is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
OCamlEditor is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
OCamlEditor
Copyright (C) 2010-2014 Francesco Tovagliari
This file is part of OCamlEditor.
OCamlEditor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OCamlEditor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
*)
#cd "src"
#use "../tools/scripting.ml"
open Printf
let required_ocaml_version = "4.01.0"
let use_modified_gtkThread = ref false
let record_backtrace = ref true
let exe = if is_win32 then ".exe" else ""
let generate_oebuild_script () =
run "ocaml -I common str.cma unix.cma miscellanea.cmo file_util.cmo generate_oebuild_script.ml";;
let prepare_build () =
if Sys.ocaml_version < required_ocaml_version then begin
eprintf "You are using OCaml-%s but version %s is required." Sys.ocaml_version required_ocaml_version;
end else begin
cp ~echo:true (if !use_modified_gtkThread then "gtkThreadModified.ml" else "gtkThreadOriginal.ml") "gtkThread2.ml";
if not (Sys.file_exists "../plugins") then (mkdir "../plugins");
run "ocamllex err_lexer.mll";
run "ocamlyacc err_parser.mly";
if not is_win32 then begin
Disabled because on Windows it changes the file permissions of oe_config.ml
forcing it to be recompiled for plugins .
forcing it to be recompiled for plugins.*)
substitute ~filename:"oe_config.ml" ~regexp:true
["let _ = Printexc\\.record_backtrace \\(\\(true\\)\\|\\(false\\)\\)$",
(sprintf "let _ = Printexc.record_backtrace %b" !record_backtrace)];
end;
(try generate_oebuild_script() with Failure msg -> raise (Script_error ("generate_oebuild_script()", 2)));
(* *)
let chan = open_out_bin "../src/build_id.ml" in
kprintf (output_string chan) "let timestamp = \"%f\"\n" (Unix.gettimeofday ());
kprintf (output_string chan) "let git_hash = %S\n" (get_command_output "git rev-parse HEAD" |> List.hd);
close_out_noerr chan;
(* *)
print_newline()
end;;
let _ = main ~default_target:prepare_build ~targets:[
"-generate-oebuild-script", generate_oebuild_script, " (undocumented)";
]~options:[
"-record-backtrace", Bool (fun x -> record_backtrace := x), " Turn recording of exception backtraces on or off";
"-use-modified-gtkThread", Set use_modified_gtkThread, " Set this flag if you have Lablgtk-2.14.2 or earlier
for using the included modified version of gtkThread.ml
to reduce CPU consumption";
] ()
| null | https://raw.githubusercontent.com/ftovagliari/ocamleditor/a6d1e69b14dfd4466ac842a3ed97d1423a3883e6/tools/prepare_build.ml | ocaml |
OCamlEditor
Copyright ( C ) 2010 - 2014
This file is part of OCamlEditor .
OCamlEditor is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
OCamlEditor is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
OCamlEditor
Copyright (C) 2010-2014 Francesco Tovagliari
This file is part of OCamlEditor.
OCamlEditor is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OCamlEditor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
*)
#cd "src"
#use "../tools/scripting.ml"
open Printf
let required_ocaml_version = "4.01.0"
let use_modified_gtkThread = ref false
let record_backtrace = ref true
let exe = if is_win32 then ".exe" else ""
let generate_oebuild_script () =
run "ocaml -I common str.cma unix.cma miscellanea.cmo file_util.cmo generate_oebuild_script.ml";;
let prepare_build () =
if Sys.ocaml_version < required_ocaml_version then begin
eprintf "You are using OCaml-%s but version %s is required." Sys.ocaml_version required_ocaml_version;
end else begin
cp ~echo:true (if !use_modified_gtkThread then "gtkThreadModified.ml" else "gtkThreadOriginal.ml") "gtkThread2.ml";
if not (Sys.file_exists "../plugins") then (mkdir "../plugins");
run "ocamllex err_lexer.mll";
run "ocamlyacc err_parser.mly";
if not is_win32 then begin
Disabled because on Windows it changes the file permissions of oe_config.ml
forcing it to be recompiled for plugins .
forcing it to be recompiled for plugins.*)
substitute ~filename:"oe_config.ml" ~regexp:true
["let _ = Printexc\\.record_backtrace \\(\\(true\\)\\|\\(false\\)\\)$",
(sprintf "let _ = Printexc.record_backtrace %b" !record_backtrace)];
end;
(try generate_oebuild_script() with Failure msg -> raise (Script_error ("generate_oebuild_script()", 2)));
let chan = open_out_bin "../src/build_id.ml" in
kprintf (output_string chan) "let timestamp = \"%f\"\n" (Unix.gettimeofday ());
kprintf (output_string chan) "let git_hash = %S\n" (get_command_output "git rev-parse HEAD" |> List.hd);
close_out_noerr chan;
print_newline()
end;;
let _ = main ~default_target:prepare_build ~targets:[
"-generate-oebuild-script", generate_oebuild_script, " (undocumented)";
]~options:[
"-record-backtrace", Bool (fun x -> record_backtrace := x), " Turn recording of exception backtraces on or off";
"-use-modified-gtkThread", Set use_modified_gtkThread, " Set this flag if you have Lablgtk-2.14.2 or earlier
for using the included modified version of gtkThread.ml
to reduce CPU consumption";
] ()
| |
e109a407e34af6a5cc8548036d07bf5113bab2d3e865ed27743a5e0e0e52da1b | mmottl/gsl-ocaml | stats.ml | gsl - ocaml - OCaml interface to GSL
Copyright ( © ) 2002 - 2012 - Olivier Andrieu
Distributed under the terms of the GPL version 3
let () = Error.init ()
external mean : ?w:float array -> float array -> float
= "ml_gsl_stats_mean"
external variance : ?w:float array -> ?mean:float -> float array -> float
= "ml_gsl_stats_variance"
external sd : ?w:float array -> ?mean:float -> float array -> float
= "ml_gsl_stats_sd"
external variance_with_fixed_mean :
?w:float array -> mean:float -> float array -> float
= "ml_gsl_stats_variance_with_fixed_mean"
external sd_with_fixed_mean :
?w:float array -> mean:float -> float array -> float
= "ml_gsl_stats_sd_with_fixed_mean"
external absdev : ?w:float array -> ?mean:float -> float array -> float
= "ml_gsl_stats_absdev"
external skew : ?w:float array -> float array -> float
= "ml_gsl_stats_skew"
external skew_m_sd :
?w:float array -> mean:float ->
sd:float -> float array -> float
= "ml_gsl_stats_skew_m_sd"
external kurtosis : ?w:float array -> float array -> float
= "ml_gsl_stats_kurtosis"
external kurtosis_m_sd :
?w:float array -> mean:float ->
sd:float -> float array -> float
= "ml_gsl_stats_kurtosis_m_sd"
external lag1_autocorrelation :
?mean:float -> float array -> float
= "ml_gsl_stats_lag1_autocorrelation"
external covariance :
float array -> float array -> float
= "ml_gsl_stats_covariance"
external covariance_m :
mean1:float -> float array ->
mean2:float -> float array -> float
= "ml_gsl_stats_covariance_m"
external max : float array -> float
= "ml_gsl_stats_max"
external min : float array -> float
= "ml_gsl_stats_min"
external minmax : float array -> float * float
= "ml_gsl_stats_minmax"
external max_index : float array -> int
= "ml_gsl_stats_max_index"
external min_index : float array -> int
= "ml_gsl_stats_min_index"
external minmax_index : float array -> int * int
= "ml_gsl_stats_minmax_index"
external quantile_from_sorted_data : float array -> float -> float
= "ml_gsl_stats_quantile_from_sorted_data"
external correlation :
float array -> float array -> float
= "ml_gsl_stats_correlation"
| null | https://raw.githubusercontent.com/mmottl/gsl-ocaml/76f8d93cccc1f23084f4a33d3e0a8f1289450580/src/stats.ml | ocaml | gsl - ocaml - OCaml interface to GSL
Copyright ( © ) 2002 - 2012 - Olivier Andrieu
Distributed under the terms of the GPL version 3
let () = Error.init ()
external mean : ?w:float array -> float array -> float
= "ml_gsl_stats_mean"
external variance : ?w:float array -> ?mean:float -> float array -> float
= "ml_gsl_stats_variance"
external sd : ?w:float array -> ?mean:float -> float array -> float
= "ml_gsl_stats_sd"
external variance_with_fixed_mean :
?w:float array -> mean:float -> float array -> float
= "ml_gsl_stats_variance_with_fixed_mean"
external sd_with_fixed_mean :
?w:float array -> mean:float -> float array -> float
= "ml_gsl_stats_sd_with_fixed_mean"
external absdev : ?w:float array -> ?mean:float -> float array -> float
= "ml_gsl_stats_absdev"
external skew : ?w:float array -> float array -> float
= "ml_gsl_stats_skew"
external skew_m_sd :
?w:float array -> mean:float ->
sd:float -> float array -> float
= "ml_gsl_stats_skew_m_sd"
external kurtosis : ?w:float array -> float array -> float
= "ml_gsl_stats_kurtosis"
external kurtosis_m_sd :
?w:float array -> mean:float ->
sd:float -> float array -> float
= "ml_gsl_stats_kurtosis_m_sd"
external lag1_autocorrelation :
?mean:float -> float array -> float
= "ml_gsl_stats_lag1_autocorrelation"
external covariance :
float array -> float array -> float
= "ml_gsl_stats_covariance"
external covariance_m :
mean1:float -> float array ->
mean2:float -> float array -> float
= "ml_gsl_stats_covariance_m"
external max : float array -> float
= "ml_gsl_stats_max"
external min : float array -> float
= "ml_gsl_stats_min"
external minmax : float array -> float * float
= "ml_gsl_stats_minmax"
external max_index : float array -> int
= "ml_gsl_stats_max_index"
external min_index : float array -> int
= "ml_gsl_stats_min_index"
external minmax_index : float array -> int * int
= "ml_gsl_stats_minmax_index"
external quantile_from_sorted_data : float array -> float -> float
= "ml_gsl_stats_quantile_from_sorted_data"
external correlation :
float array -> float array -> float
= "ml_gsl_stats_correlation"
| |
51defcdc3f6f607095695bd94268a3aa81e7139c1c62a0de8be52da5caad22ef | softwarelanguageslab/maf | R5RS_gabriel_triangl-4.scm | ; Changes:
* removed : 0
* added : 2
* swaps : 0
; * negated predicates: 0
* swapped branches : 1
* calls to i d fun : 2
(letrec ((*board* (vector 1 1 1 1 1 0 1 1 1 1 1 1 1 1 1 1))
(*sequence* (vector 0 0 0 0 0 0 0 0 0 0 0 0 0 0))
(*a* (vector 1 2 4 3 5 6 1 3 6 2 5 4 11 12 13 7 8 4 4 7 11 8 12 13 6 10 15 9 14 13 13 14 15 9 10 6 6))
(*b* (vector 2 4 7 5 8 9 3 6 10 5 9 8 12 13 14 8 9 5 2 4 7 5 8 9 3 6 10 5 9 8 12 13 14 8 9 5 5))
(*c* (vector 4 7 11 8 12 13 6 10 15 9 14 13 13 14 15 9 10 6 1 2 4 3 5 6 1 3 6 2 5 4 11 12 13 7 8 4 4))
(*answer* ())
(attempt (lambda (i depth)
(<change>
@sensitivity:FA
((lambda (x) x) @sensitivity:FA))
(if (= depth 14)
(<change>
(begin
(set! *answer* (cons (cdr (vector->list *sequence*)) *answer*))
#t)
(if (if (= 1 (vector-ref *board* (vector-ref *a* i))) (if (= 1 (vector-ref *board* (vector-ref *b* i))) (= 0 (vector-ref *board* (vector-ref *c* i))) #f) #f)
(begin
(vector-set! *board* (vector-ref *a* i) 0)
(vector-set! *board* (vector-ref *b* i) 0)
(vector-set! *board* (vector-ref *c* i) 1)
((lambda (x) x) (vector-set! *sequence* depth i))
(letrec ((__do_loop (lambda (j depth)
@sensitivity:FA
(if (let ((__or_res (= j 36))) (if __or_res __or_res (attempt j depth)))
#f
(__do_loop (+ j 1) depth)))))
(__do_loop 0 (+ depth 1)))
(vector-set! *board* (vector-ref *a* i) 1)
(vector-set! *board* (vector-ref *b* i) 1)
(vector-set! *board* (vector-ref *c* i) 0)
vector-set!
#f)
#f))
(<change>
(if (if (= 1 (vector-ref *board* (vector-ref *a* i))) (if (= 1 (vector-ref *board* (vector-ref *b* i))) (= 0 (vector-ref *board* (vector-ref *c* i))) #f) #f)
(begin
(vector-set! *board* (vector-ref *a* i) 0)
(vector-set! *board* (vector-ref *b* i) 0)
(vector-set! *board* (vector-ref *c* i) 1)
(vector-set! *sequence* depth i)
(letrec ((__do_loop (lambda (j depth)
@sensitivity:FA
(if (let ((__or_res (= j 36))) (if __or_res __or_res (attempt j depth)))
#f
(__do_loop (+ j 1) depth)))))
(__do_loop 0 (+ depth 1)))
(vector-set! *board* (vector-ref *a* i) 1)
(vector-set! *board* (vector-ref *b* i) 1)
(vector-set! *board* (vector-ref *c* i) 0)
#f)
#f)
(begin
(set! *answer* (cons (cdr (vector->list *sequence*)) *answer*))
(display (vector->list *sequence*))
#t)))))
(test (lambda (i depth)
@sensitivity:FA
(set! *answer* ())
(attempt i depth)
(car *answer*))))
(equal?
(test 22 1)
(__toplevel_cons
22
(__toplevel_cons
34
(__toplevel_cons
31
(__toplevel_cons
15
(__toplevel_cons
7
(__toplevel_cons
1
(__toplevel_cons
20
(__toplevel_cons
17
(__toplevel_cons
25
(__toplevel_cons 6 (__toplevel_cons 5 (__toplevel_cons 13 (__toplevel_cons 32 ()))))))))))))))) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_gabriel_triangl-4.scm | scheme | Changes:
* negated predicates: 0 | * removed : 0
* added : 2
* swaps : 0
* swapped branches : 1
* calls to i d fun : 2
(letrec ((*board* (vector 1 1 1 1 1 0 1 1 1 1 1 1 1 1 1 1))
(*sequence* (vector 0 0 0 0 0 0 0 0 0 0 0 0 0 0))
(*a* (vector 1 2 4 3 5 6 1 3 6 2 5 4 11 12 13 7 8 4 4 7 11 8 12 13 6 10 15 9 14 13 13 14 15 9 10 6 6))
(*b* (vector 2 4 7 5 8 9 3 6 10 5 9 8 12 13 14 8 9 5 2 4 7 5 8 9 3 6 10 5 9 8 12 13 14 8 9 5 5))
(*c* (vector 4 7 11 8 12 13 6 10 15 9 14 13 13 14 15 9 10 6 1 2 4 3 5 6 1 3 6 2 5 4 11 12 13 7 8 4 4))
(*answer* ())
(attempt (lambda (i depth)
(<change>
@sensitivity:FA
((lambda (x) x) @sensitivity:FA))
(if (= depth 14)
(<change>
(begin
(set! *answer* (cons (cdr (vector->list *sequence*)) *answer*))
#t)
(if (if (= 1 (vector-ref *board* (vector-ref *a* i))) (if (= 1 (vector-ref *board* (vector-ref *b* i))) (= 0 (vector-ref *board* (vector-ref *c* i))) #f) #f)
(begin
(vector-set! *board* (vector-ref *a* i) 0)
(vector-set! *board* (vector-ref *b* i) 0)
(vector-set! *board* (vector-ref *c* i) 1)
((lambda (x) x) (vector-set! *sequence* depth i))
(letrec ((__do_loop (lambda (j depth)
@sensitivity:FA
(if (let ((__or_res (= j 36))) (if __or_res __or_res (attempt j depth)))
#f
(__do_loop (+ j 1) depth)))))
(__do_loop 0 (+ depth 1)))
(vector-set! *board* (vector-ref *a* i) 1)
(vector-set! *board* (vector-ref *b* i) 1)
(vector-set! *board* (vector-ref *c* i) 0)
vector-set!
#f)
#f))
(<change>
(if (if (= 1 (vector-ref *board* (vector-ref *a* i))) (if (= 1 (vector-ref *board* (vector-ref *b* i))) (= 0 (vector-ref *board* (vector-ref *c* i))) #f) #f)
(begin
(vector-set! *board* (vector-ref *a* i) 0)
(vector-set! *board* (vector-ref *b* i) 0)
(vector-set! *board* (vector-ref *c* i) 1)
(vector-set! *sequence* depth i)
(letrec ((__do_loop (lambda (j depth)
@sensitivity:FA
(if (let ((__or_res (= j 36))) (if __or_res __or_res (attempt j depth)))
#f
(__do_loop (+ j 1) depth)))))
(__do_loop 0 (+ depth 1)))
(vector-set! *board* (vector-ref *a* i) 1)
(vector-set! *board* (vector-ref *b* i) 1)
(vector-set! *board* (vector-ref *c* i) 0)
#f)
#f)
(begin
(set! *answer* (cons (cdr (vector->list *sequence*)) *answer*))
(display (vector->list *sequence*))
#t)))))
(test (lambda (i depth)
@sensitivity:FA
(set! *answer* ())
(attempt i depth)
(car *answer*))))
(equal?
(test 22 1)
(__toplevel_cons
22
(__toplevel_cons
34
(__toplevel_cons
31
(__toplevel_cons
15
(__toplevel_cons
7
(__toplevel_cons
1
(__toplevel_cons
20
(__toplevel_cons
17
(__toplevel_cons
25
(__toplevel_cons 6 (__toplevel_cons 5 (__toplevel_cons 13 (__toplevel_cons 32 ()))))))))))))))) |
078ba93e9c28fcab86eab55dbbed26130cade18aefedd5d8efcb8d58e4d79bad | chaoxu/fancy-walks | B.hs | {-# OPTIONS_GHC -O2 #-}
import Data.List
import Data.Maybe
import Data.Char
import Data.Array.IArray
import Data.Array.Unboxed (UArray)
import Data.Int
import Data.Ratio
import Data.Bits
import Data.Function
import Data.Ord
import Control.Monad.State
import Control.Monad
import Control.Applicative
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Sequence (Seq, (<|), (|>), (><), ViewL(..), ViewR(..))
import qualified Data.Sequence as Seq
import qualified Data.Foldable as F
import Data.Graph
import Control.Parallel.Strategies
parseInput = do
cas <- readInt
replicateM cas $ do
n <- readInt
k <- readInt
pts <- replicateM n ((,) <$> readInt <*> readInt)
return (k, pts)
where
readInt = state $ fromJust . BS.readInt . BS.dropWhile isSpace
readInteger = state $ fromJust . BS.readInteger . BS.dropWhile isSpace
readString = state $ BS.span (not . isSpace) . BS.dropWhile isSpace
readLine = state $ BS.span (not . isEoln) . BS.dropWhile isEoln
isEoln ch = ch == '\r' || ch == '\n'
main = do
input <- evalState parseInput <$> BS.getContents
let output = parMap rdeepseq solve input
forM_ (zip [1..] output) $ \(cas, result) -> do
putStrLn $ "Case #" ++ show cas ++ ": " ++ show result
solve (k, pts) = binarySearch ((<=k) . minNum pts) (0, 64000 + 10)
binarySearch :: (Int -> Bool) -> (Int, Int) -> Int
binarySearch check (lo, hi)
| lo == hi = lo
| check mid = binarySearch check (lo, mid)
| otherwise = binarySearch check (mid + 1, hi)
where
mid = (lo + hi) `div` 2
minNum :: [(Int, Int)] -> Int -> Int
minNum pts len = minSteps ((1 `shiftL` n) - 1)
where
xs = map head . group . sort $ [ x + dx | x <- map fst pts, dx <- [0, -len]]
ys = map head . group . sort $ [ y + dy | y <- map snd pts, dy <- [0, -len]]
n = length pts
calcMask :: (Int, Int) -> Int
calcMask (x, y) = foldl (.|.) 0 [ 1 `shiftL` i :: Int
| (i, pt) <- zip [0..] pts
, inRange ((x, y), (x+len, y+len)) pt
]
masks = map head . group . sort $ [ calcMask (x, y) | x <- xs, y <- ys]
inf = 10^9 :: Int
minSteps :: Int -> Int
minSteps = (cache!)
where
bnds = (0, (1 `shiftL` n) - 1)
cache = listArray bnds $ map go $ range bnds :: Array Int Int
go 0 = 0
go msk = foldl min inf lens + 1
where
lens = [ minSteps nmsk
| m <- masks
, let nmsk = msk .&. complement m
, nmsk /= msk
]
| null | https://raw.githubusercontent.com/chaoxu/fancy-walks/952fcc345883181144131f839aa61e36f488998d/code.google.com/codejam/Practice%20Contests/Practice%20Contest/B.hs | haskell | # OPTIONS_GHC -O2 # |
import Data.List
import Data.Maybe
import Data.Char
import Data.Array.IArray
import Data.Array.Unboxed (UArray)
import Data.Int
import Data.Ratio
import Data.Bits
import Data.Function
import Data.Ord
import Control.Monad.State
import Control.Monad
import Control.Applicative
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Sequence (Seq, (<|), (|>), (><), ViewL(..), ViewR(..))
import qualified Data.Sequence as Seq
import qualified Data.Foldable as F
import Data.Graph
import Control.Parallel.Strategies
parseInput = do
cas <- readInt
replicateM cas $ do
n <- readInt
k <- readInt
pts <- replicateM n ((,) <$> readInt <*> readInt)
return (k, pts)
where
readInt = state $ fromJust . BS.readInt . BS.dropWhile isSpace
readInteger = state $ fromJust . BS.readInteger . BS.dropWhile isSpace
readString = state $ BS.span (not . isSpace) . BS.dropWhile isSpace
readLine = state $ BS.span (not . isEoln) . BS.dropWhile isEoln
isEoln ch = ch == '\r' || ch == '\n'
main = do
input <- evalState parseInput <$> BS.getContents
let output = parMap rdeepseq solve input
forM_ (zip [1..] output) $ \(cas, result) -> do
putStrLn $ "Case #" ++ show cas ++ ": " ++ show result
solve (k, pts) = binarySearch ((<=k) . minNum pts) (0, 64000 + 10)
binarySearch :: (Int -> Bool) -> (Int, Int) -> Int
binarySearch check (lo, hi)
| lo == hi = lo
| check mid = binarySearch check (lo, mid)
| otherwise = binarySearch check (mid + 1, hi)
where
mid = (lo + hi) `div` 2
minNum :: [(Int, Int)] -> Int -> Int
minNum pts len = minSteps ((1 `shiftL` n) - 1)
where
xs = map head . group . sort $ [ x + dx | x <- map fst pts, dx <- [0, -len]]
ys = map head . group . sort $ [ y + dy | y <- map snd pts, dy <- [0, -len]]
n = length pts
calcMask :: (Int, Int) -> Int
calcMask (x, y) = foldl (.|.) 0 [ 1 `shiftL` i :: Int
| (i, pt) <- zip [0..] pts
, inRange ((x, y), (x+len, y+len)) pt
]
masks = map head . group . sort $ [ calcMask (x, y) | x <- xs, y <- ys]
inf = 10^9 :: Int
minSteps :: Int -> Int
minSteps = (cache!)
where
bnds = (0, (1 `shiftL` n) - 1)
cache = listArray bnds $ map go $ range bnds :: Array Int Int
go 0 = 0
go msk = foldl min inf lens + 1
where
lens = [ minSteps nmsk
| m <- masks
, let nmsk = msk .&. complement m
, nmsk /= msk
]
|
b8bb2382faa5cc82360cb96784dbd3bafbcd750ccbba4b8ffdee43fe4b06546d | nuvla/api-server | credential_infrastructure_service_openstack.clj | (ns sixsq.nuvla.server.resources.credential-infrastructure-service-openstack
"
Provides `docker-machine` credentials for OpenStack. The attribute names
correspond exactly to those required by `docker-machine`.
"
(:require
[sixsq.nuvla.server.resources.common.utils :as u]
[sixsq.nuvla.server.resources.credential :as p]
[sixsq.nuvla.server.resources.credential-template-infrastructure-service-openstack :as tpl]
[sixsq.nuvla.server.resources.resource-metadata :as md]
[sixsq.nuvla.server.resources.spec.credential-infrastructure-service-openstack :as service]
[sixsq.nuvla.server.util.metadata :as gen-md]))
;;
;; convert template to credential
;;
(defmethod p/tpl->credential tpl/credential-subtype
[{:keys [subtype
method
openstack-username
openstack-password
acl]} _request]
(let [resource (cond-> {:resource-type p/resource-type
:subtype subtype
:method method
:openstack-username openstack-username
:openstack-password openstack-password}
acl (assoc :acl acl))]
[nil resource]))
;;
;; multimethods for validation
;;
(def validate-fn (u/create-spec-validation-fn ::service/schema))
(defmethod p/validate-subtype tpl/credential-subtype
[resource]
(validate-fn resource))
(def create-validate-fn (u/create-spec-validation-fn ::service/schema-create))
(defmethod p/create-validate-subtype tpl/credential-subtype
[resource]
(create-validate-fn resource))
;;
;; initialization
;;
(def resource-metadata (gen-md/generate-metadata ::ns ::p/ns ::service/schema))
(defn initialize
[]
(md/register resource-metadata))
| null | https://raw.githubusercontent.com/nuvla/api-server/b45a97801f7225e3a5b8cd0c31bb971b42b2c90e/code/src/sixsq/nuvla/server/resources/credential_infrastructure_service_openstack.clj | clojure |
convert template to credential
multimethods for validation
initialization
| (ns sixsq.nuvla.server.resources.credential-infrastructure-service-openstack
"
Provides `docker-machine` credentials for OpenStack. The attribute names
correspond exactly to those required by `docker-machine`.
"
(:require
[sixsq.nuvla.server.resources.common.utils :as u]
[sixsq.nuvla.server.resources.credential :as p]
[sixsq.nuvla.server.resources.credential-template-infrastructure-service-openstack :as tpl]
[sixsq.nuvla.server.resources.resource-metadata :as md]
[sixsq.nuvla.server.resources.spec.credential-infrastructure-service-openstack :as service]
[sixsq.nuvla.server.util.metadata :as gen-md]))
(defmethod p/tpl->credential tpl/credential-subtype
[{:keys [subtype
method
openstack-username
openstack-password
acl]} _request]
(let [resource (cond-> {:resource-type p/resource-type
:subtype subtype
:method method
:openstack-username openstack-username
:openstack-password openstack-password}
acl (assoc :acl acl))]
[nil resource]))
(def validate-fn (u/create-spec-validation-fn ::service/schema))
(defmethod p/validate-subtype tpl/credential-subtype
[resource]
(validate-fn resource))
(def create-validate-fn (u/create-spec-validation-fn ::service/schema-create))
(defmethod p/create-validate-subtype tpl/credential-subtype
[resource]
(create-validate-fn resource))
(def resource-metadata (gen-md/generate-metadata ::ns ::p/ns ::service/schema))
(defn initialize
[]
(md/register resource-metadata))
|
367cf798ec42fc10db706c7de939cb7f7467aca3cf35a2d05ce063728ae6a693 | elaforge/karya | Integrate.hs | Copyright 2013
-- This program is distributed under the terms of the GNU General Public
-- License 3.0, see COPYING or -3.0.txt
| Cmd - level support for integration . These cmds interpret the output of
the calls in " Derive . Call . Integrate " to create score from deriver output
and merge it back into the current score .
An example of track integration :
- Add \ " | < \ " to a note track title , which causes damage and a rederive .
- The integrate call @<@ collects events and puts them into derive results ,
which go into , which winds up at ' integrate_tracks ' .
- ' integrate_tracks ' finds no existing derived tracks , so it merges into
[ ] , which creates new tracks , and damages the whole block .
- Then it sets ' Cmd.derive_immediately ' on the block , which removes the
usual derive wait .
- Derive once again emits integrate results , which winds up at
' integrate_tracks ' again , but since there are no changes this time , there
is no further damage , and derivation stops . This additional integration
just to find out there were no changes is inefficient , but not a big deal
since it only happens the first time .
Modify source track :
- Track damage causes a rederive , which causes the @<@ call to collect
integrated events .
- ' integrate_tracks ' merges the changes into the destination track ( or
tracks ) , which damages them .
- This time when the derive happens , since there was no damage on the
source track , it gets cached . The cache intentionally does n't retain
integrated events , so @<@ is skipped and I do n't get a second derivation .
Block integration is similar , except that I do n't get a double derivation
when the first new block is created , since the damage is separated to
a different block .
It might be a little more orthogonal to omit the thing where
I automatically create an integrated block or track if there are none , but
it 's convenient in practice . It does , however , make it tricky to undo
past the integrate , since if you undo the creation , the
integrate call is still there and just creates another . Be quick !
This also implements score integration , which is a higher level form of
integration that simply copies score events directly , without the
intervening derive step .
the calls in "Derive.Call.Integrate" to create score from deriver output
and merge it back into the current score.
An example of track integration:
- Add \" | <\" to a note track title, which causes damage and a rederive.
- The integrate call @<@ collects events and puts them into derive results,
which go into DeriveComplete, which winds up at 'integrate_tracks'.
- 'integrate_tracks' finds no existing derived tracks, so it merges into
[], which creates new tracks, and damages the whole block.
- Then it sets 'Cmd.derive_immediately' on the block, which removes the
usual derive wait.
- Derive once again emits integrate results, which winds up at
'integrate_tracks' again, but since there are no changes this time, there
is no further damage, and derivation stops. This additional integration
just to find out there were no changes is inefficient, but not a big deal
since it only happens the first time.
Modify source track:
- Track damage causes a rederive, which causes the @<@ call to collect
integrated events.
- 'integrate_tracks' merges the changes into the destination track (or
tracks), which damages them.
- This time when the derive happens, since there was no damage on the
source track, it gets cached. The cache intentionally doesn't retain
integrated events, so @<@ is skipped and I don't get a second derivation.
Block integration is similar, except that I don't get a double derivation
when the first new block is created, since the damage is separated to
a different block.
It might be a little more orthogonal to omit the thing where
I automatically create an integrated block or track if there are none, but
it's convenient in practice. It does, however, make it tricky to undo
past the integrate, since if you undo the block\/track creation, the
integrate call is still there and just creates another. Be quick!
This also implements score integration, which is a higher level form of
integration that simply copies score events directly, without the
intervening derive step.
-}
module Cmd.Integrate (cmd_integrate, score_integrate, manual_integrate) where
import qualified Data.Either as Either
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified Util.Log as Log
import qualified Util.Seq as Seq
import qualified Cmd.Cmd as Cmd
import qualified Cmd.Create as Create
import qualified Cmd.Integrate.Convert as Convert
import qualified Cmd.Integrate.Merge as Merge
import qualified Cmd.Msg as Msg
import qualified Derive.Derive as Derive
import qualified Ui.Block as Block
import qualified Ui.Ui as Ui
import qualified Ui.Update as Update
import Global
import Types
-- | Derive integrate takes the result of a derivation and merges it into
-- blocks or tracks which are marked as integrate destinations. A special
-- derive call captures events and saves them in 'Cmd.perf_integrated'.
cmd_integrate :: Cmd.M m => Msg.Msg -> m Cmd.Status
cmd_integrate (Msg.DeriveStatus block_id (Msg.DeriveComplete perf _)) = do
-- If a block or track wants to integrate twice with different events,
-- I don't know which ones to give to the destinations, and wind up
-- creating a new track every time.
let (dups, integrates) = Either.partitionEithers $ map is_dup $
Seq.group_stable Derive.integrated_source (Cmd.perf_integrated perf)
is_dup (x :| xs) = if null xs then Right x else Left x
unless (null dups) $
Log.warn $ "these blocks or tracks want to integrate twice: "
<> Text.intercalate ", "
(map (either pretty pretty . Derive.integrated_source) dups)
mapM_ (integrate block_id) integrates
return Cmd.Continue
cmd_integrate _ = return Cmd.Continue
-- | Integrate the track information into the current state.
integrate :: Cmd.M m => BlockId -> Derive.Integrated -> m ()
integrate derived_block_id integrated = do
tracks <- Convert.convert derived_block_id
(Derive.integrated_events integrated)
case Derive.integrated_source integrated of
Left block_id -> integrate_block block_id tracks
Right track_id -> integrate_tracks derived_block_id track_id tracks
| Update and replace the DeriveDestinations for the given TrackId .
-- A source track can have multiple destinations, and each of those is actually
a list of DeriveDestinations .
integrate_tracks :: Cmd.M m => BlockId -> TrackId -> Convert.Tracks -> m ()
integrate_tracks block_id track_id tracks = do
block <- Ui.get_block block_id
-- This means the < call on a non-top block emitted Cmd.perf_integrated.
unless (track_id `elem` Block.block_track_ids block) $
Cmd.throw $ "derivation of " <> pretty block_id <> " wanted to derive "
<> pretty track_id <> ", which is not in that block"
itracks <- Block.block_integrated_tracks <$> Ui.get_block block_id
let dests =
[ dests
| (source_id, Block.DeriveDestinations dests) <- itracks
, source_id == track_id
]
new_dests <- if null dests
then (:[]) <$> Merge.merge_tracks Merge.KeepTitles block_id tracks []
else mapM (Merge.merge_tracks Merge.KeepTitles block_id tracks) dests
unless (null new_dests) $
Log.notice $ "derive track integrate " <> pretty block_id <> " "
<> pretty track_id <> " to "
<> pretty (map (map (fst . Block.dest_note)) new_dests)
Ui.modify_integrated_tracks block_id $ replace track_id
[(track_id, Block.DeriveDestinations dests) | dests <- new_dests]
Cmd.derive_immediately [block_id]
-- | Look for blocks derived from this one and replace their contents, or
-- create a new block if there are no blocks derived from this one.
integrate_block :: Cmd.M m => BlockId -> Convert.Tracks -> m ()
integrate_block source_id tracks = do
blocks <- Ui.gets Ui.state_blocks
dest_blocks <- case integrated_from blocks of
[] -> do
(block_id, dests) <- Merge.create_block source_id tracks
Create.view block_id
return [(block_id, dests)]
integrated -> forM integrated $ \(dest_id, track_dests) ->
(,) dest_id <$> Merge.merge_block dest_id tracks track_dests
Log.notice $ "derive integrated " <> showt source_id <> " to "
<> pretty (map fst dest_blocks)
forM_ dest_blocks $ \(dest_block_id, track_dests) ->
Ui.set_integrated_block dest_block_id $
Just (source_id, Block.DeriveDestinations track_dests)
Cmd.derive_immediately (map fst dest_blocks)
where
integrated_from blocks =
[ (block_id, dests)
| (block_id, Just (source_block, Block.DeriveDestinations dests))
<- map (second Block.block_integrated) (Map.toList blocks)
, source_block == source_id
]
-- * score integrate
| For each block with ' Block . ScoreDestinations ' , figure out if their sources
-- have damage, and if so, re-integrate.
score_integrate :: [Update.UiUpdate] -> Ui.State
-> Either Ui.Error ([Log.Msg], Ui.State, Update.UiDamage)
score_integrate updates state = Ui.run_id state $ do
These both use the passed state instead of using Ui.get when figuring
-- out if there are updates that require integration. This way, a
-- track integrate can't trigger a block integrate, at least not until the
-- next call to this function.
track_logs <- concatMapM score_integrate_tracks $
needs_track_score_integrate updates state
block_logs <- mapM score_integrate_block $
needs_block_score_integrate updates state
return $ map (Log.msg Log.Notice Nothing) (track_logs ++ block_logs)
score_integrate_block :: Ui.M m => BlockId -> m Text
score_integrate_block source_id = do
blocks <- Ui.gets Ui.state_blocks
let integrated = integrated_from blocks
forM_ integrated $ \(dest_id, dests) -> do
dests <- Merge.score_merge_block source_id dest_id dests
Ui.set_integrated_block dest_id $
Just (source_id, Block.ScoreDestinations dests)
return $ "score integrated " <> showt source_id <> " to: "
<> pretty (map fst integrated)
where
integrated_from blocks =
[ (block_id, dests)
| (block_id, Just (source_block, Block.ScoreDestinations dests))
<- map (second Block.block_integrated) (Map.toList blocks)
, source_block == source_id
]
score_integrate_tracks :: Ui.M m => (BlockId, TrackId) -> m [Text]
score_integrate_tracks (block_id, track_id) = do
itracks <- Block.block_integrated_tracks <$> Ui.get_block block_id
let dests =
[ dests
| (source_id, Block.ScoreDestinations dests) <- itracks
, source_id == track_id
]
new_dests <- mapM (Merge.score_merge_tracks block_id track_id) dests
Ui.modify_integrated_tracks block_id $ replace track_id
[(track_id, Block.ScoreDestinations dests) | dests <- new_dests]
return $ map msg new_dests
where
msg dests = "score integrated " <> showt track_id <> ": "
<> Text.intercalate ", "
[ pretty source_id <> " -> " <> pretty dest_id
| (source_id, (dest_id, _)) <- dests
]
replace :: Eq key => key -> [(key, a)] -> [(key, a)] -> [(key, a)]
replace key new xs = new ++ filter ((/=key) . fst) xs
-- | Blocks which are block score integrate sources and have damage.
needs_block_score_integrate :: [Update.UiUpdate] -> Ui.State -> [BlockId]
needs_block_score_integrate updates state =
filter has_integrated $ Map.keys $ flip Map.restrictKeys damaged_blocks $
Ui.state_blocks state
where
TODO this is a linear search through all blocks
has_integrated block_id = not $ null
[ ()
| Just (dest_block_id, Block.ScoreDestinations {}) <-
map Block.block_integrated $ Map.elems (Ui.state_blocks state)
, block_id == dest_block_id
]
damaged_blocks = Set.fromList $ mapMaybe block_changed updates
block_changed (Update.Block bid _) = Just bid
block_changed _ = Nothing
-- | Tracks which are track score integrate sources and have damage.
needs_track_score_integrate :: [Update.UiUpdate] -> Ui.State
-> [(BlockId, TrackId)]
needs_track_score_integrate updates state = Seq.unique $
concatMap (integrated_blocks . fst) $ mapMaybe Update.track_changed updates
where
integrated_blocks track_id =
[ (block_id, track_id) | (block_id, block) <- blocks_with track_id
, has_integrated block track_id
]
TODO this is a linear search through all blocks , as is
-- Ui.blocks_with_track_id.
blocks_with track_id = filter (has_track track_id . snd) $ Map.toList $
Ui.state_blocks state
has_track track_id block = track_id `elem` Block.block_track_ids block
has_integrated block track_id = not $ null
[ ()
| (source_track_id, Block.ScoreDestinations {})
<- Block.block_integrated_tracks block
, track_id == source_track_id
]
-- * manual integrate
-- | Find blocks with the source key, and merge the given tracks into them.
--
-- If you are creating a new track, you need to have already done that and put
-- an empty destination in it. Otherwise, this will find no existing
-- destinations and do nothing.
manual_integrate :: Ui.M m => Block.SourceKey -> Convert.Track -- ^ note track
-> [Convert.Track] -- ^ dependent control tracks
-> m ()
manual_integrate key note controls = do
block_dests <- manual_destinations key . Map.toList <$>
Ui.gets Ui.state_blocks
forM_ block_dests $ \(block_id, dests) -> do
new_dests <- forM dests $ \dest ->
Merge.merge_tracks Merge.KeepTitles block_id [(note, controls)]
[dest]
Ui.set_integrated_manual block_id key (Just (concat new_dests))
-- | Find all manual derive destinations with the given key.
manual_destinations :: Block.SourceKey -> [(a, Block.Block)]
-> [(a, [Block.NoteDestination])]
manual_destinations key = filter (not . null . snd)
. map (second (Map.findWithDefault [] key . Block.block_integrated_manual))
| null | https://raw.githubusercontent.com/elaforge/karya/8ea15e6a5fb57e2f15f8c19836751e315f9c09f2/Cmd/Integrate.hs | haskell | This program is distributed under the terms of the GNU General Public
License 3.0, see COPYING or -3.0.txt
| Derive integrate takes the result of a derivation and merges it into
blocks or tracks which are marked as integrate destinations. A special
derive call captures events and saves them in 'Cmd.perf_integrated'.
If a block or track wants to integrate twice with different events,
I don't know which ones to give to the destinations, and wind up
creating a new track every time.
| Integrate the track information into the current state.
A source track can have multiple destinations, and each of those is actually
This means the < call on a non-top block emitted Cmd.perf_integrated.
| Look for blocks derived from this one and replace their contents, or
create a new block if there are no blocks derived from this one.
* score integrate
have damage, and if so, re-integrate.
out if there are updates that require integration. This way, a
track integrate can't trigger a block integrate, at least not until the
next call to this function.
| Blocks which are block score integrate sources and have damage.
| Tracks which are track score integrate sources and have damage.
Ui.blocks_with_track_id.
* manual integrate
| Find blocks with the source key, and merge the given tracks into them.
If you are creating a new track, you need to have already done that and put
an empty destination in it. Otherwise, this will find no existing
destinations and do nothing.
^ note track
^ dependent control tracks
| Find all manual derive destinations with the given key. | Copyright 2013
| Cmd - level support for integration . These cmds interpret the output of
the calls in " Derive . Call . Integrate " to create score from deriver output
and merge it back into the current score .
An example of track integration :
- Add \ " | < \ " to a note track title , which causes damage and a rederive .
- The integrate call @<@ collects events and puts them into derive results ,
which go into , which winds up at ' integrate_tracks ' .
- ' integrate_tracks ' finds no existing derived tracks , so it merges into
[ ] , which creates new tracks , and damages the whole block .
- Then it sets ' Cmd.derive_immediately ' on the block , which removes the
usual derive wait .
- Derive once again emits integrate results , which winds up at
' integrate_tracks ' again , but since there are no changes this time , there
is no further damage , and derivation stops . This additional integration
just to find out there were no changes is inefficient , but not a big deal
since it only happens the first time .
Modify source track :
- Track damage causes a rederive , which causes the @<@ call to collect
integrated events .
- ' integrate_tracks ' merges the changes into the destination track ( or
tracks ) , which damages them .
- This time when the derive happens , since there was no damage on the
source track , it gets cached . The cache intentionally does n't retain
integrated events , so @<@ is skipped and I do n't get a second derivation .
Block integration is similar , except that I do n't get a double derivation
when the first new block is created , since the damage is separated to
a different block .
It might be a little more orthogonal to omit the thing where
I automatically create an integrated block or track if there are none , but
it 's convenient in practice . It does , however , make it tricky to undo
past the integrate , since if you undo the creation , the
integrate call is still there and just creates another . Be quick !
This also implements score integration , which is a higher level form of
integration that simply copies score events directly , without the
intervening derive step .
the calls in "Derive.Call.Integrate" to create score from deriver output
and merge it back into the current score.
An example of track integration:
- Add \" | <\" to a note track title, which causes damage and a rederive.
- The integrate call @<@ collects events and puts them into derive results,
which go into DeriveComplete, which winds up at 'integrate_tracks'.
- 'integrate_tracks' finds no existing derived tracks, so it merges into
[], which creates new tracks, and damages the whole block.
- Then it sets 'Cmd.derive_immediately' on the block, which removes the
usual derive wait.
- Derive once again emits integrate results, which winds up at
'integrate_tracks' again, but since there are no changes this time, there
is no further damage, and derivation stops. This additional integration
just to find out there were no changes is inefficient, but not a big deal
since it only happens the first time.
Modify source track:
- Track damage causes a rederive, which causes the @<@ call to collect
integrated events.
- 'integrate_tracks' merges the changes into the destination track (or
tracks), which damages them.
- This time when the derive happens, since there was no damage on the
source track, it gets cached. The cache intentionally doesn't retain
integrated events, so @<@ is skipped and I don't get a second derivation.
Block integration is similar, except that I don't get a double derivation
when the first new block is created, since the damage is separated to
a different block.
It might be a little more orthogonal to omit the thing where
I automatically create an integrated block or track if there are none, but
it's convenient in practice. It does, however, make it tricky to undo
past the integrate, since if you undo the block\/track creation, the
integrate call is still there and just creates another. Be quick!
This also implements score integration, which is a higher level form of
integration that simply copies score events directly, without the
intervening derive step.
-}
module Cmd.Integrate (cmd_integrate, score_integrate, manual_integrate) where
import qualified Data.Either as Either
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified Util.Log as Log
import qualified Util.Seq as Seq
import qualified Cmd.Cmd as Cmd
import qualified Cmd.Create as Create
import qualified Cmd.Integrate.Convert as Convert
import qualified Cmd.Integrate.Merge as Merge
import qualified Cmd.Msg as Msg
import qualified Derive.Derive as Derive
import qualified Ui.Block as Block
import qualified Ui.Ui as Ui
import qualified Ui.Update as Update
import Global
import Types
cmd_integrate :: Cmd.M m => Msg.Msg -> m Cmd.Status
cmd_integrate (Msg.DeriveStatus block_id (Msg.DeriveComplete perf _)) = do
let (dups, integrates) = Either.partitionEithers $ map is_dup $
Seq.group_stable Derive.integrated_source (Cmd.perf_integrated perf)
is_dup (x :| xs) = if null xs then Right x else Left x
unless (null dups) $
Log.warn $ "these blocks or tracks want to integrate twice: "
<> Text.intercalate ", "
(map (either pretty pretty . Derive.integrated_source) dups)
mapM_ (integrate block_id) integrates
return Cmd.Continue
cmd_integrate _ = return Cmd.Continue
integrate :: Cmd.M m => BlockId -> Derive.Integrated -> m ()
integrate derived_block_id integrated = do
tracks <- Convert.convert derived_block_id
(Derive.integrated_events integrated)
case Derive.integrated_source integrated of
Left block_id -> integrate_block block_id tracks
Right track_id -> integrate_tracks derived_block_id track_id tracks
| Update and replace the DeriveDestinations for the given TrackId .
a list of DeriveDestinations .
integrate_tracks :: Cmd.M m => BlockId -> TrackId -> Convert.Tracks -> m ()
integrate_tracks block_id track_id tracks = do
block <- Ui.get_block block_id
unless (track_id `elem` Block.block_track_ids block) $
Cmd.throw $ "derivation of " <> pretty block_id <> " wanted to derive "
<> pretty track_id <> ", which is not in that block"
itracks <- Block.block_integrated_tracks <$> Ui.get_block block_id
let dests =
[ dests
| (source_id, Block.DeriveDestinations dests) <- itracks
, source_id == track_id
]
new_dests <- if null dests
then (:[]) <$> Merge.merge_tracks Merge.KeepTitles block_id tracks []
else mapM (Merge.merge_tracks Merge.KeepTitles block_id tracks) dests
unless (null new_dests) $
Log.notice $ "derive track integrate " <> pretty block_id <> " "
<> pretty track_id <> " to "
<> pretty (map (map (fst . Block.dest_note)) new_dests)
Ui.modify_integrated_tracks block_id $ replace track_id
[(track_id, Block.DeriveDestinations dests) | dests <- new_dests]
Cmd.derive_immediately [block_id]
integrate_block :: Cmd.M m => BlockId -> Convert.Tracks -> m ()
integrate_block source_id tracks = do
blocks <- Ui.gets Ui.state_blocks
dest_blocks <- case integrated_from blocks of
[] -> do
(block_id, dests) <- Merge.create_block source_id tracks
Create.view block_id
return [(block_id, dests)]
integrated -> forM integrated $ \(dest_id, track_dests) ->
(,) dest_id <$> Merge.merge_block dest_id tracks track_dests
Log.notice $ "derive integrated " <> showt source_id <> " to "
<> pretty (map fst dest_blocks)
forM_ dest_blocks $ \(dest_block_id, track_dests) ->
Ui.set_integrated_block dest_block_id $
Just (source_id, Block.DeriveDestinations track_dests)
Cmd.derive_immediately (map fst dest_blocks)
where
integrated_from blocks =
[ (block_id, dests)
| (block_id, Just (source_block, Block.DeriveDestinations dests))
<- map (second Block.block_integrated) (Map.toList blocks)
, source_block == source_id
]
| For each block with ' Block . ScoreDestinations ' , figure out if their sources
score_integrate :: [Update.UiUpdate] -> Ui.State
-> Either Ui.Error ([Log.Msg], Ui.State, Update.UiDamage)
score_integrate updates state = Ui.run_id state $ do
These both use the passed state instead of using Ui.get when figuring
track_logs <- concatMapM score_integrate_tracks $
needs_track_score_integrate updates state
block_logs <- mapM score_integrate_block $
needs_block_score_integrate updates state
return $ map (Log.msg Log.Notice Nothing) (track_logs ++ block_logs)
score_integrate_block :: Ui.M m => BlockId -> m Text
score_integrate_block source_id = do
blocks <- Ui.gets Ui.state_blocks
let integrated = integrated_from blocks
forM_ integrated $ \(dest_id, dests) -> do
dests <- Merge.score_merge_block source_id dest_id dests
Ui.set_integrated_block dest_id $
Just (source_id, Block.ScoreDestinations dests)
return $ "score integrated " <> showt source_id <> " to: "
<> pretty (map fst integrated)
where
integrated_from blocks =
[ (block_id, dests)
| (block_id, Just (source_block, Block.ScoreDestinations dests))
<- map (second Block.block_integrated) (Map.toList blocks)
, source_block == source_id
]
score_integrate_tracks :: Ui.M m => (BlockId, TrackId) -> m [Text]
score_integrate_tracks (block_id, track_id) = do
itracks <- Block.block_integrated_tracks <$> Ui.get_block block_id
let dests =
[ dests
| (source_id, Block.ScoreDestinations dests) <- itracks
, source_id == track_id
]
new_dests <- mapM (Merge.score_merge_tracks block_id track_id) dests
Ui.modify_integrated_tracks block_id $ replace track_id
[(track_id, Block.ScoreDestinations dests) | dests <- new_dests]
return $ map msg new_dests
where
msg dests = "score integrated " <> showt track_id <> ": "
<> Text.intercalate ", "
[ pretty source_id <> " -> " <> pretty dest_id
| (source_id, (dest_id, _)) <- dests
]
replace :: Eq key => key -> [(key, a)] -> [(key, a)] -> [(key, a)]
replace key new xs = new ++ filter ((/=key) . fst) xs
needs_block_score_integrate :: [Update.UiUpdate] -> Ui.State -> [BlockId]
needs_block_score_integrate updates state =
filter has_integrated $ Map.keys $ flip Map.restrictKeys damaged_blocks $
Ui.state_blocks state
where
TODO this is a linear search through all blocks
has_integrated block_id = not $ null
[ ()
| Just (dest_block_id, Block.ScoreDestinations {}) <-
map Block.block_integrated $ Map.elems (Ui.state_blocks state)
, block_id == dest_block_id
]
damaged_blocks = Set.fromList $ mapMaybe block_changed updates
block_changed (Update.Block bid _) = Just bid
block_changed _ = Nothing
needs_track_score_integrate :: [Update.UiUpdate] -> Ui.State
-> [(BlockId, TrackId)]
needs_track_score_integrate updates state = Seq.unique $
concatMap (integrated_blocks . fst) $ mapMaybe Update.track_changed updates
where
integrated_blocks track_id =
[ (block_id, track_id) | (block_id, block) <- blocks_with track_id
, has_integrated block track_id
]
TODO this is a linear search through all blocks , as is
blocks_with track_id = filter (has_track track_id . snd) $ Map.toList $
Ui.state_blocks state
has_track track_id block = track_id `elem` Block.block_track_ids block
has_integrated block track_id = not $ null
[ ()
| (source_track_id, Block.ScoreDestinations {})
<- Block.block_integrated_tracks block
, track_id == source_track_id
]
-> m ()
manual_integrate key note controls = do
block_dests <- manual_destinations key . Map.toList <$>
Ui.gets Ui.state_blocks
forM_ block_dests $ \(block_id, dests) -> do
new_dests <- forM dests $ \dest ->
Merge.merge_tracks Merge.KeepTitles block_id [(note, controls)]
[dest]
Ui.set_integrated_manual block_id key (Just (concat new_dests))
manual_destinations :: Block.SourceKey -> [(a, Block.Block)]
-> [(a, [Block.NoteDestination])]
manual_destinations key = filter (not . null . snd)
. map (second (Map.findWithDefault [] key . Block.block_integrated_manual))
|
5372f3e1066df06f7b365a2d64fd8e084a019fe2447773d838783e5b8d3d3d37 | MLstate/opalang | ocamlWalk.ml |
Copyright © 2011 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
CF mli
(* depends *)
module List = Base.List
(* refactoring in progress *)
module OcamlAst = Ocaml
(* shorthands *)
module O = OcamlAst
(* alias *)
(* -- *)
module Ty_Subs : TraverseInterface.S2
with type 'a t = OcamlAst.type_expr constraint 'a = _ * _ * _ =
struct
type 'a t = OcamlAst.type_expr constraint 'a = _ * _ * _
let foldmap tra acc ty =
match ty with
| O.TypeVar _ ->
acc, ty
| O.TypeName (params, type_name) ->
let acc, f_params = List.fold_left_map_stable tra acc params in
acc,
if params == f_params then ty else
O.TypeName (f_params, type_name)
| O.TypeConst _ ->
acc, ty
| O.TypeRef tr ->
let acc, f_tr = tra acc tr in
acc,
if tr == f_tr then ty else
O.TypeRef f_tr
| O.TypeTuple tyl ->
let acc, f_tyl = List.fold_left_map_stable tra acc tyl in
acc,
if tyl == f_tyl then ty else
O.TypeTuple f_tyl
| O.TypeRecord fields ->
let fmap acc ((bool, field, ty) as tpl) =
let acc, fty = tra acc ty in
acc,
if ty = fty then tpl else (bool, field, fty)
in
let acc, f_fields = List.fold_left_map_stable fmap acc fields in
acc,
if fields == f_fields then ty else
O.TypeRecord f_fields
| O.TypeConstructor ctl ->
let fmap acc ((k, opt) as cpl) =
let acc, f_opt = Option.foldmap_stable tra acc opt in
acc,
if opt == f_opt then cpl else (k, f_opt)
in
let acc, fctl = List.fold_left_map_stable fmap acc ctl in
acc,
if ctl == fctl then ty else
O.TypeConstructor fctl
| O.TypeArrow (a, b) ->
let acc, fa = tra acc a in
let acc, fb = tra acc b in
acc,
if a == fa && b == fb then ty else
O.TypeArrow (fa, fb)
| O.TypeLabel (bool, label, tb) ->
let acc, ftb = tra acc tb in
acc,
if tb == ftb then ty else
O.TypeLabel (bool, label, ftb)
| O.TypeVerbatim _ ->
acc, ty
let iter x = Traverse.Unoptimized.iter foldmap x
let map x = Traverse.Unoptimized.map foldmap x
let fold x = Traverse.Unoptimized.fold foldmap x
end
module Ty = Traverse.Make2 ( Ty_Subs )
module Pat_Subs : TraverseInterface.S2
with type 'a t = OcamlAst.pattern constraint 'a = _ * _ * _ =
struct
type 'a t = OcamlAst.pattern constraint 'a = _ * _ * _
let foldmap tra acc pat =
match pat with
| O.PatVar _ ->
acc, pat
| O.PatList (hd, tl) ->
let acc, fhd = tra acc hd in
let acc, ftl = tra acc tl in
acc,
if hd == fhd && tl == ftl then pat else
O.PatList (fhd, ftl)
| O.PatEmptyList ->
acc, pat
| O.PatRecord fields ->
let fmap acc ((label, pat) as cpl) =
let acc, fpat = tra acc pat in
acc,
if pat == fpat then cpl else (label, fpat)
in
let acc, f_fields = List.fold_left_map_stable fmap acc fields in
acc,
if fields == f_fields then pat else
O.PatRecord f_fields
| O.PatConstructor (ident, ptl) ->
let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
acc,
if ptl == f_ptl then pat else
O.PatConstructor (ident, f_ptl)
| O.PatVariant (ident, ptl) ->
let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
acc,
if ptl == f_ptl then pat else
O.PatVariant (ident, f_ptl)
| O.PatPVariant (ident, ptl) ->
let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
acc,
if ptl == f_ptl then pat else
O.PatPVariant (ident, f_ptl)
| O.PatConst _ ->
acc, pat
| O.PatAny ->
acc, pat
| O.PatAnnot (pa, ty) ->
let acc, fpa = tra acc pa in
acc,
if pa == fpa then pat else
O.PatAnnot (fpa, ty)
| O.PatTuple ptl ->
let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
acc,
if ptl == f_ptl then pat else
O.PatTuple f_ptl
| O.PatAs (pa, ident) ->
let acc, fpa = tra acc pa in
acc,
if pa == fpa then pat else
O.PatAs (fpa, ident)
| O.PatArray ptl ->
let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
acc,
if ptl == f_ptl then pat else
O.PatArray f_ptl
| O.PatLazy p ->
let acc, fp = tra acc p in
acc,
if p == fp then pat else
O.PatLazy fp
| O.PatOr ptl ->
let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
acc,
if ptl == f_ptl then pat else
O.PatOr f_ptl
let iter x = Traverse.Unoptimized.iter foldmap x
let map x = Traverse.Unoptimized.map foldmap x
let fold x = Traverse.Unoptimized.fold foldmap x
end
module Pat = Traverse.Make2 ( Pat_Subs )
module Expr_Subs : TraverseInterface.S2
with type 'a t = OcamlAst.expr constraint 'a = _ * _ * _ =
struct
type 'a t = OcamlAst.expr constraint 'a = _ * _ * _
let rec foldmap tra acc expr =
match expr with
| O.Type _ ->
acc, expr
| O.Val _ ->
acc, expr
| O.Open _ ->
acc, expr
| O.Module (name, expr2, code, expr3) ->
let acc, fexpr2 = Option.foldmap_stable tra acc expr2 in
let acc, fcode = foldmap_code tra acc code in
let acc, fexpr3 = Option.foldmap_stable tra acc expr3 in
acc,
if expr2 == fexpr2 && code == fcode && expr3 == fexpr3 then expr else
O.Module (name, fexpr2, fcode, fexpr3)
| O.ModuleType (name, code) ->
let acc, fcode = foldmap_code tra acc code in
acc,
if code == fcode then expr else
O.ModuleType (name, fcode)
| O.Structure code ->
let acc, fcode = foldmap_code tra acc code in
acc,
if code == fcode then expr else
O.Structure fcode
| O.Signature signature ->
let acc, fsignature = foldmap_signature tra acc signature in
acc,
if signature == fsignature then expr else
O.Signature fsignature
| O.DeclareFunctor (name, seol, eo, e) ->
let fmap acc ((s, (eo : O.expr option)) as cpl) =
let acc, feo = Option.foldmap_stable tra acc eo in
acc,
if eo == feo then cpl else (s, feo)
in
let acc, fseol = List.fold_left_map_stable fmap acc seol in
let acc, feo = Option.foldmap_stable tra acc eo in
let acc, fe = tra acc e in
acc,
if seol == fseol && eo == feo && e == fe then expr else
O.DeclareFunctor (name, fseol, feo, fe)
| O.Constructor (ident, el) ->
let acc, fel = List.fold_left_map_stable tra acc el in
acc,
if el == fel then expr else
O.Constructor (ident, fel)
| O.ConstructorPV (ident, el) ->
let acc, fel = List.fold_left_map_stable tra acc el in
acc,
if el == fel then expr else
O.ConstructorPV (ident, fel)
| O.Const _ ->
acc, expr
| O.Var ep ->
let acc, fep = foldmap_effective_param tra acc ep in
acc,
if ep == fep then expr else
O.Var fep
| O.MakeRef e ->
let acc, fe = tra acc e in
acc,
if e == fe then expr else
O.MakeRef fe
| O.GetRef e ->
let acc, fe = tra acc e in
acc,
if e == fe then expr else
O.GetRef fe
| O.SetRef (a, b) ->
let acc, fa = tra acc a in
let acc, fb = tra acc b in
acc,
if a == fa && b == fb then expr else
O.SetRef (fa, fb)
| O.SetMutable (a, b) ->
let acc, fa = tra acc a in
let acc, fb = tra acc b in
acc,
if a == fa && b == fb then expr else
O.SetMutable (fa, fb)
| O.Lazy e ->
let acc, fe = tra acc e in
acc,
if e == fe then expr else
O.Lazy fe
| O.Tuple el ->
let acc, fel = List.fold_left_map_stable tra acc el in
acc,
if el == fel then expr else
O.Tuple fel
| O.Cons (a, b) ->
let acc, fa = tra acc a in
let acc, fb = tra acc b in
acc,
if a == fa && b == fb then expr else
O.Cons (fa, fb)
| O.EmptyList ->
acc, expr
| O.Cond (a, b, c) ->
let acc, fa = tra acc a in
let acc, fb = tra acc b in
let acc, fc = tra acc c in
acc,
if a == fa && b == fb && c == fc then expr else
O.Cond (a, b, c)
| O.App (a, b) ->
let acc, fa = tra acc a in
let acc, fb = tra acc b in
acc,
if a == fa && b == fb then expr else
O.App (fa, fb)
| O.Abs (fps, e) ->
let acc, f_fps = List.fold_left_map_stable (foldmap_formal_param tra) acc fps in
let acc, fe = tra acc e in
acc,
if fps == f_fps && e == fe then expr else
O.Abs (f_fps, fe)
| O.Let bind ->
let fmap acc ((fp, e) as cpl) =
let acc, f_fp = foldmap_formal_param tra acc fp in
let acc, fe = tra acc e in
acc,
if fp == f_fp && e == fe then cpl else (f_fp, fe)
in
let acc, fbind = List.fold_left_map_stable fmap acc bind in
acc,
if bind == fbind then expr else
O.Let fbind
| O.Letrec bind ->
let fmap acc ((fp, e) as cpl) =
let acc, f_fp = foldmap_formal_param tra acc fp in
let acc, fe = tra acc e in
acc,
if fp == f_fp && e == fe then cpl else (f_fp, fe)
in
let acc, fbind = List.fold_left_map_stable fmap acc bind in
acc,
if bind == fbind then expr else
O.Letrec fbind
| O.Letin (bind, e) ->
let fmap acc ((fp, e) as cpl) =
let acc, f_fp = foldmap_formal_param tra acc fp in
let acc, fe = tra acc e in
acc,
if fp == f_fp && e == fe then cpl else (f_fp, fe)
in
let acc, fbind = List.fold_left_map_stable fmap acc bind in
let acc, fe = tra acc e in
acc,
if bind == fbind && e == fe then expr else
O.Letin (fbind, fe)
| O.Letrecin (bind, e) ->
let fmap acc ((fp, e) as cpl) =
let acc, f_fp = foldmap_formal_param tra acc fp in
let acc, fe = tra acc e in
acc,
if fp == f_fp && e == fe then cpl else (f_fp, fe)
in
let acc, fbind = List.fold_left_map_stable fmap acc bind in
let acc, fe = tra acc e in
acc,
if bind == fbind && e == fe then expr else
O.Letrecin (fbind, fe)
| O.Record (rec_opt, fields) ->
let fmap acc ((f, e) as cpl) =
let acc, fe = tra acc e in
acc,
if e == fe then cpl else (f, fe)
in
let acc, f_fields = List.fold_left_map_stable fmap acc fields in
acc,
if fields == f_fields then expr else
O.Record (rec_opt, f_fields)
| O.Dot (e, f) ->
let acc, fe = tra acc e in
acc,
if e == fe then expr else
O.Dot (fe, f)
| O.Match (e, pl) ->
let fmap acc ( (p, g, e) as tpl )=
let acc, fg = Option.foldmap_stable tra acc g in
let acc, fe = tra acc e in
acc,
if g == fg && e == fe then tpl else (p, fg, fe)
in
let acc, fe = tra acc e in
let acc, fpl = List.fold_left_map_stable fmap acc pl in
acc,
if e == fe && pl == fpl then expr else
O.Match (fe, fpl)
| O.Sequence (a, b) ->
let acc, fa = tra acc a in
let acc, fb = tra acc b in
acc,
if a == fa && b == fb then expr else
O.Sequence (fa, fb)
| O.Annot (e, ty) ->
let acc, fe = tra acc e in
acc,
if e == fe then expr else
O.Annot (fe, ty)
| O.Function fpel ->
let fmap acc ( (p, g, e) as tpl )=
let acc, fg = Option.foldmap_stable tra acc g in
let acc, fe = tra acc e in
acc,
if g == fg && e == fe then tpl else (p, fg, fe)
in
let acc, f_fpel = List.fold_left_map_stable fmap acc fpel in
acc,
if fpel == f_fpel then expr else
O.Function f_fpel
| O.Exception _ ->
acc, expr
| O.Raise (ident, eo) ->
let acc, feo = Option.foldmap_stable tra acc eo in
acc,
if eo == feo then expr else
O.Raise (ident, feo)
| O.Try (e, pl) ->
let fmap acc ( (p, g, e) as tpl )=
let acc, fg = Option.foldmap_stable tra acc g in
let acc, fe = tra acc e in
acc,
if g == fg && e == fe then tpl else (p, fg, fe)
in
let acc, fe = tra acc e in
let acc, fpl = List.fold_left_map_stable fmap acc pl in
acc,
if e == fe && pl == fpl then expr else
O.Try (fe, fpl)
| O.AnArray el ->
let acc, fel = List.fold_left_map_stable tra acc el in
acc,
if el == fel then expr else
O.AnArray fel
| O.Comment _ ->
acc, expr
| O.LineAnnot (i, s, e) ->
let acc, fe = tra acc e in
acc,
if e == fe then expr else
O.LineAnnot (i, s, fe)
| O.Comments (s, e) ->
let acc, fe = tra acc e in
acc,
if e == fe then expr else
O.Comments (s, fe)
| O.Assert e ->
let acc, fe = tra acc e in
acc,
if e == fe then expr else
O.Assert fe
| O.Verbatim _ ->
acc, expr
and foldmap_formal_param tra acc fp =
match fp with
| O.Label _ ->
acc, fp
| O.Opt (label, ty, expr) ->
let acc, fexpr = Option.foldmap_stable tra acc expr in
acc,
if expr == fexpr then fp else
O.Opt (label, ty, fexpr)
| O.Pat _ ->
acc, fp
and foldmap_effective_param tra acc ep =
match ep with
| O.Labeled (label, expr) ->
let acc, fexpr = Option.foldmap_stable tra acc expr in
acc,
if expr == fexpr then ep else
O.Labeled (label, fexpr)
| O.Pated _ ->
acc, ep
and foldmap_code tra acc code = List.fold_left_map_stable tra acc code
and foldmap_signature tra acc sign =
match sign with
| O.Inlined code ->
let acc, fcode = foldmap_code tra acc code in
acc,
if code == fcode then sign else
O.Inlined fcode
| O.Referenced _ ->
acc, sign
let iter x = Traverse.Unoptimized.iter foldmap x
let map x = Traverse.Unoptimized.map foldmap x
let fold x = Traverse.Unoptimized.fold foldmap x
end
module Expr = Traverse.Make2 ( Expr_Subs )
module PatExpr =
struct
let formal_param_pat_non_rec f_pat acc fp =
match fp with
| O.Label (s, pat, t) ->
let acc, f_pat = Option.foldmap_stable f_pat acc pat in
acc,
if pat == f_pat then fp else
O.Label (s, f_pat, t)
| O.Opt _ ->
acc, fp
| O.Pat pat ->
let acc, f_pat = f_pat acc pat in
acc,
if pat == f_pat then fp else
O.Pat f_pat
let fmap_fp_e f_pat acc ((fp, e) as cpl) =
let acc, f_fp = formal_param_pat_non_rec f_pat acc fp in
acc,
if fp == f_fp then cpl else (f_fp, e)
let fmap_pge f_pat acc ((p, g, e) as tpl) =
let acc, fp = f_pat acc p in
acc,
if p == fp then tpl else (fp, g, e)
let foldmap_expr_pat_non_rec f_expr f_pat acc expr =
let foldmap acc expr =
let acc, expr =
match expr with
| O.Abs (fpl, e) ->
let acc, f_fpl = List.fold_left_map_stable (formal_param_pat_non_rec f_pat) acc fpl in
acc,
if fpl == f_fpl then expr else
O.Abs (f_fpl, e)
| O.Let fpel ->
let acc, f_fpel = List.fold_left_map_stable (fmap_fp_e f_pat) acc fpel in
acc,
if fpel == f_fpel then expr else
O.Let f_fpel
| O.Letrec fpel ->
let acc, f_fpel = List.fold_left_map_stable (fmap_fp_e f_pat) acc fpel in
acc,
if fpel == f_fpel then expr else
O.Letrec f_fpel
| O.Letin (fpel, e) ->
let acc, f_fpel = List.fold_left_map_stable (fmap_fp_e f_pat) acc fpel in
acc,
if fpel == f_fpel then expr else
O.Letin (f_fpel, e)
| O.Letrecin (fpel, e) ->
let acc, f_fpel = List.fold_left_map_stable (fmap_fp_e f_pat) acc fpel in
acc,
if fpel == f_fpel then expr else
O.Letrecin (f_fpel, e)
| O.Match (e, pgel) ->
let acc, f_pgel = List.fold_left_map_stable (fmap_pge f_pat) acc pgel in
acc,
if pgel == f_pgel then expr else
O.Match (e, f_pgel)
| O.Function pgel ->
let acc, f_pgel = List.fold_left_map_stable (fmap_pge f_pat) acc pgel in
acc,
if pgel == f_pgel then expr else
O.Function f_pgel
| O.Try (e, pgel) ->
let acc, f_pgel = List.fold_left_map_stable (fmap_pge f_pat) acc pgel in
acc,
if pgel == f_pgel then expr else
O.Try (e, f_pgel)
| _ -> acc, expr
in
f_expr acc expr
in
Expr.foldmap foldmap acc expr
(* Public drivers: wrap the expression/pattern co-traversal so that
   [f_pat] is applied recursively inside every pattern via [Pat.foldmap]. *)
let foldmap f_expr f_pat acc expr =
  let f_pat acc pat = Pat.foldmap f_pat acc pat in
  foldmap_expr_pat_non_rec f_expr f_pat acc expr

(* [fold] is [foldmap] restricted to the accumulator: the visitors keep
   the node unchanged and only thread [acc]. *)
let fold f_expr f_pat acc expr =
  let f_expr acc expr = f_expr acc expr, expr in
  let f_pat acc pat = f_pat acc pat, pat in
  let acc, _ = foldmap f_expr f_pat acc expr in
  acc

(* [map] is [foldmap] with a unit accumulator. *)
let map f_expr f_pat expr =
  let f_expr () expr = (), f_expr expr in
  let f_pat () pat = (), f_pat pat in
  let (), expr = foldmap f_expr f_pat () expr in
  expr

(* [iter] is [map] used for its side effects only; results are discarded. *)
let iter f_expr f_pat expr =
  let f_expr expr = let () = f_expr expr in expr in
  let f_pat pat = let () = f_pat pat in pat in
  let _ = map f_expr f_pat expr in
  ()

(* Lifting of the four traversals to whole lists of toplevel phrases. *)
let foldmap_code f_expr f_pat acc code = List.fold_left_map_stable (foldmap f_expr f_pat) acc code
let fold_code f_expr f_pat acc code = List.fold_left (fold f_expr f_pat) acc code
let map_code f_expr f_pat code = List.map_stable (map f_expr f_pat) code
let iter_code f_expr f_pat code = List.iter (iter f_expr f_pat) code
end
| null | https://raw.githubusercontent.com/MLstate/opalang/424b369160ce693406cece6ac033d75d85f5df4f/compiler/ocamllang/ocamlWalk.ml | ocaml | depends
refactoring in progress
shorthands
alias
-- |
Copyright © 2011 MLstate

This file is part of Opa.

Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.

Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.

You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see <http://www.gnu.org/licenses/>.
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
CF mli
module List = Base.List
module OcamlAst = Ocaml
module O = OcamlAst
(* Traversal of [OcamlAst.type_expr]: a one-level foldmap that threads an
   accumulator through every immediate sub-type and rebuilds a node only
   when a child is physically different ([==]), preserving maximal sharing
   of unchanged subtrees. *)
module Ty_Subs : TraverseInterface.S2
  with type 'a t = OcamlAst.type_expr constraint 'a = _ * _ * _ =
struct
  type 'a t = OcamlAst.type_expr constraint 'a = _ * _ * _

  (* [foldmap tra acc ty] applies [tra] to the direct children of [ty].
     Leaf constructors (variables, constants, verbatim text) are returned
     as is without touching the accumulator. *)
  let foldmap tra acc ty =
    match ty with
    | O.TypeVar _ ->
      acc, ty
    | O.TypeName (params, type_name) ->
      let acc, f_params = List.fold_left_map_stable tra acc params in
      acc,
      if params == f_params then ty else
      O.TypeName (f_params, type_name)
    | O.TypeConst _ ->
      acc, ty
    | O.TypeRef tr ->
      let acc, f_tr = tra acc tr in
      acc,
      if tr == f_tr then ty else
      O.TypeRef f_tr
    | O.TypeTuple tyl ->
      let acc, f_tyl = List.fold_left_map_stable tra acc tyl in
      acc,
      if tyl == f_tyl then ty else
      O.TypeTuple f_tyl
| O.TypeRecord fields ->
  (* Stable map over record fields: keep the original triple when the
     field type is physically unchanged.  Uses physical equality [==]
     like every other case of this foldmap; the original used structural
     [=] here, which deep-compared entire types on every field and
     defeated the cheap sharing check the rest of the module relies on. *)
  let fmap acc ((bool, field, ty) as tpl) =
    let acc, fty = tra acc ty in
    acc,
    if ty == fty then tpl else (bool, field, fty)
  in
  let acc, f_fields = List.fold_left_map_stable fmap acc fields in
  acc,
  if fields == f_fields then ty else
  O.TypeRecord f_fields
    | O.TypeConstructor ctl ->
      (* Each constructor carries an optional argument type. *)
      let fmap acc ((k, opt) as cpl) =
        let acc, f_opt = Option.foldmap_stable tra acc opt in
        acc,
        if opt == f_opt then cpl else (k, f_opt)
      in
      let acc, fctl = List.fold_left_map_stable fmap acc ctl in
      acc,
      if ctl == fctl then ty else
      O.TypeConstructor fctl
    | O.TypeArrow (a, b) ->
      let acc, fa = tra acc a in
      let acc, fb = tra acc b in
      acc,
      if a == fa && b == fb then ty else
      O.TypeArrow (fa, fb)
    | O.TypeLabel (bool, label, tb) ->
      let acc, ftb = tra acc tb in
      acc,
      if tb == ftb then ty else
      O.TypeLabel (bool, label, ftb)
    | O.TypeVerbatim _ ->
      acc, ty

  (* Derived traversals, built from the one-level [foldmap] by the
     generic (unoptimized) combinators. *)
  let iter x = Traverse.Unoptimized.iter foldmap x
  let map x = Traverse.Unoptimized.map foldmap x
  let fold x = Traverse.Unoptimized.fold foldmap x
end
module Ty = Traverse.Make2 ( Ty_Subs )
(* Traversal of [OcamlAst.pattern]: one-level foldmap over the immediate
   sub-patterns of a node.  As everywhere in this file, nodes are rebuilt
   only when a child is physically different ([==]) so that unchanged
   subtrees keep maximal sharing. *)
module Pat_Subs : TraverseInterface.S2
  with type 'a t = OcamlAst.pattern constraint 'a = _ * _ * _ =
struct
  type 'a t = OcamlAst.pattern constraint 'a = _ * _ * _

  let foldmap tra acc pat =
    match pat with
    | O.PatVar _ ->
      acc, pat
    | O.PatList (hd, tl) ->
      let acc, fhd = tra acc hd in
      let acc, ftl = tra acc tl in
      acc,
      if hd == fhd && tl == ftl then pat else
      O.PatList (fhd, ftl)
    | O.PatEmptyList ->
      acc, pat
    | O.PatRecord fields ->
      (* [pat] inside [fmap] shadows the outer pattern on purpose: it is
         the per-field sub-pattern. *)
      let fmap acc ((label, pat) as cpl) =
        let acc, fpat = tra acc pat in
        acc,
        if pat == fpat then cpl else (label, fpat)
      in
      let acc, f_fields = List.fold_left_map_stable fmap acc fields in
      acc,
      if fields == f_fields then pat else
      O.PatRecord f_fields
    | O.PatConstructor (ident, ptl) ->
      let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
      acc,
      if ptl == f_ptl then pat else
      O.PatConstructor (ident, f_ptl)
    | O.PatVariant (ident, ptl) ->
      let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
      acc,
      if ptl == f_ptl then pat else
      O.PatVariant (ident, f_ptl)
    | O.PatPVariant (ident, ptl) ->
      let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
      acc,
      if ptl == f_ptl then pat else
      O.PatPVariant (ident, f_ptl)
    | O.PatConst _ ->
      acc, pat
    | O.PatAny ->
      acc, pat
    | O.PatAnnot (pa, ty) ->
      (* The type annotation itself is not visited by this traversal. *)
      let acc, fpa = tra acc pa in
      acc,
      if pa == fpa then pat else
      O.PatAnnot (fpa, ty)
    | O.PatTuple ptl ->
      let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
      acc,
      if ptl == f_ptl then pat else
      O.PatTuple f_ptl
    | O.PatAs (pa, ident) ->
      let acc, fpa = tra acc pa in
      acc,
      if pa == fpa then pat else
      O.PatAs (fpa, ident)
    | O.PatArray ptl ->
      let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
      acc,
      if ptl == f_ptl then pat else
      O.PatArray f_ptl
    | O.PatLazy p ->
      let acc, fp = tra acc p in
      acc,
      if p == fp then pat else
      O.PatLazy fp
    | O.PatOr ptl ->
      let acc, f_ptl = List.fold_left_map_stable tra acc ptl in
      acc,
      if ptl == f_ptl then pat else
      O.PatOr f_ptl

  (* Derived traversals built from the one-level [foldmap]. *)
  let iter x = Traverse.Unoptimized.iter foldmap x
  let map x = Traverse.Unoptimized.map foldmap x
  let fold x = Traverse.Unoptimized.fold foldmap x
end
module Pat = Traverse.Make2 ( Pat_Subs )
(* Traversal of [OcamlAst.expr]: one-level foldmap over the immediate
   sub-expressions of a node.  Patterns and types occurring inside
   expressions are NOT visited here (see [Pat] and [PatExpr] for that);
   only expression children are.  Nodes are rebuilt only when a child is
   physically different ([==]). *)
module Expr_Subs : TraverseInterface.S2
  with type 'a t = OcamlAst.expr constraint 'a = _ * _ * _ =
struct
  type 'a t = OcamlAst.expr constraint 'a = _ * _ * _

  let rec foldmap tra acc expr =
    match expr with
    | O.Type _ ->
      acc, expr
    | O.Val _ ->
      acc, expr
    | O.Open _ ->
      acc, expr
    | O.Module (name, expr2, code, expr3) ->
      (* Optional signature, body, optional functor application. *)
      let acc, fexpr2 = Option.foldmap_stable tra acc expr2 in
      let acc, fcode = foldmap_code tra acc code in
      let acc, fexpr3 = Option.foldmap_stable tra acc expr3 in
      acc,
      if expr2 == fexpr2 && code == fcode && expr3 == fexpr3 then expr else
      O.Module (name, fexpr2, fcode, fexpr3)
    | O.ModuleType (name, code) ->
      let acc, fcode = foldmap_code tra acc code in
      acc,
      if code == fcode then expr else
      O.ModuleType (name, fcode)
    | O.Structure code ->
      let acc, fcode = foldmap_code tra acc code in
      acc,
      if code == fcode then expr else
      O.Structure fcode
    | O.Signature signature ->
      let acc, fsignature = foldmap_signature tra acc signature in
      acc,
      if signature == fsignature then expr else
      O.Signature fsignature
    | O.DeclareFunctor (name, seol, eo, e) ->
      (* Functor parameters carry optional signature expressions. *)
      let fmap acc ((s, (eo : O.expr option)) as cpl) =
        let acc, feo = Option.foldmap_stable tra acc eo in
        acc,
        if eo == feo then cpl else (s, feo)
      in
      let acc, fseol = List.fold_left_map_stable fmap acc seol in
      let acc, feo = Option.foldmap_stable tra acc eo in
      let acc, fe = tra acc e in
      acc,
      if seol == fseol && eo == feo && e == fe then expr else
      O.DeclareFunctor (name, fseol, feo, fe)
    | O.Constructor (ident, el) ->
      let acc, fel = List.fold_left_map_stable tra acc el in
      acc,
      if el == fel then expr else
      O.Constructor (ident, fel)
    | O.ConstructorPV (ident, el) ->
      let acc, fel = List.fold_left_map_stable tra acc el in
      acc,
      if el == fel then expr else
      O.ConstructorPV (ident, fel)
    | O.Const _ ->
      acc, expr
    | O.Var ep ->
      (* A variable occurrence may be a labeled argument holding an
         expression; visit it. *)
      let acc, fep = foldmap_effective_param tra acc ep in
      acc,
      if ep == fep then expr else
      O.Var fep
    | O.MakeRef e ->
      let acc, fe = tra acc e in
      acc,
      if e == fe then expr else
      O.MakeRef fe
    | O.GetRef e ->
      let acc, fe = tra acc e in
      acc,
      if e == fe then expr else
      O.GetRef fe
    | O.SetRef (a, b) ->
      let acc, fa = tra acc a in
      let acc, fb = tra acc b in
      acc,
      if a == fa && b == fb then expr else
      O.SetRef (fa, fb)
    | O.SetMutable (a, b) ->
      let acc, fa = tra acc a in
      let acc, fb = tra acc b in
      acc,
      if a == fa && b == fb then expr else
      O.SetMutable (fa, fb)
    | O.Lazy e ->
      let acc, fe = tra acc e in
      acc,
      if e == fe then expr else
      O.Lazy fe
    | O.Tuple el ->
      let acc, fel = List.fold_left_map_stable tra acc el in
      acc,
      if el == fel then expr else
      O.Tuple fel
    | O.Cons (a, b) ->
      let acc, fa = tra acc a in
      let acc, fb = tra acc b in
      acc,
      if a == fa && b == fb then expr else
      O.Cons (fa, fb)
    | O.EmptyList ->
      acc, expr
| O.Cond (a, b, c) ->
  (* Fold over the condition and both branches, rebuilding only when a
     child changed.  The original returned [O.Cond (a, b, c)] in the
     rebuild arm, silently discarding the transformed sub-expressions
     [fa], [fb], [fc]: any rewrite applied under an if/then/else was
     lost while its accumulator effects were kept. *)
  let acc, fa = tra acc a in
  let acc, fb = tra acc b in
  let acc, fc = tra acc c in
  acc,
  if a == fa && b == fb && c == fc then expr else
  O.Cond (fa, fb, fc)
    | O.App (a, b) ->
      let acc, fa = tra acc a in
      let acc, fb = tra acc b in
      acc,
      if a == fa && b == fb then expr else
      O.App (fa, fb)
    | O.Abs (fps, e) ->
      (* Formal parameters may carry default-value expressions. *)
      let acc, f_fps = List.fold_left_map_stable (foldmap_formal_param tra) acc fps in
      let acc, fe = tra acc e in
      acc,
      if fps == f_fps && e == fe then expr else
      O.Abs (f_fps, fe)
    | O.Let bind ->
      (* Binding groups: visit each bound expression and the expression
         possibly embedded in its formal parameter. *)
      let fmap acc ((fp, e) as cpl) =
        let acc, f_fp = foldmap_formal_param tra acc fp in
        let acc, fe = tra acc e in
        acc,
        if fp == f_fp && e == fe then cpl else (f_fp, fe)
      in
      let acc, fbind = List.fold_left_map_stable fmap acc bind in
      acc,
      if bind == fbind then expr else
      O.Let fbind
    | O.Letrec bind ->
      let fmap acc ((fp, e) as cpl) =
        let acc, f_fp = foldmap_formal_param tra acc fp in
        let acc, fe = tra acc e in
        acc,
        if fp == f_fp && e == fe then cpl else (f_fp, fe)
      in
      let acc, fbind = List.fold_left_map_stable fmap acc bind in
      acc,
      if bind == fbind then expr else
      O.Letrec fbind
    | O.Letin (bind, e) ->
      let fmap acc ((fp, e) as cpl) =
        let acc, f_fp = foldmap_formal_param tra acc fp in
        let acc, fe = tra acc e in
        acc,
        if fp == f_fp && e == fe then cpl else (f_fp, fe)
      in
      let acc, fbind = List.fold_left_map_stable fmap acc bind in
      let acc, fe = tra acc e in
      acc,
      if bind == fbind && e == fe then expr else
      O.Letin (fbind, fe)
    | O.Letrecin (bind, e) ->
      let fmap acc ((fp, e) as cpl) =
        let acc, f_fp = foldmap_formal_param tra acc fp in
        let acc, fe = tra acc e in
        acc,
        if fp == f_fp && e == fe then cpl else (f_fp, fe)
      in
      let acc, fbind = List.fold_left_map_stable fmap acc bind in
      let acc, fe = tra acc e in
      acc,
      if bind == fbind && e == fe then expr else
      O.Letrecin (fbind, fe)
    | O.Record (rec_opt, fields) ->
      let fmap acc ((f, e) as cpl) =
        let acc, fe = tra acc e in
        acc,
        if e == fe then cpl else (f, fe)
      in
      let acc, f_fields = List.fold_left_map_stable fmap acc fields in
      acc,
      if fields == f_fields then expr else
      O.Record (rec_opt, f_fields)
    | O.Dot (e, f) ->
      let acc, fe = tra acc e in
      acc,
      if e == fe then expr else
      O.Dot (fe, f)
    | O.Match (e, pl) ->
      (* Clauses: guard and body are expressions; the pattern [p] is left
         untouched by this expression-only traversal. *)
      let fmap acc ((p, g, e) as tpl) =
        let acc, fg = Option.foldmap_stable tra acc g in
        let acc, fe = tra acc e in
        acc,
        if g == fg && e == fe then tpl else (p, fg, fe)
      in
      let acc, fe = tra acc e in
      let acc, fpl = List.fold_left_map_stable fmap acc pl in
      acc,
      if e == fe && pl == fpl then expr else
      O.Match (fe, fpl)
    | O.Sequence (a, b) ->
      let acc, fa = tra acc a in
      let acc, fb = tra acc b in
      acc,
      if a == fa && b == fb then expr else
      O.Sequence (fa, fb)
    | O.Annot (e, ty) ->
      let acc, fe = tra acc e in
      acc,
      if e == fe then expr else
      O.Annot (fe, ty)
    | O.Function fpel ->
      let fmap acc ((p, g, e) as tpl) =
        let acc, fg = Option.foldmap_stable tra acc g in
        let acc, fe = tra acc e in
        acc,
        if g == fg && e == fe then tpl else (p, fg, fe)
      in
      let acc, f_fpel = List.fold_left_map_stable fmap acc fpel in
      acc,
      if fpel == f_fpel then expr else
      O.Function f_fpel
    | O.Exception _ ->
      acc, expr
    | O.Raise (ident, eo) ->
      let acc, feo = Option.foldmap_stable tra acc eo in
      acc,
      if eo == feo then expr else
      O.Raise (ident, feo)
    | O.Try (e, pl) ->
      let fmap acc ((p, g, e) as tpl) =
        let acc, fg = Option.foldmap_stable tra acc g in
        let acc, fe = tra acc e in
        acc,
        if g == fg && e == fe then tpl else (p, fg, fe)
      in
      let acc, fe = tra acc e in
      let acc, fpl = List.fold_left_map_stable fmap acc pl in
      acc,
      if e == fe && pl == fpl then expr else
      O.Try (fe, fpl)
    | O.AnArray el ->
      let acc, fel = List.fold_left_map_stable tra acc el in
      acc,
      if el == fel then expr else
      O.AnArray fel
    | O.Comment _ ->
      acc, expr
    | O.LineAnnot (i, s, e) ->
      let acc, fe = tra acc e in
      acc,
      if e == fe then expr else
      O.LineAnnot (i, s, fe)
    | O.Comments (s, e) ->
      let acc, fe = tra acc e in
      acc,
      if e == fe then expr else
      O.Comments (s, fe)
    | O.Assert e ->
      let acc, fe = tra acc e in
      acc,
      if e == fe then expr else
      O.Assert fe
    | O.Verbatim _ ->
      acc, expr

  (* Expression possibly embedded in a formal parameter: only [Opt]
     carries a default-value expression; [Label] and [Pat] hold patterns,
     which this traversal does not visit. *)
  and foldmap_formal_param tra acc fp =
    match fp with
    | O.Label _ ->
      acc, fp
    | O.Opt (label, ty, expr) ->
      let acc, fexpr = Option.foldmap_stable tra acc expr in
      acc,
      if expr == fexpr then fp else
      O.Opt (label, ty, fexpr)
    | O.Pat _ ->
      acc, fp

  (* Expression possibly embedded in an effective (call-site) parameter. *)
  and foldmap_effective_param tra acc ep =
    match ep with
    | O.Labeled (label, expr) ->
      let acc, fexpr = Option.foldmap_stable tra acc expr in
      acc,
      if expr == fexpr then ep else
      O.Labeled (label, fexpr)
    | O.Pated _ ->
      acc, ep

  (* A code block is a list of toplevel expressions. *)
  and foldmap_code tra acc code = List.fold_left_map_stable tra acc code

  (* Signatures are either written inline (a code block) or referenced by
     name; only the inline form contains expressions to visit. *)
  and foldmap_signature tra acc sign =
    match sign with
    | O.Inlined code ->
      let acc, fcode = foldmap_code tra acc code in
      acc,
      if code == fcode then sign else
      O.Inlined fcode
    | O.Referenced _ ->
      acc, sign

  (* Derived traversals built from the one-level [foldmap]. *)
  let iter x = Traverse.Unoptimized.iter foldmap x
  let map x = Traverse.Unoptimized.map foldmap x
  let fold x = Traverse.Unoptimized.fold foldmap x
end
module Expr = Traverse.Make2 ( Expr_Subs )
(* Combined traversal of expressions together with the patterns they bind.
   Patterns occur in expressions only at binding sites (function formal
   parameters, let bindings, match/function/try clauses); this module
   walks expressions with [Expr] and applies a pattern visitor at each
   such site. *)
module PatExpr =
struct
  (* Apply [f_pat] (non recursively) to the pattern carried by a formal
     parameter, if any.  [Opt] parameters hold no pattern.  Note: the
     result of [Option.foldmap_stable] deliberately shadows [f_pat]. *)
  let formal_param_pat_non_rec f_pat acc fp =
    match fp with
    | O.Label (s, pat, t) ->
      let acc, f_pat = Option.foldmap_stable f_pat acc pat in
      acc,
      if pat == f_pat then fp else
      O.Label (s, f_pat, t)
    | O.Opt _ ->
      acc, fp
    | O.Pat pat ->
      let acc, f_pat = f_pat acc pat in
      acc,
      if pat == f_pat then fp else
      O.Pat f_pat

  (* Rewrite the formal-parameter side of a binding; the bound expression
     is handled by the surrounding expression traversal. *)
  let fmap_fp_e f_pat acc ((fp, e) as cpl) =
    let acc, f_fp = formal_param_pat_non_rec f_pat acc fp in
    acc,
    if fp == f_fp then cpl else (f_fp, e)

  (* Rewrite the pattern of a (pattern, guard, body) clause; guard and
     body are handled by the expression traversal. *)
  let fmap_pge f_pat acc ((p, g, e) as tpl) =
    let acc, fp = f_pat acc p in
    acc,
    if p == fp then tpl else (fp, g, e)

  (* Walk every expression with [Expr.foldmap]; at each binding construct,
     additionally apply [f_pat] (non recursively) to its patterns before
     handing the node to [f_expr]. *)
  let foldmap_expr_pat_non_rec f_expr f_pat acc expr =
    let foldmap acc expr =
      let acc, expr =
        match expr with
        | O.Abs (fpl, e) ->
          let acc, f_fpl = List.fold_left_map_stable (formal_param_pat_non_rec f_pat) acc fpl in
          acc,
          if fpl == f_fpl then expr else
          O.Abs (f_fpl, e)
        | O.Let fpel ->
          let acc, f_fpel = List.fold_left_map_stable (fmap_fp_e f_pat) acc fpel in
          acc,
          if fpel == f_fpel then expr else
          O.Let f_fpel
        | O.Letrec fpel ->
          let acc, f_fpel = List.fold_left_map_stable (fmap_fp_e f_pat) acc fpel in
          acc,
          if fpel == f_fpel then expr else
          O.Letrec f_fpel
        | O.Letin (fpel, e) ->
          let acc, f_fpel = List.fold_left_map_stable (fmap_fp_e f_pat) acc fpel in
          acc,
          if fpel == f_fpel then expr else
          O.Letin (f_fpel, e)
        | O.Letrecin (fpel, e) ->
          let acc, f_fpel = List.fold_left_map_stable (fmap_fp_e f_pat) acc fpel in
          acc,
          if fpel == f_fpel then expr else
          O.Letrecin (f_fpel, e)
        | O.Match (e, pgel) ->
          let acc, f_pgel = List.fold_left_map_stable (fmap_pge f_pat) acc pgel in
          acc,
          if pgel == f_pgel then expr else
          O.Match (e, f_pgel)
        | O.Function pgel ->
          let acc, f_pgel = List.fold_left_map_stable (fmap_pge f_pat) acc pgel in
          acc,
          if pgel == f_pgel then expr else
          O.Function f_pgel
        | O.Try (e, pgel) ->
          let acc, f_pgel = List.fold_left_map_stable (fmap_pge f_pat) acc pgel in
          acc,
          if pgel == f_pgel then expr else
          O.Try (e, f_pgel)
        | _ -> acc, expr
      in
      f_expr acc expr
    in
    Expr.foldmap foldmap acc expr

  (* Public drivers: wrap the co-traversal so that [f_pat] is applied
     recursively inside every pattern via [Pat.foldmap]. *)
  let foldmap f_expr f_pat acc expr =
    let f_pat acc pat = Pat.foldmap f_pat acc pat in
    foldmap_expr_pat_non_rec f_expr f_pat acc expr

  (* [fold] is [foldmap] restricted to the accumulator. *)
  let fold f_expr f_pat acc expr =
    let f_expr acc expr = f_expr acc expr, expr in
    let f_pat acc pat = f_pat acc pat, pat in
    let acc, _ = foldmap f_expr f_pat acc expr in
    acc

  (* [map] is [foldmap] with a unit accumulator. *)
  let map f_expr f_pat expr =
    let f_expr () expr = (), f_expr expr in
    let f_pat () pat = (), f_pat pat in
    let (), expr = foldmap f_expr f_pat () expr in
    expr

  (* [iter] is [map] used for its side effects only. *)
  let iter f_expr f_pat expr =
    let f_expr expr = let () = f_expr expr in expr in
    let f_pat pat = let () = f_pat pat in pat in
    let _ = map f_expr f_pat expr in
    ()

  (* Lifting of the traversals to whole lists of toplevel phrases. *)
  let foldmap_code f_expr f_pat acc code = List.fold_left_map_stable (foldmap f_expr f_pat) acc code
  let fold_code f_expr f_pat acc code = List.fold_left (fold f_expr f_pat) acc code
  let map_code f_expr f_pat code = List.map_stable (map f_expr f_pat) code
  let iter_code f_expr f_pat code = List.iter (iter f_expr f_pat) code
end
|
4ff431ea9d5f1dda4316f82679107b65bec58236fbc4cee1027b56955a4c260f | well-typed-lightbulbs/ocaml-esp32 | tprintf.ml | (* TEST
include testing
*)
A test file for the Printf module .
A test file for the Printf module.
*)
open Testing;;
open Printf;;
try
printf "d/i positive\n%!";
test (sprintf "%d/%i" 42 43 = "42/43");
test (sprintf "%-4d/%-5i" 42 43 = "42 /43 ");
test (sprintf "%04d/%05i" 42 43 = "0042/00043");
test (sprintf "%+d/%+i" 42 43 = "+42/+43");
test (sprintf "% d/% i" 42 43 = " 42/ 43");
test (sprintf "%#d/%#i" 42 43 = "42/43");
test (sprintf "%#d/%#i" 123 123 = "123/123");
test (sprintf "%#d/%#i" 1234 1234 = "1_234/1_234");
test (sprintf "%#d/%#i" 12345 12345 = "12_345/12_345");
test (sprintf "%#d/%#i" 123456 123456 = "123_456/123_456");
test (sprintf "%#4d/%#5i" 1234 1234 = "1_234/1_234");
test (sprintf "%#-6d/%#-7i" 1234 1234 = "1_234 /1_234 ");
test (sprintf "%4d/%5i" 42 43 = " 42/ 43");
test (sprintf "%*d" (-4) 42 = "42 ");
test (sprintf "%*d/%*i" 4 42 5 43 = " 42/ 43");
test ( sprintf " % -0+#4d/%-0 # 5i " 42 43 = " +42 / 43 " ) ;
(* >> '#' is incompatible with 'd' *)
printf "\nd/i negative\n%!";
test (sprintf "%d/%i" (-42) (-43) = "-42/-43");
test (sprintf "%-4d/%-5i" (-42) (-43) = "-42 /-43 ");
test (sprintf "%04d/%05i" (-42) (-43) = "-042/-0043");
test (sprintf "%+d/%+i" (-42) (-43) = "-42/-43");
test (sprintf "% d/% i" (-42) (-43) = "-42/-43");
test (sprintf "%#d/%#i" (-42) (-43) = "-42/-43");
test (sprintf "%#d/%#i" (-123) (-123) = "-123/-123");
test (sprintf "%#d/%#i" (-1234) (-1234) = "-1_234/-1_234");
test (sprintf "%#d/%#i" (-12345) (-12345) = "-12_345/-12_345");
test (sprintf "%#d/%#i" (-123456) (-123456) = "-123_456/-123_456");
test (sprintf "%#4d/%#5i" (-1234) (-1234) = "-1_234/-1_234");
test (sprintf "%#-6d/%#-7i" (-1234) (-1234) = "-1_234/-1_234 ");
test (sprintf "%4d/%5i" (-42) (-43) = " -42/ -43");
test (sprintf "%*d" (-4) (-42) = "-42 ");
test (sprintf "%*d/%*i" 4 (-42) 5 (-43) = " -42/ -43");
test ( sprintf " % -0 + # 4d/%-0 + # 5i " ( -42 ) ( -43 ) = " -42 /-43 " ) ;
(* >> '0' is incompatible with '-', '#' is incompatible with 'd' *)
printf "\nu positive\n%!";
test (sprintf "%u" 42 = "42");
test (sprintf "%-4u" 42 = "42 ");
test (sprintf "%04u" 42 = "0042");
test ( sprintf " % + u " 42 = " 42 " ) ;
(* >> '+' is incompatible with 'u' *)
test ( sprintf " % u " 42 = " 42 " ) ;
(* >> ' ' is incompatible with 'u' *)
test (sprintf "%#u" 42 = "42");
test (sprintf "%#u" 123 = "123");
test (sprintf "%#u" 1234 = "1_234");
test (sprintf "%#u" 12345 = "12_345");
test (sprintf "%#u" 123456 = "123_456");
test (sprintf "%#4u" 1234 = "1_234");
test (sprintf "%#6u" 1234 = " 1_234");
test (sprintf "%4u" 42 = " 42");
test (sprintf "%*u" 4 42 = " 42");
test (sprintf "%*u" (-4) 42 = "42 ");
printf "\nu negative\n%!";
begin match Sys.word_size with
| 32 ->
test (sprintf "%u" (-1) = "2147483647");
test (sprintf "%#u" (-1) = "2_147_483_647");
| 64 ->
test (sprintf "%u" (-1) = "9223372036854775807");
test (sprintf "%#u" (-1) = "9_223_372_036_854_775_807");
| _ -> test false
end;
printf "\nx positive\n%!";
test (sprintf "%x" 42 = "2a");
test (sprintf "%-4x" 42 = "2a ");
test (sprintf "%04x" 42 = "002a");
test ( sprintf " % + x " 42 = " 2a " ) ;
(* >> '+' is incompatible with 'x' *)
test ( sprintf " % x " 42 = " 2a " ) ;
(* >> ' ' is incompatible with 'x' *)
test (sprintf "%#x" 42 = "0x2a");
test (sprintf "%4x" 42 = " 2a");
test (sprintf "%*x" 5 42 = " 2a");
test (sprintf "%*x" (-5) 42 = "2a ");
test (sprintf "%#*x" 5 42 = " 0x2a");
test (sprintf "%#*x" (-5) 42 = "0x2a ");
test (sprintf "%#-*x" 5 42 = "0x2a ");
test (sprintf "%-0+ #*x" 5 42 = "0x2a ");
printf "\nx negative\n%!";
begin match Sys.word_size with
| 32 ->
test (sprintf "%x" (-42) = "7fffffd6");
| 64 ->
test (sprintf "%x" (-42) = "7fffffffffffffd6");
| _ -> test false
end;
printf "\nX positive\n%!";
test (sprintf "%X" 42 = "2A");
test (sprintf "%-4X" 42 = "2A ");
test (sprintf "%04X" 42 = "002A");
test ( sprintf " % + X " 42 = " 2A " ) ;
(* >> '+' is incompatible with 'X' *)
test ( sprintf " % X " 42 = " 2A " ) ;
(* >> ' ' is incompatible with 'X' *)
test (sprintf "%#X" 42 = "0X2A");
test (sprintf "%4X" 42 = " 2A");
test (sprintf "%*X" 5 42 = " 2A");
test ( sprintf " % -0 + # * X " 5 42 = " 0X2A " ) ;
(* >> '-' is incompatible with '0' *)
printf "\nx negative\n%!";
begin match Sys.word_size with
| 32 ->
test (sprintf "%X" (-42) = "7FFFFFD6");
| 64 ->
test (sprintf "%X" (-42) = "7FFFFFFFFFFFFFD6");
| _ -> test false
end;
printf "\no positive\n%!";
test (sprintf "%o" 42 = "52");
test (sprintf "%-4o" 42 = "52 ");
test (sprintf "%04o" 42 = "0052");
test ( sprintf " % + o " 42 = " 52 " ) ;
(* >> '+' is incompatible with 'o' *)
test ( sprintf " % o " 42 = " 52 " ) ;
(* >> '+' is incompatible with 'o' *)
test (sprintf "%#o" 42 = "052");
test (sprintf "%4o" 42 = " 52");
test (sprintf "%*o" 5 42 = " 52");
test ( sprintf " % -0 + # * o " 5 42 = " 052 " ) ;
(* >> '-' is incompatible with 'o' *)
printf "\no negative\n%!";
begin match Sys.word_size with
| 32 ->
test (sprintf "%o" (-42) = "17777777726");
| 64 ->
test (sprintf "%o" (-42) = "777777777777777777726");
| _ -> test false
end;
printf "\ns\n%!";
test (sprintf "%s" "foo" = "foo");
test (sprintf "%-5s" "foo" = "foo ");
test ( sprintf " % 05s " " foo " = " foo " ) ;
(* >> '0' is incompatible with 's' *)
(*test (sprintf "%+s" "foo" = "foo");*)
(* >> '+' is incompatible with 's' *)
(*test (sprintf "% s" "foo" = "foo");*)
(* >> ' ' is incompatible with 's' *)
(*test (sprintf "%#s" "foo" = "foo");*)
(* >> '#' is incompatible with 's' *)
test (sprintf "%5s" "foo" = " foo");
test (sprintf "%1s" "foo" = "foo");
test (sprintf "%*s" 6 "foo" = " foo");
test (sprintf "%*s" (-6) "foo" = "foo ");
test (sprintf "%*s" 2 "foo" = "foo");
test ( sprintf " % -0 + # 5s " " foo " = " foo " ) ;
(* >> '-' is incompatible with '0', '#' is incompatible with 's' *)
test (sprintf "%s@" "foo" = "foo@");
test (sprintf "%" "foo" = "");
test (sprintf "%s@%s" "foo" "inria.fr" = "");
printf "\nS\n%!";
test (sprintf "%S" "fo\"o" = "\"fo\\\"o\"");
(* test (sprintf "%-5S" "foo" = "\"foo\" "); padding not done *)
test ( sprintf " % 05S " " foo " = " \"foo\ " " ) ; padding not done
(*test (sprintf "%+S" "foo" = "\"foo\"");*)
(* >> '#' is incompatible with 'S' *)
(*test (sprintf "% S" "foo" = "\"foo\"");*)
(* >> '#' is incompatible with 'S' *)
(*test (sprintf "%#S" "foo" = "\"foo\"");*)
(* >> '#' is incompatible with 'S' *)
test ( sprintf " % 5S " " foo " = " \"foo\ " " ) ; padding not done
test (sprintf "%1S" "foo" = "\"foo\"");
test (sprintf "%*S" 8 "foo" = " \"foo\"");
test (sprintf "%*S" (-8) "foo" = "\"foo\" ");
test (sprintf "%*S" 2 "foo" = "\"foo\"");
test ( sprintf " % -0 + # 5S " " foo " = " \"foo\ " " ) ; padding not done
test (sprintf "%S@" "foo" = "\"foo\"@");
test (sprintf "%" "foo" = "\"foo\"@inria.fr");
test (sprintf "%S@%S" "foo" "inria.fr" = "\"foo\"@\"inria.fr\"");
printf "\nc\n%!";
test (sprintf "%c" 'c' = "c");
(* test (sprintf "%-4c" 'c' = "c "); padding not done *)
test ( sprintf " % 04c " ' c ' = " c " ) ; padding not done
(*test (sprintf "%+c" 'c' = "c");*)
(* >> '#' is incompatible with 'c' *)
(*test (sprintf "% c" 'c' = "c");*)
(* >> '#' is incompatible with 'c' *)
(*test (sprintf "%#c" 'c' = "c");*)
(* >> '#' is incompatible with 'c' *)
(* test (sprintf "%4c" 'c' = " c"); padding not done *)
(* test (sprintf "%*c" 2 'c' = " c"); padding not done *)
test ( sprintf " % -0 + # 4c " ' c ' = " c " ) ; padding not done
printf "\nC\n%!";
test (sprintf "%C" 'c' = "'c'");
test (sprintf "%C" '\'' = "'\\''");
(* test (sprintf "%-4C" 'c' = "c "); padding not done *)
(* test (sprintf "%04C" 'c' = " c"); padding not done *)
(*test (sprintf "%+C" 'c' = "'c'");*)
(* >> '+' is incompatible with 'C' *)
(*test (sprintf "% C" 'c' = "'c'");*)
(* >> ' ' is incompatible with 'C' *)
(*test (sprintf "%#C" 'c' = "'c'");*)
(* >> '#' is incompatible with 'C' *)
(* test (sprintf "%4C" 'c' = " c"); padding not done *)
(* test (sprintf "%*C" 2 'c' = " c"); padding not done *)
(* test (sprintf "%-0+ #4C" 'c' = "c "); padding not done *)
printf "\nf\n%!";
test (sprintf "%f" (-42.42) = "-42.420000");
test (sprintf "%-13f" (-42.42) = "-42.420000 ");
test (sprintf "%013f" (-42.42) = "-00042.420000");
test (sprintf "%+f" 42.42 = "+42.420000");
test (sprintf "% f" 42.42 = " 42.420000");
test ( sprintf " % # f " 42.42 = " 42.420000 " ) ;
(* >> '#' is incompatible with 'f' *)
test (sprintf "%13f" 42.42 = " 42.420000");
test (sprintf "%*f" 12 42.42 = " 42.420000");
test ( sprintf " % -0 + # 12f " 42.42 = " +42.420000 " ) ;
(* >> '-' is incompatible with '0', '#' is incompatible with 'f' *)
test (sprintf "%.3f" (-42.42) = "-42.420");
test (sprintf "%.*f" (-3) 42.42 = "42.420");
(* dynamically-provided negative precisions are currently silently
turned into their absolute value; we could error on this
in the future (the behavior is unspecified), but the previous
buggy output "%.0-3f-" is not desirable. *)
test (sprintf "%-13.3f" (-42.42) = "-42.420 ");
test (sprintf "%013.3f" (-42.42) = "-00000042.420");
test (sprintf "%+.3f" 42.42 = "+42.420");
test (sprintf "% .3f" 42.42 = " 42.420");
test ( sprintf " % # .3f " 42.42 = " 42.420 " ) ;
(* >> '#' is incompatible with 'f' *)
test (sprintf "%13.3f" 42.42 = " 42.420");
test (sprintf "%*.*f" 12 3 42.42 = " 42.420");
test ( sprintf " % -0 + # 12.3f " 42.42 = " +42.420 " ) ;
(* >> '-' is incompatible with '0', '#' is incompatible with 'f' *)
Under Windows ( mingw and maybe also MSVC ) , the stdlib uses three
digits for the exponent instead of the two used by Linux and BSD .
Check that the two strings are equal , except that there may be an
extra zero , and if there is one , there may be a missing space or
zero . All in the first string relative to the second .
digits for the exponent instead of the two used by Linux and BSD.
Check that the two strings are equal, except that there may be an
extra zero, and if there is one, there may be a missing space or
zero. All in the first string relative to the second. *)
let ( =* ) s1 s2 =
let ss1 = s1 ^ "$" in
let ss2 = s2 ^ "$" in
let rec loop i1 i2 extra missing =
if i1 = String.length ss1 && i2 = String.length ss2 then begin
if extra then true else not missing
end else if i1 = String.length ss1 || i2 = String.length ss2 then
false
else begin
match ss1.[i1], ss2.[i2] with
| x, y when x = y -> loop (i1+1) (i2+1) extra missing
| '0', _ when not extra -> loop (i1+1) i2 true missing
| _, (' '|'0') when not missing -> loop i1 (i2+1) extra true
| _, _ -> false
end
in
loop 0 0 false false
in
printf "\nF\n%!";
test (sprintf "%F" 42.42 = "42.42");
test (sprintf "%F" 42.42e42 =* "4.242e+43");
test (sprintf "%F" 42.00 = "42.");
test (sprintf "%F" 0.042 = "0.042");
test (sprintf "%4F" 3. = " 3.");
test (sprintf "%-4F" 3. = "3. ");
test (sprintf "%04F" 3. = "003.");
test (sprintf "%+4F" 3. = " +3.");
test (sprintf "%.3F" 42.42 = "42.4");
test (sprintf "%12.3F" 42.42e42 =* " 4.24e+43");
test (sprintf "%.3F" 42.00 = "42.");
test (sprintf "%.3F" 0.0042 = "0.0042");
test (sprintf "%F" nan = "nan");
test (sprintf "%F" (-. nan) = "nan");
test (sprintf "%F" infinity = "infinity");
test (sprintf "%F" neg_infinity = "neg_infinity");
printf "\n#F\n%!";
test (sprintf "%+#F" (+0.) = "+0x0p+0");
test (sprintf "%+#F" (-0.) = "-0x0p+0");
test (sprintf "%+#F" (+1.) = "+0x1p+0");
test (sprintf "%+#F" (-1.) = "-0x1p+0");
test (sprintf "%+#F" (+1024.) = "+0x1p+10");
test (sprintf "% #F" (+1024.) = " 0x1p+10");
test (sprintf "%+#F" (-1024.) = "-0x1p+10");
test (sprintf "%#F" 0x123.456 = "0x1.23456p+8");
test (sprintf "%#F" 0x123456789ABCDE. = "0x1.23456789abcdep+52");
test (sprintf "%#F" epsilon_float = "0x1p-52");
test (sprintf "%#F" nan = "nan");
test (sprintf "%#F" (-. nan) = "nan");
test (sprintf "%#F" infinity = "infinity");
test (sprintf "%#F" neg_infinity = "neg_infinity");
printf "\nh\n%!";
test (sprintf "%+h" (+0.) = "+0x0p+0");
test (sprintf "%+h" (-0.) = "-0x0p+0");
test (sprintf "%+h" (+1.) = "+0x1p+0");
test (sprintf "%+h" (-1.) = "-0x1p+0");
test (sprintf "%+h" (+1024.) = "+0x1p+10");
test (sprintf "%+h" (-1024.) = "-0x1p+10");
test (sprintf "%h" 0x123.456 = "0x1.23456p+8");
test (sprintf "%h" 0x123456789ABCDE. = "0x1.23456789abcdep+52");
test (sprintf "%h" epsilon_float = "0x1p-52");
test (sprintf "%h" nan = "nan");
test (sprintf "%h" infinity = "infinity");
test (sprintf "%h" neg_infinity = "-infinity");
test (sprintf "%h" (4. *. atan 1.) = "0x1.921fb54442d18p+1");
printf "\nH\n%!";
test (sprintf "%+H" (+0.) = "+0X0P+0");
test (sprintf "%+H" (-0.) = "-0X0P+0");
test (sprintf "%+H" (+1.) = "+0X1P+0");
test (sprintf "%+H" (-1.) = "-0X1P+0");
test (sprintf "%+H" (+1024.) = "+0X1P+10");
test (sprintf "%+H" (-1024.) = "-0X1P+10");
test (sprintf "%H" 0X123.456 = "0X1.23456P+8");
test (sprintf "%H" 0X123456789ABCDE. = "0X1.23456789ABCDEP+52");
test (sprintf "%H" epsilon_float = "0X1P-52");
test (sprintf "%H" nan = "NAN");
test (sprintf "%H" infinity = "INFINITY");
test (sprintf "%H" neg_infinity = "-INFINITY");
test (sprintf "%H" (4. *. atan 1.) = "0X1.921FB54442D18P+1");
printf "\ne\n%!";
test (sprintf "%e" (-42.42) =* "-4.242000e+01");
test (sprintf "%-15e" (-42.42) =* "-4.242000e+01 ");
test (sprintf "%015e" (-42.42) =* "-004.242000e+01");
test (sprintf "%+e" 42.42 =* "+4.242000e+01");
test (sprintf "% e" 42.42 =* " 4.242000e+01");
test ( sprintf " % # e " 42.42 = * " 4.242000e+01 " ) ;
(* >> '#' is incompatible with 'e' *)
test (sprintf "%15e" 42.42 =* " 4.242000e+01");
test (sprintf "%*e" 14 42.42 =* " 4.242000e+01");
test ( sprintf " % -0 + # 14e " 42.42 = * " +4.242000e+01 " ) ;
(* >> '-' is incompatible with '0', '#' is incompatible with 'e' *)
test (sprintf "%.3e" (-42.42) =* "-4.242e+01");
test (sprintf "%-15.3e" (-42.42) =* "-4.242e+01 ");
test (sprintf "%015.3e" (-42.42) =* "-000004.242e+01");
test (sprintf "%+.3e" 42.42 =* "+4.242e+01");
test (sprintf "% .3e" 42.42 =* " 4.242e+01");
test ( sprintf " % # .3e " 42.42 = * " 4.242e+01 " ) ;
(* >> '#' is incompatible with 'e' *)
test (sprintf "%15.3e" 42.42 =* " 4.242e+01");
test (sprintf "%*.*e" 11 3 42.42 =* " 4.242e+01");
test ( sprintf " % -0 + # 14.3e " 42.42 = * " +4.242e+01 " ) ;
(* >> '-' is incompatible with '0', '#' is incompatible with 'e' *)
printf "\nE\n%!";
test (sprintf "%E" (-42.42) =* "-4.242000E+01");
test (sprintf "%-15E" (-42.42) =* "-4.242000E+01 ");
test (sprintf "%015E" (-42.42) =* "-004.242000E+01");
test (sprintf "%+E" 42.42 =* "+4.242000E+01");
test (sprintf "% E" 42.42 =* " 4.242000E+01");
test ( sprintf " % # E " 42.42 = * " 4.242000E+01 " ) ;
(* >> '#' is incompatible with 'E' *)
test (sprintf "%15E" 42.42 =* " 4.242000E+01");
test (sprintf "%*E" 14 42.42 =* " 4.242000E+01");
test ( sprintf " % -0 + # 14E " 42.42 = * " +4.242000E+01 " ) ;
(* >> '#' is incompatible with 'E' *)
test (sprintf "%.3E" (-42.42) =* "-4.242E+01");
test (sprintf "%-15.3E" (-42.42) =* "-4.242E+01 ");
test (sprintf "%015.3E" (-42.42) =* "-000004.242E+01");
test (sprintf "%+.3E" 42.42 =* "+4.242E+01");
test (sprintf "% .3E" 42.42 =* " 4.242E+01");
test ( sprintf " % # .3E " 42.42 = * " 4.242E+01 " ) ;
(* >> '#' is incompatible with 'E' *)
test (sprintf "%15.3E" 42.42 =* " 4.242E+01");
test (sprintf "%*.*E" 11 3 42.42 =* " 4.242E+01");
test ( sprintf " % -0 + # 14.3E " 42.42 = * " +4.242E+01 " ) ;
(* >> '-' is incompatible with '0', '#' is incompatible with 'E' *)
printf "\ng\n%!";
test (sprintf "%g" (-42.42) = "-42.42");
test (sprintf "%.3g" (-4242.) =* "-4.24e+03");
test (sprintf "%-15g" (-42.42) = "-42.42 ");
test (sprintf "%015g" (-42.42) = "-00000000042.42");
test (sprintf "%+g" 42.42 = "+42.42");
test (sprintf "% g" 42.42 = " 42.42");
test (sprintf "%15g" 42.42 = " 42.42");
test (sprintf "%*g" 14 42.42 = " 42.42");
test (sprintf "%.3g" (-42.42) = "-42.4");
printf "\nG\n%!";
test (sprintf "%G" (-42.42) = "-42.42");
test (sprintf "%.3G" (-4242.) =* "-4.24E+03");
test (sprintf "%-15G" (-42.42) = "-42.42 ");
test (sprintf "%015G" (-42.42) = "-00000000042.42");
test (sprintf "%+G" 42.42 = "+42.42");
test (sprintf "% G" 42.42 = " 42.42");
test (sprintf "%15G" 42.42 = " 42.42");
test (sprintf "%*G" 14 42.42 = " 42.42");
test (sprintf "%.3G" (-42.42) = "-42.4");
printf "\nB\n%!";
test (sprintf "%B" true = "true");
test (sprintf "%8B" true = " true");
test (sprintf "%B" false = "false");
test (sprintf "%-8B" false = "false ");
printf "\nld/li positive\n%!";
test (sprintf "%ld/%li" 42l 43l = "42/43");
test (sprintf "%-4ld/%-5li" 42l 43l = "42 /43 ");
test (sprintf "%04ld/%05li" 42l 43l = "0042/00043");
test (sprintf "%+ld/%+li" 42l 43l = "+42/+43");
test (sprintf "% ld/% li" 42l 43l = " 42/ 43");
test ( sprintf " % # ld/%#li " 42l 43l = " 42/43 " ) ;
(* >> '#' is incompatible with 'ld' *)
test (sprintf "%4ld/%5li" 42l 43l = " 42/ 43");
test (sprintf "%*ld/%*li" 4 42l 5 43l = " 42/ 43");
test ( sprintf " % -0+#4ld/%-0 # 5li " 42l 43l = " +42 / 43 " ) ;
(* >> '-' is incompatible with '0', '#' is incompatible with 'ld' *)
printf "\nld/li negative\n%!";
test (sprintf "%ld/%li" (-42l) (-43l) = "-42/-43");
test (sprintf "%-4ld/%-5li" (-42l) (-43l) = "-42 /-43 ");
test (sprintf "%04ld/%05li" (-42l) (-43l) = "-042/-0043");
test (sprintf "%+ld/%+li" (-42l) (-43l) = "-42/-43");
test (sprintf "% ld/% li" (-42l) (-43l) = "-42/-43");
test ( sprintf " % # ld/%#li " ( -42l ) ( -43l ) = " -42/-43 " ) ;
(* >> '#' is incompatible with 'ld' *)
test (sprintf "%4ld/%5li" (-42l) (-43l) = " -42/ -43");
test (sprintf "%*ld/%*li" 4 (-42l) 5 (-43l) = " -42/ -43");
test ( sprintf " % -0 + # 4ld/%-0 + # 5li " ( -42l ) ( -43l ) = " -42 /-43 " ) ;
(* >> '-' is incompatible with '0', '#' is incompatible with 'ld' *)
printf "\nlu positive\n%!";
test (sprintf "%lu" 42l = "42");
test (sprintf "%-4lu" 42l = "42 ");
test (sprintf "%04lu" 42l = "0042");
test ( sprintf " % + lu " 42l = " 42 " ) ;
(* >> '+' is incompatible with 'lu' *)
test ( sprintf " % lu " 42l = " 42 " ) ;
(* >> ' ' is incompatible with 'lu' *)
test ( sprintf " % # lu " 42l = " 42 " ) ;
(* >> '#' is incompatible with 'lu' *)
test (sprintf "%4lu" 42l = " 42");
test (sprintf "%*lu" 4 42l = " 42");
test ( sprintf " % -0 + # 6ld " 42l = " +42 " ) ;
(* >> '-' is incompatible with '0', '#' is incompatible with 'ld' *)
printf "\nlu negative\n%!";
test (sprintf "%lu" (-1l) = "4294967295");
printf "\nlx positive\n%!";
test (sprintf "%lx" 42l = "2a");
test (sprintf "%-4lx" 42l = "2a ");
test (sprintf "%04lx" 42l = "002a");
(*test (sprintf "%+lx" 42l = "2a");*)
(* >> '+' is incompatible with 'lx' *)
(*test (sprintf "% lx" 42l = "2a");*)
(* >> ' ' is incompatible with 'lx' *)
test (sprintf "%#lx" 42l = "0x2a");
test (sprintf "%4lx" 42l = " 2a");
test (sprintf "%*lx" 5 42l = " 2a");
test ( sprintf " % -0 + # * lx " 5 42l = " 0x2a " ) ;
(* >> '-' is incompatible with '0' *)
printf "\nlx negative\n%!";
test (sprintf "%lx" (-42l) = "ffffffd6");
printf "\nlX positive\n%!";
test (sprintf "%lX" 42l = "2A");
test (sprintf "%-4lX" 42l = "2A ");
test (sprintf "%04lX" 42l = "002A");
(*test (sprintf "%+lX" 42l = "2A");*)
(* >> '+' is incompatible with 'lX' *)
(*test (sprintf "% lX" 42l = "2A");*)
(* >> ' ' is incompatible with 'lX' *)
test (sprintf "%#lX" 42l = "0X2A");
test (sprintf "%4lX" 42l = " 2A");
test (sprintf "%*lX" 5 42l = " 2A");
test ( sprintf " % -0 + # * lX " 5 42l = " 0X2A " ) ;
(* >> '-' is incompatible with '0' *)
printf "\nlx negative\n%!";
test (sprintf "%lX" (-42l) = "FFFFFFD6");
printf "\nlo positive\n%!";
test (sprintf "%lo" 42l = "52");
test (sprintf "%-4lo" 42l = "52 ");
test (sprintf "%04lo" 42l = "0052");
test ( sprintf " % + lo " 42l = " 52 " ) ;
(* >> '+' is incompatible with 'lo' *)
test ( sprintf " % lo " 42l = " 52 " ) ;
(* >> ' ' is incompatible with 'lo' *)
test (sprintf "%#lo" 42l = "052");
test (sprintf "%4lo" 42l = " 52");
test (sprintf "%*lo" 5 42l = " 52");
test ( sprintf " % -0 + # * lo " 5 42l = " 052 " ) ;
(* >> '-' is incompatible with '0' *)
printf "\nlo negative\n%!";
test (sprintf "%lo" (-42l) = "37777777726");
Nativeint not tested : looks like too much work , and anyway it should
work like Int32 or Int64 .
work like Int32 or Int64. *)
printf "\nLd/Li positive\n%!";
test (sprintf "%Ld/%Li" 42L 43L = "42/43");
test (sprintf "%-4Ld/%-5Li" 42L 43L = "42 /43 ");
test (sprintf "%04Ld/%05Li" 42L 43L = "0042/00043");
test ( sprintf " % + Ld/%+Li " 42L 43L = " " ) ;
> > ' + ' is incompatible with ' Ld '
test ( sprintf " % Ld/% " 42L 43L = " 42/ 43 " ) ;
> > ' ' is incompatible with ' Ld '
test ( sprintf " % # " 42L 43L = " 42/43 " ) ;
> > ' # ' is incompatible with ' Ld '
test (sprintf "%4Ld/%5Li" 42L 43L = " 42/ 43");
test (sprintf "%*Ld/%*Li" 4 42L 5 43L = " 42/ 43");
test ( sprintf " % -0+#4Ld/%-0 # 5Li " 42L 43L = " +42 / 43 " ) ;
(* >> '-' is incompatible with '0' *)
printf "\nLd/Li negative\n%!";
test (sprintf "%Ld/%Li" (-42L) (-43L) = "-42/-43");
test (sprintf "%-4Ld/%-5Li" (-42L) (-43L) = "-42 /-43 ");
test (sprintf "%04Ld/%05Li" (-42L) (-43L) = "-042/-0043");
test ( sprintf " % + Ld/%+Li " ( -42L ) ( -43L ) = " -42/-43 " ) ;
> > ' + ' is incompatible with ' Ld '
test ( sprintf " % Ld/% " ( -42L ) ( -43L ) = " -42/-43 " ) ;
> > ' ' is incompatible with ' Ld '
test ( sprintf " % # " ( -42L ) ( -43L ) = " -42/-43 " ) ;
> > ' # ' is incompatible with ' Ld '
test (sprintf "%4Ld/%5Li" (-42L) (-43L) = " -42/ -43");
test (sprintf "%*Ld/%*Li" 4 (-42L) 5 (-43L) = " -42/ -43");
(*test (sprintf "%-0+ #4Ld/%-0+ #5Li" (-42L) (-43L) = "-42 /-43 ");*)
(* >> '-' is incompatible with '0' *)
printf "\nLu positive\n%!";
test (sprintf "%Lu" 42L = "42");
test (sprintf "%-4Lu" 42L = "42 ");
test (sprintf "%04Lu" 42L = "0042");
test ( sprintf " % + Lu " 42L = " 42 " ) ;
> > ' + ' is incompatible with ' '
test ( sprintf " % Lu " 42L = " 42 " ) ;
> > ' ' is incompatible with ' '
test ( sprintf " % # Lu " 42L = " 42 " ) ;
> > ' # ' is incompatible with ' '
test (sprintf "%4Lu" 42L = " 42");
test (sprintf "%*Lu" 4 42L = " 42");
test ( sprintf " % -0 + # 6Ld " 42L = " +42 " ) ;
(* >> '-' is incompatible with '0' *)
printf "\nLu negative\n%!";
test (sprintf "%Lu" (-1L) = "18446744073709551615");
printf "\nLx positive\n%!";
test (sprintf "%Lx" 42L = "2a");
test (sprintf "%-4Lx" 42L = "2a ");
test (sprintf "%04Lx" 42L = "002a");
(*test (sprintf "%+Lx" 42L = "2a");*)
(* >> '+' is incompatible with 'Lx' *)
(*test (sprintf "% Lx" 42L = "2a");*)
(* >> ' ' is incompatible with 'Lx' *)
test (sprintf "%#Lx" 42L = "0x2a");
test (sprintf "%4Lx" 42L = " 2a");
test (sprintf "%*Lx" 5 42L = " 2a");
test ( sprintf " % -0 + # * Lx " 5 42L = " 0x2a " ) ;
(* >> '-' is incompatible with '0' *)
printf "\nLx negative\n%!";
test (sprintf "%Lx" (-42L) = "ffffffffffffffd6");
printf "\nLX positive\n%!";
test (sprintf "%LX" 42L = "2A");
test (sprintf "%-4LX" 42L = "2A ");
test (sprintf "%04LX" 42L = "002A");
(*test (sprintf "%+LX" 42L = "2A");*)
> > ' + ' is incompatible with ' LX '
(*test (sprintf "% LX" 42L = "2A");*)
> > ' ' is incompatible with ' LX '
test (sprintf "%#LX" 42L = "0X2A");
test (sprintf "%4LX" 42L = " 2A");
test (sprintf "%*LX" 5 42L = " 2A");
test ( sprintf " % -0 + # * LX " 5 42L = " 0X2A " ) ;
(* >> '-' is incompatible with '0' *)
printf "\nLx negative\n%!";
test (sprintf "%LX" (-42L) = "FFFFFFFFFFFFFFD6");
printf "\nLo positive\n%!";
test (sprintf "%Lo" 42L = "52");
test (sprintf "%-4Lo" 42L = "52 ");
test (sprintf "%04Lo" 42L = "0052");
test ( sprintf " % + Lo " 42L = " 52 " ) ;
> > ' + ' is incompatible with ' '
test ( sprintf " % Lo " 42L = " 52 " ) ;
> > ' ' is incompatible with ' '
test (sprintf "%#Lo" 42L = "052");
test (sprintf "%4Lo" 42L = " 52");
test (sprintf "%*Lo" 5 42L = " 52");
test ( sprintf " % -0 + # * Lo " 5 42L = " 052 " ) ;
(* >> '-' is incompatible with '0' *)
printf "\nLo negative\n%!";
test (sprintf "%Lo" (-42L) = "1777777777777777777726");
printf "\na\n%!";
let x = ref () in
let f () y = if y == x then "ok" else "wrong" in
test (sprintf "%a" f x = "ok");
printf "\nt\n%!";
let f () = "ok" in
test (sprintf "%t" f = "ok");
(* Work as expected. Prints the format string type digest.
If you want to print the contents of the format string,
do not use a meta format; simply convert the format string
to a string and print it using %s. *)
printf "\n{...%%}\n%!";
let f = format_of_string "%4g/%s" in
test (sprintf "%{%.4F%5S%}" f = "%f%s");
printf "\n(...%%)\n%!";
let f = format_of_string "%d/foo/%s" in
test (sprintf "%(%d%s%)" f 42 "bar" = "42/foo/bar");
printf "\n! %% @ , and constants\n%!";
test (sprintf "%!" = "");
test (sprintf "%%" = "%");
test (sprintf "%@" = "@");
test (sprintf "%," = "");
test (sprintf "@" = "@");
test (sprintf "@@" = "@@");
test (sprintf "@%%" = "@%");
printf "\nend of tests\n%!";
with e ->
printf "unexpected exception: %s\n%!" (Printexc.to_string e);
test false;
;;
| null | https://raw.githubusercontent.com/well-typed-lightbulbs/ocaml-esp32/c24fcbfbee0e3aa6bb71c9b467c60c6bac326cc7/testsuite/tests/lib-printf/tprintf.ml | ocaml | TEST
include testing
>> '#' is incompatible with 'd'
>> '0' is incompatible with '-', '#' is incompatible with 'd'
>> '+' is incompatible with 'u'
>> ' ' is incompatible with 'u'
>> '+' is incompatible with 'x'
>> ' ' is incompatible with 'x'
>> '+' is incompatible with 'X'
>> ' ' is incompatible with 'X'
>> '-' is incompatible with '0'
>> '+' is incompatible with 'o'
>> '+' is incompatible with 'o'
>> '-' is incompatible with 'o'
>> '0' is incompatible with 's'
test (sprintf "%+s" "foo" = "foo");
>> '+' is incompatible with 's'
test (sprintf "% s" "foo" = "foo");
>> ' ' is incompatible with 's'
test (sprintf "%#s" "foo" = "foo");
>> '#' is incompatible with 's'
>> '-' is incompatible with '0', '#' is incompatible with 's'
test (sprintf "%-5S" "foo" = "\"foo\" "); padding not done
test (sprintf "%+S" "foo" = "\"foo\"");
>> '#' is incompatible with 'S'
test (sprintf "% S" "foo" = "\"foo\"");
>> '#' is incompatible with 'S'
test (sprintf "%#S" "foo" = "\"foo\"");
>> '#' is incompatible with 'S'
test (sprintf "%-4c" 'c' = "c "); padding not done
test (sprintf "%+c" 'c' = "c");
>> '#' is incompatible with 'c'
test (sprintf "% c" 'c' = "c");
>> '#' is incompatible with 'c'
test (sprintf "%#c" 'c' = "c");
>> '#' is incompatible with 'c'
test (sprintf "%4c" 'c' = " c"); padding not done
test (sprintf "%*c" 2 'c' = " c"); padding not done
test (sprintf "%-4C" 'c' = "c "); padding not done
test (sprintf "%04C" 'c' = " c"); padding not done
test (sprintf "%+C" 'c' = "'c'");
>> '+' is incompatible with 'C'
test (sprintf "% C" 'c' = "'c'");
>> ' ' is incompatible with 'C'
test (sprintf "%#C" 'c' = "'c'");
>> '#' is incompatible with 'C'
test (sprintf "%4C" 'c' = " c"); padding not done
test (sprintf "%*C" 2 'c' = " c"); padding not done
test (sprintf "%-0+ #4C" 'c' = "c "); padding not done
>> '#' is incompatible with 'f'
>> '-' is incompatible with '0', '#' is incompatible with 'f'
dynamically-provided negative precisions are currently silently
turned into their absolute value; we could error on this
in the future (the behavior is unspecified), but the previous
buggy output "%.0-3f-" is not desirable.
>> '#' is incompatible with 'f'
>> '-' is incompatible with '0', '#' is incompatible with 'f'
>> '#' is incompatible with 'e'
>> '-' is incompatible with '0', '#' is incompatible with 'e'
>> '#' is incompatible with 'e'
>> '-' is incompatible with '0', '#' is incompatible with 'e'
>> '#' is incompatible with 'E'
>> '#' is incompatible with 'E'
>> '#' is incompatible with 'E'
>> '-' is incompatible with '0', '#' is incompatible with 'E'
>> '#' is incompatible with 'ld'
>> '-' is incompatible with '0', '#' is incompatible with 'ld'
>> '#' is incompatible with 'ld'
>> '-' is incompatible with '0', '#' is incompatible with 'ld'
>> '+' is incompatible with 'lu'
>> ' ' is incompatible with 'lu'
>> '#' is incompatible with 'lu'
>> '-' is incompatible with '0', '#' is incompatible with 'ld'
test (sprintf "%+lx" 42l = "2a");
>> '+' is incompatible with 'lx'
test (sprintf "% lx" 42l = "2a");
>> ' ' is incompatible with 'lx'
>> '-' is incompatible with '0'
test (sprintf "%+lX" 42l = "2A");
>> '+' is incompatible with 'lX'
test (sprintf "% lX" 42l = "2A");
>> ' ' is incompatible with 'lX'
>> '-' is incompatible with '0'
>> '+' is incompatible with 'lo'
>> ' ' is incompatible with 'lo'
>> '-' is incompatible with '0'
>> '-' is incompatible with '0'
test (sprintf "%-0+ #4Ld/%-0+ #5Li" (-42L) (-43L) = "-42 /-43 ");
>> '-' is incompatible with '0'
>> '-' is incompatible with '0'
test (sprintf "%+Lx" 42L = "2a");
>> '+' is incompatible with 'Lx'
test (sprintf "% Lx" 42L = "2a");
>> ' ' is incompatible with 'Lx'
>> '-' is incompatible with '0'
test (sprintf "%+LX" 42L = "2A");
test (sprintf "% LX" 42L = "2A");
>> '-' is incompatible with '0'
>> '-' is incompatible with '0'
Work as expected. Prints the format string type digest.
If you want to print the contents of the format string,
do not use a meta format; simply convert the format string
to a string and print it using %s. |
A test file for the Printf module .
A test file for the Printf module.
*)
open Testing;;
open Printf;;
try
printf "d/i positive\n%!";
test (sprintf "%d/%i" 42 43 = "42/43");
test (sprintf "%-4d/%-5i" 42 43 = "42 /43 ");
test (sprintf "%04d/%05i" 42 43 = "0042/00043");
test (sprintf "%+d/%+i" 42 43 = "+42/+43");
test (sprintf "% d/% i" 42 43 = " 42/ 43");
test (sprintf "%#d/%#i" 42 43 = "42/43");
test (sprintf "%#d/%#i" 123 123 = "123/123");
test (sprintf "%#d/%#i" 1234 1234 = "1_234/1_234");
test (sprintf "%#d/%#i" 12345 12345 = "12_345/12_345");
test (sprintf "%#d/%#i" 123456 123456 = "123_456/123_456");
test (sprintf "%#4d/%#5i" 1234 1234 = "1_234/1_234");
test (sprintf "%#-6d/%#-7i" 1234 1234 = "1_234 /1_234 ");
test (sprintf "%4d/%5i" 42 43 = " 42/ 43");
test (sprintf "%*d" (-4) 42 = "42 ");
test (sprintf "%*d/%*i" 4 42 5 43 = " 42/ 43");
test ( sprintf " % -0+#4d/%-0 # 5i " 42 43 = " +42 / 43 " ) ;
printf "\nd/i negative\n%!";
test (sprintf "%d/%i" (-42) (-43) = "-42/-43");
test (sprintf "%-4d/%-5i" (-42) (-43) = "-42 /-43 ");
test (sprintf "%04d/%05i" (-42) (-43) = "-042/-0043");
test (sprintf "%+d/%+i" (-42) (-43) = "-42/-43");
test (sprintf "% d/% i" (-42) (-43) = "-42/-43");
test (sprintf "%#d/%#i" (-42) (-43) = "-42/-43");
test (sprintf "%#d/%#i" (-123) (-123) = "-123/-123");
test (sprintf "%#d/%#i" (-1234) (-1234) = "-1_234/-1_234");
test (sprintf "%#d/%#i" (-12345) (-12345) = "-12_345/-12_345");
test (sprintf "%#d/%#i" (-123456) (-123456) = "-123_456/-123_456");
test (sprintf "%#4d/%#5i" (-1234) (-1234) = "-1_234/-1_234");
test (sprintf "%#-6d/%#-7i" (-1234) (-1234) = "-1_234/-1_234 ");
test (sprintf "%4d/%5i" (-42) (-43) = " -42/ -43");
test (sprintf "%*d" (-4) (-42) = "-42 ");
test (sprintf "%*d/%*i" 4 (-42) 5 (-43) = " -42/ -43");
test ( sprintf " % -0 + # 4d/%-0 + # 5i " ( -42 ) ( -43 ) = " -42 /-43 " ) ;
printf "\nu positive\n%!";
test (sprintf "%u" 42 = "42");
test (sprintf "%-4u" 42 = "42 ");
test (sprintf "%04u" 42 = "0042");
test ( sprintf " % + u " 42 = " 42 " ) ;
test ( sprintf " % u " 42 = " 42 " ) ;
test (sprintf "%#u" 42 = "42");
test (sprintf "%#u" 123 = "123");
test (sprintf "%#u" 1234 = "1_234");
test (sprintf "%#u" 12345 = "12_345");
test (sprintf "%#u" 123456 = "123_456");
test (sprintf "%#4u" 1234 = "1_234");
test (sprintf "%#6u" 1234 = " 1_234");
test (sprintf "%4u" 42 = " 42");
test (sprintf "%*u" 4 42 = " 42");
test (sprintf "%*u" (-4) 42 = "42 ");
printf "\nu negative\n%!";
begin match Sys.word_size with
| 32 ->
test (sprintf "%u" (-1) = "2147483647");
test (sprintf "%#u" (-1) = "2_147_483_647");
| 64 ->
test (sprintf "%u" (-1) = "9223372036854775807");
test (sprintf "%#u" (-1) = "9_223_372_036_854_775_807");
| _ -> test false
end;
printf "\nx positive\n%!";
test (sprintf "%x" 42 = "2a");
test (sprintf "%-4x" 42 = "2a ");
test (sprintf "%04x" 42 = "002a");
test ( sprintf " % + x " 42 = " 2a " ) ;
test ( sprintf " % x " 42 = " 2a " ) ;
test (sprintf "%#x" 42 = "0x2a");
test (sprintf "%4x" 42 = " 2a");
test (sprintf "%*x" 5 42 = " 2a");
test (sprintf "%*x" (-5) 42 = "2a ");
test (sprintf "%#*x" 5 42 = " 0x2a");
test (sprintf "%#*x" (-5) 42 = "0x2a ");
test (sprintf "%#-*x" 5 42 = "0x2a ");
test (sprintf "%-0+ #*x" 5 42 = "0x2a ");
printf "\nx negative\n%!";
begin match Sys.word_size with
| 32 ->
test (sprintf "%x" (-42) = "7fffffd6");
| 64 ->
test (sprintf "%x" (-42) = "7fffffffffffffd6");
| _ -> test false
end;
printf "\nX positive\n%!";
test (sprintf "%X" 42 = "2A");
test (sprintf "%-4X" 42 = "2A ");
test (sprintf "%04X" 42 = "002A");
test ( sprintf " % + X " 42 = " 2A " ) ;
test ( sprintf " % X " 42 = " 2A " ) ;
test (sprintf "%#X" 42 = "0X2A");
test (sprintf "%4X" 42 = " 2A");
test (sprintf "%*X" 5 42 = " 2A");
test ( sprintf " % -0 + # * X " 5 42 = " 0X2A " ) ;
printf "\nx negative\n%!";
begin match Sys.word_size with
| 32 ->
test (sprintf "%X" (-42) = "7FFFFFD6");
| 64 ->
test (sprintf "%X" (-42) = "7FFFFFFFFFFFFFD6");
| _ -> test false
end;
printf "\no positive\n%!";
test (sprintf "%o" 42 = "52");
test (sprintf "%-4o" 42 = "52 ");
test (sprintf "%04o" 42 = "0052");
test ( sprintf " % + o " 42 = " 52 " ) ;
test ( sprintf " % o " 42 = " 52 " ) ;
test (sprintf "%#o" 42 = "052");
test (sprintf "%4o" 42 = " 52");
test (sprintf "%*o" 5 42 = " 52");
test ( sprintf " % -0 + # * o " 5 42 = " 052 " ) ;
printf "\no negative\n%!";
begin match Sys.word_size with
| 32 ->
test (sprintf "%o" (-42) = "17777777726");
| 64 ->
test (sprintf "%o" (-42) = "777777777777777777726");
| _ -> test false
end;
printf "\ns\n%!";
test (sprintf "%s" "foo" = "foo");
test (sprintf "%-5s" "foo" = "foo ");
test ( sprintf " % 05s " " foo " = " foo " ) ;
test (sprintf "%5s" "foo" = " foo");
test (sprintf "%1s" "foo" = "foo");
test (sprintf "%*s" 6 "foo" = " foo");
test (sprintf "%*s" (-6) "foo" = "foo ");
test (sprintf "%*s" 2 "foo" = "foo");
test ( sprintf " % -0 + # 5s " " foo " = " foo " ) ;
test (sprintf "%s@" "foo" = "foo@");
test (sprintf "%" "foo" = "");
test (sprintf "%s@%s" "foo" "inria.fr" = "");
printf "\nS\n%!";
test (sprintf "%S" "fo\"o" = "\"fo\\\"o\"");
test ( sprintf " % 05S " " foo " = " \"foo\ " " ) ; padding not done
test ( sprintf " % 5S " " foo " = " \"foo\ " " ) ; padding not done
test (sprintf "%1S" "foo" = "\"foo\"");
test (sprintf "%*S" 8 "foo" = " \"foo\"");
test (sprintf "%*S" (-8) "foo" = "\"foo\" ");
test (sprintf "%*S" 2 "foo" = "\"foo\"");
test ( sprintf " % -0 + # 5S " " foo " = " \"foo\ " " ) ; padding not done
test (sprintf "%S@" "foo" = "\"foo\"@");
test (sprintf "%" "foo" = "\"foo\"@inria.fr");
test (sprintf "%S@%S" "foo" "inria.fr" = "\"foo\"@\"inria.fr\"");
printf "\nc\n%!";
test (sprintf "%c" 'c' = "c");
test ( sprintf " % 04c " ' c ' = " c " ) ; padding not done
test ( sprintf " % -0 + # 4c " ' c ' = " c " ) ; padding not done
printf "\nC\n%!";
test (sprintf "%C" 'c' = "'c'");
test (sprintf "%C" '\'' = "'\\''");
printf "\nf\n%!";
test (sprintf "%f" (-42.42) = "-42.420000");
test (sprintf "%-13f" (-42.42) = "-42.420000 ");
test (sprintf "%013f" (-42.42) = "-00042.420000");
test (sprintf "%+f" 42.42 = "+42.420000");
test (sprintf "% f" 42.42 = " 42.420000");
test ( sprintf " % # f " 42.42 = " 42.420000 " ) ;
test (sprintf "%13f" 42.42 = " 42.420000");
test (sprintf "%*f" 12 42.42 = " 42.420000");
test ( sprintf " % -0 + # 12f " 42.42 = " +42.420000 " ) ;
test (sprintf "%.3f" (-42.42) = "-42.420");
test (sprintf "%.*f" (-3) 42.42 = "42.420");
test (sprintf "%-13.3f" (-42.42) = "-42.420 ");
test (sprintf "%013.3f" (-42.42) = "-00000042.420");
test (sprintf "%+.3f" 42.42 = "+42.420");
test (sprintf "% .3f" 42.42 = " 42.420");
test ( sprintf " % # .3f " 42.42 = " 42.420 " ) ;
test (sprintf "%13.3f" 42.42 = " 42.420");
test (sprintf "%*.*f" 12 3 42.42 = " 42.420");
test ( sprintf " % -0 + # 12.3f " 42.42 = " +42.420 " ) ;
Under Windows ( mingw and maybe also MSVC ) , the stdlib uses three
digits for the exponent instead of the two used by Linux and BSD .
Check that the two strings are equal , except that there may be an
extra zero , and if there is one , there may be a missing space or
zero . All in the first string relative to the second .
digits for the exponent instead of the two used by Linux and BSD.
Check that the two strings are equal, except that there may be an
extra zero, and if there is one, there may be a missing space or
zero. All in the first string relative to the second. *)
let ( =* ) s1 s2 =
let ss1 = s1 ^ "$" in
let ss2 = s2 ^ "$" in
let rec loop i1 i2 extra missing =
if i1 = String.length ss1 && i2 = String.length ss2 then begin
if extra then true else not missing
end else if i1 = String.length ss1 || i2 = String.length ss2 then
false
else begin
match ss1.[i1], ss2.[i2] with
| x, y when x = y -> loop (i1+1) (i2+1) extra missing
| '0', _ when not extra -> loop (i1+1) i2 true missing
| _, (' '|'0') when not missing -> loop i1 (i2+1) extra true
| _, _ -> false
end
in
loop 0 0 false false
in
printf "\nF\n%!";
test (sprintf "%F" 42.42 = "42.42");
test (sprintf "%F" 42.42e42 =* "4.242e+43");
test (sprintf "%F" 42.00 = "42.");
test (sprintf "%F" 0.042 = "0.042");
test (sprintf "%4F" 3. = " 3.");
test (sprintf "%-4F" 3. = "3. ");
test (sprintf "%04F" 3. = "003.");
test (sprintf "%+4F" 3. = " +3.");
test (sprintf "%.3F" 42.42 = "42.4");
test (sprintf "%12.3F" 42.42e42 =* " 4.24e+43");
test (sprintf "%.3F" 42.00 = "42.");
test (sprintf "%.3F" 0.0042 = "0.0042");
test (sprintf "%F" nan = "nan");
test (sprintf "%F" (-. nan) = "nan");
test (sprintf "%F" infinity = "infinity");
test (sprintf "%F" neg_infinity = "neg_infinity");
printf "\n#F\n%!";
test (sprintf "%+#F" (+0.) = "+0x0p+0");
test (sprintf "%+#F" (-0.) = "-0x0p+0");
test (sprintf "%+#F" (+1.) = "+0x1p+0");
test (sprintf "%+#F" (-1.) = "-0x1p+0");
test (sprintf "%+#F" (+1024.) = "+0x1p+10");
test (sprintf "% #F" (+1024.) = " 0x1p+10");
test (sprintf "%+#F" (-1024.) = "-0x1p+10");
test (sprintf "%#F" 0x123.456 = "0x1.23456p+8");
test (sprintf "%#F" 0x123456789ABCDE. = "0x1.23456789abcdep+52");
test (sprintf "%#F" epsilon_float = "0x1p-52");
test (sprintf "%#F" nan = "nan");
test (sprintf "%#F" (-. nan) = "nan");
test (sprintf "%#F" infinity = "infinity");
test (sprintf "%#F" neg_infinity = "neg_infinity");
printf "\nh\n%!";
test (sprintf "%+h" (+0.) = "+0x0p+0");
test (sprintf "%+h" (-0.) = "-0x0p+0");
test (sprintf "%+h" (+1.) = "+0x1p+0");
test (sprintf "%+h" (-1.) = "-0x1p+0");
test (sprintf "%+h" (+1024.) = "+0x1p+10");
test (sprintf "%+h" (-1024.) = "-0x1p+10");
test (sprintf "%h" 0x123.456 = "0x1.23456p+8");
test (sprintf "%h" 0x123456789ABCDE. = "0x1.23456789abcdep+52");
test (sprintf "%h" epsilon_float = "0x1p-52");
test (sprintf "%h" nan = "nan");
test (sprintf "%h" infinity = "infinity");
test (sprintf "%h" neg_infinity = "-infinity");
test (sprintf "%h" (4. *. atan 1.) = "0x1.921fb54442d18p+1");
printf "\nH\n%!";
test (sprintf "%+H" (+0.) = "+0X0P+0");
test (sprintf "%+H" (-0.) = "-0X0P+0");
test (sprintf "%+H" (+1.) = "+0X1P+0");
test (sprintf "%+H" (-1.) = "-0X1P+0");
test (sprintf "%+H" (+1024.) = "+0X1P+10");
test (sprintf "%+H" (-1024.) = "-0X1P+10");
test (sprintf "%H" 0X123.456 = "0X1.23456P+8");
test (sprintf "%H" 0X123456789ABCDE. = "0X1.23456789ABCDEP+52");
test (sprintf "%H" epsilon_float = "0X1P-52");
test (sprintf "%H" nan = "NAN");
test (sprintf "%H" infinity = "INFINITY");
test (sprintf "%H" neg_infinity = "-INFINITY");
test (sprintf "%H" (4. *. atan 1.) = "0X1.921FB54442D18P+1");
printf "\ne\n%!";
test (sprintf "%e" (-42.42) =* "-4.242000e+01");
test (sprintf "%-15e" (-42.42) =* "-4.242000e+01 ");
test (sprintf "%015e" (-42.42) =* "-004.242000e+01");
test (sprintf "%+e" 42.42 =* "+4.242000e+01");
test (sprintf "% e" 42.42 =* " 4.242000e+01");
test ( sprintf " % # e " 42.42 = * " 4.242000e+01 " ) ;
test (sprintf "%15e" 42.42 =* " 4.242000e+01");
test (sprintf "%*e" 14 42.42 =* " 4.242000e+01");
test ( sprintf " % -0 + # 14e " 42.42 = * " +4.242000e+01 " ) ;
test (sprintf "%.3e" (-42.42) =* "-4.242e+01");
test (sprintf "%-15.3e" (-42.42) =* "-4.242e+01 ");
test (sprintf "%015.3e" (-42.42) =* "-000004.242e+01");
test (sprintf "%+.3e" 42.42 =* "+4.242e+01");
test (sprintf "% .3e" 42.42 =* " 4.242e+01");
test ( sprintf " % # .3e " 42.42 = * " 4.242e+01 " ) ;
test (sprintf "%15.3e" 42.42 =* " 4.242e+01");
test (sprintf "%*.*e" 11 3 42.42 =* " 4.242e+01");
test ( sprintf " % -0 + # 14.3e " 42.42 = * " +4.242e+01 " ) ;
printf "\nE\n%!";
test (sprintf "%E" (-42.42) =* "-4.242000E+01");
test (sprintf "%-15E" (-42.42) =* "-4.242000E+01 ");
test (sprintf "%015E" (-42.42) =* "-004.242000E+01");
test (sprintf "%+E" 42.42 =* "+4.242000E+01");
test (sprintf "% E" 42.42 =* " 4.242000E+01");
test ( sprintf " % # E " 42.42 = * " 4.242000E+01 " ) ;
test (sprintf "%15E" 42.42 =* " 4.242000E+01");
test (sprintf "%*E" 14 42.42 =* " 4.242000E+01");
test ( sprintf " % -0 + # 14E " 42.42 = * " +4.242000E+01 " ) ;
test (sprintf "%.3E" (-42.42) =* "-4.242E+01");
test (sprintf "%-15.3E" (-42.42) =* "-4.242E+01 ");
test (sprintf "%015.3E" (-42.42) =* "-000004.242E+01");
test (sprintf "%+.3E" 42.42 =* "+4.242E+01");
test (sprintf "% .3E" 42.42 =* " 4.242E+01");
test ( sprintf " % # .3E " 42.42 = * " 4.242E+01 " ) ;
test (sprintf "%15.3E" 42.42 =* " 4.242E+01");
test (sprintf "%*.*E" 11 3 42.42 =* " 4.242E+01");
test ( sprintf " % -0 + # 14.3E " 42.42 = * " +4.242E+01 " ) ;
printf "\ng\n%!";
test (sprintf "%g" (-42.42) = "-42.42");
test (sprintf "%.3g" (-4242.) =* "-4.24e+03");
test (sprintf "%-15g" (-42.42) = "-42.42 ");
test (sprintf "%015g" (-42.42) = "-00000000042.42");
test (sprintf "%+g" 42.42 = "+42.42");
test (sprintf "% g" 42.42 = " 42.42");
test (sprintf "%15g" 42.42 = " 42.42");
test (sprintf "%*g" 14 42.42 = " 42.42");
test (sprintf "%.3g" (-42.42) = "-42.4");
printf "\nG\n%!";
test (sprintf "%G" (-42.42) = "-42.42");
test (sprintf "%.3G" (-4242.) =* "-4.24E+03");
test (sprintf "%-15G" (-42.42) = "-42.42 ");
test (sprintf "%015G" (-42.42) = "-00000000042.42");
test (sprintf "%+G" 42.42 = "+42.42");
test (sprintf "% G" 42.42 = " 42.42");
test (sprintf "%15G" 42.42 = " 42.42");
test (sprintf "%*G" 14 42.42 = " 42.42");
test (sprintf "%.3G" (-42.42) = "-42.4");
printf "\nB\n%!";
test (sprintf "%B" true = "true");
test (sprintf "%8B" true = " true");
test (sprintf "%B" false = "false");
test (sprintf "%-8B" false = "false ");
printf "\nld/li positive\n%!";
test (sprintf "%ld/%li" 42l 43l = "42/43");
test (sprintf "%-4ld/%-5li" 42l 43l = "42 /43 ");
test (sprintf "%04ld/%05li" 42l 43l = "0042/00043");
test (sprintf "%+ld/%+li" 42l 43l = "+42/+43");
test (sprintf "% ld/% li" 42l 43l = " 42/ 43");
test ( sprintf " % # ld/%#li " 42l 43l = " 42/43 " ) ;
test (sprintf "%4ld/%5li" 42l 43l = " 42/ 43");
test (sprintf "%*ld/%*li" 4 42l 5 43l = " 42/ 43");
test ( sprintf " % -0+#4ld/%-0 # 5li " 42l 43l = " +42 / 43 " ) ;
printf "\nld/li negative\n%!";
test (sprintf "%ld/%li" (-42l) (-43l) = "-42/-43");
test (sprintf "%-4ld/%-5li" (-42l) (-43l) = "-42 /-43 ");
test (sprintf "%04ld/%05li" (-42l) (-43l) = "-042/-0043");
test (sprintf "%+ld/%+li" (-42l) (-43l) = "-42/-43");
test (sprintf "% ld/% li" (-42l) (-43l) = "-42/-43");
test ( sprintf " % # ld/%#li " ( -42l ) ( -43l ) = " -42/-43 " ) ;
test (sprintf "%4ld/%5li" (-42l) (-43l) = " -42/ -43");
test (sprintf "%*ld/%*li" 4 (-42l) 5 (-43l) = " -42/ -43");
test ( sprintf " % -0 + # 4ld/%-0 + # 5li " ( -42l ) ( -43l ) = " -42 /-43 " ) ;
printf "\nlu positive\n%!";
test (sprintf "%lu" 42l = "42");
test (sprintf "%-4lu" 42l = "42 ");
test (sprintf "%04lu" 42l = "0042");
test ( sprintf " % + lu " 42l = " 42 " ) ;
test ( sprintf " % lu " 42l = " 42 " ) ;
test ( sprintf " % # lu " 42l = " 42 " ) ;
test (sprintf "%4lu" 42l = " 42");
test (sprintf "%*lu" 4 42l = " 42");
test ( sprintf " % -0 + # 6ld " 42l = " +42 " ) ;
printf "\nlu negative\n%!";
test (sprintf "%lu" (-1l) = "4294967295");
printf "\nlx positive\n%!";
test (sprintf "%lx" 42l = "2a");
test (sprintf "%-4lx" 42l = "2a ");
test (sprintf "%04lx" 42l = "002a");
test (sprintf "%#lx" 42l = "0x2a");
test (sprintf "%4lx" 42l = " 2a");
test (sprintf "%*lx" 5 42l = " 2a");
test ( sprintf " % -0 + # * lx " 5 42l = " 0x2a " ) ;
printf "\nlx negative\n%!";
test (sprintf "%lx" (-42l) = "ffffffd6");
printf "\nlX positive\n%!";
test (sprintf "%lX" 42l = "2A");
test (sprintf "%-4lX" 42l = "2A ");
test (sprintf "%04lX" 42l = "002A");
test (sprintf "%#lX" 42l = "0X2A");
test (sprintf "%4lX" 42l = " 2A");
test (sprintf "%*lX" 5 42l = " 2A");
test ( sprintf " % -0 + # * lX " 5 42l = " 0X2A " ) ;
printf "\nlx negative\n%!";
test (sprintf "%lX" (-42l) = "FFFFFFD6");
printf "\nlo positive\n%!";
test (sprintf "%lo" 42l = "52");
test (sprintf "%-4lo" 42l = "52 ");
test (sprintf "%04lo" 42l = "0052");
test ( sprintf " % + lo " 42l = " 52 " ) ;
test ( sprintf " % lo " 42l = " 52 " ) ;
test (sprintf "%#lo" 42l = "052");
test (sprintf "%4lo" 42l = " 52");
test (sprintf "%*lo" 5 42l = " 52");
test ( sprintf " % -0 + # * lo " 5 42l = " 052 " ) ;
printf "\nlo negative\n%!";
test (sprintf "%lo" (-42l) = "37777777726");
(* Nativeint not tested: looks like too much work, and anyway it should
   work like Int32 or Int64. *)
printf "\nLd/Li positive\n%!";
test (sprintf "%Ld/%Li" 42L 43L = "42/43");
test (sprintf "%-4Ld/%-5Li" 42L 43L = "42 /43 ");
test (sprintf "%04Ld/%05Li" 42L 43L = "0042/00043");
test ( sprintf " % + Ld/%+Li " 42L 43L = " " ) ;
> > ' + ' is incompatible with ' Ld '
test ( sprintf " % Ld/% " 42L 43L = " 42/ 43 " ) ;
> > ' ' is incompatible with ' Ld '
test ( sprintf " % # " 42L 43L = " 42/43 " ) ;
> > ' # ' is incompatible with ' Ld '
test (sprintf "%4Ld/%5Li" 42L 43L = " 42/ 43");
test (sprintf "%*Ld/%*Li" 4 42L 5 43L = " 42/ 43");
test ( sprintf " % -0+#4Ld/%-0 # 5Li " 42L 43L = " +42 / 43 " ) ;
printf "\nLd/Li negative\n%!";
test (sprintf "%Ld/%Li" (-42L) (-43L) = "-42/-43");
test (sprintf "%-4Ld/%-5Li" (-42L) (-43L) = "-42 /-43 ");
test (sprintf "%04Ld/%05Li" (-42L) (-43L) = "-042/-0043");
test ( sprintf " % + Ld/%+Li " ( -42L ) ( -43L ) = " -42/-43 " ) ;
> > ' + ' is incompatible with ' Ld '
test ( sprintf " % Ld/% " ( -42L ) ( -43L ) = " -42/-43 " ) ;
> > ' ' is incompatible with ' Ld '
test ( sprintf " % # " ( -42L ) ( -43L ) = " -42/-43 " ) ;
> > ' # ' is incompatible with ' Ld '
test (sprintf "%4Ld/%5Li" (-42L) (-43L) = " -42/ -43");
test (sprintf "%*Ld/%*Li" 4 (-42L) 5 (-43L) = " -42/ -43");
printf "\nLu positive\n%!";
test (sprintf "%Lu" 42L = "42");
test (sprintf "%-4Lu" 42L = "42 ");
test (sprintf "%04Lu" 42L = "0042");
test ( sprintf " % + Lu " 42L = " 42 " ) ;
> > ' + ' is incompatible with ' '
test ( sprintf " % Lu " 42L = " 42 " ) ;
> > ' ' is incompatible with ' '
test ( sprintf " % # Lu " 42L = " 42 " ) ;
> > ' # ' is incompatible with ' '
test (sprintf "%4Lu" 42L = " 42");
test (sprintf "%*Lu" 4 42L = " 42");
test ( sprintf " % -0 + # 6Ld " 42L = " +42 " ) ;
printf "\nLu negative\n%!";
test (sprintf "%Lu" (-1L) = "18446744073709551615");
printf "\nLx positive\n%!";
test (sprintf "%Lx" 42L = "2a");
test (sprintf "%-4Lx" 42L = "2a ");
test (sprintf "%04Lx" 42L = "002a");
test (sprintf "%#Lx" 42L = "0x2a");
test (sprintf "%4Lx" 42L = " 2a");
test (sprintf "%*Lx" 5 42L = " 2a");
test ( sprintf " % -0 + # * Lx " 5 42L = " 0x2a " ) ;
printf "\nLx negative\n%!";
test (sprintf "%Lx" (-42L) = "ffffffffffffffd6");
printf "\nLX positive\n%!";
test (sprintf "%LX" 42L = "2A");
test (sprintf "%-4LX" 42L = "2A ");
test (sprintf "%04LX" 42L = "002A");
> > ' + ' is incompatible with ' LX '
> > ' ' is incompatible with ' LX '
test (sprintf "%#LX" 42L = "0X2A");
test (sprintf "%4LX" 42L = " 2A");
test (sprintf "%*LX" 5 42L = " 2A");
test ( sprintf " % -0 + # * LX " 5 42L = " 0X2A " ) ;
printf "\nLx negative\n%!";
test (sprintf "%LX" (-42L) = "FFFFFFFFFFFFFFD6");
printf "\nLo positive\n%!";
test (sprintf "%Lo" 42L = "52");
test (sprintf "%-4Lo" 42L = "52 ");
test (sprintf "%04Lo" 42L = "0052");
test ( sprintf " % + Lo " 42L = " 52 " ) ;
> > ' + ' is incompatible with ' '
test ( sprintf " % Lo " 42L = " 52 " ) ;
> > ' ' is incompatible with ' '
test (sprintf "%#Lo" 42L = "052");
test (sprintf "%4Lo" 42L = " 52");
test (sprintf "%*Lo" 5 42L = " 52");
test ( sprintf " % -0 + # * Lo " 5 42L = " 052 " ) ;
printf "\nLo negative\n%!";
test (sprintf "%Lo" (-42L) = "1777777777777777777726");
printf "\na\n%!";
let x = ref () in
let f () y = if y == x then "ok" else "wrong" in
test (sprintf "%a" f x = "ok");
printf "\nt\n%!";
let f () = "ok" in
test (sprintf "%t" f = "ok");
printf "\n{...%%}\n%!";
let f = format_of_string "%4g/%s" in
test (sprintf "%{%.4F%5S%}" f = "%f%s");
printf "\n(...%%)\n%!";
let f = format_of_string "%d/foo/%s" in
test (sprintf "%(%d%s%)" f 42 "bar" = "42/foo/bar");
printf "\n! %% @ , and constants\n%!";
test (sprintf "%!" = "");
test (sprintf "%%" = "%");
test (sprintf "%@" = "@");
test (sprintf "%," = "");
test (sprintf "@" = "@");
test (sprintf "@@" = "@@");
test (sprintf "@%%" = "@%");
printf "\nend of tests\n%!";
with e ->
printf "unexpected exception: %s\n%!" (Printexc.to_string e);
test false;
;;
|
122ecfd6ae2afcb83c799e7ff0e6189162db01831e55405b808d99871077fee0 | LexiFi/menhir | invariant.ml | (******************************************************************************)
(* *)
(* *)
, Paris
, PPS , Université Paris Diderot
(* *)
. All rights reserved . This file is distributed under the
terms of the GNU General Public License version 2 , as described in the
(* file LICENSE. *)
(* *)
(******************************************************************************)
(* This module discovers information about the shape and content of the stack
in each of the automaton's states. *)
open Grammar
open Conflict (* artificial dependency; ensures that [Conflict] runs first *)
(* ------------------------------------------------------------------------ *)
(* Compute the known suffix of the stack, a sequence of symbols,
at every state. This is the "short invariant". *)
(* [SSy] computes, for every state, the known suffix of the stack as a
   sequence of symbols; [open SSy] brings [stack_symbols] and friends
   into scope. *)
module SSy =
  StackSymbols.Run()
open SSy
(* ------------------------------------------------------------------------ *)
(* Now, compute which states may be held in the known suffix of the stack. *)
(* [SSt] computes, on top of [SSy], which sets of states may be held in
   each cell of the known stack suffix; [open SSt] brings [stack_states],
   [production_states], etc., into scope. *)
module SSt =
  StackStates.Run(SSy)
open SSt
(* ------------------------------------------------------------------------ *)
(* If requested, print the information that has been computed above. *)
(* At log level 3 and above, dump the information computed above; the label
   "short" refers to the short invariant. *)
let () =
  Error.logC 3 (dump "short")
(* ------------------------------------------------------------------------ *)
(* We now determine which states must be represented, that is,
   explicitly pushed onto the stack. For simplicity, a state is either
   always represented or never represented. More fine-grained
   strategies, where a single state is sometimes pushed onto the stack
   and sometimes not pushed, depending on which outgoing transition is
   being taken, are conceivable, but quite tricky, and probably not
   worth the trouble.

   (1) If two states are liable to appear within a single stack cell,
   then one is represented if and only if the other is
   represented. This ensures that the structure of stacks is known
   everywhere and that we can propose types for stacks.

   (2) If a state [s] has an outgoing transition along nonterminal
   symbol [nt], and if the [goto] table for symbol [nt] has more than
   one target, then state [s] is represented.

   (3) If a stack cell contains more than one state and if at least
   one of these states is able to handle the [error] token, then these
   states are represented.

   (4) If the semantic action associated with a production mentions
   the [$syntaxerror] keyword, then the state that is being reduced to
   (that is, the state that initiated the recognition of this
   production) is represented. (Indeed, it will be passed as an
   argument to [errorcase].) *)
(* Data. *)
(* [rep] maps every state number to a union-find point carrying a Boolean:
   whether that state must be represented. Points are later unified, so that
   states that may share a stack cell agree on this decision. *)
let rep : bool UnionFind.point array =
  Array.init Lr1.n (fun _ -> UnionFind.fresh false)
(* Getter. *)
(* [represented state] is the union-find point associated with [state]. *)
let represented state =
  rep.(Lr1.number state)

(* Setters. *)

(* [represent state] marks [state] (and every state whose point has been
   unified with it) as represented. *)
let represent state =
  UnionFind.set (represented state) true

(* [represents states] marks a nonempty set of states as represented. Because
   the states within one stack cell are unified (condition (1), enforced
   below), marking one member suffices. *)
let represents states =
  represent (Lr1.NodeSet.choose states)
(* Enforce condition (1) above. *)
(* [share v] unifies, within each cell described by [v], the points of all
   the states that may inhabit that cell, so that either all of them or none
   of them end up represented. A fresh point serves as a hub for each cell. *)
let share (v : property) =
  Array.iter (fun states ->
    let dummy = UnionFind.fresh false in
    Lr1.NodeSet.iter (fun state ->
      UnionFind.union dummy (represented state)
    ) states
  ) v
(* Apply [share] to the per-state and per-production stack descriptions,
   unifying the states that appear within a single stack cell. *)
let () =
  Lr1.iter (fun node ->
    share (stack_states node)
  );
  Production.iter (fun prod ->
    share (production_states prod)
  )
(* Enforce condition (2) above. *)
(* If the goto table for nonterminal [nt] has more than one target state,
   then every source state of a transition along [nt] is represented. *)
let () =
  Nonterminal.iter (fun nt ->
    (* Count the targets of the transitions along [nt]. *)
    let count =
      Lr1.targets (fun count _ _ ->
        count + 1
      ) 0 (Symbol.N nt)
    in
    if count > 1 then
      Lr1.targets (fun () sources _ ->
        List.iter represent sources
      ) () (Symbol.N nt)
  )
(* Enforce condition (3) above. *)
(* [handler state] tells whether [state] can react to the [error] token,
   either by shifting it or by reducing on it. *)
let handler state =
  let shifts_error =
    match SymbolMap.find (Symbol.T Terminal.error) (Lr1.transitions state) with
    | exception Not_found -> false
    | _ -> true
  and reduces_error =
    match TerminalMap.lookup Terminal.error (Lr1.reductions state) with
    | exception Not_found -> false
    | _ -> true
  in
  shifts_error || reduces_error
(* [handlers states] tells whether at least one state in [states] can
   handle the [error] token. *)
let handlers states =
  Lr1.NodeSet.exists handler states
(* If a stack cell may hold two or more states and at least one of them can
   handle [error], then those states are represented. *)
let () =
  Lr1.iter (fun node ->
    let v = stack_states node in
    Array.iter (fun states ->
      if Lr1.NodeSet.cardinal states >= 2 && handlers states then
        represents states
    ) v
  )
(* Enforce condition (4) above. *)
(* If a production's semantic action mentions [$syntaxerror], then the states
   where recognition of this production begins are represented. For an
   epsilon production, these are the sites where it is reduced; otherwise,
   they are the states found in the first cell of the production's stack
   description. *)
let () =
  Production.iterx (fun prod ->
    if Action.has_syntaxerror (Production.action prod) then
      let sites = Lr1.production_where prod in
      let length = Production.length prod in
      if length = 0 then
        Lr1.NodeSet.iter represent sites
      else
        let states = (production_states prod).(0) in
        represents states
  )
(* Define accessors. *)
(* If [--represent-states] is passed on the command line, then every state is
represented. The above computation is still performed. *)
(* [represented state] is the final decision for [state]. If
   [--represent-states] was passed on the command line, every state is
   represented, overriding (but not skipping) the computation above. *)
let represented state =
  Settings.represent_states ||
  UnionFind.get (represented state)
(* [representeds states] tells whether the states in the cell [states] are
   represented. An empty cell yields [false], unless [--represent-states]
   forces representation everywhere. Because the states within one cell have
   been unified, consulting one member suffices. *)
let representeds states =
  Settings.represent_states ||
  (not (Lr1.NodeSet.is_empty states) &&
   represented (Lr1.NodeSet.choose states))
(* Statistics. *)
(* At log level 1 and above, report how many states are represented. *)
let () =
  Error.logC 1 (fun f ->
    let count =
      Lr1.fold (fun count node ->
        if represented node then count + 1 else count
      ) 0
    in
    Printf.fprintf f "%d out of %d states are represented.\n" count Lr1.n
  )
(* If requested, show a detailed table of which states are represented. *)
(* At log level 3 and above, show, for every state, whether it is
   represented. *)
let () =
  Error.logC 3 (fun f ->
    Lr1.iter (fun node ->
      Printf.fprintf f "represented(%s) = %b\n"
        (Lr1.print node) (represented node)
    )
  )
(* ------------------------------------------------------------------------ *)
(* Machinery for the computation of which symbols must keep track of their
start or end positions. *)
open Keyword
(* A variable is a pair of a symbol and a flavor of position; it stands for
   the Boolean property: "this symbol must keep track of this position".
   The code below, in [M.add] and [M.find], matches such pairs and expects
   the [where] component to be [WhereStart] or [WhereEnd] only. This line
   was garbled by extraction; it is restored here per upstream Menhir. *)
type variable =
  Symbol.t * where (* WhereStart or WhereEnd *)
(* An implementation of imperative maps over [variable] keys, as required by
   the fixed-point solver: a pair of symbol maps, one for [WhereStart] keys
   and one for [WhereEnd] keys. [WhereSymbolStart] keys must not occur
   (the [assert false] branches below enforce this). *)
module M : Fix.IMPERATIVE_MAPS with type key = variable = struct
  type key = variable
  (* [startp] holds the data for [WhereStart] keys; [endp] for [WhereEnd]. *)
  type 'data t = {
    mutable startp: 'data SymbolMap.t;
    mutable endp: 'data SymbolMap.t;
  }
  open SymbolMap
  let create() =
    { startp = empty; endp = empty }
  let clear m =
    m.startp <- empty; m.endp <- empty
  (* [add] dispatches on the [where] component of the key. *)
  let add (sym, where) data m =
    match where with
    | WhereStart ->
        m.startp <- add sym data m.startp
    | WhereEnd ->
        m.endp <- add sym data m.endp
    | WhereSymbolStart ->
        assert false
  (* [find] dispatches likewise; it raises [Not_found] if absent. *)
  let find (sym, where) m =
    match where with
    | WhereStart ->
        find sym m.startp
    | WhereEnd ->
        find sym m.endp
    | WhereSymbolStart ->
        assert false
  (* [iter] visits all bindings, start-position bindings first. *)
  let iter f m =
    iter (fun sym -> f (sym, WhereStart)) m.startp;
    iter (fun sym -> f (sym, WhereEnd)) m.endp
end
(* ------------------------------------------------------------------------ *)
(* We now determine which positions must be kept track of. For simplicity, we
   do this on a per-symbol basis. That is, for each symbol, either we never
   keep track of position information, or we always do. In fact, we do
   distinguish start and end positions. This leads to computing two sets of
   symbols -- those that keep track of their start position and those that
   keep track of their end position.

   A symbol on the right-hand side of a production must keep track of its
   (start or end) position if that position is explicitly requested by a
   semantic action.

   Furthermore, if the left-hand symbol of a production must keep track of its
   start (resp. end) position, then the first (resp. last) symbol of its
   right-hand side (if there is one) must do so as well. That is, unless the
   right-hand side is empty. *)
(* 2015/11/11. When a production [prod] is reduced, the top stack cell may be
   consulted for its end position. This implies that this cell must exist
   and must store an end position! Now, when does this happen?

   1- This happens if [prod] is an epsilon production and the left-hand symbol
   of the production, [nt prod], keeps track of its start or end position.

   2- This happens if the semantic action explicitly mentions the keyword
   [$endpos($0)].

   Now, if this happens, what should we do?

   a- If this happens in a state [s] whose incoming symbol is [sym], then [sym]
   must keep track of its end position.

   b- If this happens in an initial state, where the stack may be empty, then
   the sentinel cell at the bottom of the stack must contain an end position.

   Point (b) doesn't concern us here, but point (a) does. We must implement the
   constraint (1) \/ (2) -> (a). Point (b) is taken care of in the code back-end,
   where, for simplicity, we always create a sentinel cell. *)
(* I will say that this is a lot more sophisticated than I would like. The code
back-end has been known for its efficiency and I am trying to maintain this
property -- in particular, I would like to keep track of no positions at all,
if the user doesn't use any position keyword. But I am suffering. *)
(* If [--represent-positions] is passed on the command line, then every position
is stored. *)
(* Instantiate the constraint solver over Boolean properties: a variable is
   [true] when that position must be kept track of. *)
module F =
  FixSolver.Make(M)(Fix.Prop.Boolean)
(* Record the constraints. This block's comment delimiters were destroyed by
   extraction; the comments are restored here, the code is unchanged. *)
let () =
  (* We gather the constraints explained above in two loops. The first loop
     looks at every (non-start) production [prod]. The second loop looks at
     every (non-initial) state [s]. *)
  Production.iterx (fun prod ->
    let nt, rhs = Production.def prod
    and ids = Production.identifiers prod
    and action = Production.action prod in
    let length = Array.length rhs in
    if length > 0 then begin
      (* If [nt] keeps track of its start position, then the first symbol
         in the right-hand side must do so as well. *)
      F.record_VarVar (Symbol.N nt, WhereStart) (rhs.(0), WhereStart);
      (* If [nt] keeps track of its end position, then the last symbol
         in the right-hand side must do so as well. *)
      F.record_VarVar (Symbol.N nt, WhereEnd) (rhs.(length - 1), WhereEnd)
    end;
    KeywordSet.iter (function
      | SyntaxError ->
          ()
      | Position (Before, _, _) ->
          (* Doing nothing here because [$endpos($0)] is dealt with in
             the second loop. *)
          ()
      | Position (Left, _, _) ->
          (* [$startpos] and [$endpos] have been expanded away. *)
          assert false
      | Position (_, _, FlavorLocation) ->
          (* [$loc] and [$sloc] have been expanded away. *)
          assert false
      | Position (RightNamed _, WhereSymbolStart, _) ->
          (* [$symbolstartpos(x)] does not exist. *)
          assert false
      | Position (RightNamed id, where, _) ->
          (* If the semantic action mentions [$startpos($i)], then the
             [i]-th symbol in the right-hand side must keep track of
             its start position. Similarly for end positions. *)
          Array.iteri (fun i id' ->
            if id = id' then
              F.record_ConVar true (rhs.(i), where)
          ) ids
    ) (Action.keywords action)
  ); (* end of loop on productions *)
  Lr1.iterx (fun s ->
    (* Let [sym] be the incoming symbol of state [s]. *)
    let sym = Option.force (Lr1.incoming_symbol s) in
    (* Condition (1) in the long comment above (2015/11/11). If an epsilon
       production [prod] can be reduced in state [s], if its left-hand side
       [nt] keeps track of its start or end position, then [sym] must keep
       track of its end position. *)
    TerminalMap.iter (fun _ prods ->
      let prod = Misc.single prods in
      let nt, rhs = Production.def prod in
      let length = Array.length rhs in
      if length = 0 then begin
        F.record_VarVar (Symbol.N nt, WhereStart) (sym, WhereEnd);
        F.record_VarVar (Symbol.N nt, WhereEnd) (sym, WhereEnd)
      end
    ) (Lr1.reductions s);
    (* Condition (2) in the long comment above (2015/11/11). If a production
       can be reduced in state [s] and mentions [$endpos($0)], then [sym]
       must keep track of its end position. *)
    if Lr1.has_beforeend s then
      F.record_ConVar true (sym, WhereEnd)
  )
(* Solve the constraints. [S.solution] maps a variable to an optional
   Boolean; a variable that was never mentioned in any constraint yields
   [None]. *)
let track : variable -> bool option =
  let module S = F.Solve() in
  S.solution

(* An unconstrained variable defaults to [false]: by default, a position is
   not kept track of. *)
let track : variable -> bool =
  fun x -> Option.value (track x) ~default:false
(* [startp symbol] (resp. [endp symbol]) tells whether [symbol] must keep
   track of its start (resp. end) position. If [--represent-positions] was
   passed on the command line, every position is stored. *)
let startp symbol =
  Settings.represent_positions ||
  track (symbol, WhereStart)

let endp symbol =
  Settings.represent_positions ||
  track (symbol, WhereEnd)
(* [for_every_symbol f] applies [f] to every symbol: all terminals first,
   then all nonterminals. *)
let for_every_symbol (f : Symbol.t -> unit) : unit =
  Terminal.iter (fun t -> f (Symbol.T t));
  Nonterminal.iter (fun nt -> f (Symbol.N nt))
(* [sum_over_every_symbol f] counts how many symbols satisfy the predicate
   [f]. *)
let sum_over_every_symbol (f : Symbol.t -> bool) : int =
  let count = ref 0 in
  for_every_symbol (fun sym ->
    if f sym then incr count
  );
  !count
(* At log level 1 and above, report how many symbols keep track of their
   start and end positions. *)
let () =
  Error.logC 1 (fun f ->
    Printf.fprintf f
      "%d out of %d symbols keep track of their start position.\n\
       %d out of %d symbols keep track of their end position.\n"
      (sum_over_every_symbol startp) (Terminal.n + Nonterminal.n)
      (sum_over_every_symbol endp) (Terminal.n + Nonterminal.n))
(* ------------------------------------------------------------------------ *)
(* Constructors and accessors for information about the stack. *)
(* Types. *)
(* A [cell] describes one stack cell: which symbol it corresponds to, which
   states may be stored in it, and which pieces of data it physically
   holds. *)
type cell = {
  (* The symbol associated with this cell. *)
  symbol: Symbol.t;
  (* The set of states that may appear in this cell. *)
  states: Lr1.NodeSet.t;
  (* Whether the cell physically holds a semantic value. *)
  holds_semv: bool;
  (* Whether the cell physically holds a state. *)
  holds_state: bool;
  (* Whether the cell physically holds a start (resp. end) position. *)
  holds_startp: bool;
  holds_endp: bool;
}

(* A [word] is a sequence of cells: a known suffix of the stack. The top of
   the stack is the last element of the array (see [fold_top], [rewind]). *)
type word =
  cell array
(* Constructors. *)
(* If [--represent-values] is passed on the command line, then every semantic
value is stored. *)
(* [has_semv symbol] tells whether a stack cell for [symbol] holds a
   semantic value. A nonterminal symbol always does; a token does so unless
   it has no declared type, in which case its (unit) value is omitted from
   the cell. If [--represent-values] was passed on the command line, every
   semantic value is stored. *)
let has_semv symbol =
  Settings.represent_values ||
  match symbol with
  | Symbol.N _ ->
      true
  | Symbol.T tok ->
      Terminal.ocamltype tok <> None
(* [cell symbol states] builds the description of one stack cell, deriving
   each [holds_*] field from the decisions computed above. *)
let cell symbol states =
  let holds_semv = has_semv symbol in
  let holds_state = representeds states in
  let holds_startp, holds_endp = startp symbol, endp symbol in
  { symbol; states; holds_semv; holds_state; holds_startp; holds_endp }
(* Accessors. *)
(* [similar cell1 cell2] tells whether two cells have the same layout. *)
let similar cell1 cell2 =
  Symbol.equal cell1.symbol cell2.symbol &&
  cell1.holds_state = cell2.holds_state

(* The fields [holds_semv], [holds_startp] and [holds_endp]
   do not need to be compared, because they are determined
   by the field [symbol]. The field [states] does not need
   to be compared because it does not influence the layout
   of the cell; comparing the field [holds_state] suffices. *)
(* [pop w] is [w] deprived of its topmost cell, i.e. its last element
   (see its use in [rewind] below, together with [MArray.last]). *)
let pop =
  MArray.pop
(* [fold_top f default w] applies [f] to the topmost cell of [w], that is,
   its last element; if [w] is empty, it returns [default]. *)
let fold_top f default w =
  match Array.length w with
  | 0 -> default
  | n -> f w.(n - 1)
(* ------------------------------------------------------------------------ *)
(* Publish the short invariant. *)
(* The signature of the published invariant. The doc comment on [gotostack]
   was garbled by extraction and is restored here; the declarations are
   unchanged. *)
module type STACK = sig

  (**[stack s] is the known suffix of the stack at state [s]. *)
  val stack: Lr1.node -> word

  (**[prodstack prod] is the known suffix of the stack at a state where
     production [prod] can be reduced. In the short invariant, the length of
     this suffix is [Production.length prod]. In the long invariant, its
     length can be greater. If there are no states where [prod] can be
     reduced, then every cell contains an empty set of states. *)
  val prodstack: Production.index -> word

  (**[gotostack nt] is the known suffix of the stack at a state where an
     edge labeled [nt] has just been followed. In the short invariant, the
     length of this suffix is [1]: indeed, it consists of just one cell,
     associated with the symbol [nt]. In the long invariant, its length can
     be greater. *)
  val gotostack: Nonterminal.t -> word

end
(* Suppose we have a function [foo] that maps things to vectors of foos and
a function [bar] that maps things to vectors of bars. Suppose we have a
function [cell] that builds a cell out of a foo and a bar. Then, we want
to construct and tabulate a function that maps things to vectors of
cells. This is done in a generic way as follows. *)
(* [publish tabulate foo bar cell] tabulates the function that maps a thing
   to its vector of cells; each cell pairs one element of [foo thing] with
   the corresponding element of [bar thing]. *)
let publish tabulate foo bar cell =
  tabulate (fun thing ->
    let foos, bars = foo thing, bar thing in
    assert (Array.length foos >= Array.length bars);
    (* We allow [bars] to be shorter than [foos]. This is required in the
       computation of the long invariant, where [validate] can reject sets
       of states that are not equi-represented. In that case, we truncate
       [foos] to match [bars]. *)
    let k = Array.length bars in
    let foos = MArray.truncate k foos in
    Array.init k (fun i -> cell foos.(i) bars.(i))
  )
(* [stack s] is the known suffix of the stack at state [s]. *)
let stack : Lr1.node -> word =
  publish Lr1.tabulate stack_symbols stack_states cell

(* [prodstack prod] is the known suffix of the stack at a state where
   production [prod] can be reduced. *)
let prodstack : Production.index -> word =
  publish Production.tabulate production_symbols production_states cell

(* [gotostack nt] is the known suffix of the stack at a state where an edge
   labeled [nt] has just been followed. *)
let gotostack : Nonterminal.t -> word =
  publish Nonterminal.tabulate goto_symbols goto_states cell
(* ------------------------------------------------------------------------ *)

(* Explain how the stack should be deconstructed when an error is found.

   We sometimes have a choice as to how many stack cells should be popped.
   Indeed, several cells in the known suffix of the stack may physically hold
   a state. If neither of these states handles errors, then we could jump to
   either. (Indeed, if we jump to one that's nearer, it will in turn pop
   further stack cells and jump to one that's farther.) In the interest of
   code size, we should pop as few stack cells as possible. So, we jump to the
   topmost represented state in the known suffix. *)

type state =
  | Represented
  | UnRepresented of Lr1.node

type instruction =
  | Die
  | DownTo of word * state

(* [rewind node] says how to deconstruct the stack when an error is
   detected in state [node]. *)
let rewind node : instruction =
  let w = stack node in
  let rec rewind w =
    if Array.length w = 0 then
      (* I believe that every stack description either is definite
         (that is, ends with [TailEmpty]) or contains at least one
         represented state. Thus, if we find an empty [w], this
         means that the stack is definitely empty. *)
      Die
    else
      let { states; _ } as cell = MArray.last w in
      let w = MArray.pop w in
      if representeds states then
        (* Here is a represented state. We will pop this
           cell and no more. *)
        DownTo ([| cell |], Represented)
      else if handlers states then begin
        (* Here is an unrepresented state that can handle
           errors. The cell must hold a singleton set of states, so
           we know which state to jump to, even though it isn't
           represented. *)
        assert (Lr1.NodeSet.cardinal states = 1);
        let state = Lr1.NodeSet.choose states in
        DownTo ([| cell |], UnRepresented state)
      end
      else
        (* Here is an unrepresented state that does not handle
           errors. Pop this cell and look further. *)
        match rewind w with
        | Die ->
            Die
        | DownTo (w, st) ->
            DownTo (MArray.push w cell, st)
  in
  rewind w
(* ------------------------------------------------------------------------- *)

(* Miscellaneous. *)

(* [universal symbol] tells whether every represented state carries an
   outgoing transition along [symbol]. Unrepresented states are ignored. *)
let universal symbol =
  Lr1.fold (fun acc s ->
    (* [represented s] implies [symbol] is in the transitions of [s]. *)
    acc && (not (represented s) || SymbolMap.mem symbol (Lr1.transitions s))
  ) true
(* ------------------------------------------------------------------------ *)

(* Discover which states can peek at an error. These are the states
   where an error token may be on the stream. These are the states
   that are targets of a reduce action on [error]. *)

(* 2012/08/25 I am optimizing this code, whose original version I found had
   quadratic complexity. The problem is as follows. We can easily iterate over
   all states to find which states [s] have a reduce action on error. What we
   must find out, then, is into which state [t] this reduce action takes us.
   This is not easy to predict, as it depends on the contents of the stack.
   The original code used an overapproximation, as follows: if the reduction
   concerns a production whose head symbol is [nt], then all of the states
   that have an incoming transition labeled [nt] are potential targets. The
   new version of the code below relies on the same approximation, but uses
   two successive loops instead of two nested loops. *)

let errorpeekers =
  (* First compute a set of symbols [nt]... *)
  let nts : SymbolSet.t =
    Lr1.fold (fun nts node ->
      try
        let prods = TerminalMap.lookup Terminal.error (Lr1.reductions node) in
        let prod = Misc.single prods in
        let nt = Production.nt prod in
        SymbolSet.add (Symbol.N nt) nts
      with Not_found ->
        nts
    ) SymbolSet.empty
  in
  (* ... then compute the set of all target states of all transitions
     labeled by some symbol in the set [nt]. *)
  SymbolSet.fold (fun nt errorpeekers ->
    Lr1.NodeSet.union errorpeekers (Lr1.all_targets nt)
  ) nts Lr1.NodeSet.empty

(* [errorpeeker node] tells whether [node] is a member of this set. *)
let errorpeeker node =
  Lr1.NodeSet.mem node errorpeekers

(* ------------------------------------------------------------------------ *)

let () =
  Time.tick "Constructing the invariant"
(* ------------------------------------------------------------------------ *)

(* Compute and publish the long invariant. *)

(* Fortunately, all of the building blocks are at hand, so this is easy. *)

(* A caveat: it is not obvious that the sets of states computed here are
   equi-represented. (A set is equi-represented if all of its elements are
   represented *or* all of its elements are unrepresented.) Yet, we need
   this property, otherwise the long invariant cannot be safely translated
   to an OCaml GADT.

   One might think that this property is likely true, because every set of
   states that appears somewhere in the long invariant must also appear
   somewhere in the short invariant, and we know that every set of states in
   the short invariant is equi-represented, because we have explicitly
   imposed this requirement. However, this is *incorrect*: testing shows
   that not every set of states in the long invariant is equi-represented.

   To work around this problem, we truncate the long invariant so as to
   forget about any stack cells that are not equi-represented. *)

module Long () = struct

  (* Compute. *)

  module SSy =
    StackSymbols.Long()

  module SSt =
    StackStates.Run(SSy)

  open SSy (* crucial! shadows the short invariant *)
  open SSt (* crucial! shadows the short invariant *)

  (* Validate. *)

  let unrepresented node =
    not (represented node)

  (* A set of nodes is equi-represented if its elements are all represented
     or all unrepresented. *)
  let equi_represented nodes =
    Lr1.NodeSet.for_all represented nodes ||
    Lr1.NodeSet.for_all unrepresented nodes

  (* Keep only the longest suffix whose cells are all equi-represented. *)
  let validate states =
    MArray.greatest_suffix_forall equi_represented states

  let stack_states s =
    validate @@ stack_states s

  let production_states prod =
    validate @@ production_states prod

  let goto_states nt =
    validate @@ goto_states nt

  (* Dump. *)

  let () =
    Error.logC 3 (dump "long")

  (* Publish. *)

  let stack : Lr1.node -> word =
    publish Lr1.tabulate stack_symbols stack_states cell

  let prodstack : Production.index -> word =
    publish Production.tabulate production_symbols production_states cell

  let gotostack : Nonterminal.t -> word =
    publish Nonterminal.tabulate goto_symbols goto_states cell

  let () =
    Time.tick "Constructing the long invariant"

end (* Long *)
(* ------------------------------------------------------------------------ *)

(* Compute which entry states can reach each [run], [reduce], and [goto]
   function. *)

(* This information is computed only on demand. *)

(* This information is used in the new code back-end to determine in which
   states we have static knowledge of the final result type of the parser,
   ['final]. This information can be built into the GADT that describes the
   states, and this in turn can be used to perform certain optimizations (such
   as removing case analyses that have only one branch) while preserving the
   well-typedness of the OCaml code. *)

(* This information is computed via a forward data flow analysis. *)

(* The join semi-lattice of properties is as follows. *)

module P = struct

  (* [SingleOrigin s] means that we are reachable via a single entry state
     [s]. [Top] means that we are reachable via multiple entry states. *)
  type property =
    | SingleOrigin of Nonterminal.t
    | Top

  (* The join of two properties; [Top] is absorbing. *)
  let leq_join p1 p2 =
    match p1, p2 with
    | _, Top
    | Top, _ ->
        Top
    | SingleOrigin start1, SingleOrigin start2 ->
        if Nonterminal.equal start1 start2 then p2 else Top

end
(* The call graph of the [run], [reduce] and [goto] functions. *)

module G = struct

  include P

  (* A variable stands for one of these functions. *)
  type variable =
    | Run of Lr1.node
    | Reduce of Production.index
    | Goto of Nonterminal.t

  type t = variable

  let foreach_root yield =
    (* The entry points are the [run] functions associated with each of
       the entry states. *)
    Lr1.entry |> ProductionMap.iter (fun prod node ->
      let nt = Option.force (Production.classify prod) in
      yield (Run node) (SingleOrigin nt)
    )

  let foreach_successor v origin yield =
    match v with
    | Run node ->
        (* For each transition from [node] to [node'], the function [run node]
           calls the function [run node']. In the case of [goto] transitions,
           this is not a direct call (it goes through [reduce] and [goto]
           functions), but it is nevertheless accounted for here. *)
        Lr1.transitions node |> SymbolMap.iter begin fun _label node' ->
          yield (Run node') origin
        end;
        Lr1.reductions node |> TerminalMap.iter begin fun _tok prods ->
          let prod = Misc.single prods in
          yield (Reduce prod) origin
        end
    | Reduce prod ->
        (* A [reduce] function ends with a call to a [goto] function. *)
        let nt = Production.nt prod in
        yield (Goto nt) origin
    | Goto _nt ->
        (* A [goto] function appears to make no calls. The calls that it
           makes have already been accounted for above. *)
        ()

end
(* Run the analysis on demand. *)

let solution : (G.variable -> P.property option) Lazy.t =
  lazy (
    let module D = Fix.DataFlow.ForType(G)(P)(G) in
    D.solution
  )

(* Convert a [property option] to something clearer for the end user. *)

module Origin = struct

  type origin =
    | Dead
    | SingleOrigin of Nonterminal.t
    | MultipleOrigins

  (* [None] means that this function is unreachable from every root. *)
  let convert op =
    match op with
    | None ->
        Dead
    | Some (P.SingleOrigin nt) ->
        SingleOrigin nt
    | Some (P.Top) ->
        MultipleOrigins

  (* Publish the data. *)

  let run node =
    convert (Lazy.force solution (G.Run node))

  let reduce prod =
    convert (Lazy.force solution (G.Reduce prod))

  let goto nt =
    convert (Lazy.force solution (G.Goto nt))

end (* Origin *)
file LICENSE.
****************************************************************************
This module discovers information about the shape and content of the stack
in each of the automaton's states.
------------------------------------------------------------------------
Compute the known suffix of the stack, a sequence of symbols,
at every state. This is the "short invariant".
------------------------------------------------------------------------
Now, compute which states may be held in the known suffix of the stack.
------------------------------------------------------------------------
If requested, print the information that has been computed above.
------------------------------------------------------------------------
Data.
Getter.
Setters.
Define accessors.
If [--represent-states] is passed on the command line, then every state is
represented. The above computation is still performed.
Statistics.
If requested, show a detailed table of which states are represented.
------------------------------------------------------------------------
Machinery for the computation of which symbols must keep track of their
start or end positions.
------------------------------------------------------------------------
I will say that this is a lot more sophisticated than I would like. The code
back-end has been known for its efficiency and I am trying to maintain this
property -- in particular, I would like to keep track of no positions at all,
if the user doesn't use any position keyword. But I am suffering.
If [--represent-positions] is passed on the command line, then every position
is stored.
If [nt] keeps track of its end position, then the last symbol
in the right-hand side must do so as well.
[$startpos] and [$endpos] have been expanded away.
[$loc] and [$sloc] have been expanded away.
[$symbolstartpos(x)] does not exist.
If the semantic action mentions [$startpos($i)], then the
[i]-th symbol in the right-hand side must keep track of
its start position. Similarly for end positions.
end of loop on productions
Let [sym] be the incoming symbol of state [s].
------------------------------------------------------------------------
Constructors and accessors for information about the stack.
Types.
Constructors.
If [--represent-values] is passed on the command line, then every semantic
value is stored.
Token has unit type and is omitted in stack cell.
Accessors.
The fields [holds_semv], [holds_startp] and [holds_endp]
do not need to be compared, because they are determined
by the field [symbol]. The field [states] does not need
to be compared because it does not influence the layout
of the cell; comparing the field [holds_state] suffices.
------------------------------------------------------------------------
Publish the short invariant.
*[stack s] is the known suffix of the stack at state [s].
*[prodstack prod] is the known suffix of the stack at a state where
production [prod] can be reduced. In the short invariant, the length of
this suffix is [Production.length prod]. In the long invariant, its
length can be greater. If there are no states where [prod] can be
reduced, then every cell contains an empty set of states.
Suppose we have a function [foo] that maps things to vectors of foos and
a function [bar] that maps things to vectors of bars. Suppose we have a
function [cell] that builds a cell out of a foo and a bar. Then, we want
to construct and tabulate a function that maps things to vectors of
cells. This is done in a generic way as follows.
We allow [bars] to be shorter than [foos]. This is required in the
computation of the long invariant, where [validate] can reject sets
of states that are not equi-represented. In that case, we truncate
[foos] to match [bars].
------------------------------------------------------------------------
Explain how the stack should be deconstructed when an error is found.
We sometimes have a choice as to how many stack cells should be popped.
Indeed, several cells in the known suffix of the stack may physically hold
a state. If neither of these states handles errors, then we could jump to
either. (Indeed, if we jump to one that's nearer, it will in turn pop
further stack cells and jump to one that's farther.) In the interest of
code size, we should pop as few stack cells as possible. So, we jump to the
topmost represented state in the known suffix.
Here is a represented state. We will pop this
cell and no more.
Here is an unrepresented state that can handle
errors. The cell must hold a singleton set of states, so
we know which state to jump to, even though it isn't
represented.
Here is an unrepresented state that does not handle
errors. Pop this cell and look further.
-------------------------------------------------------------------------
Miscellaneous.
------------------------------------------------------------------------
... then compute the set of all target states of all transitions
labeled by some symbol in the set [nt].
------------------------------------------------------------------------
------------------------------------------------------------------------
Compute and publish the long invariant.
Fortunately, all of the building blocks are at hand, so this is easy.
A caveat: it is not obvious that the sets of states computed here are
equi-represented. (A set is equi-represented if all of its elements are
represented *or* all of its elements are unrepresented.) Yet, we need
this property, otherwise the long invariant cannot be safely translated
to an OCaml GADT.
One might think that this property is likely true, because every set of
states that appears somewhere in the long invariant must also appear
somewhere in the short invariant, and we know that every set of states in
the short invariant is equi-represented, because we have explicitly
imposed this requirement. However, this is *incorrect*: testing shows
that not every set of states in the long invariant is equi-represented.
To work around this problem, we truncate the long invariant so as to
forget about any stack cells that are not equi-represented.
Compute.
crucial! shadows the short invariant
crucial! shadows the short invariant
Dump.
Publish.
Long
------------------------------------------------------------------------
Compute which entry states can reach each [run], [reduce], and [goto]
function.
This information is computed only on demand.
This information is computed via a forward data flow analysis.
The join semi-lattice of properties is as follows.
The call graph of the [run], [reduce] and [goto] functions.
The entry points are the [run] functions associated with each of
the entry states.
For each transition from [node] to [node'], the function [run node]
calls the function [run node']. In the case of [goto] transitions,
this is not a direct call (it goes through [reduce] and [goto]
functions), but it is nevertheless accounted for here.
A [reduce] function ends with a call to a [goto] function.
A [goto] function appears to make no calls. The calls that it
makes have already been accounted for above.
Run the analysis on demand.
Convert a [property option] to something clearer for the end user.
Publish the data.
Origin |
, Paris
, PPS , Université Paris Diderot
. All rights reserved . This file is distributed under the
terms of the GNU General Public License version 2 , as described in the
open Grammar
(* artificial dependency; ensures that [Conflict] runs first *)
(* Compute the known suffix of the stack, a sequence of symbols, at every
   state: this is the "short invariant". *)
module SSy =
  StackSymbols.Run()

open SSy

(* Compute which states may be held in the known suffix of the stack. *)
module SSt =
  StackStates.Run(SSy)

open SSt

(* If requested, print the information that has been computed above. *)
let () =
  Error.logC 3 (dump "short")
(* We now determine which states must be represented, that is,
   explicitly pushed onto the stack. For simplicity, a state is either
   always represented or never represented. More fine-grained
   strategies, where a single state is sometimes pushed onto the stack
   and sometimes not pushed, depending on which outgoing transition is
   being taken, are conceivable, but quite tricky, and probably not
   worth the trouble.

   (1) If two states are liable to appear within a single stack cell,
   then one is represented if and only if the other is
   represented. This ensures that the structure of stacks is known
   everywhere and that we can propose types for stacks.

   (2) If a state [s] has an outgoing transition along nonterminal
   symbol [nt], and if the [goto] table for symbol [nt] has more than
   one target, then state [s] is represented.

   (3) If a stack cell contains more than one state and if at least
   one of these states is able to handle the [error] token, then these
   states are represented.

   (4) If the semantic action associated with a production mentions
   the [$syntaxerror] keyword, then the state that is being reduced to
   (that is, the state that initiated the recognition of this
   production) is represented. (Indeed, it will be passed as an
   argument to [errorcase].) *)
(* Data. Each state's representedness is stored in a union-find point, so
   that condition (1) above can be enforced by merging points. *)
let rep : bool UnionFind.point array =
  Array.init Lr1.n (fun _ -> UnionFind.fresh false)

(* Getter: the point that records whether [state] is represented. *)
let represented state =
  rep.(Lr1.number state)

(* Setter: mark [state] as represented. *)
let represent state =
  UnionFind.set (represented state) true

(* Setter: mark one chosen state of [states] as represented; because the
   points of states that share a cell are merged (see [share] below), this
   is intended to mark them all. *)
let represents states =
  represent (Lr1.NodeSet.choose states)
(* Enforce condition (1) above. *)

(* [share v] merges the representedness points of all states that appear
   within a single cell of the vector [v], for every cell. *)
let share (v : property) =
  Array.iter (fun states ->
    let dummy = UnionFind.fresh false in
    Lr1.NodeSet.iter (fun state ->
      UnionFind.union dummy (represented state)
    ) states
  ) v

(* Apply [share] to the stack description of every state and of every
   production. *)
let () =
  Lr1.iter (fun node ->
    share (stack_states node)
  );
  Production.iter (fun prod ->
    share (production_states prod)
  )
(* Enforce condition (2) above. *)

let () =
  Nonterminal.iter (fun nt ->
    (* Count the target states of the transitions labeled [nt]. *)
    let count =
      Lr1.targets (fun count _ _ ->
        count + 1
      ) 0 (Symbol.N nt)
    in
    (* If there is more than one target, represent every source state. *)
    if count > 1 then
      Lr1.targets (fun () sources _ ->
        List.iter represent sources
      ) () (Symbol.N nt)
  )
(* Enforce condition (3) above. *)

(* [handler state] tells whether [state] is able to handle the [error]
   token, either via a shift transition or via a reduction. *)
let handler state =
  try
    let _ = SymbolMap.find (Symbol.T Terminal.error) (Lr1.transitions state) in
    true
  with Not_found ->
    try
      let _ = TerminalMap.lookup Terminal.error (Lr1.reductions state) in
      true
    with Not_found ->
      false

(* [handlers states] tells whether some state in [states] can handle the
   [error] token. *)
let handlers states =
  Lr1.NodeSet.exists handler states

let () =
  Lr1.iter (fun node ->
    let v = stack_states node in
    Array.iter (fun states ->
      if Lr1.NodeSet.cardinal states >= 2 && handlers states then
        represents states
    ) v
  )
(* Enforce condition (4) above. *)

let () =
  Production.iterx (fun prod ->
    if Action.has_syntaxerror (Production.action prod) then
      let sites = Lr1.production_where prod in
      let length = Production.length prod in
      if length = 0 then
        (* For an epsilon production, the reduction sites themselves are
           the states that are reduced to; represent them all. *)
        Lr1.NodeSet.iter represent sites
      else
        (* Otherwise, the states that are reduced to are those found in
           the bottom cell of this production's stack suffix. *)
        let states = (production_states prod).(0) in
        represents states
  )
(* Final accessor: a state is represented if the constraints above require
   it, or if --represent-states was passed on the command line. *)
let represented state =
  Settings.represent_states || UnionFind.get (represented state)

(* [representeds states] decides for a whole set at once; an empty set is
   treated as unrepresented (unless --represent-states forces the issue). *)
let representeds states =
  Settings.represent_states ||
  (not (Lr1.NodeSet.is_empty states)
   && represented (Lr1.NodeSet.choose states))
(* Statistics. *)
let () =
  Error.logC 1 (fun f ->
    let count =
      Lr1.fold (fun count node ->
        if represented node then count + 1 else count
      ) 0
    in
    Printf.fprintf f "%d out of %d states are represented.\n" count Lr1.n
  )

(* If requested, show a detailed table of which states are represented. *)
let () =
  Error.logC 3 (fun f ->
    Lr1.iter (fun node ->
      Printf.fprintf f "represented(%s) = %b\n"
        (Lr1.print node) (represented node)
    )
  )
open Keyword

(* A variable pairs a symbol with a flavor of position (start or end). It
   stands for the question: must this symbol keep track of this position?
   This definition is reconstructed from its uses below: [M.add] and
   [M.find] destructure keys as [(sym, where)] pairs, and [track] is
   applied to pairs such as [(symbol, WhereStart)]. *)
type variable =
  Symbol.t * where (* WhereStart or WhereEnd *)
(* An implementation of imperative maps over [variable], obtained by
   pairing two symbol maps: one for start positions, one for end
   positions. *)
module M : Fix.IMPERATIVE_MAPS with type key = variable = struct

  type key = variable

  type 'data t = {
    mutable startp: 'data SymbolMap.t;
    mutable endp: 'data SymbolMap.t;
  }

  open SymbolMap

  let create() =
    { startp = empty; endp = empty }

  let clear m =
    m.startp <- empty; m.endp <- empty

  let add (sym, where) data m =
    match where with
    | WhereStart ->
        m.startp <- add sym data m.startp
    | WhereEnd ->
        m.endp <- add sym data m.endp
    | WhereSymbolStart ->
        (* [WhereSymbolStart] is never used as a key here. *)
        assert false

  let find (sym, where) m =
    match where with
    | WhereStart ->
        find sym m.startp
    | WhereEnd ->
        find sym m.endp
    | WhereSymbolStart ->
        assert false

  let iter f m =
    iter (fun sym -> f (sym, WhereStart)) m.startp;
    iter (fun sym -> f (sym, WhereEnd)) m.endp

end
(* We now determine which positions must be kept track of. For simplicity, we
   do this on a per-symbol basis. That is, for each symbol, either we never
   keep track of position information, or we always do. In fact, we do
   distinguish start and end positions. This leads to computing two sets of
   symbols -- those that keep track of their start position and those that
   keep track of their end position.

   A symbol on the right-hand side of a production must keep track of its
   (start or end) position if that position is explicitly requested by a
   semantic action.

   Furthermore, if the left-hand symbol of a production must keep track of its
   start (resp. end) position, then the first (resp. last) symbol of its
   right-hand side (if there is one) must do so as well. That is, unless the
   right-hand side is empty. *)

(* 2015/11/11. When a production [prod] is reduced, the top stack cell may be
   consulted for its end position. This implies that this cell must exist
   and must store an end position! Now, when does this happen?

   1- This happens if [prod] is an epsilon production and the left-hand symbol
   of the production, [nt prod], keeps track of its start or end position.
   2- This happens if the semantic action explicitly mentions the keyword
   [$endpos($0)].

   Now, if this happens, what should we do?

   a- If this happens in a state [s] whose incoming symbol is [sym], then [sym]
   must keep track of its end position.
   b- If this happens in an initial state, where the stack may be empty, then
   the sentinel cell at the bottom of the stack must contain an end position.

   Point (b) doesn't concern us here, but point (a) does. We must implement the
   constraint (1) \/ (2) -> (a). Point (b) is taken care of in the code back-end,
   where, for simplicity, we always create a sentinel cell. *)
module F =
  FixSolver.Make(M)(Fix.Prop.Boolean)

let () =

  (* We gather the constraints explained above in two loops. The first loop
     looks at every (non-start) production [prod]. The second loop looks at
     every (non-initial) state [s]. *)

  Production.iterx (fun prod ->

    let nt, rhs = Production.def prod
    and ids = Production.identifiers prod
    and action = Production.action prod in
    let length = Array.length rhs in

    if length > 0 then begin
      (* If [nt] keeps track of its start position, then the first symbol
         in the right-hand side must do so as well. *)
      F.record_VarVar (Symbol.N nt, WhereStart) (rhs.(0), WhereStart);
      (* If [nt] keeps track of its end position, then the last symbol
         in the right-hand side must do so as well. *)
      F.record_VarVar (Symbol.N nt, WhereEnd) (rhs.(length - 1), WhereEnd)
    end;

    KeywordSet.iter (function
      | SyntaxError ->
          ()
      | Position (Before, _, _) ->
          (* Doing nothing here because [$endpos($0)] is dealt with in
             the second loop. *)
          ()
      | Position (Left, _, _) ->
          (* [$startpos] and [$endpos] have been expanded away. *)
          assert false
      | Position (_, _, FlavorLocation) ->
          (* [$loc] and [$sloc] have been expanded away. *)
          assert false
      | Position (RightNamed _, WhereSymbolStart, _) ->
          (* [$symbolstartpos(x)] does not exist. *)
          assert false
      | Position (RightNamed id, where, _) ->
          (* If the semantic action mentions [$startpos($i)], then the
             [i]-th symbol in the right-hand side must keep track of
             its start position. Similarly for end positions. *)
          Array.iteri (fun i id' ->
            if id = id' then
              F.record_ConVar true (rhs.(i), where)
          ) ids
    ) (Action.keywords action)

  ); (* end of loop on productions *)
  (* NOTE(review): the closing [);] above was missing in the garbled
     source; it is required to terminate the [Production.iterx]
     application before the second loop begins. *)

  Lr1.iterx (fun s ->
    (* Let [sym] be the incoming symbol of state [s]. *)
    let sym = Option.force (Lr1.incoming_symbol s) in
    (* Condition (1) in the long comment above (2015/11/11). If an epsilon
       production [prod] can be reduced in state [s], if its left-hand side
       [nt] keeps track of its start or end position, then [sym] must keep
       track of its end position. *)
    TerminalMap.iter (fun _ prods ->
      let prod = Misc.single prods in
      let nt, rhs = Production.def prod in
      let length = Array.length rhs in
      if length = 0 then begin
        F.record_VarVar (Symbol.N nt, WhereStart) (sym, WhereEnd);
        F.record_VarVar (Symbol.N nt, WhereEnd) (sym, WhereEnd)
      end
    ) (Lr1.reductions s);
    (* Condition (2) in the long comment above (2015/11/11). If a production
       can be reduced in state [s] and mentions [$endpos($0)], then [sym]
       must keep track of its end position. *)
    if Lr1.has_beforeend s then
      F.record_ConVar true (sym, WhereEnd)
  )
(* Solve the constraints. *)
let track : variable -> bool option =
  let module S = F.Solve() in
  S.solution

(* A variable that was never constrained maps to [None]: no tracking. *)
let track : variable -> bool =
  fun x -> Option.value (track x) ~default:false

(* [startp symbol] tells whether [symbol] must keep track of its start
   position. *)
let startp symbol =
  Settings.represent_positions ||
  track (symbol, WhereStart)

(* [endp symbol] tells whether [symbol] must keep track of its end
   position. *)
let endp symbol =
  Settings.represent_positions ||
  track (symbol, WhereEnd)
(* [for_every_symbol f] applies [f] to every terminal symbol, then to
   every nonterminal symbol. *)
let for_every_symbol (f : Symbol.t -> unit) : unit =
  Terminal.iter (fun t -> f (Symbol.T t));
  Nonterminal.iter (fun nt -> f (Symbol.N nt))

(* [sum_over_every_symbol f] counts how many symbols satisfy [f]. *)
let sum_over_every_symbol (f : Symbol.t -> bool) : int =
  let count = ref 0 in
  for_every_symbol (fun sym -> if f sym then incr count);
  !count
(* Statistics about position tracking. *)
let () =
  Error.logC 1 (fun f ->
    Printf.fprintf f
      "%d out of %d symbols keep track of their start position.\n\
       %d out of %d symbols keep track of their end position.\n"
      (sum_over_every_symbol startp) (Terminal.n + Nonterminal.n)
      (sum_over_every_symbol endp) (Terminal.n + Nonterminal.n))
(* Constructors and accessors for information about the stack. *)

(* A stack cell: the symbol it is associated with, the set of states it
   may hold, and which pieces of data it physically stores. *)
type cell = {
  symbol: Symbol.t;
  states: Lr1.NodeSet.t;
  holds_semv: bool;
  holds_state: bool;
  holds_startp: bool;
  holds_endp: bool;
}

(* A known stack suffix is an array of cells. *)
type word =
  cell array

(* [has_semv symbol] tells whether a semantic value for [symbol] is kept
   in its stack cell. *)
let has_semv symbol =
  Settings.represent_values ||
  match symbol with
  | Symbol.N _nt ->
      true
  | Symbol.T tok ->
      match Terminal.ocamltype tok with
      | None ->
          (* Token has unit type and is omitted in stack cell. *)
          false
      | Some _ocamltype ->
          true

(* [cell symbol states] builds a cell. *)
let cell symbol states =
  let holds_semv = has_semv symbol in
  let holds_state = representeds states in
  let holds_startp, holds_endp = startp symbol, endp symbol in
  { symbol; states; holds_semv; holds_state; holds_startp; holds_endp }

(* [similar cell1 cell2] compares the layouts of two cells. *)
let similar cell1 cell2 =
  (* The fields [holds_semv], [holds_startp] and [holds_endp]
     do not need to be compared, because they are determined
     by the field [symbol]. The field [states] does not need
     to be compared because it does not influence the layout
     of the cell; comparing the field [holds_state] suffices. *)
  Symbol.equal cell1.symbol cell2.symbol &&
  cell1.holds_state = cell2.holds_state
(* [pop w] removes the top cell of the stack suffix [w]. *)
let pop =
  MArray.pop
(* [fold_top f default w] applies [f] to the top (last) cell of [w], or
   returns [default] if [w] is empty. *)
let fold_top f default w =
  match Array.length w with
  | 0 ->
      default
  | n ->
      f w.(n - 1)
(* The interface through which the short and long invariants are
   published. *)
module type STACK = sig

  (* [stack s] is the known suffix of the stack at state [s]. *)
  val stack: Lr1.node -> word

  (* [prodstack prod] is the known suffix of the stack at a state where
     production [prod] can be reduced. *)
  val prodstack: Production.index -> word

  (* [gotostack nt] is the known suffix of the stack at a state where an
     edge labeled [nt] has just been followed. In the short invariant, the
     length of this suffix is [1]: indeed, it consists of just one cell,
     associated with the symbol [nt]. In the long invariant, its length can
     be greater. *)
  val gotostack: Nonterminal.t -> word

end
(* [publish tabulate foo bar cell] builds and tabulates a function that
   maps each thing to a vector of cells, where cell [i] is built out of
   [foo thing].(i) and [bar thing].(i). *)
let publish tabulate foo bar cell =
  tabulate (fun thing ->
    let foos, bars = foo thing, bar thing in
    assert (Array.length foos >= Array.length bars);
    (* [bars] may be shorter than [foos]; truncate [foos] to match. *)
    let k = Array.length bars in
    let foos = MArray.truncate k foos in
    Array.init k (fun i -> cell foos.(i) bars.(i))
  )

(* [stack s] is the known suffix of the stack at state [s]. *)
let stack : Lr1.node -> word =
  publish Lr1.tabulate stack_symbols stack_states cell

(* [prodstack prod] is the known suffix of the stack at a state where
   production [prod] can be reduced. *)
let prodstack : Production.index -> word =
  publish Production.tabulate production_symbols production_states cell

(* [gotostack nt] is the known suffix of the stack at a state where an
   edge labeled [nt] has just been followed. *)
let gotostack : Nonterminal.t -> word =
  publish Nonterminal.tabulate goto_symbols goto_states cell
type state =
  | Represented
  | UnRepresented of Lr1.node

type instruction =
  | Die
  | DownTo of word * state

(* [rewind node] says how to deconstruct the stack when an error is
   detected in state [node]. *)
let rewind node : instruction =
  let w = stack node in
  let rec rewind w =
    if Array.length w = 0 then
      (* I believe that every stack description either is definite
         (that is, ends with [TailEmpty]) or contains at least one
         represented state. Thus, if we find an empty [w], this
         means that the stack is definitely empty. *)
      Die
    else
      let { states; _ } as cell = MArray.last w in
      let w = MArray.pop w in
      if representeds states then
        (* A represented state: pop this cell and no more. *)
        DownTo ([| cell |], Represented)
      else if handlers states then begin
        (* An unrepresented state that can handle errors. The cell must
           hold a singleton set of states, so we know which state to jump
           to, even though it isn't represented. *)
        assert (Lr1.NodeSet.cardinal states = 1);
        let state = Lr1.NodeSet.choose states in
        DownTo ([| cell |], UnRepresented state)
      end
      else
        (* An unrepresented state that does not handle errors: pop this
           cell and look further. *)
        match rewind w with
        | Die ->
            Die
        | DownTo (w, st) ->
            DownTo (MArray.push w cell, st)
  in
  rewind w
let universal symbol =
Lr1.fold (fun universal s ->
universal && (if represented s then SymbolMap.mem symbol (Lr1.transitions s) else true)
) true
Discover which states can peek at an error . These are the states
where an error token may be on the stream . These are the states
that are targets of a reduce action on [ error ] .
where an error token may be on the stream. These are the states
that are targets of a reduce action on [error]. *)
2012/08/25 I am optimizing this code , whose original version I found had
quadratic complexity . The problem is as follows . We can easily iterate over
all states to find which states [ s ] have a reduce action on error . What we
must find out , then , is into which state [ t ] this reduce action takes us .
This is not easy to predict , as it depends on the contents of the stack .
The original code used an overapproximation , as follows : if the reduction
concerns a production whose head symbol is [ nt ] , then all of the states
that have an incoming transition labeled [ nt ] are potential targets . The
new version of the code below relies on the same approximation , but uses
two successive loops instead of two nested loops .
quadratic complexity. The problem is as follows. We can easily iterate over
all states to find which states [s] have a reduce action on error. What we
must find out, then, is into which state [t] this reduce action takes us.
This is not easy to predict, as it depends on the contents of the stack.
The original code used an overapproximation, as follows: if the reduction
concerns a production whose head symbol is [nt], then all of the states
that have an incoming transition labeled [nt] are potential targets. The
new version of the code below relies on the same approximation, but uses
two successive loops instead of two nested loops. *)
let errorpeekers =
First compute a set of symbols [ nt ] ...
let nts : SymbolSet.t =
Lr1.fold (fun nts node ->
try
let prods = TerminalMap.lookup Terminal.error (Lr1.reductions node) in
let prod = Misc.single prods in
let nt = Production.nt prod in
SymbolSet.add (Symbol.N nt) nts
with Not_found ->
nts
) SymbolSet.empty
in
SymbolSet.fold (fun nt errorpeekers ->
Lr1.NodeSet.union errorpeekers (Lr1.all_targets nt)
) nts Lr1.NodeSet.empty
let errorpeeker node =
Lr1.NodeSet.mem node errorpeekers
let () =
Time.tick "Constructing the invariant"
module Long () = struct
module SSy =
StackSymbols.Long()
module SSt =
StackStates.Run(SSy)
Validate .
let unrepresented node =
not (represented node)
let equi_represented nodes =
Lr1.NodeSet.for_all represented nodes ||
Lr1.NodeSet.for_all unrepresented nodes
let validate states =
MArray.greatest_suffix_forall equi_represented states
let stack_states s =
validate @@ stack_states s
let production_states prod =
validate @@ production_states prod
let goto_states nt =
validate @@ goto_states nt
let () =
Error.logC 3 (dump "long")
let stack : Lr1.node -> word =
publish Lr1.tabulate stack_symbols stack_states cell
let prodstack : Production.index -> word =
publish Production.tabulate production_symbols production_states cell
let gotostack : Nonterminal.t -> word =
publish Nonterminal.tabulate goto_symbols goto_states cell
let () =
Time.tick "Constructing the long invariant"
This information is used in the new code back - end to determine in which
states we have static knowledge of the final result type of the parser ,
[ ' final ] . This information can be built into the GADT that describes the
states , and this in turn can be used to perform certain optimizations ( such
as removing case analyses that have only one branch ) while preserving the
well - typedness of the OCaml code .
states we have static knowledge of the final result type of the parser,
['final]. This information can be built into the GADT that describes the
states, and this in turn can be used to perform certain optimizations (such
as removing case analyses that have only one branch) while preserving the
well-typedness of the OCaml code. *)
module P = struct
[ SingleOrigin s ] means that we are reachable via a single entry state
[ s ] . [ Top ] means that we are reachable via multiple entry states .
[s]. [Top] means that we are reachable via multiple entry states. *)
type property =
| SingleOrigin of Nonterminal.t
| Top
let leq_join p1 p2 =
match p1, p2 with
| _, Top
| Top, _ ->
Top
| SingleOrigin start1, SingleOrigin start2 ->
if Nonterminal.equal start1 start2 then p2 else Top
end
module G = struct
include P
type variable =
| Run of Lr1.node
| Reduce of Production.index
| Goto of Nonterminal.t
type t = variable
let foreach_root yield =
Lr1.entry |> ProductionMap.iter (fun prod node ->
let nt = Option.force (Production.classify prod) in
yield (Run node) (SingleOrigin nt)
)
let foreach_successor v origin yield =
match v with
| Run node ->
Lr1.transitions node |> SymbolMap.iter begin fun _label node' ->
yield (Run node') origin
end;
Lr1.reductions node |> TerminalMap.iter begin fun _tok prods ->
let prod = Misc.single prods in
yield (Reduce prod) origin
end
| Reduce prod ->
let nt = Production.nt prod in
yield (Goto nt) origin
| Goto _nt ->
()
end
let solution : (G.variable -> P.property option) Lazy.t =
lazy (
let module D = Fix.DataFlow.ForType(G)(P)(G) in
D.solution
)
module Origin = struct
type origin =
| Dead
| SingleOrigin of Nonterminal.t
| MultipleOrigins
let convert op =
match op with
| None ->
Dead
| Some (P.SingleOrigin nt) ->
SingleOrigin nt
| Some (P.Top) ->
MultipleOrigins
let run node =
convert (Lazy.force solution (G.Run node))
let reduce prod =
convert (Lazy.force solution (G.Reduce prod))
let goto nt =
convert (Lazy.force solution (G.Goto nt))
|
aae36b06d1e2d7eb8a044056f02de3cbca24e614fba3ba6d63f85239bcd3b70e | q60/dotfiles | xmonad.hs | import XMonad
import XMonad.Util.EZConfig
import XMonad.Util.Ungrab
import XMonad.Hooks.StatusBar
import XMonad.Hooks.ManageHelpers
import XMonad.Hooks.EwmhDesktops
main :: IO ()
main = xmonad
. withEasySB (statusBarProp "xmobar" $ pure def) defToggleStrutsKey
. ewmh
$ xmonadConfig
xmonadConfig = def { modMask = mod1Mask
, handleEventHook = fullscreenEventHook
, terminal = "termonad"
}
`additionalKeysP`
[ ("M-]" , spawn "firefox --private-window")
, ("M-y" , unGrab >> spawn "screenshot" )
, ("M--" , spawn "amixer sset Master 5%-" )
, ("M-=" , spawn "amixer sset Master 5%+" )
, ("M-p" , spawn "rofi -show run" )
]
| null | https://raw.githubusercontent.com/q60/dotfiles/b79f2578faf9e8efb77f3017ca491d1e74fd27da/config/xmonad/xmonad.hs | haskell | import XMonad
import XMonad.Util.EZConfig
import XMonad.Util.Ungrab
import XMonad.Hooks.StatusBar
import XMonad.Hooks.ManageHelpers
import XMonad.Hooks.EwmhDesktops
main :: IO ()
main = xmonad
. withEasySB (statusBarProp "xmobar" $ pure def) defToggleStrutsKey
. ewmh
$ xmonadConfig
xmonadConfig = def { modMask = mod1Mask
, handleEventHook = fullscreenEventHook
, terminal = "termonad"
}
`additionalKeysP`
[ ("M-]" , spawn "firefox --private-window")
, ("M-y" , unGrab >> spawn "screenshot" )
, ("M--" , spawn "amixer sset Master 5%-" )
, ("M-=" , spawn "amixer sset Master 5%+" )
, ("M-p" , spawn "rofi -show run" )
]
| |
0c8e52f7e27a9bde555b9d706f0b0c864ac0960869713518666baa319af38a51 | spurious/sagittarius-scheme-mirror | events.scm | -*- mode : scheme ; coding : utf-8 ; -*-
;;;
;;; text/xml/dom/events.scm - DOM events
;;;
Copyright ( c ) 2018 < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
;; reference
;; /
(library (text xml dom events)
(export (rename (event <event>)) make-event event?
event-type
(rename (event-%target event-target))
event-current-target event-event-phase
event-composed-path
event-bubbles? event-cancelable? event-default-prevented?
event-composed?
event-time-stamp
;; interface method
event:composed-path event:stop-propagation
event:stop-immediate-propagation event:prevent-default
;; interface constant
+event:none+ +event:capturing-phase+ +event:at-target+
+event:bubbling-phase+
make-event-init event-init?
(rename (custom-event <custom-event>))
make-custom-event custom-event?
custom-event-detail
make-custom-event-init custom-event-init?
(rename (event-target <event-target>))
make-event-target event-target?
event-target:add-event-listener event-target:remove-event-listener
event-target:dispatch-event
(rename (event-listener-options <event-listener-options>)
(add-event-listener-options <add-event-listener-options>))
make-event-listener-options event-listener-options?
make-add-event-listener-options add-event-listener-options?
)
(import (rnrs)
(sagittarius) ;; for define-constant
(srfi :19 time))
(define-record-type event-init
(fields bubbles?
cancelable?
composed?)
(protocol (lambda (p)
(lambda (:key (bubbles? #f) (cancelable? #f) (composed? #f))
(p bubbles? cancelable? composed?)))))
(define-constant +event:none+ 0)
(define-constant +event:capturing-phase+ 1)
(define-constant +event:at-target+ 2)
(define-constant +event:bubbling-phase+ 3)
;; internal flags
(define-constant +event-flag:stop-propagation+ 0)
(define-constant +event-flag:stop-immediate-propagation+ 1)
(define-constant +event-flag:canceled+ 2)
(define-constant +event-flag:in-pasive-listener+ 3)
(define-constant +event-flag:composed+ 4)
(define-constant +event-flag:initialized+ 5)
(define-constant +event-flag:dispatch+ 6)
;;; Event interface
(define-record-type event
DOMString
%target ;; EventTarget?
current-target ;; EventTarget?
event-phase ;; unsigned short
bubbles? ;; boolean
cancelable? ;; boolean
default-prevented? ;; boolean
composed? ;; boolean
DOMHighResTimeStamp ( using SRF-19 time )
internal use vector of 7
)
(protocol (lambda (p)
(lambda (type :optional (eid #f))
;; To be properly done
(p type
TODO consider current context
TODO consider current context
+event:none+
(and eid (event-init-bubbles? eid))
(and eid (event-init-cancelable? eid))
#f
(and eid (event-init-composed? eid))
(current-time)
(make-vector 7 #f))))))
;; event methods
(define (event:composed-path event)
;; TBD
'())
(define (event:stop-propagation event)
(vector-set! (event-flags event) +event-flag:stop-propagation+ #t))
(define (event:stop-immediate-propagation event)
(vector-set! (event-flags event) +event-flag:stop-immediate-propagation+ #t))
(define (event:prevent-default event)
(let ((flags (event-flags event)))
(and (event-cancelable? event)
(not (vector-ref flags +event-flag:in-pasive-listener+))
(vector-set! flags +event-flag:canceled+ #t))))
CustomEvent
(define-record-type custom-event-init
(parent event-init)
(fields detail)
(protocol (lambda (n)
(lambda (:key (detail #f) :allow-other-keys opt)
(let ((p (apply n opt)))
(p detail))))))
(define-record-type custom-event
(parent event)
(fields detail) ;; any
(protocol (lambda (n)
(lambda (type :optional (eid #f))
(let ((p (n type eid)))
(p (and (custom-event-init? eid)
(custom-event-init-detail eid))))))))
;;; EventTarget
(define-record-type event-target
(fields (mutable type) ;; a string
EventListener ( lambda ( event ) ... )
(mutable capture?) ;; boolean
(mutable passive?) ;; boolean
(mutable once?) ;; boolean
(mutable removed?) ;; boolean
)
(protocol (lambda (p)
(lambda ()
(p #f (lambda (_) ) #f #f #f #f)))))
(define-record-type event-listener-options
(fields capture?)
(protocol (lambda (p) (lambda (:key (capture? #f)) (p capture?)))))
(define-record-type add-event-listener-options
(parent event-listener-options)
(fields passive?
once?)
(protocol (lambda (n)
(lambda (:key (passive? #f) (once? #f) :allow-other-keys opt)
(let ((p (apply n opt)))
(p passive? once?))))))
(define (event-target:add-event-listener et type callback
:optional (options #f))
;; TBD
)
(define (event-target:remove-event-listener et type callback
:optional (options #f))
;; TBD
)
(define (event-target:dispatch-event et event)
;; TBD
)
)
| null | https://raw.githubusercontent.com/spurious/sagittarius-scheme-mirror/53f104188934109227c01b1e9a9af5312f9ce997/sitelib/text/xml/dom/events.scm | scheme | coding : utf-8 ; -*-
text/xml/dom/events.scm - DOM events
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
reference
/
interface method
interface constant
for define-constant
internal flags
Event interface
EventTarget?
EventTarget?
unsigned short
boolean
boolean
boolean
boolean
To be properly done
event methods
TBD
any
EventTarget
a string
boolean
boolean
boolean
boolean
TBD
TBD
TBD | Copyright ( c ) 2018 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
(library (text xml dom events)
(export (rename (event <event>)) make-event event?
event-type
(rename (event-%target event-target))
event-current-target event-event-phase
event-composed-path
event-bubbles? event-cancelable? event-default-prevented?
event-composed?
event-time-stamp
event:composed-path event:stop-propagation
event:stop-immediate-propagation event:prevent-default
+event:none+ +event:capturing-phase+ +event:at-target+
+event:bubbling-phase+
make-event-init event-init?
(rename (custom-event <custom-event>))
make-custom-event custom-event?
custom-event-detail
make-custom-event-init custom-event-init?
(rename (event-target <event-target>))
make-event-target event-target?
event-target:add-event-listener event-target:remove-event-listener
event-target:dispatch-event
(rename (event-listener-options <event-listener-options>)
(add-event-listener-options <add-event-listener-options>))
make-event-listener-options event-listener-options?
make-add-event-listener-options add-event-listener-options?
)
(import (rnrs)
(srfi :19 time))
(define-record-type event-init
(fields bubbles?
cancelable?
composed?)
(protocol (lambda (p)
(lambda (:key (bubbles? #f) (cancelable? #f) (composed? #f))
(p bubbles? cancelable? composed?)))))
(define-constant +event:none+ 0)
(define-constant +event:capturing-phase+ 1)
(define-constant +event:at-target+ 2)
(define-constant +event:bubbling-phase+ 3)
(define-constant +event-flag:stop-propagation+ 0)
(define-constant +event-flag:stop-immediate-propagation+ 1)
(define-constant +event-flag:canceled+ 2)
(define-constant +event-flag:in-pasive-listener+ 3)
(define-constant +event-flag:composed+ 4)
(define-constant +event-flag:initialized+ 5)
(define-constant +event-flag:dispatch+ 6)
(define-record-type event
DOMString
DOMHighResTimeStamp ( using SRF-19 time )
internal use vector of 7
)
(protocol (lambda (p)
(lambda (type :optional (eid #f))
(p type
TODO consider current context
TODO consider current context
+event:none+
(and eid (event-init-bubbles? eid))
(and eid (event-init-cancelable? eid))
#f
(and eid (event-init-composed? eid))
(current-time)
(make-vector 7 #f))))))
(define (event:composed-path event)
'())
(define (event:stop-propagation event)
(vector-set! (event-flags event) +event-flag:stop-propagation+ #t))
(define (event:stop-immediate-propagation event)
(vector-set! (event-flags event) +event-flag:stop-immediate-propagation+ #t))
(define (event:prevent-default event)
(let ((flags (event-flags event)))
(and (event-cancelable? event)
(not (vector-ref flags +event-flag:in-pasive-listener+))
(vector-set! flags +event-flag:canceled+ #t))))
CustomEvent
(define-record-type custom-event-init
(parent event-init)
(fields detail)
(protocol (lambda (n)
(lambda (:key (detail #f) :allow-other-keys opt)
(let ((p (apply n opt)))
(p detail))))))
(define-record-type custom-event
(parent event)
(protocol (lambda (n)
(lambda (type :optional (eid #f))
(let ((p (n type eid)))
(p (and (custom-event-init? eid)
(custom-event-init-detail eid))))))))
(define-record-type event-target
EventListener ( lambda ( event ) ... )
)
(protocol (lambda (p)
(lambda ()
(p #f (lambda (_) ) #f #f #f #f)))))
(define-record-type event-listener-options
(fields capture?)
(protocol (lambda (p) (lambda (:key (capture? #f)) (p capture?)))))
(define-record-type add-event-listener-options
(parent event-listener-options)
(fields passive?
once?)
(protocol (lambda (n)
(lambda (:key (passive? #f) (once? #f) :allow-other-keys opt)
(let ((p (apply n opt)))
(p passive? once?))))))
(define (event-target:add-event-listener et type callback
:optional (options #f))
)
(define (event-target:remove-event-listener et type callback
:optional (options #f))
)
(define (event-target:dispatch-event et event)
)
)
|
d0be96f744b2005de7c945522eb930cec8717e47e0fc8b2ff24d9d3c91b312f6 | FestCat/festival-ca | nitech_us_slt_arctic_other.scm | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; ;;;
Carnegie Mellon University ; ; ;
and and ; ; ;
Copyright ( c ) 1998 - 2000 ; ; ;
All Rights Reserved . ; ; ;
;;; ;;;
;;; Permission is hereby granted, free of charge, to use and distribute ;;;
;;; this software and its documentation without restriction, including ;;;
;;; without limitation the rights to use, copy, modify, merge, publish, ;;;
;;; distribute, sublicense, and/or sell copies of this work, and to ;;;
;;; permit persons to whom this work is furnished to do so, subject to ;;;
;;; the following conditions: ;;;
1 . The code must retain the above copyright notice , this list of ; ; ;
;;; conditions and the following disclaimer. ;;;
2 . Any modifications must be clearly marked as such . ; ; ;
3 . Original authors ' names are not deleted . ; ; ;
4 . The authors ' names are not used to endorse or promote products ; ; ;
;;; derived from this software without specific prior written ;;;
;;; permission. ;;;
;;; ;;;
CARNEGIE MELLON UNIVERSITY AND THE CONTRIBUTORS TO THIS WORK ; ; ;
;;; DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;;
;;; ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;;
SHALL CARNEGIE MELLON UNIVERSITY NOR THE CONTRIBUTORS BE LIABLE ; ; ;
;;; FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;;
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , IN ; ; ;
;;; AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;;
;;; ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;;
;;; THIS SOFTWARE. ;;;
;;; ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Something else
;;;
;;; Load any necessary files here
(define (nitech_us_slt_arctic::select_other)
"(nitech_us_slt_arctic::select_other)
Set up the anything esle for the voice."
;; something else
)
(define (nitech_us_slt_arctic::reset_other)
"(nitech_us_slt_arctic::reset_other)
Reset other information."
t
)
(provide 'nitech_us_slt_arctic_other)
| null | https://raw.githubusercontent.com/FestCat/festival-ca/f6b2d9bf4fc4f77b80890ebb95770075ad36ccaf/src/data/festvox.orig/nitech_us_slt_arctic_other.scm | scheme |
;;;
; ;
; ;
; ;
; ;
;;;
Permission is hereby granted, free of charge, to use and distribute ;;;
this software and its documentation without restriction, including ;;;
without limitation the rights to use, copy, modify, merge, publish, ;;;
distribute, sublicense, and/or sell copies of this work, and to ;;;
permit persons to whom this work is furnished to do so, subject to ;;;
the following conditions: ;;;
; ;
conditions and the following disclaimer. ;;;
; ;
; ;
; ;
derived from this software without specific prior written ;;;
permission. ;;;
;;;
; ;
DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;;
ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;;
; ;
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;;
; ;
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;;
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;;
THIS SOFTWARE. ;;;
;;;
Something else
Load any necessary files here
something else |
(define (nitech_us_slt_arctic::select_other)
"(nitech_us_slt_arctic::select_other)
Set up the anything esle for the voice."
)
(define (nitech_us_slt_arctic::reset_other)
"(nitech_us_slt_arctic::reset_other)
Reset other information."
t
)
(provide 'nitech_us_slt_arctic_other)
|
18ffd38d88ecb7314d317a6287be5afc8820f38ea50ef1ae7d56a3b3ea828bef | 2600hz/kazoo | kz_services_asr.erl | %%%-----------------------------------------------------------------------------
( C ) 2012 - 2020 , 2600Hz
%%% @doc
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(kz_services_asr).
-export([fetch/1
,flat_rate/1, flat_rate/2
]).
-include("services.hrl").
-define(DEFAULT_FLAT_RATE, 0).
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
-spec fetch(kz_services:services() | kz_term:ne_binary()) -> kz_json:object().
fetch(?NE_BINARY=AccountId) ->
FetchOptions = ['hydrate_plans'],
fetch(kz_services:fetch(AccountId, FetchOptions));
fetch(Services) ->
ASRDict = kz_services_plans:foldl(fun fetch_foldl/3
,dict:new()
,kz_services:plans(Services)
),
kz_json:from_list(dict:to_list(ASRDict)).
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
-spec fetch_foldl(kz_term:ne_binary(), kz_services_plans:plans_list(), dict:dict()) -> dict:dict().
fetch_foldl(_BookkeeperHash, [], Providers) ->
Providers;
fetch_foldl(_BookkeeperHash, PlansList, Providers) ->
Plan = kz_services_plans:merge(PlansList),
kz_json:foldl(fun(K, V, A) ->
dict:store(K, V, A)
end
,Providers
,kz_services_plan:asr(Plan)
).
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
-spec flat_rate(kz_term:ne_binary()) -> kz_currency:dollars().
flat_rate(AccountId) ->
flat_rate(AccountId, kazoo_asr:default_provider()).
-spec flat_rate(kz_term:ne_binary(), kz_term:ne_binary()) -> kz_currency:dollars().
flat_rate(AccountId, Provider) ->
Items = fetch(AccountId),
kz_json:get_number_value([Provider, <<"rate">>], Items, ?DEFAULT_FLAT_RATE).
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/core/kazoo_services/src/modules/kz_services_asr.erl | erlang | -----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------ | ( C ) 2012 - 2020 , 2600Hz
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(kz_services_asr).
-export([fetch/1
,flat_rate/1, flat_rate/2
]).
-include("services.hrl").
-define(DEFAULT_FLAT_RATE, 0).
-spec fetch(kz_services:services() | kz_term:ne_binary()) -> kz_json:object().
fetch(?NE_BINARY=AccountId) ->
FetchOptions = ['hydrate_plans'],
fetch(kz_services:fetch(AccountId, FetchOptions));
fetch(Services) ->
ASRDict = kz_services_plans:foldl(fun fetch_foldl/3
,dict:new()
,kz_services:plans(Services)
),
kz_json:from_list(dict:to_list(ASRDict)).
-spec fetch_foldl(kz_term:ne_binary(), kz_services_plans:plans_list(), dict:dict()) -> dict:dict().
fetch_foldl(_BookkeeperHash, [], Providers) ->
Providers;
fetch_foldl(_BookkeeperHash, PlansList, Providers) ->
Plan = kz_services_plans:merge(PlansList),
kz_json:foldl(fun(K, V, A) ->
dict:store(K, V, A)
end
,Providers
,kz_services_plan:asr(Plan)
).
-spec flat_rate(kz_term:ne_binary()) -> kz_currency:dollars().
flat_rate(AccountId) ->
flat_rate(AccountId, kazoo_asr:default_provider()).
-spec flat_rate(kz_term:ne_binary(), kz_term:ne_binary()) -> kz_currency:dollars().
flat_rate(AccountId, Provider) ->
Items = fetch(AccountId),
kz_json:get_number_value([Provider, <<"rate">>], Items, ?DEFAULT_FLAT_RATE).
|
a58351d1042a83c2c2dcdaccedfefe22e27c31bec4d4549cd5ca3e836b71d9c2 | honest-technology/api.unverified.email | Mailbox.hs | # LANGUAGE QuasiQuotes #
module Model.Mailbox where
import Data.Aeson
import Data.Text
import Data.Time
import Data.String.Interpolate (i)
import Data.Time.ISO8601 (formatISO8601Millis)
newtype EmailAddress = EmailAddress Text deriving ToJSON
newtype Url = Url Text deriving ToJSON
data Mailbox = Mailbox {
mailboxId :: Text
, created :: UTCTime
}
receiveUrl :: Mailbox -> Url
receiveUrl m = Url [i|/#{mailboxId m}|]
mailboxAddress :: Mailbox -> EmailAddress
mailboxAddress m = EmailAddress [i|#{mailboxId m}@unverified.email|]
instance ToJSON Mailbox where
toJSON m = object [
"mailbox" .= mailboxAddress m
, "receive" .= receiveUrl m
, "mailbox_id" .= mailboxId m
, "created" .= formatISO8601Millis (created m)
]
| null | https://raw.githubusercontent.com/honest-technology/api.unverified.email/75234625974d8054f28a5c05cc313c927ffae8d5/src/Model/Mailbox.hs | haskell | # LANGUAGE QuasiQuotes #
module Model.Mailbox where
import Data.Aeson
import Data.Text
import Data.Time
import Data.String.Interpolate (i)
import Data.Time.ISO8601 (formatISO8601Millis)
newtype EmailAddress = EmailAddress Text deriving ToJSON
newtype Url = Url Text deriving ToJSON
data Mailbox = Mailbox {
mailboxId :: Text
, created :: UTCTime
}
receiveUrl :: Mailbox -> Url
receiveUrl m = Url [i|/#{mailboxId m}|]
mailboxAddress :: Mailbox -> EmailAddress
mailboxAddress m = EmailAddress [i|#{mailboxId m}@unverified.email|]
instance ToJSON Mailbox where
toJSON m = object [
"mailbox" .= mailboxAddress m
, "receive" .= receiveUrl m
, "mailbox_id" .= mailboxId m
, "created" .= formatISO8601Millis (created m)
]
| |
5c0c37a7d90a6b4465d115f433ab853239a156b89016ea940704c65a22aac4ce | david-vanderson/warp | plasma.rkt | #lang racket/base
(require mode-lambda
mode-lambda/static)
(require "defs.rkt"
"utils.rkt"
"draw-utils.rkt")
(provide (all-defined-out))
(define PLASMA_LIFE 3000) ; ms after which plasma starts fading
energy loss per second after PLASMA_LIFE
;; Pre-compile hook: register the plasma sprite image with the sprite
;; database before it is compiled.
(define (plasma-setup-pre! sd)
  (add-sprite!/file sd 'plasma (build-path IMAGEDIR "plasma.png")))

;; Cached sprite index and pixel size; filled in by plasma-setup-post!.
(define PLASMA_SPRITE_IDX #f)
(define PLASMA_SPRITE_SIZE #f)

;; Post-compile hook: look up the plasma sprite in the compiled sprite
;; database and cache its index and its larger pixel dimension.
(define (plasma-setup-post! csd)
  (set! PLASMA_SPRITE_IDX (sprite-idx csd 'plasma))
  (define w (sprite-width csd PLASMA_SPRITE_IDX))
  (define h (sprite-height csd PLASMA_SPRITE_IDX))
  (set! PLASMA_SPRITE_SIZE (max w h)))
;; Damage dealt by a plasma equals its current (age-faded) energy.
(define (plasma-damage space p)
  (plasma-energy space p))

;; Current energy: base energy minus a linear fade of PLASMA_FADE per
;; second, applied only after the plasma is older than PLASMA_LIFE ms.
(define (plasma-energy space p)
  (- (plasma-e p) (* (max 0.0 (- (obj-age space p) PLASMA_LIFE)) (/ PLASMA_FADE 1000.0))))
;; Convert a plasma's remaining energy into its radius. Energy is clamped
;; below at 1.0, so even a nearly spent plasma keeps a minimum radius of 2.0.
(define (plasma-energy->radius e)
  (define clamped-energy (max 1.0 e))
  (* 2.0 (sqrt clamped-energy)))
;; Current radius, derived from the current (age-faded) energy.
(define (plasma-radius space p)
  (plasma-energy->radius (plasma-energy space p)))

;; A plasma is dead once its energy falls below 1.
(define (plasma-dead? space p)
  ((plasma-energy space p) . < . 1))
;; Subtract damage from plasma p's energy. When that kills it, mark the
;; object dead and, on clients only, spawn a short (300 ms) visual effect
;; at the plasma's position sized to its pre-hit radius. Returns the list
;; of changes produced (chadd records).
;; NOTE(review): append!/chadd/effect come from defs/utils — assumed to
;; accumulate onto the local `changes` list; confirm against those modules.
(define (reduce-plasma! space p damage)
  (define changes '())
  (define pr (plasma-radius space p))
  (set-plasma-e! p (- (plasma-e p) damage))
  (when (plasma-dead? space p)
    (set-obj-alive?! p #f)
    (when (client?)
      (define e (effect (next-id) (space-time space) #t 1.0
                        (posvel (space-time space) (obj-x p) (obj-y p) 0.0 0.0 0.0 0.0)
                        pr 300))
      (append! changes (chadd e #f))))
  changes)
;; Render plasma p as a rotating sprite: one full rotation every `cycle` ms,
;; scaled so the plasma's radius (plus a 1px transparent border) maps onto
;; the sprite's pixel size, with alpha `fowa` on layer `layer-ships`.
;; NOTE(review): `cycle` is inexact (1000.0) and Racket's modulo requires
;; integer arguments, so obj-age presumably yields an integer ms count —
;; confirm.
(define (draw-plasma csd center scale p space fowa layer-ships)
  (define cycle 1000.0)
  (define t (modulo (obj-age space p) cycle))
  (define rot (* 2pi (/ t cycle)))
  (define-values (x y) (obj->screen p center scale))
  ; add 1 to plasma radius for the transparent pixel border
  (define size (/ (* (* 2.0 (+ 1.0 (plasma-radius space p))) scale) PLASMA_SPRITE_SIZE))
  (sprite x y PLASMA_SPRITE_IDX
          #:layer layer-ships #:a fowa #:theta (exact->inexact (- rot)) #:m size))
| null | https://raw.githubusercontent.com/david-vanderson/warp/cdc1d0bd942780fb5360dc6a34a2a06cf9518408/plasma.rkt | racket | ms after which plasma starts fading | #lang racket/base
(require mode-lambda
mode-lambda/static)
(require "defs.rkt"
"utils.rkt"
"draw-utils.rkt")
(provide (all-defined-out))
energy loss per second after PLASMA_LIFE
(define (plasma-setup-pre! sd)
(add-sprite!/file sd 'plasma (build-path IMAGEDIR "plasma.png")))
(define PLASMA_SPRITE_IDX #f)
(define PLASMA_SPRITE_SIZE #f)
(define (plasma-setup-post! csd)
(set! PLASMA_SPRITE_IDX (sprite-idx csd 'plasma))
(define w (sprite-width csd PLASMA_SPRITE_IDX))
(define h (sprite-height csd PLASMA_SPRITE_IDX))
(set! PLASMA_SPRITE_SIZE (max w h)))
(define (plasma-damage space p)
(plasma-energy space p))
(define (plasma-energy space p)
(- (plasma-e p) (* (max 0.0 (- (obj-age space p) PLASMA_LIFE)) (/ PLASMA_FADE 1000.0))))
(define (plasma-energy->radius e)
(* 2.0 (sqrt (max 1.0 e))))
(define (plasma-radius space p)
(plasma-energy->radius (plasma-energy space p)))
(define (plasma-dead? space p)
((plasma-energy space p) . < . 1))
(define (reduce-plasma! space p damage)
(define changes '())
(define pr (plasma-radius space p))
(set-plasma-e! p (- (plasma-e p) damage))
(when (plasma-dead? space p)
(set-obj-alive?! p #f)
(when (client?)
(define e (effect (next-id) (space-time space) #t 1.0
(posvel (space-time space) (obj-x p) (obj-y p) 0.0 0.0 0.0 0.0)
pr 300))
(append! changes (chadd e #f))))
changes)
(define (draw-plasma csd center scale p space fowa layer-ships)
(define cycle 1000.0)
(define t (modulo (obj-age space p) cycle))
(define rot (* 2pi (/ t cycle)))
(define-values (x y) (obj->screen p center scale))
add 1 to plasma radius for the transparent pixel border
(define size (/ (* (* 2.0 (+ 1.0 (plasma-radius space p))) scale) PLASMA_SPRITE_SIZE))
(sprite x y PLASMA_SPRITE_IDX
#:layer layer-ships #:a fowa #:theta (exact->inexact (- rot)) #:m size))
|
7faa70b551d12de728dd027455e0fd9b081b4eabf246ac4de73be381883c6fad | jacekschae/learn-reitit-course-files | user.clj | (ns user
(:require [integrant.repl :as ig-repl]
[integrant.core :as ig]
[integrant.repl.state :as state]
[cheffy.server]))
(ig-repl/set-prep!
(fn [] (-> "resources/config.edn" slurp ig/read-string)))
(def go ig-repl/go)
(def halt ig-repl/halt)
(def reset ig-repl/reset)
(def reset-all ig-repl/reset-all)
(def app (-> state/system :cheffy/app))
(def db (-> state/system :db/postgres))
(comment
(app {:request-method :get
:uri "/swagger.json"})
(go)
(halt)
(reset)) | null | https://raw.githubusercontent.com/jacekschae/learn-reitit-course-files/c13a8eb622a371ad719d3d9023f1b4eff9392e4c/increments/16-list-all-recipes/dev/src/user.clj | clojure | (ns user
(:require [integrant.repl :as ig-repl]
[integrant.core :as ig]
[integrant.repl.state :as state]
[cheffy.server]))
(ig-repl/set-prep!
(fn [] (-> "resources/config.edn" slurp ig/read-string)))
(def go ig-repl/go)
(def halt ig-repl/halt)
(def reset ig-repl/reset)
(def reset-all ig-repl/reset-all)
(def app (-> state/system :cheffy/app))
(def db (-> state/system :db/postgres))
(comment
(app {:request-method :get
:uri "/swagger.json"})
(go)
(halt)
(reset)) | |
461e3ac1f0354b2895e1528a773a1991a0729c9de2326295fd539587f9d0af7b | ragkousism/Guix-on-Hurd | system.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2016 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (test-system)
#:use-module (gnu)
#:use-module (guix store)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-64))
Test the ( gnu system ) module .
;; Minimal root file system shared by the test operating systems below:
;; an ext4 partition identified by the volume label "my-root".
(define %root-fs
  (file-system
    (device "my-root")
    (title 'label)
    (mount-point "/")
    (type "ext4")))

;; Baseline operating system: %root-fs plus the standard base file
;; systems, default user accounts, and GRUB installed on /dev/sdX.
(define %os
  (operating-system
    (host-name "komputilo")
    (timezone "Europe/Berlin")
    (locale "en_US.utf8")
    (bootloader (grub-configuration (device "/dev/sdX")))
    (file-systems (cons %root-fs %base-file-systems))
    (users %base-user-accounts)))
;; A LUKS mapping from /dev/foo to /dev/mapper/my-luks-device.
(define %luks-device
  (mapped-device
   (source "/dev/foo") (target "my-luks-device")
   (type luks-device-mapping)))

;; Variant of the baseline OS whose root file system explicitly declares a
;; dependency on the LUKS mapped device above.
(define %os-with-mapped-device
  (operating-system
    (host-name "komputilo")
    (timezone "Europe/Berlin")
    (locale "en_US.utf8")
    (bootloader (grub-configuration (device "/dev/sdX")))
    (mapped-devices (list %luks-device))
    (file-systems (cons (file-system
                          (inherit %root-fs)
                          (dependencies (list %luks-device)))
                        %base-file-systems))
    (users %base-user-accounts)))
(test-begin "system")
(test-assert "operating-system-store-file-system"
;; %BASE-FILE-SYSTEMS defines a bind-mount for /gnu/store, but this
;; shouldn't be a problem.
(eq? %root-fs
(operating-system-store-file-system %os)))
(test-assert "operating-system-store-file-system, prefix"
(let* ((gnu (file-system
(device "foobar")
(mount-point (dirname (%store-prefix)))
(type "ext5")))
(os (operating-system
(inherit %os)
(file-systems (cons* gnu %root-fs
%base-file-systems)))))
(eq? gnu (operating-system-store-file-system os))))
(test-assert "operating-system-store-file-system, store"
(let* ((gnu (file-system
(device "foobar")
(mount-point (%store-prefix))
(type "ext5")))
(os (operating-system
(inherit %os)
(file-systems (cons* gnu %root-fs
%base-file-systems)))))
(eq? gnu (operating-system-store-file-system os))))
(test-equal "operating-system-user-mapped-devices"
'()
(operating-system-user-mapped-devices %os-with-mapped-device))
(test-equal "operating-system-boot-mapped-devices"
(list %luks-device)
(operating-system-boot-mapped-devices %os-with-mapped-device))
(test-equal "operating-system-boot-mapped-devices, implicit dependency"
(list %luks-device)
;; Here we expect the implicit dependency between "/" and
;; "/dev/mapper/my-luks-device" to be found, in spite of the lack of a
;; 'dependencies' field in the root file system.
(operating-system-boot-mapped-devices
(operating-system
(inherit %os-with-mapped-device)
(file-systems (cons (file-system
(device "/dev/mapper/my-luks-device")
(title 'device)
(mount-point "/")
(type "ext4"))
%base-file-systems)))))
(test-end)
| null | https://raw.githubusercontent.com/ragkousism/Guix-on-Hurd/e951bb2c0c4961dc6ac2bda8f331b9c4cee0da95/tests/system.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
%BASE-FILE-SYSTEMS defines a bind-mount for /gnu/store, but this
shouldn't be a problem.
Here we expect the implicit dependency between "/" and
"/dev/mapper/my-luks-device" to be found, in spite of the lack of a
'dependencies' field in the root file system. | Copyright © 2016 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (test-system)
#:use-module (gnu)
#:use-module (guix store)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-64))
Test the ( gnu system ) module .
(define %root-fs
(file-system
(device "my-root")
(title 'label)
(mount-point "/")
(type "ext4")))
(define %os
(operating-system
(host-name "komputilo")
(timezone "Europe/Berlin")
(locale "en_US.utf8")
(bootloader (grub-configuration (device "/dev/sdX")))
(file-systems (cons %root-fs %base-file-systems))
(users %base-user-accounts)))
(define %luks-device
(mapped-device
(source "/dev/foo") (target "my-luks-device")
(type luks-device-mapping)))
(define %os-with-mapped-device
(operating-system
(host-name "komputilo")
(timezone "Europe/Berlin")
(locale "en_US.utf8")
(bootloader (grub-configuration (device "/dev/sdX")))
(mapped-devices (list %luks-device))
(file-systems (cons (file-system
(inherit %root-fs)
(dependencies (list %luks-device)))
%base-file-systems))
(users %base-user-accounts)))
(test-begin "system")
(test-assert "operating-system-store-file-system"
(eq? %root-fs
(operating-system-store-file-system %os)))
(test-assert "operating-system-store-file-system, prefix"
(let* ((gnu (file-system
(device "foobar")
(mount-point (dirname (%store-prefix)))
(type "ext5")))
(os (operating-system
(inherit %os)
(file-systems (cons* gnu %root-fs
%base-file-systems)))))
(eq? gnu (operating-system-store-file-system os))))
(test-assert "operating-system-store-file-system, store"
(let* ((gnu (file-system
(device "foobar")
(mount-point (%store-prefix))
(type "ext5")))
(os (operating-system
(inherit %os)
(file-systems (cons* gnu %root-fs
%base-file-systems)))))
(eq? gnu (operating-system-store-file-system os))))
(test-equal "operating-system-user-mapped-devices"
'()
(operating-system-user-mapped-devices %os-with-mapped-device))
(test-equal "operating-system-boot-mapped-devices"
(list %luks-device)
(operating-system-boot-mapped-devices %os-with-mapped-device))
(test-equal "operating-system-boot-mapped-devices, implicit dependency"
(list %luks-device)
(operating-system-boot-mapped-devices
(operating-system
(inherit %os-with-mapped-device)
(file-systems (cons (file-system
(device "/dev/mapper/my-luks-device")
(title 'device)
(mount-point "/")
(type "ext4"))
%base-file-systems)))))
(test-end)
|
9f9f24ec1f6a65af2751f6349569bef7e3d180b10fb826b4d58eaaedcf801c94 | lisp/de.setf.utility | package.lisp | -*- Mode : lisp ; Syntax : ansi - common - lisp ; Base : 10 ; Package : de.setf.utility.implementation ; -*-
(in-package :de.setf.utility.implementation)
This file is the system definition for the ETF codec module for the ' de.setf.utility ' Common Lisp library .
;;;
Copyright 2010 [ ) All Rights Reserved
` de.setf.utility ` is free software : you can redistribute it and/or modify it under the terms of version 3
of the the GNU Lesser General Public License as published by the Free Software Foundation .
;;;
;;; `de.setf.utility` is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
;;; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
;;; See the the GNU Lesser General Public License for more details.
;;;
A copy of the GNU Lesser General Public License should be included with ` de.setf.utility ` , as ` lgpl.txt ` .
;;; If not, see the GNU [site](/).
(defpackage :de.setf.utility.etf
(:use :de.setf.utility.codecs)
(:nicknames :etf)
(:export :*intern-operator*
:*package*
:*buffer-get-term-hook*
:*buffer-set-term-hook*
:*stream-read-term-hook*
:*stream-write-term-hook*
:atom_cache_ref
:atom_ext
:binary_ext
:bit_binary_ext
:export_ext
:float_ext
:fun_ext
:integer_ext
:large_tuple_ext
:new_float_ext
:new_fun_ext
:nil_ext
:large_big_ext
:list_ext
:new_reference_ext
:pid_ext
:port_ext
:reference_ext
:small_atom_ext
:small_big_ext
:small_integer_ext
:small_tuple_ext
:string_ext
:nil
:true
:false
:decode-term
:decode-bert-term
:encode-term
:encode-bert-term
NYI - need to promote vector streams
NYI
:stream-read-term
:stream-write-term
:buffer-set-term
:buffer-get-term)
(:documentation "The home package for the Erlang 'external term format' tag names, and interface
operators names. It includes all tag names, internal and api coding operator names and the
also uses the :de.setf.utility.codecs package for abbreviated access to its operator names.
It exports the api and all standard term tag names, even though not all are implemented."))
| null | https://raw.githubusercontent.com/lisp/de.setf.utility/782cd79d99ebf40deeed60c492be9873bbe42a15/codecs/etf/package.lisp | lisp | Syntax : ansi - common - lisp ; Base : 10 ; Package : de.setf.utility.implementation ; -*-
`de.setf.utility` is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the the GNU Lesser General Public License for more details.
If not, see the GNU [site](/). |
(in-package :de.setf.utility.implementation)
This file is the system definition for the ETF codec module for the ' de.setf.utility ' Common Lisp library .
Copyright 2010 [ ) All Rights Reserved
` de.setf.utility ` is free software : you can redistribute it and/or modify it under the terms of version 3
of the the GNU Lesser General Public License as published by the Free Software Foundation .
A copy of the GNU Lesser General Public License should be included with ` de.setf.utility ` , as ` lgpl.txt ` .
(defpackage :de.setf.utility.etf
(:use :de.setf.utility.codecs)
(:nicknames :etf)
(:export :*intern-operator*
:*package*
:*buffer-get-term-hook*
:*buffer-set-term-hook*
:*stream-read-term-hook*
:*stream-write-term-hook*
:atom_cache_ref
:atom_ext
:binary_ext
:bit_binary_ext
:export_ext
:float_ext
:fun_ext
:integer_ext
:large_tuple_ext
:new_float_ext
:new_fun_ext
:nil_ext
:large_big_ext
:list_ext
:new_reference_ext
:pid_ext
:port_ext
:reference_ext
:small_atom_ext
:small_big_ext
:small_integer_ext
:small_tuple_ext
:string_ext
:nil
:true
:false
:decode-term
:decode-bert-term
:encode-term
:encode-bert-term
NYI - need to promote vector streams
NYI
:stream-read-term
:stream-write-term
:buffer-set-term
:buffer-get-term)
(:documentation "The home package for the Erlang 'external term format' tag names, and interface
operators names. It includes all tag names, internal and api coding operator names and the
also uses the :de.setf.utility.codecs package for abbreviated access to its operator names.
It exports the api and all standard term tag names, even though not all are implemented."))
|
cd0f8fe75d0b6025e6bb65936896e5f491417a5adb51edff3c651082fd40b3cb | Kakadu/fp2022 | ast_utils.mli | * Copyright 2021 - 2022 , Kakadu , and contributors
* SPDX - License - Identifier : LGPL-3.0 - or - later
(** [show_typ t] is a human-readable string rendering of the type [t]. *)
val show_typ : Ast.typ -> string

(** [eq_typ a b] tests whether types [a] and [b] are equal. *)
val eq_typ : Ast.typ -> Ast.typ -> bool
| null | https://raw.githubusercontent.com/Kakadu/fp2022/853ab6831fdce3b3e1b68d49e5163d0293d56bf5/Golang/lib/ast_utils.mli | ocaml | * Copyright 2021 - 2022 , Kakadu , and contributors
* SPDX - License - Identifier : LGPL-3.0 - or - later
val show_typ : Ast.typ -> string
val eq_typ : Ast.typ -> Ast.typ -> bool
| |
022278278db126f98ca53c606f406c3ccac9701e77361d13dad2857beb69f97f | auser/beehive | beehive_storage_srv.erl | %%%-------------------------------------------------------------------
%%% File : bh_storage_srv.erl
Author :
%%% Description :
%%%
Created : Thu Dec 3 10:38:18 PST 2009
%%%-------------------------------------------------------------------
-module (beehive_storage_srv).
-include ("beehive.hrl").
-include ("common.hrl").
-include_lib("kernel/include/file.hrl").
-behaviour(gen_cluster).
%% API
-export([
start_link/0,
fetch_or_build_bee/2,
build_bee/1, build_bee/2,
seed_nodes/1, has_bee_named/1
]).
callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
callback
-export([handle_join/2, handle_leave/3, handle_vote/2]).
-record(state, {
squashed_disk
}).
-define(SERVER, ?MODULE).
-define (TAB_NAME_TO_PATH, 'name_to_path_table').
%%====================================================================
%% API
%%====================================================================
%% @doc Seed nodes for gen_cluster discovery: the node hosting the first
%% known instance of this server.
seed_nodes(_State) -> [node(seed_pid())].

%% @doc First known seed pid.
seed_pid() -> hd(seed_pids([])).

%% @doc All known pids of this server across the cluster. When no global
%% registration exists yet, this process is the master, so return self().
seed_pids(_State) ->
  case global:whereis_name(?MODULE) of
    undefined -> [self()]; % We are the master
    _ ->
      {ok, Plist} = gen_cluster:plist(?MODULE),
      Plist
  end.
%%--------------------------------------------------------------------
Function : start_link ( ) - > { ok , Pid } | ignore | { error , Error }
%% Description: Starts the server
%%--------------------------------------------------------------------
%% @doc Bundle the application given as an #app{} record or by name, with
%% no caller to notify. Returns {error, app_not_found} for unknown names.
build_bee(App) when is_record(App, app) -> build_bee(App, undefined);
build_bee(Name) ->
  case apps:find_by_name(Name) of
    App when is_record(App, app) -> build_bee(App, undefined);
    _ -> {error, app_not_found}
  end.

%% @doc Same as build_bee/1, but Caller (a pid, or 'undefined') is passed
%% through to the bundling process. Blocks until the build finishes.
build_bee(App, Caller) when is_record(App, app) -> gen_cluster:call(?SERVER, {build_bee, App, Caller}, infinity);
build_bee(Name, Caller) ->
  case apps:find_by_name(Name) of
    App when is_record(App, app) -> build_bee(App, Caller);
    _ -> {error, app_not_found}
  end.

%% @doc Prefer fetching an already-built bee from a peer storage server;
%% fall back to building it locally when no peer has the bundle.
fetch_or_build_bee(App, Caller) ->
  gen_cluster:call(?SERVER, {fetch_or_build_bee, App, Caller}, infinity).
%%-------------------------------------------------------------------
%% @spec (Name) -> true | false
@doc Report if the bee has been bundled on this beehive_storage_srv
%%
%% @end
%%-------------------------------------------------------------------
has_bee_named(Name) -> lists:member(Name, beehive_bee_object:ls()).
start_link() ->
gen_cluster:start_link({local, ?SERVER}, ?MODULE, [], []).
%%====================================================================
callbacks
%%====================================================================
%%--------------------------------------------------------------------
%% Function: init(Args) -> {ok, State} |
{ ok , State , Timeout } |
%% ignore |
%% {stop, Reason}
%% Description: Initiates the server
%%--------------------------------------------------------------------
init([]) ->
  %% Directory where squashed (bundled) bee images are kept locally;
  %% configurable via 'squashed_storage', defaulting under the beehive dir.
  %% Created on startup if missing.
  SquashedDir = config:search_for_application_value(squashed_storage, ?BEEHIVE_DIR("squashed")),
  bh_file_utils:ensure_dir_exists([SquashedDir]),
  {ok, #state{
    squashed_disk = SquashedDir
  }}.
%%--------------------------------------------------------------------
Function : % % handle_call(Request , From , State ) - > { reply , Reply , State } |
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% Description: Handling call messages
%%--------------------------------------------------------------------
%% @doc Synchronous requests.
%% {build_bee, App, Caller}: bundle App. On failure, record an #app_error{}
%% on the app (saved via app_manager) and reply {error, NewApp}. On success,
%% clear any prior error, save the updated app, and reply {ok, NewApp, Bee}.
handle_call({build_bee, App, Caller}, _From, State) ->
  Resp = case internal_build_bee(App, Caller, State) of
    {error, {ExitCode, Reasons}} ->
      Error = #app_error{
        stage = bundle, % the failure happened while bundling
        stdout = lists:reverse(Reasons),
        exit_status = ExitCode,
        timestamp = date_util:now_to_seconds()
      },
      {ok, NewApp} = app_manager:request_to_save_app(App#app{latest_error = Error}),
      {error, NewApp};
    Props when is_list(Props) ->
      %% Success: Props describes the new bee; clear any previous error.
      {updated, NewApp} = apps:update(App#app{latest_error = undefined}, Props),
      Bee = bees:new(Props),
      {ok, NewApp, Bee}
  end,
  {reply, Resp, State};
%% {fetch_or_build_bee, App, Caller}: try to fetch a pre-built bee from a
%% peer first; build locally only when fetching fails.
handle_call({fetch_or_build_bee, App, Caller}, _From, State) ->
  Resp = case fetch_bee(App, Caller, State) of
    {error, _} -> internal_build_bee(App, Caller, State);
    T ->
      ?LOG(debug, "fetch_bee(~p, ~p) returned ~p", [App, Caller, T]),
      T
  end,
  {reply, Resp, State};
%% Catch-all for unrecognized requests.
handle_call(_Request, _From, State) ->
  Reply = ok,
  {reply, Reply, State}.
%%--------------------------------------------------------------------
Function : handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
%% Description: Handling cast messages
%%--------------------------------------------------------------------
handle_cast(stop, State) ->
{stop, normal, State};
handle_cast(_Msg, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
Function : handle_info(Info , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
%% Description: Handling all non call/cast messages
%%--------------------------------------------------------------------
beehive_bee_object : ) , Name )
% Info = beehive_bee_object:info(Name),
% erlang:display({fetch_bee, Info}),
? NOTIFY({bee , , Info } ) ,
% Caller ! {bee, bee_built, Info},
% Info.
handle_info(Info, State) ->
erlang:display({got, Info}),
{noreply, State}.
%%--------------------------------------------------------------------
%% Function: terminate(Reason, State) -> void()
Description : This function is called by a when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any necessary
cleaning up . When it returns , the terminates with Reason .
%% The return value is ignored.
%%--------------------------------------------------------------------
terminate(_Reason, _State) ->
ok.
%%--------------------------------------------------------------------
Func : code_change(OldVsn , State , Extra ) - > { ok , NewState }
%% Description: Convert process state when code is changed
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%--------------------------------------------------------------------
Function : handle_join(JoiningPid , Pidlist , State ) - > { ok , State }
JoiningPid = pid ( ) ,
Pidlist = list ( ) of pids ( )
%% Description: Called whenever a node joins the cluster via this node
directly . JoiningPid is the node that joined . Note that JoiningPid may
join more than once . Pidlist contains all known pids . Pidlist includes
JoiningPid .
%%--------------------------------------------------------------------
handle_join(_JoiningPid, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
Function : handle_leave(LeavingPid , Pidlist , Info , State ) - > { ok , State }
JoiningPid = pid ( ) ,
Pidlist = list ( ) of pids ( )
%% Description: Called whenever a node joins the cluster via another node and
%% the joining node is simply announcing its presence.
%%--------------------------------------------------------------------
handle_leave(_LeavingPid, _Info, State) ->
{ok, State}.
% HANDLE VOTING
handle_vote(_Msg, State) ->
{reply, 0, State}.
%%--------------------------------------------------------------------
Internal functions
%%--------------------------------------------------------------------
%% @doc Find a peer storage server that already has a bundled bee named
%% Name and have it ship the bee object to the caller's node. Returns
%% {error, does_not_exist} when no peer has the bundle.
%% NOTE(review): node(Caller) requires Caller to be a pid; callers passing
%% 'undefined' (see build_bee/1) would crash here — confirm call paths.
fetch_bee(#app{name = Name} = _App, Caller, _State) ->
  case lists:filter(fun(Pid) ->
      rpc:call(node(Pid), ?MODULE, has_bee_named, [Name])
    end, seed_pids({})) of
    [] ->
      ?LOG(debug, "lists:filter on seed_pids([]) [~p] returned []", [seed_pids({})]),
      {error, does_not_exist};
    [H|_ServerPids] ->
      %% For now we don't verify receipt of the bee; we assume it will be
      %% sent across the wire successfully, for simplicity.
      %% TODO: Add error checking to the transfer.
      O = rpc:call(node(H), beehive_bee_object, send_bee_object, [node(Caller), Name, Caller]),
      ?LOG(debug, "rpc:call(~p, beehive_bee_object, send_bee_object, [~p, ~p, ~p]) returned ~p", [node(H), node(Caller), Name, Caller, O]),
      O
  end.
%%-------------------------------------------------------------------
@spec ( App::app ( ) , Caller , State ) - > { ok , Value }
%% @doc This will call the bundle task on the application template
and bundle the application into a known file :
%%
%% @end
%%-------------------------------------------------------------------
%% @doc Build the bee bundle for App locally: resolve the app's repository
%% URL, then delegate to beehive_bee_object:bundle/2 with the app (carrying
%% that URL) as a proplist. Returns the bundle result, or {error, Reason}
%% when the repository URL cannot be determined.
internal_build_bee(App, Caller, _State) ->
  case handle_repos_lookup(App) of
    {ok, ReposUrl} ->
      O = beehive_bee_object:bundle(apps:to_proplist(App#app{repo_url = ReposUrl}), Caller),
      ?LOG(debug, "internal_build_bee(~p, ~p) returned {ok, ~p} and bundle returned ~p", [App, Caller, ReposUrl, O]),
      O;
    {error, _} = T -> T
    %% (A legacy babysitter_integration-based implementation used to live
    %% here as commented-out code: it ran the bundle command via babysitter
    %% and recorded an #app_error{} with stage/stdout/stderr/exit status on
    %% failure. See VCS history if it is ever needed again.)
  end.
%% @doc Resolve the repository URL for App. Only the 'offsite' git store is
%% supported; any other configuration yields {error, repos_not_found}.
handle_repos_lookup(App) ->
  case config:search_for_application_value(git_store, offsite) of
    offsite ->
      {ok, handle_offsite_repos_lookup(App)};
    _ ->
      io:format("Looking in local repos not yet supported~n"),
      {error, repos_not_found}
  end.

%% @doc Offsite lookup: an #app{} record carries its own repo_url; a bare
%% name is first resolved to an app record. Returns false when nothing
%% matches (including the empty-list input).
handle_offsite_repos_lookup([]) -> false;
handle_offsite_repos_lookup(App) when is_record(App, app) ->
  App#app.repo_url;
handle_offsite_repos_lookup(AppName) ->
  case apps:find_by_name(AppName) of
    App when is_record(App, app) ->
      handle_offsite_repos_lookup(App);
    _ -> false
  end.
| null | https://raw.githubusercontent.com/auser/beehive/dfe257701b21c56a50af73c8203ecac60ed21991/lib/erlang/apps/beehive/src/beehive/beehive_storage_srv.erl | erlang | -------------------------------------------------------------------
File : bh_storage_srv.erl
Description :
-------------------------------------------------------------------
API
====================================================================
API
====================================================================
We are the master
--------------------------------------------------------------------
Description: Starts the server
--------------------------------------------------------------------
-------------------------------------------------------------------
@spec (Name) -> true | false
@end
-------------------------------------------------------------------
====================================================================
====================================================================
--------------------------------------------------------------------
Function: init(Args) -> {ok, State} |
ignore |
{stop, Reason}
Description: Initiates the server
--------------------------------------------------------------------
--------------------------------------------------------------------
% handle_call(Request , From , State ) - > { reply , Reply , State } |
{stop, Reason, Reply, State} |
{stop, Reason, State}
Description: Handling call messages
--------------------------------------------------------------------
erm?
--------------------------------------------------------------------
{stop, Reason, State}
Description: Handling cast messages
--------------------------------------------------------------------
--------------------------------------------------------------------
{stop, Reason, State}
Description: Handling all non call/cast messages
--------------------------------------------------------------------
Info = beehive_bee_object:info(Name),
erlang:display({fetch_bee, Info}),
Caller ! {bee, bee_built, Info},
Info.
--------------------------------------------------------------------
Function: terminate(Reason, State) -> void()
terminate. It should be the opposite of Module:init/1 and do any necessary
The return value is ignored.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Convert process state when code is changed
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Called whenever a node joins the cluster via this node
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Called whenever a node joins the cluster via another node and
the joining node is simply announcing its presence.
--------------------------------------------------------------------
HANDLE VOTING
--------------------------------------------------------------------
--------------------------------------------------------------------
For now we won't verify the receipt of the bee
we'll assume that it will be sent across the wire for simplicity
-------------------------------------------------------------------
@doc This will call the bundle task on the application template
@end
-------------------------------------------------------------------
E -> E
end;
% stage, % stage at which the app failed
% stderr, % string with the stderr
% stdout, % string with the stdout
exit_status , % exit status code
% timestamp % time when the exit happened
Error = #app_error{
stage = Stage,
stderr = Stderr,
stdout = Stdout,
},
{ ok , NewApp } = app_manager : = Error } ) ,
{error, {babysitter, App#app{latest_error = Error}}};
Else ->
erlang:display({got_something_else,babysitter_run, Else}),
{error, Else}
end;
{error, _} = T -> T | Author :
Created : Thu Dec 3 10:38:18 PST 2009
-module (beehive_storage_srv).
-include ("beehive.hrl").
-include ("common.hrl").
-include_lib("kernel/include/file.hrl").
-behaviour(gen_cluster).
-export([
start_link/0,
fetch_or_build_bee/2,
build_bee/1, build_bee/2,
seed_nodes/1, has_bee_named/1
]).
callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
callback
-export([handle_join/2, handle_leave/3, handle_vote/2]).
-record(state, {
squashed_disk
}).
-define(SERVER, ?MODULE).
-define (TAB_NAME_TO_PATH, 'name_to_path_table').
seed_nodes(_State) -> [node(seed_pid())].
seed_pid() -> hd(seed_pids([])).
seed_pids(_State) ->
case global:whereis_name(?MODULE) of
_ ->
{ok, Plist} = gen_cluster:plist(?MODULE),
Plist
end.
Function : start_link ( ) - > { ok , Pid } | ignore | { error , Error }
build_bee(App) when is_record(App, app) -> build_bee(App, undefined);
build_bee(Name) ->
case apps:find_by_name(Name) of
App when is_record(App, app) -> build_bee(App, undefined);
_ -> {error, app_not_found}
end.
build_bee(App, Caller) when is_record(App, app) -> gen_cluster:call(?SERVER, {build_bee, App, Caller}, infinity);
build_bee(Name, Caller) ->
case apps:find_by_name(Name) of
App when is_record(App, app) -> build_bee(App, Caller);
_ -> {error, app_not_found}
end.
fetch_or_build_bee(App, Caller) ->
gen_cluster:call(?SERVER, {fetch_or_build_bee, App, Caller}, infinity).
@doc Report if the bee has been bundled on this beehive_storage_srv
has_bee_named(Name) -> lists:member(Name, beehive_bee_object:ls()).
start_link() ->
gen_cluster:start_link({local, ?SERVER}, ?MODULE, [], []).
callbacks
{ ok , State , Timeout } |
init([]) ->
SquashedDir = config:search_for_application_value(squashed_storage, ?BEEHIVE_DIR("squashed")),
bh_file_utils:ensure_dir_exists([SquashedDir]),
{ok, #state{
squashed_disk = SquashedDir
}}.
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
handle_call({build_bee, App, Caller}, _From, State) ->
Resp = case internal_build_bee(App, Caller, State) of
{error, {ExitCode, Reasons}} ->
Error = #app_error{
stdout = lists:reverse(Reasons),
exit_status = ExitCode,
timestamp = date_util:now_to_seconds()
},
{ok, NewApp} = app_manager:request_to_save_app(App#app{latest_error = Error}),
{error, NewApp};
Props when is_list(Props) ->
{updated, NewApp} = apps:update(App#app{latest_error = undefined}, Props),
Bee = bees:new(Props),
{ok, NewApp, Bee}
end,
{reply, Resp, State};
handle_call({fetch_or_build_bee, App, Caller}, _From, State) ->
Resp = case fetch_bee(App, Caller, State) of
{error, _} -> internal_build_bee(App, Caller, State);
T ->
?LOG(debug, "fetch_bee(~p, ~p) returned ~p", [App, Caller, T]),
T
end,
{reply, Resp, State};
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
Function : handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_cast(stop, State) ->
{stop, normal, State};
handle_cast(_Msg, State) ->
{noreply, State}.
Function : handle_info(Info , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
beehive_bee_object : ) , Name )
? NOTIFY({bee , , Info } ) ,
handle_info(Info, State) ->
erlang:display({got, Info}),
{noreply, State}.
Description : This function is called by a when it is about to
cleaning up . When it returns , the terminates with Reason .
terminate(_Reason, _State) ->
ok.
Func : code_change(OldVsn , State , Extra ) - > { ok , NewState }
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Function : handle_join(JoiningPid , Pidlist , State ) - > { ok , State }
JoiningPid = pid ( ) ,
Pidlist = list ( ) of pids ( )
directly . JoiningPid is the node that joined . Note that JoiningPid may
join more than once . Pidlist contains all known pids . Pidlist includes
JoiningPid .
handle_join(_JoiningPid, State) ->
{noreply, State}.
Function : handle_leave(LeavingPid , Pidlist , Info , State ) - > { ok , State }
JoiningPid = pid ( ) ,
Pidlist = list ( ) of pids ( )
handle_leave(_LeavingPid, _Info, State) ->
{ok, State}.
handle_vote(_Msg, State) ->
{reply, 0, State}.
Internal functions
fetch_bee(#app{name = Name} = _App, Caller, _State) ->
case lists:filter(fun(Pid) ->
rpc:call(node(Pid), ?MODULE, has_bee_named, [Name])
end, seed_pids({})) of
[] ->
?LOG(debug, "lists:filter on seed_pids([]) [~p] returned []", [seed_pids({})]),
{error, does_not_exist};
[H|_ServerPids] ->
TODO : Add error checking to
O = rpc:call(node(H), beehive_bee_object, send_bee_object, [node(Caller), Name, Caller]),
?LOG(debug, "rpc:call(~p, beehive_bee_object, send_bee_object, [~p, ~p, ~p]) returned ~p", [node(H), node(Caller), Name, Caller, O]),
O
end.
@spec ( App::app ( ) , Caller , State ) - > { ok , Value }
and bundle the application into a known file :
internal_build_bee(App, Caller, _State) ->
case handle_repos_lookup(App) of
{ok, ReposUrl} ->
O = beehive_bee_object:bundle(apps:to_proplist(App#app{repo_url = ReposUrl}), Caller),
?LOG(debug, "internal_build_bee(~p, ~p) returned {ok, ~p} and bundle returned ~p", [App, Caller, ReposUrl, O]),
O;
{error, _} = T -> T
case babysitter_integration : command(bundle , App#app{repo_url = ReposUrl } , unusued , Proplist ) of
{ ok , _ OsPid , 0 } - >
case fetch_bee(App , State ) of
, _ Resp } = T - > T ;
{ error , Stage , _ OsPid , ExitCode , Stdout , Stderr } - >
exit_status = ExitCode ,
timestamp = date_util : now_to_seconds ( )
end.
handle_repos_lookup(App) ->
case config:search_for_application_value(git_store, offsite) of
offsite ->
{ok, handle_offsite_repos_lookup(App)};
_ ->
io:format("Looking in local repos not yet supported~n"),
{error, repos_not_found}
end.
handle_offsite_repos_lookup([]) -> false;
handle_offsite_repos_lookup(App) when is_record(App, app) ->
App#app.repo_url;
handle_offsite_repos_lookup(AppName) ->
case apps:find_by_name(AppName) of
App when is_record(App, app) ->
handle_offsite_repos_lookup(App);
_ -> false
end.
|
52734f27496c4d2fcbd98b8ea981025e9f81b225f02dde63f2cfbb7c20dd8de7 | ktakashi/sagittarius-scheme | algorithms.scm | -*- mode : scheme ; coding : utf-8 ; -*-
;;;
;;; sagittarius/crypto/pkcs/algorithms.scm - PKCS algorithms
;;;
Copyright ( c ) 2022 < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
#!nounbound
(library (sagittarius crypto pkcs algorithms)
(export pkcs-encrypt-data
pkcs-decrypt-data
x509-algorithm-identifier->kdf
x509-algorithm-identifier->cipher
oid->kdf
oid->cipher
oid->encryption-scheme)
(import (rnrs)
(clos user)
(sagittarius crypto ciphers)
(sagittarius crypto keys)
(sagittarius crypto pkix algorithms))
(define (pkcs-encrypt-data aid key data . opts)
(let* ((cipher (apply pkcs-make-cipher aid (cipher-direction encrypt)
key opts))
(r (block-cipher-encrypt-last-block cipher data)))
(block-cipher-done! cipher)
r))
(define (pkcs-decrypt-data aid key data . opts)
(let* ((cipher (apply pkcs-make-cipher aid (cipher-direction decrypt)
key opts))
(r (block-cipher-decrypt-last-block cipher data)))
(block-cipher-done! cipher)
r))
(define (pkcs-make-cipher aid direction key . opts)
(let ((kdf (x509-algorithm-identifier->kdf aid))
(make-cipher (x509-algorithm-identifier->cipher aid)))
(let-values (((cipher parameters) (make-cipher key)))
(block-cipher-init! cipher direction
(make-symmetric-key (apply kdf key opts))
parameters))))
(define (x509-algorithm-identifier->kdf x509-algorithm-identifier)
(let ((oid (x509-algorithm-identifier-oid x509-algorithm-identifier))
(param (x509-algorithm-identifier-parameters
x509-algorithm-identifier)))
(oid->kdf oid param)))
(define (x509-algorithm-identifier->cipher x509-algorithm-identifier)
(let ((oid (x509-algorithm-identifier-oid x509-algorithm-identifier))
(param (x509-algorithm-identifier-parameters
x509-algorithm-identifier)))
(oid->cipher oid param)))
(define-generic oid->kdf)
(define-generic oid->cipher)
(define-generic oid->encryption-scheme)
)
| null | https://raw.githubusercontent.com/ktakashi/sagittarius-scheme/80d73e019394331afbb6c7b6bf6a6efc0af8ed6e/ext/crypto/sagittarius/crypto/pkcs/algorithms.scm | scheme | coding : utf-8 ; -*-
sagittarius/crypto/pkcs/algorithms.scm - PKCS algorithms
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| Copyright ( c ) 2022 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
#!nounbound
(library (sagittarius crypto pkcs algorithms)
(export pkcs-encrypt-data
pkcs-decrypt-data
x509-algorithm-identifier->kdf
x509-algorithm-identifier->cipher
oid->kdf
oid->cipher
oid->encryption-scheme)
(import (rnrs)
(clos user)
(sagittarius crypto ciphers)
(sagittarius crypto keys)
(sagittarius crypto pkix algorithms))
(define (pkcs-encrypt-data aid key data . opts)
(let* ((cipher (apply pkcs-make-cipher aid (cipher-direction encrypt)
key opts))
(r (block-cipher-encrypt-last-block cipher data)))
(block-cipher-done! cipher)
r))
(define (pkcs-decrypt-data aid key data . opts)
(let* ((cipher (apply pkcs-make-cipher aid (cipher-direction decrypt)
key opts))
(r (block-cipher-decrypt-last-block cipher data)))
(block-cipher-done! cipher)
r))
(define (pkcs-make-cipher aid direction key . opts)
(let ((kdf (x509-algorithm-identifier->kdf aid))
(make-cipher (x509-algorithm-identifier->cipher aid)))
(let-values (((cipher parameters) (make-cipher key)))
(block-cipher-init! cipher direction
(make-symmetric-key (apply kdf key opts))
parameters))))
(define (x509-algorithm-identifier->kdf x509-algorithm-identifier)
(let ((oid (x509-algorithm-identifier-oid x509-algorithm-identifier))
(param (x509-algorithm-identifier-parameters
x509-algorithm-identifier)))
(oid->kdf oid param)))
(define (x509-algorithm-identifier->cipher x509-algorithm-identifier)
(let ((oid (x509-algorithm-identifier-oid x509-algorithm-identifier))
(param (x509-algorithm-identifier-parameters
x509-algorithm-identifier)))
(oid->cipher oid param)))
(define-generic oid->kdf)
(define-generic oid->cipher)
(define-generic oid->encryption-scheme)
)
|
6180af5c685d85a2166744b3399095dad486e6bb593b3db7dae76b84d0d69992 | RisingFisan/Programacao-Funcional | Teste1718.hs | import Data.List (sort)
import System.Random
Exercicio 1
insert :: Ord a => a -> [a] -> [a]
insert x l | null l = [x]
| x > h = h:insert x t
| otherwise = x:h:t
where (h:t) = l
Exercicio 2
catMaybes :: [Maybe a] -> [a]
catMaybes [] = []
catMaybes (h:t) = case h of Just x -> x:catMaybes t
otherwise -> catMaybes t
Exercicio 3
data Exp a = Const a | Var String | Mais (Exp a) (Exp a) | Mult (Exp a) (Exp a)
instance Show a => Show (Exp a) where
show (Const a) = show a
show (Var a) = a
show (Mais a b) = "(" ++ show a ++ " + " ++ show b ++ ")"
show (Mult a b) = "(" ++ show a ++ " * " ++ show b ++ ")"
Exercicio 4
sortOn :: Ord b => (a -> b) -> [a] -> [a]
sortOn _ [] = []
sortOn f (h:t) = insert' h (sortOn f t)
where insert' a [] = [a]
insert' a (x:y) = if f a > f x then x:insert' a y else a:x:y
Exercicio 5
amplitude :: [Int] -> Int
amplitude [] = 0
amplitude l = mx - mn
where (mx,mn) = foldl (\(a,b) n -> (if n > a then n else a,if n < b then n else b)) (head l,head l) l
parte :: [Int] -> ([Int],[Int])
parte l = foldl1 (\(acc1,acc2) (a,b) -> if amplitude acc1 + amplitude acc2 < amplitude a + amplitude b then (acc1,acc2) else (a,b)) combinacoes
where combinacoes = foldl (\acc n -> splitAt n sl : acc) [] [1..(length l - 1)]
sl = sort l
Exercicio 6
data Imagem = Quadrado Int
| Mover (Int,Int) Imagem
| Juntar [Imagem] deriving (Show)
ex :: Imagem
ex = Mover (5,5) (Juntar [Mover (0,1) (Quadrado 5),
Quadrado 4,
Mover (4,3) (Quadrado 2)])
conta :: Imagem -> Int
conta (Quadrado _) = 1
conta (Mover (_,_) im) = conta im
conta (Juntar l) = sum (map conta l)
apaga :: Imagem -> IO Imagem
apaga im = do
let indquad = indices_quadrados im
randNum <- randomRIO (1,length indquad)
let indtoremove = indquad !! (randNum - 1)
return $ apaga_indice indtoremove im
indices_quadrados :: Imagem -> [Int]
indices_quadrados (Quadrado n) = [n]
indices_quadrados (Mover (_,_) im) = indices_quadrados im
indices_quadrados (Juntar l) = concatMap indices_quadrados l
apaga_indice :: Int -> Imagem -> Imagem
apaga_indice x (Quadrado n) = if x == n then Juntar [] else Quadrado n
apaga_indice x (Mover (a,b) im) = Mover (a,b) (apaga_indice x im)
apaga_indice x (Juntar l) = Juntar (map (apaga_indice x) l) | null | https://raw.githubusercontent.com/RisingFisan/Programacao-Funcional/fb52a6f256a60e129d8a46e2aa0b36d86117155a/Testes/Teste1718.hs | haskell | import Data.List (sort)
import System.Random
Exercicio 1
insert :: Ord a => a -> [a] -> [a]
insert x l | null l = [x]
| x > h = h:insert x t
| otherwise = x:h:t
where (h:t) = l
Exercicio 2
catMaybes :: [Maybe a] -> [a]
catMaybes [] = []
catMaybes (h:t) = case h of Just x -> x:catMaybes t
otherwise -> catMaybes t
Exercicio 3
data Exp a = Const a | Var String | Mais (Exp a) (Exp a) | Mult (Exp a) (Exp a)
instance Show a => Show (Exp a) where
show (Const a) = show a
show (Var a) = a
show (Mais a b) = "(" ++ show a ++ " + " ++ show b ++ ")"
show (Mult a b) = "(" ++ show a ++ " * " ++ show b ++ ")"
Exercicio 4
sortOn :: Ord b => (a -> b) -> [a] -> [a]
sortOn _ [] = []
sortOn f (h:t) = insert' h (sortOn f t)
where insert' a [] = [a]
insert' a (x:y) = if f a > f x then x:insert' a y else a:x:y
Exercicio 5
amplitude :: [Int] -> Int
amplitude [] = 0
amplitude l = mx - mn
where (mx,mn) = foldl (\(a,b) n -> (if n > a then n else a,if n < b then n else b)) (head l,head l) l
parte :: [Int] -> ([Int],[Int])
parte l = foldl1 (\(acc1,acc2) (a,b) -> if amplitude acc1 + amplitude acc2 < amplitude a + amplitude b then (acc1,acc2) else (a,b)) combinacoes
where combinacoes = foldl (\acc n -> splitAt n sl : acc) [] [1..(length l - 1)]
sl = sort l
Exercicio 6
data Imagem = Quadrado Int
| Mover (Int,Int) Imagem
| Juntar [Imagem] deriving (Show)
ex :: Imagem
ex = Mover (5,5) (Juntar [Mover (0,1) (Quadrado 5),
Quadrado 4,
Mover (4,3) (Quadrado 2)])
conta :: Imagem -> Int
conta (Quadrado _) = 1
conta (Mover (_,_) im) = conta im
conta (Juntar l) = sum (map conta l)
apaga :: Imagem -> IO Imagem
apaga im = do
let indquad = indices_quadrados im
randNum <- randomRIO (1,length indquad)
let indtoremove = indquad !! (randNum - 1)
return $ apaga_indice indtoremove im
indices_quadrados :: Imagem -> [Int]
indices_quadrados (Quadrado n) = [n]
indices_quadrados (Mover (_,_) im) = indices_quadrados im
indices_quadrados (Juntar l) = concatMap indices_quadrados l
apaga_indice :: Int -> Imagem -> Imagem
apaga_indice x (Quadrado n) = if x == n then Juntar [] else Quadrado n
apaga_indice x (Mover (a,b) im) = Mover (a,b) (apaga_indice x im)
apaga_indice x (Juntar l) = Juntar (map (apaga_indice x) l) | |
3e1e1e9535c4409efdeb4377971a78187dd9d35a93fe721e53e8930a8f227121 | fission-codes/fission | Validation.hs | -- | Raw validation
module Fission.URL.Validation
( isValid
, isURLChar
) where
import qualified Data.Char as Char
import qualified RIO.Text as Text
import Fission.Prelude
import qualified Fission.Security as Security
-- | Confirm that a raw is valid
isValid :: Text -> Bool
isValid txt =
all (== True) preds
where
preds :: [Bool]
preds = [ okChars
, not blank
, not startsWithHyphen
, not endsWithHyphen
, not startsWithUnderscore
, not inBlocklist
]
blank = Text.null txt
inBlocklist = elem txt Security.blocklist
okChars = Text.all isURLChar txt
startsWithHyphen = Text.isPrefixOf "-" txt
endsWithHyphen = Text.isSuffixOf "-" txt
startsWithUnderscore = Text.isPrefixOf "_" txt
isURLChar :: Char -> Bool
isURLChar c =
Char.isAsciiLower c
|| Char.isDigit c
|| c == '-'
|| c == '_'
| null | https://raw.githubusercontent.com/fission-codes/fission/11d14b729ccebfd69499a534445fb072ac3433a3/fission-core/library/Fission/URL/Validation.hs | haskell | | Raw validation
| Confirm that a raw is valid | module Fission.URL.Validation
( isValid
, isURLChar
) where
import qualified Data.Char as Char
import qualified RIO.Text as Text
import Fission.Prelude
import qualified Fission.Security as Security
isValid :: Text -> Bool
isValid txt =
all (== True) preds
where
preds :: [Bool]
preds = [ okChars
, not blank
, not startsWithHyphen
, not endsWithHyphen
, not startsWithUnderscore
, not inBlocklist
]
blank = Text.null txt
inBlocklist = elem txt Security.blocklist
okChars = Text.all isURLChar txt
startsWithHyphen = Text.isPrefixOf "-" txt
endsWithHyphen = Text.isSuffixOf "-" txt
startsWithUnderscore = Text.isPrefixOf "_" txt
isURLChar :: Char -> Bool
isURLChar c =
Char.isAsciiLower c
|| Char.isDigit c
|| c == '-'
|| c == '_'
|
3acd333e232f8d10bd2848610de77b7654138c78a7a18c895ffd03702232684c | lambdamikel/Common-Lisp-Persistency-Manager | persistence-package.lisp | -*- Mode : Lisp ; Syntax : Ansi - Common - Lisp ; Package : CL - USER ; Base : 10 -*-
(in-package :CL-USER)
;;;
;;; Define Packages
;;;
(defpackage persistence
(:use common-lisp)
(:shadow #:defclass #:defstruct)
(:export
#:defpersistentclass
#:defpersistentstruct
#:initialize-loaded-persistent-object
#:make-object-persistent
#:load-persistent-object
#:print-persistence-manager-info
#:user-write-constructor
#:user-write-component-constructor
#:user-write-initializer
#:user-write-component-initializer
#:user-fill-object
#:user-read-component-object
#:user-create-empty-object))
| null | https://raw.githubusercontent.com/lambdamikel/Common-Lisp-Persistency-Manager/f6a76c8e7ff3375e58f97bc586026125155f154a/src/persistence-package.lisp | lisp | Syntax : Ansi - Common - Lisp ; Package : CL - USER ; Base : 10 -*-
Define Packages
|
(in-package :CL-USER)
(defpackage persistence
(:use common-lisp)
(:shadow #:defclass #:defstruct)
(:export
#:defpersistentclass
#:defpersistentstruct
#:initialize-loaded-persistent-object
#:make-object-persistent
#:load-persistent-object
#:print-persistence-manager-info
#:user-write-constructor
#:user-write-component-constructor
#:user-write-initializer
#:user-write-component-initializer
#:user-fill-object
#:user-read-component-object
#:user-create-empty-object))
|
a4c4772eeea8d2d0cfe1d4fe46fbd9f6d36f9db283e454424567e37fb104d119 | karetsu/xmonad-aloysius | Launcher.hs | -- | Launchers (using dmenu) for finding windows and launching apps
module App.Launcher where
-- Imports ----------------------------------------------------------------------
import XMonad
import XMonad.Util.Dzen
import XMonad.Util.Font ( Align(..) )
import App.Alias
import Theme.ChosenTheme
-- Definitions ------------------------------------------------------------------
powerMenu :: X ()
powerMenu =
dzenConfig
( font sansserif
-- >=> xScreen 0
>=> x 2352
>=> y 0
>=> align AlignCenter
>=> bgColor basebg
>=> addArgs
[ "-h"
, "52"
, "-w"
, "212"
, "-l"
, "3"
, "-m"
, "-e"
, "onstart=uncollapse,grabkeys;"
++ "button3=exit:1;"
++ "key_Escape=ungrabkeys,exit;"
++ "key_s=exec:"
++ suspend
++ ";"
++ "key_r=exec:systemctl reboot;"
++ "key_p=exec:systemctl poweroff;"
, "-p"
]
)
$ "^fg("
++ base12
++ ")-^fg() Power Menu ^fg("
++ base12
++ ")-^fg()\n"
++ " ^fg("
++ base14
++ ")^ca(1, "
++ suspend
++ ")+^fg() Suspend^ca()\n"
++ " ^fg("
++ base10
++ ")^ca(1, systemctl reboot)+^fg() Reboot^ca()\n"
++ " ^fg("
++ base11
++ ")^ca(1, systemctl poweroff)+^fg() Power off ^ca()"
appLauncher :: String
appLauncher =
"dmenu_run -p 'Launch application: ' "
++ "-fn \""
++ sansserif'
++ "\" "
++ "-nb \""
++ basebg
++ "\" "
++ "-nf \""
++ basefg
++ "\" "
++ "-sb \""
++ base14
++ "\" "
++ "-sf \""
++ base00
++ "\" "
-- non-standard dmenu options, please see: /
-- my nixOS overlay (aloysius) includes these patches by default
height 52
++ "-F" -- fuzzy matching
| null | https://raw.githubusercontent.com/karetsu/xmonad-aloysius/9910c8db4bb75600fc9af51b9e64ab5704e8126c/app/App/Launcher.hs | haskell | | Launchers (using dmenu) for finding windows and launching apps
Imports ----------------------------------------------------------------------
Definitions ------------------------------------------------------------------
>=> xScreen 0
non-standard dmenu options, please see: /
my nixOS overlay (aloysius) includes these patches by default
fuzzy matching |
module App.Launcher where
import XMonad
import XMonad.Util.Dzen
import XMonad.Util.Font ( Align(..) )
import App.Alias
import Theme.ChosenTheme
powerMenu :: X ()
powerMenu =
dzenConfig
( font sansserif
>=> x 2352
>=> y 0
>=> align AlignCenter
>=> bgColor basebg
>=> addArgs
[ "-h"
, "52"
, "-w"
, "212"
, "-l"
, "3"
, "-m"
, "-e"
, "onstart=uncollapse,grabkeys;"
++ "button3=exit:1;"
++ "key_Escape=ungrabkeys,exit;"
++ "key_s=exec:"
++ suspend
++ ";"
++ "key_r=exec:systemctl reboot;"
++ "key_p=exec:systemctl poweroff;"
, "-p"
]
)
$ "^fg("
++ base12
++ ")-^fg() Power Menu ^fg("
++ base12
++ ")-^fg()\n"
++ " ^fg("
++ base14
++ ")^ca(1, "
++ suspend
++ ")+^fg() Suspend^ca()\n"
++ " ^fg("
++ base10
++ ")^ca(1, systemctl reboot)+^fg() Reboot^ca()\n"
++ " ^fg("
++ base11
++ ")^ca(1, systemctl poweroff)+^fg() Power off ^ca()"
appLauncher :: String
appLauncher =
"dmenu_run -p 'Launch application: ' "
++ "-fn \""
++ sansserif'
++ "\" "
++ "-nb \""
++ basebg
++ "\" "
++ "-nf \""
++ basefg
++ "\" "
++ "-sb \""
++ base14
++ "\" "
++ "-sf \""
++ base00
++ "\" "
height 52
|
9f4acd59f0c47c90363c8ef6ba1ddcfed5387bacca5253bba80724579daa8f68 | herbelin/coq-hh | gtk_parsing.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Ideutils
let underscore = Glib.Utf8.to_unichar "_" ~pos:(ref 0)
let arobase = Glib.Utf8.to_unichar "@" ~pos:(ref 0)
let prime = Glib.Utf8.to_unichar "'" ~pos:(ref 0)
let bn = Glib.Utf8.to_unichar "\n" ~pos:(ref 0)
let space = Glib.Utf8.to_unichar " " ~pos:(ref 0)
let tab = Glib.Utf8.to_unichar "\t" ~pos:(ref 0)
(* TODO: avoid num and prime at the head of a word *)
let is_word_char c =
Glib.Unichar.isalnum c || c = underscore || c = prime
let starts_word (it:GText.iter) =
prerr_endline ("Starts word ? '"^(Glib.Utf8.from_unichar it#char)^"'");
(not it#copy#nocopy#backward_char ||
(let c = it#backward_char#char in
not (is_word_char c)))
let ends_word (it:GText.iter) =
(not it#copy#nocopy#forward_char ||
let c = it#forward_char#char in
not (is_word_char c)
)
let inside_word (it:GText.iter) =
let c = it#char in
not (starts_word it) &&
not (ends_word it) &&
is_word_char c
let is_on_word_limit (it:GText.iter) = inside_word it || ends_word it
let find_word_start (it:GText.iter) =
let rec step_to_start it =
prerr_endline "Find word start";
if not it#nocopy#backward_char then
(prerr_endline "find_word_start: cannot backward"; it)
else if is_word_char it#char
then step_to_start it
else (it#nocopy#forward_char;
prerr_endline ("Word start at: "^(string_of_int it#offset));it)
in
step_to_start it#copy
let find_word_end (it:GText.iter) =
let rec step_to_end (it:GText.iter) =
prerr_endline "Find word end";
let c = it#char in
if c<>0 && is_word_char c then (
ignore (it#nocopy#forward_char);
step_to_end it
) else (
prerr_endline ("Word end at: "^(string_of_int it#offset));
it)
in
step_to_end it#copy
let get_word_around (it:GText.iter) =
let start = find_word_start it in
let stop = find_word_end it in
start,stop
let rec complete_backward w (it:GText.iter) =
prerr_endline "Complete backward...";
match it#backward_search w with
| None -> (prerr_endline "backward_search failed";None)
| Some (start,stop) ->
prerr_endline ("complete_backward got a match:"^(string_of_int start#offset)^(string_of_int stop#offset));
if starts_word start then
let ne = find_word_end stop in
if ne#compare stop = 0
then complete_backward w start
else Some (start,stop,ne)
else complete_backward w start
let rec complete_forward w (it:GText.iter) =
prerr_endline "Complete forward...";
match it#forward_search w with
| None -> None
| Some (start,stop) ->
if starts_word start then
let ne = find_word_end stop in
if ne#compare stop = 0 then
complete_forward w stop
else Some (stop,stop,ne)
else complete_forward w stop
let find_comment_end (start:GText.iter) =
let rec find_nested_comment (search_start:GText.iter) (search_end:GText.iter) (comment_end:GText.iter) =
match (search_start#forward_search ~limit:search_end "(*"),(comment_end#forward_search "*)") with
| None,_ -> comment_end
| Some _, None -> raise Not_found
| Some (_,next_search_start),Some (next_search_end,next_comment_end) ->
find_nested_comment next_search_start next_search_end next_comment_end
in
match start#forward_search "*)" with
| None -> raise Not_found
| Some (search_end,comment_end) -> find_nested_comment start search_end comment_end
let rec find_string_end (start:GText.iter) =
let dblquote = int_of_char '"' in
let rec escaped_dblquote c =
(c#char = dblquote) && not (escaped_dblquote c#backward_char)
in
match start#forward_search "\"" with
| None -> raise Not_found
| Some (stop,next_start) ->
if escaped_dblquote stop#backward_char
then find_string_end next_start
else next_start
let rec find_next_sentence (from:GText.iter) =
match (from#forward_search ".") with
| None -> raise Not_found
| Some (non_vernac_search_end,next_sentence) ->
match from#forward_search ~limit:non_vernac_search_end "(*",from#forward_search ~limit:non_vernac_search_end "\"" with
| None,None ->
if Glib.Unichar.isspace next_sentence#char || next_sentence#compare next_sentence#forward_char == 0
then next_sentence else find_next_sentence next_sentence
| None,Some (_,string_search_start) -> find_next_sentence (find_string_end string_search_start)
| Some (_,comment_search_start),None -> find_next_sentence (find_comment_end comment_search_start)
| Some (_,comment_search_start),Some (_,string_search_start) ->
find_next_sentence (
if comment_search_start#compare string_search_start < 0
then find_comment_end comment_search_start
else find_string_end string_search_start)
let find_nearest_forward (cursor:GText.iter) targets =
let fold_targets acc target =
match cursor#forward_search target,acc with
| Some (t_start,_),Some nearest when (t_start#compare nearest < 0) -> Some t_start
| Some (t_start,_),None -> Some t_start
| _ -> acc
in
match List.fold_left fold_targets None targets with
| None -> raise Not_found
| Some nearest -> nearest
let find_nearest_backward (cursor:GText.iter) targets =
let fold_targets acc target =
match cursor#backward_search target,acc with
| Some (t_start,_),Some nearest when (t_start#compare nearest > 0) -> Some t_start
| Some (t_start,_),None -> Some t_start
| _ -> acc
in
match List.fold_left fold_targets None targets with
| None -> raise Not_found
| Some nearest -> nearest
| null | https://raw.githubusercontent.com/herbelin/coq-hh/296d03d5049fea661e8bdbaf305ed4bf6d2001d2/ide/gtk_parsing.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
TODO: avoid num and prime at the head of a word | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Ideutils
let underscore = Glib.Utf8.to_unichar "_" ~pos:(ref 0)
let arobase = Glib.Utf8.to_unichar "@" ~pos:(ref 0)
let prime = Glib.Utf8.to_unichar "'" ~pos:(ref 0)
let bn = Glib.Utf8.to_unichar "\n" ~pos:(ref 0)
let space = Glib.Utf8.to_unichar " " ~pos:(ref 0)
let tab = Glib.Utf8.to_unichar "\t" ~pos:(ref 0)
let is_word_char c =
Glib.Unichar.isalnum c || c = underscore || c = prime
let starts_word (it:GText.iter) =
prerr_endline ("Starts word ? '"^(Glib.Utf8.from_unichar it#char)^"'");
(not it#copy#nocopy#backward_char ||
(let c = it#backward_char#char in
not (is_word_char c)))
let ends_word (it:GText.iter) =
(not it#copy#nocopy#forward_char ||
let c = it#forward_char#char in
not (is_word_char c)
)
let inside_word (it:GText.iter) =
let c = it#char in
not (starts_word it) &&
not (ends_word it) &&
is_word_char c
let is_on_word_limit (it:GText.iter) = inside_word it || ends_word it
let find_word_start (it:GText.iter) =
let rec step_to_start it =
prerr_endline "Find word start";
if not it#nocopy#backward_char then
(prerr_endline "find_word_start: cannot backward"; it)
else if is_word_char it#char
then step_to_start it
else (it#nocopy#forward_char;
prerr_endline ("Word start at: "^(string_of_int it#offset));it)
in
step_to_start it#copy
let find_word_end (it:GText.iter) =
let rec step_to_end (it:GText.iter) =
prerr_endline "Find word end";
let c = it#char in
if c<>0 && is_word_char c then (
ignore (it#nocopy#forward_char);
step_to_end it
) else (
prerr_endline ("Word end at: "^(string_of_int it#offset));
it)
in
step_to_end it#copy
let get_word_around (it:GText.iter) =
let start = find_word_start it in
let stop = find_word_end it in
start,stop
let rec complete_backward w (it:GText.iter) =
prerr_endline "Complete backward...";
match it#backward_search w with
| None -> (prerr_endline "backward_search failed";None)
| Some (start,stop) ->
prerr_endline ("complete_backward got a match:"^(string_of_int start#offset)^(string_of_int stop#offset));
if starts_word start then
let ne = find_word_end stop in
if ne#compare stop = 0
then complete_backward w start
else Some (start,stop,ne)
else complete_backward w start
let rec complete_forward w (it:GText.iter) =
prerr_endline "Complete forward...";
match it#forward_search w with
| None -> None
| Some (start,stop) ->
if starts_word start then
let ne = find_word_end stop in
if ne#compare stop = 0 then
complete_forward w stop
else Some (stop,stop,ne)
else complete_forward w stop
let find_comment_end (start:GText.iter) =
let rec find_nested_comment (search_start:GText.iter) (search_end:GText.iter) (comment_end:GText.iter) =
match (search_start#forward_search ~limit:search_end "(*"),(comment_end#forward_search "*)") with
| None,_ -> comment_end
| Some _, None -> raise Not_found
| Some (_,next_search_start),Some (next_search_end,next_comment_end) ->
find_nested_comment next_search_start next_search_end next_comment_end
in
match start#forward_search "*)" with
| None -> raise Not_found
| Some (search_end,comment_end) -> find_nested_comment start search_end comment_end
let rec find_string_end (start:GText.iter) =
let dblquote = int_of_char '"' in
let rec escaped_dblquote c =
(c#char = dblquote) && not (escaped_dblquote c#backward_char)
in
match start#forward_search "\"" with
| None -> raise Not_found
| Some (stop,next_start) ->
if escaped_dblquote stop#backward_char
then find_string_end next_start
else next_start
let rec find_next_sentence (from:GText.iter) =
match (from#forward_search ".") with
| None -> raise Not_found
| Some (non_vernac_search_end,next_sentence) ->
match from#forward_search ~limit:non_vernac_search_end "(*",from#forward_search ~limit:non_vernac_search_end "\"" with
| None,None ->
if Glib.Unichar.isspace next_sentence#char || next_sentence#compare next_sentence#forward_char == 0
then next_sentence else find_next_sentence next_sentence
| None,Some (_,string_search_start) -> find_next_sentence (find_string_end string_search_start)
| Some (_,comment_search_start),None -> find_next_sentence (find_comment_end comment_search_start)
| Some (_,comment_search_start),Some (_,string_search_start) ->
find_next_sentence (
if comment_search_start#compare string_search_start < 0
then find_comment_end comment_search_start
else find_string_end string_search_start)
let find_nearest_forward (cursor:GText.iter) targets =
let fold_targets acc target =
match cursor#forward_search target,acc with
| Some (t_start,_),Some nearest when (t_start#compare nearest < 0) -> Some t_start
| Some (t_start,_),None -> Some t_start
| _ -> acc
in
match List.fold_left fold_targets None targets with
| None -> raise Not_found
| Some nearest -> nearest
let find_nearest_backward (cursor:GText.iter) targets =
let fold_targets acc target =
match cursor#backward_search target,acc with
| Some (t_start,_),Some nearest when (t_start#compare nearest > 0) -> Some t_start
| Some (t_start,_),None -> Some t_start
| _ -> acc
in
match List.fold_left fold_targets None targets with
| None -> raise Not_found
| Some nearest -> nearest
|
990ad45b9cbc9c959003d1b508923f258eba20fe10d0c2d62eb0107c1701ffbd | tomfaulhaber/excel-templates | charts.clj | (ns excel-templates.charts
(:import [org.apache.commons.lang3 StringEscapeUtils]
[org.apache.poi.xssf.usermodel XSSFChartSheet]
[org.openxmlformats.schemas.drawingml.x2006.chart CTChart$Factory])
(:require [clojure.data.zip :as zf]
[clojure.data.zip.xml :as zx]
[clojure.set :as set]
[clojure.string :as str]
[clojure.xml :as xml]
[clojure.walk :as walk]
[clojure.zip :as zip]
[excel-templates.formulas :as fo]))
POI uses Java class wrappers based on Apache XML Beans to manage the contents of Office files .
However the set of classes to describe charts is incredibly complex , so to avoid having a million
special cases , I pull out the XML and edit that directly and then replace the original object .
This works better because there are only a few common cases at the leaves of the XML tree that
;;; we need to transform.
Manipulation of the POI objects for charts
(defmacro mjuxt
"Like juxt, but for Java methods"
[& methods]
`(juxt ~@(map #(list 'memfn %) methods)))
TODO createDrawingPatriarch should be replaced by getDrawingPatriarch when that 's available in POI 3.12
(defn get-charts
"Get the charts from a worksheet"
[sheet]
(-> sheet .createDrawingPatriarch .getCharts))
(defn has-chart?
"Return true if the sheet has any charts on it"
[sheet]
(pos? (count (get-charts sheet))))
(defn get-xml
"Get the XML representation of a chart"
[chart]
(-> chart .getCTChart .xmlText))
(defn set-xml
"Set new XML for the chart"
[chart xml-str]
(let [new-chart (CTChart$Factory/parse xml-str)]
(-> chart .getCTChart (.set new-chart))))
;;; XML transformation for charts
(defn parse-xml
"Parse the XML string using clojure.xml and return a zipper"
[xml-string]
(-> xml-string
(.getBytes (java.nio.charset.Charset/forName "UTF-8"))
(java.io.ByteArrayInputStream.)
xml/parse
zip/xml-zip))
(defn escape-strings
"Escape any illegal XML strings"
[tree]
(walk/postwalk
#(if (and (map? %) (contains? % :content))
(assoc % :content (seq (for [e (:content %)]
(if (string? e)
(StringEscapeUtils/escapeXml11 e)
e))))
%)
tree))
(defn emit-xml
"Generate an XML string from a zipper using clojure.xml"
[loc]
(-> (with-out-str (-> loc zip/root escape-strings xml/emit))
(str/replace #"^.*\n" "")
(str/replace #"(\r?\n|\r)" "")))
(defn transform-formula
"Transform a single chart formula according to the translation table"
[sheet translation-table formula]
(fo/translate-formula translation-table (.getWorkbook sheet) sheet [2000000 2000000] formula))
tree - edit is based on a blog post by at
;;; -xml-editing-using-zippers-in-clojure/
(defn tree-loc-edit
"The rawer version of tree edit, this operates on a loc rather than a node.
As a result, it allows for non-local manipulation of the tree."
[zipper matcher editor & colls]
(loop [loc zipper
colls colls]
(if (zip/end? loc)
loc
(if (matcher loc)
(let [new-loc (apply editor loc (map first colls))]
(recur (zip/next new-loc) (map next colls)))
(recur (zip/next loc) colls)))))
(defn tree-edit
"Take a zipper, a function that matches a pattern in the tree,
and a function that edits the current location in the tree. Examine the tree
nodes in depth-first order, determine whether the matcher matches, and if so
apply the editor.
Optional colls are used as in clojure.core/map with one element of each coll passed
as an argument to editor in sequence. These will be nil padded if necessary if
the number of matches is longer that the length of the collection."
[zipper matcher editor & colls]
(apply
tree-loc-edit
zipper matcher
(fn [loc & args]
(apply zip/edit loc editor args))
colls))
(defn formula?
"Return true if the node at the loc is a formula"
[loc]
(= :c:f (-> loc zip/node :tag)))
(defn series?
"Return true if the node at the loc is a series"
[loc]
(= :c:ser (-> loc zip/node :tag)))
(defn transform-xml
"Transform the zipper representing the chart into a zipper with expansions"
[sheet translation-table loc]
(letfn [(editor [node]
(assoc node
:content [(->> node :content first (transform-formula sheet translation-table))]))]
(tree-edit loc formula? editor)))
(defn chart-transform
"Transform the formulas in the XML representation of a chart"
[sheet translation-table chart-xml]
(->> chart-xml parse-xml (transform-xml sheet translation-table) emit-xml))
;;; Combine the above to edit all charts in a sheet
(defn transform-charts
"Transform the charts in a sheet according to the translation table"
[sheet translation-table]
( println ( str " Transforming sheet " ( .getSheetName sheet ) " ( " ( - > sheet .getWorkbook ( .getSheetIndex sheet ) ) " ) " ) )
;; (println (str "relations = " (-> sheet .createDrawingPatriarch .getRelations)))
(doseq [chart (get-charts sheet)]
( println " xform chart " )
(->> chart get-xml (chart-transform sheet translation-table) (set-xml chart))))
(defn relocate-formula
"Relocate a single chart formula from old-sheet to new-sheet"
[sheet old-index new-index formula]
(fo/relocate-formula (.getWorkbook sheet) sheet old-index new-index formula))
(defn relocate-xml
"Find the formulas in the XML that refer to the sheet at old-index and make them point to the sheet at new-index"
[sheet old-index new-index loc]
(letfn [(editor [node]
(assoc node
:content [(->> node :content first (relocate-formula sheet old-index new-index))]))]
(tree-edit loc formula? editor)))
(defn expand-series
"Expand a single series into 0 or more destination series leaving the cursor such that zip/next
will return the same result as when called."
[sheet src-sheet dst-sheets series-loc]
(let [series-formulas (mapcat :content (zx/xml-> series-loc zf/descendants (zx/tag= :c:f) zip/node))
sheet-refs (reduce
set/union
(map
(partial fo/external-sheets (.getWorkbook sheet) sheet)
series-formulas))
wb (.getWorkbook sheet)
src-index (.getSheetIndex wb src-sheet)]
(if (sheet-refs src-sheet)
(let [series-xml (-> series-loc zip/node zip/xml-zip)
base-loc (zip/remove series-loc)]
(letfn [(add-series [loc dst-sheet]
(let [dst-index (.getSheetIndex wb dst-sheet)
new-xml (zip/node (relocate-xml sheet src-index dst-index series-xml))]
(zip/right (zip/insert-right loc new-xml))))]
(reduce add-series base-loc dst-sheets)))
series-loc)))
(defn reindex-series
"After modifying a chart, make sure that the indices and order of the series is correct"
[key values chart-xml]
(letfn [(editor [loc index]
(zip/edit (zx/xml1-> loc (zx/tag= key)) assoc-in [:attrs :val] (str index)))]
(tree-loc-edit chart-xml series? editor values)))
(defn expand-all-series
"Replicate the various series"
[sheet src-sheet dst-sheets chart-xml]
(tree-loc-edit chart-xml series? (partial expand-series sheet src-sheet dst-sheets)))
(defn px
"Put this in the middle of a thread op to print the current state and keep threading the operand"
[x]
(println "px:" x)
x)
(defn expand-xml-str
"Replace any series in a chart that references a sheet that's being cloned to point to all
the clones. "
[sheet src-sheet dst-sheets xml-str]
(->> xml-str
parse-xml
(expand-all-series sheet src-sheet dst-sheets)
zip/root
zip/xml-zip
(reindex-series :c:idx (range))
zip/root
zip/xml-zip
(reindex-series :c:order (range))
emit-xml))
(defn expand-charts
"Replace any series in charts on the sheet that reference src-sheet with multiple series
each referencing a single element of dst-sheets"
[sheet src-sheet dst-sheets]
(doseq [chart (get-charts sheet)]
(->> chart
get-xml
(expand-xml-str sheet src-sheet dst-sheets)
(set-xml chart))))
;;; When we duplicate a sheet with charts on it, we need to make sure
;;; that any charts on that sheet point to the new sheet in any places
;;; where they were pointing to the base sheet
(defn chart-change-sheet
"Handle a single chart that was duplicated from an old sheet to a new sheet"
[sheet old-index new-index chart-xml]
(->> chart-xml parse-xml (relocate-xml sheet old-index new-index) emit-xml))
(defn change-sheet
"Update any reference in the charts on this sheet that points to the base sheet to
point to this sheet"
[sheet old-index new-index]
(println (str "Changing sheet " (.getSheetName sheet) "(" (-> sheet .getWorkbook (.getSheetIndex sheet)) ") from " old-index " to " new-index))
(println (str "relations = " (-> sheet .createDrawingPatriarch .getRelations count)))
(doseq [chart (get-charts sheet)]
(println "found chart")
(->> chart get-xml (chart-change-sheet sheet old-index new-index) (set-xml chart))))
;;; Code for copying charts when we're duplicating a sheet
Because POI ca n't clone a sheet with charts on it , we have to do the
;;; following:
1 ) Get the data about all the charts that are on worksheets
2 ) Delete charts from the worksheets ( leave charts on the chartsheets , because they 're different )
3 ) Rename the charts on chartsheets to be , chart2 , etc . because of the way POI creates
;;; new charts
4 ) Add the charts back onto the sheets after they 've been cloned ( we do all worksheets because
;;; it's easier that restricting to only cloned ones).
5 ) Make any charts that point to the new cloned charts have the right references
6 ) Do the same for all the saved charts since some of them wo n't have been added back into the
;;; sheets yet.
;;;
;;; The logic to do this is split between here and create-missing-sheets in build.clj
(defn chart-sheet?
"Return true if this sheet is a chart sheet"
[sheet]
(instance? XSSFChartSheet sheet))
(defn anchors-by-id
"Gets a map of anchor objects by ID that show where the graphic with that ID is on the sheet"
[sheet]
(let [anchors (-> sheet .createDrawingPatriarch .getCTDrawing .getTwoCellAnchorList)]
(into {} (for [anchor anchors]
[(-> anchor .getGraphicFrame .getGraphic .getGraphicData .getDomNode
.getChildNodes (.item 0) (.getAttribute "r:id"))
anchor]))))
(defn get-part-id
"Get the part id for a document part in the drawing patriarch"
[sheet part]
(.getRelationId (.createDrawingPatriarch sheet) part))
(defn get-charts-and-anchors
"Get maps representing each chart on the sheet along with its anchor"
[sheet]
(let [anchors (anchors-by-id sheet)]
(for [chart (get-charts sheet)]
{:chart chart, :anchor (anchors (get-part-id sheet chart))})))
(defn new-anchor
"Get an anchor for a duplicated chart based on an anchor pulled from the original"
[sheet old-anchor]
(let [from (.getFrom old-anchor)
to (.getTo old-anchor)]
(.createAnchor (.createDrawingPatriarch sheet)
(.getColOff from) (.getRowOff from)
(.getColOff to) (.getRowOff to)
(.getCol from) (.getRow from)
(.getCol to) (.getRow to))))
(defn get-anchor-location
"Get the location info from an anchor so we can create a new one later"
[anchor]
(when anchor
(zipmap [:from :to]
(map (comp (partial zipmap [:col-off :row-off :col :row])
(mjuxt getColOff getRowOff getCol getRow))
((mjuxt getFrom getTo) anchor)))))
(defn part-path
"Get the path to this part for this object in the zip file"
[part]
(-> part .getPackagePart .getPartName .getName (subs 1)))
(defn rels-path
"Get the path to the relationship definitions for this object in the zip file"
[part]
(let [[_ head tail] (re-matches #"^(.*)/([^/]+)" (part-path part))]
(str head "/_rels/" tail ".rels")))
(defn get-chart-data
"Get all the data the we need to delete and then recreate the charts for this sheet"
[sheet]
(let [anchors (anchors-by-id sheet)]
(for [chart (get-charts sheet)
:let [drawing (.createDrawingPatriarch sheet)
chart-id (get-part-id sheet chart)]]
{:sheet (.getSheetName sheet)
:chart-sheet? (chart-sheet? sheet)
:chart-path (part-path chart)
:drawing-path (part-path drawing)
:drawing-rels (rels-path drawing)
:chart-id chart-id
:chart-location (get-anchor-location (anchors chart-id))
:chart-xml (get-xml chart)})))
(defn chart-sheets
"filter the chart data for charts from chart sheets only"
[chart-data]
(filter :chart-sheet? chart-data))
(defn work-sheets
"filter the chart data for charts from worksheets only"
[chart-data]
(filter (complement :chart-sheet?) chart-data))
(defn remove-charts
"Returns a map of chart names to a map with :delete set to true. This will cause the chart objects to be
dropped."
[chart-data]
(into {}
(for [chart (work-sheets chart-data)] [(:chart-path chart) {:delete true}])))
(defn remove-drawing-rels
"Returns a map of relationship sheets to a function that will remove the correct relationships on each one"
[chart-data]
(let [ids-by-rels (fo/map-values
#(set (map :chart-id %))
(group-by :drawing-rels (work-sheets chart-data)))]
(fo/map-values
(fn [id-set]
{:edit
(fn [xml-data]
(assoc xml-data :content (filter #(not (id-set (get-in % [:attrs :Id])))
(:content xml-data))))})
ids-by-rels)))
(defn remove-anchors
"Returns a map of drawing sheets to functions that will move the anchors corresponding to the charts"
[chart-data]
(let [ids-by-drawings (fo/map-values
#(set (map :chart-id %))
(group-by :drawing-path (work-sheets chart-data)))]
(fo/map-values
(fn [id-set]
{:edit
(fn [xml-data]
(loop [data xml-data]
(if-let [new-data (zx/xml1->
(zip/xml-zip data)
zf/descendants
(zx/tag= :c:chart)
#(boolean (id-set (zx/attr % :r:id)))
zf/ancestors
(zx/tag= :xdr:twoCellAnchor)
zip/remove
zip/root)]
(recur new-data)
data)))})
ids-by-drawings)))
(defn drawing-rel
"Change a chart reference to be relative to the drawing object"
[link]
(.replaceFirst link "^xl/" "../"))
(defn renumber-chart-sheets
"Returns a map with instructions about how to renumber the charts on chart sheets so that they
a 1..n with no holes so that POI can re-add the charts on worksheets correctly."
[chart-data]
(let [chart-sheet-data (->> chart-data
(filter :chart-sheet?)
(map-indexed #(assoc %2 :new-chart-path
(format "xl/charts/chart%d.xml" (inc %1)))))]
(apply
merge
(for [c chart-sheet-data
:let [path (:chart-path c)
rel-path (drawing-rel path)
new-path (:new-chart-path c)
rels (:drawing-rels c)]]
{path {:rename new-path}
rels {:edit (fn [xml-data]
(assoc xml-data
:content (map #(if (= rel-path (get-in % [:attrs :Target]))
(assoc-in % [:attrs :Target] (drawing-rel new-path))
%)
(:content xml-data))))}}))))
(defn chart-removal-rules
"Combine all the rules to remove charts from this workbook"
[chart-data]
(apply merge
(map #(% chart-data)
[remove-charts remove-drawing-rels remove-anchors renumber-chart-sheets])))
(defn expand-chart-data
"Expand the xml charts that we've captured if they have references to sheets that are being cloned"
[workbook src-sheet dst-sheets chart-data]
(doall
(for [{:keys [sheet chart-xml] :as chart} chart-data
:let [sheet-obj (.getSheet workbook sheet)]
:when sheet-obj] ;;; if sheet-obj is nil, this chart has already been added back
(assoc chart :chart-xml (expand-xml-str sheet-obj src-sheet dst-sheets chart-xml)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; When a chart refers to a series on a sheet that's been duplicated, duplicate the series to match
(defn chart-formulas
"Get all the formulas in each chart on the sheet"
[sheet]
(for [chart (get-charts sheet)
:let [chart-xml (-> chart get-xml parse-xml)]]
(mapcat :content (zx/xml-> chart-xml zf/descendants (zx/tag= :c:f) zip/node))))
| null | https://raw.githubusercontent.com/tomfaulhaber/excel-templates/17871b86a41de4a0b3a7bba9d390439b42c97c3c/src/excel_templates/charts.clj | clojure | we need to transform.
XML transformation for charts
-xml-editing-using-zippers-in-clojure/
Combine the above to edit all charts in a sheet
(println (str "relations = " (-> sheet .createDrawingPatriarch .getRelations)))
When we duplicate a sheet with charts on it, we need to make sure
that any charts on that sheet point to the new sheet in any places
where they were pointing to the base sheet
Code for copying charts when we're duplicating a sheet
following:
new charts
it's easier that restricting to only cloned ones).
sheets yet.
The logic to do this is split between here and create-missing-sheets in build.clj
if sheet-obj is nil, this chart has already been added back
When a chart refers to a series on a sheet that's been duplicated, duplicate the series to match | (ns excel-templates.charts
(:import [org.apache.commons.lang3 StringEscapeUtils]
[org.apache.poi.xssf.usermodel XSSFChartSheet]
[org.openxmlformats.schemas.drawingml.x2006.chart CTChart$Factory])
(:require [clojure.data.zip :as zf]
[clojure.data.zip.xml :as zx]
[clojure.set :as set]
[clojure.string :as str]
[clojure.xml :as xml]
[clojure.walk :as walk]
[clojure.zip :as zip]
[excel-templates.formulas :as fo]))
POI uses Java class wrappers based on Apache XML Beans to manage the contents of Office files .
However the set of classes to describe charts is incredibly complex , so to avoid having a million
special cases , I pull out the XML and edit that directly and then replace the original object .
This works better because there are only a few common cases at the leaves of the XML tree that
Manipulation of the POI objects for charts
(defmacro mjuxt
"Like juxt, but for Java methods"
[& methods]
`(juxt ~@(map #(list 'memfn %) methods)))
TODO createDrawingPatriarch should be replaced by getDrawingPatriarch when that 's available in POI 3.12
(defn get-charts
"Get the charts from a worksheet"
[sheet]
(-> sheet .createDrawingPatriarch .getCharts))
(defn has-chart?
"Return true if the sheet has any charts on it"
[sheet]
(pos? (count (get-charts sheet))))
(defn get-xml
"Get the XML representation of a chart"
[chart]
(-> chart .getCTChart .xmlText))
(defn set-xml
"Set new XML for the chart"
[chart xml-str]
(let [new-chart (CTChart$Factory/parse xml-str)]
(-> chart .getCTChart (.set new-chart))))
(defn parse-xml
"Parse the XML string using clojure.xml and return a zipper"
[xml-string]
(-> xml-string
(.getBytes (java.nio.charset.Charset/forName "UTF-8"))
(java.io.ByteArrayInputStream.)
xml/parse
zip/xml-zip))
(defn escape-strings
"Escape any illegal XML strings"
[tree]
(walk/postwalk
#(if (and (map? %) (contains? % :content))
(assoc % :content (seq (for [e (:content %)]
(if (string? e)
(StringEscapeUtils/escapeXml11 e)
e))))
%)
tree))
(defn emit-xml
"Generate an XML string from a zipper using clojure.xml"
[loc]
(-> (with-out-str (-> loc zip/root escape-strings xml/emit))
(str/replace #"^.*\n" "")
(str/replace #"(\r?\n|\r)" "")))
(defn transform-formula
"Transform a single chart formula according to the translation table"
[sheet translation-table formula]
(fo/translate-formula translation-table (.getWorkbook sheet) sheet [2000000 2000000] formula))
tree - edit is based on a blog post by at
(defn tree-loc-edit
"The rawer version of tree edit, this operates on a loc rather than a node.
As a result, it allows for non-local manipulation of the tree."
[zipper matcher editor & colls]
(loop [loc zipper
colls colls]
(if (zip/end? loc)
loc
(if (matcher loc)
(let [new-loc (apply editor loc (map first colls))]
(recur (zip/next new-loc) (map next colls)))
(recur (zip/next loc) colls)))))
(defn tree-edit
"Take a zipper, a function that matches a pattern in the tree,
and a function that edits the current location in the tree. Examine the tree
nodes in depth-first order, determine whether the matcher matches, and if so
apply the editor.
Optional colls are used as in clojure.core/map with one element of each coll passed
as an argument to editor in sequence. These will be nil padded if necessary if
the number of matches is longer that the length of the collection."
[zipper matcher editor & colls]
(apply
tree-loc-edit
zipper matcher
(fn [loc & args]
(apply zip/edit loc editor args))
colls))
(defn formula?
"Return true if the node at the loc is a formula"
[loc]
(= :c:f (-> loc zip/node :tag)))
(defn series?
"Return true if the node at the loc is a series"
[loc]
(= :c:ser (-> loc zip/node :tag)))
(defn transform-xml
"Transform the zipper representing the chart into a zipper with expansions"
[sheet translation-table loc]
(letfn [(editor [node]
(assoc node
:content [(->> node :content first (transform-formula sheet translation-table))]))]
(tree-edit loc formula? editor)))
(defn chart-transform
"Transform the formulas in the XML representation of a chart"
[sheet translation-table chart-xml]
(->> chart-xml parse-xml (transform-xml sheet translation-table) emit-xml))
(defn transform-charts
"Transform the charts in a sheet according to the translation table"
[sheet translation-table]
( println ( str " Transforming sheet " ( .getSheetName sheet ) " ( " ( - > sheet .getWorkbook ( .getSheetIndex sheet ) ) " ) " ) )
(doseq [chart (get-charts sheet)]
( println " xform chart " )
(->> chart get-xml (chart-transform sheet translation-table) (set-xml chart))))
(defn relocate-formula
"Relocate a single chart formula from old-sheet to new-sheet"
[sheet old-index new-index formula]
(fo/relocate-formula (.getWorkbook sheet) sheet old-index new-index formula))
(defn relocate-xml
"Find the formulas in the XML that refer to the sheet at old-index and make them point to the sheet at new-index"
[sheet old-index new-index loc]
(letfn [(editor [node]
(assoc node
:content [(->> node :content first (relocate-formula sheet old-index new-index))]))]
(tree-edit loc formula? editor)))
(defn expand-series
"Expand a single series into 0 or more destination series leaving the cursor such that zip/next
will return the same result as when called."
[sheet src-sheet dst-sheets series-loc]
(let [series-formulas (mapcat :content (zx/xml-> series-loc zf/descendants (zx/tag= :c:f) zip/node))
sheet-refs (reduce
set/union
(map
(partial fo/external-sheets (.getWorkbook sheet) sheet)
series-formulas))
wb (.getWorkbook sheet)
src-index (.getSheetIndex wb src-sheet)]
(if (sheet-refs src-sheet)
(let [series-xml (-> series-loc zip/node zip/xml-zip)
base-loc (zip/remove series-loc)]
(letfn [(add-series [loc dst-sheet]
(let [dst-index (.getSheetIndex wb dst-sheet)
new-xml (zip/node (relocate-xml sheet src-index dst-index series-xml))]
(zip/right (zip/insert-right loc new-xml))))]
(reduce add-series base-loc dst-sheets)))
series-loc)))
(defn reindex-series
"After modifying a chart, make sure that the indices and order of the series is correct"
[key values chart-xml]
(letfn [(editor [loc index]
(zip/edit (zx/xml1-> loc (zx/tag= key)) assoc-in [:attrs :val] (str index)))]
(tree-loc-edit chart-xml series? editor values)))
(defn expand-all-series
"Replicate the various series"
[sheet src-sheet dst-sheets chart-xml]
(tree-loc-edit chart-xml series? (partial expand-series sheet src-sheet dst-sheets)))
(defn px
"Put this in the middle of a thread op to print the current state and keep threading the operand"
[x]
(println "px:" x)
x)
(defn expand-xml-str
"Replace any series in a chart that references a sheet that's being cloned to point to all
the clones. "
[sheet src-sheet dst-sheets xml-str]
(->> xml-str
parse-xml
(expand-all-series sheet src-sheet dst-sheets)
zip/root
zip/xml-zip
(reindex-series :c:idx (range))
zip/root
zip/xml-zip
(reindex-series :c:order (range))
emit-xml))
(defn expand-charts
"Replace any series in charts on the sheet that reference src-sheet with multiple series
each referencing a single element of dst-sheets"
[sheet src-sheet dst-sheets]
(doseq [chart (get-charts sheet)]
(->> chart
get-xml
(expand-xml-str sheet src-sheet dst-sheets)
(set-xml chart))))
(defn chart-change-sheet
"Handle a single chart that was duplicated from an old sheet to a new sheet"
[sheet old-index new-index chart-xml]
(->> chart-xml parse-xml (relocate-xml sheet old-index new-index) emit-xml))
(defn change-sheet
"Update any reference in the charts on this sheet that points to the base sheet to
point to this sheet"
[sheet old-index new-index]
(println (str "Changing sheet " (.getSheetName sheet) "(" (-> sheet .getWorkbook (.getSheetIndex sheet)) ") from " old-index " to " new-index))
(println (str "relations = " (-> sheet .createDrawingPatriarch .getRelations count)))
(doseq [chart (get-charts sheet)]
(println "found chart")
(->> chart get-xml (chart-change-sheet sheet old-index new-index) (set-xml chart))))
Because POI ca n't clone a sheet with charts on it , we have to do the
1 ) Get the data about all the charts that are on worksheets
2 ) Delete charts from the worksheets ( leave charts on the chartsheets , because they 're different )
3 ) Rename the charts on chartsheets to be , chart2 , etc . because of the way POI creates
4 ) Add the charts back onto the sheets after they 've been cloned ( we do all worksheets because
5 ) Make any charts that point to the new cloned charts have the right references
6 ) Do the same for all the saved charts since some of them wo n't have been added back into the
(defn chart-sheet?
"Return true if this sheet is a chart sheet"
[sheet]
(instance? XSSFChartSheet sheet))
(defn anchors-by-id
"Gets a map of anchor objects by ID that show where the graphic with that ID is on the sheet"
[sheet]
(let [anchors (-> sheet .createDrawingPatriarch .getCTDrawing .getTwoCellAnchorList)]
(into {} (for [anchor anchors]
[(-> anchor .getGraphicFrame .getGraphic .getGraphicData .getDomNode
.getChildNodes (.item 0) (.getAttribute "r:id"))
anchor]))))
(defn get-part-id
"Get the part id for a document part in the drawing patriarch"
[sheet part]
(.getRelationId (.createDrawingPatriarch sheet) part))
(defn get-charts-and-anchors
"Get maps representing each chart on the sheet along with its anchor"
[sheet]
(let [anchors (anchors-by-id sheet)]
(for [chart (get-charts sheet)]
{:chart chart, :anchor (anchors (get-part-id sheet chart))})))
(defn new-anchor
"Get an anchor for a duplicated chart based on an anchor pulled from the original"
[sheet old-anchor]
(let [from (.getFrom old-anchor)
to (.getTo old-anchor)]
(.createAnchor (.createDrawingPatriarch sheet)
(.getColOff from) (.getRowOff from)
(.getColOff to) (.getRowOff to)
(.getCol from) (.getRow from)
(.getCol to) (.getRow to))))
(defn get-anchor-location
"Get the location info from an anchor so we can create a new one later"
[anchor]
(when anchor
(zipmap [:from :to]
(map (comp (partial zipmap [:col-off :row-off :col :row])
(mjuxt getColOff getRowOff getCol getRow))
((mjuxt getFrom getTo) anchor)))))
(defn part-path
"Get the path to this part for this object in the zip file"
[part]
(-> part .getPackagePart .getPartName .getName (subs 1)))
(defn rels-path
"Get the path to the relationship definitions for this object in the zip file"
[part]
(let [[_ head tail] (re-matches #"^(.*)/([^/]+)" (part-path part))]
(str head "/_rels/" tail ".rels")))
(defn get-chart-data
"Get all the data the we need to delete and then recreate the charts for this sheet"
[sheet]
(let [anchors (anchors-by-id sheet)]
(for [chart (get-charts sheet)
:let [drawing (.createDrawingPatriarch sheet)
chart-id (get-part-id sheet chart)]]
{:sheet (.getSheetName sheet)
:chart-sheet? (chart-sheet? sheet)
:chart-path (part-path chart)
:drawing-path (part-path drawing)
:drawing-rels (rels-path drawing)
:chart-id chart-id
:chart-location (get-anchor-location (anchors chart-id))
:chart-xml (get-xml chart)})))
(defn chart-sheets
"filter the chart data for charts from chart sheets only"
[chart-data]
(filter :chart-sheet? chart-data))
(defn work-sheets
"filter the chart data for charts from worksheets only"
[chart-data]
(filter (complement :chart-sheet?) chart-data))
(defn remove-charts
"Returns a map of chart names to a map with :delete set to true. This will cause the chart objects to be
dropped."
[chart-data]
(into {}
(for [chart (work-sheets chart-data)] [(:chart-path chart) {:delete true}])))
(defn remove-drawing-rels
"Returns a map of relationship sheets to a function that will remove the correct relationships on each one"
[chart-data]
(let [ids-by-rels (fo/map-values
#(set (map :chart-id %))
(group-by :drawing-rels (work-sheets chart-data)))]
(fo/map-values
(fn [id-set]
{:edit
(fn [xml-data]
(assoc xml-data :content (filter #(not (id-set (get-in % [:attrs :Id])))
(:content xml-data))))})
ids-by-rels)))
(defn remove-anchors
"Returns a map of drawing sheets to functions that will move the anchors corresponding to the charts"
[chart-data]
(let [ids-by-drawings (fo/map-values
#(set (map :chart-id %))
(group-by :drawing-path (work-sheets chart-data)))]
(fo/map-values
(fn [id-set]
{:edit
(fn [xml-data]
(loop [data xml-data]
(if-let [new-data (zx/xml1->
(zip/xml-zip data)
zf/descendants
(zx/tag= :c:chart)
#(boolean (id-set (zx/attr % :r:id)))
zf/ancestors
(zx/tag= :xdr:twoCellAnchor)
zip/remove
zip/root)]
(recur new-data)
data)))})
ids-by-drawings)))
(defn drawing-rel
"Change a chart reference to be relative to the drawing object"
[link]
(.replaceFirst link "^xl/" "../"))
(defn renumber-chart-sheets
"Returns a map with instructions about how to renumber the charts on chart sheets so that they
a 1..n with no holes so that POI can re-add the charts on worksheets correctly."
[chart-data]
(let [chart-sheet-data (->> chart-data
(filter :chart-sheet?)
(map-indexed #(assoc %2 :new-chart-path
(format "xl/charts/chart%d.xml" (inc %1)))))]
(apply
merge
(for [c chart-sheet-data
:let [path (:chart-path c)
rel-path (drawing-rel path)
new-path (:new-chart-path c)
rels (:drawing-rels c)]]
{path {:rename new-path}
rels {:edit (fn [xml-data]
(assoc xml-data
:content (map #(if (= rel-path (get-in % [:attrs :Target]))
(assoc-in % [:attrs :Target] (drawing-rel new-path))
%)
(:content xml-data))))}}))))
(defn chart-removal-rules
"Combine all the rules to remove charts from this workbook"
[chart-data]
(apply merge
(map #(% chart-data)
[remove-charts remove-drawing-rels remove-anchors renumber-chart-sheets])))
(defn expand-chart-data
"Expand the xml charts that we've captured if they have references to sheets that are being cloned"
[workbook src-sheet dst-sheets chart-data]
(doall
(for [{:keys [sheet chart-xml] :as chart} chart-data
:let [sheet-obj (.getSheet workbook sheet)]
(assoc chart :chart-xml (expand-xml-str sheet-obj src-sheet dst-sheets chart-xml)))))
(defn chart-formulas
"Get all the formulas in each chart on the sheet"
[sheet]
(for [chart (get-charts sheet)
:let [chart-xml (-> chart get-xml parse-xml)]]
(mapcat :content (zx/xml-> chart-xml zf/descendants (zx/tag= :c:f) zip/node))))
|
a531a0a81e4cb26dc7f162b471726b60568a922249cca0afad12fa9a393125f1 | athos/syntactic-closure | identifier.clj | (ns identifier
(:use syntactic-closure.core))
(define-syntax foo []
(sc-macro-transformer
(fn [env]
(capture-syntactic-environment
(fn [transformer-env]
(identifier=? transformer-env 'x env 'x))))))
(comment
[(foo) (let [x 2] (foo))] ;=> [true false]
)
| null | https://raw.githubusercontent.com/athos/syntactic-closure/e251b03a199507df4bbc35788230d434d6506634/examples/identifier.clj | clojure | => [true false] | (ns identifier
(:use syntactic-closure.core))
(define-syntax foo []
(sc-macro-transformer
(fn [env]
(capture-syntactic-environment
(fn [transformer-env]
(identifier=? transformer-env 'x env 'x))))))
(comment
)
|
35b61a1f85814e1abe0c1f8185fa64e723ef327cf2ff2da94a088b67f6954d5b | rd--/hsc3 | decay2.help.hs | -- decay2 ; used as an envelope
sinOsc ar 11000 0 * 0.25
f = xLine kr 1 50 20 RemoveSynth
in decay2 (impulse ar f 0) 0.01 0.2 * s
-- decay2 ; compare with decay used as the envelope
let s = fSinOsc ar 600 0 * 0.25
f = xLine kr 1 50 20 RemoveSynth
in decay (impulse ar f 0) 0.2 * s
-- ; drawings ; attack and decay are a difference of two decays , hence inversion
Sound.Sc3.Plot.plot_ugen1 0.05 (decay2 (impulse ar 1 0) 0.001 0.01)
Sound.Sc3.Plot.plot_ugen1 0.05 (decay2 (impulse ar 1 0) 0.01 0.001)
| null | https://raw.githubusercontent.com/rd--/hsc3/024d45b6b5166e5cd3f0142fbf65aeb6ef642d46/Help/Ugen/decay2.help.hs | haskell | decay2 ; used as an envelope
decay2 ; compare with decay used as the envelope
; drawings ; attack and decay are a difference of two decays , hence inversion | sinOsc ar 11000 0 * 0.25
f = xLine kr 1 50 20 RemoveSynth
in decay2 (impulse ar f 0) 0.01 0.2 * s
let s = fSinOsc ar 600 0 * 0.25
f = xLine kr 1 50 20 RemoveSynth
in decay (impulse ar f 0) 0.2 * s
Sound.Sc3.Plot.plot_ugen1 0.05 (decay2 (impulse ar 1 0) 0.001 0.01)
Sound.Sc3.Plot.plot_ugen1 0.05 (decay2 (impulse ar 1 0) 0.01 0.001)
|
6ce47a756c870ed7f8d67318431c78eef3ebc14394a1de33dea3f3ac3e999ca9 | marigold-dev/openapi-router | router.mli | module type Config_Type = sig
type app
type route
type handler
val json_path : string
val doc_path : string
val json_route : string -> route
val doc_route : string -> route
val get : string -> handler -> route
val post : string -> handler -> route
val delete : string -> handler -> route
val put : string -> handler -> route
val options : string -> handler -> route
val head : string -> handler -> route
val patch : string -> handler -> route
val build_routes : route list -> app
end
module Make : functor (Config : Config_Type) -> sig
type t = {
spec : Spec.t;
routes : Config.route list;
}
val empty : t
val title : string -> t -> t
(** Specify Openapi title metadata *)
val description : string -> t -> t
(** Specify Openapi description metadata *)
val terms_of_service : string -> t -> t
(** Specify Openapi terms of service metadata *)
val contact : Spec.contact_object -> t -> t
(** Specify Openapi contact metadata *)
val license : Spec.license_object -> t -> t
(** Specify Openapi license metadata *)
val version : string -> t -> t
(** Specify Openapi version metadata *)
val schema : string -> Json_schema.schema Json_schema.or_ref -> t -> t
(** Add a named component to an application spec *)
val response : string -> Spec.response_object Json_schema.or_ref -> t -> t
(** Add a named component to an application spec *)
val parameter : string -> Spec.parameter_object Json_schema.or_ref -> t -> t
(** Add a named component to an application spec *)
val example : string -> Spec.example_object Json_schema.or_ref -> t -> t
(** Add a named component to an application spec *)
val request_body :
string -> Spec.request_body_object Json_schema.or_ref -> t -> t
(** Add a named component to an application spec *)
val header : string -> Spec.header_object Json_schema.or_ref -> t -> t
(** Add a named component to an application spec *)
val security_scheme :
string -> Spec.security_scheme_object Json_schema.or_ref -> t -> t
(** Add a named component to an application spec *)
val link : string -> Spec.link_object Json_schema.or_ref -> t -> t
(** Add a named component to an application spec *)
val callback : string -> Spec.callback_object Json_schema.or_ref -> t -> t
(** Add a named component to an application spec *)
val schema_ref : string -> 'a Json_schema.or_ref
(** Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)*)
val response_ref : string -> 'a Json_schema.or_ref
(** Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)*)
val parameter_ref : string -> 'a Json_schema.or_ref
(** Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)*)
val example_ref : string -> 'a Json_schema.or_ref
(** Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)*)
val request_body_ref : string -> 'a Json_schema.or_ref
(** Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)*)
val header_ref : string -> 'a Json_schema.or_ref
(** Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)*)
val security_scheme_ref : string -> 'a Json_schema.or_ref
(** Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)*)
val link_ref : string -> 'a Json_schema.or_ref
(** Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)*)
val callback_ref : string -> 'a Json_schema.or_ref
(** Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)*)
val get :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val default_request_body : Spec.request_body_object Json_schema.or_ref
val post :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val delete :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val put :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val options :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val head :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val patch :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val build : t -> Config.app
end
| null | https://raw.githubusercontent.com/marigold-dev/openapi-router/941eb46176482aa931ac2dbe53f082b20d12fc8b/lib/router.mli | ocaml | * Specify Openapi title metadata
* Specify Openapi description metadata
* Specify Openapi terms of service metadata
* Specify Openapi contact metadata
* Specify Openapi license metadata
* Specify Openapi version metadata
* Add a named component to an application spec
* Add a named component to an application spec
* Add a named component to an application spec
* Add a named component to an application spec
* Add a named component to an application spec
* Add a named component to an application spec
* Add a named component to an application spec
* Add a named component to an application spec
* Add a named component to an application spec
* Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)
* Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)
* Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)
* Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)
* Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)
* Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)
* Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)
* Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists)
* Return a JSON reference to a named component of a spec (Note: doesn't confirm that the component actually exists) | module type Config_Type = sig
type app
type route
type handler
val json_path : string
val doc_path : string
val json_route : string -> route
val doc_route : string -> route
val get : string -> handler -> route
val post : string -> handler -> route
val delete : string -> handler -> route
val put : string -> handler -> route
val options : string -> handler -> route
val head : string -> handler -> route
val patch : string -> handler -> route
val build_routes : route list -> app
end
module Make : functor (Config : Config_Type) -> sig
type t = {
spec : Spec.t;
routes : Config.route list;
}
val empty : t
val title : string -> t -> t
val description : string -> t -> t
val terms_of_service : string -> t -> t
val contact : Spec.contact_object -> t -> t
val license : Spec.license_object -> t -> t
val version : string -> t -> t
val schema : string -> Json_schema.schema Json_schema.or_ref -> t -> t
val response : string -> Spec.response_object Json_schema.or_ref -> t -> t
val parameter : string -> Spec.parameter_object Json_schema.or_ref -> t -> t
val example : string -> Spec.example_object Json_schema.or_ref -> t -> t
val request_body :
string -> Spec.request_body_object Json_schema.or_ref -> t -> t
val header : string -> Spec.header_object Json_schema.or_ref -> t -> t
val security_scheme :
string -> Spec.security_scheme_object Json_schema.or_ref -> t -> t
val link : string -> Spec.link_object Json_schema.or_ref -> t -> t
val callback : string -> Spec.callback_object Json_schema.or_ref -> t -> t
val schema_ref : string -> 'a Json_schema.or_ref
val response_ref : string -> 'a Json_schema.or_ref
val parameter_ref : string -> 'a Json_schema.or_ref
val example_ref : string -> 'a Json_schema.or_ref
val request_body_ref : string -> 'a Json_schema.or_ref
val header_ref : string -> 'a Json_schema.or_ref
val security_scheme_ref : string -> 'a Json_schema.or_ref
val link_ref : string -> 'a Json_schema.or_ref
val callback_ref : string -> 'a Json_schema.or_ref
val get :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val default_request_body : Spec.request_body_object Json_schema.or_ref
val post :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val delete :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val put :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val options :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val head :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val patch :
?tags:string list ->
?summary:string ->
?description:string ->
?external_docs:Spec.external_documentation_object ->
?operation_id:string ->
?parameters:Spec.parameter_object Json_schema.or_ref list ->
?request_body:Spec.request_body_object Json_schema.or_ref ->
?responses:Spec.responses_object ->
?callbacks:Spec.callback_object Json_schema.or_ref Json_schema.map ->
?deprecated:bool ->
?security:Json_schema.any ->
?servers:Spec.server_object list ->
string ->
Config.handler ->
t ->
t
val build : t -> Config.app
end
|
32b8478bddc291a1b6ffe43dc66f18265a6437ad2186887354231a3e80209313 | input-output-hk/cardano-ledger-byron | Json.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
module Test.Cardano.Chain.Genesis.Json
( tests
) where
import Cardano.Prelude
import Test.Cardano.Prelude
import Hedgehog (Property)
import Test.Cardano.Chain.Genesis.Example (exampleGenesisData0)
import Test.Cardano.Chain.Genesis.Gen
( genGenesisAvvmBalances
, genGenesisNonAvvmBalances
, genGenesisKeyHashes
)
import Test.Cardano.Chain.Delegation.Gen (genCanonicalCertificate)
import Test.Cardano.Chain.Update.Gen
(genCanonicalProtocolParameters)
import Test.Cardano.Chain.Genesis.Gen
(genCanonicalGenesisData, genCanonicalGenesisDelegation)
import Test.Cardano.Crypto.Gen (feedPM)
import Test.Options (TSGroup, TSProperty, concatTSGroups, eachOfTS)
--------------------------------------------------------------------------------
-- JSON Canonical Tests
--------------------------------------------------------------------------------
ts_roundTripCanonicalCertificate :: TSProperty
ts_roundTripCanonicalCertificate =
eachOfTS 100 (feedPM genCanonicalCertificate) roundTripsCanonicalJsonPretty
ts_roundTripCanonicalGenesisAvvmBalances :: TSProperty
ts_roundTripCanonicalGenesisAvvmBalances =
eachOfTS 100 genGenesisAvvmBalances roundTripsCanonicalJsonPretty
ts_roundTripCanonicalGenesisData :: TSProperty
ts_roundTripCanonicalGenesisData =
eachOfTS 100 (feedPM genCanonicalGenesisData) roundTripsCanonicalJsonPretty
ts_roundTripCanonicalGenesisDelegation :: TSProperty
ts_roundTripCanonicalGenesisDelegation =
eachOfTS 100 (feedPM genCanonicalGenesisDelegation) roundTripsCanonicalJsonPretty
ts_roundTripCanonicalGenesisNonAvvmBalances :: TSProperty
ts_roundTripCanonicalGenesisNonAvvmBalances =
eachOfTS 100 genGenesisNonAvvmBalances roundTripsCanonicalJsonPretty
ts_roundTripCanonicalGenesisKeyHashes :: TSProperty
ts_roundTripCanonicalGenesisKeyHashes =
eachOfTS 100 genGenesisKeyHashes roundTripsCanonicalJsonPretty
ts_roundTripCanonicalProtocolParameters :: TSProperty
ts_roundTripCanonicalProtocolParameters =
eachOfTS 100 genCanonicalProtocolParameters roundTripsCanonicalJsonPretty
--------------------------------------------------------------------------------
GenesisData ( Canonical JSON )
--------------------------------------------------------------------------------
-- Decode-only golden tests for ensuring that, when decoding the legacy
` GenesisData ` canonical JSON format , the ` RequiresNetworkMagic ` field
-- defaults to `RequiresMagic`.
golden_GenesisData0Dec :: Property
golden_GenesisData0Dec =
goldenTestCanonicalJSONDec exampleGenesisData0
"test/golden/json/genesis/GenesisData0_Legacy_HasNetworkMagic"
-------------------------------------------------------------------------------
-- Main test export
-------------------------------------------------------------------------------
tests :: TSGroup
tests = concatTSGroups [const $$discoverGolden, $$discoverPropArg]
| null | https://raw.githubusercontent.com/input-output-hk/cardano-ledger-byron/d309449e6c303a9f0dcc8dcf172df6f0b3195ed5/cardano-ledger/test/Test/Cardano/Chain/Genesis/Json.hs | haskell | ------------------------------------------------------------------------------
JSON Canonical Tests
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Decode-only golden tests for ensuring that, when decoding the legacy
defaults to `RequiresMagic`.
-----------------------------------------------------------------------------
Main test export
----------------------------------------------------------------------------- | # LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
module Test.Cardano.Chain.Genesis.Json
( tests
) where
import Cardano.Prelude
import Test.Cardano.Prelude
import Hedgehog (Property)
import Test.Cardano.Chain.Genesis.Example (exampleGenesisData0)
import Test.Cardano.Chain.Genesis.Gen
( genGenesisAvvmBalances
, genGenesisNonAvvmBalances
, genGenesisKeyHashes
)
import Test.Cardano.Chain.Delegation.Gen (genCanonicalCertificate)
import Test.Cardano.Chain.Update.Gen
(genCanonicalProtocolParameters)
import Test.Cardano.Chain.Genesis.Gen
(genCanonicalGenesisData, genCanonicalGenesisDelegation)
import Test.Cardano.Crypto.Gen (feedPM)
import Test.Options (TSGroup, TSProperty, concatTSGroups, eachOfTS)
ts_roundTripCanonicalCertificate :: TSProperty
ts_roundTripCanonicalCertificate =
eachOfTS 100 (feedPM genCanonicalCertificate) roundTripsCanonicalJsonPretty
ts_roundTripCanonicalGenesisAvvmBalances :: TSProperty
ts_roundTripCanonicalGenesisAvvmBalances =
eachOfTS 100 genGenesisAvvmBalances roundTripsCanonicalJsonPretty
ts_roundTripCanonicalGenesisData :: TSProperty
ts_roundTripCanonicalGenesisData =
eachOfTS 100 (feedPM genCanonicalGenesisData) roundTripsCanonicalJsonPretty
ts_roundTripCanonicalGenesisDelegation :: TSProperty
ts_roundTripCanonicalGenesisDelegation =
eachOfTS 100 (feedPM genCanonicalGenesisDelegation) roundTripsCanonicalJsonPretty
ts_roundTripCanonicalGenesisNonAvvmBalances :: TSProperty
ts_roundTripCanonicalGenesisNonAvvmBalances =
eachOfTS 100 genGenesisNonAvvmBalances roundTripsCanonicalJsonPretty
ts_roundTripCanonicalGenesisKeyHashes :: TSProperty
ts_roundTripCanonicalGenesisKeyHashes =
eachOfTS 100 genGenesisKeyHashes roundTripsCanonicalJsonPretty
ts_roundTripCanonicalProtocolParameters :: TSProperty
ts_roundTripCanonicalProtocolParameters =
eachOfTS 100 genCanonicalProtocolParameters roundTripsCanonicalJsonPretty
GenesisData ( Canonical JSON )
` GenesisData ` canonical JSON format , the ` RequiresNetworkMagic ` field
golden_GenesisData0Dec :: Property
golden_GenesisData0Dec =
goldenTestCanonicalJSONDec exampleGenesisData0
"test/golden/json/genesis/GenesisData0_Legacy_HasNetworkMagic"
tests :: TSGroup
tests = concatTSGroups [const $$discoverGolden, $$discoverPropArg]
|
09fd6707fbe0e024b04eedac8b72767dce8f7b53cdc9da320e70950ad278ee44 | fukamachi/clozure-cl | lambda-list.lisp | -*-Mode : LISP ; Package : CCL -*-
;;;
Copyright ( C ) 2009 Clozure Associates
Copyright ( C ) 1994 - 2001 Digitool , Inc
This file is part of Clozure CL .
;;;
Clozure CL is licensed under the terms of the Lisp Lesser GNU Public
License , known as the LLGPL and distributed with Clozure CL as the
;;; file "LICENSE". The LLGPL consists of a preamble and the LGPL,
which is distributed with Clozure CL as the file " LGPL " . Where these
;;; conflict, the preamble takes precedence.
;;;
;;; Clozure CL is referenced in the preamble as the "LIBRARY."
;;;
;;; The LLGPL is also available online at
;;;
(in-package "CCL")
Compiler functions needed elsewhere
(defun %lfun-info-index (fn)
(and (compiled-function-p fn)
(let ((bits (lfun-bits fn)))
(declare (fixnum bits))
(and (logbitp $lfbits-info-bit bits)
(%i- (uvsize (function-to-function-vector fn))
(if (logbitp $lfbits-noname-bit bits) 2 3))))))
(defun %lfun-info (fn)
(let* ((index (%lfun-info-index fn)))
(if index (%svref (function-to-function-vector fn) index))))
(defun function-source-note (fn)
(getf (%lfun-info fn) '%function-source-note))
(defun %function-acode-string (fn)
(getf (%lfun-info fn) '%function-acode-string))
(defun uncompile-function (fn)
(getf (%lfun-info fn) 'function-lambda-expression ))
used - by : backtrace , arglist
(defun function-symbol-map (fn)
(getf (%lfun-info fn) 'function-symbol-map))
(defun find-source-note-at-pc (fn pc)
;(declare (values source-note start-pc end-pc))
(let* ((function-note (function-source-note fn))
(pc-source-map (getf (%lfun-info fn) 'pc-source-map))
(best-guess -1)
(best-length 0)
(len (length pc-source-map)))
(declare (fixnum best-guess best-length len))
(when (and function-note pc-source-map)
(do ((q 0 (+ q 4)))
((= q len))
(declare (fixnum q))
(let* ((pc-start (aref pc-source-map q))
(pc-end (aref pc-source-map (%i+ q 1))))
(declare (fixnum pc-start pc-end))
(when (and (<= pc-start pc)
(< pc pc-end)
(or (eql best-guess -1)
(< (%i- pc-end pc-start) best-length)))
(setf best-guess q
best-length (- pc-end pc-start)))))
(unless (eql best-guess -1)
(values
(let ((def-pos (source-note-start-pos function-note)))
(make-source-note :source function-note
:filename (source-note-filename function-note)
:start-pos (+ def-pos (aref pc-source-map (+ best-guess 2)))
:end-pos (+ def-pos (aref pc-source-map (+ best-guess 3)))))
(aref pc-source-map best-guess)
(aref pc-source-map (+ best-guess 1)))))))
;;; Lambda-list utilities
;;; Lambda-list verification:
;;; these things MUST be compiled.
(eval-when (:load-toplevel)
(defvar *structured-lambda-list* nil)
(defun parse-body (body env &optional (doc-string-allowed t) &aux
decls
doc
(tail body)
form)
(declare (ignore env))
(loop
(if (endp tail) (return)) ; otherwise, it has a %car and a %cdr
(if (and (stringp (setq form (%car tail))) (%cdr tail))
(if doc-string-allowed
(setq doc form)
(return))
(if (not (and (consp form) (symbolp (%car form))))
(return)
(if (eq (%car form) 'declare)
(push form decls)
(return))))
(setq tail (%cdr tail)))
(return-from parse-body (values tail (nreverse decls) doc)))
) ; end of eval-when (load)
;;; End of verify-lambda-list.lisp
| null | https://raw.githubusercontent.com/fukamachi/clozure-cl/4b0c69452386ae57b08984ed815d9b50b4bcc8a2/compiler/lambda-list.lisp | lisp | Package : CCL -*-
file "LICENSE". The LLGPL consists of a preamble and the LGPL,
conflict, the preamble takes precedence.
Clozure CL is referenced in the preamble as the "LIBRARY."
The LLGPL is also available online at
(declare (values source-note start-pc end-pc))
Lambda-list utilities
Lambda-list verification:
these things MUST be compiled.
otherwise, it has a %car and a %cdr
end of eval-when (load)
End of verify-lambda-list.lisp | Copyright ( C ) 2009 Clozure Associates
Copyright ( C ) 1994 - 2001 Digitool , Inc
This file is part of Clozure CL .
Clozure CL is licensed under the terms of the Lisp Lesser GNU Public
License , known as the LLGPL and distributed with Clozure CL as the
which is distributed with Clozure CL as the file " LGPL " . Where these
(in-package "CCL")
Compiler functions needed elsewhere
(defun %lfun-info-index (fn)
(and (compiled-function-p fn)
(let ((bits (lfun-bits fn)))
(declare (fixnum bits))
(and (logbitp $lfbits-info-bit bits)
(%i- (uvsize (function-to-function-vector fn))
(if (logbitp $lfbits-noname-bit bits) 2 3))))))
(defun %lfun-info (fn)
(let* ((index (%lfun-info-index fn)))
(if index (%svref (function-to-function-vector fn) index))))
(defun function-source-note (fn)
(getf (%lfun-info fn) '%function-source-note))
(defun %function-acode-string (fn)
(getf (%lfun-info fn) '%function-acode-string))
(defun uncompile-function (fn)
(getf (%lfun-info fn) 'function-lambda-expression ))
used - by : backtrace , arglist
(defun function-symbol-map (fn)
(getf (%lfun-info fn) 'function-symbol-map))
(defun find-source-note-at-pc (fn pc)
(let* ((function-note (function-source-note fn))
(pc-source-map (getf (%lfun-info fn) 'pc-source-map))
(best-guess -1)
(best-length 0)
(len (length pc-source-map)))
(declare (fixnum best-guess best-length len))
(when (and function-note pc-source-map)
(do ((q 0 (+ q 4)))
((= q len))
(declare (fixnum q))
(let* ((pc-start (aref pc-source-map q))
(pc-end (aref pc-source-map (%i+ q 1))))
(declare (fixnum pc-start pc-end))
(when (and (<= pc-start pc)
(< pc pc-end)
(or (eql best-guess -1)
(< (%i- pc-end pc-start) best-length)))
(setf best-guess q
best-length (- pc-end pc-start)))))
(unless (eql best-guess -1)
(values
(let ((def-pos (source-note-start-pos function-note)))
(make-source-note :source function-note
:filename (source-note-filename function-note)
:start-pos (+ def-pos (aref pc-source-map (+ best-guess 2)))
:end-pos (+ def-pos (aref pc-source-map (+ best-guess 3)))))
(aref pc-source-map best-guess)
(aref pc-source-map (+ best-guess 1)))))))
(eval-when (:load-toplevel)
(defvar *structured-lambda-list* nil)
(defun parse-body (body env &optional (doc-string-allowed t) &aux
decls
doc
(tail body)
form)
(declare (ignore env))
(loop
(if (and (stringp (setq form (%car tail))) (%cdr tail))
(if doc-string-allowed
(setq doc form)
(return))
(if (not (and (consp form) (symbolp (%car form))))
(return)
(if (eq (%car form) 'declare)
(push form decls)
(return))))
(setq tail (%cdr tail)))
(return-from parse-body (values tail (nreverse decls) doc)))
|
bc3fdb0a7bc80503c3dc5f4c7808de7eb012bdff0deb0c5e8d8bb96900b40969 | robert-strandh/SICL | char-not-equal-1-define-compiler-macro.lisp | (cl:in-package #:sicl-character)
(define-compiler-macro char/= (&whole form &rest arguments)
(cond ((not (and (cleavir-code-utilities:proper-list-p arguments)
(>= (length arguments) 1)))
form)
((= (length arguments) 1)
`(characterp ,(car arguments)))
(t (let* ((vars (loop for argument in arguments collect (gensym))))
`(let ,(loop for var in vars
for arg in arguments
collect `(,var ,arg))
(and ,@(loop for (var1 . rest-vars) on vars
repeat (1- (length vars))
nconc (loop for var2 in rest-vars
collect `(not (binary-char= ,var1 ,var2))))))))))
| null | https://raw.githubusercontent.com/robert-strandh/SICL/2e0699808e9f2d2b358ef790eba8f8bb02e3ec4e/Code/Character/char-not-equal-1-define-compiler-macro.lisp | lisp | (cl:in-package #:sicl-character)
(define-compiler-macro char/= (&whole form &rest arguments)
(cond ((not (and (cleavir-code-utilities:proper-list-p arguments)
(>= (length arguments) 1)))
form)
((= (length arguments) 1)
`(characterp ,(car arguments)))
(t (let* ((vars (loop for argument in arguments collect (gensym))))
`(let ,(loop for var in vars
for arg in arguments
collect `(,var ,arg))
(and ,@(loop for (var1 . rest-vars) on vars
repeat (1- (length vars))
nconc (loop for var2 in rest-vars
collect `(not (binary-char= ,var1 ,var2))))))))))
| |
063401c2ac87362f939c99416ea63cbe1c11fb979dfecb9bfb67f1f22b50ed5a | hemmi/coq2scala | coq_omega.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(**************************************************************************)
(* *)
Omega : a solver of quantifier - free problems in Presburger Arithmetic
(* *)
( CNET , Lannion , France )
(* *)
(**************************************************************************)
open Util
open Pp
open Reduction
open Proof_type
open Names
open Nameops
open Term
open Declarations
open Environ
open Sign
open Inductive
open Tacticals
open Tacmach
open Evar_refiner
open Tactics
open Clenv
open Logic
open Libnames
open Nametab
open Contradiction
module OmegaSolver = Omega.MakeOmegaSolver (Bigint)
open OmegaSolver
Added by JCF , 09/03/98
let elim_id id gl = simplest_elim (pf_global gl id) gl
let resolve_id id gl = apply (pf_global gl id) gl
let timing timer_name f arg = f arg
let display_time_flag = ref false
let display_system_flag = ref false
let display_action_flag = ref false
let old_style_flag = ref false
let read f () = !f
let write f x = f:=x
open Goptions
let _ =
declare_bool_option
{ optsync = false;
optdepr = false;
optname = "Omega system time displaying flag";
optkey = ["Omega";"System"];
optread = read display_system_flag;
optwrite = write display_system_flag }
let _ =
declare_bool_option
{ optsync = false;
optdepr = false;
optname = "Omega action display flag";
optkey = ["Omega";"Action"];
optread = read display_action_flag;
optwrite = write display_action_flag }
let _ =
declare_bool_option
{ optsync = false;
optdepr = false;
optname = "Omega old style flag";
optkey = ["Omega";"OldStyle"];
optread = read old_style_flag;
optwrite = write old_style_flag }
(* Named timers for the main phases; no-ops while [timing] is disabled. *)
let all_time = timing "Omega "
let solver_time = timing "Solver "
let exact_time = timing "Rewrites "
let elim_time = timing "Elim "
let simpl_time = timing "Simpl "
let generalize_time = timing "Generalize"

(* Fresh-name generators, each with its own private counter, for the
   identifiers and variables introduced by the tactic. *)
let new_identifier =
  let cpt = ref 0 in
  (fun () -> let s = "Omega" ^ string_of_int !cpt in incr cpt; id_of_string s)

let new_identifier_state =
  let cpt = ref 0 in
  (fun () -> let s = make_ident "State" (Some !cpt) in incr cpt; s)

let new_identifier_var =
  let cpt = ref 0 in
  (fun () -> let s = "Zvar" ^ string_of_int !cpt in incr cpt; id_of_string s)

(* Fresh integer tags for hypotheses handed to the Omega engine. *)
let new_id =
  let cpt = ref 0 in fun () -> incr cpt; !cpt

(* Fresh variable numbers; starts at 1001 to avoid clashing with the
   numbers assigned by [intern_id]. *)
let new_var_num =
  let cpt = ref 1000 in (fun () -> incr cpt; !cpt)

let new_var =
  let cpt = ref 0 in fun () -> incr cpt; Nameops.make_ident "WW" (Some !cpt)
(* Printable name of the internal Omega variable number [i], e.g. "X3". *)
let display_var i = "X" ^ string_of_int i
(* Bijection between Coq identifiers and the integer variable numbers
   used by the Omega engine.  [intern_id] allocates a fresh number for an
   unseen identifier; [unintern_id] maps a number back, inventing a fresh
   "WW" identifier for numbers never seen before (e.g. variables created
   internally by the solver).  Both directions share the two hash tables. *)
let intern_id,unintern_id =
  let cpt = ref 0 in
  let table = Hashtbl.create 7 and co_table = Hashtbl.create 7 in
  (fun (name : identifier) ->
     try Hashtbl.find table name with Not_found ->
       let idx = !cpt in
       Hashtbl.add table name idx;
       Hashtbl.add co_table idx name;
       incr cpt; idx),
  (fun idx ->
     try Hashtbl.find co_table idx with Not_found ->
       let v = new_var () in
       Hashtbl.add table v idx; Hashtbl.add co_table idx v; v)
(* Small tactic shorthands; the [*_time] wrappers are the (disabled)
   timers defined above. *)
let mk_then = tclTHENLIST

(* Prove an existential by providing the witness [c]. *)
let exists_tac c = constructor_tac false (Some 1) 1 (Glob_term.ImplicitBindings [c])

let generalize_tac t = generalize_time (generalize t)
let elim t = elim_time (simplest_elim t)
let exact t = exact_time (Tactics.refine t)
(* Unfold the (lazily resolved) constant [s] everywhere in the conclusion. *)
let unfold s = Tactics.unfold_in_concl [Termops.all_occurrences, Lazy.force s]
(* [rev_assoc k l] returns the first [v] such that [(v,k)] occurs in the
   association list [l]; raises [Not_found] otherwise.  This is
   [List.assoc] with the roles of the pair components exchanged. *)
let rev_assoc k l =
  let rec search = function
    | [] -> raise Not_found
    | (v, k') :: rest -> if k = k' then v else search rest
  in
  search l
(* Mutable table linking hypothesis names to the integer tags used by the
   Omega engine; [tag_of_hyp] and [hyp_of_tag] are the two lookups. *)
let tag_hypothesis,tag_of_hyp, hyp_of_tag =
  let l = ref ([]:(identifier * int) list) in
  (fun h id -> l := (h,id):: !l),
  (fun h -> try List.assoc h !l with Not_found -> failwith "tag_hypothesis"),
  (fun h -> try rev_assoc h !l with Not_found -> failwith "tag_hypothesis")

(* Table of abstracted sub-terms: each hidden constr is associated with
   the fresh variable naming it, the hypothesis stating the equality, and
   a flag recording whether it came from a nat injection. *)
let hide_constr,find_constr,clear_tables,dump_tables =
  let l = ref ([]:(constr * (identifier * identifier * bool)) list) in
  (fun h id eg b -> l := (h,(id,eg,b)):: !l),
  (fun h -> try list_assoc_f eq_constr h !l with Not_found -> failwith "find_contr"),
  (fun () -> l := []),
  (fun () -> !l)
(* Lazy evaluation is used for Coq constants, because this code
   is evaluated before the compiled modules are loaded.
   To use the constant Zplus, one must type "Lazy.force coq_Zplus"
   This is the right way to access to Coq constants in tactics ML code *)
open Coqlib

(* Module lists searched when resolving the constants below. *)
let logic_dir = ["Coq";"Logic";"Decidable"]
let coq_modules =
  init_modules @arith_modules @ [logic_dir] @ zarith_base_modules
    @ [["Coq"; "omega"; "OmegaLemmas"]]

(* Constant-lookup helpers, specialised per module family. *)
let init_constant = gen_constant_in_modules "Omega" init_modules
let constant = gen_constant_in_modules "Omega" coq_modules
let z_constant = gen_constant_in_modules "Omega" [["Coq";"ZArith"]]
let zbase_constant =
  gen_constant_in_modules "Omega" [["Coq";"ZArith";"BinInt"]]
(* Zarith *)

(* Lazily resolved references to the Coq constants this tactic builds
   terms with and the OmegaLemmas lemmas it rewrites with. *)

(* Integer constructors (positive digits, Z0/Zpos/Zneg) and comparison. *)
let coq_xH = lazy (constant "xH")
let coq_xO = lazy (constant "xO")
let coq_xI = lazy (constant "xI")
let coq_Z0 = lazy (constant "Z0")
let coq_Zpos = lazy (constant "Zpos")
let coq_Zneg = lazy (constant "Zneg")
let coq_Z = lazy (constant "Z")
let coq_comparison = lazy (constant "comparison")
let coq_Gt = lazy (constant "Gt")

(* Arithmetic operations and order relations on Z. *)
let coq_Zplus = lazy (zbase_constant "Z.add")
let coq_Zmult = lazy (zbase_constant "Z.mul")
let coq_Zopp = lazy (zbase_constant "Z.opp")
let coq_Zminus = lazy (zbase_constant "Z.sub")
let coq_Zsucc = lazy (zbase_constant "Z.succ")
let coq_Zpred = lazy (zbase_constant "Z.pred")
let coq_Zgt = lazy (zbase_constant "Z.gt")
let coq_Zle = lazy (zbase_constant "Z.le")
let coq_Z_of_nat = lazy (zbase_constant "Z.of_nat")

(* Injection lemmas from nat to Z. *)
let coq_inj_plus = lazy (z_constant "Nat2Z.inj_add")
let coq_inj_mult = lazy (z_constant "Nat2Z.inj_mul")
let coq_inj_minus1 = lazy (z_constant "Nat2Z.inj_sub")
let coq_inj_minus2 = lazy (constant "inj_minus2")
let coq_inj_S = lazy (z_constant "Nat2Z.inj_succ")
let coq_inj_le = lazy (z_constant "Znat.inj_le")
let coq_inj_lt = lazy (z_constant "Znat.inj_lt")
let coq_inj_ge = lazy (z_constant "Znat.inj_ge")
let coq_inj_gt = lazy (z_constant "Znat.inj_gt")
let coq_inj_neq = lazy (z_constant "inj_neq")
let coq_inj_eq = lazy (z_constant "inj_eq")

(* "fast_" rewrite lemmas from OmegaLemmas, used by [clever_rewrite]. *)
let coq_fast_Zplus_assoc_reverse = lazy (constant "fast_Zplus_assoc_reverse")
let coq_fast_Zplus_assoc = lazy (constant "fast_Zplus_assoc")
let coq_fast_Zmult_assoc_reverse = lazy (constant "fast_Zmult_assoc_reverse")
let coq_fast_Zplus_permute = lazy (constant "fast_Zplus_permute")
let coq_fast_Zplus_comm = lazy (constant "fast_Zplus_comm")
let coq_fast_Zmult_comm = lazy (constant "fast_Zmult_comm")

(* OMEGA* lemmas implementing the steps of the decision procedure. *)
let coq_Zmult_le_approx = lazy (constant "Zmult_le_approx")
let coq_OMEGA1 = lazy (constant "OMEGA1")
let coq_OMEGA2 = lazy (constant "OMEGA2")
let coq_OMEGA3 = lazy (constant "OMEGA3")
let coq_OMEGA4 = lazy (constant "OMEGA4")
let coq_OMEGA5 = lazy (constant "OMEGA5")
let coq_OMEGA6 = lazy (constant "OMEGA6")
let coq_OMEGA7 = lazy (constant "OMEGA7")
let coq_OMEGA8 = lazy (constant "OMEGA8")
let coq_OMEGA9 = lazy (constant "OMEGA9")
let coq_fast_OMEGA10 = lazy (constant "fast_OMEGA10")
let coq_fast_OMEGA11 = lazy (constant "fast_OMEGA11")
let coq_fast_OMEGA12 = lazy (constant "fast_OMEGA12")
let coq_fast_OMEGA13 = lazy (constant "fast_OMEGA13")
let coq_fast_OMEGA14 = lazy (constant "fast_OMEGA14")
let coq_fast_OMEGA15 = lazy (constant "fast_OMEGA15")
let coq_fast_OMEGA16 = lazy (constant "fast_OMEGA16")
let coq_OMEGA17 = lazy (constant "OMEGA17")
let coq_OMEGA18 = lazy (constant "OMEGA18")
let coq_OMEGA19 = lazy (constant "OMEGA19")
let coq_OMEGA20 = lazy (constant "OMEGA20")

(* Factorisation lemmas used when normalising linear combinations. *)
let coq_fast_Zred_factor0 = lazy (constant "fast_Zred_factor0")
let coq_fast_Zred_factor1 = lazy (constant "fast_Zred_factor1")
let coq_fast_Zred_factor2 = lazy (constant "fast_Zred_factor2")
let coq_fast_Zred_factor3 = lazy (constant "fast_Zred_factor3")
let coq_fast_Zred_factor4 = lazy (constant "fast_Zred_factor4")
let coq_fast_Zred_factor5 = lazy (constant "fast_Zred_factor5")
let coq_fast_Zred_factor6 = lazy (constant "fast_Zred_factor6")
let coq_fast_Zmult_plus_distr_l = lazy (constant "fast_Zmult_plus_distr_l")
let coq_fast_Zmult_opp_comm = lazy (constant "fast_Zmult_opp_comm")
let coq_fast_Zopp_plus_distr = lazy (constant "fast_Zopp_plus_distr")
let coq_fast_Zopp_mult_distr_r = lazy (constant "fast_Zopp_mult_distr_r")
let coq_fast_Zopp_eq_mult_neg_1 = lazy (constant "fast_Zopp_eq_mult_neg_1")
let coq_fast_Zopp_involutive = lazy (constant "fast_Zopp_involutive")

(* Lemmas moving a relation to the form "0 <= e" / "0 < e" etc. *)
let coq_Zegal_left = lazy (constant "Zegal_left")
let coq_Zne_left = lazy (constant "Zne_left")
let coq_Zlt_left = lazy (constant "Zlt_left")
let coq_Zge_left = lazy (constant "Zge_left")
let coq_Zgt_left = lazy (constant "Zgt_left")
let coq_Zle_left = lazy (constant "Zle_left")
let coq_new_var = lazy (constant "new_var")
let coq_intro_Z = lazy (constant "intro_Z")

(* Decidability and negation lemmas for the Z relations. *)
let coq_dec_eq = lazy (zbase_constant "Z.eq_decidable")
let coq_dec_Zne = lazy (constant "dec_Zne")
let coq_dec_Zle = lazy (zbase_constant "Z.le_decidable")
let coq_dec_Zlt = lazy (zbase_constant "Z.lt_decidable")
let coq_dec_Zgt = lazy (constant "dec_Zgt")
let coq_dec_Zge = lazy (constant "dec_Zge")
let coq_not_Zeq = lazy (constant "not_Zeq")
let coq_not_Zne = lazy (constant "not_Zne")
let coq_Znot_le_gt = lazy (constant "Znot_le_gt")
let coq_Znot_lt_ge = lazy (constant "Znot_lt_ge")
let coq_Znot_ge_lt = lazy (constant "Znot_ge_lt")
let coq_Znot_gt_le = lazy (constant "Znot_gt_le")
let coq_neq = lazy (constant "neq")
let coq_Zne = lazy (constant "Zne")
(* Note: the four bindings below intentionally shadow the earlier
   coq_Zle/coq_Zgt definitions with identical constants. *)
let coq_Zle = lazy (zbase_constant "Z.le")
let coq_Zgt = lazy (zbase_constant "Z.gt")
let coq_Zge = lazy (zbase_constant "Z.ge")
let coq_Zlt = lazy (zbase_constant "Z.lt")
(* Peano *)
let coq_le = lazy (init_constant "le")
let coq_lt = lazy (init_constant "lt")
let coq_ge = lazy (init_constant "ge")
let coq_gt = lazy (init_constant "gt")
let coq_minus = lazy (init_constant "minus")
let coq_plus = lazy (init_constant "plus")
let coq_mult = lazy (init_constant "mult")
let coq_pred = lazy (init_constant "pred")
let coq_nat = lazy (init_constant "nat")
let coq_S = lazy (init_constant "S")
let coq_O = lazy (init_constant "O")

(* Compare_dec/Peano_dec/Minus *)
let coq_pred_of_minus = lazy (constant "pred_of_minus")
let coq_le_gt_dec = lazy (constant "le_gt_dec")
let coq_dec_eq_nat = lazy (constant "dec_eq_nat")
let coq_dec_le = lazy (constant "dec_le")
let coq_dec_lt = lazy (constant "dec_lt")
let coq_dec_ge = lazy (constant "dec_ge")
let coq_dec_gt = lazy (constant "dec_gt")
let coq_not_eq = lazy (constant "not_eq")
let coq_not_le = lazy (constant "not_le")
let coq_not_lt = lazy (constant "not_lt")
let coq_not_ge = lazy (constant "not_ge")
let coq_not_gt = lazy (constant "not_gt")

(* Logic/Decidable *)
let coq_eq_ind_r = lazy (constant "eq_ind_r")

let coq_dec_or = lazy (constant "dec_or")
let coq_dec_and = lazy (constant "dec_and")
let coq_dec_imp = lazy (constant "dec_imp")
let coq_dec_iff = lazy (constant "dec_iff")
let coq_dec_not = lazy (constant "dec_not")
let coq_dec_False = lazy (constant "dec_False")
let coq_dec_not_not = lazy (constant "dec_not_not")
let coq_dec_True = lazy (constant "dec_True")

let coq_not_or = lazy (constant "not_or")
let coq_not_and = lazy (constant "not_and")
let coq_not_imp = lazy (constant "not_imp")
let coq_not_iff = lazy (constant "not_iff")
let coq_not_not = lazy (constant "not_not")
let coq_imp_simp = lazy (constant "imp_simp")
let coq_iff = lazy (constant "iff")
(* uses build_coq_and, build_coq_not, build_coq_or, build_coq_ex *)

(* For unfold *)
open Closure

(* Turn a lazy constant into an [evaluable_global_reference] suitable
   for [unfold]; fails loudly on anything that is not an evaluable
   constant. *)
let evaluable_ref_of_constr s c = match kind_of_term (Lazy.force c) with
  | Const kn when Tacred.is_evaluable (Global.env()) (EvalConstRef kn) ->
      EvalConstRef kn
  | _ -> anomaly ("Coq_omega: "^s^" is not an evaluable constant")

(* Unfoldable references used by the normalisation phase. *)
let sp_Zsucc = lazy (evaluable_ref_of_constr "Z.succ" coq_Zsucc)
let sp_Zpred = lazy (evaluable_ref_of_constr "Z.pred" coq_Zpred)
let sp_Zminus = lazy (evaluable_ref_of_constr "Z.sub" coq_Zminus)
let sp_Zle = lazy (evaluable_ref_of_constr "Z.le" coq_Zle)
let sp_Zgt = lazy (evaluable_ref_of_constr "Z.gt" coq_Zgt)
let sp_Zge = lazy (evaluable_ref_of_constr "Z.ge" coq_Zge)
let sp_Zlt = lazy (evaluable_ref_of_constr "Z.lt" coq_Zlt)
let sp_not = lazy (evaluable_ref_of_constr "not" (lazy (build_coq_not ())))
(* Smart constructors building the Coq terms manipulated by the tactic. *)
let mk_var v = mkVar (id_of_string v)
let mk_plus t1 t2 = mkApp (Lazy.force coq_Zplus, [| t1; t2 |])
let mk_times t1 t2 = mkApp (Lazy.force coq_Zmult, [| t1; t2 |])
let mk_minus t1 t2 = mkApp (Lazy.force coq_Zminus, [| t1;t2 |])
(* Equality at type Z. *)
let mk_eq t1 t2 = mkApp (build_coq_eq (), [| Lazy.force coq_Z; t1; t2 |])
let mk_le t1 t2 = mkApp (Lazy.force coq_Zle, [| t1; t2 |])
let mk_gt t1 t2 = mkApp (Lazy.force coq_Zgt, [| t1; t2 |])
let mk_inv t = mkApp (Lazy.force coq_Zopp, [| t |])
let mk_and t1 t2 = mkApp (build_coq_and (), [| t1; t2 |])
let mk_or t1 t2 = mkApp (build_coq_or (), [| t1; t2 |])
let mk_not t = mkApp (build_coq_not (), [| t |])
(* Equality at type [comparison]. *)
let mk_eq_rel t1 t2 = mkApp (build_coq_eq (),
			      [| Lazy.force coq_comparison; t1; t2 |])
(* Injection of a nat term into Z via Z.of_nat. *)
let mk_inj t = mkApp (Lazy.force coq_Z_of_nat, [| t |])
(* Build the Coq term denoting the integer [n]: Z0 for zero, otherwise
   Zpos/Zneg applied to the binary encoding of |n| (xH/xO/xI digits,
   least significant first). *)
let mk_integer n =
  let rec loop n =
    if n =? one then Lazy.force coq_xH else
      mkApp((if n mod two =? zero then Lazy.force coq_xO else Lazy.force coq_xI),
		 [| loop (n/two) |])
  in
  if n =? zero then Lazy.force coq_Z0
  else mkApp ((if n >? zero then Lazy.force coq_Zpos else Lazy.force coq_Zneg),
		 [| loop (abs n) |])
(* Symbolic tags for the head constants recognised by the
   destructurate_* functions below. *)
type omega_constant =
  | Zplus | Zmult | Zminus | Zsucc | Zopp | Zpred
  | Plus | Mult | Minus | Pred | S | O
  | Zpos | Zneg | Z0 | Z_of_nat
  | Eq | Neq
  | Zne | Zle | Zlt | Zge | Zgt
  | Z | Nat
  | And | Or | False | True | Not | Iff
  | Le | Lt | Ge | Gt
  | Other of string

type omega_proposition =
  | Keq of constr * constr * constr
  | Kn

(* Result of classifying a term: a variable, a recognised application,
   an (anonymous) implication, or an unknown term. *)
type result =
  | Kvar of identifier
  | Kapp of omega_constant * constr list
  | Kimp of constr * constr
  | Kufo
(* Nota: Kimp corresponds to a binder (Prod), but hopefully we won't
   have to bother with term lifting: Kimp will correspond to anonymous
   product, for which (Rel 1) doesn't occur in the right term.
   Moreover, we'll work on fully introduced goals, hence no Rel's in
   the term parts that we manipulate, but rather Var's.
   Said otherwise: all constr manipulated here are closed *)
(* Classify a proposition by its head: recognised relations and logical
   connectives get a symbolic [Kapp]; other global heads are reported as
   [Other] with their full path; an anonymous product is [Kimp].
   A named product means the goal is quantified, which Omega rejects. *)
let destructurate_prop t =
  let c, args = decompose_app t in
  match kind_of_term c, args with
    | _, [_;_;_] when eq_constr c (build_coq_eq ()) -> Kapp (Eq,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_neq) -> Kapp (Neq,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zne) -> Kapp (Zne,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zle) -> Kapp (Zle,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zlt) -> Kapp (Zlt,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zge) -> Kapp (Zge,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zgt) -> Kapp (Zgt,args)
    | _, [_;_] when eq_constr c (build_coq_and ()) -> Kapp (And,args)
    | _, [_;_] when eq_constr c (build_coq_or ()) -> Kapp (Or,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_iff) -> Kapp (Iff, args)
    | _, [_] when eq_constr c (build_coq_not ()) -> Kapp (Not,args)
    | _, [] when eq_constr c (build_coq_False ()) -> Kapp (False,args)
    | _, [] when eq_constr c (build_coq_True ()) -> Kapp (True,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_le) -> Kapp (Le,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_lt) -> Kapp (Lt,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_ge) -> Kapp (Ge,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_gt) -> Kapp (Gt,args)
    | Const sp, args ->
	Kapp (Other (string_of_path (path_of_global (ConstRef sp))),args)
    | Construct csp , args ->
	Kapp (Other (string_of_path (path_of_global (ConstructRef csp))), args)
    | Ind isp, args ->
	Kapp (Other (string_of_path (path_of_global (IndRef isp))),args)
    | Var id,[] -> Kvar id
    | Prod (Anonymous,typ,body), [] -> Kimp(typ,body)
    | Prod (Name _,_,_),[] -> error "Omega: Not a quantifier-free goal"
    | _ -> Kufo
(* Recognise the two types Omega can work on: Z and nat. *)
let destructurate_type t =
  let c, args = decompose_app t in
  match kind_of_term c, args with
    | _, [] when eq_constr c (Lazy.force coq_Z) -> Kapp (Z,args)
    | _, [] when eq_constr c (Lazy.force coq_nat) -> Kapp (Nat,args)
    | _ -> Kufo
(* Classify an arithmetic term by its head constant: Z operations,
   Peano (nat) operations, integer literal constructors, Z.of_nat, or a
   plain variable.  Anything else yields [Kufo].
   Fix: the [Zpos]/[Zneg] cases were swapped — a term headed by
   [coq_Zpos] was tagged [Kapp (Zneg,_)] and vice versa; each head now
   gets its matching constructor. *)
let destructurate_term t =
  let c, args = decompose_app t in
  match kind_of_term c, args with
    | _, [_;_] when eq_constr c (Lazy.force coq_Zplus) -> Kapp (Zplus,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zmult) -> Kapp (Zmult,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zminus) -> Kapp (Zminus,args)
    | _, [_] when eq_constr c (Lazy.force coq_Zsucc) -> Kapp (Zsucc,args)
    | _, [_] when eq_constr c (Lazy.force coq_Zpred) -> Kapp (Zpred,args)
    | _, [_] when eq_constr c (Lazy.force coq_Zopp) -> Kapp (Zopp,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_plus) -> Kapp (Plus,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_mult) -> Kapp (Mult,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_minus) -> Kapp (Minus,args)
    | _, [_] when eq_constr c (Lazy.force coq_pred) -> Kapp (Pred,args)
    | _, [_] when eq_constr c (Lazy.force coq_S) -> Kapp (S,args)
    | _, [] when eq_constr c (Lazy.force coq_O) -> Kapp (O,args)
    | _, [_] when eq_constr c (Lazy.force coq_Zpos) -> Kapp (Zpos,args)
    | _, [_] when eq_constr c (Lazy.force coq_Zneg) -> Kapp (Zneg,args)
    | _, [] when eq_constr c (Lazy.force coq_Z0) -> Kapp (Z0,args)
    | _, [_] when eq_constr c (Lazy.force coq_Z_of_nat) -> Kapp (Z_of_nat,args)
    | Var id,[] -> Kvar id
    | _ -> Kufo
(* Inverse of [mk_integer]: decode a Coq integer literal (Z0 / Zpos /
   Zneg over xH/xO/xI digits) into a [Bigint]; raises [Failure] on any
   non-literal term. *)
let recognize_number t =
  let rec loop t =
    match decompose_app t with
      | f, [t] when eq_constr f (Lazy.force coq_xI) -> one + two * loop t
      | f, [t] when eq_constr f (Lazy.force coq_xO) -> two * loop t
      | f, [] when eq_constr f (Lazy.force coq_xH) -> one
      | _ -> failwith "not a number"
  in
  match decompose_app t with
    | f, [t] when eq_constr f (Lazy.force coq_Zpos) -> loop t
    | f, [t] when eq_constr f (Lazy.force coq_Zneg) -> neg (loop t)
    | f, [] when eq_constr f (Lazy.force coq_Z0) -> zero
    | _ -> failwith "not a number"
(* One step of a path into a constr, used to address the exact subterm a
   rewrite must touch. *)
type constr_path =
  | P_APP of int
  (* Abstraction and product *)
  | P_BODY
  | P_TYPE
  (* Case *)
  | P_BRANCH of int
  | P_ARITY
  | P_ARG
(* [context operation path t] rebuilds [t] with [operation i subterm]
   applied to the subterm addressed by [path]; [i] counts the binders
   crossed on the way down (starting at 1), so [operation] can build a
   correctly-lifted replacement.  Casts are traversed transparently. *)
let context operation path (t : constr) =
  let rec loop i p0 t =
    match (p0,kind_of_term t) with
      | (p, Cast (c,k,t)) -> mkCast (loop i p c,k,t)
      | ([], _) -> operation i t
      | ((P_APP n :: p), App (f,v)) ->
	  let v' = Array.copy v in
	  v'.(pred n) <- loop i p v'.(pred n); mkApp (f, v')
      | ((P_BRANCH n :: p), Case (ci,q,c,v)) ->
	  (* formerly used mkApp here; anyway, P_BRANCH seems nowhere used *)
	  let v' = Array.copy v in
	  v'.(n) <- loop i p v'.(n); (mkCase (ci,q,c,v'))
      | ((P_ARITY :: p), App (f,l)) ->
	  appvect (loop i p f,l)
      | ((P_ARG :: p), App (f,v)) ->
	  let v' = Array.copy v in
	  v'.(0) <- loop i p v'.(0); mkApp (f,v')
      | (p, Fix ((_,n as ln),(tys,lna,v))) ->
	  (* entering a mutual fixpoint shifts the binder count by the
	     number of mutually defined functions *)
	  let l = Array.length v in
	  let v' = Array.copy v in
	  v'.(n)<- loop (Pervasives.(+) i l) p v.(n); (mkFix (ln,(tys,lna,v')))
      | ((P_BODY :: p), Prod (n,t,c)) ->
	  (mkProd (n,t,loop (succ i) p c))
      | ((P_BODY :: p), Lambda (n,t,c)) ->
	  (mkLambda (n,t,loop (succ i) p c))
      | ((P_BODY :: p), LetIn (n,b,t,c)) ->
	  (mkLetIn (n,b,t,loop (succ i) p c))
      | ((P_TYPE :: p), Prod (n,t,c)) ->
	  (mkProd (n,loop i p t,c))
      | ((P_TYPE :: p), Lambda (n,t,c)) ->
	  (mkLambda (n,loop i p t,c))
      | ((P_TYPE :: p), LetIn (n,b,t,c)) ->
	  (mkLetIn (n,b,loop i p t,c))
      | (p, _) ->
	  ppnl (Printer.pr_lconstr t);
	  failwith ("abstract_path " ^ string_of_int(List.length p))
  in
  loop 1 path t
(* [occurence path t] returns the subterm of [t] addressed by [path]
   (read-only companion of [context]); fails if the path does not match
   the term's structure. *)
let occurence path (t : constr) =
  let rec loop p0 t = match (p0,kind_of_term t) with
    | (p, Cast (c,_,_)) -> loop p c
    | ([], _) -> t
    | ((P_APP n :: p), App (f,v)) -> loop p v.(pred n)
    | ((P_BRANCH n :: p), Case (_,_,_,v)) -> loop p v.(n)
    | ((P_ARITY :: p), App (f,_)) -> loop p f
    | ((P_ARG :: p), App (f,v)) -> loop p v.(0)
    | (p, Fix((_,n) ,(_,_,v))) -> loop p v.(n)
    | ((P_BODY :: p), Prod (n,t,c)) -> loop p c
    | ((P_BODY :: p), Lambda (n,t,c)) -> loop p c
    | ((P_BODY :: p), LetIn (n,b,t,c)) -> loop p c
    | ((P_TYPE :: p), Prod (n,term,c)) -> loop p term
    | ((P_TYPE :: p), Lambda (n,term,c)) -> loop p term
    | ((P_TYPE :: p), LetIn (n,b,term,c)) -> loop p term
    | (p, _) ->
	ppnl (Printer.pr_lconstr t);
	failwith ("occurence " ^ string_of_int(List.length p))
  in
  loop path t
(* [abstract_path typ path t] abstracts the subterm at [path] out of [t]:
   returns the one-argument lambda (of domain [typ]) obtained by
   replacing that subterm with the bound variable, together with the
   subterm itself. *)
let abstract_path typ path t =
  let term_occur = ref (mkRel 0) in
  let abstract = context (fun i t -> term_occur:= t; mkRel i) path t in
  mkLambda (Name (id_of_string "x"), typ, abstract), !term_occur

(* Normalise (via [pf_nf]) only the subterm of the conclusion addressed
   by [path]; the conversion is not re-checked. *)
let focused_simpl path gl =
  let newc = context (fun i t -> pf_nf gl t) (List.rev path) (pf_concl gl) in
  convert_concl_no_check newc DEFAULTcast gl

let focused_simpl path = simpl_time (focused_simpl path)
(* Reified arithmetic expressions, the internal mirror of the Coq terms
   being normalised.  [Oufo] wraps terms the tactic cannot interpret. *)
type oformula =
  | Oplus of oformula * oformula
  | Oinv of oformula
  | Otimes of oformula * oformula
  | Oatom of identifier
  | Oz of bigint
  | Oufo of constr

(* Debug printer for [oformula], writing to stdout. *)
let rec oprint = function
  | Oplus(t1,t2) ->
      print_string "("; oprint t1; print_string "+";
      oprint t2; print_string ")"
  | Oinv t -> print_string "~"; oprint t
  | Otimes (t1,t2) ->
      print_string "("; oprint t1; print_string "*";
      oprint t2; print_string ")"
  | Oatom s -> print_string (string_of_id s)
  | Oz i -> print_string (string_of_bigint i)
  | Oufo f -> print_string "?"
(* Ordering key of a monomial: the interned number of its head atom, or
   -1 for pure constants/unknowns.  Sums have no weight of their own. *)
let rec weight = function
  | Oatom c -> intern_id c
  | Oz _ -> -1
  | Oinv c -> weight c
  | Otimes(c,_) -> weight c
  | Oplus _ -> failwith "weight"
  | Oufo _ -> -1

(* Translate an [oformula] back into the corresponding Coq term. *)
let rec val_of = function
  | Oatom c -> mkVar c
  | Oz c -> mk_integer c
  | Oinv c -> mkApp (Lazy.force coq_Zopp, [| val_of c |])
  | Otimes (t1,t2) -> mkApp (Lazy.force coq_Zmult, [| val_of t1; val_of t2 |])
  | Oplus(t1,t2) -> mkApp (Lazy.force coq_Zplus, [| val_of t1; val_of t2 |])
  | Oufo c -> c
(* Turn a normalised [oformula] (a sum of [atom * constant] monomials
   ending in a constant) into the Omega engine's equation record, tagging
   it with a fresh id registered for hypothesis [name]. *)
let compile name kind =
  let rec loop accu = function
    | Oplus(Otimes(Oatom v,Oz n),r) -> loop ({v=intern_id v; c=n} :: accu) r
    | Oz n ->
	let id = new_id () in
	tag_hypothesis name id;
	{kind = kind; body = List.rev accu; constant = n; id = id}
    | _ -> anomaly "compile_equation"
  in
  loop []

(* Inverse of [compile]: rebuild the normal-form [oformula] of an
   engine equation. *)
let rec decompile af =
  let rec loop = function
    | ({v=v; c=n}::r) -> Oplus(Otimes(Oatom (unintern_id v),Oz n),loop r)
    | [] -> Oz af.constant
  in
  loop af.body
(* Fresh metavariable, used as the pending proof obligation of the
   refinements below. *)
let mkNewMeta () = mkMeta (Evarutil.new_meta())

(* Rewrite the subterm of the conclusion at path [p] (of type [typ]) into
   [result], justified by [theorem : result = subterm].  The conclusion
   is abstracted over the subterm and the goal is refined with an
   [eq_ind_r] instance, leaving the rewritten goal as a metavariable. *)
let clever_rewrite_base_poly typ p result theorem gl =
  let full = pf_concl gl in
  let (abstracted,occ) = abstract_path typ (List.rev p) full in
  let t =
    applist
      (mkLambda
	 (Name (id_of_string "P"),
	  mkArrow typ mkProp,
	  mkLambda
	    (Name (id_of_string "H"),
	     applist (mkRel 1,[result]),
	     mkApp (Lazy.force coq_eq_ind_r,
		    [| typ; result; mkRel 2; mkRel 1; occ; theorem |]))),
       [abstracted])
  in
  exact (applist(t,[mkNewMeta()])) gl

(* Specialisations of the polymorphic rewrite at type Z and nat. *)
let clever_rewrite_base p result theorem gl =
  clever_rewrite_base_poly (Lazy.force coq_Z) p result theorem gl

let clever_rewrite_base_nat p result theorem gl =
  clever_rewrite_base_poly (Lazy.force coq_nat) p result theorem gl

(* Variants taking the rewrite lemma as head + explicit arguments. *)
let clever_rewrite_gen p result (t,args) =
  let theorem = applist(t, args) in
  clever_rewrite_base p result theorem

let clever_rewrite_gen_nat p result (t,args) =
  let theorem = applist(t, args) in
  clever_rewrite_base_nat p result theorem
(* Rewrite at path [p] with lemma [t], whose arguments are themselves
   fetched from the current subterm at the relative paths [vpath]; the
   abstracted conclusion is passed as the lemma's final argument. *)
let clever_rewrite p vpath t gl =
  let full = pf_concl gl in
  let (abstracted,occ) = abstract_path (Lazy.force coq_Z) (List.rev p) full in
  let vargs = List.map (fun p -> occurence p occ) vpath in
  let t' = applist(t, (vargs @ [abstracted])) in
  exact (applist(t',[mkNewMeta()])) gl
(* Merge two sum normal forms into one, keeping monomials sorted by
   decreasing [weight] and folding constants together.  Returns the list
   of rewrite tactics realising the reordering in the goal, paired with
   the resulting [oformula]. *)
let rec shuffle p (t1,t2) =
  match t1,t2 with
    | Oplus(l1,r1), Oplus(l2,r2) ->
	if weight l1 > weight l2 then
	  let (tac,t') = shuffle (P_APP 2 :: p) (r1,t2) in
	  (clever_rewrite p [[P_APP 1;P_APP 1];
			     [P_APP 1; P_APP 2];[P_APP 2]]
	     (Lazy.force coq_fast_Zplus_assoc_reverse)
	   :: tac,
	   Oplus(l1,t'))
	else
	  let (tac,t') = shuffle (P_APP 2 :: p) (t1,r2) in
	  (clever_rewrite p [[P_APP 1];[P_APP 2;P_APP 1];[P_APP 2;P_APP 2]]
	     (Lazy.force coq_fast_Zplus_permute)
	   :: tac,
	   Oplus(l2,t'))
    | Oplus(l1,r1), t2 ->
	if weight l1 > weight t2 then
	  let (tac,t') = shuffle (P_APP 2 :: p) (r1,t2) in
	  clever_rewrite p [[P_APP 1;P_APP 1]; [P_APP 1; P_APP 2];[P_APP 2]]
	    (Lazy.force coq_fast_Zplus_assoc_reverse)
	  :: tac,
	  Oplus(l1, t')
	else
	  [clever_rewrite p [[P_APP 1];[P_APP 2]]
	     (Lazy.force coq_fast_Zplus_comm)],
	  Oplus(t2,t1)
    | t1,Oplus(l2,r2) ->
	if weight l2 > weight t1 then
	  let (tac,t') = shuffle (P_APP 2 :: p) (t1,r2) in
	  clever_rewrite p [[P_APP 1];[P_APP 2;P_APP 1];[P_APP 2;P_APP 2]]
	    (Lazy.force coq_fast_Zplus_permute)
	  :: tac,
	  Oplus(l2,t')
	else [],Oplus(t1,t2)
    | Oz t1,Oz t2 ->
	(* two constants: fold them by simplification in the goal *)
	[focused_simpl p], Oz(Bigint.add t1 t2)
    | t1,t2 ->
	if weight t1 < weight t2 then
	  [clever_rewrite p [[P_APP 1];[P_APP 2]]
	     (Lazy.force coq_fast_Zplus_comm)],
	  Oplus(t2,t1)
	else [],Oplus(t1,t2)
(* Build the tactic list normalising [k1*e1 + k2*e2] where [e1],[e2] are
   equation bodies sorted by decreasing variable number: OMEGA10 merges
   equal variables (dropping the monomial via Zred_factor5 when the
   combined coefficient is zero), OMEGA11/OMEGA12 pass through the larger
   variable of either side. *)
let rec shuffle_mult p_init k1 e1 k2 e2 =
  let rec loop p = function
    | (({c=c1;v=v1}::l1) as l1'),(({c=c2;v=v2}::l2) as l2') ->
	if v1 = v2 then
          let tac =
            clever_rewrite p [[P_APP 1; P_APP 1; P_APP 1; P_APP 1];
                              [P_APP 1; P_APP 1; P_APP 1; P_APP 2];
                              [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
                              [P_APP 1; P_APP 1; P_APP 2];
                              [P_APP 2; P_APP 1; P_APP 2];
                              [P_APP 1; P_APP 2];
                              [P_APP 2; P_APP 2]]
              (Lazy.force coq_fast_OMEGA10)
          in
          if Bigint.add (Bigint.mult k1 c1) (Bigint.mult k2 c2) =? zero then
            let tac' =
              clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 2]]
                (Lazy.force coq_fast_Zred_factor5) in
            tac :: focused_simpl (P_APP 1::P_APP 2:: p) :: tac' ::
            loop p (l1,l2)
          else tac :: loop (P_APP 2 :: p) (l1,l2)
	else if v1 > v2 then
	  clever_rewrite p [[P_APP 1; P_APP 1; P_APP 1; P_APP 1];
			    [P_APP 1; P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 2];
			    [P_APP 1; P_APP 2]]
	    (Lazy.force coq_fast_OMEGA11) ::
	  loop (P_APP 2 :: p) (l1,l2')
	else
	  clever_rewrite p [[P_APP 2; P_APP 1; P_APP 1; P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 2];
			    [P_APP 2; P_APP 2]]
	    (Lazy.force coq_fast_OMEGA12) ::
	  loop (P_APP 2 :: p) (l1',l2)
    | ({c=c1;v=v1}::l1), [] ->
	  clever_rewrite p [[P_APP 1; P_APP 1; P_APP 1; P_APP 1];
			    [P_APP 1; P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 2];
			    [P_APP 1; P_APP 2]]
	    (Lazy.force coq_fast_OMEGA11) ::
	  loop (P_APP 2 :: p) (l1,[])
    | [],({c=c2;v=v2}::l2) ->
	  clever_rewrite p [[P_APP 2; P_APP 1; P_APP 1; P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 2];
			    [P_APP 2; P_APP 2]]
	    (Lazy.force coq_fast_OMEGA12) ::
	  loop (P_APP 2 :: p) ([],l2)
    | [],[] -> [focused_simpl p_init]
  in
  loop p_init (e1,e2)
(* Like [shuffle_mult] but only the right-hand equation is scaled:
   normalises [e1 + k2*e2].  OMEGA15 merges equal variables, plain
   associativity passes through a variable of [e1], OMEGA12 one of [e2]. *)
let rec shuffle_mult_right p_init e1 k2 e2 =
  let rec loop p = function
    | (({c=c1;v=v1}::l1) as l1'),(({c=c2;v=v2}::l2) as l2') ->
	if v1 = v2 then
          let tac =
            clever_rewrite p
              [[P_APP 1; P_APP 1; P_APP 1];
               [P_APP 1; P_APP 1; P_APP 2];
               [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
               [P_APP 1; P_APP 2];
               [P_APP 2; P_APP 1; P_APP 2];
               [P_APP 2; P_APP 2]]
              (Lazy.force coq_fast_OMEGA15)
          in
          if Bigint.add c1 (Bigint.mult k2 c2) =? zero then
            let tac' =
              clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 2]]
                (Lazy.force coq_fast_Zred_factor5)
            in
            tac :: focused_simpl (P_APP 1::P_APP 2:: p) :: tac' ::
            loop p (l1,l2)
          else tac :: loop (P_APP 2 :: p) (l1,l2)
	else if v1 > v2 then
	  clever_rewrite p [[P_APP 1;P_APP 1]; [P_APP 1; P_APP 2];[P_APP 2]]
	    (Lazy.force coq_fast_Zplus_assoc_reverse) ::
	  loop (P_APP 2 :: p) (l1,l2')
	else
	  clever_rewrite p [[P_APP 2; P_APP 1; P_APP 1; P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 2];
			    [P_APP 2; P_APP 2]]
	    (Lazy.force coq_fast_OMEGA12) ::
	  loop (P_APP 2 :: p) (l1',l2)
    | ({c=c1;v=v1}::l1), [] ->
	clever_rewrite p [[P_APP 1;P_APP 1]; [P_APP 1; P_APP 2];[P_APP 2]]
	  (Lazy.force coq_fast_Zplus_assoc_reverse) ::
	loop (P_APP 2 :: p) (l1,[])
    | [],({c=c2;v=v2}::l2) ->
	clever_rewrite p [[P_APP 2; P_APP 1; P_APP 1; P_APP 1];
			  [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
			  [P_APP 1];
			  [P_APP 2; P_APP 1; P_APP 2];
			  [P_APP 2; P_APP 2]]
	  (Lazy.force coq_fast_OMEGA12) ::
	loop (P_APP 2 :: p) ([],l2)
    | [],[] -> [focused_simpl p_init]
  in
  loop p_init (e1,e2)
(* Cancel, monomial by monomial, the sum of an equation with its
   opposite; OMEGA13/OMEGA14 are chosen according to the sign of the
   coefficient, and a final simplification closes the constant part. *)
let rec shuffle_cancel p = function
  | [] -> [focused_simpl p]
  | ({c=c1}::l1) ->
      let tac =
	clever_rewrite p [[P_APP 1; P_APP 1; P_APP 1];[P_APP 1; P_APP 2];
			  [P_APP 2; P_APP 2];
			  [P_APP 1; P_APP 1; P_APP 2; P_APP 1]]
	  (if c1 >? zero then
	     (Lazy.force coq_fast_OMEGA13)
	   else
	     (Lazy.force coq_fast_OMEGA14))
      in
      tac :: shuffle_cancel p l1
(* Multiply a normalised formula by the constant [n], distributing over
   sums and folding into existing coefficients.  Non-linear products are
   rejected. *)
let rec scalar p n = function
  | Oplus(t1,t2) ->
      let tac1,t1' = scalar (P_APP 1 :: p) n t1 and
	tac2,t2' = scalar (P_APP 2 :: p) n t2 in
      clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 1;P_APP 2];[P_APP 2]]
	(Lazy.force coq_fast_Zmult_plus_distr_l) ::
      (tac1 @ tac2), Oplus(t1',t2')
  | Oinv t ->
      [clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 2]]
	 (Lazy.force coq_fast_Zmult_opp_comm);
       focused_simpl (P_APP 2 :: p)], Otimes(t,Oz(neg n))
  | Otimes(t1,Oz x) ->
      [clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 1;P_APP 2];[P_APP 2]]
	 (Lazy.force coq_fast_Zmult_assoc_reverse);
       focused_simpl (P_APP 2 :: p)],
      Otimes(t1,Oz (n*x))
  | Otimes(t1,t2) -> error "Omega: Can't solve a goal with non-linear products"
  | (Oatom _ as t) -> [], Otimes(t,Oz n)
  | Oz i -> [focused_simpl p],Oz(n*i)
  | Oufo c -> [], Oufo (mkApp (Lazy.force coq_Zmult, [| mk_integer n; c |]))
(* Push a scalar factor through every monomial of an equation body
   (OMEGA16), then simplify the trailing constant. *)
let rec scalar_norm p_init =
  let rec loop p = function
    | [] -> [focused_simpl p_init]
    | (_::l) ->
	clever_rewrite p
	  [[P_APP 1; P_APP 1; P_APP 1];[P_APP 1; P_APP 1; P_APP 2];
	   [P_APP 1; P_APP 2];[P_APP 2]]
	  (Lazy.force coq_fast_OMEGA16) :: loop (P_APP 2 :: p) l
  in
  loop p_init

(* Re-associate a sum of monomials to the right, one step per monomial. *)
let rec norm_add p_init =
  let rec loop p = function
    | [] -> [focused_simpl p_init]
    | _:: l ->
	clever_rewrite p [[P_APP 1;P_APP 1]; [P_APP 1; P_APP 2];[P_APP 2]]
	  (Lazy.force coq_fast_Zplus_assoc_reverse) ::
	loop (P_APP 2 :: p) l
  in
  loop p_init

(* Combination of scaling and re-association (OMEGA11) over a body. *)
let rec scalar_norm_add p_init =
  let rec loop p = function
    | [] -> [focused_simpl p_init]
    | _ :: l ->
	clever_rewrite p
	  [[P_APP 1; P_APP 1; P_APP 1; P_APP 1];
	   [P_APP 1; P_APP 1; P_APP 1; P_APP 2];
	   [P_APP 1; P_APP 1; P_APP 2]; [P_APP 2]; [P_APP 1; P_APP 2]]
	  (Lazy.force coq_fast_OMEGA11) :: loop (P_APP 2 :: p) l
  in
  loop p_init
(* Compute the opposite of a normalised formula, pushing the negation
   down to the coefficients (an atom becomes [atom * -1]). *)
let rec negate p = function
  | Oplus(t1,t2) ->
      let tac1,t1' = negate (P_APP 1 :: p) t1 and
	tac2,t2' = negate (P_APP 2 :: p) t2 in
      clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 1;P_APP 2]]
	(Lazy.force coq_fast_Zopp_plus_distr) ::
      (tac1 @ tac2),
      Oplus(t1',t2')
  | Oinv t ->
      [clever_rewrite p [[P_APP 1;P_APP 1]] (Lazy.force coq_fast_Zopp_involutive)], t
  | Otimes(t1,Oz x) ->
      [clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 1;P_APP 2]]
	 (Lazy.force coq_fast_Zopp_mult_distr_r);
       focused_simpl (P_APP 2 :: p)], Otimes(t1,Oz (neg x))
  | Otimes(t1,t2) -> error "Omega: Can't solve a goal with non-linear products"
  | (Oatom _ as t) ->
      let r = Otimes(t,Oz(negone)) in
      [clever_rewrite p [[P_APP 1]] (Lazy.force coq_fast_Zopp_eq_mult_neg_1)], r
  | Oz i -> [focused_simpl p],Oz(neg i)
  | Oufo c -> [], Oufo (mkApp (Lazy.force coq_Zopp, [| c |]))
(* Reify the Coq term [t] (at path [p] in the conclusion) into an
   [oformula] in sum normal form, emitting the rewrite tactics that
   perform the same normalisation in the goal.  Unrecognised subterms
   are abstracted into fresh atoms via [default] (whose [isnat] flag
   records that the term came under a Z.of_nat). *)
let rec transform p t =
  let default isnat t' =
    try
      let v,th,_ = find_constr t' in
      [clever_rewrite_base p (mkVar v) (mkVar th)], Oatom v
    with e when Errors.noncritical e ->
      let v = new_identifier_var ()
      and th = new_identifier () in
      hide_constr t' v th isnat;
      [clever_rewrite_base p (mkVar v) (mkVar th)], Oatom v
  in
  try match destructurate_term t with
    | Kapp(Zplus,[t1;t2]) ->
	let tac1,t1' = transform (P_APP 1 :: p) t1
	and tac2,t2' = transform (P_APP 2 :: p) t2 in
	let tac,t' = shuffle p (t1',t2') in
	tac1 @ tac2 @ tac, t'
    | Kapp(Zminus,[t1;t2]) ->
	(* a - b is unfolded into a + (-b) before being transformed *)
	let tac,t =
	  transform p
	    (mkApp (Lazy.force coq_Zplus,
		    [| t1; (mkApp (Lazy.force coq_Zopp, [| t2 |])) |])) in
	unfold sp_Zminus :: tac,t
    | Kapp(Zsucc,[t1]) ->
	let tac,t = transform p (mkApp (Lazy.force coq_Zplus,
					[| t1; mk_integer one |])) in
	unfold sp_Zsucc :: tac,t
    | Kapp(Zpred,[t1]) ->
	let tac,t = transform p (mkApp (Lazy.force coq_Zplus,
					[| t1; mk_integer negone |])) in
	unfold sp_Zpred :: tac,t
    | Kapp(Zmult,[t1;t2]) ->
	let tac1,t1' = transform (P_APP 1 :: p) t1
	and tac2,t2' = transform (P_APP 2 :: p) t2 in
	begin match t1',t2' with
	  | (_,Oz n) -> let tac,t' = scalar p n t1' in tac1 @ tac2 @ tac,t'
	  | (Oz n,_) ->
	      (* constant on the left: commute first, then scale *)
	      let sym =
		clever_rewrite p [[P_APP 1];[P_APP 2]]
		  (Lazy.force coq_fast_Zmult_comm) in
	      let tac,t' = scalar p n t2' in tac1 @ tac2 @ (sym :: tac),t'
	  | _ -> default false t
	end
    | Kapp((Zpos|Zneg|Z0),_) ->
	(try ([],Oz(recognize_number t))
	 with e when Errors.noncritical e -> default false t)
    | Kvar s -> [],Oatom s
    | Kapp(Zopp,[t]) ->
	let tac,t' = transform (P_APP 1 :: p) t in
	let tac',t'' = negate p t' in
	tac @ tac', t''
    | Kapp(Z_of_nat,[t']) -> default true t'
    | _ -> default false t
  with e when catchable_exception e -> default false t
(* Merge two monomials over the same atom into one, summing their
   coefficients with the appropriate Zred_factor lemma.  Debug-prints
   and fails on any other shape. *)
let shrink_pair p f1 f2 =
  match f1,f2 with
    | Oatom v,Oatom _ ->
	let r = Otimes(Oatom v,Oz two) in
	clever_rewrite p [[P_APP 1]] (Lazy.force coq_fast_Zred_factor1), r
    | Oatom v, Otimes(_,c2) ->
	let r = Otimes(Oatom v,Oplus(c2,Oz one)) in
	clever_rewrite p [[P_APP 1];[P_APP 2;P_APP 2]]
	  (Lazy.force coq_fast_Zred_factor2), r
    | Otimes (v1,c1),Oatom v ->
	let r = Otimes(Oatom v,Oplus(c1,Oz one)) in
	clever_rewrite p [[P_APP 2];[P_APP 1;P_APP 2]]
	  (Lazy.force coq_fast_Zred_factor3), r
    | Otimes (Oatom v,c1),Otimes (v2,c2) ->
	let r = Otimes(Oatom v,Oplus(c1,c2)) in
	clever_rewrite p
	  [[P_APP 1;P_APP 1];[P_APP 1;P_APP 2];[P_APP 2;P_APP 2]]
	  (Lazy.force coq_fast_Zred_factor4),r
    | t1,t2 ->
	begin
	  oprint t1; print_newline (); oprint t2; print_newline ();
	  flush Pervasives.stdout; error "shrink.1"
	end
(* Normalise a single monomial to the shape [atom * constant]: a bare
   atom gets coefficient 1, a sum of constant coefficients is folded. *)
let reduce_factor p = function
  | Oatom v ->
      let r = Otimes(Oatom v,Oz one) in
      [clever_rewrite p [[]] (Lazy.force coq_fast_Zred_factor0)],r
  | Otimes(Oatom v,Oz n) as f -> [],f
  | Otimes(Oatom v,c) ->
      let rec compute = function
	| Oz n -> n
	| Oplus(t1,t2) -> Bigint.add (compute t1) (compute t2)
	| _ -> error "condense.1"
      in
      [focused_simpl (P_APP 2 :: p)], Otimes(Oatom v,Oz(compute c))
  | t -> oprint t; error "reduce_factor.1"
(* Collapse a weight-sorted sum: adjacent monomials of equal weight are
   merged with [shrink_pair], the remaining factors are normalised with
   [reduce_factor], and a lone monomial is padded with "+ 0" so the
   result always ends in a constant. *)
let rec condense p = function
  | Oplus(f1,(Oplus(f2,r) as t)) ->
      if weight f1 = weight f2 then begin
	let shrink_tac,t = shrink_pair (P_APP 1 :: p) f1 f2 in
	let assoc_tac =
	  clever_rewrite p
	    [[P_APP 1];[P_APP 2;P_APP 1];[P_APP 2;P_APP 2]]
	    (Lazy.force coq_fast_Zplus_assoc) in
	let tac_list,t' = condense p (Oplus(t,r)) in
	(assoc_tac :: shrink_tac :: tac_list), t'
      end else begin
	let tac,f = reduce_factor (P_APP 1 :: p) f1 in
	let tac',t' = condense (P_APP 2 :: p) t in
	(tac @ tac'), Oplus(f,t')
      end
  | Oplus(f1,Oz n) ->
      let tac,f1' = reduce_factor (P_APP 1 :: p) f1 in tac,Oplus(f1',Oz n)
  | Oplus(f1,f2) ->
      if weight f1 = weight f2 then begin
	let tac_shrink,t = shrink_pair p f1 f2 in
	let tac,t' = condense p t in
	tac_shrink :: tac,t'
      end else begin
	let tac,f = reduce_factor (P_APP 1 :: p) f1 in
	let tac',t' = condense (P_APP 2 :: p) f2 in
	(tac @ tac'),Oplus(f,t')
      end
  | Oz _ as t -> [],t
  | t ->
      (* lone monomial: append "+ 0" (Zred_factor6) to reach the
	 canonical "sum ending in a constant" shape *)
      let tac,t' = reduce_factor p t in
      let final = Oplus(t',Oz zero) in
      let tac' = clever_rewrite p [[]] (Lazy.force coq_fast_Zred_factor6) in
      tac @ [tac'], final
(* [clear_zero p t] removes every [v * 0] summand from the sum [t],
   returning the rewriting tactics and the cleaned term. *)
let rec clear_zero p = function
  | Oplus(Otimes(Oatom v,Oz n),r) when n =? zero ->
      let tac =
        clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 2]]
          (Lazy.force coq_fast_Zred_factor5) in
      let tac',t = clear_zero p r in
      tac :: tac',t
  | Oplus(f,r) ->
      let tac,t = clear_zero (P_APP 2 :: p) r in tac,Oplus(f,t)
  | t -> [],t
(* [replay_history tactic_normalisation] replays a trace of the Omega
   decision procedure as a Coq tactic.  Each recorded [action] is justified
   by the matching OmegaLemmas theorem, and the produced equations are
   renormalised with the reflection tactics defined above.
   [tactic_normalisation] maps the tag of each original hypothesis to the
   tactic that puts it in normal form. *)
let replay_history tactic_normalisation =
  let aux = id_of_string "auxiliary" in
  let aux1 = id_of_string "auxiliary_1" in
  let aux2 = id_of_string "auxiliary_2" in
  let izero = mk_integer zero in
  let rec loop t =
    match t with
      | HYP e :: l ->
          (* apply the recorded normalisation tactic for hypothesis [e] *)
          begin
            try
              tclTHEN
                (List.assoc (hyp_of_tag e.id) tactic_normalisation)
                (loop l)
            with Not_found -> loop l end
      | NEGATE_CONTRADICT (e2,e1,b) :: l ->
          (* [e1] and [e2] are directly contradictory (OMEGA17) *)
          let eq1 = decompile e1
          and eq2 = decompile e2 in
          let id1 = hyp_of_tag e1.id
          and id2 = hyp_of_tag e2.id in
          let k = if b then negone else one in
          let p_initial = [P_APP 1;P_TYPE] in
          let tac= shuffle_mult_right p_initial e1.body k e2.body in
          tclTHENLIST [
            (generalize_tac
               [mkApp (Lazy.force coq_OMEGA17, [|
                  val_of eq1;
                  val_of eq2;
                  mk_integer k;
                  mkVar id1; mkVar id2 |])]);
            (mk_then tac);
            (intros_using [aux]);
            (resolve_id aux);
            reflexivity
          ]
      | CONTRADICTION (e1,e2) :: l ->
          (* the sum of inequalities [e1] and [e2] is absurd (OMEGA2) *)
          let eq1 = decompile e1
          and eq2 = decompile e2 in
          let p_initial = [P_APP 2;P_TYPE] in
          let tac = shuffle_cancel p_initial e1.body in
          let solve_le =
            let not_sup_sup = mkApp (build_coq_eq (), [|
                                Lazy.force coq_comparison;
                                Lazy.force coq_Gt;
                                Lazy.force coq_Gt |])
            in
            tclTHENS
              (tclTHENLIST [
                 (unfold sp_Zle);
                 (simpl_in_concl);
                 intro;
                 (absurd not_sup_sup) ])
              [ assumption ; reflexivity ]
          in
          let theorem =
            mkApp (Lazy.force coq_OMEGA2, [|
              val_of eq1; val_of eq2;
              mkVar (hyp_of_tag e1.id);
              mkVar (hyp_of_tag e2.id) |])
          in
          tclTHEN (tclTHEN (generalize_tac [theorem]) (mk_then tac)) (solve_le)
      | DIVIDE_AND_APPROX (e1,e2,k,d) :: l ->
          (* divide inequality [e1] by [k], dropping remainder [d]
             (Zmult_le_approx) *)
          let id = hyp_of_tag e1.id in
          let eq1 = val_of(decompile e1)
          and eq2 = val_of(decompile e2) in
          let kk = mk_integer k
          and dd = mk_integer d in
          let rhs = mk_plus (mk_times eq2 kk) dd in
          let state_eg = mk_eq eq1 rhs in
          let tac = scalar_norm_add [P_APP 3] e2.body in
          tclTHENS
            (cut state_eg)
            [ tclTHENS
                (tclTHENLIST [
                   (intros_using [aux]);
                   (generalize_tac
                      [mkApp (Lazy.force coq_OMEGA1,
                              [| eq1; rhs; mkVar aux; mkVar id |])]);
                   (clear [aux;id]);
                   (intros_using [id]);
                   (cut (mk_gt kk dd)) ])
                [ tclTHENS
                    (cut (mk_gt kk izero))
                    [ tclTHENLIST [
                        (intros_using [aux1; aux2]);
                        (generalize_tac
                           [mkApp (Lazy.force coq_Zmult_le_approx,
                                   [| kk;eq2;dd;mkVar aux1;mkVar aux2; mkVar id |])]);
                        (clear [aux1;aux2;id]);
                        (intros_using [id]);
                        (loop l) ];
                      tclTHENLIST [
                        (unfold sp_Zgt);
                        (simpl_in_concl);
                        reflexivity ] ];
                  tclTHENLIST [ (unfold sp_Zgt); simpl_in_concl; reflexivity ]
                ];
              tclTHEN (mk_then tac) reflexivity ]
      | NOT_EXACT_DIVIDE (e1,k) :: l ->
          (* the equation [e1] cannot be exactly divided by [k] (OMEGA4) *)
          let c = floor_div e1.constant k in
          let d = Bigint.sub e1.constant (Bigint.mult c k) in
          let e2 = {id=e1.id; kind=EQUA;constant = c;
                    body = map_eq_linear (fun c -> c / k) e1.body } in
          let eq2 = val_of(decompile e2) in
          let kk = mk_integer k
          and dd = mk_integer d in
          let tac = scalar_norm_add [P_APP 2] e2.body in
          tclTHENS
            (cut (mk_gt dd izero))
            [ tclTHENS (cut (mk_gt kk dd))
                [tclTHENLIST [
                   (intros_using [aux2;aux1]);
                   (generalize_tac
                      [mkApp (Lazy.force coq_OMEGA4,
                              [| dd;kk;eq2;mkVar aux1; mkVar aux2 |])]);
                   (clear [aux1;aux2]);
                   (unfold sp_not);
                   (intros_using [aux]);
                   (resolve_id aux);
                   (mk_then tac);
                   assumption ] ;
                 tclTHENLIST [
                   (unfold sp_Zgt);
                   simpl_in_concl;
                   reflexivity ] ];
              tclTHENLIST [
                (unfold sp_Zgt);
                simpl_in_concl;
                reflexivity ] ]
      | EXACT_DIVIDE (e1,k) :: l ->
          (* divide [e1] exactly by [k] (OMEGA18 for disequations,
             OMEGA3 otherwise) *)
          let id = hyp_of_tag e1.id in
          let e2 = map_eq_afine (fun c -> c / k) e1 in
          let eq1 = val_of(decompile e1)
          and eq2 = val_of(decompile e2) in
          let kk = mk_integer k in
          let state_eq = mk_eq eq1 (mk_times eq2 kk) in
          if e1.kind = DISE then
            let tac = scalar_norm [P_APP 3] e2.body in
            tclTHENS
              (cut state_eq)
              [tclTHENLIST [
                 (intros_using [aux1]);
                 (generalize_tac
                    [mkApp (Lazy.force coq_OMEGA18,
                            [| eq1;eq2;kk;mkVar aux1; mkVar id |])]);
                 (clear [aux1;id]);
                 (intros_using [id]);
                 (loop l) ];
               tclTHEN (mk_then tac) reflexivity ]
          else
            let tac = scalar_norm [P_APP 3] e2.body in
            tclTHENS (cut state_eq)
              [
                tclTHENS
                  (cut (mk_gt kk izero))
                  [tclTHENLIST [
                     (intros_using [aux2;aux1]);
                     (generalize_tac
                        [mkApp (Lazy.force coq_OMEGA3,
                                [| eq1; eq2; kk; mkVar aux2; mkVar aux1;mkVar id|])]);
                     (clear [aux1;aux2;id]);
                     (intros_using [id]);
                     (loop l) ];
                   tclTHENLIST [
                     (unfold sp_Zgt);
                     simpl_in_concl;
                     reflexivity ] ];
                tclTHEN (mk_then tac) reflexivity ]
      | (MERGE_EQ(e3,e1,e2)) :: l ->
          (* two opposite inequalities merge into equation [e3] (OMEGA8) *)
          let id = new_identifier () in
          tag_hypothesis id e3;
          let id1 = hyp_of_tag e1.id
          and id2 = hyp_of_tag e2 in
          let eq1 = val_of(decompile e1)
          and eq2 = val_of (decompile (negate_eq e1)) in
          let tac =
            clever_rewrite [P_APP 3] [[P_APP 1]]
              (Lazy.force coq_fast_Zopp_eq_mult_neg_1) ::
            scalar_norm [P_APP 3] e1.body
          in
          tclTHENS
            (cut (mk_eq eq1 (mk_inv eq2)))
            [tclTHENLIST [
               (intros_using [aux]);
               (generalize_tac [mkApp (Lazy.force coq_OMEGA8,
                  [| eq1;eq2;mkVar id1;mkVar id2; mkVar aux|])]);
               (clear [id1;id2;aux]);
               (intros_using [id]);
               (loop l) ];
             tclTHEN (mk_then tac) reflexivity]
      | STATE {st_new_eq=e;st_def=def;st_orig=orig;st_coef=m;st_var=v} :: l ->
          (* introduce a fresh variable [v] defined by [def] and rewrite
             [orig] accordingly (OMEGA9) *)
          let id = new_identifier ()
          and id2 = hyp_of_tag orig.id in
          tag_hypothesis id e.id;
          let eq1 = val_of(decompile def)
          and eq2 = val_of(decompile orig) in
          let vid = unintern_id v in
          let theorem =
            mkApp (build_coq_ex (), [|
              Lazy.force coq_Z;
              mkLambda
                (Name vid,
                 Lazy.force coq_Z,
                 mk_eq (mkRel 1) eq1) |])
          in
          let mm = mk_integer m in
          let p_initial = [P_APP 2;P_TYPE] in
          let tac =
            clever_rewrite (P_APP 1 :: P_APP 1 :: P_APP 2 :: p_initial)
              [[P_APP 1]] (Lazy.force coq_fast_Zopp_eq_mult_neg_1) ::
            shuffle_mult_right p_initial
              orig.body m ({c= negone;v= v}::def.body) in
          tclTHENS
            (cut theorem)
            [tclTHENLIST [
               (intros_using [aux]);
               (elim_id aux);
               (clear [aux]);
               (intros_using [vid; aux]);
               (generalize_tac
                  [mkApp (Lazy.force coq_OMEGA9,
                          [| mkVar vid;eq2;eq1;mm; mkVar id2;mkVar aux |])]);
               (mk_then tac);
               (clear [aux]);
               (intros_using [id]);
               (loop l) ];
             tclTHEN (exists_tac eq1) reflexivity ]
      | SPLIT_INEQ(e,(e1,act1),(e2,act2)) :: l ->
          (* case split a disequation into two strict inequalities and
             replay each branch (OMEGA19) *)
          let id1 = new_identifier ()
          and id2 = new_identifier () in
          tag_hypothesis id1 e1; tag_hypothesis id2 e2;
          let id = hyp_of_tag e.id in
          let tac1 = norm_add [P_APP 2;P_TYPE] e.body in
          let tac2 = scalar_norm_add [P_APP 2;P_TYPE] e.body in
          let eq = val_of(decompile e) in
          tclTHENS
            (simplest_elim (applist (Lazy.force coq_OMEGA19, [eq; mkVar id])))
            [tclTHENLIST [ (mk_then tac1); (intros_using [id1]); (loop act1) ];
             tclTHENLIST [ (mk_then tac2); (intros_using [id2]); (loop act2) ]]
      | SUM(e3,(k1,e1),(k2,e2)) :: l ->
          (* the linear combination k1*e1 + k2*e2 yields [e3]
             (OMEGA5/6/20 when k1 = 1 and e2 is an equation, OMEGA7
             otherwise) *)
          let id = new_identifier () in
          tag_hypothesis id e3;
          let id1 = hyp_of_tag e1.id
          and id2 = hyp_of_tag e2.id in
          let eq1 = val_of(decompile e1)
          and eq2 = val_of(decompile e2) in
          if k1 =? one & e2.kind = EQUA then
            let tac_thm =
              match e1.kind with
                | EQUA -> Lazy.force coq_OMEGA5
                | INEQ -> Lazy.force coq_OMEGA6
                | DISE -> Lazy.force coq_OMEGA20
            in
            let kk = mk_integer k2 in
            let p_initial =
              if e1.kind=DISE then [P_APP 1; P_TYPE] else [P_APP 2; P_TYPE] in
            let tac = shuffle_mult_right p_initial e1.body k2 e2.body in
            tclTHENLIST [
              (generalize_tac
                 [mkApp (tac_thm, [| eq1; eq2; kk; mkVar id1; mkVar id2 |])]);
              (mk_then tac);
              (intros_using [id]);
              (loop l)
            ]
          else
            let kk1 = mk_integer k1
            and kk2 = mk_integer k2 in
            let p_initial = [P_APP 2;P_TYPE] in
            let tac= shuffle_mult p_initial k1 e1.body k2 e2.body in
            tclTHENS (cut (mk_gt kk1 izero))
              [tclTHENS
                 (cut (mk_gt kk2 izero))
                 [tclTHENLIST [
                    (intros_using [aux2;aux1]);
                    (generalize_tac
                       [mkApp (Lazy.force coq_OMEGA7, [|
                          eq1;eq2;kk1;kk2;
                          mkVar aux1;mkVar aux2;
                          mkVar id1;mkVar id2 |])]);
                    (clear [aux1;aux2]);
                    (mk_then tac);
                    (intros_using [id]);
                    (loop l) ];
                  tclTHENLIST [
                    (unfold sp_Zgt);
                    simpl_in_concl;
                    reflexivity ] ];
               tclTHENLIST [
                 (unfold sp_Zgt);
                 simpl_in_concl;
                 reflexivity ] ]
      | CONSTANT_NOT_NUL(e,k) :: l ->
          (* a non-null constant equated to zero: discriminate *)
          tclTHEN (generalize_tac [mkVar (hyp_of_tag e)]) Equality.discrConcl
      | CONSTANT_NUL(e) :: l ->
          (* a null constant asserted non-null: reflexivity refutes it *)
          tclTHEN (resolve_id (hyp_of_tag e)) reflexivity
      | CONSTANT_NEG(e,k) :: l ->
          (* a negative constant asserted non-negative *)
          tclTHENLIST [
            (generalize_tac [mkVar (hyp_of_tag e)]);
            (unfold sp_Zle);
            simpl_in_concl;
            (unfold sp_not);
            (intros_using [aux]);
            (resolve_id aux);
            reflexivity
          ]
      | _ -> tclIDTAC
  in
  loop
(* Put the term at position [p_initial] in canonical form: linearise it,
   merge the factors of each variable, then erase null coefficients.
   Returns the concatenated tactic sequence and the final term. *)
let normalize p_initial t =
  let tac_transform, t_linear = transform p_initial t in
  let tac_condense, t_merged = condense p_initial t_linear in
  let tac_clear, t_final = clear_zero p_initial t_merged in
  List.concat [tac_transform; tac_condense; tac_clear], t_final
(* [normalize_equation id flag theorem pos t t1 t2 (tactic,defs)] rewrites
   hypothesis [id] (a comparison between [t1] and [t2]) into the canonical
   "expression compared to 0" shape using [theorem], normalises the
   expression [t] at argument position [pos], and records both the tactic
   and the reified equation ([compile]d with kind [flag]).  When no
   normalisation step is needed the accumulator is returned unchanged. *)
let normalize_equation id flag theorem pos t t1 t2 (tactic,defs) =
  let p_initial = [P_APP pos ;P_TYPE] in
  let (tac,t') = normalize p_initial t in
  let shift_left =
    tclTHEN
      (generalize_tac [mkApp (theorem, [| t1; t2; mkVar id |]) ])
      (tclTRY (clear [id]))
  in
  if tac <> [] then
    let id' = new_identifier () in
    ((id',(tclTHENLIST [ (shift_left); (mk_then tac); (intros_using [id']) ]))
       :: tactic,
     compile id' flag t' :: defs)
  else
    (tactic,defs)
(* [destructure_omega gl tac_def (id,c)] recognises hypothesis [id] with
   statement [c] as a Z comparison and folds its normalised form into the
   accumulator [tac_def]; internal "State" hypotheses and unrecognised
   statements are skipped. *)
let destructure_omega gl tac_def (id,c) =
  if atompart_of_id id = "State" then
    tac_def
  else
    try match destructurate_prop c with
      | Kapp(Eq,[typ;t1;t2])
          when destructurate_type (pf_nf gl typ) = Kapp(Z,[]) ->
          (* t1 = t2  -->  t1 + (- t2) = 0 *)
          let t = mk_plus t1 (mk_inv t2) in
          normalize_equation
            id EQUA (Lazy.force coq_Zegal_left) 2 t t1 t2 tac_def
      | Kapp(Zne,[t1;t2]) ->
          let t = mk_plus t1 (mk_inv t2) in
          normalize_equation
            id DISE (Lazy.force coq_Zne_left) 1 t t1 t2 tac_def
      | Kapp(Zle,[t1;t2]) ->
          let t = mk_plus t2 (mk_inv t1) in
          normalize_equation
            id INEQ (Lazy.force coq_Zle_left) 2 t t1 t2 tac_def
      | Kapp(Zlt,[t1;t2]) ->
          (* strict bound becomes a large bound with a -1 shift *)
          let t = mk_plus (mk_plus t2 (mk_integer negone)) (mk_inv t1) in
          normalize_equation
            id INEQ (Lazy.force coq_Zlt_left) 2 t t1 t2 tac_def
      | Kapp(Zge,[t1;t2]) ->
          let t = mk_plus t1 (mk_inv t2) in
          normalize_equation
            id INEQ (Lazy.force coq_Zge_left) 2 t t1 t2 tac_def
      | Kapp(Zgt,[t1;t2]) ->
          let t = mk_plus (mk_plus t1 (mk_integer negone)) (mk_inv t2) in
          normalize_equation
            id INEQ (Lazy.force coq_Zgt_left) 2 t t1 t2 tac_def
      | _ -> tac_def
    with e when catchable_exception e -> tac_def
(* [reintroduce id] clears hypothesis [id] (when possible) and
   reintroduces it from the goal under the same name. *)
let reintroduce id =
  (* [id] cannot be cleared if dependent: protect it by a try *)
  tclTHEN (tclTRY (clear [id])) (intro_using id)
(* [coq_omega] is the core entry point on Z goals: it collects and
   normalises all Z hypotheses, introduces the abstracted subterms stored
   in the constr tables, hands the resulting system to the Omega engine,
   and replays the returned refutation trace as a proof. *)
let coq_omega gl =
  clear_tables ();
  let tactic_normalisation, system =
    List.fold_left (destructure_omega gl) ([],[]) (pf_hyps_types gl) in
  let prelude,sys =
    List.fold_left
      (fun (tac,sys) (t,(v,th,b)) ->
         if b then
           (* subterm injected from nat: also record its positivity
              constraint as an extra inequality *)
           let id = new_identifier () in
           let i = new_id () in
           tag_hypothesis id i;
           (tclTHENLIST [
              (simplest_elim (applist (Lazy.force coq_intro_Z, [t])));
              (intros_using [v; id]);
              (elim_id id);
              (clear [id]);
              (intros_using [th;id]);
              tac ]),
           {kind = INEQ;
            body = [{v=intern_id v; c=one}];
            constant = zero; id = i} :: sys
         else
           (tclTHENLIST [
              (simplest_elim (applist (Lazy.force coq_new_var, [t])));
              (intros_using [v;th]);
              tac ]),
           sys)
      (tclIDTAC,[]) (dump_tables ())
  in
  let system = system @ sys in
  if !display_system_flag then display_system display_var system;
  if !old_style_flag then begin
    (* historical mode: run the solver, then replay the dependency-pruned
       history *)
    try
      let _ = simplify (new_id,new_var_num,display_var) false system in
      tclIDTAC gl
    with UNSOLVABLE ->
      let _,path = depend [] [] (history ()) in
      if !display_action_flag then display_action display_var path;
      (tclTHEN prelude (replay_history tactic_normalisation path)) gl
  end else begin
    try
      let path = simplify_strong (new_id,new_var_num,display_var) system in
      if !display_action_flag then display_action display_var path;
      (tclTHEN prelude (replay_history tactic_normalisation path)) gl
    with NO_CONTRADICTION -> error "Omega can't solve this system"
  end

(* Wrap with the (currently inert) solver timer. *)
let coq_omega = solver_time coq_omega
(* [nat_inject] rewrites every arithmetic hypothesis on [nat] into its [Z]
   counterpart, pushing [Z.of_nat] through [plus], [mult], [minus], [S],
   [pred] and numerals with the [inj_*] lemmas.  [explore] rewrites inside
   one term (at path [p]); [loop] walks the hypothesis list. *)
let nat_inject gl =
  let rec explore p t =
    try match destructurate_term t with
      | Kapp(Plus,[t1;t2]) ->
          tclTHENLIST [
            (clever_rewrite_gen p (mk_plus (mk_inj t1) (mk_inj t2))
               ((Lazy.force coq_inj_plus),[t1;t2]));
            (explore (P_APP 1 :: p) t1);
            (explore (P_APP 2 :: p) t2)
          ]
      | Kapp(Mult,[t1;t2]) ->
          tclTHENLIST [
            (clever_rewrite_gen p (mk_times (mk_inj t1) (mk_inj t2))
               ((Lazy.force coq_inj_mult),[t1;t2]));
            (explore (P_APP 1 :: p) t1);
            (explore (P_APP 2 :: p) t2)
          ]
      | Kapp(Minus,[t1;t2]) ->
          (* nat subtraction is truncated: case split on [t2 <= t1] *)
          let id = new_identifier () in
          tclTHENS
            (tclTHEN
               (simplest_elim (applist (Lazy.force coq_le_gt_dec, [t2;t1])))
               (intros_using [id]))
            [
              tclTHENLIST [
                (clever_rewrite_gen p
                   (mk_minus (mk_inj t1) (mk_inj t2))
                   ((Lazy.force coq_inj_minus1),[t1;t2;mkVar id]));
                (loop [id,mkApp (Lazy.force coq_le, [| t2;t1 |])]);
                (explore (P_APP 1 :: p) t1);
                (explore (P_APP 2 :: p) t2) ];
              (tclTHEN
                 (clever_rewrite_gen p (mk_integer zero)
                    ((Lazy.force coq_inj_minus2),[t1;t2;mkVar id]))
                 (loop [id,mkApp (Lazy.force coq_gt, [| t2;t1 |])]))
            ]
      | Kapp(S,[t']) ->
          (* a literal numeral is simplified in place; otherwise push the
             injection through each [S] with [inj_S] *)
          let rec is_number t =
            try match destructurate_term t with
                Kapp(S,[t]) -> is_number t
              | Kapp(O,[]) -> true
              | _ -> false
            with e when catchable_exception e -> false
          in
          let rec loop p t =
            try match destructurate_term t with
                Kapp(S,[t]) ->
                  (tclTHEN
                     (clever_rewrite_gen p
                        (mkApp (Lazy.force coq_Zsucc, [| mk_inj t |]))
                        ((Lazy.force coq_inj_S),[t]))
                     (loop (P_APP 1 :: p) t))
              | _ -> explore p t
            with e when catchable_exception e -> explore p t
          in
          if is_number t' then focused_simpl p else loop p t
      | Kapp(Pred,[t]) ->
          (* pred t = t - 1, then handled by the Minus machinery *)
          let t_minus_one =
            mkApp (Lazy.force coq_minus, [| t;
              mkApp (Lazy.force coq_S, [| Lazy.force coq_O |]) |]) in
          tclTHEN
            (clever_rewrite_gen_nat (P_APP 1 :: p) t_minus_one
               ((Lazy.force coq_pred_of_minus),[t]))
            (explore p t_minus_one)
      | Kapp(O,[]) -> focused_simpl p
      | _ -> tclIDTAC
    with e when catchable_exception e -> tclIDTAC
  and loop = function
    | [] -> tclIDTAC
    | (i,t)::lit ->
        (* translate each nat comparison hypothesis with its [inj_*] lemma *)
        begin try match destructurate_prop t with
            Kapp(Le,[t1;t2]) ->
              tclTHENLIST [
                (generalize_tac
                   [mkApp (Lazy.force coq_inj_le, [| t1;t2;mkVar i |]) ]);
                (explore [P_APP 1; P_TYPE] t1);
                (explore [P_APP 2; P_TYPE] t2);
                (reintroduce i);
                (loop lit)
              ]
          | Kapp(Lt,[t1;t2]) ->
              tclTHENLIST [
                (generalize_tac
                   [mkApp (Lazy.force coq_inj_lt, [| t1;t2;mkVar i |]) ]);
                (explore [P_APP 1; P_TYPE] t1);
                (explore [P_APP 2; P_TYPE] t2);
                (reintroduce i);
                (loop lit)
              ]
          | Kapp(Ge,[t1;t2]) ->
              tclTHENLIST [
                (generalize_tac
                   [mkApp (Lazy.force coq_inj_ge, [| t1;t2;mkVar i |]) ]);
                (explore [P_APP 1; P_TYPE] t1);
                (explore [P_APP 2; P_TYPE] t2);
                (reintroduce i);
                (loop lit)
              ]
          | Kapp(Gt,[t1;t2]) ->
              tclTHENLIST [
                (generalize_tac
                   [mkApp (Lazy.force coq_inj_gt, [| t1;t2;mkVar i |]) ]);
                (explore [P_APP 1; P_TYPE] t1);
                (explore [P_APP 2; P_TYPE] t2);
                (reintroduce i);
                (loop lit)
              ]
          | Kapp(Neq,[t1;t2]) ->
              tclTHENLIST [
                (generalize_tac
                   [mkApp (Lazy.force coq_inj_neq, [| t1;t2;mkVar i |]) ]);
                (explore [P_APP 1; P_TYPE] t1);
                (explore [P_APP 2; P_TYPE] t2);
                (reintroduce i);
                (loop lit)
              ]
          | Kapp(Eq,[typ;t1;t2]) ->
              if pf_conv_x gl typ (Lazy.force coq_nat) then
                tclTHENLIST [
                  (generalize_tac
                     [mkApp (Lazy.force coq_inj_eq, [| t1;t2;mkVar i |]) ]);
                  (explore [P_APP 2; P_TYPE] t1);
                  (explore [P_APP 3; P_TYPE] t2);
                  (reintroduce i);
                  (loop lit)
                ]
              else loop lit
          | _ -> loop lit
        with e when catchable_exception e -> loop lit end
  in
  loop (List.rev (pf_hyps_types gl)) gl
(* Decidability lemma associated with each supported binary comparison;
   raises [Not_found] for operators without one. *)
let dec_binop op =
  match op with
  | Le -> coq_dec_le
  | Lt -> coq_dec_lt
  | Ge -> coq_dec_ge
  | Gt -> coq_dec_gt
  | Zne -> coq_dec_Zne
  | Zle -> coq_dec_Zle
  | Zlt -> coq_dec_Zlt
  | Zge -> coq_dec_Zge
  | Zgt -> coq_dec_Zgt
  | _ -> raise Not_found
(* Lemma pushing a negation through each supported binary comparison;
   raises [Not_found] for operators without one. *)
let not_binop op =
  match op with
  | Le -> coq_not_le
  | Lt -> coq_not_lt
  | Ge -> coq_not_ge
  | Gt -> coq_not_gt
  | Zne -> coq_not_Zne
  | Zle -> coq_Znot_le_gt
  | Zlt -> coq_Znot_lt_ge
  | Zge -> coq_Znot_ge_lt
  | Zgt -> coq_Znot_gt_le
  | _ -> raise Not_found
(** A decidability check: for some [t], could we build a term
    of type [decidable t] (i.e. [t \/ ~t])? Otherwise, we raise
    [Undecidable]. Note that a successful check implies that
    [t] has type Prop. *)
(* Raised when no decidability proof can be built for a proposition. *)
exception Undecidable

(* [decidability gl t] builds a proof term of [decidable t] ([t \/ ~t]) by
   structural recursion on [t], or raises [Undecidable]. *)
let rec decidability gl t =
  match destructurate_prop t with
    | Kapp(Or,[t1;t2]) ->
        mkApp (Lazy.force coq_dec_or, [| t1; t2;
          decidability gl t1; decidability gl t2 |])
    | Kapp(And,[t1;t2]) ->
        mkApp (Lazy.force coq_dec_and, [| t1; t2;
          decidability gl t1; decidability gl t2 |])
    | Kapp(Iff,[t1;t2]) ->
        mkApp (Lazy.force coq_dec_iff, [| t1; t2;
          decidability gl t1; decidability gl t2 |])
    | Kimp(t1,t2) ->
        (* This is the only situation where it's not obvious that [t]
           is in Prop. The recursive call on [t2] will ensure that. *)
        mkApp (Lazy.force coq_dec_imp,
               [| t1; t2; decidability gl t1; decidability gl t2 |])
    | Kapp(Not,[t1]) ->
        mkApp (Lazy.force coq_dec_not, [| t1; decidability gl t1 |])
    | Kapp(Eq,[typ;t1;t2]) ->
        begin match destructurate_type (pf_nf gl typ) with
          | Kapp(Z,[]) -> mkApp (Lazy.force coq_dec_eq, [| t1;t2 |])
          | Kapp(Nat,[]) -> mkApp (Lazy.force coq_dec_eq_nat, [| t1;t2 |])
          | _ -> raise Undecidable
        end
    | Kapp(op,[t1;t2]) ->
        (try mkApp (Lazy.force (dec_binop op), [| t1; t2 |])
         with Not_found -> raise Undecidable)
    | Kapp(False,[]) -> Lazy.force coq_dec_False
    | Kapp(True,[]) -> Lazy.force coq_dec_True
    | _ -> raise Undecidable
(* [onClearedName id tac] tries to clear hypothesis [id], introduces a
   fresh hypothesis under a (possibly renamed) variant of [id], and runs
   [tac] on the name actually used. *)
let onClearedName id tac =
  (* We cannot ensure that hyps can be cleared (because of dependencies),
     so renaming may be necessary *)
  tclTHEN
    (tclTRY (clear [id]))
    (fun gl ->
       let id = fresh_id [] id gl in
       tclTHEN (introduction id) (tac id) gl)
(* Like [onClearedName], but introduces two fresh hypotheses (suffixes
   "_left"/"_right") as produced by and/iff elimination, and passes both
   names to [tac]. *)
let onClearedName2 id tac =
  tclTHEN
    (tclTRY (clear [id]))
    (fun gl ->
       let id1 = fresh_id [] (add_suffix id "_left") gl in
       let id2 = fresh_id [] (add_suffix id "_right") gl in
       tclTHENLIST [ introduction id1; introduction id2; tac id1 id2 ] gl)
(* [destructure_hyps] decomposes the logical connectives occurring in the
   hypotheses (conjunction, disjunction, implication, negation, iff and
   negated (in)equalities) until only atomic arithmetic facts remain, then
   injects nat facts into Z and runs the core [coq_omega] tactic. *)
let destructure_hyps gl =
  let rec loop = function
    | [] -> (tclTHEN nat_inject coq_omega)
    | (i,body,t)::lit ->
        begin try match destructurate_prop t with
          | Kapp(False,[]) -> elim_id i
          | Kapp((Zle|Zge|Zgt|Zlt|Zne),[t1;t2]) -> loop lit
          | Kapp(Or,[t1;t2]) ->
              (tclTHENS
                 (elim_id i)
                 [ onClearedName i (fun i -> (loop ((i,None,t1)::lit)));
                   onClearedName i (fun i -> (loop ((i,None,t2)::lit))) ])
          | Kapp(And,[t1;t2]) ->
              tclTHEN
                (elim_id i)
                (onClearedName2 i (fun i1 i2 ->
                   loop ((i1,None,t1)::(i2,None,t2)::lit)))
          | Kapp(Iff,[t1;t2]) ->
              (* split the iff into its two implications *)
              tclTHEN
                (elim_id i)
                (onClearedName2 i (fun i1 i2 ->
                   loop ((i1,None,mkArrow t1 t2)::(i2,None,mkArrow t2 t1)::lit)))
          | Kimp(t1,t2) ->
              (* t1 and t2 might be in Type rather than Prop.
                 For t1, the decidability check will ensure being Prop. *)
              if is_Prop (pf_type_of gl t2)
              then
                let d1 = decidability gl t1 in
                tclTHENLIST [
                  (generalize_tac [mkApp (Lazy.force coq_imp_simp,
                                          [| t1; t2; d1; mkVar i|])]);
                  (onClearedName i (fun i ->
                     (loop ((i,None,mk_or (mk_not t1) t2)::lit))))
                ]
              else
                loop lit
          | Kapp(Not,[t]) ->
              (* push the negation inwards, one connective at a time *)
              begin match destructurate_prop t with
                  Kapp(Or,[t1;t2]) ->
                    tclTHENLIST [
                      (generalize_tac
                         [mkApp (Lazy.force coq_not_or,[| t1; t2; mkVar i |])]);
                      (onClearedName i (fun i ->
                         (loop ((i,None,mk_and (mk_not t1) (mk_not t2)):: lit))))
                    ]
                | Kapp(And,[t1;t2]) ->
                    let d1 = decidability gl t1 in
                    tclTHENLIST [
                      (generalize_tac
                         [mkApp (Lazy.force coq_not_and,
                                 [| t1; t2; d1; mkVar i |])]);
                      (onClearedName i (fun i ->
                         (loop ((i,None,mk_or (mk_not t1) (mk_not t2))::lit))))
                    ]
                | Kapp(Iff,[t1;t2]) ->
                    let d1 = decidability gl t1 in
                    let d2 = decidability gl t2 in
                    tclTHENLIST [
                      (generalize_tac
                         [mkApp (Lazy.force coq_not_iff,
                                 [| t1; t2; d1; d2; mkVar i |])]);
                      (onClearedName i (fun i ->
                         (loop ((i,None,
                                 mk_or (mk_and t1 (mk_not t2))
                                   (mk_and (mk_not t1) t2))::lit))))
                    ]
                | Kimp(t1,t2) ->
                    (* t2 must be in Prop otherwise ~(t1->t2) wouldn't be ok.
                       For t1, being decidable implies being Prop. *)
                    let d1 = decidability gl t1 in
                    tclTHENLIST [
                      (generalize_tac
                         [mkApp (Lazy.force coq_not_imp,
                                 [| t1; t2; d1; mkVar i |])]);
                      (onClearedName i (fun i ->
                         (loop ((i,None,mk_and t1 (mk_not t2)) :: lit))))
                    ]
                | Kapp(Not,[t]) ->
                    let d = decidability gl t in
                    tclTHENLIST [
                      (generalize_tac
                         [mkApp (Lazy.force coq_not_not, [| t; d; mkVar i |])]);
                      (onClearedName i (fun i -> (loop ((i,None,t)::lit))))
                    ]
                | Kapp(op,[t1;t2]) ->
                    (try
                       let thm = not_binop op in
                       tclTHENLIST [
                         (generalize_tac
                            [mkApp (Lazy.force thm, [| t1;t2;mkVar i|])]);
                         (onClearedName i (fun _ -> loop lit))
                       ]
                     with Not_found -> loop lit)
                | Kapp(Eq,[typ;t1;t2]) ->
                    (* rephrase ~(t1 = t2) as neq / Zne *)
                    if !old_style_flag then begin
                      match destructurate_type (pf_nf gl typ) with
                        | Kapp(Nat,_) ->
                            tclTHENLIST [
                              (simplest_elim
                                 (mkApp
                                    (Lazy.force coq_not_eq, [|t1;t2;mkVar i|])));
                              (onClearedName i (fun _ -> loop lit))
                            ]
                        | Kapp(Z,_) ->
                            tclTHENLIST [
                              (simplest_elim
                                 (mkApp
                                    (Lazy.force coq_not_Zeq, [|t1;t2;mkVar i|])));
                              (onClearedName i (fun _ -> loop lit))
                            ]
                        | _ -> loop lit
                    end else begin
                      match destructurate_type (pf_nf gl typ) with
                        | Kapp(Nat,_) ->
                            (tclTHEN
                               (convert_hyp_no_check
                                  (i,body,
                                   (mkApp (Lazy.force coq_neq, [| t1;t2|]))))
                               (loop lit))
                        | Kapp(Z,_) ->
                            (tclTHEN
                               (convert_hyp_no_check
                                  (i,body,
                                   (mkApp (Lazy.force coq_Zne, [| t1;t2|]))))
                               (loop lit))
                        | _ -> loop lit
                    end
                | _ -> loop lit
              end
          | _ -> loop lit
        with
          | Undecidable -> loop lit
          | e when catchable_exception e -> loop lit
        end
  in
  loop (pf_hyps gl) gl
(* [destructure_goal] peels the conclusion: negations are unfolded,
   implications introduced, and a decidable conclusion is replaced by the
   absurdity of its negation, before [destructure_hyps] takes over. *)
let destructure_goal gl =
  let concl = pf_concl gl in
  let rec loop t =
    match destructurate_prop t with
      | Kapp(Not,[t]) ->
          (tclTHEN
             (tclTHEN (unfold sp_not) intro)
             destructure_hyps)
      | Kimp(a,b) -> (tclTHEN intro (loop b))
      | Kapp(False,[]) -> destructure_hyps
      | _ ->
          (* reason by contradiction when the conclusion is decidable,
             otherwise fall back to proving False *)
          let goal_tac =
            try
              let dec = decidability gl t in
              tclTHEN
                (Tactics.refine
                   (mkApp (Lazy.force coq_dec_not_not, [| t; dec; mkNewMeta () |])))
                intro
            with Undecidable -> Tactics.elim_type (build_coq_False ())
          in
          tclTHEN goal_tac destructure_hyps
  in
  (loop concl) gl

(* Wrap with the (currently inert) global timer. *)
let destructure_goal = all_time (destructure_goal)
(* Entry point registered for the [omega] tactic: checks that the Omega
   library is loaded, then runs the whole procedure on the goal. *)
let omega_solver gl =
  Coqlib.check_required_library ["Coq";"omega";"Omega"];
  let result = destructure_goal gl in
  (* if !display_time_flag then begin text_time ();
     flush Pervasives.stdout end; *)
  result
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
************************************************************************
************************************************************************
uses build_coq_and, build_coq_not, build_coq_or, build_coq_ex
For unfold
Abstraction and product
Case
[id] cannot be cleared if dependent: protect it by a try
so renaming may be necessary
t1 and t2 might be in Type rather than Prop.
For t1, the decidability check will ensure being Prop.
if !display_time_flag then begin text_time ();
flush Pervasives.stdout end; | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Omega : a solver of quantifier - free problems in Presburger Arithmetic
( CNET , Lannion , France )
open Util
open Pp
open Reduction
open Proof_type
open Names
open Nameops
open Term
open Declarations
open Environ
open Sign
open Inductive
open Tacticals
open Tacmach
open Evar_refiner
open Tactics
open Clenv
open Logic
open Libnames
open Nametab
open Contradiction
(* Instantiate the generic Omega decision procedure on arbitrary-precision
   integers. *)
module OmegaSolver = Omega.MakeOmegaSolver (Bigint)
open OmegaSolver
(* Added by J.C.F., 09/03/98 *)

(* [elim_id id] / [resolve_id id]: eliminate / apply the global constant
   named [id] in goal [gl]. *)
let elim_id id gl = simplest_elim (pf_global gl id) gl
let resolve_id id gl = apply (pf_global gl id) gl

(* Timing instrumentation is disabled: [timing] just runs [f]. *)
let timing timer_name f arg = f arg

(* User-visible flags, set through the vernacular options declared below. *)
let display_time_flag = ref false
let display_system_flag = ref false
let display_action_flag = ref false
let old_style_flag = ref false

(* Accessors used to plug the flags into the option machinery. *)
let read f () = !f
let write f x = f:=x
open Goptions
(* Declare the "Omega System", "Omega Action" and "Omega OldStyle"
   vernacular options, wired to the flags above. *)
let _ =
  declare_bool_option
    { optsync = false;
      optdepr = false;
      optname = "Omega system time displaying flag";
      optkey = ["Omega";"System"];
      optread = read display_system_flag;
      optwrite = write display_system_flag }

let _ =
  declare_bool_option
    { optsync = false;
      optdepr = false;
      optname = "Omega action display flag";
      optkey = ["Omega";"Action"];
      optread = read display_action_flag;
      optwrite = write display_action_flag }

let _ =
  declare_bool_option
    { optsync = false;
      optdepr = false;
      optname = "Omega old style flag";
      optkey = ["Omega";"OldStyle"];
      optread = read old_style_flag;
      optwrite = write old_style_flag }
(* Named timers for the main phases (no-ops while [timing] is disabled). *)
let all_time = timing "Omega "
let solver_time = timing "Solver "
let exact_time = timing "Rewrites "
let elim_time = timing "Elim "
let simpl_time = timing "Simpl "
let generalize_time = timing "Generalize"
(* Fresh-name generators: hypothesis names ("Omega<n>"), internal "State"
   markers, Z variables ("Zvar<n>"), equation tags, internal variable
   numbers (starting above 1000 to avoid clashes with user variables), and
   "WW<n>" witness identifiers. *)
let new_identifier =
  let cpt = ref 0 in
  (fun () -> let s = "Omega" ^ string_of_int !cpt in incr cpt; id_of_string s)

let new_identifier_state =
  let cpt = ref 0 in
  (fun () -> let s = make_ident "State" (Some !cpt) in incr cpt; s)

let new_identifier_var =
  let cpt = ref 0 in
  (fun () -> let s = "Zvar" ^ string_of_int !cpt in incr cpt; id_of_string s)

let new_id =
  let cpt = ref 0 in fun () -> incr cpt; !cpt

let new_var_num =
  let cpt = ref 1000 in (fun () -> incr cpt; !cpt)

let new_var =
  let cpt = ref 0 in fun () -> incr cpt; Nameops.make_ident "WW" (Some !cpt)
(* Printable name of internal variable number [i]. *)
let display_var i = Printf.sprintf "X%d" i

(* Bijection between Coq identifiers and internal variable numbers; a
   fresh "WW" identifier is invented when an unknown number is queried. *)
let intern_id,unintern_id =
  let cpt = ref 0 in
  let table = Hashtbl.create 7 and co_table = Hashtbl.create 7 in
  (fun (name : identifier) ->
     try Hashtbl.find table name with Not_found ->
       let idx = !cpt in
       Hashtbl.add table name idx;
       Hashtbl.add co_table idx name;
       incr cpt; idx),
  (fun idx ->
     try Hashtbl.find co_table idx with Not_found ->
       let v = new_var () in
       Hashtbl.add table v idx; Hashtbl.add co_table idx v; v)
(* Shorthand tactics used throughout the plugin. *)
let mk_then = tclTHENLIST
let exists_tac c = constructor_tac false (Some 1) 1 (Glob_term.ImplicitBindings [c])
let generalize_tac t = generalize_time (generalize t)
let elim t = elim_time (simplest_elim t)
let exact t = exact_time (Tactics.refine t)
let unfold s = Tactics.unfold_in_concl [Termops.all_occurrences, Lazy.force s]
(* [rev_assoc k l] returns the first key of [l] whose associated value
   equals [k] (the reverse lookup of [List.assoc]); raises [Not_found]
   when no pair matches. *)
let rev_assoc k l =
  let rec search = function
    | [] -> raise Not_found
    | (value, key) :: rest -> if key = k then value else search rest
  in
  search l
(* Mutable bijection between hypothesis identifiers and equation tags. *)
let tag_hypothesis,tag_of_hyp, hyp_of_tag =
  let l = ref ([]:(identifier * int) list) in
  (fun h id -> l := (h,id):: !l),
  (fun h -> try List.assoc h !l with Not_found -> failwith "tag_hypothesis"),
  (fun h -> try rev_assoc h !l with Not_found -> failwith "tag_hypothesis")

(* Table of abstracted subterms: each term is associated with the variable
   naming it, the equation hypothesis, and whether it was injected from a
   nat (hence nonnegative). *)
let hide_constr,find_constr,clear_tables,dump_tables =
  let l = ref ([]:(constr * (identifier * identifier * bool)) list) in
  (fun h id eg b -> l := (h,(id,eg,b)):: !l),
  (fun h -> try list_assoc_f eq_constr h !l with Not_found -> failwith "find_contr"),
  (fun () -> l := []),
  (fun () -> !l)
(* Lazy evaluation is used for Coq constants, because this code
   is evaluated before the compiled modules are loaded.
   To use the constant Zplus, one must type "Lazy.force coq_Zplus"
   This is the right way to access Coq constants in tactics ML code *)
open Coqlib
(* Module paths searched when resolving Coq constants by name. *)
let logic_dir = ["Coq";"Logic";"Decidable"]
let coq_modules =
  init_modules @arith_modules @ [logic_dir] @ zarith_base_modules
    @ [["Coq"; "omega"; "OmegaLemmas"]]

(* Constant resolvers over increasingly specific module sets. *)
let init_constant = gen_constant_in_modules "Omega" init_modules
let constant = gen_constant_in_modules "Omega" coq_modules
let z_constant = gen_constant_in_modules "Omega" [["Coq";"ZArith"]]
let zbase_constant =
  gen_constant_in_modules "Omega" [["Coq";"ZArith";"BinInt"]]
Zarith
let coq_xH = lazy (constant "xH")
let coq_xO = lazy (constant "xO")
let coq_xI = lazy (constant "xI")
let coq_Z0 = lazy (constant "Z0")
let coq_Zpos = lazy (constant "Zpos")
let coq_Zneg = lazy (constant "Zneg")
let coq_Z = lazy (constant "Z")
let coq_comparison = lazy (constant "comparison")
let coq_Gt = lazy (constant "Gt")
let coq_Zplus = lazy (zbase_constant "Z.add")
let coq_Zmult = lazy (zbase_constant "Z.mul")
let coq_Zopp = lazy (zbase_constant "Z.opp")
let coq_Zminus = lazy (zbase_constant "Z.sub")
let coq_Zsucc = lazy (zbase_constant "Z.succ")
let coq_Zpred = lazy (zbase_constant "Z.pred")
let coq_Zgt = lazy (zbase_constant "Z.gt")
let coq_Zle = lazy (zbase_constant "Z.le")
let coq_Z_of_nat = lazy (zbase_constant "Z.of_nat")
let coq_inj_plus = lazy (z_constant "Nat2Z.inj_add")
let coq_inj_mult = lazy (z_constant "Nat2Z.inj_mul")
let coq_inj_minus1 = lazy (z_constant "Nat2Z.inj_sub")
let coq_inj_minus2 = lazy (constant "inj_minus2")
let coq_inj_S = lazy (z_constant "Nat2Z.inj_succ")
let coq_inj_le = lazy (z_constant "Znat.inj_le")
let coq_inj_lt = lazy (z_constant "Znat.inj_lt")
let coq_inj_ge = lazy (z_constant "Znat.inj_ge")
let coq_inj_gt = lazy (z_constant "Znat.inj_gt")
let coq_inj_neq = lazy (z_constant "inj_neq")
let coq_inj_eq = lazy (z_constant "inj_eq")
let coq_fast_Zplus_assoc_reverse = lazy (constant "fast_Zplus_assoc_reverse")
let coq_fast_Zplus_assoc = lazy (constant "fast_Zplus_assoc")
let coq_fast_Zmult_assoc_reverse = lazy (constant "fast_Zmult_assoc_reverse")
let coq_fast_Zplus_permute = lazy (constant "fast_Zplus_permute")
let coq_fast_Zplus_comm = lazy (constant "fast_Zplus_comm")
let coq_fast_Zmult_comm = lazy (constant "fast_Zmult_comm")
let coq_Zmult_le_approx = lazy (constant "Zmult_le_approx")
let coq_OMEGA1 = lazy (constant "OMEGA1")
let coq_OMEGA2 = lazy (constant "OMEGA2")
let coq_OMEGA3 = lazy (constant "OMEGA3")
let coq_OMEGA4 = lazy (constant "OMEGA4")
let coq_OMEGA5 = lazy (constant "OMEGA5")
let coq_OMEGA6 = lazy (constant "OMEGA6")
let coq_OMEGA7 = lazy (constant "OMEGA7")
let coq_OMEGA8 = lazy (constant "OMEGA8")
let coq_OMEGA9 = lazy (constant "OMEGA9")
let coq_fast_OMEGA10 = lazy (constant "fast_OMEGA10")
let coq_fast_OMEGA11 = lazy (constant "fast_OMEGA11")
let coq_fast_OMEGA12 = lazy (constant "fast_OMEGA12")
let coq_fast_OMEGA13 = lazy (constant "fast_OMEGA13")
(* Lazy references to Coq constants from the omega support library
   (OmegaLemmas / ZArith); each resolves to a kernel term on first
   [Lazy.force].  The [fast_*] entries are rewrite lemmas consumed by
   [clever_rewrite] below. *)
let coq_fast_OMEGA14 = lazy (constant "fast_OMEGA14")
let coq_fast_OMEGA15 = lazy (constant "fast_OMEGA15")
let coq_fast_OMEGA16 = lazy (constant "fast_OMEGA16")
let coq_OMEGA17 = lazy (constant "OMEGA17")
let coq_OMEGA18 = lazy (constant "OMEGA18")
let coq_OMEGA19 = lazy (constant "OMEGA19")
let coq_OMEGA20 = lazy (constant "OMEGA20")
(* Factorization lemmas used when condensing sums of monomials. *)
let coq_fast_Zred_factor0 = lazy (constant "fast_Zred_factor0")
let coq_fast_Zred_factor1 = lazy (constant "fast_Zred_factor1")
let coq_fast_Zred_factor2 = lazy (constant "fast_Zred_factor2")
let coq_fast_Zred_factor3 = lazy (constant "fast_Zred_factor3")
let coq_fast_Zred_factor4 = lazy (constant "fast_Zred_factor4")
let coq_fast_Zred_factor5 = lazy (constant "fast_Zred_factor5")
let coq_fast_Zred_factor6 = lazy (constant "fast_Zred_factor6")
(* Ring-rearrangement lemmas (distribution, opposite, involution). *)
let coq_fast_Zmult_plus_distr_l = lazy (constant "fast_Zmult_plus_distr_l")
let coq_fast_Zmult_opp_comm = lazy (constant "fast_Zmult_opp_comm")
let coq_fast_Zopp_plus_distr = lazy (constant "fast_Zopp_plus_distr")
let coq_fast_Zopp_mult_distr_r = lazy (constant "fast_Zopp_mult_distr_r")
let coq_fast_Zopp_eq_mult_neg_1 = lazy (constant "fast_Zopp_eq_mult_neg_1")
let coq_fast_Zopp_involutive = lazy (constant "fast_Zopp_involutive")
(* Lemmas moving a relation to the canonical "expr op 0" form. *)
let coq_Zegal_left = lazy (constant "Zegal_left")
let coq_Zne_left = lazy (constant "Zne_left")
let coq_Zlt_left = lazy (constant "Zlt_left")
let coq_Zge_left = lazy (constant "Zge_left")
let coq_Zgt_left = lazy (constant "Zgt_left")
let coq_Zle_left = lazy (constant "Zle_left")
let coq_new_var = lazy (constant "new_var")
let coq_intro_Z = lazy (constant "intro_Z")
(* Decidability lemmas for the Z relations. *)
let coq_dec_eq = lazy (zbase_constant "Z.eq_decidable")
let coq_dec_Zne = lazy (constant "dec_Zne")
let coq_dec_Zle = lazy (zbase_constant "Z.le_decidable")
let coq_dec_Zlt = lazy (zbase_constant "Z.lt_decidable")
let coq_dec_Zgt = lazy (constant "dec_Zgt")
let coq_dec_Zge = lazy (constant "dec_Zge")
(* Negation-elimination lemmas for the Z relations. *)
let coq_not_Zeq = lazy (constant "not_Zeq")
let coq_not_Zne = lazy (constant "not_Zne")
let coq_Znot_le_gt = lazy (constant "Znot_le_gt")
let coq_Znot_lt_ge = lazy (constant "Znot_lt_ge")
let coq_Znot_ge_lt = lazy (constant "Znot_ge_lt")
let coq_Znot_gt_le = lazy (constant "Znot_gt_le")
(* The relations on Z that the tactic recognizes. *)
let coq_neq = lazy (constant "neq")
let coq_Zne = lazy (constant "Zne")
let coq_Zle = lazy (zbase_constant "Z.le")
let coq_Zgt = lazy (zbase_constant "Z.gt")
let coq_Zge = lazy (zbase_constant "Z.ge")
let coq_Zlt = lazy (zbase_constant "Z.lt")
(* Peano/Datatypes *)
(* Operations, relations and constructors over [nat], from the Init prelude. *)
let coq_le = lazy (init_constant "le")
let coq_lt = lazy (init_constant "lt")
let coq_ge = lazy (init_constant "ge")
let coq_gt = lazy (init_constant "gt")
let coq_minus = lazy (init_constant "minus")
let coq_plus = lazy (init_constant "plus")
let coq_mult = lazy (init_constant "mult")
let coq_pred = lazy (init_constant "pred")
let coq_nat = lazy (init_constant "nat")
let coq_S = lazy (init_constant "S")
let coq_O = lazy (init_constant "O")
(* Compare_dec / Peano_dec / Minus *)
(* Decidability and negation-elimination lemmas for the [nat] comparisons. *)
let coq_pred_of_minus = lazy (constant "pred_of_minus")
let coq_le_gt_dec = lazy (constant "le_gt_dec")
let coq_dec_eq_nat = lazy (constant "dec_eq_nat")
let coq_dec_le = lazy (constant "dec_le")
let coq_dec_lt = lazy (constant "dec_lt")
let coq_dec_ge = lazy (constant "dec_ge")
let coq_dec_gt = lazy (constant "dec_gt")
let coq_not_eq = lazy (constant "not_eq")
let coq_not_le = lazy (constant "not_le")
let coq_not_lt = lazy (constant "not_lt")
let coq_not_ge = lazy (constant "not_ge")
let coq_not_gt = lazy (constant "not_gt")
(* Logic / Decidable *)
(* Propositional connectives and decidability lemmas used when the goal is
   decomposed into atomic (in)equations. *)
let coq_eq_ind_r = lazy (constant "eq_ind_r")
let coq_dec_or = lazy (constant "dec_or")
let coq_dec_and = lazy (constant "dec_and")
let coq_dec_imp = lazy (constant "dec_imp")
let coq_dec_iff = lazy (constant "dec_iff")
let coq_dec_not = lazy (constant "dec_not")
let coq_dec_False = lazy (constant "dec_False")
let coq_dec_not_not = lazy (constant "dec_not_not")
let coq_dec_True = lazy (constant "dec_True")
let coq_not_or = lazy (constant "not_or")
let coq_not_and = lazy (constant "not_and")
let coq_not_imp = lazy (constant "not_imp")
let coq_not_iff = lazy (constant "not_iff")
let coq_not_not = lazy (constant "not_not")
let coq_imp_simp = lazy (constant "imp_simp")
let coq_iff = lazy (constant "iff")
open Closure

(* [evaluable_ref_of_constr s c] checks that the lazily-resolved constant [c]
   is an evaluable (unfoldable) constant in the current global environment
   and returns it as an [EvalConstRef]; [s] only appears in the anomaly
   message when the check fails. *)
let evaluable_ref_of_constr s c = match kind_of_term (Lazy.force c) with
  | Const kn when Tacred.is_evaluable (Global.env()) (EvalConstRef kn) ->
      EvalConstRef kn
  | _ -> anomaly ("Coq_omega: "^s^" is not an evaluable constant")

(* References in a form usable by the [unfold] tactic. *)
let sp_Zsucc = lazy (evaluable_ref_of_constr "Z.succ" coq_Zsucc)
let sp_Zpred = lazy (evaluable_ref_of_constr "Z.pred" coq_Zpred)
let sp_Zminus = lazy (evaluable_ref_of_constr "Z.sub" coq_Zminus)
let sp_Zle = lazy (evaluable_ref_of_constr "Z.le" coq_Zle)
let sp_Zgt = lazy (evaluable_ref_of_constr "Z.gt" coq_Zgt)
let sp_Zge = lazy (evaluable_ref_of_constr "Z.ge" coq_Zge)
let sp_Zlt = lazy (evaluable_ref_of_constr "Z.lt" coq_Zlt)
let sp_not = lazy (evaluable_ref_of_constr "not" (lazy (build_coq_not ())))
(* Smart constructors building Coq terms for Z arithmetic, Z relations and
   propositional connectives. *)
let mk_var v = mkVar (id_of_string v)
let mk_plus t1 t2 = mkApp (Lazy.force coq_Zplus, [| t1; t2 |])
let mk_times t1 t2 = mkApp (Lazy.force coq_Zmult, [| t1; t2 |])
let mk_minus t1 t2 = mkApp (Lazy.force coq_Zminus, [| t1;t2 |])
(* Propositional equality specialized at type Z. *)
let mk_eq t1 t2 = mkApp (build_coq_eq (), [| Lazy.force coq_Z; t1; t2 |])
let mk_le t1 t2 = mkApp (Lazy.force coq_Zle, [| t1; t2 |])
let mk_gt t1 t2 = mkApp (Lazy.force coq_Zgt, [| t1; t2 |])
let mk_inv t = mkApp (Lazy.force coq_Zopp, [| t |])
let mk_and t1 t2 = mkApp (build_coq_and (), [| t1; t2 |])
let mk_or t1 t2 = mkApp (build_coq_or (), [| t1; t2 |])
let mk_not t = mkApp (build_coq_not (), [| t |])
(* Equality at type [comparison]. *)
let mk_eq_rel t1 t2 = mkApp (build_coq_eq (),
			      [| Lazy.force coq_comparison; t1; t2 |])
(* Injection of a nat term into Z. *)
let mk_inj t = mkApp (Lazy.force coq_Z_of_nat, [| t |])
(* [mk_integer n] builds the Coq literal of type Z denoting the bigint [n]:
   [Z0] for zero, otherwise [Zpos p] or [Zneg p] where [p] is the binary
   positive encoding (xH / xO / xI) of [abs n]. *)
let mk_integer n =
  let rec positive m =
    if m =? one then Lazy.force coq_xH
    else
      let digit =
        if m mod two =? zero then Lazy.force coq_xO else Lazy.force coq_xI
      in
      mkApp (digit, [| positive (m / two) |])
  in
  if n =? zero then Lazy.force coq_Z0
  else
    let sign = if n >? zero then Lazy.force coq_Zpos else Lazy.force coq_Zneg in
    mkApp (sign, [| positive (abs n) |])
(* Head symbols recognized by the [destructurate_*] classifiers below. *)
type omega_constant =
  | Zplus | Zmult | Zminus | Zsucc | Zopp | Zpred
  | Plus | Mult | Minus | Pred | S | O
  | Zpos | Zneg | Z0 | Z_of_nat
  | Eq | Neq
  | Zne | Zle | Zlt | Zge | Zgt
  | Z | Nat
  | And | Or | False | True | Not | Iff
  | Le | Lt | Ge | Gt
  | Other of string
type omega_proposition =
  | Keq of constr * constr * constr
  | Kn

(* Result of classifying a term by its head. *)
type result =
  | Kvar of identifier           (* a goal variable *)
  | Kapp of omega_constant * constr list  (* recognized head + its arguments *)
  | Kimp of constr * constr      (* anonymous product, i.e. an implication *)
  | Kufo                         (* anything else: opaque for omega *)
(* Nota: Kimp corresponds to a binder (Prod), but hopefully we won't
   have to bother with term lifting: Kimp will correspond to anonymous
   product, for which (Rel 1) doesn't occur in the right term.
   Moreover, we'll work on fully introduced goals, hence no Rel's in
   the term parts that we manipulate, but rather Var's.
   Said otherwise: all constr manipulated here are closed *)
(* Classify a proposition by its head symbol: (in)equalities on Z and nat,
   propositional connectives, or an opaque head.  Applications headed by an
   unknown global constant/constructor/inductive are reported as
   [Kapp (Other path, args)] so the caller can keep them abstract; a named
   product (a real quantifier) is rejected. *)
let destructurate_prop t =
  let c, args = decompose_app t in
  match kind_of_term c, args with
    | _, [_;_;_] when eq_constr c (build_coq_eq ()) -> Kapp (Eq,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_neq) -> Kapp (Neq,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zne) -> Kapp (Zne,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zle) -> Kapp (Zle,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zlt) -> Kapp (Zlt,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zge) -> Kapp (Zge,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zgt) -> Kapp (Zgt,args)
    | _, [_;_] when eq_constr c (build_coq_and ()) -> Kapp (And,args)
    | _, [_;_] when eq_constr c (build_coq_or ()) -> Kapp (Or,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_iff) -> Kapp (Iff, args)
    | _, [_] when eq_constr c (build_coq_not ()) -> Kapp (Not,args)
    | _, [] when eq_constr c (build_coq_False ()) -> Kapp (False,args)
    | _, [] when eq_constr c (build_coq_True ()) -> Kapp (True,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_le) -> Kapp (Le,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_lt) -> Kapp (Lt,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_ge) -> Kapp (Ge,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_gt) -> Kapp (Gt,args)
    | Const sp, args ->
	Kapp (Other (string_of_path (path_of_global (ConstRef sp))),args)
    | Construct csp , args ->
	Kapp (Other (string_of_path (path_of_global (ConstructRef csp))), args)
    | Ind isp, args ->
	Kapp (Other (string_of_path (path_of_global (IndRef isp))),args)
    | Var id,[] -> Kvar id
    | Prod (Anonymous,typ,body), [] -> Kimp(typ,body)
    | Prod (Name _,_,_),[] -> error "Omega: Not a quantifier-free goal"
    | _ -> Kufo
(* Classify a type: recognize the two arithmetic carriers [Z] and [nat];
   every other type is opaque ([Kufo]). *)
let destructurate_type t =
  let head, args = decompose_app t in
  if args = [] && eq_constr head (Lazy.force coq_Z) then Kapp (Z, args)
  else if args = [] && eq_constr head (Lazy.force coq_nat) then Kapp (Nat, args)
  else Kufo
(* Classify an arithmetic term by its head symbol (Z and nat operations,
   numeral constructors, the nat-to-Z injection, variables); anything else
   is opaque ([Kufo]).
   Bug fix: the [Zpos] and [Zneg] heads were reported swapped — a term
   headed by [coq_Zpos] yielded [Kapp (Zneg,_)] and vice versa.  They now
   map to their own constructors, consistently with [recognize_number] and
   the rest of the classifiers. *)
let destructurate_term t =
  let c, args = decompose_app t in
  match kind_of_term c, args with
    | _, [_;_] when eq_constr c (Lazy.force coq_Zplus) -> Kapp (Zplus,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zmult) -> Kapp (Zmult,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_Zminus) -> Kapp (Zminus,args)
    | _, [_] when eq_constr c (Lazy.force coq_Zsucc) -> Kapp (Zsucc,args)
    | _, [_] when eq_constr c (Lazy.force coq_Zpred) -> Kapp (Zpred,args)
    | _, [_] when eq_constr c (Lazy.force coq_Zopp) -> Kapp (Zopp,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_plus) -> Kapp (Plus,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_mult) -> Kapp (Mult,args)
    | _, [_;_] when eq_constr c (Lazy.force coq_minus) -> Kapp (Minus,args)
    | _, [_] when eq_constr c (Lazy.force coq_pred) -> Kapp (Pred,args)
    | _, [_] when eq_constr c (Lazy.force coq_S) -> Kapp (S,args)
    | _, [] when eq_constr c (Lazy.force coq_O) -> Kapp (O,args)
    | _, [_] when eq_constr c (Lazy.force coq_Zpos) -> Kapp (Zpos,args)
    | _, [_] when eq_constr c (Lazy.force coq_Zneg) -> Kapp (Zneg,args)
    | _, [] when eq_constr c (Lazy.force coq_Z0) -> Kapp (Z0,args)
    | _, [_] when eq_constr c (Lazy.force coq_Z_of_nat) -> Kapp (Z_of_nat,args)
    | Var id,[] -> Kvar id
    | _ -> Kufo
(* [recognize_number t] converts a closed Coq Z literal back into a bigint,
   decoding the binary positive (xI / xO / xH) under a Zpos / Zneg / Z0 head;
   raises [Failure "not a number"] on any other shape. *)
let recognize_number t =
  let rec positive p =
    match decompose_app p with
    | h, [d] when eq_constr h (Lazy.force coq_xI) -> one + two * positive d
    | h, [d] when eq_constr h (Lazy.force coq_xO) -> two * positive d
    | h, [] when eq_constr h (Lazy.force coq_xH) -> one
    | _ -> failwith "not a number"
  in
  let head, args = decompose_app t in
  match args with
  | [p] when eq_constr head (Lazy.force coq_Zpos) -> positive p
  | [p] when eq_constr head (Lazy.force coq_Zneg) -> neg (positive p)
  | [] when eq_constr head (Lazy.force coq_Z0) -> zero
  | _ -> failwith "not a number"
(* One step of a path designating a subterm of a constr; used by [context]
   and [occurence] to navigate to a rewrite position. *)
type constr_path =
  | P_APP of int       (* n-th argument of an application (1-based) *)
  | P_BODY             (* body of a Prod / Lambda / LetIn *)
  | P_TYPE             (* type annotation of a Prod / Lambda / LetIn *)
  | P_BRANCH of int    (* n-th branch of a Case (0-based) *)
  | P_ARITY            (* head of an application *)
  | P_ARG              (* first argument of an application *)
(* [context operation path t] rebuilds [t] with [operation i sub] applied to
   the subterm [sub] found at [path]; [i] counts the binders crossed so far
   (starting at 1) so [operation] can build a correctly-lifted replacement.
   Casts are traversed transparently and Fix nodes are crossed implicitly. *)
let context operation path (t : constr) =
  let rec loop i p0 t =
    match (p0,kind_of_term t) with
      | (p, Cast (c,k,t)) -> mkCast (loop i p c,k,t)
      | ([], _) -> operation i t
      | ((P_APP n :: p),  App (f,v)) ->
	  let v' = Array.copy v in
	  v'.(pred n) <- loop i p v'.(pred n); mkApp (f, v')
      | ((P_BRANCH n :: p), Case (ci,q,c,v)) ->
	  (* before: this used mkApp... anyway, P_BRANCH seems nowhere used *)
	  let v' = Array.copy v in
	  v'.(n) <- loop i p v'.(n); (mkCase (ci,q,c,v'))
      | ((P_ARITY :: p),  App (f,l)) ->
	  appvect (loop i p f,l)
      | ((P_ARG :: p),  App (f,v)) ->
	  let v' = Array.copy v in
	  v'.(0) <- loop i p v'.(0); mkApp (f,v')
      | (p, Fix ((_,n as ln),(tys,lna,v))) ->
	  (* descend into the recursive body; all [l] fixpoint binders count *)
	  let l = Array.length v in
	  let v' = Array.copy v in
	  v'.(n)<- loop (Pervasives.(+) i l) p v.(n); (mkFix (ln,(tys,lna,v')))
      | ((P_BODY :: p), Prod (n,t,c)) ->
	  (mkProd (n,t,loop (succ i) p c))
      | ((P_BODY :: p), Lambda (n,t,c)) ->
	  (mkLambda (n,t,loop (succ i) p c))
      | ((P_BODY :: p), LetIn (n,b,t,c)) ->
	  (mkLetIn (n,b,t,loop (succ i) p c))
      | ((P_TYPE :: p), Prod (n,t,c)) ->
	  (mkProd (n,loop i p t,c))
      | ((P_TYPE :: p), Lambda (n,t,c)) ->
	  (mkLambda (n,loop i p t,c))
      | ((P_TYPE :: p), LetIn (n,b,t,c)) ->
	  (mkLetIn (n,b,loop i p t,c))
      | (p, _) ->
	  ppnl (Printer.pr_lconstr t);
	  failwith ("abstract_path " ^ string_of_int(List.length p))
  in
  loop 1 path t
(* [occurence path t] returns the subterm of [t] found at [path] (read-only
   counterpart of [context]); fails if the path does not match [t]'s shape. *)
let occurence path (t : constr) =
  let rec loop p0 t = match (p0,kind_of_term t) with
    | (p, Cast (c,_,_)) -> loop p c
    | ([], _) -> t
    | ((P_APP n :: p),  App (f,v)) -> loop p v.(pred n)
    | ((P_BRANCH n :: p), Case (_,_,_,v)) -> loop p v.(n)
    | ((P_ARITY :: p),  App (f,_)) -> loop p f
    | ((P_ARG :: p),  App (f,v)) -> loop p v.(0)
    | (p, Fix((_,n) ,(_,_,v))) -> loop p v.(n)
    | ((P_BODY :: p), Prod (n,t,c)) -> loop p c
    | ((P_BODY :: p), Lambda (n,t,c)) -> loop p c
    | ((P_BODY :: p), LetIn (n,b,t,c)) -> loop p c
    | ((P_TYPE :: p), Prod (n,term,c)) -> loop p term
    | ((P_TYPE :: p), Lambda (n,term,c)) -> loop p term
    | ((P_TYPE :: p), LetIn (n,b,term,c)) -> loop p term
    | (p, _) ->
	ppnl (Printer.pr_lconstr t);
	failwith ("occurence " ^ string_of_int(List.length p))
  in
  loop path t
(* [abstract_path typ path t] returns the abstraction [fun x:typ => t'] where
   [t'] is [t] with the subterm at [path] replaced by the bound variable,
   together with the subterm that was there (captured via a reference filled
   by the [context] callback). *)
let abstract_path typ path t =
  let term_occur = ref (mkRel 0) in
  let abstract = context (fun i t -> term_occur:= t; mkRel i) path t in
  mkLambda (Name (id_of_string "x"), typ, abstract), !term_occur
(* Normalize ([pf_nf]) only the subterm of the conclusion located at [path]
   (the path is given innermost-first, hence the [List.rev]) and install the
   result as the new conclusion without re-typechecking it. *)
let focused_simpl path gl =
  let newc = context (fun i t -> pf_nf gl t) (List.rev path) (pf_concl gl) in
  convert_concl_no_check newc DEFAULTcast gl

(* Instrumented wrapper: account this tactic's running time to [simpl_time]. *)
let focused_simpl path = simpl_time (focused_simpl path)
(* Internal abstract syntax for the arithmetic expressions manipulated by
   the normalization phase. *)
type oformula =
  | Oplus of oformula * oformula   (* sum *)
  | Oinv of oformula               (* opposite *)
  | Otimes of oformula * oformula  (* product *)
  | Oatom of identifier            (* abstracted goal variable *)
  | Oz of bigint                   (* integer literal *)
  | Oufo of constr                 (* opaque Coq term kept as-is *)
(* Debug printer for oformulas, fully parenthesized, written to stdout. *)
let rec oprint f =
  match f with
  | Oplus (l, r) -> print_binop "+" l r
  | Oinv t -> print_string "~"; oprint t
  | Otimes (l, r) -> print_binop "*" l r
  | Oatom s -> print_string (string_of_id s)
  | Oz i -> print_string (string_of_bigint i)
  | Oufo _ -> print_string "?"
(* Print "(l op r)". *)
and print_binop op l r =
  print_string "("; oprint l; print_string op; oprint r; print_string ")"
(* Ordering key of a monomial: the interned id of its head atom; constants
   and opaque terms weigh -1.  Sums are not monomials, hence the failure. *)
let rec weight f =
  match f with
  | Oatom id -> intern_id id
  | Oinv inner | Otimes (inner, _) -> weight inner
  | Oz _ | Oufo _ -> -1
  | Oplus _ -> failwith "weight"
(* Reify an oformula back into the Coq term it denotes, using the smart
   constructors defined above. *)
let rec val_of f =
  match f with
  | Oatom id -> mkVar id
  | Oz n -> mk_integer n
  | Oinv t -> mk_inv (val_of t)
  | Otimes (l, r) -> mk_times (val_of l) (val_of r)
  | Oplus (l, r) -> mk_plus (val_of l) (val_of r)
  | Oufo c -> c
(* [compile name kind f] converts a fully normalized oformula — a right-nested
   sum of [atom * constant] monomials terminated by an integer constant —
   into an omega equation record.  A fresh internal id is allocated and
   bound (via [tag_hypothesis]) to the hypothesis name [name]. *)
let compile name kind =
  let rec loop accu = function
    | Oplus(Otimes(Oatom v,Oz n),r) -> loop ({v=intern_id v; c=n} :: accu) r
    | Oz n ->
	let id = new_id () in
	tag_hypothesis name id;
	{kind = kind; body = List.rev accu; constant = n; id = id}
    | _ -> anomaly "compile_equation"
  in
  loop []
(* Inverse of [compile]: rebuild the oformula denoted by an omega equation,
   as the right-nested sum of its monomials ended by its constant. *)
let decompile af =
  List.fold_right
    (fun { v = var; c = coef } acc ->
       Oplus (Otimes (Oatom (unintern_id var), Oz coef), acc))
    af.body (Oz af.constant)
(* Fresh Coq metavariable, used as placeholder argument when calling [exact]. *)
let mkNewMeta () = mkMeta (Evarutil.new_meta())
(* [clever_rewrite_base_poly typ p result theorem] rewrites, in the goal, the
   subterm of type [typ] located at path [p] into [result], justified by
   [theorem : occ = result].  The proof term is built by hand (abstracting
   the occurrence and applying [eq_ind_r]) instead of calling the generic
   rewrite tactic, which is much faster. *)
let clever_rewrite_base_poly typ p result theorem gl =
  let full = pf_concl gl in
  let (abstracted,occ) = abstract_path typ (List.rev p) full in
  let t =
    applist
      (mkLambda
	 (Name (id_of_string "P"),
	  mkArrow typ mkProp,
	  mkLambda
	    (Name (id_of_string "H"),
	     applist (mkRel 1,[result]),
	     mkApp (Lazy.force coq_eq_ind_r,
		       [| typ; result; mkRel 2; mkRel 1; occ; theorem |]))),
       [abstracted])
  in
  exact (applist(t,[mkNewMeta()])) gl

(* Specializations at type Z and type nat. *)
let clever_rewrite_base p result theorem gl =
  clever_rewrite_base_poly (Lazy.force coq_Z) p result theorem gl

let clever_rewrite_base_nat p result theorem gl =
  clever_rewrite_base_poly (Lazy.force coq_nat) p result theorem gl

(* Same, with the justifying theorem given as head term + argument list. *)
let clever_rewrite_gen p result (t,args) =
  let theorem = applist(t, args) in
  clever_rewrite_base p result theorem

let clever_rewrite_gen_nat p result (t,args) =
  let theorem = applist(t, args) in
  clever_rewrite_base_nat p result theorem

(* [clever_rewrite p vpath t] rewrites at path [p] using lemma [t] applied to
   the subterms of the occurrence designated by the paths in [vpath]; the
   rewritten form is the lemma's conclusion instantiated on those subterms. *)
let clever_rewrite p vpath t gl =
  let full = pf_concl gl in
  let (abstracted,occ) = abstract_path (Lazy.force coq_Z) (List.rev p) full in
  let vargs = List.map (fun p -> occurence p occ) vpath in
  let t' = applist(t, (vargs @ [abstracted])) in
  exact (applist(t',[mkNewMeta()])) gl
(* [shuffle p (t1,t2)] merges two sums sorted by decreasing atom weight into
   one sorted sum, returning the list of rewrite tactics (associativity /
   commutativity / permutation lemmas, applied at path [p]) that perform the
   same rearrangement on the goal, together with the merged oformula. *)
let rec shuffle p (t1,t2) =
  match t1,t2 with
    | Oplus(l1,r1), Oplus(l2,r2) ->
	if weight l1 > weight l2 then
	  let (tac,t') = shuffle (P_APP 2 :: p) (r1,t2) in
	  (clever_rewrite p [[P_APP 1;P_APP 1];
			     [P_APP 1; P_APP 2];[P_APP 2]]
	     (Lazy.force coq_fast_Zplus_assoc_reverse)
	   :: tac,
	   Oplus(l1,t'))
	else
	  let (tac,t') = shuffle (P_APP 2 :: p) (t1,r2) in
	  (clever_rewrite p [[P_APP 1];[P_APP 2;P_APP 1];[P_APP 2;P_APP 2]]
	     (Lazy.force coq_fast_Zplus_permute)
	   :: tac,
	   Oplus(l2,t'))
    | Oplus(l1,r1), t2 ->
	if weight l1 > weight t2 then
          let (tac,t') = shuffle (P_APP 2 :: p) (r1,t2) in
          clever_rewrite p [[P_APP 1;P_APP 1]; [P_APP 1; P_APP 2];[P_APP 2]]
	    (Lazy.force coq_fast_Zplus_assoc_reverse)
          :: tac,
          Oplus(l1, t')
	else
          [clever_rewrite p [[P_APP 1];[P_APP 2]]
	     (Lazy.force coq_fast_Zplus_comm)],
          Oplus(t2,t1)
    | t1,Oplus(l2,r2) ->
	if weight l2 > weight t1 then
          let (tac,t') = shuffle (P_APP 2 :: p) (t1,r2) in
          clever_rewrite p [[P_APP 1];[P_APP 2;P_APP 1];[P_APP 2;P_APP 2]]
	    (Lazy.force coq_fast_Zplus_permute)
          :: tac,
          Oplus(l2,t')
	else [],Oplus(t1,t2)
    | Oz t1,Oz t2 ->
	(* two constants: fold them numerically and simplify in the goal *)
	[focused_simpl p], Oz(Bigint.add t1 t2)
    | t1,t2 ->
	if weight t1 < weight t2 then
	  [clever_rewrite p [[P_APP 1];[P_APP 2]]
	     (Lazy.force coq_fast_Zplus_comm)],
	  Oplus(t2,t1)
	else [],Oplus(t1,t2)
(* [shuffle_mult p_init k1 e1 k2 e2] builds the tactic list proving, on the
   goal at path [p_init], that [k1*e1 + k2*e2] equals its merged normal form
   (OMEGA10/11/12 lemmas), dropping monomials whose combined coefficient
   vanishes (Zred_factor5).  [e1]/[e2] are monomial lists sorted by
   decreasing variable index. *)
let rec shuffle_mult p_init k1 e1 k2 e2 =
  let rec loop p = function
    | (({c=c1;v=v1}::l1) as l1'),(({c=c2;v=v2}::l2) as l2') ->
	if v1 = v2 then
          let tac =
            clever_rewrite p [[P_APP 1; P_APP 1; P_APP 1; P_APP 1];
                              [P_APP 1; P_APP 1; P_APP 1; P_APP 2];
                              [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
                              [P_APP 1; P_APP 1; P_APP 2];
                              [P_APP 2; P_APP 1; P_APP 2];
                              [P_APP 1; P_APP 2];
                              [P_APP 2; P_APP 2]]
              (Lazy.force coq_fast_OMEGA10)
	  in
          if Bigint.add (Bigint.mult k1 c1) (Bigint.mult k2 c2) =? zero then
            let tac' =
              clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 2]]
		(Lazy.force coq_fast_Zred_factor5) in
            tac :: focused_simpl (P_APP 1::P_APP 2:: p) :: tac' ::
            loop p (l1,l2)
          else tac :: loop (P_APP 2 :: p) (l1,l2)
	else if v1 > v2 then
	  clever_rewrite p [[P_APP 1; P_APP 1; P_APP 1; P_APP 1];
			    [P_APP 1; P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 2];
			    [P_APP 1; P_APP 2]]
	    (Lazy.force coq_fast_OMEGA11) ::
	  loop (P_APP 2 :: p) (l1,l2')
	else
	  clever_rewrite p [[P_APP 2; P_APP 1; P_APP 1; P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 2];
			    [P_APP 2; P_APP 2]]
	    (Lazy.force coq_fast_OMEGA12) ::
	  loop (P_APP 2 :: p) (l1',l2)
    | ({c=c1;v=v1}::l1), [] ->
	  clever_rewrite p [[P_APP 1; P_APP 1; P_APP 1; P_APP 1];
			    [P_APP 1; P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 2];
			    [P_APP 1; P_APP 2]]
	    (Lazy.force coq_fast_OMEGA11) ::
	  loop (P_APP 2 :: p) (l1,[])
    | [],({c=c2;v=v2}::l2) ->
	  clever_rewrite p [[P_APP 2; P_APP 1; P_APP 1; P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 2];
			    [P_APP 2; P_APP 2]]
	    (Lazy.force coq_fast_OMEGA12) ::
	  loop (P_APP 2 :: p) ([],l2)
    | [],[] -> [focused_simpl p_init]
  in
  loop p_init (e1,e2)
(* Like [shuffle_mult] but with the left equation taken with coefficient 1:
   normalizes [e1 + k2*e2] (OMEGA15 for equal variables, plain associativity
   for the left-only case, OMEGA12 for the right-only case). *)
let rec shuffle_mult_right p_init e1 k2 e2 =
  let rec loop p = function
    | (({c=c1;v=v1}::l1) as l1'),(({c=c2;v=v2}::l2) as l2') ->
	if v1 = v2 then
          let tac =
            clever_rewrite p
              [[P_APP 1; P_APP 1; P_APP 1];
               [P_APP 1; P_APP 1; P_APP 2];
               [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
               [P_APP 1; P_APP 2];
               [P_APP 2; P_APP 1; P_APP 2];
               [P_APP 2; P_APP 2]]
              (Lazy.force coq_fast_OMEGA15)
	  in
          if Bigint.add c1 (Bigint.mult k2 c2) =? zero then
            let tac' =
              clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 2]]
		(Lazy.force coq_fast_Zred_factor5)
	    in
            tac :: focused_simpl (P_APP 1::P_APP 2:: p) :: tac' ::
            loop p (l1,l2)
          else tac :: loop (P_APP 2 :: p) (l1,l2)
	else if v1 > v2 then
	  clever_rewrite p [[P_APP 1;P_APP 1]; [P_APP 1; P_APP 2];[P_APP 2]]
	    (Lazy.force coq_fast_Zplus_assoc_reverse) ::
	  loop (P_APP 2 :: p) (l1,l2')
	else
	  clever_rewrite p [[P_APP 2; P_APP 1; P_APP 1; P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
			    [P_APP 1];
			    [P_APP 2; P_APP 1; P_APP 2];
			    [P_APP 2; P_APP 2]]
	    (Lazy.force coq_fast_OMEGA12) ::
	  loop (P_APP 2 :: p) (l1',l2)
    | ({c=c1;v=v1}::l1), [] ->
	clever_rewrite p [[P_APP 1;P_APP 1]; [P_APP 1; P_APP 2];[P_APP 2]]
	  (Lazy.force coq_fast_Zplus_assoc_reverse) ::
	loop (P_APP 2 :: p) (l1,[])
    | [],({c=c2;v=v2}::l2) ->
	clever_rewrite p [[P_APP 2; P_APP 1; P_APP 1; P_APP 1];
			  [P_APP 2; P_APP 1; P_APP 1; P_APP 2];
			  [P_APP 1];
			  [P_APP 2; P_APP 1; P_APP 2];
			  [P_APP 2; P_APP 2]]
	  (Lazy.force coq_fast_OMEGA12) ::
	loop (P_APP 2 :: p) ([],l2)
    | [],[] -> [focused_simpl p_init]
  in
  loop p_init (e1,e2)
(* Cancel each monomial of the body against its opposite (OMEGA13 when the
   coefficient is positive, OMEGA14 otherwise), then simplify the remaining
   constant at path [p]. *)
let rec shuffle_cancel p = function
  | [] -> [focused_simpl p]
  | ({c=c1}::l1) ->
      let tac =
	clever_rewrite p [[P_APP 1; P_APP 1; P_APP 1];[P_APP 1; P_APP 2];
			  [P_APP 2; P_APP 2];
			  [P_APP 1; P_APP 1; P_APP 2; P_APP 1]]
	  (if c1 >? zero then
	     (Lazy.force coq_fast_OMEGA13)
	   else
	     (Lazy.force coq_fast_OMEGA14))
      in
      tac :: shuffle_cancel p l1
(* [scalar p n t] multiplies the oformula [t] by the integer [n], producing
   the rewrite tactics that perform the distribution on the goal at path [p]
   and the resulting oformula.  Non-linear products are rejected. *)
let rec scalar p n = function
  | Oplus(t1,t2) ->
      let tac1,t1' = scalar (P_APP 1 :: p) n t1 and
	tac2,t2' = scalar (P_APP 2 :: p) n t2 in
      clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 1;P_APP 2];[P_APP 2]]
	(Lazy.force coq_fast_Zmult_plus_distr_l) ::
      (tac1 @ tac2), Oplus(t1',t2')
  | Oinv t ->
      [clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 2]]
	 (Lazy.force coq_fast_Zmult_opp_comm);
       focused_simpl (P_APP 2 :: p)], Otimes(t,Oz(neg n))
  | Otimes(t1,Oz x) ->
      [clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 1;P_APP 2];[P_APP 2]]
	 (Lazy.force coq_fast_Zmult_assoc_reverse);
       focused_simpl (P_APP 2 :: p)],
      Otimes(t1,Oz (n*x))
  | Otimes(t1,t2) -> error "Omega: Can't solve a goal with non-linear products"
  | (Oatom _ as t) -> [], Otimes(t,Oz n)
  | Oz i -> [focused_simpl p],Oz(n*i)
  | Oufo c -> [], Oufo (mkApp (Lazy.force coq_Zmult, [| mk_integer n; c |]))
(* Tactic generators normalizing a scaled sum of monomials in the goal,
   one rewrite per monomial, ending with a [focused_simpl] at [p_init]:
   - [scalar_norm]: distribute a scalar over every monomial (OMEGA16);
   - [norm_add]: re-associate a sum to the right (Zplus_assoc_reverse);
   - [scalar_norm_add]: both at once (OMEGA11). *)
let rec scalar_norm p_init =
  let rec loop p = function
    | [] -> [focused_simpl p_init]
    | (_::l) ->
	clever_rewrite p
	  [[P_APP 1; P_APP 1; P_APP 1];[P_APP 1; P_APP 1; P_APP 2];
	   [P_APP 1; P_APP 2];[P_APP 2]]
	  (Lazy.force coq_fast_OMEGA16) :: loop (P_APP 2 :: p) l
  in
  loop p_init

let rec norm_add p_init =
  let rec loop p = function
    | [] -> [focused_simpl p_init]
    | _:: l ->
	clever_rewrite p [[P_APP 1;P_APP 1]; [P_APP 1; P_APP 2];[P_APP 2]]
	  (Lazy.force coq_fast_Zplus_assoc_reverse) ::
	loop (P_APP 2 :: p) l
  in
  loop p_init

let rec scalar_norm_add p_init =
  let rec loop p = function
    | [] -> [focused_simpl p_init]
    | _ :: l ->
	clever_rewrite p
	  [[P_APP 1; P_APP 1; P_APP 1; P_APP 1];
	   [P_APP 1; P_APP 1; P_APP 1; P_APP 2];
	   [P_APP 1; P_APP 1; P_APP 2]; [P_APP 2]; [P_APP 1; P_APP 2]]
	  (Lazy.force coq_fast_OMEGA11) :: loop (P_APP 2 :: p) l
  in
  loop p_init
(* [negate p t] builds the opposite of oformula [t], returning the rewrite
   tactics pushing the opposite inwards on the goal at path [p] (distribution
   over +, involution, fold into the coefficient of a monomial) and the
   negated oformula.  Non-linear products are rejected. *)
let rec negate p = function
  | Oplus(t1,t2) ->
      let tac1,t1' = negate (P_APP 1 :: p) t1 and
	tac2,t2' = negate (P_APP 2 :: p) t2 in
      clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 1;P_APP 2]]
	(Lazy.force coq_fast_Zopp_plus_distr) ::
      (tac1 @ tac2),
      Oplus(t1',t2')
  | Oinv t ->
      [clever_rewrite p [[P_APP 1;P_APP 1]] (Lazy.force coq_fast_Zopp_involutive)], t
  | Otimes(t1,Oz x) ->
      [clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 1;P_APP 2]]
	 (Lazy.force coq_fast_Zopp_mult_distr_r);
       focused_simpl (P_APP 2 :: p)], Otimes(t1,Oz (neg x))
  | Otimes(t1,t2) -> error "Omega: Can't solve a goal with non-linear products"
  | (Oatom _ as t) ->
      (* -x is represented as x * (-1) *)
      let r = Otimes(t,Oz(negone)) in
      [clever_rewrite p [[P_APP 1]] (Lazy.force coq_fast_Zopp_eq_mult_neg_1)], r
  | Oz i -> [focused_simpl p],Oz(neg i)
  | Oufo c -> [], Oufo (mkApp (Lazy.force coq_Zopp, [| c |]))
(* [transform p t] translates the Coq term [t] (at path [p] in the goal) into
   an oformula, returning the rewrite tactics that put the goal into the
   corresponding shape.  Unknown subterms are abstracted into fresh atoms via
   [default] (reusing an existing abstraction when [find_constr] succeeds);
   Z.sub / Z.succ / Z.pred are unfolded into + / opp / literal forms. *)
let rec transform p t =
  let default isnat t' =
    try
      let v,th,_ = find_constr t' in
      [clever_rewrite_base p (mkVar v) (mkVar th)], Oatom v
    with e when Errors.noncritical e ->
      let v = new_identifier_var ()
      and th = new_identifier () in
      hide_constr t' v th isnat;
      [clever_rewrite_base p (mkVar v) (mkVar th)], Oatom v
  in
  try match destructurate_term t with
    | Kapp(Zplus,[t1;t2]) ->
	let tac1,t1' = transform (P_APP 1 :: p) t1
	and tac2,t2' = transform (P_APP 2 :: p) t2 in
	let tac,t' = shuffle p (t1',t2') in
	tac1 @ tac2 @ tac, t'
    | Kapp(Zminus,[t1;t2]) ->
	let tac,t =
	  transform p
	    (mkApp (Lazy.force coq_Zplus,
		     [| t1; (mkApp (Lazy.force coq_Zopp, [| t2 |])) |])) in
	unfold sp_Zminus :: tac,t
    | Kapp(Zsucc,[t1]) ->
	let tac,t = transform p (mkApp (Lazy.force coq_Zplus,
					 [| t1; mk_integer one |])) in
	unfold sp_Zsucc :: tac,t
    | Kapp(Zpred,[t1]) ->
	let tac,t = transform p (mkApp (Lazy.force coq_Zplus,
					 [| t1; mk_integer negone |])) in
	unfold sp_Zpred :: tac,t
    | Kapp(Zmult,[t1;t2]) ->
	let tac1,t1' = transform (P_APP 1 :: p) t1
	and tac2,t2' = transform (P_APP 2 :: p) t2 in
	begin match t1',t2' with
	  | (_,Oz n) -> let tac,t' = scalar p n t1' in tac1 @ tac2 @ tac,t'
	  | (Oz n,_) ->
	      (* commute so the literal ends up on the right, then scale *)
	      let sym =
		clever_rewrite p [[P_APP 1];[P_APP 2]]
		  (Lazy.force coq_fast_Zmult_comm) in
	      let tac,t' = scalar p n t2' in tac1 @ tac2 @ (sym :: tac),t'
	  | _ -> default false t
	end
    | Kapp((Zpos|Zneg|Z0),_) ->
	(try ([],Oz(recognize_number t))
	 with e when Errors.noncritical e -> default false t)
    | Kvar s -> [],Oatom s
    | Kapp(Zopp,[t]) ->
	let tac,t' = transform (P_APP 1 :: p) t in
	let tac',t'' = negate p t' in
	tac @ tac', t''
    | Kapp(Z_of_nat,[t']) -> default true t'
    | _ -> default false t
  with e when catchable_exception e -> default false t
(* [shrink_pair p f1 f2] merges two adjacent monomials over the same atom
   into one (factorization lemmas Zred_factor1..4), returning the rewrite
   tactic and the merged monomial.  Any other shape is an internal error. *)
let shrink_pair p f1 f2 =
  match f1,f2 with
    | Oatom v,Oatom _ ->
	let r = Otimes(Oatom v,Oz two) in
	clever_rewrite p [[P_APP 1]] (Lazy.force coq_fast_Zred_factor1), r
    | Oatom v, Otimes(_,c2) ->
	let r = Otimes(Oatom v,Oplus(c2,Oz one)) in
	clever_rewrite p [[P_APP 1];[P_APP 2;P_APP 2]]
	  (Lazy.force coq_fast_Zred_factor2), r
    | Otimes (v1,c1),Oatom v ->
	let r = Otimes(Oatom v,Oplus(c1,Oz one)) in
	clever_rewrite p [[P_APP 2];[P_APP 1;P_APP 2]]
          (Lazy.force coq_fast_Zred_factor3), r
    | Otimes (Oatom v,c1),Otimes (v2,c2) ->
	let r = Otimes(Oatom v,Oplus(c1,c2)) in
	clever_rewrite p
          [[P_APP 1;P_APP 1];[P_APP 1;P_APP 2];[P_APP 2;P_APP 2]]
          (Lazy.force coq_fast_Zred_factor4),r
    | t1,t2 ->
	begin
	  oprint t1; print_newline (); oprint t2; print_newline ();
	  flush Pervasives.stdout; error "shrink.1"
	end
(* [reduce_factor p f] forces monomial [f] into the canonical [atom * literal]
   shape: a bare atom gets coefficient 1 (Zred_factor0), a sum of literal
   coefficients is folded numerically and simplified in the goal. *)
let reduce_factor p = function
  | Oatom v ->
      let r = Otimes(Oatom v,Oz one) in
      [clever_rewrite p [[]] (Lazy.force coq_fast_Zred_factor0)],r
  | Otimes(Oatom v,Oz n) as f -> [],f
  | Otimes(Oatom v,c) ->
      let rec compute = function
	| Oz n -> n
	| Oplus(t1,t2) -> Bigint.add (compute t1) (compute t2)
	| _ -> error "condense.1"
      in
      [focused_simpl (P_APP 2 :: p)], Otimes(Oatom v,Oz(compute c))
  | t -> oprint t; error "reduce_factor.1"
(* [condense p t] collapses a weight-sorted sum: adjacent monomials over the
   same atom are merged ([shrink_pair]) and each surviving monomial is put in
   [atom * literal] form ([reduce_factor]); a lone monomial gets an explicit
   [+ 0] tail (Zred_factor6) so the result is always a sum ended by a
   constant.  Returns the rewrite tactics and the condensed oformula. *)
let rec condense p = function
  | Oplus(f1,(Oplus(f2,r) as t)) ->
      if weight f1 = weight f2 then begin
	let shrink_tac,t = shrink_pair (P_APP 1 :: p) f1 f2 in
	let assoc_tac =
          clever_rewrite p
            [[P_APP 1];[P_APP 2;P_APP 1];[P_APP 2;P_APP 2]]
            (Lazy.force coq_fast_Zplus_assoc) in
	let tac_list,t' = condense p (Oplus(t,r)) in
	(assoc_tac :: shrink_tac :: tac_list), t'
      end else begin
	let tac,f = reduce_factor (P_APP 1 :: p) f1 in
	let tac',t' = condense (P_APP 2 :: p) t in
	(tac @ tac'), Oplus(f,t')
      end
  | Oplus(f1,Oz n) ->
      let tac,f1' = reduce_factor (P_APP 1 :: p) f1 in tac,Oplus(f1',Oz n)
  | Oplus(f1,f2) ->
      if weight f1 = weight f2 then begin
	let tac_shrink,t = shrink_pair p f1 f2 in
	let tac,t' = condense p t in
	tac_shrink :: tac,t'
      end else begin
	let tac,f = reduce_factor (P_APP 1 :: p) f1 in
	let tac',t' = condense (P_APP 2 :: p) f2 in
	(tac @ tac'),Oplus(f,t')
      end
  | Oz _ as t -> [],t
  | t ->
      let tac,t' = reduce_factor p t in
      let final = Oplus(t',Oz zero) in
      let tac' = clever_rewrite p [[]] (Lazy.force coq_fast_Zred_factor6) in
      tac @ [tac'], final
(* Remove monomials with a zero coefficient from a condensed sum
   (Zred_factor5), returning the cleanup tactics and the pruned oformula. *)
let rec clear_zero p = function
  | Oplus(Otimes(Oatom v,Oz n),r) when n =? zero ->
      let tac =
	clever_rewrite p [[P_APP 1;P_APP 1];[P_APP 2]]
	  (Lazy.force coq_fast_Zred_factor5) in
      let tac',t = clear_zero p r in
      tac :: tac',t
  | Oplus(f,r) ->
      let tac,t = clear_zero (P_APP 2 :: p) r in tac,Oplus(f,t)
  | t -> [],t
(* [replay_history tactic_normalisation trace] replays an omega refutation
   trace as a Coq tactic.  Each trace step reproduces, via the OMEGA* lemmas,
   one manipulation the decision procedure performed on the abstract
   equations; internal equation ids are mapped back to hypothesis names with
   [hyp_of_tag], and [tactic_normalisation] associates to each original
   hypothesis the tactic that normalizes it. *)
let replay_history tactic_normalisation =
  let aux = id_of_string "auxiliary" in
  let aux1 = id_of_string "auxiliary_1" in
  let aux2 = id_of_string "auxiliary_2" in
  let izero = mk_integer zero in
  let rec loop t =
    match t with
      (* Normalize the original hypothesis [e] if a tactic was recorded. *)
      | HYP e :: l ->
	  begin
	    try
	      tclTHEN
		(List.assoc (hyp_of_tag e.id) tactic_normalisation)
		(loop l)
	    with Not_found -> loop l end
      (* e1 = 0 and k*e2 <> 0 with e1 = k*e2: contradiction (OMEGA17). *)
      | NEGATE_CONTRADICT (e2,e1,b) :: l ->
	  let eq1 = decompile e1
	  and eq2 = decompile e2 in
	  let id1 = hyp_of_tag e1.id
	  and id2 = hyp_of_tag e2.id in
	  let k = if b then negone else one in
	  let p_initial = [P_APP 1;P_TYPE] in
	  let tac= shuffle_mult_right p_initial e1.body k e2.body in
	  tclTHENLIST [
	    (generalize_tac
	      [mkApp (Lazy.force coq_OMEGA17, [|
		val_of eq1;
		val_of eq2;
		mk_integer k;
		mkVar id1; mkVar id2 |])]);
	    (mk_then tac);
	    (intros_using [aux]);
	    (resolve_id aux);
	    reflexivity
	  ]
      (* Two opposite inequations whose sum is a negative constant (OMEGA2). *)
      | CONTRADICTION (e1,e2) :: l ->
	  let eq1 = decompile e1
	  and eq2 = decompile e2 in
	  let p_initial = [P_APP 2;P_TYPE] in
	  let tac = shuffle_cancel p_initial e1.body in
	  let solve_le =
	    let not_sup_sup = mkApp (build_coq_eq (), [|
	      Lazy.force coq_comparison;
	      Lazy.force coq_Gt;
	      Lazy.force coq_Gt |])
	    in
	    tclTHENS
	      (tclTHENLIST [
		(unfold sp_Zle);
		(simpl_in_concl);
		intro;
		(absurd not_sup_sup) ])
	      [ assumption ; reflexivity ]
	  in
	  let theorem =
	    mkApp (Lazy.force coq_OMEGA2, [|
	      val_of eq1; val_of eq2;
	      mkVar (hyp_of_tag e1.id);
	      mkVar (hyp_of_tag e2.id) |])
	  in
	  tclTHEN (tclTHEN (generalize_tac [theorem]) (mk_then tac)) (solve_le)
      (* Divide an inequation by k, rounding the constant (Zmult_le_approx). *)
      | DIVIDE_AND_APPROX (e1,e2,k,d) :: l ->
	  let id = hyp_of_tag e1.id in
	  let eq1 = val_of(decompile e1)
	  and eq2 = val_of(decompile e2) in
	  let kk = mk_integer k
	  and dd = mk_integer d in
	  let rhs = mk_plus (mk_times eq2 kk) dd in
	  let state_eg = mk_eq eq1 rhs in
	  let tac = scalar_norm_add [P_APP 3] e2.body in
	  tclTHENS
	    (cut state_eg)
	    [ tclTHENS
	        (tclTHENLIST [
		  (intros_using [aux]);
		  (generalize_tac
		    [mkApp (Lazy.force coq_OMEGA1,
		      [| eq1; rhs; mkVar aux; mkVar id |])]);
		  (clear [aux;id]);
		  (intros_using [id]);
		  (cut (mk_gt kk dd)) ])
	        [ tclTHENS
		    (cut (mk_gt kk izero))
		    [ tclTHENLIST [
		        (intros_using [aux1; aux2]);
		        (generalize_tac
			  [mkApp (Lazy.force coq_Zmult_le_approx,
			    [| kk;eq2;dd;mkVar aux1;mkVar aux2; mkVar id |])]);
		        (clear [aux1;aux2;id]);
		        (intros_using [id]);
		        (loop l) ];
		      tclTHENLIST [
			(unfold sp_Zgt);
			(simpl_in_concl);
			reflexivity ] ];
		  tclTHENLIST [ (unfold sp_Zgt); simpl_in_concl; reflexivity ]
	        ];
	      tclTHEN (mk_then tac) reflexivity ]
      (* An equation whose constant is not divisible by k: absurd (OMEGA4). *)
      | NOT_EXACT_DIVIDE (e1,k) :: l ->
	  let c = floor_div e1.constant k in
	  let d = Bigint.sub e1.constant (Bigint.mult c k) in
	  let e2 =  {id=e1.id; kind=EQUA;constant = c;
                     body = map_eq_linear (fun c -> c / k) e1.body } in
	  let eq2 = val_of(decompile e2) in
	  let kk = mk_integer k
	  and dd = mk_integer d in
	  let tac = scalar_norm_add [P_APP 2] e2.body in
	  tclTHENS
	    (cut (mk_gt dd izero))
	    [ tclTHENS (cut (mk_gt kk dd))
		[tclTHENLIST [
		  (intros_using [aux2;aux1]);
		  (generalize_tac
		    [mkApp (Lazy.force coq_OMEGA4,
		      [| dd;kk;eq2;mkVar aux1; mkVar aux2 |])]);
		  (clear [aux1;aux2]);
		  (unfold sp_not);
		  (intros_using [aux]);
		  (resolve_id aux);
		  (mk_then tac);
		  assumption ] ;
		 tclTHENLIST [
		   (unfold sp_Zgt);
		   simpl_in_concl;
		   reflexivity ] ];
	      tclTHENLIST [
		(unfold sp_Zgt);
		simpl_in_concl;
		reflexivity ] ]
      (* Divide evenly by k (OMEGA18 for disequations, OMEGA3 otherwise). *)
      | EXACT_DIVIDE (e1,k) :: l ->
	  let id = hyp_of_tag e1.id in
	  let e2 =  map_eq_afine (fun c -> c / k) e1 in
	  let eq1 = val_of(decompile e1)
	  and eq2 = val_of(decompile e2) in
	  let kk = mk_integer k in
	  let state_eq = mk_eq eq1 (mk_times eq2 kk) in
	  if e1.kind = DISE then
	    let tac = scalar_norm [P_APP 3] e2.body in
	    tclTHENS
	      (cut state_eq)
	      [tclTHENLIST [
		(intros_using [aux1]);
		(generalize_tac
		  [mkApp (Lazy.force coq_OMEGA18,
		    [| eq1;eq2;kk;mkVar aux1; mkVar id |])]);
		(clear [aux1;id]);
		(intros_using [id]);
		(loop l) ];
	       tclTHEN (mk_then tac) reflexivity ]
	  else
	    let tac = scalar_norm [P_APP 3] e2.body in
	    tclTHENS (cut state_eq)
	      [
		tclTHENS
		  (cut (mk_gt kk izero))
		  [tclTHENLIST [
		    (intros_using [aux2;aux1]);
		    (generalize_tac
		      [mkApp (Lazy.force coq_OMEGA3,
			[| eq1; eq2; kk; mkVar aux2; mkVar aux1;mkVar id|])]);
		    (clear [aux1;aux2;id]);
		    (intros_using [id]);
		    (loop l) ];
		   tclTHENLIST [
		     (unfold sp_Zgt);
		     simpl_in_concl;
		     reflexivity ] ];
		tclTHEN (mk_then tac) reflexivity ]
      (* Merge e and -e into one equation (OMEGA8). *)
      | (MERGE_EQ(e3,e1,e2)) :: l ->
	  let id = new_identifier () in
	  tag_hypothesis id e3;
	  let id1 = hyp_of_tag e1.id
	  and id2 = hyp_of_tag e2 in
	  let eq1 = val_of(decompile e1)
	  and eq2 = val_of (decompile (negate_eq e1)) in
	  let tac =
	    clever_rewrite [P_APP 3] [[P_APP 1]]
	      (Lazy.force coq_fast_Zopp_eq_mult_neg_1) ::
	    scalar_norm [P_APP 3] e1.body
	  in
	  tclTHENS
	    (cut (mk_eq eq1 (mk_inv eq2)))
	    [tclTHENLIST [
	      (intros_using [aux]);
	      (generalize_tac [mkApp (Lazy.force coq_OMEGA8,
		[| eq1;eq2;mkVar id1;mkVar id2; mkVar aux|])]);
	      (clear [id1;id2;aux]);
	      (intros_using [id]);
	      (loop l) ];
	     tclTHEN (mk_then tac) reflexivity]
      (* Introduce a fresh variable defined by an equation (OMEGA9). *)
      | STATE {st_new_eq=e;st_def=def;st_orig=orig;st_coef=m;st_var=v} :: l ->
	  let id = new_identifier ()
	  and id2 = hyp_of_tag orig.id in
	  tag_hypothesis id e.id;
	  let eq1 = val_of(decompile def)
	  and eq2 = val_of(decompile orig) in
	  let vid = unintern_id v in
	  let theorem =
	    mkApp (build_coq_ex (), [|
	      Lazy.force coq_Z;
	      mkLambda
		(Name vid,
		 Lazy.force coq_Z,
		 mk_eq (mkRel 1) eq1) |])
	  in
	  let mm = mk_integer m in
	  let p_initial = [P_APP 2;P_TYPE] in
	  let tac =
	    clever_rewrite (P_APP 1 :: P_APP 1 :: P_APP 2 :: p_initial)
	      [[P_APP 1]] (Lazy.force coq_fast_Zopp_eq_mult_neg_1) ::
	    shuffle_mult_right p_initial
	      orig.body m ({c= negone;v= v}::def.body) in
	  tclTHENS
	    (cut theorem)
	    [tclTHENLIST [
	      (intros_using [aux]);
	      (elim_id aux);
	      (clear [aux]);
	      (intros_using [vid; aux]);
	      (generalize_tac
		[mkApp (Lazy.force coq_OMEGA9,
		  [| mkVar vid;eq2;eq1;mm; mkVar id2;mkVar aux |])]);
	      (mk_then tac);
	      (clear [aux]);
	      (intros_using [id]);
	      (loop l) ];
	     tclTHEN (exists_tac eq1) reflexivity ]
      (* Case split a disequation into two strict inequations (OMEGA19). *)
      | SPLIT_INEQ(e,(e1,act1),(e2,act2)) :: l ->
	  let id1 = new_identifier ()
	  and id2 = new_identifier () in
	  tag_hypothesis id1 e1; tag_hypothesis id2 e2;
	  let id = hyp_of_tag e.id in
	  let tac1 = norm_add [P_APP 2;P_TYPE] e.body in
	  let tac2 = scalar_norm_add [P_APP 2;P_TYPE] e.body in
	  let eq = val_of(decompile e) in
	  tclTHENS
	    (simplest_elim (applist (Lazy.force coq_OMEGA19, [eq; mkVar id])))
	    [tclTHENLIST [ (mk_then tac1); (intros_using [id1]); (loop act1) ];
	     tclTHENLIST [ (mk_then tac2); (intros_using [id2]); (loop act2) ]]
      (* Linear combination k1*e1 + k2*e2 (OMEGA5/6/20 when k1 = 1 and e2 is
	 an equation, the general OMEGA7 otherwise). *)
      | SUM(e3,(k1,e1),(k2,e2)) :: l ->
	  let id = new_identifier () in
	  tag_hypothesis id e3;
	  let id1 = hyp_of_tag e1.id
	  and id2 = hyp_of_tag e2.id in
	  let eq1 = val_of(decompile e1)
	  and eq2 = val_of(decompile e2) in
	  if k1 =? one & e2.kind = EQUA then
	    let tac_thm =
	      match e1.kind with
		| EQUA -> Lazy.force coq_OMEGA5
		| INEQ -> Lazy.force coq_OMEGA6
		| DISE -> Lazy.force coq_OMEGA20
	    in
	    let kk = mk_integer k2 in
	    let p_initial =
	      if e1.kind=DISE then [P_APP 1; P_TYPE] else [P_APP 2; P_TYPE] in
	    let tac = shuffle_mult_right p_initial e1.body k2 e2.body in
	    tclTHENLIST [
	      (generalize_tac
		[mkApp (tac_thm, [| eq1; eq2; kk; mkVar id1; mkVar id2 |])]);
	      (mk_then tac);
	      (intros_using [id]);
	      (loop l)
	    ]
	  else
	    let kk1 = mk_integer k1
	    and kk2 = mk_integer k2 in
	    let p_initial = [P_APP 2;P_TYPE] in
	    let tac= shuffle_mult p_initial k1 e1.body k2 e2.body in
	    tclTHENS (cut (mk_gt kk1 izero))
	      [tclTHENS
		 (cut (mk_gt kk2 izero))
		 [tclTHENLIST [
		   (intros_using [aux2;aux1]);
		   (generalize_tac
		     [mkApp (Lazy.force coq_OMEGA7, [|
		       eq1;eq2;kk1;kk2;
		       mkVar aux1;mkVar aux2;
		       mkVar id1;mkVar id2 |])]);
		   (clear [aux1;aux2]);
		   (mk_then tac);
		   (intros_using [id]);
		   (loop l) ];
		  tclTHENLIST [
		    (unfold sp_Zgt);
		    simpl_in_concl;
		    reflexivity ] ];
	       tclTHENLIST [
		 (unfold sp_Zgt);
		 simpl_in_concl;
		 reflexivity ] ]
      (* A non-zero constant equated to zero: discriminate. *)
      | CONSTANT_NOT_NUL(e,k) :: l ->
	  tclTHEN (generalize_tac [mkVar (hyp_of_tag e)]) Equality.discrConcl
      (* A zero constant declared non-zero: reflexivity closes it. *)
      | CONSTANT_NUL(e) :: l ->
	  tclTHEN (resolve_id (hyp_of_tag e)) reflexivity
      (* A negative constant bounded below by zero: compute and conclude. *)
      | CONSTANT_NEG(e,k) :: l ->
	  tclTHENLIST [
	    (generalize_tac [mkVar (hyp_of_tag e)]);
	    (unfold sp_Zle);
	    simpl_in_concl;
	    (unfold sp_not);
	    (intros_using [aux]);
	    (resolve_id aux);
	    reflexivity
	  ]
      | _ -> tclIDTAC
  in
  loop
let normalize p_initial t =
let (tac,t') = transform p_initial t in
let (tac',t'') = condense p_initial t' in
let (tac'',t''') = clear_zero p_initial t'' in
tac @ tac' @ tac'' , t'''
let normalize_equation id flag theorem pos t t1 t2 (tactic,defs) =
let p_initial = [P_APP pos ;P_TYPE] in
let (tac,t') = normalize p_initial t in
let shift_left =
tclTHEN
(generalize_tac [mkApp (theorem, [| t1; t2; mkVar id |]) ])
(tclTRY (clear [id]))
in
if tac <> [] then
let id' = new_identifier () in
((id',(tclTHENLIST [ (shift_left); (mk_then tac); (intros_using [id']) ]))
:: tactic,
compile id' flag t' :: defs)
else
(tactic,defs)
let destructure_omega gl tac_def (id,c) =
if atompart_of_id id = "State" then
tac_def
else
try match destructurate_prop c with
| Kapp(Eq,[typ;t1;t2])
when destructurate_type (pf_nf gl typ) = Kapp(Z,[]) ->
let t = mk_plus t1 (mk_inv t2) in
normalize_equation
id EQUA (Lazy.force coq_Zegal_left) 2 t t1 t2 tac_def
| Kapp(Zne,[t1;t2]) ->
let t = mk_plus t1 (mk_inv t2) in
normalize_equation
id DISE (Lazy.force coq_Zne_left) 1 t t1 t2 tac_def
| Kapp(Zle,[t1;t2]) ->
let t = mk_plus t2 (mk_inv t1) in
normalize_equation
id INEQ (Lazy.force coq_Zle_left) 2 t t1 t2 tac_def
| Kapp(Zlt,[t1;t2]) ->
let t = mk_plus (mk_plus t2 (mk_integer negone)) (mk_inv t1) in
normalize_equation
id INEQ (Lazy.force coq_Zlt_left) 2 t t1 t2 tac_def
| Kapp(Zge,[t1;t2]) ->
let t = mk_plus t1 (mk_inv t2) in
normalize_equation
id INEQ (Lazy.force coq_Zge_left) 2 t t1 t2 tac_def
| Kapp(Zgt,[t1;t2]) ->
let t = mk_plus (mk_plus t1 (mk_integer negone)) (mk_inv t2) in
normalize_equation
id INEQ (Lazy.force coq_Zgt_left) 2 t t1 t2 tac_def
| _ -> tac_def
with e when catchable_exception e -> tac_def
let reintroduce id =
tclTHEN (tclTRY (clear [id])) (intro_using id)
let coq_omega gl =
clear_tables ();
let tactic_normalisation, system =
List.fold_left (destructure_omega gl) ([],[]) (pf_hyps_types gl) in
let prelude,sys =
List.fold_left
(fun (tac,sys) (t,(v,th,b)) ->
if b then
let id = new_identifier () in
let i = new_id () in
tag_hypothesis id i;
(tclTHENLIST [
(simplest_elim (applist (Lazy.force coq_intro_Z, [t])));
(intros_using [v; id]);
(elim_id id);
(clear [id]);
(intros_using [th;id]);
tac ]),
{kind = INEQ;
body = [{v=intern_id v; c=one}];
constant = zero; id = i} :: sys
else
(tclTHENLIST [
(simplest_elim (applist (Lazy.force coq_new_var, [t])));
(intros_using [v;th]);
tac ]),
sys)
(tclIDTAC,[]) (dump_tables ())
in
let system = system @ sys in
if !display_system_flag then display_system display_var system;
if !old_style_flag then begin
try
let _ = simplify (new_id,new_var_num,display_var) false system in
tclIDTAC gl
with UNSOLVABLE ->
let _,path = depend [] [] (history ()) in
if !display_action_flag then display_action display_var path;
(tclTHEN prelude (replay_history tactic_normalisation path)) gl
end else begin
try
let path = simplify_strong (new_id,new_var_num,display_var) system in
if !display_action_flag then display_action display_var path;
(tclTHEN prelude (replay_history tactic_normalisation path)) gl
with NO_CONTRADICTION -> error "Omega can't solve this system"
end
let coq_omega = solver_time coq_omega
let nat_inject gl =
let rec explore p t =
try match destructurate_term t with
| Kapp(Plus,[t1;t2]) ->
tclTHENLIST [
(clever_rewrite_gen p (mk_plus (mk_inj t1) (mk_inj t2))
((Lazy.force coq_inj_plus),[t1;t2]));
(explore (P_APP 1 :: p) t1);
(explore (P_APP 2 :: p) t2)
]
| Kapp(Mult,[t1;t2]) ->
tclTHENLIST [
(clever_rewrite_gen p (mk_times (mk_inj t1) (mk_inj t2))
((Lazy.force coq_inj_mult),[t1;t2]));
(explore (P_APP 1 :: p) t1);
(explore (P_APP 2 :: p) t2)
]
| Kapp(Minus,[t1;t2]) ->
let id = new_identifier () in
tclTHENS
(tclTHEN
(simplest_elim (applist (Lazy.force coq_le_gt_dec, [t2;t1])))
(intros_using [id]))
[
tclTHENLIST [
(clever_rewrite_gen p
(mk_minus (mk_inj t1) (mk_inj t2))
((Lazy.force coq_inj_minus1),[t1;t2;mkVar id]));
(loop [id,mkApp (Lazy.force coq_le, [| t2;t1 |])]);
(explore (P_APP 1 :: p) t1);
(explore (P_APP 2 :: p) t2) ];
(tclTHEN
(clever_rewrite_gen p (mk_integer zero)
((Lazy.force coq_inj_minus2),[t1;t2;mkVar id]))
(loop [id,mkApp (Lazy.force coq_gt, [| t2;t1 |])]))
]
| Kapp(S,[t']) ->
let rec is_number t =
try match destructurate_term t with
Kapp(S,[t]) -> is_number t
| Kapp(O,[]) -> true
| _ -> false
with e when catchable_exception e -> false
in
let rec loop p t =
try match destructurate_term t with
Kapp(S,[t]) ->
(tclTHEN
(clever_rewrite_gen p
(mkApp (Lazy.force coq_Zsucc, [| mk_inj t |]))
((Lazy.force coq_inj_S),[t]))
(loop (P_APP 1 :: p) t))
| _ -> explore p t
with e when catchable_exception e -> explore p t
in
if is_number t' then focused_simpl p else loop p t
| Kapp(Pred,[t]) ->
let t_minus_one =
mkApp (Lazy.force coq_minus, [| t;
mkApp (Lazy.force coq_S, [| Lazy.force coq_O |]) |]) in
tclTHEN
(clever_rewrite_gen_nat (P_APP 1 :: p) t_minus_one
((Lazy.force coq_pred_of_minus),[t]))
(explore p t_minus_one)
| Kapp(O,[]) -> focused_simpl p
| _ -> tclIDTAC
with e when catchable_exception e -> tclIDTAC
and loop = function
| [] -> tclIDTAC
| (i,t)::lit ->
begin try match destructurate_prop t with
Kapp(Le,[t1;t2]) ->
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force coq_inj_le, [| t1;t2;mkVar i |]) ]);
(explore [P_APP 1; P_TYPE] t1);
(explore [P_APP 2; P_TYPE] t2);
(reintroduce i);
(loop lit)
]
| Kapp(Lt,[t1;t2]) ->
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force coq_inj_lt, [| t1;t2;mkVar i |]) ]);
(explore [P_APP 1; P_TYPE] t1);
(explore [P_APP 2; P_TYPE] t2);
(reintroduce i);
(loop lit)
]
| Kapp(Ge,[t1;t2]) ->
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force coq_inj_ge, [| t1;t2;mkVar i |]) ]);
(explore [P_APP 1; P_TYPE] t1);
(explore [P_APP 2; P_TYPE] t2);
(reintroduce i);
(loop lit)
]
| Kapp(Gt,[t1;t2]) ->
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force coq_inj_gt, [| t1;t2;mkVar i |]) ]);
(explore [P_APP 1; P_TYPE] t1);
(explore [P_APP 2; P_TYPE] t2);
(reintroduce i);
(loop lit)
]
| Kapp(Neq,[t1;t2]) ->
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force coq_inj_neq, [| t1;t2;mkVar i |]) ]);
(explore [P_APP 1; P_TYPE] t1);
(explore [P_APP 2; P_TYPE] t2);
(reintroduce i);
(loop lit)
]
| Kapp(Eq,[typ;t1;t2]) ->
if pf_conv_x gl typ (Lazy.force coq_nat) then
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force coq_inj_eq, [| t1;t2;mkVar i |]) ]);
(explore [P_APP 2; P_TYPE] t1);
(explore [P_APP 3; P_TYPE] t2);
(reintroduce i);
(loop lit)
]
else loop lit
| _ -> loop lit
with e when catchable_exception e -> loop lit end
in
loop (List.rev (pf_hyps_types gl)) gl
let dec_binop = function
| Zne -> coq_dec_Zne
| Zle -> coq_dec_Zle
| Zlt -> coq_dec_Zlt
| Zge -> coq_dec_Zge
| Zgt -> coq_dec_Zgt
| Le -> coq_dec_le
| Lt -> coq_dec_lt
| Ge -> coq_dec_ge
| Gt -> coq_dec_gt
| _ -> raise Not_found
let not_binop = function
| Zne -> coq_not_Zne
| Zle -> coq_Znot_le_gt
| Zlt -> coq_Znot_lt_ge
| Zge -> coq_Znot_ge_lt
| Zgt -> coq_Znot_gt_le
| Le -> coq_not_le
| Lt -> coq_not_lt
| Ge -> coq_not_ge
| Gt -> coq_not_gt
| _ -> raise Not_found
* A decidability check : for some [ t ] , could we build a term
of type [ decidable t ] ( i.e. [ t\/~t ] ) ? Otherwise , we raise
[ Undecidable ] . Note that a successful check implies that
[ t ] has type Prop .
of type [decidable t] (i.e. [t\/~t]) ? Otherwise, we raise
[Undecidable]. Note that a successful check implies that
[t] has type Prop.
*)
exception Undecidable
let rec decidability gl t =
match destructurate_prop t with
| Kapp(Or,[t1;t2]) ->
mkApp (Lazy.force coq_dec_or, [| t1; t2;
decidability gl t1; decidability gl t2 |])
| Kapp(And,[t1;t2]) ->
mkApp (Lazy.force coq_dec_and, [| t1; t2;
decidability gl t1; decidability gl t2 |])
| Kapp(Iff,[t1;t2]) ->
mkApp (Lazy.force coq_dec_iff, [| t1; t2;
decidability gl t1; decidability gl t2 |])
| Kimp(t1,t2) ->
This is the only situation where it 's not obvious that [ t ]
is in Prop . The recursive call on [ t2 ] will ensure that .
is in Prop. The recursive call on [t2] will ensure that. *)
mkApp (Lazy.force coq_dec_imp,
[| t1; t2; decidability gl t1; decidability gl t2 |])
| Kapp(Not,[t1]) ->
mkApp (Lazy.force coq_dec_not, [| t1; decidability gl t1 |])
| Kapp(Eq,[typ;t1;t2]) ->
begin match destructurate_type (pf_nf gl typ) with
| Kapp(Z,[]) -> mkApp (Lazy.force coq_dec_eq, [| t1;t2 |])
| Kapp(Nat,[]) -> mkApp (Lazy.force coq_dec_eq_nat, [| t1;t2 |])
| _ -> raise Undecidable
end
| Kapp(op,[t1;t2]) ->
(try mkApp (Lazy.force (dec_binop op), [| t1; t2 |])
with Not_found -> raise Undecidable)
| Kapp(False,[]) -> Lazy.force coq_dec_False
| Kapp(True,[]) -> Lazy.force coq_dec_True
| _ -> raise Undecidable
let onClearedName id tac =
We can not ensure that hyps can be cleared ( because of dependencies ) ,
tclTHEN
(tclTRY (clear [id]))
(fun gl ->
let id = fresh_id [] id gl in
tclTHEN (introduction id) (tac id) gl)
let onClearedName2 id tac =
tclTHEN
(tclTRY (clear [id]))
(fun gl ->
let id1 = fresh_id [] (add_suffix id "_left") gl in
let id2 = fresh_id [] (add_suffix id "_right") gl in
tclTHENLIST [ introduction id1; introduction id2; tac id1 id2 ] gl)
let destructure_hyps gl =
let rec loop = function
| [] -> (tclTHEN nat_inject coq_omega)
| (i,body,t)::lit ->
begin try match destructurate_prop t with
| Kapp(False,[]) -> elim_id i
| Kapp((Zle|Zge|Zgt|Zlt|Zne),[t1;t2]) -> loop lit
| Kapp(Or,[t1;t2]) ->
(tclTHENS
(elim_id i)
[ onClearedName i (fun i -> (loop ((i,None,t1)::lit)));
onClearedName i (fun i -> (loop ((i,None,t2)::lit))) ])
| Kapp(And,[t1;t2]) ->
tclTHEN
(elim_id i)
(onClearedName2 i (fun i1 i2 ->
loop ((i1,None,t1)::(i2,None,t2)::lit)))
| Kapp(Iff,[t1;t2]) ->
tclTHEN
(elim_id i)
(onClearedName2 i (fun i1 i2 ->
loop ((i1,None,mkArrow t1 t2)::(i2,None,mkArrow t2 t1)::lit)))
| Kimp(t1,t2) ->
if is_Prop (pf_type_of gl t2)
then
let d1 = decidability gl t1 in
tclTHENLIST [
(generalize_tac [mkApp (Lazy.force coq_imp_simp,
[| t1; t2; d1; mkVar i|])]);
(onClearedName i (fun i ->
(loop ((i,None,mk_or (mk_not t1) t2)::lit))))
]
else
loop lit
| Kapp(Not,[t]) ->
begin match destructurate_prop t with
Kapp(Or,[t1;t2]) ->
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force coq_not_or,[| t1; t2; mkVar i |])]);
(onClearedName i (fun i ->
(loop ((i,None,mk_and (mk_not t1) (mk_not t2)):: lit))))
]
| Kapp(And,[t1;t2]) ->
let d1 = decidability gl t1 in
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force coq_not_and,
[| t1; t2; d1; mkVar i |])]);
(onClearedName i (fun i ->
(loop ((i,None,mk_or (mk_not t1) (mk_not t2))::lit))))
]
| Kapp(Iff,[t1;t2]) ->
let d1 = decidability gl t1 in
let d2 = decidability gl t2 in
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force coq_not_iff,
[| t1; t2; d1; d2; mkVar i |])]);
(onClearedName i (fun i ->
(loop ((i,None,
mk_or (mk_and t1 (mk_not t2))
(mk_and (mk_not t1) t2))::lit))))
]
| Kimp(t1,t2) ->
t2 must be in Prop otherwise ~(t1->t2 ) would n't be ok .
For t1 , being decidable implies being Prop .
For t1, being decidable implies being Prop. *)
let d1 = decidability gl t1 in
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force coq_not_imp,
[| t1; t2; d1; mkVar i |])]);
(onClearedName i (fun i ->
(loop ((i,None,mk_and t1 (mk_not t2)) :: lit))))
]
| Kapp(Not,[t]) ->
let d = decidability gl t in
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force coq_not_not, [| t; d; mkVar i |])]);
(onClearedName i (fun i -> (loop ((i,None,t)::lit))))
]
| Kapp(op,[t1;t2]) ->
(try
let thm = not_binop op in
tclTHENLIST [
(generalize_tac
[mkApp (Lazy.force thm, [| t1;t2;mkVar i|])]);
(onClearedName i (fun _ -> loop lit))
]
with Not_found -> loop lit)
| Kapp(Eq,[typ;t1;t2]) ->
if !old_style_flag then begin
match destructurate_type (pf_nf gl typ) with
| Kapp(Nat,_) ->
tclTHENLIST [
(simplest_elim
(mkApp
(Lazy.force coq_not_eq, [|t1;t2;mkVar i|])));
(onClearedName i (fun _ -> loop lit))
]
| Kapp(Z,_) ->
tclTHENLIST [
(simplest_elim
(mkApp
(Lazy.force coq_not_Zeq, [|t1;t2;mkVar i|])));
(onClearedName i (fun _ -> loop lit))
]
| _ -> loop lit
end else begin
match destructurate_type (pf_nf gl typ) with
| Kapp(Nat,_) ->
(tclTHEN
(convert_hyp_no_check
(i,body,
(mkApp (Lazy.force coq_neq, [| t1;t2|]))))
(loop lit))
| Kapp(Z,_) ->
(tclTHEN
(convert_hyp_no_check
(i,body,
(mkApp (Lazy.force coq_Zne, [| t1;t2|]))))
(loop lit))
| _ -> loop lit
end
| _ -> loop lit
end
| _ -> loop lit
with
| Undecidable -> loop lit
| e when catchable_exception e -> loop lit
end
in
loop (pf_hyps gl) gl
let destructure_goal gl =
let concl = pf_concl gl in
let rec loop t =
match destructurate_prop t with
| Kapp(Not,[t]) ->
(tclTHEN
(tclTHEN (unfold sp_not) intro)
destructure_hyps)
| Kimp(a,b) -> (tclTHEN intro (loop b))
| Kapp(False,[]) -> destructure_hyps
| _ ->
let goal_tac =
try
let dec = decidability gl t in
tclTHEN
(Tactics.refine
(mkApp (Lazy.force coq_dec_not_not, [| t; dec; mkNewMeta () |])))
intro
with Undecidable -> Tactics.elim_type (build_coq_False ())
in
tclTHEN goal_tac destructure_hyps
in
(loop concl) gl
let destructure_goal = all_time (destructure_goal)
let omega_solver gl =
Coqlib.check_required_library ["Coq";"omega";"Omega"];
let result = destructure_goal gl in
result
|
55c43ac7d4a1fafb723a507f66b3ac20b1e65df9b0d1e7b1a179d9e5c1982927 | clj-commons/claypoole | impl.clj | The Climate Corporation licenses this file to you under under the Apache
;; License, Version 2.0 (the "License"); you may not use this file except in
compliance with the License . You may obtain a copy of the License at
;;
;; -2.0
;;
;; See the NOTICE file distributed with this work for additional information
;; regarding copyright ownership. Unless required by applicable law or agreed
to in writing , software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express
;; or implied. See the License for the specific language governing permissions
;; and limitations under the License.
(ns com.climate.claypoole.impl
"Implementation helper functions for Claypoole."
(:require [clojure.core :as core])
(:import [clojure.lang IFn]
[com.climate.claypoole.impl Prioritized PriorityThreadpoolImpl]
[java.util Collection List]
[java.util.concurrent
ExecutionException
Executors
ExecutorService
Future
LinkedBlockingQueue
ThreadFactory
TimeoutException
TimeUnit]))
(defn binding-conveyor-fn
"Like clojure.core/binding-conveyor-fn for resetting bindings to run a
function in another thread."
[f]
(let [frame (clojure.lang.Var/cloneThreadBindingFrame)]
(with-meta
(fn []
(let [frame-before (clojure.lang.Var/getThreadBindingFrame)]
(clojure.lang.Var/resetThreadBindingFrame frame)
(try
(f)
(finally
;; This does not matter for correctness, but prevents leaking
;; data in binding frames in thread locals.
(clojure.lang.Var/resetThreadBindingFrame frame-before)))))
(meta f))))
(defn deref-future
"Like clojure.core/deref-future."
([^Future fut]
(.get fut))
([^Future fut timeout-ms timeout-val]
(try (.get fut timeout-ms TimeUnit/MILLISECONDS)
(catch TimeoutException _e
timeout-val))))
(defn deref-fixing-exceptions
"If a future experiences an exception and you dereference the future, you
will see not the original exception but a
java.util.concurrent.ExecutionException. That's sometimes not the result you
want. This catches those exceptions and re-throws the future's exception,
which can be much less surprising to downstream code."
[fut]
(try (deref fut)
(catch java.util.concurrent.ExecutionException e
(let [cause (.getCause e)]
;; Update the stack trace to include e
(.setStackTrace cause (into-array StackTraceElement
(concat
(.getStackTrace cause)
(.getStackTrace e))))
(throw cause)))))
(defn dummy-future-call
"A dummy future-call that runs in serial and returns a future containing the
result."
[f]
(let [result (f)]
(reify
clojure.lang.IDeref
(deref [_] result)
clojure.lang.IBlockingDeref
(deref [_ _timeout-ms _timeout-val] result)
clojure.lang.IPending
(isRealized [_] true)
Future
(get [_] result)
(get [_ _timeout _unit] result)
(isCancelled [_] false)
(isDone [_] true)
(cancel [_ _interrupt?] false))))
(defn validate-future-pool
"Verify that a threadpool is a valid pool for a future."
[pool]
(when-not (or (= :serial pool)
(= :builtin pool)
(instance? ExecutorService pool))
(throw (IllegalArgumentException.
(format
(str "Threadpool futures require a threadpool, :builtin, or "
":serial, not %s.") pool)))))
(defonce ^{:doc "The previously-used threadpool ID."}
threadpool-id
;; Start at -1 so we can just use the return value of (swap! inc).
(atom -1))
(defn default-threadpool-name
"The default name for threads in a threadpool. Gives each threadpool a
unique ID via threadpool-id."
[]
(format "claypoole-%d" (swap! threadpool-id inc)))
(defn apply-map
"Apply a function that takes keyword arguments to a map of arguments."
[f & args]
(let [args* (drop-last args)
arg-map (last args)]
(apply f (concat args* (mapcat identity arg-map)))))
(defn thread-factory
"Create a ThreadFactory with keyword options including thread daemon status
:daemon, the thread name format :name (a string for format with one integer),
and a thread priority :thread-priority."
^java.util.concurrent.ThreadFactory
[& {:keys [daemon thread-priority] pool-name :name
:or {daemon true}}]
(let [daemon* (boolean daemon)
pool-name* (or pool-name (default-threadpool-name))
thread-priority* (or thread-priority
(.getPriority (Thread/currentThread)))
default-factory (Executors/defaultThreadFactory)
;; The previously-used thread ID. Start at -1 so we can just use the
;; return value of (swap! inc).
thread-id (atom -1)]
(reify ThreadFactory
(^Thread newThread [_ ^Runnable r]
(doto (.newThread default-factory r)
(.setDaemon daemon*)
(.setName (str pool-name* "-" (swap! thread-id inc)))
(.setPriority thread-priority*))))))
(defn unchunk
"Takes a seqable and returns a lazy sequence that is maximally lazy.
Based on -do-i-avoid-clojures-chunking-behavior-for-lazy-seqs-that-i-want-to-short-ci"
[s]
(lazy-seq
(when-let [s (seq s)]
(cons (first s)
(unchunk (rest s))))))
(defn threadpool
"Make a threadpool. It should be shutdown when no longer needed.
See docs in com.climate.claypoole/threadpool."
^java.util.concurrent.ScheduledExecutorService [n & args]
;; Return a ScheduledThreadPool rather than a FixedThreadPool because it's
;; the same thing with some bonus features.
(Executors/newScheduledThreadPool n (apply thread-factory args)))
(defn- prioritize
"Apply a priority function to a task.
Note that this re-throws all priority-fn exceptions as ExecutionExceptions.
That shouldn't mess anything up because the caller re-throws it as an
ExecutionException anyway.
For simplicity, prioritize reifies both Callable and Runnable, rather than
having one prioritize function for each of those types. That means, for
example, that if you prioritize a Runnable that is not also a Callable, you
might want to cast the result to Runnable or otherwise use it carefully."
[task, ^IFn priority-fn]
(let [priority (try
(long (apply priority-fn (-> task meta :args)))
(catch Exception e
(throw (ExecutionException.
"Priority function exception" e))))]
(reify
Callable
(call [_] (.call ^Callable task))
Runnable
(run [_] (.run ^Runnable task))
Prioritized
(getPriority [_] priority))))
A Threadpool that applies a priority function to tasks and uses a
;; PriorityThreadpoolImpl to run them.
(deftype PriorityThreadpool [^PriorityThreadpoolImpl pool, ^IFn priority-fn]
ExecutorService
(^boolean awaitTermination [_, ^long timeout, ^TimeUnit unit]
(.awaitTermination pool timeout unit))
(^List invokeAll [_, ^Collection tasks]
(.invokeAll pool (map #(prioritize % priority-fn) tasks)))
(^List invokeAll [_, ^Collection tasks, ^long timeout, ^TimeUnit unit]
(.invokeAll pool (map #(prioritize % priority-fn) tasks) timeout unit))
(^Object invokeAny [_, ^Collection tasks]
(.invokeAny pool (map #(prioritize % priority-fn) tasks)))
(^Object invokeAny [_, ^Collection tasks, ^long timeout, ^TimeUnit unit]
(.invokeAny pool (map #(prioritize % priority-fn) tasks) timeout unit))
(^boolean isShutdown [_]
(.isShutdown pool))
(^boolean isTerminated [_]
(.isTerminated pool))
(shutdown [_]
(.shutdown pool))
(^List shutdownNow [_]
(.shutdownNow pool))
(^Future submit [_, ^Runnable task]
(.submit pool ^Runnable (prioritize task priority-fn)))
(^Future submit [_, ^Runnable task, ^Object result]
(.submit pool ^Runnable (prioritize task priority-fn) result))
(^Future submit [_ ^Callable task]
(.submit pool ^Callable (prioritize task priority-fn))))
(defn ->threadpool
"Convert the argument into a threadpool, leaving the special keyword :serial
alone.
Returns [created? threadpool], where created? indicates whether a new
threadpool was instantiated."
[arg]
(cond
(instance? ExecutorService arg) [false arg]
(integer? arg) [true (threadpool arg)]
(= :builtin arg) [false clojure.lang.Agent/soloExecutor]
(= :serial arg) [false :serial]
:else (throw (IllegalArgumentException.
(format
(str "Claypoole functions require a threadpool, a "
"number, :builtin, or :serial, not %s.") arg)))))
(defn get-pool-size
"If the pool has a max size, get that; else, return nil."
[pool]
(cond
(instance? java.util.concurrent.ScheduledThreadPoolExecutor pool)
(.getCorePoolSize ^java.util.concurrent.ScheduledThreadPoolExecutor pool)
(instance? java.util.concurrent.ThreadPoolExecutor pool)
(.getMaximumPoolSize ^java.util.concurrent.ThreadPoolExecutor pool)
:else
nil))
;; Queue-seq needs a unique item that, when seen in a queue, indicates that the
;; sequence has ended. It uses the private object end-marker, and uses
;; identical? to check against this object's (unique) memory address.
(let [end-marker (Object.)]
(defn- queue-reader
"Make a lazy sequence from a queue, stopping upon reading the unique
end-marker object."
[^LinkedBlockingQueue q]
(lazy-seq
(let [x (.take q)]
(when-not (identical? x end-marker)
(cons x (queue-reader q))))))
(defn queue-seq
"Create a queue and a lazy sequence that reads from that queue."
[]
(let [q (LinkedBlockingQueue.)]
[q (queue-reader q)]))
(defn queue-seq-add!
"Add an item to a queue (and its lazy sequence)."
[^LinkedBlockingQueue q x]
(.put q x))
(defn queue-seq-end!
"End a lazy sequence reading from a queue."
[q]
(queue-seq-add! q end-marker)))
(defn lazy-co-read
"Zip s1 and s2, stopping when s1 stops. This helps avoid potential blocking
when trying to read queue sequences.
In particular, this will block:
(map vector
(range 10)
(concat (range 10) (lazy-seq (deref (promise)))))
even though we only can read 10 things. Lazy-co-read fixes that case by
checking the first sequence first, so this will not block:
(lazy-co-read
(range 10)
(concat (range 10) (lazy-seq (deref (promise)))))"
[s1 s2]
(lazy-seq (when-not (empty? s1)
(cons [(first s1) (first s2)]
(lazy-co-read (rest s1) (rest s2))))))
(defn with-priority-fn
"Make a priority-threadpool wrapper that uses a given priority function.
The priority function is applied to a pmap'd function's arguments. e.g.
(upmap (with-priority-fn p (fn [x _] x)) + [6 5 4] [1 2 3])
will use pool p to run tasks [(+ 6 1) (+ 5 2) (+ 4 3)]
with priorities [6 5 4]."
^com.climate.claypoole.impl.PriorityThreadpool
[^PriorityThreadpool pool priority-fn]
(let [^PriorityThreadpoolImpl pool* (.pool pool)]
(PriorityThreadpool. pool* priority-fn)))
(defn pfor-internal
"Do the messy parsing of the :priority from the for bindings."
[pool bindings body pmap-fn-sym]
(when (vector? pool)
(throw (IllegalArgumentException.
(str "Got a vector instead of a pool--"
"did you forget to use a threadpool?"))))
(if-not (= :priority (first (take-last 2 bindings)))
;; If there's no priority, everything is simple.
`(~pmap-fn-sym ~pool #(%) (for ~bindings (fn [] ~@body)))
;; If there's a priority, God help us--let's pull that thing out.
(let [bindings* (vec (drop-last 2 bindings))
priority-value (last bindings)]
`(let [pool# (with-priority-fn ~pool (fn [_# p#] p#))
;; We can't just make functions; we have to have the priority as
;; an argument to work with the priority-fn.
[fns# priorities#] (apply map vector
(for ~bindings*
[(fn [priority#] ~@body)
~priority-value]))]
(~pmap-fn-sym pool# #(%1 %2) fns# priorities#)))))
(defn seq-open
"Converts a seq s into a lazy seq that calls a function f when the seq is
fully realized or when an exception is thrown. Sort of like with-open, but
not a macro, not necessarily calling .close, and for a lazy seq."
[f s]
(lazy-seq
(let [sprime (try
force one element of s to make exceptions happen here
(when-let [s (seq s)]
(cons (first s) (rest s)))
(catch Throwable t
(f)
(throw t)))]
(if (seq sprime)
(cons (first sprime) (seq-open f (rest sprime)))
(do (f) nil)))))
| null | https://raw.githubusercontent.com/clj-commons/claypoole/00d9829997429f95d1af0b21dc5d2863706291eb/src/clj/com/climate/claypoole/impl.clj | clojure | License, Version 2.0 (the "License"); you may not use this file except in
-2.0
See the NOTICE file distributed with this work for additional information
regarding copyright ownership. Unless required by applicable law or agreed
or implied. See the License for the specific language governing permissions
and limitations under the License.
This does not matter for correctness, but prevents leaking
data in binding frames in thread locals.
Update the stack trace to include e
Start at -1 so we can just use the return value of (swap! inc).
The previously-used thread ID. Start at -1 so we can just use the
return value of (swap! inc).
Return a ScheduledThreadPool rather than a FixedThreadPool because it's
the same thing with some bonus features.
PriorityThreadpoolImpl to run them.
Queue-seq needs a unique item that, when seen in a queue, indicates that the
sequence has ended. It uses the private object end-marker, and uses
identical? to check against this object's (unique) memory address.
If there's no priority, everything is simple.
If there's a priority, God help us--let's pull that thing out.
We can't just make functions; we have to have the priority as
an argument to work with the priority-fn. | The Climate Corporation licenses this file to you under under the Apache
compliance with the License . You may obtain a copy of the License at
to in writing , software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express
(ns com.climate.claypoole.impl
"Implementation helper functions for Claypoole."
(:require [clojure.core :as core])
(:import [clojure.lang IFn]
[com.climate.claypoole.impl Prioritized PriorityThreadpoolImpl]
[java.util Collection List]
[java.util.concurrent
ExecutionException
Executors
ExecutorService
Future
LinkedBlockingQueue
ThreadFactory
TimeoutException
TimeUnit]))
(defn binding-conveyor-fn
"Like clojure.core/binding-conveyor-fn for resetting bindings to run a
function in another thread."
[f]
(let [frame (clojure.lang.Var/cloneThreadBindingFrame)]
(with-meta
(fn []
(let [frame-before (clojure.lang.Var/getThreadBindingFrame)]
(clojure.lang.Var/resetThreadBindingFrame frame)
(try
(f)
(finally
(clojure.lang.Var/resetThreadBindingFrame frame-before)))))
(meta f))))
(defn deref-future
"Like clojure.core/deref-future."
([^Future fut]
(.get fut))
([^Future fut timeout-ms timeout-val]
(try (.get fut timeout-ms TimeUnit/MILLISECONDS)
(catch TimeoutException _e
timeout-val))))
(defn deref-fixing-exceptions
"If a future experiences an exception and you dereference the future, you
will see not the original exception but a
java.util.concurrent.ExecutionException. That's sometimes not the result you
want. This catches those exceptions and re-throws the future's exception,
which can be much less surprising to downstream code."
[fut]
(try (deref fut)
(catch java.util.concurrent.ExecutionException e
(let [cause (.getCause e)]
(.setStackTrace cause (into-array StackTraceElement
(concat
(.getStackTrace cause)
(.getStackTrace e))))
(throw cause)))))
(defn dummy-future-call
"A dummy future-call that runs in serial and returns a future containing the
result."
[f]
(let [result (f)]
(reify
clojure.lang.IDeref
(deref [_] result)
clojure.lang.IBlockingDeref
(deref [_ _timeout-ms _timeout-val] result)
clojure.lang.IPending
(isRealized [_] true)
Future
(get [_] result)
(get [_ _timeout _unit] result)
(isCancelled [_] false)
(isDone [_] true)
(cancel [_ _interrupt?] false))))
(defn validate-future-pool
"Verify that a threadpool is a valid pool for a future."
[pool]
(when-not (or (= :serial pool)
(= :builtin pool)
(instance? ExecutorService pool))
(throw (IllegalArgumentException.
(format
(str "Threadpool futures require a threadpool, :builtin, or "
":serial, not %s.") pool)))))
(defonce ^{:doc "The previously-used threadpool ID."}
threadpool-id
(atom -1))
(defn default-threadpool-name
"The default name for threads in a threadpool. Gives each threadpool a
unique ID via threadpool-id."
[]
(format "claypoole-%d" (swap! threadpool-id inc)))
(defn apply-map
"Apply a function that takes keyword arguments to a map of arguments."
[f & args]
(let [args* (drop-last args)
arg-map (last args)]
(apply f (concat args* (mapcat identity arg-map)))))
(defn thread-factory
"Create a ThreadFactory with keyword options including thread daemon status
:daemon, the thread name format :name (a string for format with one integer),
and a thread priority :thread-priority."
^java.util.concurrent.ThreadFactory
[& {:keys [daemon thread-priority] pool-name :name
:or {daemon true}}]
(let [daemon* (boolean daemon)
pool-name* (or pool-name (default-threadpool-name))
thread-priority* (or thread-priority
(.getPriority (Thread/currentThread)))
default-factory (Executors/defaultThreadFactory)
thread-id (atom -1)]
(reify ThreadFactory
(^Thread newThread [_ ^Runnable r]
(doto (.newThread default-factory r)
(.setDaemon daemon*)
(.setName (str pool-name* "-" (swap! thread-id inc)))
(.setPriority thread-priority*))))))
(defn unchunk
"Takes a seqable and returns a lazy sequence that is maximally lazy.
Based on -do-i-avoid-clojures-chunking-behavior-for-lazy-seqs-that-i-want-to-short-ci"
[s]
(lazy-seq
(when-let [s (seq s)]
(cons (first s)
(unchunk (rest s))))))
(defn threadpool
"Make a threadpool. It should be shutdown when no longer needed.
See docs in com.climate.claypoole/threadpool."
^java.util.concurrent.ScheduledExecutorService [n & args]
(Executors/newScheduledThreadPool n (apply thread-factory args)))
(defn- prioritize
"Apply a priority function to a task.
Note that this re-throws all priority-fn exceptions as ExecutionExceptions.
That shouldn't mess anything up because the caller re-throws it as an
ExecutionException anyway.
For simplicity, prioritize reifies both Callable and Runnable, rather than
having one prioritize function for each of those types. That means, for
example, that if you prioritize a Runnable that is not also a Callable, you
might want to cast the result to Runnable or otherwise use it carefully."
[task, ^IFn priority-fn]
(let [priority (try
(long (apply priority-fn (-> task meta :args)))
(catch Exception e
(throw (ExecutionException.
"Priority function exception" e))))]
(reify
Callable
(call [_] (.call ^Callable task))
Runnable
(run [_] (.run ^Runnable task))
Prioritized
(getPriority [_] priority))))
A Threadpool that applies a priority function to tasks and uses a
(deftype PriorityThreadpool [^PriorityThreadpoolImpl pool, ^IFn priority-fn]
ExecutorService
(^boolean awaitTermination [_, ^long timeout, ^TimeUnit unit]
(.awaitTermination pool timeout unit))
(^List invokeAll [_, ^Collection tasks]
(.invokeAll pool (map #(prioritize % priority-fn) tasks)))
(^List invokeAll [_, ^Collection tasks, ^long timeout, ^TimeUnit unit]
(.invokeAll pool (map #(prioritize % priority-fn) tasks) timeout unit))
(^Object invokeAny [_, ^Collection tasks]
(.invokeAny pool (map #(prioritize % priority-fn) tasks)))
(^Object invokeAny [_, ^Collection tasks, ^long timeout, ^TimeUnit unit]
(.invokeAny pool (map #(prioritize % priority-fn) tasks) timeout unit))
(^boolean isShutdown [_]
(.isShutdown pool))
(^boolean isTerminated [_]
(.isTerminated pool))
(shutdown [_]
(.shutdown pool))
(^List shutdownNow [_]
(.shutdownNow pool))
(^Future submit [_, ^Runnable task]
(.submit pool ^Runnable (prioritize task priority-fn)))
(^Future submit [_, ^Runnable task, ^Object result]
(.submit pool ^Runnable (prioritize task priority-fn) result))
(^Future submit [_ ^Callable task]
(.submit pool ^Callable (prioritize task priority-fn))))
(defn ->threadpool
"Convert the argument into a threadpool, leaving the special keyword :serial
alone.
Returns [created? threadpool], where created? indicates whether a new
threadpool was instantiated."
[arg]
(cond
(instance? ExecutorService arg) [false arg]
(integer? arg) [true (threadpool arg)]
(= :builtin arg) [false clojure.lang.Agent/soloExecutor]
(= :serial arg) [false :serial]
:else (throw (IllegalArgumentException.
(format
(str "Claypoole functions require a threadpool, a "
"number, :builtin, or :serial, not %s.") arg)))))
(defn get-pool-size
"If the pool has a max size, get that; else, return nil."
[pool]
(cond
(instance? java.util.concurrent.ScheduledThreadPoolExecutor pool)
(.getCorePoolSize ^java.util.concurrent.ScheduledThreadPoolExecutor pool)
(instance? java.util.concurrent.ThreadPoolExecutor pool)
(.getMaximumPoolSize ^java.util.concurrent.ThreadPoolExecutor pool)
:else
nil))
(let [end-marker (Object.)]
(defn- queue-reader
"Make a lazy sequence from a queue, stopping upon reading the unique
end-marker object."
[^LinkedBlockingQueue q]
(lazy-seq
(let [x (.take q)]
(when-not (identical? x end-marker)
(cons x (queue-reader q))))))
(defn queue-seq
"Create a queue and a lazy sequence that reads from that queue."
[]
(let [q (LinkedBlockingQueue.)]
[q (queue-reader q)]))
(defn queue-seq-add!
"Add an item to a queue (and its lazy sequence)."
[^LinkedBlockingQueue q x]
(.put q x))
(defn queue-seq-end!
"End a lazy sequence reading from a queue."
[q]
(queue-seq-add! q end-marker)))
(defn lazy-co-read
"Zip s1 and s2, stopping when s1 stops. This helps avoid potential blocking
when trying to read queue sequences.
In particular, this will block:
(map vector
(range 10)
(concat (range 10) (lazy-seq (deref (promise)))))
even though we only can read 10 things. Lazy-co-read fixes that case by
checking the first sequence first, so this will not block:
(lazy-co-read
(range 10)
(concat (range 10) (lazy-seq (deref (promise)))))"
[s1 s2]
(lazy-seq (when-not (empty? s1)
(cons [(first s1) (first s2)]
(lazy-co-read (rest s1) (rest s2))))))
(defn with-priority-fn
"Make a priority-threadpool wrapper that uses a given priority function.
The priority function is applied to a pmap'd function's arguments. e.g.
(upmap (with-priority-fn p (fn [x _] x)) + [6 5 4] [1 2 3])
will use pool p to run tasks [(+ 6 1) (+ 5 2) (+ 4 3)]
with priorities [6 5 4]."
^com.climate.claypoole.impl.PriorityThreadpool
[^PriorityThreadpool pool priority-fn]
(let [^PriorityThreadpoolImpl pool* (.pool pool)]
(PriorityThreadpool. pool* priority-fn)))
(defn pfor-internal
"Do the messy parsing of the :priority from the for bindings."
[pool bindings body pmap-fn-sym]
(when (vector? pool)
(throw (IllegalArgumentException.
(str "Got a vector instead of a pool--"
"did you forget to use a threadpool?"))))
(if-not (= :priority (first (take-last 2 bindings)))
`(~pmap-fn-sym ~pool #(%) (for ~bindings (fn [] ~@body)))
(let [bindings* (vec (drop-last 2 bindings))
priority-value (last bindings)]
`(let [pool# (with-priority-fn ~pool (fn [_# p#] p#))
[fns# priorities#] (apply map vector
(for ~bindings*
[(fn [priority#] ~@body)
~priority-value]))]
(~pmap-fn-sym pool# #(%1 %2) fns# priorities#)))))
(defn seq-open
"Converts a seq s into a lazy seq that calls a function f when the seq is
fully realized or when an exception is thrown. Sort of like with-open, but
not a macro, not necessarily calling .close, and for a lazy seq."
[f s]
(lazy-seq
(let [sprime (try
force one element of s to make exceptions happen here
(when-let [s (seq s)]
(cons (first s) (rest s)))
(catch Throwable t
(f)
(throw t)))]
(if (seq sprime)
(cons (first sprime) (seq-open f (rest sprime)))
(do (f) nil)))))
|
2a85eb5b0e938d4a210b34909ca6158a75eabf4ebdba852a0694d3ad9c968980 | ndmitchell/shake | Errors.hs | # LANGUAGE TypeFamilies , GeneralizedNewtypeDeriving , DeriveDataTypeable , ScopedTypeVariables #
module Test.Errors(main) where
import Development.Shake
import Development.Shake.Classes
import Development.Shake.FilePath
import Test.Type
import Data.List.Extra
import Control.Monad
import Control.Concurrent.Extra
import General.GetOpt
import General.Extra
import Data.IORef
import Control.Exception.Extra
import System.Directory as IO
import System.Time.Extra
import qualified System.IO.Extra as IO
data Args = Die deriving (Eq,Enum,Bounded,Show)
newtype BadBinary = BadBinary String deriving (NFData,Show,Eq,Hashable,Typeable)
type instance RuleResult BadBinary = BadBinary
instance Binary BadBinary where
put (BadBinary x) = put x
get = do x <- get; if x == "bad" then error "get: BadBinary \"bad\"" else pure $ BadBinary x
main = testBuildArgs test optionsEnum $ \args -> do
"norule" %> \_ ->
need ["norule_isavailable"]
"failcreate" %> \_ ->
pure ()
["failcreates", "failcreates2"] &%> \_ ->
writeFile' "failcreates" ""
"recursive_" %> \_ -> need ["intermediate_"]
"intermediate_" %> \_ -> need ["recursive_"]
"rec1" %> \_ -> need ["rec2"]
"rec2" %> \_ -> need ["rec1"]
"systemcmd" %> \_ ->
cmd "random_missing_command"
"stack1" %> \_ -> need ["stack2"]
"stack2" %> \_ -> need ["stack3"]
"stack3" %> \_ -> error "crash"
"staunch1" %> \out -> do
liftIO $ sleep 0.1
writeFile' out "test"
"staunch2" %> \_ -> error "crash"
let catcher out op = out %> \out -> do
writeFile' out "0"
op $ do src <- IO.readFile' out; writeFile out $ show (read src + 1 :: Int)
catcher "finally1" $ actionFinally $ fail "die"
catcher "finally2" $ actionFinally $ pure ()
catcher "finally3" $ actionFinally $ liftIO $ sleep 10
catcher "finally4" $ actionFinally $ need ["wait"]
"wait" ~> do liftIO $ sleep 10
catcher "exception1" $ actionOnException $ fail "die"
catcher "exception2" $ actionOnException $ pure ()
"retry*" %> \out -> do
ref <- liftIO $ newIORef 3
actionRetry (read [last out]) $ liftIO $ do
old <- readIORef ref
writeIORef ref $ old - 1
if old == 0 then writeFile' out "" else fail "die"
res <- newResource "resource_name" 1
"resource" %> \_ ->
withResource res 1 $
need ["resource-dep"]
"overlap.txt" %> \out -> writeFile' out "overlap.txt"
"overlap.t*" %> \out -> writeFile' out "overlap.t*"
"overlap.*" %> \out -> writeFile' out "overlap.*"
["*.txx","*.tox"] &%> \_ -> fail "do not run"
["*p.txx"] &%> \_ -> fail "do not run"
"chain.2" %> \out -> do
src <- readFile' "chain.1"
if src == "err" then error "err_chain" else writeFileChanged out src
"chain.3" %> \out -> copyFile' "chain.2" out
"tempfile" %> \out -> do
file <- withTempFile $ \file -> do
liftIO $ assertExists file
pure file
liftIO $ assertMissing file
withTempFile $ \file -> do
liftIO $ assertExists file
writeFile' out file
fail "tempfile-died"
"tempdir" %> \out -> do
file <- withTempDir $ \dir -> do
let file = dir </> "foo.txt"
liftIO $ writeFile (dir </> "foo.txt") ""
-- will throw if the directory does not exist
writeFile' out ""
pure file
liftIO $ assertMissing file
phony "fail1" $ fail "die1"
phony "fail2" $ fail "die2"
when (Die `elem` args) $ action $ error "death error"
"fresh_dir" %> \out -> liftIO $ createDirectoryRecursive out
"need_dir" %> \out -> do
liftIO $ createDirectoryRecursive "existing_dir"
need ["existing_dir"]
writeFile' out ""
"persist_failure.1" %> \out -> do
liftIO $ appendFile "persist_failure.log" "[pre]"
need ["persist_failure.2"]
liftIO $ appendFile "persist_failure.log" "[post]"
writeFile' out ""
"persist_failure.2" %> \out -> do
src <- readFile' "persist_failure.3"
liftIO $ print ("persist_failure.3", src)
if src == "die" then do
liftIO $ appendFile "persist_failure.log" "[err]"
fail "die"
else
writeFileChanged out src
"fast_failure" %> \_ -> do
liftIO $ sleep 0.1
fail "die"
"slow_success" %> \out -> do
liftIO $ sleep 20
writeFile' out ""
addOracle $ \(BadBinary x) -> pure $ BadBinary $ 'b':x
"badinput" %> \out -> do
askOracle $ BadBinary "bad"
liftIO $ appendFile out "x"
"badoutput" %> \out -> do
askOracle $ BadBinary "ad"
liftIO $ appendFile out "x"
"badnone" %> \out -> do
alwaysRerun
liftIO $ appendFile out "x"
"produces1" %> \out -> do
produces [out <.> "also"]
writeFile' (out <.> "also") ""
writeFile' out ""
"produces2" %> \out -> do
produces [out <.> "also"]
writeFile' out ""
"finalfinal" %> \out -> do
writeFile' out ""
lock <- liftIO newLock
let output = withLock lock . appendFile out
liftIO (sleep 100)
`actionFinally` (output "X" >> sleep 0.1)
`actionFinally` output "Y"
let catching out = flip actionCatch $ \(e :: SomeException) -> writeFile' out $ show e
"catch1" %> \out -> catching out $ fail "magic1"
"catch2" %> \out -> catching out $ liftIO $ killThread =<< myThreadId
"catch3.1" %> \out -> fail "magic3"
"catch3.2" %> \out -> catching out $ need ["catch3.1"]
not tested by default since only causes an error when idle GC is turned on
phony "block" $
liftIO $ putStrLn $ let x = x in x
test build = do
on Windows , file paths may end up with \ separators , make sure we can still match them
let crash args parts = assertExceptionAfter (replace "\\" "/") parts (build $ "--quiet" : args)
build ["clean"]
writeFile "chain.1" "x"
build ["chain.3","--sleep"]
writeFile "chain.1" "err"
crash ["chain.3"] ["err_chain"]
crash ["norule"] ["norule_isavailable"]
crash ["failcreate"] ["failcreate"]
crash ["failcreates"] ["failcreates"]
crash ["recursive_"] ["recursive_","intermediate_","recursive"]
crash ["rec1","rec2"] ["rec1","rec2","indirect recursion","recursive"]
notMacCI $ crash ["systemcmd"] ["systemcmd","random_missing_command", "at cmd, called at"]
crash ["stack1"] ["stack1","stack2","stack3","crash"]
b <- IO.doesFileExist "staunch1"
when b $ removeFile "staunch1"
crash ["staunch1","staunch2","-j2"] ["crash"]
assertBoolIO (not <$> IO.doesFileExist "staunch1") "File should not exist, should have crashed first"
crash ["staunch1","staunch2","-j2","--keep-going","--silent"] ["crash"]
assertBoolIO (IO.doesFileExist "staunch1") "File should exist, staunch should have let it be created"
crash ["finally1"] ["die"]
assertContents "finally1" "1"
build ["finally2"]
assertContents "finally2" "1"
crash ["exception1"] ["die"]
assertContents "exception1" "1"
build ["exception2"]
assertContents "exception2" "0"
crash ["retry0"] ["positive","0"]
crash ["retry1"] ["die"]
build ["retry4"]
forM_ ["finally3","finally4"] $ \name -> do
t <- forkIO $ ignore $ build [name,"--exception"]
retry 10 $ sleep 0.1 >> assertContents name "0"
throwTo t (IndexOutOfBounds "test")
retry 10 $ sleep 0.1 >> assertContents name "1"
crash ["resource"] ["cannot currently introduce a dependency","withResource","resource_name"]
build ["overlap.foo"]
assertContents "overlap.foo" "overlap.*"
build ["overlap.txt"]
assertContents "overlap.txt" "overlap.txt"
crash ["overlap.txx"] $
["key matches multiple rules","matched: 4","overlap.txx","overlap.t*","overlap.*","*.tox"] ++
["Test/Errors.hs"]
crash ["tempfile"] ["tempfile-died"]
src <- readFile "tempfile"
assertMissing src
build ["tempdir"]
crash ["--die"] ["Shake","death error","Test/Errors.hs"]
putStrLn "## BUILD errors"
(out,_) <- IO.captureOutput $ build []
assertBool ("nothing to do" `isInfixOf` out) $ "Expected 'nothing to do', but got: " ++ out
putStrLn "## BUILD errors fail1 fail2 -k -j2"
(out,_) <- IO.captureOutput $ try_ $ build ["fail1","fail2","-k","-j2",""]
assertBool ("die1" `isInfixOf` out && "die2" `isInfixOf` out) $ "Expected 'die1' and 'die2', but got: " ++ out
crash ["fresh_dir"] ["expected a file, got a directory","fresh_dir"]
crash ["need_dir"] ["expected a file, got a directory","existing_dir"]
check errors do n't persist to the database , # 428
writeFile "persist_failure.log" ""
writeFile "persist_failure.3" "test"
build ["persist_failure.1","--sleep"]
writeFile "persist_failure.3" "die"
crash ["persist_failure.1","--sleep"] []
assertContents "persist_failure.log" "[pre][post][err][pre]"
writeFile "persist_failure.3" "test"
build ["persist_failure.1","--sleep"]
assertContents "persist_failure.log" "[pre][post][err][pre]"
writeFile "persist_failure.3" "more"
build ["persist_failure.1"]
assertContents "persist_failure.log" "[pre][post][err][pre][pre][post]"
-- check a fast failure aborts a slow success
(t, _) <- duration $ crash ["fast_failure","slow_success","-j2"] ["die"]
assertBool (t < 10) $ "Took too long, expected < 10, got " ++ show t
-- for exceptions on Key we die while reading the database, and restart from scratch
build ["badinput"]
build ["badinput","--silent"]
assertContents "badinput" "xx"
build ["badnone","--silent"] -- must be able to still run other rules
assertContents "badnone" "x"
-- for exceptions on Value we die while running the rule that requires it
build ["badoutput"]
crash ["badoutput"] ["badoutput","BadBinary"]
build ["badnone"] -- must be able to still run other rules
assertContents "badnone" "xx"
-- check that produces works
build ["produces1"]
crash ["produces2"] ["produces","produces2.also"]
check finally does n't run twice , See
t <- forkIO $ build ["finalfinal","--quiet"]
sleep 0.2
killThread t
sleep 0.5
assertContents "finalfinal" "XY"
build ["catch1"]
assertContentsInfix "catch1" "magic1"
crash ["catch2"] [show ThreadKilled]
crash ["catch3.2"] ["magic3"]
| null | https://raw.githubusercontent.com/ndmitchell/shake/06ed483ad7970b13ae6c4aded5a2939c88b88431/src/Test/Errors.hs | haskell | will throw if the directory does not exist
check a fast failure aborts a slow success
for exceptions on Key we die while reading the database, and restart from scratch
must be able to still run other rules
for exceptions on Value we die while running the rule that requires it
must be able to still run other rules
check that produces works | # LANGUAGE TypeFamilies , GeneralizedNewtypeDeriving , DeriveDataTypeable , ScopedTypeVariables #
module Test.Errors(main) where
import Development.Shake
import Development.Shake.Classes
import Development.Shake.FilePath
import Test.Type
import Data.List.Extra
import Control.Monad
import Control.Concurrent.Extra
import General.GetOpt
import General.Extra
import Data.IORef
import Control.Exception.Extra
import System.Directory as IO
import System.Time.Extra
import qualified System.IO.Extra as IO
data Args = Die deriving (Eq,Enum,Bounded,Show)
newtype BadBinary = BadBinary String deriving (NFData,Show,Eq,Hashable,Typeable)
type instance RuleResult BadBinary = BadBinary
instance Binary BadBinary where
put (BadBinary x) = put x
get = do x <- get; if x == "bad" then error "get: BadBinary \"bad\"" else pure $ BadBinary x
main = testBuildArgs test optionsEnum $ \args -> do
"norule" %> \_ ->
need ["norule_isavailable"]
"failcreate" %> \_ ->
pure ()
["failcreates", "failcreates2"] &%> \_ ->
writeFile' "failcreates" ""
"recursive_" %> \_ -> need ["intermediate_"]
"intermediate_" %> \_ -> need ["recursive_"]
"rec1" %> \_ -> need ["rec2"]
"rec2" %> \_ -> need ["rec1"]
"systemcmd" %> \_ ->
cmd "random_missing_command"
"stack1" %> \_ -> need ["stack2"]
"stack2" %> \_ -> need ["stack3"]
"stack3" %> \_ -> error "crash"
"staunch1" %> \out -> do
liftIO $ sleep 0.1
writeFile' out "test"
"staunch2" %> \_ -> error "crash"
let catcher out op = out %> \out -> do
writeFile' out "0"
op $ do src <- IO.readFile' out; writeFile out $ show (read src + 1 :: Int)
catcher "finally1" $ actionFinally $ fail "die"
catcher "finally2" $ actionFinally $ pure ()
catcher "finally3" $ actionFinally $ liftIO $ sleep 10
catcher "finally4" $ actionFinally $ need ["wait"]
"wait" ~> do liftIO $ sleep 10
catcher "exception1" $ actionOnException $ fail "die"
catcher "exception2" $ actionOnException $ pure ()
"retry*" %> \out -> do
ref <- liftIO $ newIORef 3
actionRetry (read [last out]) $ liftIO $ do
old <- readIORef ref
writeIORef ref $ old - 1
if old == 0 then writeFile' out "" else fail "die"
res <- newResource "resource_name" 1
"resource" %> \_ ->
withResource res 1 $
need ["resource-dep"]
"overlap.txt" %> \out -> writeFile' out "overlap.txt"
"overlap.t*" %> \out -> writeFile' out "overlap.t*"
"overlap.*" %> \out -> writeFile' out "overlap.*"
["*.txx","*.tox"] &%> \_ -> fail "do not run"
["*p.txx"] &%> \_ -> fail "do not run"
"chain.2" %> \out -> do
src <- readFile' "chain.1"
if src == "err" then error "err_chain" else writeFileChanged out src
"chain.3" %> \out -> copyFile' "chain.2" out
"tempfile" %> \out -> do
file <- withTempFile $ \file -> do
liftIO $ assertExists file
pure file
liftIO $ assertMissing file
withTempFile $ \file -> do
liftIO $ assertExists file
writeFile' out file
fail "tempfile-died"
"tempdir" %> \out -> do
file <- withTempDir $ \dir -> do
let file = dir </> "foo.txt"
liftIO $ writeFile (dir </> "foo.txt") ""
writeFile' out ""
pure file
liftIO $ assertMissing file
phony "fail1" $ fail "die1"
phony "fail2" $ fail "die2"
when (Die `elem` args) $ action $ error "death error"
"fresh_dir" %> \out -> liftIO $ createDirectoryRecursive out
"need_dir" %> \out -> do
liftIO $ createDirectoryRecursive "existing_dir"
need ["existing_dir"]
writeFile' out ""
"persist_failure.1" %> \out -> do
liftIO $ appendFile "persist_failure.log" "[pre]"
need ["persist_failure.2"]
liftIO $ appendFile "persist_failure.log" "[post]"
writeFile' out ""
"persist_failure.2" %> \out -> do
src <- readFile' "persist_failure.3"
liftIO $ print ("persist_failure.3", src)
if src == "die" then do
liftIO $ appendFile "persist_failure.log" "[err]"
fail "die"
else
writeFileChanged out src
"fast_failure" %> \_ -> do
liftIO $ sleep 0.1
fail "die"
"slow_success" %> \out -> do
liftIO $ sleep 20
writeFile' out ""
addOracle $ \(BadBinary x) -> pure $ BadBinary $ 'b':x
"badinput" %> \out -> do
askOracle $ BadBinary "bad"
liftIO $ appendFile out "x"
"badoutput" %> \out -> do
askOracle $ BadBinary "ad"
liftIO $ appendFile out "x"
"badnone" %> \out -> do
alwaysRerun
liftIO $ appendFile out "x"
"produces1" %> \out -> do
produces [out <.> "also"]
writeFile' (out <.> "also") ""
writeFile' out ""
"produces2" %> \out -> do
produces [out <.> "also"]
writeFile' out ""
"finalfinal" %> \out -> do
writeFile' out ""
lock <- liftIO newLock
let output = withLock lock . appendFile out
liftIO (sleep 100)
`actionFinally` (output "X" >> sleep 0.1)
`actionFinally` output "Y"
let catching out = flip actionCatch $ \(e :: SomeException) -> writeFile' out $ show e
"catch1" %> \out -> catching out $ fail "magic1"
"catch2" %> \out -> catching out $ liftIO $ killThread =<< myThreadId
"catch3.1" %> \out -> fail "magic3"
"catch3.2" %> \out -> catching out $ need ["catch3.1"]
not tested by default since only causes an error when idle GC is turned on
phony "block" $
liftIO $ putStrLn $ let x = x in x
test build = do
on Windows , file paths may end up with \ separators , make sure we can still match them
let crash args parts = assertExceptionAfter (replace "\\" "/") parts (build $ "--quiet" : args)
build ["clean"]
writeFile "chain.1" "x"
build ["chain.3","--sleep"]
writeFile "chain.1" "err"
crash ["chain.3"] ["err_chain"]
crash ["norule"] ["norule_isavailable"]
crash ["failcreate"] ["failcreate"]
crash ["failcreates"] ["failcreates"]
crash ["recursive_"] ["recursive_","intermediate_","recursive"]
crash ["rec1","rec2"] ["rec1","rec2","indirect recursion","recursive"]
notMacCI $ crash ["systemcmd"] ["systemcmd","random_missing_command", "at cmd, called at"]
crash ["stack1"] ["stack1","stack2","stack3","crash"]
b <- IO.doesFileExist "staunch1"
when b $ removeFile "staunch1"
crash ["staunch1","staunch2","-j2"] ["crash"]
assertBoolIO (not <$> IO.doesFileExist "staunch1") "File should not exist, should have crashed first"
crash ["staunch1","staunch2","-j2","--keep-going","--silent"] ["crash"]
assertBoolIO (IO.doesFileExist "staunch1") "File should exist, staunch should have let it be created"
crash ["finally1"] ["die"]
assertContents "finally1" "1"
build ["finally2"]
assertContents "finally2" "1"
crash ["exception1"] ["die"]
assertContents "exception1" "1"
build ["exception2"]
assertContents "exception2" "0"
crash ["retry0"] ["positive","0"]
crash ["retry1"] ["die"]
build ["retry4"]
forM_ ["finally3","finally4"] $ \name -> do
t <- forkIO $ ignore $ build [name,"--exception"]
retry 10 $ sleep 0.1 >> assertContents name "0"
throwTo t (IndexOutOfBounds "test")
retry 10 $ sleep 0.1 >> assertContents name "1"
crash ["resource"] ["cannot currently introduce a dependency","withResource","resource_name"]
build ["overlap.foo"]
assertContents "overlap.foo" "overlap.*"
build ["overlap.txt"]
assertContents "overlap.txt" "overlap.txt"
crash ["overlap.txx"] $
["key matches multiple rules","matched: 4","overlap.txx","overlap.t*","overlap.*","*.tox"] ++
["Test/Errors.hs"]
crash ["tempfile"] ["tempfile-died"]
src <- readFile "tempfile"
assertMissing src
build ["tempdir"]
crash ["--die"] ["Shake","death error","Test/Errors.hs"]
putStrLn "## BUILD errors"
(out,_) <- IO.captureOutput $ build []
assertBool ("nothing to do" `isInfixOf` out) $ "Expected 'nothing to do', but got: " ++ out
putStrLn "## BUILD errors fail1 fail2 -k -j2"
(out,_) <- IO.captureOutput $ try_ $ build ["fail1","fail2","-k","-j2",""]
assertBool ("die1" `isInfixOf` out && "die2" `isInfixOf` out) $ "Expected 'die1' and 'die2', but got: " ++ out
crash ["fresh_dir"] ["expected a file, got a directory","fresh_dir"]
crash ["need_dir"] ["expected a file, got a directory","existing_dir"]
check errors do n't persist to the database , # 428
writeFile "persist_failure.log" ""
writeFile "persist_failure.3" "test"
build ["persist_failure.1","--sleep"]
writeFile "persist_failure.3" "die"
crash ["persist_failure.1","--sleep"] []
assertContents "persist_failure.log" "[pre][post][err][pre]"
writeFile "persist_failure.3" "test"
build ["persist_failure.1","--sleep"]
assertContents "persist_failure.log" "[pre][post][err][pre]"
writeFile "persist_failure.3" "more"
build ["persist_failure.1"]
assertContents "persist_failure.log" "[pre][post][err][pre][pre][post]"
(t, _) <- duration $ crash ["fast_failure","slow_success","-j2"] ["die"]
assertBool (t < 10) $ "Took too long, expected < 10, got " ++ show t
build ["badinput"]
build ["badinput","--silent"]
assertContents "badinput" "xx"
assertContents "badnone" "x"
build ["badoutput"]
crash ["badoutput"] ["badoutput","BadBinary"]
assertContents "badnone" "xx"
build ["produces1"]
crash ["produces2"] ["produces","produces2.also"]
check finally does n't run twice , See
t <- forkIO $ build ["finalfinal","--quiet"]
sleep 0.2
killThread t
sleep 0.5
assertContents "finalfinal" "XY"
build ["catch1"]
assertContentsInfix "catch1" "magic1"
crash ["catch2"] [show ThreadKilled]
crash ["catch3.2"] ["magic3"]
|
ea15d6ac845ad2ddb3d3e1a15edc2a04094154a54ea724fbae147b6cc10333a4 | owlbarn/owl_symbolic | example_12.ml |
* OWL - OCaml Scientific and Engineering Computing
* Copyright ( c ) 2016 - 2020 < >
* OWL - OCaml Scientific and Engineering Computing
* Copyright (c) 2016-2020 Liang Wang <>
*)
open Owl_symbolic_neural_graph
open Owl_symbolic_types
(** InceptionV3 *)
(* Note to specify the defautl value of padding in avgpool etc. *)
let conv2d_bn ?(padding = SAME_UPPER) kernel stride nn =
conv2d ~padding kernel stride nn |> normalisation |> activation Relu
let mix_typ1 in_shape bp_size nn =
let branch1x1 = conv2d_bn [| 64; in_shape; 1; 1 |] [| 1; 1 |] nn in
let branch5x5 =
nn
|> conv2d_bn [| 48; in_shape; 1; 1 |] [| 1; 1 |]
|> conv2d_bn [| 64; 48; 5; 5 |] [| 1; 1 |]
in
let branch3x3dbl =
nn
|> conv2d_bn [| 64; in_shape; 1; 1 |] [| 1; 1 |]
|> conv2d_bn [| 96; 64; 3; 3 |] [| 1; 1 |]
|> conv2d_bn [| 96; 96; 3; 3 |] [| 1; 1 |]
in
let branch_pool =
nn
|> avg_pool2d ~padding:SAME_UPPER [| 3; 3 |] [| 1; 1 |]
|> conv2d_bn [| bp_size; in_shape; 1; 1 |] [| 1; 1 |]
in
concat ~axis:1 [| branch1x1; branch5x5; branch3x3dbl; branch_pool |]
let mix_typ3 nn =
let branch3x3 = conv2d_bn [| 384; 288; 3; 3 |] [| 2; 2 |] ~padding:VALID nn in
let branch3x3dbl =
nn
|> conv2d_bn [| 64; 288; 1; 1 |] [| 1; 1 |]
|> conv2d_bn [| 96; 64; 3; 3 |] [| 1; 1 |]
|> conv2d_bn [| 96; 96; 3; 3 |] [| 2; 2 |] ~padding:VALID
in
let branch_pool = max_pool2d [| 3; 3 |] [| 2; 2 |] ~padding:VALID nn in
concat ~axis:1 [| branch3x3; branch3x3dbl; branch_pool |]
let mix_typ4 size nn =
let branch1x1 = conv2d_bn [| 192; 768; 1; 1 |] [| 1; 1 |] nn in
let branch7x7 =
nn
|> conv2d_bn [| size; 768; 1; 1 |] [| 1; 1 |]
|> conv2d_bn [| size; size; 1; 7 |] [| 1; 1 |]
|> conv2d_bn [| 192; size; 7; 1 |] [| 1; 1 |]
in
let branch7x7dbl =
nn
|> conv2d_bn [| size; 768; 1; 1 |] [| 1; 1 |]
|> conv2d_bn [| size; size; 7; 1 |] [| 1; 1 |]
|> conv2d_bn [| size; size; 1; 7 |] [| 1; 1 |]
|> conv2d_bn [| size; size; 7; 1 |] [| 1; 1 |]
|> conv2d_bn [| 192; size; 1; 7 |] [| 1; 1 |]
in
let branch_pool =
nn
|> avg_pool2d [| 3; 3 |] [| 1; 1 |] ~padding:SAME_UPPER
|> conv2d_bn [| 192; 768; 1; 1 |] [| 1; 1 |]
in
concat ~axis:1 [| branch1x1; branch7x7; branch7x7dbl; branch_pool |]
let mix_typ8 nn =
let branch3x3 =
nn
|> conv2d_bn [| 192; 768; 1; 1 |] [| 1; 1 |]
|> conv2d_bn [| 320; 192; 3; 3 |] [| 2; 2 |] ~padding:VALID
in
let branch7x7x3 =
nn
|> conv2d_bn [| 192; 768; 1; 1 |] [| 1; 1 |]
|> conv2d_bn [| 192; 192; 1; 7 |] [| 1; 1 |]
|> conv2d_bn [| 192; 192; 7; 1 |] [| 1; 1 |]
|> conv2d_bn [| 192; 192; 3; 3 |] [| 2; 2 |] ~padding:VALID
in
let branch_pool = max_pool2d [| 3; 3 |] [| 2; 2 |] ~padding:VALID nn in
concat ~axis:1 [| branch3x3; branch7x7x3; branch_pool |]
let mix_typ9 input nn =
let branch1x1 = conv2d_bn [| 320; input; 1; 1 |] [| 1; 1 |] nn in
let branch3x3 = conv2d_bn [| 384; input; 1; 1 |] [| 1; 1 |] nn in
let branch3x3_1 = branch3x3 |> conv2d_bn [| 384; 384; 1; 3 |] [| 1; 1 |] in
let branch3x3_2 = branch3x3 |> conv2d_bn [| 384; 384; 3; 1 |] [| 1; 1 |] in
let branch3x3 = concat ~axis:1 [| branch3x3_1; branch3x3_2 |] in
let branch3x3dbl =
nn
|> conv2d_bn [| 448; input; 1; 1 |] [| 1; 1 |]
|> conv2d_bn [| 384; 448; 3; 3 |] [| 1; 1 |]
in
let branch3x3dbl_1 = branch3x3dbl |> conv2d_bn [| 384; 384; 1; 3 |] [| 1; 1 |] in
let branch3x3dbl_2 = branch3x3dbl |> conv2d_bn [| 384; 384; 3; 1 |] [| 1; 1 |] in
let branch3x3dbl = concat ~axis:1 [| branch3x3dbl_1; branch3x3dbl_2 |] in
let branch_pool =
nn
|> avg_pool2d ~padding:SAME_UPPER [| 3; 3 |] [| 1; 1 |]
|> conv2d_bn [| 192; input; 1; 1 |] [| 1; 1 |]
in
concat ~axis:1 [| branch1x1; branch3x3; branch3x3dbl; branch_pool |]
let make_network batch img_size =
input [| batch; 3; img_size; img_size |]
|> conv2d_bn [| 32; 3; 3; 3 |] [| 2; 2 |] ~padding:VALID
|> conv2d_bn [| 32; 32; 3; 3 |] [| 1; 1 |] ~padding:VALID
|> conv2d_bn [| 64; 32; 3; 3 |] [| 1; 1 |]
|> max_pool2d [| 3; 3 |] [| 2; 2 |] ~padding:VALID
|> conv2d_bn [| 80; 64; 1; 1 |] [| 1; 1 |] ~padding:VALID
|> conv2d_bn [| 192; 80; 3; 3 |] [| 1; 1 |] ~padding:VALID
|> max_pool2d [| 3; 3 |] [| 2; 2 |] ~padding:VALID
|> mix_typ1 192 32
|> mix_typ1 256 64
|> mix_typ1 288 64
|> mix_typ3
|> mix_typ4 128
|> mix_typ4 160
|> mix_typ4 160
|> mix_typ4 192
|> mix_typ8
|> mix_typ9 1280
|> mix_typ9 2048
|> global_avg_pool2d
|> linear 1000
|> activation (Softmax 1)
|> get_network
let _ =
let nn = make_network 1 299 in
let onnx_graph = Owl_symbolic_engine_onnx.of_symbolic nn in
Owl_symbolic_engine_onnx.save onnx_graph "test.onnx"
| null | https://raw.githubusercontent.com/owlbarn/owl_symbolic/dc853a016757d3f143c5e07e50075e7ae605d969/example/example_12.ml | ocaml | * InceptionV3
Note to specify the defautl value of padding in avgpool etc. |
* OWL - OCaml Scientific and Engineering Computing
* Copyright ( c ) 2016 - 2020 < >
* OWL - OCaml Scientific and Engineering Computing
* Copyright (c) 2016-2020 Liang Wang <>
*)
open Owl_symbolic_neural_graph
open Owl_symbolic_types
(* Convolution -> batch normalisation -> ReLU, the basic InceptionV3 unit.
   [kernel] is [|out_channels; in_channels; h; w|]; [stride] is [|sh; sw|].
   Padding defaults to SAME_UPPER. *)
let conv2d_bn ?(padding = SAME_UPPER) kernel stride nn =
  conv2d ~padding kernel stride nn |> normalisation |> activation Relu
(* Inception block A: four parallel branches (1x1, 5x5, double-3x3, and
   avg-pool + 1x1) concatenated along the channel axis (axis 1).
   [in_shape] is the input channel count; [bp_size] is the output channel
   count of the pooling branch. *)
let mix_typ1 in_shape bp_size nn =
  let branch1x1 = conv2d_bn [| 64; in_shape; 1; 1 |] [| 1; 1 |] nn in
  (* 1x1 reduction followed by a 5x5 convolution *)
  let branch5x5 =
    nn
    |> conv2d_bn [| 48; in_shape; 1; 1 |] [| 1; 1 |]
    |> conv2d_bn [| 64; 48; 5; 5 |] [| 1; 1 |]
  in
  (* 1x1 reduction followed by two stacked 3x3 convolutions *)
  let branch3x3dbl =
    nn
    |> conv2d_bn [| 64; in_shape; 1; 1 |] [| 1; 1 |]
    |> conv2d_bn [| 96; 64; 3; 3 |] [| 1; 1 |]
    |> conv2d_bn [| 96; 96; 3; 3 |] [| 1; 1 |]
  in
  (* same-size 3x3 average pooling, then a 1x1 projection *)
  let branch_pool =
    nn
    |> avg_pool2d ~padding:SAME_UPPER [| 3; 3 |] [| 1; 1 |]
    |> conv2d_bn [| bp_size; in_shape; 1; 1 |] [| 1; 1 |]
  in
  concat ~axis:1 [| branch1x1; branch5x5; branch3x3dbl; branch_pool |]
(* Inception reduction block: two stride-2 convolution branches plus a
   stride-2 max-pool halve the spatial size. Input is expected to have
   288 channels (see the hard-coded kernel shapes). *)
let mix_typ3 nn =
  let branch3x3 = conv2d_bn [| 384; 288; 3; 3 |] [| 2; 2 |] ~padding:VALID nn in
  let branch3x3dbl =
    nn
    |> conv2d_bn [| 64; 288; 1; 1 |] [| 1; 1 |]
    |> conv2d_bn [| 96; 64; 3; 3 |] [| 1; 1 |]
    |> conv2d_bn [| 96; 96; 3; 3 |] [| 2; 2 |] ~padding:VALID
  in
  let branch_pool = max_pool2d [| 3; 3 |] [| 2; 2 |] ~padding:VALID nn in
  concat ~axis:1 [| branch3x3; branch3x3dbl; branch_pool |]
(* Inception block B with factorised 7x7 convolutions (1x7 then 7x1).
   [size] is the channel width of the intermediate 7x7 layers; input is
   expected to have 768 channels, and every branch outputs 192 channels. *)
let mix_typ4 size nn =
  let branch1x1 = conv2d_bn [| 192; 768; 1; 1 |] [| 1; 1 |] nn in
  let branch7x7 =
    nn
    |> conv2d_bn [| size; 768; 1; 1 |] [| 1; 1 |]
    |> conv2d_bn [| size; size; 1; 7 |] [| 1; 1 |]
    |> conv2d_bn [| 192; size; 7; 1 |] [| 1; 1 |]
  in
  (* double pass of the factorised 7x7 pair *)
  let branch7x7dbl =
    nn
    |> conv2d_bn [| size; 768; 1; 1 |] [| 1; 1 |]
    |> conv2d_bn [| size; size; 7; 1 |] [| 1; 1 |]
    |> conv2d_bn [| size; size; 1; 7 |] [| 1; 1 |]
    |> conv2d_bn [| size; size; 7; 1 |] [| 1; 1 |]
    |> conv2d_bn [| 192; size; 1; 7 |] [| 1; 1 |]
  in
  let branch_pool =
    nn
    |> avg_pool2d [| 3; 3 |] [| 1; 1 |] ~padding:SAME_UPPER
    |> conv2d_bn [| 192; 768; 1; 1 |] [| 1; 1 |]
  in
  concat ~axis:1 [| branch1x1; branch7x7; branch7x7dbl; branch_pool |]
(* Inception reduction block: stride-2 3x3 and 7x7x3 branches plus a
   stride-2 max-pool, concatenated along channels. Input has 768 channels. *)
let mix_typ8 nn =
  let branch3x3 =
    nn
    |> conv2d_bn [| 192; 768; 1; 1 |] [| 1; 1 |]
    |> conv2d_bn [| 320; 192; 3; 3 |] [| 2; 2 |] ~padding:VALID
  in
  let branch7x7x3 =
    nn
    |> conv2d_bn [| 192; 768; 1; 1 |] [| 1; 1 |]
    |> conv2d_bn [| 192; 192; 1; 7 |] [| 1; 1 |]
    |> conv2d_bn [| 192; 192; 7; 1 |] [| 1; 1 |]
    |> conv2d_bn [| 192; 192; 3; 3 |] [| 2; 2 |] ~padding:VALID
  in
  let branch_pool = max_pool2d [| 3; 3 |] [| 2; 2 |] ~padding:VALID nn in
  concat ~axis:1 [| branch3x3; branch7x7x3; branch_pool |]
(* Inception block C: each 3x3 branch is itself split into parallel 1x3 and
   3x1 convolutions whose outputs are concatenated. [input] is the number
   of input channels (1280 or 2048 in this network). *)
let mix_typ9 input nn =
  let branch1x1 = conv2d_bn [| 320; input; 1; 1 |] [| 1; 1 |] nn in
  let branch3x3 = conv2d_bn [| 384; input; 1; 1 |] [| 1; 1 |] nn in
  let branch3x3_1 = branch3x3 |> conv2d_bn [| 384; 384; 1; 3 |] [| 1; 1 |] in
  let branch3x3_2 = branch3x3 |> conv2d_bn [| 384; 384; 3; 1 |] [| 1; 1 |] in
  let branch3x3 = concat ~axis:1 [| branch3x3_1; branch3x3_2 |] in
  let branch3x3dbl =
    nn
    |> conv2d_bn [| 448; input; 1; 1 |] [| 1; 1 |]
    |> conv2d_bn [| 384; 448; 3; 3 |] [| 1; 1 |]
  in
  let branch3x3dbl_1 = branch3x3dbl |> conv2d_bn [| 384; 384; 1; 3 |] [| 1; 1 |] in
  let branch3x3dbl_2 = branch3x3dbl |> conv2d_bn [| 384; 384; 3; 1 |] [| 1; 1 |] in
  let branch3x3dbl = concat ~axis:1 [| branch3x3dbl_1; branch3x3dbl_2 |] in
  let branch_pool =
    nn
    |> avg_pool2d ~padding:SAME_UPPER [| 3; 3 |] [| 1; 1 |]
    |> conv2d_bn [| 192; input; 1; 1 |] [| 1; 1 |]
  in
  concat ~axis:1 [| branch1x1; branch3x3; branch3x3dbl; branch_pool |]
(* Assemble the full InceptionV3 graph for a [batch] x 3 x [img_size] x
   [img_size] input (299 is the canonical size used below): stem
   convolutions, the three families of inception blocks, global average
   pooling, and a 1000-way softmax classifier. *)
let make_network batch img_size =
  input [| batch; 3; img_size; img_size |]
  |> conv2d_bn [| 32; 3; 3; 3 |] [| 2; 2 |] ~padding:VALID
  |> conv2d_bn [| 32; 32; 3; 3 |] [| 1; 1 |] ~padding:VALID
  |> conv2d_bn [| 64; 32; 3; 3 |] [| 1; 1 |]
  |> max_pool2d [| 3; 3 |] [| 2; 2 |] ~padding:VALID
  |> conv2d_bn [| 80; 64; 1; 1 |] [| 1; 1 |] ~padding:VALID
  |> conv2d_bn [| 192; 80; 3; 3 |] [| 1; 1 |] ~padding:VALID
  |> max_pool2d [| 3; 3 |] [| 2; 2 |] ~padding:VALID
  |> mix_typ1 192 32
  |> mix_typ1 256 64
  |> mix_typ1 288 64
  |> mix_typ3
  |> mix_typ4 128
  |> mix_typ4 160
  |> mix_typ4 160
  |> mix_typ4 192
  |> mix_typ8
  |> mix_typ9 1280
  |> mix_typ9 2048
  |> global_avg_pool2d
  |> linear 1000
  |> activation (Softmax 1)
  |> get_network
(* Build the symbolic graph and serialise it to an ONNX model file. *)
let _ =
  let nn = make_network 1 299 in
  let onnx_graph = Owl_symbolic_engine_onnx.of_symbolic nn in
  Owl_symbolic_engine_onnx.save onnx_graph "test.onnx"
|
041519f289f1285663cce910f6da0aef33c54f0f9948d3e42dd01bf501294df4 | vasyaod/parental-control | Config.hs | module Config where
import Control.Applicative
import qualified Data.HashMap.Strict as HM
import Data.Maybe (fromMaybe, fromJust)
import Data.Text
import Data.Time
import Data.Yaml
import qualified Data.Yaml as Y
data MyConfig = MyConfig
{ os :: String,
commands :: Commands,
statePath :: String,
httpEnable :: Bool,
httpPort :: Int,
httpInterface :: String,
httpStaticPath :: String,
usersConfigPath :: Maybe String,
usersConfigRefreshPeriod :: Int
}
deriving (Eq, Show)
instance FromJSON MyConfig where
parseJSON (Y.Object m) =
MyConfig
<$> m .:? pack ("os") .!= "linux"
<*> m .:? pack ("commands") .!= fromJust (parseMaybe (\m -> parseJSON ((Object HM.empty) :: Value)) ())
<*> m .:? pack ("statePath") .!= "/var/lib/parental-control"
<*> m .:? pack ("httpEnable") .!= True
<*> m .:? pack ("httpPort") .!= 8090
<*> m .:? pack ("httpInterface") .!= "127.0.0.1"
<*> m .:? pack ("httpStaticPath") .!= "/usr/share/parental-control"
<*> m .:? pack ("usersConfigPath")
<*> m .:? pack ("usersConfigRefreshPeriod") .!= 300
parseJSON x = fail ("not an object: " ++ show x)
data Commands = Commands
{ message :: String,
kill :: String
}
deriving (Eq, Show)
instance FromJSON Commands where
parseJSON (Y.Object m) =
Commands
<$> m .: pack ("message")
<*> m .: pack ("kill")
parseJSON x = fail ("not an object: " ++ show x)
data User = User
{ login :: String,
timeLimit :: Int,
noticePeriod :: Int,
extendedTime :: [ExtendedTime],
schedule :: Schedule
}
deriving (Eq, Show)
instance FromJSON User where
parseJSON (Y.Object m) =
User
<$> m .: pack ("login")
<*> m .:? pack ("timeLimit") .!= 1500
<*> m .:? pack ("noticePeriod") .!= 5
<*> m .:? pack ("extendedTime") .!= []
<*> m .: pack ("schedule")
parseJSON x = fail ("not an object: " ++ show x)
data ExtendedTime = ExtendedTime
{ date :: String,
timeCount :: Int
}
deriving (Eq, Show)
instance FromJSON ExtendedTime where
parseJSON (Y.Object m) =
ExtendedTime
<$> m .: pack ("date")
<*> m .: pack ("timeCount")
parseJSON x = fail ("not an object: " ++ show x)
data Schedule = Schedule
{ mon :: [Range],
tue :: [Range],
wed :: [Range],
thu :: [Range],
fri :: [Range],
sat :: [Range],
sun :: [Range]
}
deriving (Eq, Show)
instance FromJSON Schedule where
parseJSON (Y.Object m) =
Schedule
<$> m .: pack ("mon")
<*> m .: pack ("tue")
<*> m .: pack ("wed")
<*> m .: pack ("thu")
<*> m .: pack ("fri")
<*> m .: pack ("sat")
<*> m .: pack ("sun")
parseJSON x = fail ("not an object: " ++ show x)
data Range = Range
{ start :: TimeOfDay,
end :: TimeOfDay
}
deriving (Eq, Show)
instance FromJSON Range where
parseJSON (Y.Object m) =
Range
<$> m .: pack ("start")
<*> m .: pack ("end")
parseJSON x = fail ("not an object: " ++ show x)
readConfig :: String -> IO (MyConfig)
readConfig = decodeFileThrow | null | https://raw.githubusercontent.com/vasyaod/parental-control/ec7c971b2d76a014a4f40cfd47a6defd08a73e42/schedule-daemon/src/Config.hs | haskell | module Config where
import Control.Applicative
import qualified Data.HashMap.Strict as HM
import Data.Maybe (fromMaybe, fromJust)
import Data.Text
import Data.Time
import Data.Yaml
import qualified Data.Yaml as Y
-- | Top-level daemon configuration, parsed from the YAML config file.
data MyConfig = MyConfig
  { os :: String, -- ^ target OS name (default \"linux\")
    commands :: Commands, -- ^ shell commands used to message/kill sessions
    statePath :: String, -- ^ directory holding persistent state
    httpEnable :: Bool, -- ^ whether to start the HTTP server
    httpPort :: Int, -- ^ HTTP listen port
    httpInterface :: String, -- ^ HTTP bind address
    httpStaticPath :: String, -- ^ directory with static web assets
    usersConfigPath :: Maybe String, -- ^ optional path to the users config
    usersConfigRefreshPeriod :: Int -- ^ users config reload period (seconds)
  }
  deriving (Eq, Show)

-- Supplies a default for every optional field.
-- NOTE(review): the default for "commands" is
-- @fromJust (parseMaybe ... (parseJSON (Object HM.empty)))@, but 'Commands'
-- requires both "message" and "kill" (.:), so parsing an empty object yields
-- Nothing and 'fromJust' raises at runtime when the field is omitted —
-- confirm this is intended (i.e. "commands" is effectively mandatory).
instance FromJSON MyConfig where
  parseJSON (Y.Object m) =
    MyConfig
      <$> m .:? pack ("os") .!= "linux"
      <*> m .:? pack ("commands") .!= fromJust (parseMaybe (\m -> parseJSON ((Object HM.empty) :: Value)) ())
      <*> m .:? pack ("statePath") .!= "/var/lib/parental-control"
      <*> m .:? pack ("httpEnable") .!= True
      <*> m .:? pack ("httpPort") .!= 8090
      <*> m .:? pack ("httpInterface") .!= "127.0.0.1"
      <*> m .:? pack ("httpStaticPath") .!= "/usr/share/parental-control"
      <*> m .:? pack ("usersConfigPath")
      <*> m .:? pack ("usersConfigRefreshPeriod") .!= 300
  parseJSON x = fail ("not an object: " ++ show x)
-- | Shell command templates the daemon runs; both fields are mandatory in
-- the YAML ("message" and "kill").
data Commands = Commands
  { message :: String, -- ^ command used to show a message to the user
    kill :: String -- ^ command used to terminate the user's session
  }
  deriving (Eq, Show)

instance FromJSON Commands where
  parseJSON (Y.Object m) =
    Commands
      <$> m .: pack ("message")
      <*> m .: pack ("kill")
  parseJSON x = fail ("not an object: " ++ show x)
-- | Per-user limits: daily time budget, warning period, date-specific time
-- extensions, and a weekly schedule of allowed intervals.
data User = User
  { login :: String, -- ^ system login name (mandatory)
    timeLimit :: Int, -- ^ allowed time (default 1500)
    noticePeriod :: Int, -- ^ warning lead time before cut-off (default 5)
    extendedTime :: [ExtendedTime], -- ^ per-date extra time grants
    schedule :: Schedule -- ^ weekly allowed intervals (mandatory)
  }
  deriving (Eq, Show)

instance FromJSON User where
  parseJSON (Y.Object m) =
    User
      <$> m .: pack ("login")
      <*> m .:? pack ("timeLimit") .!= 1500
      <*> m .:? pack ("noticePeriod") .!= 5
      <*> m .:? pack ("extendedTime") .!= []
      <*> m .: pack ("schedule")
  parseJSON x = fail ("not an object: " ++ show x)
-- | Extra time granted on a specific date; both fields are mandatory.
data ExtendedTime = ExtendedTime
  { date :: String, -- ^ the date the extension applies to
    timeCount :: Int -- ^ amount of extra time granted
  }
  deriving (Eq, Show)

instance FromJSON ExtendedTime where
  parseJSON (Y.Object m) =
    ExtendedTime
      <$> m .: pack ("date")
      <*> m .: pack ("timeCount")
  parseJSON x = fail ("not an object: " ++ show x)
-- | Weekly schedule: a list of allowed time 'Range's for each day. Every
-- day key is mandatory in the YAML.
data Schedule = Schedule
  { mon :: [Range],
    tue :: [Range],
    wed :: [Range],
    thu :: [Range],
    fri :: [Range],
    sat :: [Range],
    sun :: [Range]
  }
  deriving (Eq, Show)

instance FromJSON Schedule where
  parseJSON (Y.Object m) =
    Schedule
      <$> m .: pack ("mon")
      <*> m .: pack ("tue")
      <*> m .: pack ("wed")
      <*> m .: pack ("thu")
      <*> m .: pack ("fri")
      <*> m .: pack ("sat")
      <*> m .: pack ("sun")
  parseJSON x = fail ("not an object: " ++ show x)
-- | A time-of-day interval within one day; parsed via the 'TimeOfDay'
-- FromJSON instance from Data.Time.
data Range = Range
  { start :: TimeOfDay, -- ^ interval start
    end :: TimeOfDay -- ^ interval end
  }
  deriving (Eq, Show)

instance FromJSON Range where
  parseJSON (Y.Object m) =
    Range
      <$> m .: pack ("start")
      <*> m .: pack ("end")
  parseJSON x = fail ("not an object: " ++ show x)
-- | Read and parse the YAML configuration file at the given path, throwing
-- an exception on I/O or parse failure (via 'decodeFileThrow').
readConfig :: String -> IO (MyConfig)
readConfig = decodeFileThrow
30e3253ee705a54bb3fbdad205a7436aff31db6acf06158a435fa74aeef87428 | astynax/hemmet | BEM.hs | module Hemmet.BEM
( BemBackend
, BemRunner
, bem
, bemHtml
, bemCss
, bemReactFlux
) where
import Data.Text as T
import Hemmet.Backend
import Hemmet.Runner
import Hemmet.BEM.Rendering
import Hemmet.BEM.Template
import Hemmet.BEM.Transformation
import Hemmet.BEM.Tree
type BemBackend = Backend BemPayload
type BemRunner = Runner BemPayload
bem :: BemBackend
bem =
Backend
{ getTransformation = \input ->
if "<" `T.isPrefixOf` input
then (stripTopNode, T.tail input)
else (id, input)
, parser = template
, examples = bemExamples
}
bemHtml :: BemRunner
bemHtml = PureRunner renderHtmlM
bemCss :: BemRunner
bemCss = PureRunner renderCssM
bemReactFlux :: BemRunner
bemReactFlux = PureRunner renderReactFluxM
bemExamples :: [(Text, Text)]
bemExamples =
[ ("minimal", ":foo")
, ( "complex"
, "form:search-form$theme>input.query>(div.help~hidden_t)+\
\span.submit-button~disabled_t:button~text_small>.hint"
)
, ("transformation: top node strip", "<:block>.elem")
]
| null | https://raw.githubusercontent.com/astynax/hemmet/0ee76cc4ffca5726f8a70ab78ae8473232b96ee3/src/Hemmet/BEM.hs | haskell | module Hemmet.BEM
( BemBackend
, BemRunner
, bem
, bemHtml
, bemCss
, bemReactFlux
) where
import Data.Text as T
import Hemmet.Backend
import Hemmet.Runner
import Hemmet.BEM.Rendering
import Hemmet.BEM.Template
import Hemmet.BEM.Transformation
import Hemmet.BEM.Tree
type BemBackend = Backend BemPayload
type BemRunner = Runner BemPayload
-- | The BEM backend. If the input starts with \"<\", the marker is dropped
-- and the 'stripTopNode' transformation is applied to the parsed template;
-- otherwise the input is parsed unchanged.
bem :: BemBackend
bem =
    Backend
        { getTransformation = \input ->
            if "<" `T.isPrefixOf` input
                then (stripTopNode, T.tail input)
                else (id, input)
        , parser = template
        , examples = bemExamples
        }
-- | Render a BEM template to HTML.
bemHtml :: BemRunner
bemHtml = PureRunner renderHtmlM
-- | Render a BEM template to CSS.
bemCss :: BemRunner
bemCss = PureRunner renderCssM
-- | Render a BEM template to react-flux code.
bemReactFlux :: BemRunner
bemReactFlux = PureRunner renderReactFluxM
-- | Example inputs shown to the user, as (label, template) pairs.
bemExamples :: [(Text, Text)]
bemExamples =
    [ ("minimal", ":foo")
    , ( "complex"
      , "form:search-form$theme>input.query>(div.help~hidden_t)+\
        \span.submit-button~disabled_t:button~text_small>.hint"
      )
    , ("transformation: top node strip", "<:block>.elem")
    ]
| |
f3890b9dc835b7517eb42c848bf348f156d296f39322d1c06ff5d48a99734d9e | ocaml-flambda/ocaml-jst | member.ml | let () = print_endline "Hello!"
| null | https://raw.githubusercontent.com/ocaml-flambda/ocaml-jst/1bb6c797df7c63ddae1fc2e6f403a0ee9896cc8e/testsuite/tests/packs/inconsistent/member.ml | ocaml | let () = print_endline "Hello!"
| |
edfff449098a35916f1caa2eef58db954025a10a5ce51259d3cf04318b176277 | dm3/clojure.java-time | user.clj | (ns user
(:require [clojure.tools.namespace.repl :as repl]
[criterium.core :as crit]
[taoensso.timbre :as timbre]
[taoensso.tufte :as profiling :refer (pspy profile defnp p)]))
(defn go []
(set! *warn-on-reflection* false)
(repl/refresh-all)
(require '[java-time :as j])
(eval `(profile :info :local-date-time (j/local-date-time 1 2 3)))
(eval `(profile :info :zoned-date-time (j/zoned-date-time 1 2 3)))
(eval `(profile :info :fail (try (j/zoned-date-time 1 2 "a") (catch Exception e# nil)))))
(defn bench []
(repl/refresh-all)
(require '[java-time :as j])
(eval `(crit/bench (j/local-date-time 1 2 3))))
(defn print-reflection-warnings []
(set! *warn-on-reflection* true)
(repl/refresh-all)
(set! *warn-on-reflection* false))
| null | https://raw.githubusercontent.com/dm3/clojure.java-time/f00db0b2c6e1540ca12815aacb57949a9bfb5589/dev/user.clj | clojure | (ns user
(:require [clojure.tools.namespace.repl :as repl]
[criterium.core :as crit]
[taoensso.timbre :as timbre]
[taoensso.tufte :as profiling :refer (pspy profile defnp p)]))
(defn go []
(set! *warn-on-reflection* false)
(repl/refresh-all)
(require '[java-time :as j])
(eval `(profile :info :local-date-time (j/local-date-time 1 2 3)))
(eval `(profile :info :zoned-date-time (j/zoned-date-time 1 2 3)))
(eval `(profile :info :fail (try (j/zoned-date-time 1 2 "a") (catch Exception e# nil)))))
(defn bench []
(repl/refresh-all)
(require '[java-time :as j])
(eval `(crit/bench (j/local-date-time 1 2 3))))
(defn print-reflection-warnings []
(set! *warn-on-reflection* true)
(repl/refresh-all)
(set! *warn-on-reflection* false))
| |
f22f4f752207bdb7ddac14d8e0a1a754dfc0d709e488139775aada7e54c81313 | Engil/Goodboy | interrupts.ml | type t = char
let vblank= 0
let lcd_stats = 1
let timer = 2
let serial = 3
let joypad = 4
let vblank_handler = 0x40
let lcd_stats_handler = 0x48
let timer_handler = 0x50
let joypad_handler = 0x60
| null | https://raw.githubusercontent.com/Engil/Goodboy/2e9abc243b929d8bdfb7c5d4874ddb8a07c55fac/lib/interrupts.ml | ocaml | type t = char
(* Interrupt bit indices in the IE/IF registers, in priority order. *)
let vblank = 0
let lcd_stats = 1
let timer = 2
let serial = 3
let joypad = 4

(* Fixed vector addresses the CPU jumps to when servicing each interrupt. *)
let vblank_handler = 0x40
let lcd_stats_handler = 0x48
let timer_handler = 0x50
(* Added for completeness: the serial interrupt vector sits at 0x58, between
   the timer (0x50) and joypad (0x60) vectors, matching the [serial] index
   declared above. *)
let serial_handler = 0x58
let joypad_handler = 0x60
| |
9cd3c5aa467133fba7d58e5e2375f7e6882777d8108d467e7b0835f8b7d22a3b | huangcheng/WebRTC | session_storage.erl | %%%-------------------------------------------------------------------
@author huangcheng
( C ) 2016 , < COMPANY >
%%% @doc
%%%
%%% @end
Created : 07 . Nov 2016 13:37
%%%-------------------------------------------------------------------
-module(session_storage).
-author("huangcheng").
-behaviour(gen_server).
-include("../include/webrtc.hrl").
-type client() :: {Name :: atom(), Pid :: pid()}.
%%-type status() :: idle | busy.
-type clients() :: [client()].
%% API
-export([start_link/0]).
-export([create_room/2,
enter_room/2,
get_server_by_room/1,
get_client_by_name/2,
get_client_pid/2]).
%% gen_server callbacks
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-define(SERVER, ?MODULE).
-define(ETS_TABLE_NAME, sesstion_table).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server
%%
%% @end
%%--------------------------------------------------------------------
-spec(start_link() ->
{ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
@private
%% @doc
%% Initializes the server
%%
%% init(Args) -> {ok, State} |
%%               {ok, State, Timeout} |
%% ignore |
%% {stop, Reason}
%% @end
%%--------------------------------------------------------------------
-spec(init(Args :: term()) ->
{ok, State :: term()} | {ok, State :: term(), timeout() | hibernate} |
{stop, Reason :: term()} | ignore).
init([]) ->
{ok, ets:new(?ETS_TABLE_NAME, [ordered_set, private])}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling call messages
%%
%% @end
%%--------------------------------------------------------------------
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
State :: term()) ->
{reply, Reply :: term(), NewState :: term()} |
{reply, Reply :: term(), NewState :: term(), timeout() | hibernate} |
{noreply, NewState :: term()} |
{noreply, NewState :: term(), timeout() | hibernate} |
{stop, Reason :: term(), Reply :: term(), NewState :: term()} |
{stop, Reason :: term(), NewState :: term()}).
handle_call({create, {Room, Server}}, _From, State) ->
Reply = case ets:match_object(State, #session{room = Room, _ = '_'}) of
[_] ->
{error, ?ERROR_ROOM_ALREADY_EXISTED};
[] ->
RoomInfo = #session{room = Room, server = Server, audiences = []},
ets:insert(State, RoomInfo),
ok
end, {reply, Reply, State};
handle_call({enter, {Room, Client}}, _From, State) ->
{ClientName, _} = Client,
Reply = case ets:match_object(State, #session{room = Room, _ = '_'}) of
[Result] ->
case get_client_by_name(Result#session.audiences, ClientName) of
[_] ->
{error, ?ERROR_USER_ALREADY_EXISTED};
[] ->
NewSession = Result#session{audiences = [Client | Result#session.audiences]},
ets:insert(State, NewSession),
{ok, Result#session.server}
end;
[] ->
{error, ?ERROR_ROOM_DOES_NOT_EXIST}
end,
{reply, Reply, State};
handle_call({server, Room}, _From, State) ->
Reply = case ets:match_object(State, #session{room = Room, _ = '_'}) of
[Result] ->
{ok, Result#session.server};
[] ->
{error, ?ERROR_ROOM_DOES_NOT_EXIST}
end,
{reply, Reply, State};
handle_call({client, {Room, ClientName}}, _From, State) ->
Reply = case ets:match_object(State, #session{room = Room, _ = '_'}) of
[Result] ->
case get_client_by_name(Result#session.audiences, ClientName) of
[] ->
{error, ?ERROR_CLIENT_DOES_NOT_EXIST};
[{_, Pid}] ->
{ok, Pid}
end;
[] ->
{error, ?ERROR_ROOM_DOES_NOT_EXIST}
end,
{reply, Reply, State};
handle_call({audiences , Room } , _ From , State ) - >
%% Reply = case ets:match_object(State, #session{room = Room, _ = '_'}) of
%% [Result] ->
%% case Audiences = Result#session.audiences of
%% [] ->
%% {error, ?ERROR_ROOM_DOES_NOT_HAVE_ANY_CLIENT};
%% _ ->
%% {ok, Audiences}
%% end;
%% [] ->
%% {error, ?ERROR_ROOM_DOES_NOT_EXIST}
%% end,
%% {reply, Reply, State};
handle_call({idle , Room } , _ From , State ) - >
Reply = case ? MODULE : ) of
%% [Result] ->
%% case Audiences = Result#session.audiences of
%% [] ->
%% {error, ?ERROR_ROOM_DOES_NOT_HAVE_ANY_CLIENT};
%% _ ->
%% case get_idle_clients(Audiences) of
%% [] ->
%% {error, ?ERROR_ROOM_DOES_NOT_HAVE_ANY_CLIENT};
%% Clients ->
[ Pid || { _ , { Pid , _ } } < - Clients ]
%% end
%% end;
%% [] ->
%% {error, ?ERROR_ROOM_DOES_NOT_EXIST}
%% end,
%% {reply, Reply, State};
handle_call(_Request, _From, State) ->
{reply, ok, State}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling cast messages
%%
%% @end
%%--------------------------------------------------------------------
-spec(handle_cast(Request :: term(), State :: term()) ->
{noreply, NewState :: term()} |
{noreply, NewState :: term(), timeout() | hibernate} |
{stop, Reason :: term(), NewState :: term()}).
handle_cast(_Request, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling all non call/cast messages
%%
%% handle_info(Info, State) -> {noreply, State} |
%%                             {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
-spec(handle_info(Info :: timeout() | term(), State :: term()) ->
{noreply, NewState :: term()} |
{noreply, NewState :: term(), timeout() | hibernate} |
{stop, Reason :: term(), NewState :: term()}).
handle_info(_Info, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
@private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
%%
%% terminate(Reason, State) -> void()
%% @end
%%--------------------------------------------------------------------
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
    State :: term()) -> term()).
%% State is the ETS table id created in init/1; drop the table on shutdown.
%% (Renamed _State -> State: binding and then using an underscore-prefixed
%% variable is an Erlang anti-pattern.)
terminate(_Reason, State) ->
  ets:delete(State),
  ok.
%%--------------------------------------------------------------------
@private
%% @doc
%% Convert process state when code is changed
%%
%% code_change(OldVsn, State, Extra) -> {ok, NewState}
%% @end
%%--------------------------------------------------------------------
-spec(code_change(OldVsn :: term() | {down, term()}, State :: term(),
Extra :: term()) ->
{ok, NewState :: term()} | {error, Reason :: term()}).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
Internal functions
%%%===================================================================
%%--------------------------------------------------------------------
@public
%% @doc
%% Create a living room.
%%
%% @spec create_room(Room, Server) -> ok | {error, Reason}
%% @end
%%--------------------------------------------------------------------
-spec(create_room(Room :: integer(), Server :: pid()) -> ok | {error, Reason :: integer()}).
create_room(Room, Server) ->
gen_server:call(?SERVER, {create, {Room, Server}}).
-spec(enter_room(Room :: integer(), Client :: client()) -> {ok, Server :: pid()} | {error, Reason :: integer()}).
enter_room(Room, Client) ->
gen_server:call(?SERVER, {enter, {Room, Client}}).
-spec(get_server_by_room(Room :: integer()) -> {ok, Server :: pid()} | {error, Reason :: integer()}).
get_server_by_room(Room) ->
gen_server:call(?SERVER, {server, Room}).
-spec(get_client_pid(Room :: integer(), ClientName :: atom()) -> {ok, ClientPid :: pid()} | {error, Reason :: integer()}).
get_client_pid(Room, ClientName) ->
gen_server:call(?SERVER, {client, {Room, ClientName}}).
%% Return the clients whose registered name equals Name (usually zero or
%% one entry), preserving list order.
-spec(get_client_by_name(Clients :: clients(), Name :: atom()) -> [client()] | []).
get_client_by_name(Clients, Name) ->
  [Client || {ClientName, _} = Client <- Clients, ClientName =:= Name].
| null | https://raw.githubusercontent.com/huangcheng/WebRTC/bade880d5c09968c0e8b2acb1da5467c84936290/apps/webrtc/src/session_storage.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
-type status() :: idle | busy.
API
gen_server callbacks
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Starts the server
@end
--------------------------------------------------------------------
===================================================================
gen_server callbacks
===================================================================
--------------------------------------------------------------------
@doc
Initializes the server
ignore |
{stop, Reason}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling call messages
@end
--------------------------------------------------------------------
Reply = case ets:match_object(State, #session{room = Room, _ = '_'}) of
[Result] ->
case Audiences = Result#session.audiences of
[] ->
{error, ?ERROR_ROOM_DOES_NOT_HAVE_ANY_CLIENT};
_ ->
{ok, Audiences}
end;
[] ->
{error, ?ERROR_ROOM_DOES_NOT_EXIST}
end,
{reply, Reply, State};
[Result] ->
case Audiences = Result#session.audiences of
[] ->
{error, ?ERROR_ROOM_DOES_NOT_HAVE_ANY_CLIENT};
_ ->
case get_idle_clients(Audiences) of
[] ->
{error, ?ERROR_ROOM_DOES_NOT_HAVE_ANY_CLIENT};
Clients ->
end
end;
[] ->
{error, ?ERROR_ROOM_DOES_NOT_EXIST}
end,
{reply, Reply, State};
--------------------------------------------------------------------
@doc
Handling cast messages
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling all non call/cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any
necessary cleaning up. When it returns, the gen_server terminates
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Convert process state when code is changed
@end
--------------------------------------------------------------------
===================================================================
===================================================================
--------------------------------------------------------------------
@doc
Create a living room.
@end
-------------------------------------------------------------------- | @author huangcheng
( C ) 2016 , < COMPANY >
Created : 07 . Nov 2016 13:37
-module(session_storage).
-author("huangcheng").
-behaviour(gen_server).
-include("../include/webrtc.hrl").
-type client() :: {Name :: atom(), Pid :: pid()}.
-type clients() :: [client()].
-export([start_link/0]).
-export([create_room/2,
enter_room/2,
get_server_by_room/1,
get_client_by_name/2,
get_client_pid/2]).
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-define(SERVER, ?MODULE).
-define(ETS_TABLE_NAME, sesstion_table).
-spec(start_link() ->
{ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
@private
) - > { ok , State } |
{ ok , State , Timeout } |
-spec(init(Args :: term()) ->
{ok, State :: term()} | {ok, State :: term(), timeout() | hibernate} |
{stop, Reason :: term()} | ignore).
init([]) ->
{ok, ets:new(?ETS_TABLE_NAME, [ordered_set, private])}.
@private
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
State :: term()) ->
{reply, Reply :: term(), NewState :: term()} |
{reply, Reply :: term(), NewState :: term(), timeout() | hibernate} |
{noreply, NewState :: term()} |
{noreply, NewState :: term(), timeout() | hibernate} |
{stop, Reason :: term(), Reply :: term(), NewState :: term()} |
{stop, Reason :: term(), NewState :: term()}).
handle_call({create, {Room, Server}}, _From, State) ->
Reply = case ets:match_object(State, #session{room = Room, _ = '_'}) of
[_] ->
{error, ?ERROR_ROOM_ALREADY_EXISTED};
[] ->
RoomInfo = #session{room = Room, server = Server, audiences = []},
ets:insert(State, RoomInfo),
ok
end, {reply, Reply, State};
handle_call({enter, {Room, Client}}, _From, State) ->
{ClientName, _} = Client,
Reply = case ets:match_object(State, #session{room = Room, _ = '_'}) of
[Result] ->
case get_client_by_name(Result#session.audiences, ClientName) of
[_] ->
{error, ?ERROR_USER_ALREADY_EXISTED};
[] ->
NewSession = Result#session{audiences = [Client | Result#session.audiences]},
ets:insert(State, NewSession),
{ok, Result#session.server}
end;
[] ->
{error, ?ERROR_ROOM_DOES_NOT_EXIST}
end,
{reply, Reply, State};
handle_call({server, Room}, _From, State) ->
Reply = case ets:match_object(State, #session{room = Room, _ = '_'}) of
[Result] ->
{ok, Result#session.server};
[] ->
{error, ?ERROR_ROOM_DOES_NOT_EXIST}
end,
{reply, Reply, State};
handle_call({client, {Room, ClientName}}, _From, State) ->
Reply = case ets:match_object(State, #session{room = Room, _ = '_'}) of
[Result] ->
case get_client_by_name(Result#session.audiences, ClientName) of
[] ->
{error, ?ERROR_CLIENT_DOES_NOT_EXIST};
[{_, Pid}] ->
{ok, Pid}
end;
[] ->
{error, ?ERROR_ROOM_DOES_NOT_EXIST}
end,
{reply, Reply, State};
handle_call({audiences , Room } , _ From , State ) - >
handle_call({idle , Room } , _ From , State ) - >
Reply = case ? MODULE : ) of
[ Pid || { _ , { Pid , _ } } < - Clients ]
handle_call(_Request, _From, State) ->
{reply, ok, State}.
@private
-spec(handle_cast(Request :: term(), State :: term()) ->
{noreply, NewState :: term()} |
{noreply, NewState :: term(), timeout() | hibernate} |
{stop, Reason :: term(), NewState :: term()}).
handle_cast(_Request, State) ->
{noreply, State}.
@private
, State ) - > { noreply , State } |
{ noreply , State , Timeout } |
-spec(handle_info(Info :: timeout() | term(), State :: term()) ->
{noreply, NewState :: term()} |
{noreply, NewState :: term(), timeout() | hibernate} |
{stop, Reason :: term(), NewState :: term()}).
handle_info(_Info, State) ->
{noreply, State}.
@private
with . The return value is ignored .
, State ) - > void ( )
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
State :: term()) -> term()).
terminate(_Reason, _State) ->
ets:delete(_State),
ok.
@private
, State , Extra ) - > { ok , NewState }
-spec(code_change(OldVsn :: term() | {down, term()}, State :: term(),
Extra :: term()) ->
{ok, NewState :: term()} | {error, Reason :: term()}).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
@public
@spec create_room(Room , Server , Anchor ) - > ok | { error , Reason }
-spec(create_room(Room :: integer(), Server :: pid()) -> ok | {error, Reason :: integer()}).
create_room(Room, Server) ->
gen_server:call(?SERVER, {create, {Room, Server}}).
-spec(enter_room(Room :: integer(), Client :: client()) -> {ok, Server :: pid()} | {error, Reason :: integer()}).
enter_room(Room, Client) ->
gen_server:call(?SERVER, {enter, {Room, Client}}).
-spec(get_server_by_room(Room :: integer()) -> {ok, Server :: pid()} | {error, Reason :: integer()}).
get_server_by_room(Room) ->
gen_server:call(?SERVER, {server, Room}).
-spec(get_client_pid(Room :: integer(), ClientName :: atom()) -> {ok, ClientPid :: pid()} | {error, Reason :: integer()}).
get_client_pid(Room, ClientName) ->
gen_server:call(?SERVER, {client, {Room, ClientName}}).
-spec(get_client_by_name(Clients :: clients(), Name :: atom()) -> [client()] | []).
get_client_by_name(Clients, Name) ->
lists:filter(fun(Client) ->
{RoomName, _} = Client,
Name =:= RoomName
end, Clients).
|
efabce4b97442d5914a3f5191b74358c2980030726b98e387f83a6cdeb179ce0 | ocurrent/ocaml-docs-ci | track.ml | module Git = Current_git
(* Extend the library's OpamPackage module with Yojson converters so
   that [OpamPackage.t] can be used in [@@deriving yojson] types. *)
module OpamPackage = struct
  include OpamPackage

  (* Serialise a package as its canonical "name.version" string. *)
  let to_yojson t = `String (OpamPackage.to_string t)

  (* Accept only a JSON string that parses as a package name/version;
     any other JSON shape or unparsable string is rejected with the
     same error message as before. *)
  let of_yojson json =
    match json with
    | `String str -> (
        match OpamPackage.of_string_opt str with
        | Some pkg -> Ok pkg
        | None -> Error "failed to parse version")
    | _ -> Error "failed to parse version"
end
(* Current cache builder that scans an opam-repository checkout and
   produces a digest for every package version. *)
module Track = struct
(* No build context is needed; the key carries everything. *)
type t = No_context
(* Identifier under which results are cached. *)
let id = "opam-repo-track"
(* A superseded scan may be cancelled; a newer commit replaces it. *)
let auto_cancel = true
(* Cache key for a tracking run: the opam-repository commit, the
   package-name filter and the optional per-package version limit. *)
module Key = struct
type t = { limit : int option; repo : Git.Commit.t; filter : string list }
(* Stable digest for the cache: commit hash, the filter entries joined
   with ";", then the limit rendered as a string ("" when None). *)
let digest { repo; filter; limit } =
Git.Commit.hash repo ^ String.concat ";" filter ^ "; "
^ (limit |> Option.map string_of_int |> Option.value ~default:"")
end
(* Human-readable description of a run, used for job display. *)
let pp f { Key.repo; filter; _ } =
Fmt.pf f "opam repo track\n%a\n%a" Git.Commit.pp_short repo Fmt.(list string) filter
(* Result of a tracking run: every selected package version together
   with the digest of its opam file. *)
module Value = struct
type package_definition = { package : OpamPackage.t; digest : string } [@@deriving yojson]
type t = package_definition list [@@deriving yojson]
(* (Un)marshalling goes through the derived Yojson converters; note
   unmarshal raises if the stored JSON does not round-trip. *)
let marshal t = t |> to_yojson |> Yojson.Safe.to_string
let unmarshal t = t |> Yojson.Safe.from_string |> of_yojson |> Result.get_ok
end
(* [take n lst] keeps the first [n] elements of [lst] (fewer when the
   list is shorter than [n]). *)
let rec take n lst =
  if n = 0 then []
  else
    match lst with
    | [] -> []
    | hd :: tl -> hd :: take (n - 1) tl

(* Lift [take] over an optional limit: [None] means keep everything. *)
let take = function
  | Some limit -> take limit
  | None -> Fun.id
(* Read a whole file into a string (Lwt). *)
let get_file path = Lwt_io.with_file ~mode:Input (Fpath.to_string path) Lwt_io.read
(* Scan one package directory: for every version sub-directory, read
   its "opam" file and record the package name/version plus the MD5
   hex digest of the file contents. Versions are read in parallel
   (map_p), then sorted newest-first (note the negated compare) and
   truncated to [limit].
   NOTE(review): Result.get_ok raises if Bos fails to list the
   directory — errors here are treated as fatal. *)
let get_versions ~limit path =
let open Lwt.Syntax in
let open Rresult in
Bos.OS.Dir.contents path
>>| (fun versions ->
versions
|> Lwt_list.map_p (fun path ->
let+ content = get_file Fpath.(path / "opam") in
Value.
{
package = path |> Fpath.basename |> OpamPackage.of_string;
digest = Digest.(string content |> to_hex);
}))
|> Result.get_ok
|> Lwt.map (fun v ->
v |> List.sort (fun a b -> -OpamPackage.compare a.Value.package b.package) |> take limit)
(* Perform one tracking run: check out the opam-repository commit,
   list packages/, keep only the packages selected by [filter] (an
   empty filter keeps everything), and collect the version digests of
   each package sequentially (map_s). *)
let build No_context job { Key.repo; filter; limit } =
let open Lwt.Syntax in
let open Rresult in
(* Shadowing: [filter] becomes the predicate over directory names. *)
let filter name = match filter with [] -> true | lst -> List.mem (Fpath.basename name) lst in
let* () = Current.Job.start ~level:Harmless job in
Git.with_checkout ~job repo @@ fun dir ->
let result =
Bos.OS.Dir.contents Fpath.(dir / "packages") >>| fun packages ->
packages |> List.filter filter
|> Lwt_list.map_s (get_versions ~limit)
|> Lwt.map (fun v -> List.flatten v)
in
match result with Ok v -> Lwt.map Result.ok v | Error e -> Lwt.return_error e
end
(* Cached, latched wrapper around the Track builder. *)
module TrackCache = Misc.LatchedBuilder (Track)
open Track.Value
(* The exported element type: one package version plus its digest. *)
type t = package_definition [@@deriving yojson]
(* Accessors for the two record fields. *)
let pkg t = t.package
let digest t = t.digest
(* Map keyed by package (the digest does not take part in ordering). *)
module Map = OpamStd.Map.Make (struct
type nonrec t = t
let compare a b = O.OpamPackage.compare a.package b.package
let to_json { package; digest } = `A [ OpamPackage.to_json package; `String digest ]
(* JSON round-trip is write-only here. *)
let of_json _ = None
let to_string t = OpamPackage.to_string t.package
end)
(* Pipeline stage: given the opam-repository commit, produce the
   tracked package definitions for [filter] (empty = all packages),
   keeping at most [limit] versions per package. *)
let v ~limit ~(filter : string list) (repo : Git.Commit.t Current.t) =
let open Current.Syntax in
Current.component "Track packages - %a" Fmt.(list string) filter
|> let> repo = repo in
(* opkey is a constant because we expect only one instance of track *)
TrackCache.get ~opkey:"track" No_context { filter; repo; limit }
| null | https://raw.githubusercontent.com/ocurrent/ocaml-docs-ci/cb5d4a54a7fd9883aec066b4bd1fcb50ca42e7bc/src/lib/track.ml | ocaml | module Git = Current_git
module OpamPackage = struct
include OpamPackage
let to_yojson t = `String (OpamPackage.to_string t)
let of_yojson = function
| `String str -> (
match OpamPackage.of_string_opt str with
| Some x -> Ok x
| None -> Error "failed to parse version")
| _ -> Error "failed to parse version"
end
module Track = struct
type t = No_context
let id = "opam-repo-track"
let auto_cancel = true
module Key = struct
type t = { limit : int option; repo : Git.Commit.t; filter : string list }
let digest { repo; filter; limit } =
Git.Commit.hash repo ^ String.concat ";" filter ^ "; "
^ (limit |> Option.map string_of_int |> Option.value ~default:"")
end
let pp f { Key.repo; filter; _ } =
Fmt.pf f "opam repo track\n%a\n%a" Git.Commit.pp_short repo Fmt.(list string) filter
module Value = struct
type package_definition = { package : OpamPackage.t; digest : string } [@@deriving yojson]
type t = package_definition list [@@deriving yojson]
let marshal t = t |> to_yojson |> Yojson.Safe.to_string
let unmarshal t = t |> Yojson.Safe.from_string |> of_yojson |> Result.get_ok
end
let rec take n lst =
match (n, lst) with 0, _ -> [] | _, [] -> [] | n, a :: q -> a :: take (n - 1) q
let take = function Some n -> take n | None -> Fun.id
let get_file path = Lwt_io.with_file ~mode:Input (Fpath.to_string path) Lwt_io.read
let get_versions ~limit path =
let open Lwt.Syntax in
let open Rresult in
Bos.OS.Dir.contents path
>>| (fun versions ->
versions
|> Lwt_list.map_p (fun path ->
let+ content = get_file Fpath.(path / "opam") in
Value.
{
package = path |> Fpath.basename |> OpamPackage.of_string;
digest = Digest.(string content |> to_hex);
}))
|> Result.get_ok
|> Lwt.map (fun v ->
v |> List.sort (fun a b -> -OpamPackage.compare a.Value.package b.package) |> take limit)
let build No_context job { Key.repo; filter; limit } =
let open Lwt.Syntax in
let open Rresult in
let filter name = match filter with [] -> true | lst -> List.mem (Fpath.basename name) lst in
let* () = Current.Job.start ~level:Harmless job in
Git.with_checkout ~job repo @@ fun dir ->
let result =
Bos.OS.Dir.contents Fpath.(dir / "packages") >>| fun packages ->
packages |> List.filter filter
|> Lwt_list.map_s (get_versions ~limit)
|> Lwt.map (fun v -> List.flatten v)
in
match result with Ok v -> Lwt.map Result.ok v | Error e -> Lwt.return_error e
end
module TrackCache = Misc.LatchedBuilder (Track)
open Track.Value
type t = package_definition [@@deriving yojson]
let pkg t = t.package
let digest t = t.digest
module Map = OpamStd.Map.Make (struct
type nonrec t = t
let compare a b = O.OpamPackage.compare a.package b.package
let to_json { package; digest } = `A [ OpamPackage.to_json package; `String digest ]
let of_json _ = None
let to_string t = OpamPackage.to_string t.package
end)
let v ~limit ~(filter : string list) (repo : Git.Commit.t Current.t) =
let open Current.Syntax in
Current.component "Track packages - %a" Fmt.(list string) filter
|> let> repo = repo in
opkey is a constant because we expect only one instance of track
TrackCache.get ~opkey:"track" No_context { filter; repo; limit }
| |
c900fd4d16bde295831124374430d00134408d49d731ee4a9f427569ef450f35 | MyPost/cassius | retrieve.clj | (ns cassius.net.command.retrieve
(:require [cassius.protocols :refer [to-bbuff]]
[cassius.types.byte-buffer]
[cassius.net.connection :refer [client]]
[cassius.net.command
[keyspace :as ksp]
[macros :refer [raise-on-invalid-request]]])
(:import [org.apache.cassandra.thrift SlicePredicate ColumnParent
KeyRange SliceRange
ConsistencyLevel]))
;; Sentinel empty ByteBuffer used for open-ended key/column ranges.
(def EMPTY (to-bbuff ""))
;; Fetch a slice of columns from a single row of a column family,
;; after switching the connection to keyspace `ks`. Read consistency
;; comes from the connection map, defaulting to ONE.
;; NOTE(review): the name is a typo for `retrieve-slice`, but it is a
;; public var (called by retrieve-row/retrieve-column below), so it is
;; kept for backward compatibility.
(defn retrive-slice [conn ks cf row ^SlicePredicate slp]
(let [cp (ColumnParent. cf)]
(ksp/set-keyspace conn ks)
(raise-on-invalid-request
[conn ks cf :retrieve :column-family-not-found]
(.get_slice (client conn) (to-bbuff row) cp slp
(or (:consistency conn) ConsistencyLevel/ONE)))))
;; Fetch rows over a key range from a column family. The 4-arity form
;; defaults to an open-ended range (EMPTY..EMPTY) with no row limit;
;; the 7-arity form builds an explicit KeyRange. Read consistency
;; comes from the connection map, defaulting to ONE.
(defn retrieve-range-slices
([conn ks cf ^SlicePredicate slp]
(retrieve-range-slices conn ks cf slp EMPTY EMPTY Integer/MAX_VALUE))
([conn ks cf ^SlicePredicate slp start-key end-key count]
(let [cp (ColumnParent. cf)
kr (doto (KeyRange.)
(.setStart_key start-key)
(.setEnd_key end-key)
(.setCount count))]
(ksp/set-keyspace conn ks)
(raise-on-invalid-request
[conn ks cf :retrieve :column-family-not-found]
(.get_range_slices (client conn) cp slp kr
(or (:consistency conn) ConsistencyLevel/ONE))))))
;; Fetch columns of one row. The 4-arity form reads every column
;; (open-ended slice, forward order, no limit); the 8-arity form
;; exposes the full SliceRange parameters.
(defn retrieve-row
([conn ks cf row]
(retrieve-row conn ks cf row EMPTY EMPTY false Integer/MAX_VALUE))
([conn ks cf row start-key end-key reverse? count]
(let [slr (SliceRange. start-key
end-key
reverse? count)
slp (doto (SlicePredicate.)
(.setSlice_range slr))]
(retrive-slice conn ks cf row slp))))
;; Fetch a single named column of a row, expressed as a one-element
;; slice whose start and end are both the column name.
(defn retrieve-column
[conn ks cf row col]
(let [COL (to-bbuff col)
slr (SliceRange. COL
COL
false 1)
slp (doto (SlicePredicate.)
(.setSlice_range slr))]
(retrive-slice conn ks cf row slp)))
(defn retrieve-column-family
  ;; Fetch rows from a column family over a key range. The shorter
  ;; arities default start-key/end-key to EMPTY (open-ended) and
  ;; max-results to Integer/MAX_VALUE. Every column of each matching
  ;; row is returned (slice range EMPTY..EMPTY, unlimited columns).
  ;; Fixed: the 4-arity body bound an unused `cp` (ColumnParent) —
  ;; the column parent is constructed inside retrieve-range-slices.
  ([conn ks cf]
   (retrieve-column-family conn ks cf EMPTY))
  ([conn ks cf start-key]
   (retrieve-column-family conn ks cf start-key EMPTY))
  ([conn ks cf start-key end-key]
   (retrieve-column-family conn ks cf start-key end-key Integer/MAX_VALUE))
  ([conn ks cf start-key end-key max-results]
   (let [slr (SliceRange. EMPTY EMPTY
                          false Integer/MAX_VALUE)
         slp (doto (SlicePredicate.)
               (.setSlice_range slr))]
     (retrieve-range-slices conn ks cf slp start-key end-key max-results))))
| null | https://raw.githubusercontent.com/MyPost/cassius/7b5f550fa8e8f825d4ecd7ba6a0d34c5ff606a7c/src/cassius/net/command/retrieve.clj | clojure | (ns cassius.net.command.retrieve
(:require [cassius.protocols :refer [to-bbuff]]
[cassius.types.byte-buffer]
[cassius.net.connection :refer [client]]
[cassius.net.command
[keyspace :as ksp]
[macros :refer [raise-on-invalid-request]]])
(:import [org.apache.cassandra.thrift SlicePredicate ColumnParent
KeyRange SliceRange
ConsistencyLevel]))
(def EMPTY (to-bbuff ""))
(defn retrive-slice [conn ks cf row ^SlicePredicate slp]
(let [cp (ColumnParent. cf)]
(ksp/set-keyspace conn ks)
(raise-on-invalid-request
[conn ks cf :retrieve :column-family-not-found]
(.get_slice (client conn) (to-bbuff row) cp slp
(or (:consistency conn) ConsistencyLevel/ONE)))))
(defn retrieve-range-slices
([conn ks cf ^SlicePredicate slp]
(retrieve-range-slices conn ks cf slp EMPTY EMPTY Integer/MAX_VALUE))
([conn ks cf ^SlicePredicate slp start-key end-key count]
(let [cp (ColumnParent. cf)
kr (doto (KeyRange.)
(.setStart_key start-key)
(.setEnd_key end-key)
(.setCount count))]
(ksp/set-keyspace conn ks)
(raise-on-invalid-request
[conn ks cf :retrieve :column-family-not-found]
(.get_range_slices (client conn) cp slp kr
(or (:consistency conn) ConsistencyLevel/ONE))))))
(defn retrieve-row
([conn ks cf row]
(retrieve-row conn ks cf row EMPTY EMPTY false Integer/MAX_VALUE))
([conn ks cf row start-key end-key reverse? count]
(let [slr (SliceRange. start-key
end-key
reverse? count)
slp (doto (SlicePredicate.)
(.setSlice_range slr))]
(retrive-slice conn ks cf row slp))))
(defn retrieve-column
[conn ks cf row col]
(let [COL (to-bbuff col)
slr (SliceRange. COL
COL
false 1)
slp (doto (SlicePredicate.)
(.setSlice_range slr))]
(retrive-slice conn ks cf row slp)))
(defn retrieve-column-family
([conn ks cf]
(retrieve-column-family conn ks cf EMPTY))
([conn ks cf start-key]
(retrieve-column-family conn ks cf start-key EMPTY))
([conn ks cf start-key end-key]
(retrieve-column-family conn ks cf start-key end-key Integer/MAX_VALUE))
([conn ks cf start-key end-key max-results]
(let [cp (ColumnParent. cf)
slr (SliceRange. EMPTY EMPTY
false Integer/MAX_VALUE)
slp (doto (SlicePredicate.)
(.setSlice_range slr))]
(retrieve-range-slices conn ks cf slp start-key end-key max-results))))
| |
2dcef8dfd6108323028c5bfa9f16c85ec339ab3dcbd3b1bf664962b1205d41ad | mvaldesdeleon/haskell-book | wordnumber.hs | module WordNumber where
import Data.List (intercalate, intersperse, map)
-- | Spell out a single decimal digit (0-9) in English. Any other Int
-- yields the empty string, matching the original catch-all clause.
digitToWord :: Int -> String
digitToWord n =
  case lookup n (zip [0 ..] names) of
    Just word -> word
    Nothing   -> ""
  where
    names = ["zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"]
-- | Break a number into its decimal digits, most significant first;
-- digits 0 == [0]. Negative input follows div/mod semantics exactly
-- as in the original implementation.
digits :: Int -> [Int]
digits = step []
  where
    step acc n
      | q > 0     = step (r : acc) q
      | otherwise = r : acc
      where
        (q, r) = n `divMod` 10
-- | Render a number as its hyphen-separated digit words, e.g.
-- wordNumber 123 == "one-two-three".
-- Idiom fix: `concat . intersperse "-"` is exactly `intercalate "-"`
-- (requires `intercalate` from Data.List).
wordNumber :: Int -> String
wordNumber = intercalate "-" . map digitToWord . digits
| null | https://raw.githubusercontent.com/mvaldesdeleon/haskell-book/ee4a70708041686abe2f1d951185786119470eb4/ch08/wordnumber.hs | haskell | module WordNumber where
import Data.List (intersperse, map)
digitToWord :: Int -> String
digitToWord 0 = "zero"
digitToWord 1 = "one"
digitToWord 2 = "two"
digitToWord 3 = "three"
digitToWord 4 = "four"
digitToWord 5 = "five"
digitToWord 6 = "six"
digitToWord 7 = "seven"
digitToWord 8 = "eight"
digitToWord 9 = "nine"
digitToWord _ = ""
digits :: Int -> [Int]
digits n = go n []
where go n digs
| d > 0 = go d (r:digs)
| otherwise = (r:digs)
where d = n `div` 10
r = n `mod` 10
wordNumber :: Int -> String
wordNumber = concat . intersperse "-" . map digitToWord . digits
| |
39c91e21d546bd4f438bf205f7a90ad8c7c2e8c3fc9e1ba961ed798ffcf27e3b | kadena-io/kadenamint | Kadenamint.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Kadenamint where
import Control.Concurrent.Async (async, cancel, withAsync)
import Control.Lens (makeLenses, (^.), (<&>))
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Reader (ReaderT(..), runReaderT)
import qualified Data.Aeson as Aeson
import Data.Decimal (Decimal)
import Data.Functor (void)
import Data.IORef (newIORef)
import Data.Text (Text)
import qualified Data.Text as T
import Network.HTTP.Client (defaultManagerSettings, newManager)
import Servant.Client (BaseUrl(..), Scheme(Http), mkClientEnv, runClientM)
import System.Console.ANSI (SGR(..), ConsoleLayer(..))
import Prelude hiding (head, log)
import Pact.Types.Capability (SigCapability)
import Pact.Types.Command (Command(..), CommandResult(..), PactResult(..))
import Pact.Types.Pretty (pretty)
import Kadenamint.ABCI as ABCI
import Kadenamint.Coin
import Kadenamint.Common
import Kadenamint.Pact
import Kadenamint.Tendermint
import Kadenamint.Tendermint.RPC
-- | A Tendermint node paired with the port its Pact HTTP API listens
-- on.
data KadenamintNode = KadenamintNode
{ _kadenamintNode_tendermint :: TendermintNode
, _kadenamintNode_pactAPIPort :: Word
} deriving (Eq, Ord, Show)
-- | Wrap a Tendermint node, placing the Pact API on the port directly
-- after the node's proxy-app port (as parsed from its config URI).
mkKadenamintNode :: TendermintNode -> KadenamintNode
mkKadenamintNode tn = KadenamintNode tn apiPort
where
apiPort = proxyAppPort + 1
(_, proxyAppPort) = unsafeHostPortFromURI $ tn ^. tendermintNode_config . config_proxyApp
makeLenses ''KadenamintNode
-- | Logging environment for the RPC side: every line is printed in
-- cyan with a "[RPC] " prefix.
broadcastEnv :: Env
broadcastEnv = Env
{ _env_printer = sgrify [SetRGBColor Foreground cyan] . ("\n[RPC] " <>)
}
-- | Demo entry point: start a local 3-node network and replay the
-- coin-contract timeline on the first two nodes. The wildcard branch
-- is unreachable because exactly three nodes are requested.
runEverything :: IO ()
runEverything = do
initProcess
withLocalKadenamintNetwork 3 $ \root -> \case
[n0, n1, _n2] -> timelineCoinContract root n0 n1
_ -> impossible
-- | Run one Kadenamint node: initialise a Pact database under the
-- node's home directory ("<home>/pact-db"), serve the Pact HTTP API
-- on proxy-app-port + 1, and run the ABCI app against Tendermint.
withKadenamintNode :: MonadIO m => KadenamintNode -> m ()
withKadenamintNode kn = liftIO $ do
let tn = _kadenamintNode_tendermint kn
home = tn ^. tendermintNode_home
(_, proxyAppPort) = unsafeHostPortFromURI $ tn ^. tendermintNode_config . config_proxyApp
-- Shared IORef handed to both the API server and the ABCI app;
-- presumably request/result bookkeeping — TODO confirm in
-- Kadenamint.ABCI.
rrs <- newIORef mempty
pactDbEnv <- initDb $ T.unpack home <> "/pact-db"
withAsync (runApiServer pactDbEnv rrs (broadcastPactCmd kn) (proxyAppPort + 1)) $ \_ -> runABCI pactDbEnv rrs tn
-- | Create an additional node for an existing network, delegating to
-- 'addTendermintNode' and wrapping the result.
addKadenamintNode :: MonadIO m => Text -> Text -> NodePorts -> KadenamintNode -> m KadenamintNode
addKadenamintNode home moniker ports preExistingNode = mkKadenamintNode <$> addTendermintNode home moniker ports (_kadenamintNode_tendermint preExistingNode)
-- | Load a node's configuration from its home directory.
loadKadenamintNode :: MonadIO m => Text -> m KadenamintNode
loadKadenamintNode = fmap mkKadenamintNode . loadTendermintNode
-- | Run the node whose configuration lives in the given directory.
runKadenamintNodeDir :: MonadIO m => Text -> m ()
runKadenamintNodeDir = runNodeDir mkKadenamintNode _kadenamintNode_tendermint withKadenamintNode
-- | Run an already-constructed node.
runKadenamintNode :: MonadIO m => KadenamintNode -> m ()
runKadenamintNode = runNode _kadenamintNode_tendermint withKadenamintNode
-- | Run a network rooted in a fresh temporary directory.
withThrowawayKadenamintNetwork :: Word -> (Text -> [KadenamintNode] -> IO ()) -> IO ()
withThrowawayKadenamintNetwork size f = withTempDir $ \x -> withKadenamintNetwork x size f
-- | Run a network rooted in ".network" under the current directory.
withLocalKadenamintNetwork :: Word -> (Text -> [KadenamintNode] -> IO ()) -> IO ()
withLocalKadenamintNetwork size f = withCurrentDir $ \x -> withKadenamintNetwork (x <> "/.network") size f
-- | Run a network of @size@ Kadenamint nodes rooted at the given
-- directory, bridging the generic 'AppNetwork' machinery to the
-- Kadenamint node wrapper; the callback receives the network root and
-- the running nodes.
withKadenamintNetwork
:: Text
-> Word
-> (Text -> [KadenamintNode] -> IO ())
-> IO ()
withKadenamintNetwork root size = withNetwork root $ AppNetwork
{ _appNetwork_toAppNode = mkKadenamintNode
, _appNetwork_fromAppNode = _kadenamintNode_tendermint
, _appNetwork_withNode = withKadenamintNode
, _appNetwork_size = size
}
-- | Broadcast a transaction that reports every account balance.
showBalancesTx :: MonadIO m => KadenamintNode -> m ()
showBalancesTx = broadcastPact showBalances
-- | Broadcast a transaction querying one account's balance.
showBalanceTx :: MonadIO m => Text -> KadenamintNode -> m ()
showBalanceTx acct = broadcastPact ("(coin.get-balance '" <> acct <> ")")
-- | Broadcast a signed coin transfer (with the matching TRANSFER
-- capability) followed by a balance report.
transferTx :: MonadIO m => Text -> Text -> Decimal -> KadenamintNode -> m ()
transferTx from to amount = broadcastPactSigned (Just from) (Just [mkTransferCapability from to amount]) (transfer from to amount <> showBalances)
-- | Scripted demo: query balances, spin up an extra node ("nodeX"),
-- transact through it, kill it mid-timeline, keep transacting via n0,
-- then restart nodeX so it can catch up. The sleeps give blocks time
-- to commit between steps.
timelineCoinContract :: Text -> KadenamintNode -> KadenamintNode -> IO ()
timelineCoinContract root n0 n1 = do
sleep 4
showBalancesTx n1
sleep 4
-- Add a fourth node derived from n0's network configuration.
n3 <- addKadenamintNode (root <> "/nodeX") "nodeX" extraNodePorts n0
a3 <- liftIO $ async $ runKadenamintNode n3
sleep 4
showBalancesTx n3
sleep 4
transferTx "sender00" "sender01" 1 n3
sleep 4
-- Stop nodeX while the rest of the network keeps running.
liftIO $ cancel a3
flip runReaderT (coreEnv Nothing) $ log "Stopping nodeX" Nothing
sleep 4
transferTx "sender00" "sender02" 1 n0
sleep 4
-- Restart nodeX; fire-and-forget (the async handle is discarded).
void $ liftIO $ async $ runKadenamintNode n3
-- | Broadcast unsigned Pact code (no sender, no capabilities).
broadcastPact :: MonadIO m => Text -> KadenamintNode -> m ()
broadcastPact = broadcastPactSigned Nothing Nothing
-- | Build an exec command from Pact code — optionally signed by
-- @sender@ with the given capabilities — and submit its JSON over the
-- node's Tendermint RPC endpoint.
broadcastPactSigned :: MonadIO m => Maybe Text -> Maybe [SigCapability] -> Text -> KadenamintNode -> m ()
broadcastPactSigned sender caps code kn = do
let
tn = _kadenamintNode_tendermint kn
cfg = _tendermintNode_config tn
(host, port) = unsafeHostPortFromURI $ _configRPC_laddr $ _config_rpc cfg
cmd <- mkExec' code sender caps
flip runReaderT broadcastEnv $ do
log ("Broadcasting pact code via node #" <> _config_moniker cfg <> " at " <> host <> ":" <> tshow port) (Just $ tshow code)
broadcastTransaction host port $ tshow $ Aeson.toJSON cmd
-- | Submit an already-built Pact command over the node's Tendermint
-- RPC endpoint (used as the API server's broadcast hook).
broadcastPactCmd :: MonadIO m => KadenamintNode -> Command Text -> m ()
broadcastPactCmd kn cmd = do
let
tn = _kadenamintNode_tendermint kn
cfg = _tendermintNode_config tn
(host, port) = unsafeHostPortFromURI $ _configRPC_laddr $ _config_rpc cfg
flip runReaderT broadcastEnv $ do
log ("Broadcasting pact command via node #" <> _config_moniker cfg <> " at " <> host <> ":" <> tshow port) (Just $ tshow cmd)
broadcastTransaction host port $ tshow $ Aeson.toJSON cmd
-- | Send Pact code to the node's Pact /local endpoint (executed
-- locally, not broadcast to the chain) and render the outcome as a
-- String: the client error, the Pact error, or the pretty-printed
-- result.
localCall :: MonadIO m => Text -> KadenamintNode -> m String
localCall code kn = do
m <- liftIO $ newManager defaultManagerSettings
cmd <- mkExec' code Nothing Nothing
let
apiPort = fromEnum $ _kadenamintNode_pactAPIPort kn
nodeUrl = BaseUrl Http "localhost" apiPort ""
env = mkClientEnv m nodeUrl
tn = _kadenamintNode_tendermint kn
cfg = _tendermintNode_config tn
-- host/port are only used for the log line below.
(host, port) = unsafeHostPortFromURI $ _configRPC_laddr $ _config_rpc cfg
flip runReaderT broadcastEnv $ do
log ("Sending pact command to /local endpoint of node #" <> _config_moniker cfg <> " at " <> host <> ":" <> tshow port) (Just $ tshow cmd)
liftIO $ runClientM (localEndpoint cmd) env <&> \case
Left err -> show err
Right cr -> case _crResult cr of
PactResult (Left err) -> show err
PactResult (Right er) -> show $ pretty $ er
| null | https://raw.githubusercontent.com/kadena-io/kadenamint/00c99eca71c4107d1ed31754e26d9e552237e3bd/src/Kadenamint.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE LambdaCase #
# LANGUAGE TemplateHaskell #
module Kadenamint where
import Control.Concurrent.Async (async, cancel, withAsync)
import Control.Lens (makeLenses, (^.), (<&>))
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Reader (ReaderT(..), runReaderT)
import qualified Data.Aeson as Aeson
import Data.Decimal (Decimal)
import Data.Functor (void)
import Data.IORef (newIORef)
import Data.Text (Text)
import qualified Data.Text as T
import Network.HTTP.Client (defaultManagerSettings, newManager)
import Servant.Client (BaseUrl(..), Scheme(Http), mkClientEnv, runClientM)
import System.Console.ANSI (SGR(..), ConsoleLayer(..))
import Prelude hiding (head, log)
import Pact.Types.Capability (SigCapability)
import Pact.Types.Command (Command(..), CommandResult(..), PactResult(..))
import Pact.Types.Pretty (pretty)
import Kadenamint.ABCI as ABCI
import Kadenamint.Coin
import Kadenamint.Common
import Kadenamint.Pact
import Kadenamint.Tendermint
import Kadenamint.Tendermint.RPC
data KadenamintNode = KadenamintNode
{ _kadenamintNode_tendermint :: TendermintNode
, _kadenamintNode_pactAPIPort :: Word
} deriving (Eq, Ord, Show)
mkKadenamintNode :: TendermintNode -> KadenamintNode
mkKadenamintNode tn = KadenamintNode tn apiPort
where
apiPort = proxyAppPort + 1
(_, proxyAppPort) = unsafeHostPortFromURI $ tn ^. tendermintNode_config . config_proxyApp
makeLenses ''KadenamintNode
broadcastEnv :: Env
broadcastEnv = Env
{ _env_printer = sgrify [SetRGBColor Foreground cyan] . ("\n[RPC] " <>)
}
runEverything :: IO ()
runEverything = do
initProcess
withLocalKadenamintNetwork 3 $ \root -> \case
[n0, n1, _n2] -> timelineCoinContract root n0 n1
_ -> impossible
withKadenamintNode :: MonadIO m => KadenamintNode -> m ()
withKadenamintNode kn = liftIO $ do
let tn = _kadenamintNode_tendermint kn
home = tn ^. tendermintNode_home
(_, proxyAppPort) = unsafeHostPortFromURI $ tn ^. tendermintNode_config . config_proxyApp
rrs <- newIORef mempty
pactDbEnv <- initDb $ T.unpack home <> "/pact-db"
withAsync (runApiServer pactDbEnv rrs (broadcastPactCmd kn) (proxyAppPort + 1)) $ \_ -> runABCI pactDbEnv rrs tn
addKadenamintNode :: MonadIO m => Text -> Text -> NodePorts -> KadenamintNode -> m KadenamintNode
addKadenamintNode home moniker ports preExistingNode = mkKadenamintNode <$> addTendermintNode home moniker ports (_kadenamintNode_tendermint preExistingNode)
loadKadenamintNode :: MonadIO m => Text -> m KadenamintNode
loadKadenamintNode = fmap mkKadenamintNode . loadTendermintNode
runKadenamintNodeDir :: MonadIO m => Text -> m ()
runKadenamintNodeDir = runNodeDir mkKadenamintNode _kadenamintNode_tendermint withKadenamintNode
runKadenamintNode :: MonadIO m => KadenamintNode -> m ()
runKadenamintNode = runNode _kadenamintNode_tendermint withKadenamintNode
withThrowawayKadenamintNetwork :: Word -> (Text -> [KadenamintNode] -> IO ()) -> IO ()
withThrowawayKadenamintNetwork size f = withTempDir $ \x -> withKadenamintNetwork x size f
withLocalKadenamintNetwork :: Word -> (Text -> [KadenamintNode] -> IO ()) -> IO ()
withLocalKadenamintNetwork size f = withCurrentDir $ \x -> withKadenamintNetwork (x <> "/.network") size f
withKadenamintNetwork
:: Text
-> Word
-> (Text -> [KadenamintNode] -> IO ())
-> IO ()
withKadenamintNetwork root size = withNetwork root $ AppNetwork
{ _appNetwork_toAppNode = mkKadenamintNode
, _appNetwork_fromAppNode = _kadenamintNode_tendermint
, _appNetwork_withNode = withKadenamintNode
, _appNetwork_size = size
}
showBalancesTx :: MonadIO m => KadenamintNode -> m ()
showBalancesTx = broadcastPact showBalances
showBalanceTx :: MonadIO m => Text -> KadenamintNode -> m ()
showBalanceTx acct = broadcastPact ("(coin.get-balance '" <> acct <> ")")
transferTx :: MonadIO m => Text -> Text -> Decimal -> KadenamintNode -> m ()
transferTx from to amount = broadcastPactSigned (Just from) (Just [mkTransferCapability from to amount]) (transfer from to amount <> showBalances)
timelineCoinContract :: Text -> KadenamintNode -> KadenamintNode -> IO ()
timelineCoinContract root n0 n1 = do
sleep 4
showBalancesTx n1
sleep 4
n3 <- addKadenamintNode (root <> "/nodeX") "nodeX" extraNodePorts n0
a3 <- liftIO $ async $ runKadenamintNode n3
sleep 4
showBalancesTx n3
sleep 4
transferTx "sender00" "sender01" 1 n3
sleep 4
liftIO $ cancel a3
flip runReaderT (coreEnv Nothing) $ log "Stopping nodeX" Nothing
sleep 4
transferTx "sender00" "sender02" 1 n0
sleep 4
void $ liftIO $ async $ runKadenamintNode n3
broadcastPact :: MonadIO m => Text -> KadenamintNode -> m ()
broadcastPact = broadcastPactSigned Nothing Nothing
broadcastPactSigned :: MonadIO m => Maybe Text -> Maybe [SigCapability] -> Text -> KadenamintNode -> m ()
broadcastPactSigned sender caps code kn = do
let
tn = _kadenamintNode_tendermint kn
cfg = _tendermintNode_config tn
(host, port) = unsafeHostPortFromURI $ _configRPC_laddr $ _config_rpc cfg
cmd <- mkExec' code sender caps
flip runReaderT broadcastEnv $ do
log ("Broadcasting pact code via node #" <> _config_moniker cfg <> " at " <> host <> ":" <> tshow port) (Just $ tshow code)
broadcastTransaction host port $ tshow $ Aeson.toJSON cmd
broadcastPactCmd :: MonadIO m => KadenamintNode -> Command Text -> m ()
broadcastPactCmd kn cmd = do
let
tn = _kadenamintNode_tendermint kn
cfg = _tendermintNode_config tn
(host, port) = unsafeHostPortFromURI $ _configRPC_laddr $ _config_rpc cfg
flip runReaderT broadcastEnv $ do
log ("Broadcasting pact command via node #" <> _config_moniker cfg <> " at " <> host <> ":" <> tshow port) (Just $ tshow cmd)
broadcastTransaction host port $ tshow $ Aeson.toJSON cmd
localCall :: MonadIO m => Text -> KadenamintNode -> m String
localCall code kn = do
m <- liftIO $ newManager defaultManagerSettings
cmd <- mkExec' code Nothing Nothing
let
apiPort = fromEnum $ _kadenamintNode_pactAPIPort kn
nodeUrl = BaseUrl Http "localhost" apiPort ""
env = mkClientEnv m nodeUrl
tn = _kadenamintNode_tendermint kn
cfg = _tendermintNode_config tn
(host, port) = unsafeHostPortFromURI $ _configRPC_laddr $ _config_rpc cfg
flip runReaderT broadcastEnv $ do
log ("Sending pact command to /local endpoint of node #" <> _config_moniker cfg <> " at " <> host <> ":" <> tshow port) (Just $ tshow cmd)
liftIO $ runClientM (localEndpoint cmd) env <&> \case
Left err -> show err
Right cr -> case _crResult cr of
PactResult (Left err) -> show err
PactResult (Right er) -> show $ pretty $ er
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.