_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
f99e281dbc75381fb0ce3a4de55d71a92c6351ee78cf84cc474d0b162175e9f1 | interstar/cardigan-bay | command_line.cljc | (ns clj-ts.command-line
(:require [instaparse.core :as insta]))
(def parse
(insta/parser
"
<Command> = <'!'> <space> Move | <'!'> Tags
Move = <'>>'> PageName
Tags = (<space> Tag)*
PageName = Name
Tag = <'+'> Name
<Name> = #'[A-Za-z0-9]+'
space = #'\\s+'
"))
(defn parser [s]
(let [parsed (parse s)
f (first parsed)]
(cond
(= :Move (first f)) {:type :Move :page-name (-> parsed first second second)}
(= :Tags (first f)) {:type :Tags :tags (-> parsed first rest)}
:otherwise
{:type :error :original s :result parsed}
)))
(defn parsed-seq [xs]
(let [ps (map parser xs)
commands (filter #(not= :error (:type %)) ps)
moves (filter #(= (:type %) :Move) commands)
raw-tags (map :tags (filter #(= (:type %) :Tags) commands))
has-move? (> (count moves) 0)
]
{:commands commands
:non-commands
(map #(:original %) (filter #(= :error (:type %)) ps))
:ps ps
:size (count commands)
:has-commands? (> (count commands) 0)
:has-tags? (> (count raw-tags) 0)
:tags raw-tags ;;(flatten (map keyword raw-tags))
:has-move? has-move?
:move-destination (if has-move? (-> moves first :page-name) nil)}
))
(defn command-line? [s]
(not= :error (:type (parser s))))
| null | https://raw.githubusercontent.com/interstar/cardigan-bay/e4767d300dd23ed011aa25403f924d7bfe5cb30c/src/clj_ts/command_line.cljc | clojure | (flatten (map keyword raw-tags)) | (ns clj-ts.command-line
(:require [instaparse.core :as insta]))
(def parse
(insta/parser
"
<Command> = <'!'> <space> Move | <'!'> Tags
Move = <'>>'> PageName
Tags = (<space> Tag)*
PageName = Name
Tag = <'+'> Name
<Name> = #'[A-Za-z0-9]+'
space = #'\\s+'
"))
(defn parser [s]
(let [parsed (parse s)
f (first parsed)]
(cond
(= :Move (first f)) {:type :Move :page-name (-> parsed first second second)}
(= :Tags (first f)) {:type :Tags :tags (-> parsed first rest)}
:otherwise
{:type :error :original s :result parsed}
)))
(defn parsed-seq [xs]
(let [ps (map parser xs)
commands (filter #(not= :error (:type %)) ps)
moves (filter #(= (:type %) :Move) commands)
raw-tags (map :tags (filter #(= (:type %) :Tags) commands))
has-move? (> (count moves) 0)
]
{:commands commands
:non-commands
(map #(:original %) (filter #(= :error (:type %)) ps))
:ps ps
:size (count commands)
:has-commands? (> (count commands) 0)
:has-tags? (> (count raw-tags) 0)
:has-move? has-move?
:move-destination (if has-move? (-> moves first :page-name) nil)}
))
(defn command-line? [s]
(not= :error (:type (parser s))))
|
4c9188267b1eed7bf31f6bd4dc25aef3a7dbbc0e024f92d396a87b5e36c778ad | jonathan-laurent/KaFlow | causal_core.mli | open Causal_core_shared
type sigma_event_info = { number_added : int }
type causal_core = (step_id * sigma_event_info) list
type t = causal_core
val core_events : causal_core -> step_id list
{ 6 Simple interface }
val iter_causal_cores :
Trace_explorer.t ->
step_id list ->
(step_id -> causal_core -> unit) ->
unit
{ 6 Expert interface }
type var_info_table
val init_var_infos :
?last_step_id:step_id -> Trace_explorer.t -> var_info_table
val get_modifications_history : 'a Grid.var -> var_info_table -> History.t
val compute_causal_core :
Trace_explorer.t -> var_info_table -> step_id list -> t | null | https://raw.githubusercontent.com/jonathan-laurent/KaFlow/e81cfbe4f270b9f0c94b7002b02dd5be61041ab8/src/causal_core.mli | ocaml | open Causal_core_shared
type sigma_event_info = { number_added : int }
type causal_core = (step_id * sigma_event_info) list
type t = causal_core
val core_events : causal_core -> step_id list
{ 6 Simple interface }
val iter_causal_cores :
Trace_explorer.t ->
step_id list ->
(step_id -> causal_core -> unit) ->
unit
{ 6 Expert interface }
type var_info_table
val init_var_infos :
?last_step_id:step_id -> Trace_explorer.t -> var_info_table
val get_modifications_history : 'a Grid.var -> var_info_table -> History.t
val compute_causal_core :
Trace_explorer.t -> var_info_table -> step_id list -> t | |
122b486220aa1e0e281af5ea6dacaa9619080590bff2d85b260375409d51680d | aryx/xix | stack.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
(* *)
(***********************************************************************)
$ I d : stack.ml , v 1.4 1996/04/30 14:50:37 xleroy Exp $
type 'a t = { mutable c : 'a list }
exception Empty
let create () = { c = [] }
let clear s = s.c <- []
let push x s = s.c <- x :: s.c
let pop s =
match s.c with
hd::tl -> s.c <- tl; hd
| [] -> raise Empty
let length s = List.length s.c
let iter f s = List.iter f s.c
(* addons pad *)
let top_opt s =
match s.c with
| [] -> None
| x::xs -> Some x
let top s =
match s.c with
| x::xs -> x
| [] -> raise Empty
let nth i s =
List.nth s.c i
| null | https://raw.githubusercontent.com/aryx/xix/60ce1bd9a3f923e0e8bb2192f8938a9aa49c739c/lib_core/collections/todo/stack.ml | ocaml | *********************************************************************
Objective Caml
*********************************************************************
addons pad | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
$ I d : stack.ml , v 1.4 1996/04/30 14:50:37 xleroy Exp $
type 'a t = { mutable c : 'a list }
exception Empty
let create () = { c = [] }
let clear s = s.c <- []
let push x s = s.c <- x :: s.c
let pop s =
match s.c with
hd::tl -> s.c <- tl; hd
| [] -> raise Empty
let length s = List.length s.c
let iter f s = List.iter f s.c
let top_opt s =
match s.c with
| [] -> None
| x::xs -> Some x
let top s =
match s.c with
| x::xs -> x
| [] -> raise Empty
let nth i s =
List.nth s.c i
|
d1f7237a12d80eb45ac852ff483c733032fa49c3321766df3c77caa88f42f411 | fukamachi/cl-cookie | cl-cookie.lisp | (in-package :cl-user)
(defpackage cl-cookie-test
(:use :cl
:cl-cookie
:prove)
(:import-from :cl-cookie
:parse-cookie-date
:match-cookie-path
:match-cookie))
(in-package :cl-cookie-test)
(plan nil)
(subtest "parse-cookie-date"
(loop for (date . rfc3339) in '(("Wed, 06-Feb-2008 21:01:38 GMT" . "2008-02-06T21:01:38+0000")
("Wed, 06-Feb-08 21:01:38 GMT" . "2008-02-06T21:01:38+0000")
("Tue Feb 13 08:00:00 2007 GMT" . "2007-02-13T08:00:00+0000")
("Wednesday, 07-February-2027 08:55:23 GMT" . "2027-02-07T08:55:23+0000")
("Wed, 07-02-2017 10:34:45 GMT" . "2017-02-07T10:34:45+0000"))
do (let ((parsed (parse-cookie-date date)))
(ok parsed (format nil "Can parse ~S" date))
(is (local-time:universal-to-timestamp parsed)
(local-time:parse-timestring rfc3339)
:test #'local-time:timestamp=))))
(subtest "parse-set-cookie-header"
(is (parse-set-cookie-header "SID=31d4d96e407aad42" "example.com" "/")
(make-cookie :name "SID" :value "31d4d96e407aad42" :origin-host "example.com" :path "/")
:test #'cookie=
"name and value")
(is (parse-set-cookie-header "SID=" "example.com" "/")
(make-cookie :name "SID" :value "" :origin-host "example.com" :path "/")
:test #'cookie=
"no value")
(is (parse-set-cookie-header "SID=31d4d96e407aad42; Path=/; Domain=example.com" "example.com" "/")
(make-cookie :name "SID" :value "31d4d96e407aad42" :origin-host "example.com" :path "/" :domain "example.com")
:test #'cookie=
"path and domain")
(is (parse-set-cookie-header "SID=31d4d96e407aad42; Path=/; Secure; HttpOnly" "example.com" "/")
(make-cookie :name "SID" :value "31d4d96e407aad42" :origin-host "example.com" :path "/" :secure-p t :httponly-p t)
:test #'cookie-equal
"secure and httponly"))
(subtest "write-cookie-header"
(is (write-cookie-header nil)
nil)
(is (write-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42"))
"SID=31d4d96e407aad42")
(is (write-cookie-header (list (make-cookie :name "SID" :value "31d4d96e407aad42")
(make-cookie :name "lang" :value "en-US")))
"SID=31d4d96e407aad42; lang=en-US"))
(subtest "match-cookie-path"
(ok (match-cookie-path "/" "/"))
(ok (match-cookie-path "/" ""))
(ok (match-cookie-path "" "/"))
(ok (not (match-cookie-path "/" "/accounts")))
(ok (match-cookie-path "/accounts" "/"))
(ok (match-cookie-path "/accounts/nitro_idiot" "/"))
(ok (not (match-cookie-path "/" "/accounts")))
(ok (match-cookie-path "/accounts" "/accounts"))
(ok (match-cookie-path "/accounts/" "/accounts"))
(ok (not (match-cookie-path "/accounts-page" "/accounts")))
(ok (match-cookie-path "/accounts/nitro_idiot" "/accounts")))
(subtest "match-cookie"
(subtest "cookie with domain and path"
(let ((cookie
(make-cookie :name "LSID" :value "DQAAAK...Eaem_vYg" :origin-host "docs.foo.com"
:domain ".foo.com" :path "/accounts")))
(diag "path")
(ok (not (match-cookie cookie "docs.foo.com" "/")))
(ok (match-cookie cookie "docs.foo.com" "/accounts"))
(ok (match-cookie cookie "docs.foo.com" "/accounts/"))
(ok (match-cookie cookie "docs.foo.com" "/accounts/nitro_idiot"))
(ok (not (match-cookie cookie "docs.foo.com" "/accounts-page" :securep t)))
(diag "domain")
(ok (not (match-cookie cookie "foo.com" "/" :securep t))
"Send only to the origin-host when :host is NIL")
(ok (not (match-cookie cookie "one.docs.foo.com" "/" :securep t))
"Send only to the origin-host when :host is NIL")))
(subtest "cookie with path"
(let ((cookie
(make-cookie :name "LSID" :value "DQAAAK...Eaem_vYg" :origin-host "docs.foo.com"
:path "/accounts" :secure-p t :httponly-p t)))
(diag "secure")
(ok (not (match-cookie cookie "docs.foo.com" "/accounts")))
(ok (match-cookie cookie "docs.foo.com" "/accounts" :securep t))
(diag "path")
(ok (not (match-cookie cookie "docs.foo.com" "/" :securep t)))
(ok (match-cookie cookie "docs.foo.com" "/accounts" :securep t))
(ok (match-cookie cookie "docs.foo.com" "/accounts/" :securep t))
(ok (match-cookie cookie "docs.foo.com" "/accounts/nitro_idiot" :securep t))
(ok (not (match-cookie cookie "docs.foo.com" "/accounts-page" :securep t)))
(diag "domain")
(ok (not (match-cookie cookie "foo.com" "/" :securep t))
"Send only to the origin-host when :host is NIL")
(ok (not (match-cookie cookie "one.docs.foo.com" "/" :securep t))
"Send only to the origin-host when :host is NIL"))))
(subtest "cookie-jar"
(let ((cookie-jar (make-cookie-jar)))
(is (length (cookie-jar-cookies cookie-jar)) 0
"initial cookie jar is empty")
(merge-cookies cookie-jar
(list (make-cookie :name "SID" :value "31d4d96e407aad42" :domain "example.com" :path "/")
(make-cookie :name "lang" :value "en-US" :domain "example.com" :path "/accounts")))
(is (length (cookie-jar-cookies cookie-jar)) 2)
(merge-cookies cookie-jar
(list (make-cookie :name "id" :value "30" :domain "example.com")))
(is (length (cookie-jar-cookies cookie-jar)) 3)
(merge-cookies cookie-jar
(list (make-cookie :name "lang" :value "ja-JP" :domain "example.com" :path "/accounts")))
(subtest "can overwrite"
(is (length (cookie-jar-cookies cookie-jar)) 3)
(is (cookie-value
(find "lang" (cookie-jar-cookies cookie-jar) :key #'cookie-name :test #'string=))
"ja-JP"))
(subtest "not overwrite other domain cookies"
(merge-cookies cookie-jar
(list (make-cookie :name "lang" :value "fr-FR" :domain "www.example.com")))
(is (length (cookie-jar-cookies cookie-jar)) 4))
(subtest "Cross site cooking"
(merge-cookies cookie-jar
(list (make-cookie :name "name" :value "Ultraman" :domain ".com")))
(is (cookie-jar-host-cookies cookie-jar "hatena.com" "/") nil))))
(subtest "write-set-cookie-header"
(is (write-set-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42"))
"SID=31d4d96e407aad42"
:test #'string=
"name and value")
(is (write-set-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42" :domain "www.example.com"))
"SID=31d4d96e407aad42; Domain=www.example.com"
:test #'string=
"name, value, and domain")
(is (write-set-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42" :domain "www.example.com" :path "/users"))
"SID=31d4d96e407aad42; Path=/users; Domain=www.example.com"
:test #'string=
"name, value, domain, and path")
(is (write-set-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42" :expires (encode-universal-time 6 22 19 25 1 2002)))
"SID=31d4d96e407aad42; Expires=Sat, 26 Jan 2002 00:22:06 GMT"
:test #'string=
"name, value, and expires")
(is (write-set-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42" :expires (encode-universal-time 6 22 19 25 1 2002)
:secure-p t :httponly-p t))
"SID=31d4d96e407aad42; Expires=Sat, 26 Jan 2002 00:22:06 GMT; Secure; HttpOnly"
:test #'string=))
(finalize)
| null | https://raw.githubusercontent.com/fukamachi/cl-cookie/e6babbf57c9c6e0b6998a5b5ecaea8fa59f88296/t/cl-cookie.lisp | lisp | (in-package :cl-user)
(defpackage cl-cookie-test
(:use :cl
:cl-cookie
:prove)
(:import-from :cl-cookie
:parse-cookie-date
:match-cookie-path
:match-cookie))
(in-package :cl-cookie-test)
(plan nil)
(subtest "parse-cookie-date"
(loop for (date . rfc3339) in '(("Wed, 06-Feb-2008 21:01:38 GMT" . "2008-02-06T21:01:38+0000")
("Wed, 06-Feb-08 21:01:38 GMT" . "2008-02-06T21:01:38+0000")
("Tue Feb 13 08:00:00 2007 GMT" . "2007-02-13T08:00:00+0000")
("Wednesday, 07-February-2027 08:55:23 GMT" . "2027-02-07T08:55:23+0000")
("Wed, 07-02-2017 10:34:45 GMT" . "2017-02-07T10:34:45+0000"))
do (let ((parsed (parse-cookie-date date)))
(ok parsed (format nil "Can parse ~S" date))
(is (local-time:universal-to-timestamp parsed)
(local-time:parse-timestring rfc3339)
:test #'local-time:timestamp=))))
(subtest "parse-set-cookie-header"
(is (parse-set-cookie-header "SID=31d4d96e407aad42" "example.com" "/")
(make-cookie :name "SID" :value "31d4d96e407aad42" :origin-host "example.com" :path "/")
:test #'cookie=
"name and value")
(is (parse-set-cookie-header "SID=" "example.com" "/")
(make-cookie :name "SID" :value "" :origin-host "example.com" :path "/")
:test #'cookie=
"no value")
(is (parse-set-cookie-header "SID=31d4d96e407aad42; Path=/; Domain=example.com" "example.com" "/")
(make-cookie :name "SID" :value "31d4d96e407aad42" :origin-host "example.com" :path "/" :domain "example.com")
:test #'cookie=
"path and domain")
(is (parse-set-cookie-header "SID=31d4d96e407aad42; Path=/; Secure; HttpOnly" "example.com" "/")
(make-cookie :name "SID" :value "31d4d96e407aad42" :origin-host "example.com" :path "/" :secure-p t :httponly-p t)
:test #'cookie-equal
"secure and httponly"))
(subtest "write-cookie-header"
(is (write-cookie-header nil)
nil)
(is (write-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42"))
"SID=31d4d96e407aad42")
(is (write-cookie-header (list (make-cookie :name "SID" :value "31d4d96e407aad42")
(make-cookie :name "lang" :value "en-US")))
"SID=31d4d96e407aad42; lang=en-US"))
(subtest "match-cookie-path"
(ok (match-cookie-path "/" "/"))
(ok (match-cookie-path "/" ""))
(ok (match-cookie-path "" "/"))
(ok (not (match-cookie-path "/" "/accounts")))
(ok (match-cookie-path "/accounts" "/"))
(ok (match-cookie-path "/accounts/nitro_idiot" "/"))
(ok (not (match-cookie-path "/" "/accounts")))
(ok (match-cookie-path "/accounts" "/accounts"))
(ok (match-cookie-path "/accounts/" "/accounts"))
(ok (not (match-cookie-path "/accounts-page" "/accounts")))
(ok (match-cookie-path "/accounts/nitro_idiot" "/accounts")))
(subtest "match-cookie"
(subtest "cookie with domain and path"
(let ((cookie
(make-cookie :name "LSID" :value "DQAAAK...Eaem_vYg" :origin-host "docs.foo.com"
:domain ".foo.com" :path "/accounts")))
(diag "path")
(ok (not (match-cookie cookie "docs.foo.com" "/")))
(ok (match-cookie cookie "docs.foo.com" "/accounts"))
(ok (match-cookie cookie "docs.foo.com" "/accounts/"))
(ok (match-cookie cookie "docs.foo.com" "/accounts/nitro_idiot"))
(ok (not (match-cookie cookie "docs.foo.com" "/accounts-page" :securep t)))
(diag "domain")
(ok (not (match-cookie cookie "foo.com" "/" :securep t))
"Send only to the origin-host when :host is NIL")
(ok (not (match-cookie cookie "one.docs.foo.com" "/" :securep t))
"Send only to the origin-host when :host is NIL")))
(subtest "cookie with path"
(let ((cookie
(make-cookie :name "LSID" :value "DQAAAK...Eaem_vYg" :origin-host "docs.foo.com"
:path "/accounts" :secure-p t :httponly-p t)))
(diag "secure")
(ok (not (match-cookie cookie "docs.foo.com" "/accounts")))
(ok (match-cookie cookie "docs.foo.com" "/accounts" :securep t))
(diag "path")
(ok (not (match-cookie cookie "docs.foo.com" "/" :securep t)))
(ok (match-cookie cookie "docs.foo.com" "/accounts" :securep t))
(ok (match-cookie cookie "docs.foo.com" "/accounts/" :securep t))
(ok (match-cookie cookie "docs.foo.com" "/accounts/nitro_idiot" :securep t))
(ok (not (match-cookie cookie "docs.foo.com" "/accounts-page" :securep t)))
(diag "domain")
(ok (not (match-cookie cookie "foo.com" "/" :securep t))
"Send only to the origin-host when :host is NIL")
(ok (not (match-cookie cookie "one.docs.foo.com" "/" :securep t))
"Send only to the origin-host when :host is NIL"))))
(subtest "cookie-jar"
(let ((cookie-jar (make-cookie-jar)))
(is (length (cookie-jar-cookies cookie-jar)) 0
"initial cookie jar is empty")
(merge-cookies cookie-jar
(list (make-cookie :name "SID" :value "31d4d96e407aad42" :domain "example.com" :path "/")
(make-cookie :name "lang" :value "en-US" :domain "example.com" :path "/accounts")))
(is (length (cookie-jar-cookies cookie-jar)) 2)
(merge-cookies cookie-jar
(list (make-cookie :name "id" :value "30" :domain "example.com")))
(is (length (cookie-jar-cookies cookie-jar)) 3)
(merge-cookies cookie-jar
(list (make-cookie :name "lang" :value "ja-JP" :domain "example.com" :path "/accounts")))
(subtest "can overwrite"
(is (length (cookie-jar-cookies cookie-jar)) 3)
(is (cookie-value
(find "lang" (cookie-jar-cookies cookie-jar) :key #'cookie-name :test #'string=))
"ja-JP"))
(subtest "not overwrite other domain cookies"
(merge-cookies cookie-jar
(list (make-cookie :name "lang" :value "fr-FR" :domain "www.example.com")))
(is (length (cookie-jar-cookies cookie-jar)) 4))
(subtest "Cross site cooking"
(merge-cookies cookie-jar
(list (make-cookie :name "name" :value "Ultraman" :domain ".com")))
(is (cookie-jar-host-cookies cookie-jar "hatena.com" "/") nil))))
(subtest "write-set-cookie-header"
(is (write-set-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42"))
"SID=31d4d96e407aad42"
:test #'string=
"name and value")
(is (write-set-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42" :domain "www.example.com"))
"SID=31d4d96e407aad42; Domain=www.example.com"
:test #'string=
"name, value, and domain")
(is (write-set-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42" :domain "www.example.com" :path "/users"))
"SID=31d4d96e407aad42; Path=/users; Domain=www.example.com"
:test #'string=
"name, value, domain, and path")
(is (write-set-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42" :expires (encode-universal-time 6 22 19 25 1 2002)))
"SID=31d4d96e407aad42; Expires=Sat, 26 Jan 2002 00:22:06 GMT"
:test #'string=
"name, value, and expires")
(is (write-set-cookie-header (make-cookie :name "SID" :value "31d4d96e407aad42" :expires (encode-universal-time 6 22 19 25 1 2002)
:secure-p t :httponly-p t))
"SID=31d4d96e407aad42; Expires=Sat, 26 Jan 2002 00:22:06 GMT; Secure; HttpOnly"
:test #'string=))
(finalize)
| |
b752eccad642fb7e0d7970d48f17255d989deab60360998d748660128e433219 | naoiwata/sicp | ex3.65.scm | ;;
;; @author naoiwata
SICP Chapter3
Exercise 3.65 .
;;
; ------------------------------------------------------------------------
; solution
; ------------------------------------------------------------------------
(add-load-path "./pages/" :relative)
(load "stream.scm")
(load "3.5.2.scm")
(define (ln-summands n)
(cons-stream (/ 1.0 n)
(stream-map - (ln-summands (+ n 1)))))
(define ln-stream
(partial-sums (ln-summands 1)))
; ------------------------------------------------------------------------
; test
; ------------------------------------------------------------------------
; partial-sums
(map
(lambda (x) (print (stream-ref ln-stream x)))
(iota 2000))
1.0
0.5
0.8333333333333333
0.5833333333333333
0.7833333333333332
0.6166666666666666
0.7595238095238095
0.6345238095238095
; 0.7456349206349207
0.6456349206349207
0.7365440115440116
; 0.6532106782106782
0.7301337551337552
0.6587051837051838
0.7253718503718505
0.6628718503718505
0.7216953797836152
0.6661398242280596
0.718771403175428
0.6687714031754279
; ...
0.6933974934353783
0.6928969929348777
0.6933972430599402
0.6928972430599403 < = n = 2000
; euler-transform
(define square (lambda (x) (* x x)))
(define (euler-transform s)
(let
((s0 (stream-ref s 0))
(s1 (stream-ref s 1))
(s2 (stream-ref s 2)))
(cons-stream
(- s2 (/ (square (- s2 s1))
(+ s0 (* -2 s1) s2)))
(euler-transform (stream-cdr s)))))
(display-stream (euler-transform ln-stream))
0.7
0.6904761904761905
0.6944444444444444
0.6924242424242424
0.6935897435897436
0.6928571428571428
0.6933473389355742
0.6930033416875522
0.6932539682539683
0.6930657506744464
0.6932106782106783
0.6930967180967181
0.6931879423258734
0.6931137858557215
0.6931748806748808
0.6931239512121866
0.6931668512550866
0.6931303775344023
0.693161647077867
0.6931346368409872
; tableau
(define (make-tableau transform s)
(cons-stream s
(make-tableau transform
(transform s))))
(define (accelerated-square transform s)
(stream-map stream-car
(make-tableau transform s)))
(display-stream (accelerated-square euler-transform ln-stream))
1.0
0.7
0.6932773109243697
0.6931488693329254
0.6931471960735491
0.6931471806635636
0.6931471805604039
0.6931471805599445 < - convergence
0.6931471805599427
0.6931471805599454
; +nan.0
; +nan.0
; +nan.0
; +nan.0
; +nan.0
; +nan.0
; +nan.0
; +nan.0
; +nan.0
; +nan.0
| null | https://raw.githubusercontent.com/naoiwata/sicp/7314136c5892de402015acfe4b9148a3558b1211/chapter3/ex3.65.scm | scheme |
@author naoiwata
------------------------------------------------------------------------
solution
------------------------------------------------------------------------
------------------------------------------------------------------------
test
------------------------------------------------------------------------
partial-sums
0.7456349206349207
0.6532106782106782
...
euler-transform
tableau
+nan.0
+nan.0
+nan.0
+nan.0
+nan.0
+nan.0
+nan.0
+nan.0
+nan.0
+nan.0 | SICP Chapter3
Exercise 3.65 .
(add-load-path "./pages/" :relative)
(load "stream.scm")
(load "3.5.2.scm")
(define (ln-summands n)
(cons-stream (/ 1.0 n)
(stream-map - (ln-summands (+ n 1)))))
(define ln-stream
(partial-sums (ln-summands 1)))
(map
(lambda (x) (print (stream-ref ln-stream x)))
(iota 2000))
1.0
0.5
0.8333333333333333
0.5833333333333333
0.7833333333333332
0.6166666666666666
0.7595238095238095
0.6345238095238095
0.6456349206349207
0.7365440115440116
0.7301337551337552
0.6587051837051838
0.7253718503718505
0.6628718503718505
0.7216953797836152
0.6661398242280596
0.718771403175428
0.6687714031754279
0.6933974934353783
0.6928969929348777
0.6933972430599402
0.6928972430599403 < = n = 2000
(define square (lambda (x) (* x x)))
(define (euler-transform s)
(let
((s0 (stream-ref s 0))
(s1 (stream-ref s 1))
(s2 (stream-ref s 2)))
(cons-stream
(- s2 (/ (square (- s2 s1))
(+ s0 (* -2 s1) s2)))
(euler-transform (stream-cdr s)))))
(display-stream (euler-transform ln-stream))
0.7
0.6904761904761905
0.6944444444444444
0.6924242424242424
0.6935897435897436
0.6928571428571428
0.6933473389355742
0.6930033416875522
0.6932539682539683
0.6930657506744464
0.6932106782106783
0.6930967180967181
0.6931879423258734
0.6931137858557215
0.6931748806748808
0.6931239512121866
0.6931668512550866
0.6931303775344023
0.693161647077867
0.6931346368409872
(define (make-tableau transform s)
(cons-stream s
(make-tableau transform
(transform s))))
(define (accelerated-square transform s)
(stream-map stream-car
(make-tableau transform s)))
(display-stream (accelerated-square euler-transform ln-stream))
1.0
0.7
0.6932773109243697
0.6931488693329254
0.6931471960735491
0.6931471806635636
0.6931471805604039
0.6931471805599445 < - convergence
0.6931471805599427
0.6931471805599454
|
498972cfb4506ad843ce267ccfafd70151d73e347c2626460bcb1ce59ec53cff | wedesoft/aiscm | xorg_scale_list.scm | (use-modules (aiscm magick) (aiscm xorg) (aiscm core))
(define img (read-image "fubk.png"))
(show (list (* (rgb 1 0 0) img) (* (rgb 0 1 0) img) (* (rgb 0 0 1) img)) #:shape '(120 160))
| null | https://raw.githubusercontent.com/wedesoft/aiscm/2c3db8d00cad6e042150714ada85da19cf4338ad/tests/integration/xorg_scale_list.scm | scheme | (use-modules (aiscm magick) (aiscm xorg) (aiscm core))
(define img (read-image "fubk.png"))
(show (list (* (rgb 1 0 0) img) (* (rgb 0 1 0) img) (* (rgb 0 0 1) img)) #:shape '(120 160))
| |
e2d742559ca6f13f8e6f664b4a926961c97beec321f8c4206bf3e05dc45a8ffc | PapenfussLab/bioshake | Samtools.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE GADTs #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Bioshake.Cluster.Samtools where
import Bioshake
import Bioshake.Cluster.Torque
import Bioshake.Internal.Samtools
import Bioshake.TH
import Development.Shake
import Development.Shake.FilePath
indexRules :: Given Config => Rules ()
indexRules = do
"//*.bai" %> \out -> do
let input = dropExtension out
need [input]
withSubmit (run "samtools index" [input] [out]) [Left given]
"//*.fai" %> \out -> do
let input = dropExtension out
need [input]
withSubmit (run "samtools faidx" [input]) [Left given]
$(makeSingleCluster ''AddRGLine [''IsBam] 'buildAddRGLine)
$(makeCluster ''SortBam [''IsBam] 'buildSortBam)
$(makeCluster ''NameSortBam [''IsBam] 'buildNameSortBam)
$(makeCluster ''Sam2Bam [''IsSam] 'buildSam2Bam)
$(makeCluster ''MappedOnly [''IsSam] 'buildMappedOnly)
$(makeSingleCluster ''Pileup [''IsBam, ''Referenced, ''Sorted] 'buildPileup)
$(makeSingleCluster ''FixMates [''IsBam, ''NameSorted] 'buildFixMates)
$(makeSingleCluster ''MarkDups [''IsBam, ''Sorted, ''MS] 'buildMarkDups)
$(makeSingleCluster ''BedCov [''IsBam, ''Capture] 'buildBedCov)
| null | https://raw.githubusercontent.com/PapenfussLab/bioshake/afeb7219b171e242b6e9bb9e99e2f80c0a099aff/Bioshake/Cluster/Samtools.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeOperators # | # LANGUAGE FlexibleInstances #
# LANGUAGE GADTs #
# LANGUAGE MultiParamTypeClasses #
module Bioshake.Cluster.Samtools where
import Bioshake
import Bioshake.Cluster.Torque
import Bioshake.Internal.Samtools
import Bioshake.TH
import Development.Shake
import Development.Shake.FilePath
indexRules :: Given Config => Rules ()
indexRules = do
"//*.bai" %> \out -> do
let input = dropExtension out
need [input]
withSubmit (run "samtools index" [input] [out]) [Left given]
"//*.fai" %> \out -> do
let input = dropExtension out
need [input]
withSubmit (run "samtools faidx" [input]) [Left given]
$(makeSingleCluster ''AddRGLine [''IsBam] 'buildAddRGLine)
$(makeCluster ''SortBam [''IsBam] 'buildSortBam)
$(makeCluster ''NameSortBam [''IsBam] 'buildNameSortBam)
$(makeCluster ''Sam2Bam [''IsSam] 'buildSam2Bam)
$(makeCluster ''MappedOnly [''IsSam] 'buildMappedOnly)
$(makeSingleCluster ''Pileup [''IsBam, ''Referenced, ''Sorted] 'buildPileup)
$(makeSingleCluster ''FixMates [''IsBam, ''NameSorted] 'buildFixMates)
$(makeSingleCluster ''MarkDups [''IsBam, ''Sorted, ''MS] 'buildMarkDups)
$(makeSingleCluster ''BedCov [''IsBam, ''Capture] 'buildBedCov)
|
d7619a7d9473959da74c1f51f465c4b950654efd33cc9226d6596e0304fa80d9 | deadpendency/deadpendency | CanDetermineDependencies.hs | module DD.Effect.DetermineDependencies.Backend.Model.CanDetermineDependencies
( CanDetermineDependencies (..),
CDDWrapper (..),
)
where
import Common.Model.Dependency.Basic.BasicDependency
import Common.Model.Git.GitPath
import DD.Effect.DetermineDependencies.Model.DetermineDependenciesError
import Data.Vector qualified as V
class CanDetermineDependencies a where
determineDependencies :: GitPath -> a -> Either DetermineDependenciesError (V.Vector BasicDependency)
data CDDWrapper where
CDDWrapper :: (CanDetermineDependencies a) => GitPath -> a -> CDDWrapper
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/dependency-determiner/src/DD/Effect/DetermineDependencies/Backend/Model/CanDetermineDependencies.hs | haskell | module DD.Effect.DetermineDependencies.Backend.Model.CanDetermineDependencies
( CanDetermineDependencies (..),
CDDWrapper (..),
)
where
import Common.Model.Dependency.Basic.BasicDependency
import Common.Model.Git.GitPath
import DD.Effect.DetermineDependencies.Model.DetermineDependenciesError
import Data.Vector qualified as V
class CanDetermineDependencies a where
determineDependencies :: GitPath -> a -> Either DetermineDependenciesError (V.Vector BasicDependency)
data CDDWrapper where
CDDWrapper :: (CanDetermineDependencies a) => GitPath -> a -> CDDWrapper
| |
dcffc9d28c9d01cadf422c6c9f77ad33289a07a6579401099cc3052e2278947a | qitab/pyjure | mop.clj | (ns pyjure.mop
(:require [clojure.core :as c]
[clojure.string :as str])
(:use [clojure.core.match :only [match]]
[pyjure.debug]
[pyjure.utilities]
[pyjure.names]))
;; Meta-Object Protocol for pyjure objects
;; #customization
;; #object.__dict__
;; TODO:
;; * most everything
;; * somehow use keywords as keys for python object fields, rather than strings,
;; since they are much faster (being interned makes comparison trivial).
;; Yet we must keep implementing python strings as uninterned strings, and
;; normal python dicts as normal clojure maps;
;; so when exposing internal class maps as user-visible python dicts,
;; have a special dict type that has a key adapter.
;;;; Phase one: protocols for objects with attributes
;; The protocols
(defprotocol PyObject
"Protocol for viewing something as a python-style object"
($get [x key] "get an object's attribute value by name")
($__dict__ [x] "get a dict of all of an object's attributes"))
(defn $get-via-attributes [x key] (get ($__dict__ x) key))
(defprotocol PyStateful
"Protocol for stateful manipulation of a python-style object"
($assoc [x key value] "set an object's attribute to a new value")
($update [x key fun] "update the value of an object's attribute"))
;; Pure user objects
(defrecord $Pure [attr] ;; Pure object with given attribute dict
PyObject
($get [x key] ((:attr x) key))
($__dict__ [x] (:attr x)))
(defn Pure [attr] (->$Pure attr))
Monotonic objects :
;; we promise to only ever update it but in monotonic ways
(defrecord $Monotonic [aattr]
PyObject
($get [x key] (@(:aattr x) key))
($__dict__ [x] @(:aattr x))
PyStateful
($assoc [x key value] (swap! (:aattr x) #(assoc % key value)))
($update [x key fun] (swap! (:aattr x) #(update-in % [key] fun))))
(defn Monotonic [attr] (->$Monotonic (atom attr)))
Individual Keywords represent a few magic entities
(def $keywords (atom {}))
(comment
(extend Keyword
PyObject
($__dict__ [x] (get @$magic-keywords x))
($get [x key] (($__dict__ x) key)))
)
;; (def-keyword $$types :type (Monotonic {:__name__ "class"}))
(defn mkClass [prefix name bases attr]
(Monotonic
(merge {:__class__ :type, :__bases__ bases,
:__name__ name, :__qualname__ (str prefix name),
:__subclasses__ []}
attr)))
(defmacro def-py-type [name bases attr spec]
`(def-py $$types ~name
(mkClass ~(pyname name) bases attr spec)))
;;; Define some base types
(def-py $None :None)
(def-py $NoneType ;; python singleton None
{"__instancecheck__" #(= % $None)})
;;; Now for methods and functions
(defn $get-class [x] ($get x :__class__))
(defn $get-method [class name] (NFN '$get-method))
(defn $mro [class] (NIY '$mro))
(defn-py isinstance? [instance class]
(<- (if-let [i? ($get class :__instancecheck__)] (i? instance))
(if-let [c ($get instance :__class__)] (or (= c class) (.indexOf ($mro class) c)))
(NIY))) ;; fallback
| null | https://raw.githubusercontent.com/qitab/pyjure/b9aa49b4f74c85f2b617e924f61eaddb194119bf/src/pyjure/mop.clj | clojure | Meta-Object Protocol for pyjure objects
#customization
#object.__dict__
TODO:
* most everything
* somehow use keywords as keys for python object fields, rather than strings,
since they are much faster (being interned makes comparison trivial).
Yet we must keep implementing python strings as uninterned strings, and
normal python dicts as normal clojure maps;
so when exposing internal class maps as user-visible python dicts,
have a special dict type that has a key adapter.
Phase one: protocols for objects with attributes
The protocols
Pure user objects
Pure object with given attribute dict
we promise to only ever update it but in monotonic ways
(def-keyword $$types :type (Monotonic {:__name__ "class"}))
Define some base types
python singleton None
Now for methods and functions
fallback | (ns pyjure.mop
(:require [clojure.core :as c]
[clojure.string :as str])
(:use [clojure.core.match :only [match]]
[pyjure.debug]
[pyjure.utilities]
[pyjure.names]))
(defprotocol PyObject
"Protocol for viewing something as a python-style object"
($get [x key] "get an object's attribute value by name")
($__dict__ [x] "get a dict of all of an object's attributes"))
(defn $get-via-attributes [x key] (get ($__dict__ x) key))
(defprotocol PyStateful
"Protocol for stateful manipulation of a python-style object"
($assoc [x key value] "set an object's attribute to a new value")
($update [x key fun] "update the value of an object's attribute"))
PyObject
($get [x key] ((:attr x) key))
($__dict__ [x] (:attr x)))
(defn Pure [attr] (->$Pure attr))
Monotonic objects :
(defrecord $Monotonic [aattr]
PyObject
($get [x key] (@(:aattr x) key))
($__dict__ [x] @(:aattr x))
PyStateful
($assoc [x key value] (swap! (:aattr x) #(assoc % key value)))
($update [x key fun] (swap! (:aattr x) #(update-in % [key] fun))))
(defn Monotonic [attr] (->$Monotonic (atom attr)))
Individual Keywords represent a few magic entities
(def $keywords (atom {}))
(comment
(extend Keyword
PyObject
($__dict__ [x] (get @$magic-keywords x))
($get [x key] (($__dict__ x) key)))
)
(defn mkClass [prefix name bases attr]
(Monotonic
(merge {:__class__ :type, :__bases__ bases,
:__name__ name, :__qualname__ (str prefix name),
:__subclasses__ []}
attr)))
(defmacro def-py-type [name bases attr spec]
`(def-py $$types ~name
(mkClass ~(pyname name) bases attr spec)))
(def-py $None :None)
{"__instancecheck__" #(= % $None)})
(defn $get-class [x] ($get x :__class__))
(defn $get-method [class name] (NFN '$get-method))
(defn $mro [class] (NIY '$mro))
(defn-py isinstance? [instance class]
(<- (if-let [i? ($get class :__instancecheck__)] (i? instance))
(if-let [c ($get instance :__class__)] (or (= c class) (.indexOf ($mro class) c)))
|
5043f834f87f3b1417576b306f72df0d14d31d7b466bd0975e03e3ec8c7c463a | otto-de/tesla-examples | system.clj | (ns de.otto.tesla.zk.example.system
(:require [de.otto.tesla.system :as system]
[de.otto.tesla.serving-with-jetty :as jetty]
[de.otto.tesla.zk.zk-observer :as observer]
[de.otto.tesla.zk.example.page :as example-page]
[com.stuartsierra.component :as c])
(:gen-class))
(defn example-system [runtime-config]
(-> (system/base-system (assoc runtime-config :name "example-zk-service"))
(assoc :zk-observer
(c/using (observer/new-zkobserver) [:config]))
(assoc :example-page
(c/using (example-page/new-example-page) [:handler :zk-observer :app-status]))
(jetty/add-server :example-page)))
(defn -main
"starts up the production system."
[& args]
(system/start (example-system {}))) | null | https://raw.githubusercontent.com/otto-de/tesla-examples/1be46ceae1dac53204a8f655a81285e1ca214f9b/zookeeper-example/src/de/otto/tesla/zk/example/system.clj | clojure | (ns de.otto.tesla.zk.example.system
(:require [de.otto.tesla.system :as system]
[de.otto.tesla.serving-with-jetty :as jetty]
[de.otto.tesla.zk.zk-observer :as observer]
[de.otto.tesla.zk.example.page :as example-page]
[com.stuartsierra.component :as c])
(:gen-class))
(defn example-system [runtime-config]
(-> (system/base-system (assoc runtime-config :name "example-zk-service"))
(assoc :zk-observer
(c/using (observer/new-zkobserver) [:config]))
(assoc :example-page
(c/using (example-page/new-example-page) [:handler :zk-observer :app-status]))
(jetty/add-server :example-page)))
(defn -main
"starts up the production system."
[& args]
(system/start (example-system {}))) | |
8593ddc205d54bddf6f3f7d67042b356974c3aab9930bc11714afdba9d2f06a8 | melange-re/melange | res_scanner.ml | module Diagnostics = Res_diagnostics
module Token = Res_token
module Comment = Res_comment
type mode = Jsx | Diamond
(* We hide the implementation detail of the scanner reading character. Our char
will also contain the special -1 value to indicate end-of-file. This isn't
ideal; we should clean this up *)
let hackyEOFChar = Char.unsafe_chr (-1)
type charEncoding = Char.t
type t = {
filename: string;
src: string;
mutable err:
startPos: Lexing.position
-> endPos: Lexing.position
-> Diagnostics.category
-> unit;
mutable ch: charEncoding; (* current character *)
mutable offset: int; (* character offset *)
mutable lineOffset: int; (* current line offset *)
mutable lnum: int; (* current line number *)
mutable mode: mode list;
}
let setDiamondMode scanner =
scanner.mode <- Diamond::scanner.mode
let setJsxMode scanner =
scanner.mode <- Jsx::scanner.mode
let popMode scanner mode =
match scanner.mode with
| m::ms when m = mode ->
scanner.mode <- ms
| _ -> ()
let inDiamondMode scanner = match scanner.mode with
| Diamond::_ -> true
| _ -> false
let inJsxMode scanner = match scanner.mode with
| Jsx::_ -> true
| _ -> false
let position scanner = Lexing.{
pos_fname = scanner.filename;
(* line number *)
pos_lnum = scanner.lnum;
(* offset of the beginning of the line (number
of characters between the beginning of the scanner and the beginning
of the line) *)
pos_bol = scanner.lineOffset;
(* [pos_cnum] is the offset of the position (number of
characters between the beginning of the scanner and the position). *)
pos_cnum = scanner.offset;
}
Small debugging util
❯ echo ' let msg = " hello " ' | ./lib / rescript.exe
let msg = " hello "
^-^ let 0 - 3
let msg = " hello "
^-^ msg 4 - 7
let msg = " hello "
^ = 8 - 9
let msg = " hello "
^-----^ string " hello " 10 - 17
let msg = " hello "
^ eof 18 - 18
let msg = " hello "
❯ echo 'let msg = "hello"' | ./lib/rescript.exe
let msg = "hello"
^-^ let 0-3
let msg = "hello"
^-^ msg 4-7
let msg = "hello"
^ = 8-9
let msg = "hello"
^-----^ string "hello" 10-17
let msg = "hello"
^ eof 18-18
let msg = "hello"
*)
let _printDebug ~startPos ~endPos scanner token =
let open Lexing in
print_string scanner.src;
print_string ((String.make [@doesNotRaise]) startPos.pos_cnum ' ');
print_char '^';
(match endPos.pos_cnum - startPos.pos_cnum with
| 0 ->
if token = Token.Eof then ()
else assert false
| 1 -> ()
| n -> (
print_string ((String.make [@doesNotRaise]) (n - 2) '-');
print_char '^';
));
print_char ' ';
print_string (Res_token.toString token);
print_char ' ';
print_int startPos.pos_cnum;
print_char '-';
print_int endPos.pos_cnum;
print_endline ""
[@@live]
let next scanner =
let nextOffset = scanner.offset + 1 in
(match scanner.ch with
| '\n' ->
scanner.lineOffset <- nextOffset;
scanner.lnum <- scanner.lnum + 1;
(* What about CRLF (\r + \n) on windows?
* \r\n will always be terminated by a \n
* -> we can just bump the line count on \n *)
| _ -> ());
if nextOffset < String.length scanner.src then (
scanner.offset <- nextOffset;
scanner.ch <- String.unsafe_get scanner.src scanner.offset;
) else (
scanner.offset <- String.length scanner.src;
scanner.ch <- hackyEOFChar
)
let next2 scanner =
next scanner;
next scanner
let next3 scanner =
next scanner;
next scanner;
next scanner
let peek scanner =
if scanner.offset + 1 < String.length scanner.src then
String.unsafe_get scanner.src (scanner.offset + 1)
else
hackyEOFChar
let peek2 scanner =
if scanner.offset + 2 < String.length scanner.src then
String.unsafe_get scanner.src (scanner.offset + 2)
else
hackyEOFChar
let make ~filename src =
{
filename;
src = src;
err = (fun ~startPos:_ ~endPos:_ _ -> ());
ch = if src = "" then hackyEOFChar else String.unsafe_get src 0;
offset = 0;
lineOffset = 0;
lnum = 1;
mode = [];
}
(* generic helpers *)
let isWhitespace ch =
match ch with
| ' ' | '\t' | '\n' | '\r' -> true
| _ -> false
let rec skipWhitespace scanner =
if isWhitespace scanner.ch then (
next scanner;
skipWhitespace scanner
)
let digitValue ch =
match ch with
| '0'..'9' -> (Char.code ch) - 48
| 'a'..'f' ->
(Char.code ch) - (Char.code 'a') + 10
| 'A'..'F' ->
(Char.code ch) + 32 - (Char.code 'a') + 10
| _ -> 16 (* larger than any legal value *)
let rec skipLowerCaseChars scanner =
match scanner.ch with
| 'a'..'z' -> next scanner; skipLowerCaseChars scanner
| _ -> ()
(* scanning helpers *)
let scanIdentifier scanner =
let startOff = scanner.offset in
let rec skipGoodChars scanner =
match scanner.ch with
| 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '\'' ->
next scanner;
skipGoodChars scanner
| _ -> ()
in
skipGoodChars scanner;
let str = (String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff) in
if '{' == scanner.ch && str = "list" then begin
next scanner;
(* TODO: this isn't great *)
Token.lookupKeyword "list{"
end
else Token.lookupKeyword str
let scanDigits scanner ~base =
if base <= 10 then
let rec loop scanner =
match scanner.ch with
| '0'..'9' | '_' -> next scanner; loop scanner
| _ -> ()
in loop scanner
else
let rec loop scanner =
match scanner.ch with
(* hex *)
| '0'..'9' | 'a'..'f' | 'A'..'F' | '_' -> next scanner; loop scanner
| _ -> ()
in loop scanner
float : ( 0 … 9 ) { 0 … 9∣ _ } [ . { 0 … 9∣ _ } ] [ ( e∣ E ) [ + ∣ - ] ( 0 … 9 ) { 0 … 9∣ _ } ]
let scanNumber scanner =
let startOff = scanner.offset in
(* integer part *)
let base = match scanner.ch with
| '0' ->
(match peek scanner with
| 'x' | 'X' -> next2 scanner; 16
| 'o' | 'O' -> next2 scanner; 8
| 'b' | 'B' -> next2 scanner; 2
| _ -> next scanner; 8)
| _ -> 10
in
scanDigits scanner ~base;
(* *)
let isFloat = if '.' == scanner.ch then (
next scanner;
scanDigits scanner ~base;
true
) else
false
in
(* exponent part *)
let isFloat =
match scanner.ch with
| 'e' | 'E' | 'p' | 'P' ->
(match peek scanner with
| '+' | '-' -> next2 scanner
| _ -> next scanner);
scanDigits scanner ~base;
true
| _ -> isFloat
in
let literal =
(String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff)
in
(* suffix *)
let suffix =
match scanner.ch with
| 'n' ->
let msg =
"Unsupported number type (nativeint). Did you mean `"
^ literal
^ "`?"
in
let pos = position scanner in
scanner.err ~startPos:pos ~endPos:pos (Diagnostics.message msg);
next scanner;
Some 'n'
| 'g'..'z' | 'G'..'Z' as ch ->
next scanner;
Some ch
| _ ->
None
in
if isFloat then
Token.Float {f = literal; suffix}
else
Token.Int {i = literal; suffix}
let scanExoticIdentifier scanner =
(* TODO: are we disregarding the current char...? Should be a quote *)
next scanner;
let buffer = Buffer.create 20 in
let startPos = position scanner in
let rec scan () =
match scanner.ch with
| '"' -> next scanner
| '\n' | '\r' ->
(* line break *)
let endPos = position scanner in
scanner.err ~startPos ~endPos (Diagnostics.message "A quoted identifier can't contain line breaks.");
next scanner
| ch when ch == hackyEOFChar ->
let endPos = position scanner in
scanner.err ~startPos ~endPos (Diagnostics.message "Did you forget a \" here?")
| ch ->
Buffer.add_char buffer ch;
next scanner;
scan ()
in
scan ();
(* TODO: do we really need to create a new buffer instead of substring once? *)
Token.Lident (Buffer.contents buffer)
let scanStringEscapeSequence ~startPos scanner =
let scan ~n ~base ~max =
let rec loop n x =
if n == 0 then x
else
let d = digitValue scanner.ch in
if d >= base then
let pos = position scanner in
let msg =
if scanner.ch == hackyEOFChar then "unclosed escape sequence"
else "unknown escape sequence"
in
scanner.err ~startPos ~endPos:pos (Diagnostics.message msg);
-1
else
let () = next scanner in
loop (n - 1) (x * base + d)
in
let x = loop n 0 in
if x > max then
let pos = position scanner in
let msg = "invalid escape sequence (value too high)" in
scanner.err ~startPos ~endPos:pos (Diagnostics.message msg)
in
match scanner.ch with
(* \ already consumed *)
| 'n' | 't' | 'b' | 'r' | '\\' | ' ' | '\'' | '"' ->
next scanner
| '0'..'9' ->
(* decimal *)
scan ~n:3 ~base:10 ~max:255
| 'o' ->
(* octal *)
next scanner;
scan ~n:3 ~base:8 ~max:255
| 'x' ->
(* hex *)
next scanner;
scan ~n:2 ~base:16 ~max:255
| _ ->
(* unknown escape sequence
* TODO: we should warn the user here. Let's not make it a hard error for now, for reason compat *)
(*
let pos = position scanner in
let msg =
if ch == -1 then "unclosed escape sequence"
else "unknown escape sequence"
in
scanner.err ~startPos ~endPos:pos (Diagnostics.message msg)
*)
()
let scanString scanner =
(* assumption: we've just matched a quote *)
let startPosWithQuote = position scanner in
next scanner;
let firstCharOffset = scanner.offset in
let rec scan () =
match scanner.ch with
| '"' ->
let lastCharOffset = scanner.offset in
next scanner;
(String.sub [@doesNotRaise]) scanner.src firstCharOffset (lastCharOffset - firstCharOffset)
| '\\' ->
let startPos = position scanner in
next scanner;
scanStringEscapeSequence ~startPos scanner;
scan ()
| ch when ch == hackyEOFChar ->
let endPos = position scanner in
scanner.err ~startPos:startPosWithQuote ~endPos Diagnostics.unclosedString;
(String.sub [@doesNotRaise]) scanner.src firstCharOffset (scanner.offset - firstCharOffset)
| _ ->
next scanner;
scan ()
in
Token.String (scan ())
let scanEscape scanner =
let convertNumber scanner ~n ~base =
let x = ref 0 in
for _ = n downto 1 do
let d = digitValue scanner.ch in
x := (!x * base) + d;
next scanner
done;
(Char.chr [@doesNotRaise]) !x
in
(* let offset = scanner.offset in *)
let c = match scanner.ch with
| '0'..'9' -> convertNumber scanner ~n:3 ~base:10
| 'b' -> next scanner; '\008'
| 'n' -> next scanner; '\010'
| 'r' -> next scanner; '\013'
| 't' -> next scanner; '\009'
| 'x' -> next scanner; convertNumber scanner ~n:2 ~base:16
| 'o' -> next scanner; convertNumber scanner ~n:3 ~base:8
| ch -> next scanner; ch
in
next scanner; (* Consume \' *)
(* TODO: do we know it's \' ? *)
Token.Character c
let scanSingleLineComment scanner =
let startOff = scanner.offset in
let startPos = position scanner in
let rec skip scanner =
match scanner.ch with
| '\n' | '\r' -> ()
| ch when ch == hackyEOFChar -> ()
| _ ->
next scanner;
skip scanner
in
skip scanner;
let endPos = position scanner in
Token.Comment (
Comment.makeSingleLineComment
~loc:(Location.{loc_start = startPos; loc_end = endPos; loc_ghost = false})
((String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff))
)
let scanMultiLineComment scanner =
(* assumption: we're only ever using this helper in `scan` after detecting a comment *)
let contentStartOff = scanner.offset + 2 in
let startPos = position scanner in
let rec scan ~depth =
(* invariant: depth > 0 right after this match. See assumption *)
match scanner.ch, peek scanner with
| '/', '*' ->
next2 scanner;
scan ~depth:(depth + 1)
| '*', '/' ->
next2 scanner;
if depth > 1 then scan ~depth:(depth - 1)
| ch, _ when ch == hackyEOFChar ->
let endPos = position scanner in
scanner.err ~startPos ~endPos Diagnostics.unclosedComment
| _ ->
next scanner;
scan ~depth
in
scan ~depth:0;
Token.Comment (
Comment.makeMultiLineComment
~loc:(Location.{loc_start = startPos; loc_end = (position scanner); loc_ghost = false})
((String.sub [@doesNotRaise]) scanner.src contentStartOff (scanner.offset - 2 - contentStartOff))
)
let scanTemplateLiteralToken scanner =
let startOff = scanner.offset in
(* if starting } here, consume it *)
if scanner.ch == '}' then next scanner;
let startPos = position scanner in
let rec scan () =
match scanner.ch with
| '`' ->
next scanner;
Token.TemplateTail(
(String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - 1 - startOff)
)
| '$' ->
(match peek scanner with
| '{' ->
next2 scanner;
let contents =
(String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - 2 - startOff)
in
Token.TemplatePart contents
| _ ->
next scanner;
scan())
| '\\' ->
(match peek scanner with
| '`' | '\\' | '$'
| '\n' | '\r' ->
(* line break *)
next2 scanner;
scan ()
| _ ->
next scanner;
scan ())
| ch when ch = hackyEOFChar ->
let endPos = position scanner in
scanner.err ~startPos ~endPos Diagnostics.unclosedTemplate;
Token.TemplateTail(
(String.sub [@doesNotRaise]) scanner.src startOff (max (scanner.offset - 1 - startOff) 0)
)
| _ ->
next scanner;
scan ()
in
let token = scan () in
let endPos = position scanner in
(startPos, endPos, token)
let rec scan scanner =
skipWhitespace scanner;
let startPos = position scanner in
let token = match scanner.ch with
(* peeking 0 char *)
| 'A'..'Z' | 'a'..'z' -> scanIdentifier scanner
| '0'..'9' -> scanNumber scanner
| '`' -> next scanner; Token.Backtick
| '~' -> next scanner; Token.Tilde
| '?' -> next scanner; Token.Question
| ';' -> next scanner; Token.Semicolon
| '(' -> next scanner; Token.Lparen
| ')' -> next scanner; Token.Rparen
| '[' -> next scanner; Token.Lbracket
| ']' -> next scanner; Token.Rbracket
| '{' -> next scanner; Token.Lbrace
| '}' -> next scanner; Token.Rbrace
| ',' -> next scanner; Token.Comma
| '"' -> scanString scanner
peeking 1 char
| '_' ->
(match peek scanner with
| 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' -> scanIdentifier scanner
| _ -> next scanner; Token.Underscore)
| '#' ->
(match peek scanner with
| '=' -> next2 scanner; Token.HashEqual
| _ -> next scanner; Token.Hash)
| '*' ->
(match peek scanner with
| '*' -> next2 scanner; Token.Exponentiation
| '.' -> next2 scanner; Token.AsteriskDot
| _ -> next scanner; Token.Asterisk)
| '@' ->
(match peek scanner with
| '@' -> next2 scanner; Token.AtAt
| _ -> next scanner; Token.At)
| '%' ->
(match peek scanner with
| '%' -> next2 scanner; Token.PercentPercent
| _ -> next scanner; Token.Percent)
| '|' ->
(match peek scanner with
| '|' -> next2 scanner; Token.Lor
| '>' -> next2 scanner; Token.BarGreater
| _ -> next scanner; Token.Bar)
| '&' ->
(match peek scanner with
| '&' -> next2 scanner; Token.Land
| _ -> next scanner; Token.Band)
| ':' ->
(match peek scanner with
| '=' -> next2 scanner; Token.ColonEqual
| '>' -> next2 scanner; Token.ColonGreaterThan
| _ -> next scanner; Token.Colon)
| '\\' -> next scanner; scanExoticIdentifier scanner
| '/' ->
(match peek scanner with
| '/' -> next2 scanner; scanSingleLineComment scanner
| '*' -> scanMultiLineComment scanner
| '.' -> next2 scanner; Token.ForwardslashDot
| _ -> next scanner; Token.Forwardslash)
| '-' ->
(match peek scanner with
| '.' -> next2 scanner; Token.MinusDot
| '>' -> next2 scanner; Token.MinusGreater
| _ -> next scanner; Token.Minus)
| '+' ->
(match peek scanner with
| '.' -> next2 scanner; Token.PlusDot
| '+' -> next2 scanner; Token.PlusPlus
| '=' -> next2 scanner; Token.PlusEqual
| _ -> next scanner; Token.Plus)
| '>' ->
(match peek scanner with
| '=' when not (inDiamondMode scanner) -> next2 scanner; Token.GreaterEqual
| _ -> next scanner; Token.GreaterThan)
| '<' when not (inJsxMode scanner) ->
(match peek scanner with
| '=' -> next2 scanner; Token.LessEqual
| _ -> next scanner; Token.LessThan)
special handling for JSX <
| '<' ->
Imagine the following : < div > <
* < indicates the start of a new jsx - element , the parser expects
* the name of a new element after the <
* Example : < div > < div
* But what if we have a / here : example < / in < div></div >
* This signals a closing element . To simulate the two - token lookahead ,
* the < / is emitted as a single new token LessThanSlash
* < indicates the start of a new jsx-element, the parser expects
* the name of a new element after the <
* Example: <div> <div
* But what if we have a / here: example </ in <div></div>
* This signals a closing element. To simulate the two-token lookahead,
* the </ is emitted as a single new token LessThanSlash *)
next scanner;
skipWhitespace scanner;
(match scanner.ch with
| '/' -> next scanner; Token.LessThanSlash
| '=' -> next scanner; Token.LessEqual
| _ -> Token.LessThan)
peeking 2 chars
| '.' ->
(match peek scanner, peek2 scanner with
| '.', '.' -> next3 scanner; Token.DotDotDot
| '.', _ -> next2 scanner; Token.DotDot
| _ -> next scanner; Token.Dot)
| '\'' ->
(match peek scanner, peek2 scanner with
| '\\', '"' ->
(* careful with this one! We're next-ing _once_ (not twice),
then relying on matching on the quote *)
next scanner; SingleQuote
| '\\', _ -> next2 scanner; scanEscape scanner
| ch, '\'' -> next3 scanner; Token.Character ch
| _ -> next scanner; SingleQuote)
| '!' ->
(match peek scanner, peek2 scanner with
| '=', '=' -> next3 scanner; Token.BangEqualEqual
| '=', _ -> next2 scanner; Token.BangEqual
| _ -> next scanner; Token.Bang)
| '=' ->
(match peek scanner, peek2 scanner with
| '=', '=' -> next3 scanner; Token.EqualEqualEqual
| '=', _ -> next2 scanner; Token.EqualEqual
| '>', _ -> next2 scanner; Token.EqualGreater
| _ -> next scanner; Token.Equal)
(* special cases *)
| ch when ch == hackyEOFChar -> next scanner; Token.Eof
| ch ->
(* if we arrive here, we're dealing with an unknown character,
* report the error and continue scanning… *)
next scanner;
let endPos = position scanner in
scanner.err ~startPos ~endPos (Diagnostics.unknownUchar ch);
let (_, _, token) = scan scanner in
token
in
let endPos = position scanner in
_ scanner token ;
(startPos, endPos, token)
(* misc helpers used elsewhere *)
Imagine : < div > < Navbar / > <
* is ` < ` the start of a jsx - child ? < div …
* or is it the start of a closing tag ? < /div >
* reconsiderLessThan peeks at the next token and
* determines the correct token to disambiguate
* is `<` the start of a jsx-child? <div …
* or is it the start of a closing tag? </div>
* reconsiderLessThan peeks at the next token and
* determines the correct token to disambiguate *)
let reconsiderLessThan scanner =
(* < consumed *)
skipWhitespace scanner;
if scanner.ch == '/' then
let () = next scanner in
Token.LessThanSlash
else
Token.LessThan
(* If an operator has whitespace around both sides, it's a binary operator *)
(* TODO: this helper seems out of place *)
let isBinaryOp src startCnum endCnum =
if startCnum == 0 then false
else begin
(* we're gonna put some assertions and invariant checks here because this is
used outside of the scanner's normal invariant assumptions *)
assert (endCnum >= 0);
assert (startCnum > 0 && startCnum < String.length src);
let leftOk = isWhitespace (String.unsafe_get src (startCnum - 1)) in
(* we need some stronger confidence that endCnum is ok *)
let rightOk = endCnum >= String.length src || isWhitespace (String.unsafe_get src endCnum) in
leftOk && rightOk
end
Assume ` { ` consumed , advances the scanner towards the ends of quoted strings . ( for conversion )
* In { | foo bar | } the scanner will be advanced until after the ` | } `
* In {| foo bar |} the scanner will be advanced until after the `|}` *)
let tryAdvanceQuotedString scanner =
let rec scanContents tag =
match scanner.ch with
| '|' ->
next scanner;
(match scanner.ch with
| 'a'..'z' ->
let startOff = scanner.offset in
skipLowerCaseChars scanner;
let suffix =
(String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff)
in begin
if tag = suffix then (
if scanner.ch = '}' then
next scanner
else
scanContents tag
) else
scanContents tag
end
| '}' -> next scanner
| _ -> scanContents tag)
| ch when ch == hackyEOFChar ->
TODO : why is this place checking EOF and not others ?
()
| _ ->
next scanner;
scanContents tag
in
match scanner.ch with
| 'a'..'z' ->
let startOff = scanner.offset in
skipLowerCaseChars scanner;
let tag = (String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff) in
if scanner.ch = '|' then scanContents tag
| '|' ->
scanContents ""
| _ -> ()
| null | https://raw.githubusercontent.com/melange-re/melange/246e6df78fe3b6cc124cb48e5a37fdffd99379ed/jscomp/napkin/res_scanner.ml | ocaml | We hide the implementation detail of the scanner reading character. Our char
will also contain the special -1 value to indicate end-of-file. This isn't
ideal; we should clean this up
current character
character offset
current line offset
current line number
line number
offset of the beginning of the line (number
of characters between the beginning of the scanner and the beginning
of the line)
[pos_cnum] is the offset of the position (number of
characters between the beginning of the scanner and the position).
What about CRLF (\r + \n) on windows?
* \r\n will always be terminated by a \n
* -> we can just bump the line count on \n
generic helpers
larger than any legal value
scanning helpers
TODO: this isn't great
hex
integer part
exponent part
suffix
TODO: are we disregarding the current char...? Should be a quote
line break
TODO: do we really need to create a new buffer instead of substring once?
\ already consumed
decimal
octal
hex
unknown escape sequence
* TODO: we should warn the user here. Let's not make it a hard error for now, for reason compat
let pos = position scanner in
let msg =
if ch == -1 then "unclosed escape sequence"
else "unknown escape sequence"
in
scanner.err ~startPos ~endPos:pos (Diagnostics.message msg)
assumption: we've just matched a quote
let offset = scanner.offset in
Consume \'
TODO: do we know it's \' ?
assumption: we're only ever using this helper in `scan` after detecting a comment
invariant: depth > 0 right after this match. See assumption
if starting } here, consume it
line break
peeking 0 char
careful with this one! We're next-ing _once_ (not twice),
then relying on matching on the quote
special cases
if we arrive here, we're dealing with an unknown character,
* report the error and continue scanning…
misc helpers used elsewhere
< consumed
If an operator has whitespace around both sides, it's a binary operator
TODO: this helper seems out of place
we're gonna put some assertions and invariant checks here because this is
used outside of the scanner's normal invariant assumptions
we need some stronger confidence that endCnum is ok | module Diagnostics = Res_diagnostics
module Token = Res_token
module Comment = Res_comment
type mode = Jsx | Diamond
let hackyEOFChar = Char.unsafe_chr (-1)
type charEncoding = Char.t
type t = {
filename: string;
src: string;
mutable err:
startPos: Lexing.position
-> endPos: Lexing.position
-> Diagnostics.category
-> unit;
mutable mode: mode list;
}
let setDiamondMode scanner =
scanner.mode <- Diamond::scanner.mode
let setJsxMode scanner =
scanner.mode <- Jsx::scanner.mode
let popMode scanner mode =
match scanner.mode with
| m::ms when m = mode ->
scanner.mode <- ms
| _ -> ()
let inDiamondMode scanner = match scanner.mode with
| Diamond::_ -> true
| _ -> false
let inJsxMode scanner = match scanner.mode with
| Jsx::_ -> true
| _ -> false
let position scanner = Lexing.{
pos_fname = scanner.filename;
pos_lnum = scanner.lnum;
pos_bol = scanner.lineOffset;
pos_cnum = scanner.offset;
}
Small debugging util
❯ echo ' let msg = " hello " ' | ./lib / rescript.exe
let msg = " hello "
^-^ let 0 - 3
let msg = " hello "
^-^ msg 4 - 7
let msg = " hello "
^ = 8 - 9
let msg = " hello "
^-----^ string " hello " 10 - 17
let msg = " hello "
^ eof 18 - 18
let msg = " hello "
❯ echo 'let msg = "hello"' | ./lib/rescript.exe
let msg = "hello"
^-^ let 0-3
let msg = "hello"
^-^ msg 4-7
let msg = "hello"
^ = 8-9
let msg = "hello"
^-----^ string "hello" 10-17
let msg = "hello"
^ eof 18-18
let msg = "hello"
*)
let _printDebug ~startPos ~endPos scanner token =
let open Lexing in
print_string scanner.src;
print_string ((String.make [@doesNotRaise]) startPos.pos_cnum ' ');
print_char '^';
(match endPos.pos_cnum - startPos.pos_cnum with
| 0 ->
if token = Token.Eof then ()
else assert false
| 1 -> ()
| n -> (
print_string ((String.make [@doesNotRaise]) (n - 2) '-');
print_char '^';
));
print_char ' ';
print_string (Res_token.toString token);
print_char ' ';
print_int startPos.pos_cnum;
print_char '-';
print_int endPos.pos_cnum;
print_endline ""
[@@live]
let next scanner =
let nextOffset = scanner.offset + 1 in
(match scanner.ch with
| '\n' ->
scanner.lineOffset <- nextOffset;
scanner.lnum <- scanner.lnum + 1;
| _ -> ());
if nextOffset < String.length scanner.src then (
scanner.offset <- nextOffset;
scanner.ch <- String.unsafe_get scanner.src scanner.offset;
) else (
scanner.offset <- String.length scanner.src;
scanner.ch <- hackyEOFChar
)
let next2 scanner =
next scanner;
next scanner
let next3 scanner =
next scanner;
next scanner;
next scanner
let peek scanner =
if scanner.offset + 1 < String.length scanner.src then
String.unsafe_get scanner.src (scanner.offset + 1)
else
hackyEOFChar
let peek2 scanner =
if scanner.offset + 2 < String.length scanner.src then
String.unsafe_get scanner.src (scanner.offset + 2)
else
hackyEOFChar
let make ~filename src =
{
filename;
src = src;
err = (fun ~startPos:_ ~endPos:_ _ -> ());
ch = if src = "" then hackyEOFChar else String.unsafe_get src 0;
offset = 0;
lineOffset = 0;
lnum = 1;
mode = [];
}
let isWhitespace ch =
match ch with
| ' ' | '\t' | '\n' | '\r' -> true
| _ -> false
let rec skipWhitespace scanner =
if isWhitespace scanner.ch then (
next scanner;
skipWhitespace scanner
)
let digitValue ch =
match ch with
| '0'..'9' -> (Char.code ch) - 48
| 'a'..'f' ->
(Char.code ch) - (Char.code 'a') + 10
| 'A'..'F' ->
(Char.code ch) + 32 - (Char.code 'a') + 10
let rec skipLowerCaseChars scanner =
match scanner.ch with
| 'a'..'z' -> next scanner; skipLowerCaseChars scanner
| _ -> ()
let scanIdentifier scanner =
let startOff = scanner.offset in
let rec skipGoodChars scanner =
match scanner.ch with
| 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '\'' ->
next scanner;
skipGoodChars scanner
| _ -> ()
in
skipGoodChars scanner;
let str = (String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff) in
if '{' == scanner.ch && str = "list" then begin
next scanner;
Token.lookupKeyword "list{"
end
else Token.lookupKeyword str
let scanDigits scanner ~base =
if base <= 10 then
let rec loop scanner =
match scanner.ch with
| '0'..'9' | '_' -> next scanner; loop scanner
| _ -> ()
in loop scanner
else
let rec loop scanner =
match scanner.ch with
| '0'..'9' | 'a'..'f' | 'A'..'F' | '_' -> next scanner; loop scanner
| _ -> ()
in loop scanner
float : ( 0 … 9 ) { 0 … 9∣ _ } [ . { 0 … 9∣ _ } ] [ ( e∣ E ) [ + ∣ - ] ( 0 … 9 ) { 0 … 9∣ _ } ]
let scanNumber scanner =
let startOff = scanner.offset in
let base = match scanner.ch with
| '0' ->
(match peek scanner with
| 'x' | 'X' -> next2 scanner; 16
| 'o' | 'O' -> next2 scanner; 8
| 'b' | 'B' -> next2 scanner; 2
| _ -> next scanner; 8)
| _ -> 10
in
scanDigits scanner ~base;
let isFloat = if '.' == scanner.ch then (
next scanner;
scanDigits scanner ~base;
true
) else
false
in
let isFloat =
match scanner.ch with
| 'e' | 'E' | 'p' | 'P' ->
(match peek scanner with
| '+' | '-' -> next2 scanner
| _ -> next scanner);
scanDigits scanner ~base;
true
| _ -> isFloat
in
let literal =
(String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff)
in
let suffix =
match scanner.ch with
| 'n' ->
let msg =
"Unsupported number type (nativeint). Did you mean `"
^ literal
^ "`?"
in
let pos = position scanner in
scanner.err ~startPos:pos ~endPos:pos (Diagnostics.message msg);
next scanner;
Some 'n'
| 'g'..'z' | 'G'..'Z' as ch ->
next scanner;
Some ch
| _ ->
None
in
if isFloat then
Token.Float {f = literal; suffix}
else
Token.Int {i = literal; suffix}
let scanExoticIdentifier scanner =
next scanner;
let buffer = Buffer.create 20 in
let startPos = position scanner in
let rec scan () =
match scanner.ch with
| '"' -> next scanner
| '\n' | '\r' ->
let endPos = position scanner in
scanner.err ~startPos ~endPos (Diagnostics.message "A quoted identifier can't contain line breaks.");
next scanner
| ch when ch == hackyEOFChar ->
let endPos = position scanner in
scanner.err ~startPos ~endPos (Diagnostics.message "Did you forget a \" here?")
| ch ->
Buffer.add_char buffer ch;
next scanner;
scan ()
in
scan ();
Token.Lident (Buffer.contents buffer)
let scanStringEscapeSequence ~startPos scanner =
let scan ~n ~base ~max =
let rec loop n x =
if n == 0 then x
else
let d = digitValue scanner.ch in
if d >= base then
let pos = position scanner in
let msg =
if scanner.ch == hackyEOFChar then "unclosed escape sequence"
else "unknown escape sequence"
in
scanner.err ~startPos ~endPos:pos (Diagnostics.message msg);
-1
else
let () = next scanner in
loop (n - 1) (x * base + d)
in
let x = loop n 0 in
if x > max then
let pos = position scanner in
let msg = "invalid escape sequence (value too high)" in
scanner.err ~startPos ~endPos:pos (Diagnostics.message msg)
in
match scanner.ch with
| 'n' | 't' | 'b' | 'r' | '\\' | ' ' | '\'' | '"' ->
next scanner
| '0'..'9' ->
scan ~n:3 ~base:10 ~max:255
| 'o' ->
next scanner;
scan ~n:3 ~base:8 ~max:255
| 'x' ->
next scanner;
scan ~n:2 ~base:16 ~max:255
| _ ->
()
let scanString scanner =
let startPosWithQuote = position scanner in
next scanner;
let firstCharOffset = scanner.offset in
let rec scan () =
match scanner.ch with
| '"' ->
let lastCharOffset = scanner.offset in
next scanner;
(String.sub [@doesNotRaise]) scanner.src firstCharOffset (lastCharOffset - firstCharOffset)
| '\\' ->
let startPos = position scanner in
next scanner;
scanStringEscapeSequence ~startPos scanner;
scan ()
| ch when ch == hackyEOFChar ->
let endPos = position scanner in
scanner.err ~startPos:startPosWithQuote ~endPos Diagnostics.unclosedString;
(String.sub [@doesNotRaise]) scanner.src firstCharOffset (scanner.offset - firstCharOffset)
| _ ->
next scanner;
scan ()
in
Token.String (scan ())
let scanEscape scanner =
let convertNumber scanner ~n ~base =
let x = ref 0 in
for _ = n downto 1 do
let d = digitValue scanner.ch in
x := (!x * base) + d;
next scanner
done;
(Char.chr [@doesNotRaise]) !x
in
let c = match scanner.ch with
| '0'..'9' -> convertNumber scanner ~n:3 ~base:10
| 'b' -> next scanner; '\008'
| 'n' -> next scanner; '\010'
| 'r' -> next scanner; '\013'
| 't' -> next scanner; '\009'
| 'x' -> next scanner; convertNumber scanner ~n:2 ~base:16
| 'o' -> next scanner; convertNumber scanner ~n:3 ~base:8
| ch -> next scanner; ch
in
Token.Character c
let scanSingleLineComment scanner =
let startOff = scanner.offset in
let startPos = position scanner in
let rec skip scanner =
match scanner.ch with
| '\n' | '\r' -> ()
| ch when ch == hackyEOFChar -> ()
| _ ->
next scanner;
skip scanner
in
skip scanner;
let endPos = position scanner in
Token.Comment (
Comment.makeSingleLineComment
~loc:(Location.{loc_start = startPos; loc_end = endPos; loc_ghost = false})
((String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff))
)
let scanMultiLineComment scanner =
let contentStartOff = scanner.offset + 2 in
let startPos = position scanner in
let rec scan ~depth =
match scanner.ch, peek scanner with
| '/', '*' ->
next2 scanner;
scan ~depth:(depth + 1)
| '*', '/' ->
next2 scanner;
if depth > 1 then scan ~depth:(depth - 1)
| ch, _ when ch == hackyEOFChar ->
let endPos = position scanner in
scanner.err ~startPos ~endPos Diagnostics.unclosedComment
| _ ->
next scanner;
scan ~depth
in
scan ~depth:0;
Token.Comment (
Comment.makeMultiLineComment
~loc:(Location.{loc_start = startPos; loc_end = (position scanner); loc_ghost = false})
((String.sub [@doesNotRaise]) scanner.src contentStartOff (scanner.offset - 2 - contentStartOff))
)
let scanTemplateLiteralToken scanner =
let startOff = scanner.offset in
if scanner.ch == '}' then next scanner;
let startPos = position scanner in
let rec scan () =
match scanner.ch with
| '`' ->
next scanner;
Token.TemplateTail(
(String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - 1 - startOff)
)
| '$' ->
(match peek scanner with
| '{' ->
next2 scanner;
let contents =
(String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - 2 - startOff)
in
Token.TemplatePart contents
| _ ->
next scanner;
scan())
| '\\' ->
(match peek scanner with
| '`' | '\\' | '$'
| '\n' | '\r' ->
next2 scanner;
scan ()
| _ ->
next scanner;
scan ())
| ch when ch = hackyEOFChar ->
let endPos = position scanner in
scanner.err ~startPos ~endPos Diagnostics.unclosedTemplate;
Token.TemplateTail(
(String.sub [@doesNotRaise]) scanner.src startOff (max (scanner.offset - 1 - startOff) 0)
)
| _ ->
next scanner;
scan ()
in
let token = scan () in
let endPos = position scanner in
(startPos, endPos, token)
let rec scan scanner =
skipWhitespace scanner;
let startPos = position scanner in
let token = match scanner.ch with
| 'A'..'Z' | 'a'..'z' -> scanIdentifier scanner
| '0'..'9' -> scanNumber scanner
| '`' -> next scanner; Token.Backtick
| '~' -> next scanner; Token.Tilde
| '?' -> next scanner; Token.Question
| ';' -> next scanner; Token.Semicolon
| '(' -> next scanner; Token.Lparen
| ')' -> next scanner; Token.Rparen
| '[' -> next scanner; Token.Lbracket
| ']' -> next scanner; Token.Rbracket
| '{' -> next scanner; Token.Lbrace
| '}' -> next scanner; Token.Rbrace
| ',' -> next scanner; Token.Comma
| '"' -> scanString scanner
peeking 1 char
| '_' ->
(match peek scanner with
| 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' -> scanIdentifier scanner
| _ -> next scanner; Token.Underscore)
| '#' ->
(match peek scanner with
| '=' -> next2 scanner; Token.HashEqual
| _ -> next scanner; Token.Hash)
| '*' ->
(match peek scanner with
| '*' -> next2 scanner; Token.Exponentiation
| '.' -> next2 scanner; Token.AsteriskDot
| _ -> next scanner; Token.Asterisk)
| '@' ->
(match peek scanner with
| '@' -> next2 scanner; Token.AtAt
| _ -> next scanner; Token.At)
| '%' ->
(match peek scanner with
| '%' -> next2 scanner; Token.PercentPercent
| _ -> next scanner; Token.Percent)
| '|' ->
(match peek scanner with
| '|' -> next2 scanner; Token.Lor
| '>' -> next2 scanner; Token.BarGreater
| _ -> next scanner; Token.Bar)
| '&' ->
(match peek scanner with
| '&' -> next2 scanner; Token.Land
| _ -> next scanner; Token.Band)
| ':' ->
(match peek scanner with
| '=' -> next2 scanner; Token.ColonEqual
| '>' -> next2 scanner; Token.ColonGreaterThan
| _ -> next scanner; Token.Colon)
| '\\' -> next scanner; scanExoticIdentifier scanner
| '/' ->
(match peek scanner with
| '/' -> next2 scanner; scanSingleLineComment scanner
| '*' -> scanMultiLineComment scanner
| '.' -> next2 scanner; Token.ForwardslashDot
| _ -> next scanner; Token.Forwardslash)
| '-' ->
(match peek scanner with
| '.' -> next2 scanner; Token.MinusDot
| '>' -> next2 scanner; Token.MinusGreater
| _ -> next scanner; Token.Minus)
| '+' ->
(match peek scanner with
| '.' -> next2 scanner; Token.PlusDot
| '+' -> next2 scanner; Token.PlusPlus
| '=' -> next2 scanner; Token.PlusEqual
| _ -> next scanner; Token.Plus)
| '>' ->
(match peek scanner with
| '=' when not (inDiamondMode scanner) -> next2 scanner; Token.GreaterEqual
| _ -> next scanner; Token.GreaterThan)
| '<' when not (inJsxMode scanner) ->
(match peek scanner with
| '=' -> next2 scanner; Token.LessEqual
| _ -> next scanner; Token.LessThan)
special handling for JSX <
| '<' ->
Imagine the following : < div > <
* < indicates the start of a new jsx - element , the parser expects
* the name of a new element after the <
* Example : < div > < div
* But what if we have a / here : example < / in < div></div >
* This signals a closing element . To simulate the two - token lookahead ,
* the < / is emitted as a single new token LessThanSlash
* < indicates the start of a new jsx-element, the parser expects
* the name of a new element after the <
* Example: <div> <div
* But what if we have a / here: example </ in <div></div>
* This signals a closing element. To simulate the two-token lookahead,
* the </ is emitted as a single new token LessThanSlash *)
next scanner;
skipWhitespace scanner;
(match scanner.ch with
| '/' -> next scanner; Token.LessThanSlash
| '=' -> next scanner; Token.LessEqual
| _ -> Token.LessThan)
peeking 2 chars
| '.' ->
(match peek scanner, peek2 scanner with
| '.', '.' -> next3 scanner; Token.DotDotDot
| '.', _ -> next2 scanner; Token.DotDot
| _ -> next scanner; Token.Dot)
| '\'' ->
(match peek scanner, peek2 scanner with
| '\\', '"' ->
next scanner; SingleQuote
| '\\', _ -> next2 scanner; scanEscape scanner
| ch, '\'' -> next3 scanner; Token.Character ch
| _ -> next scanner; SingleQuote)
| '!' ->
(match peek scanner, peek2 scanner with
| '=', '=' -> next3 scanner; Token.BangEqualEqual
| '=', _ -> next2 scanner; Token.BangEqual
| _ -> next scanner; Token.Bang)
| '=' ->
(match peek scanner, peek2 scanner with
| '=', '=' -> next3 scanner; Token.EqualEqualEqual
| '=', _ -> next2 scanner; Token.EqualEqual
| '>', _ -> next2 scanner; Token.EqualGreater
| _ -> next scanner; Token.Equal)
| ch when ch == hackyEOFChar -> next scanner; Token.Eof
| ch ->
next scanner;
let endPos = position scanner in
scanner.err ~startPos ~endPos (Diagnostics.unknownUchar ch);
let (_, _, token) = scan scanner in
token
in
let endPos = position scanner in
_ scanner token ;
(startPos, endPos, token)
Imagine : < div > < Navbar / > <
* is ` < ` the start of a jsx - child ? < div …
* or is it the start of a closing tag ? < /div >
* reconsiderLessThan peeks at the next token and
* determines the correct token to disambiguate
* is `<` the start of a jsx-child? <div …
* or is it the start of a closing tag? </div>
* reconsiderLessThan peeks at the next token and
* determines the correct token to disambiguate *)
let reconsiderLessThan scanner =
skipWhitespace scanner;
if scanner.ch == '/' then
let () = next scanner in
Token.LessThanSlash
else
Token.LessThan
let isBinaryOp src startCnum endCnum =
if startCnum == 0 then false
else begin
assert (endCnum >= 0);
assert (startCnum > 0 && startCnum < String.length src);
let leftOk = isWhitespace (String.unsafe_get src (startCnum - 1)) in
let rightOk = endCnum >= String.length src || isWhitespace (String.unsafe_get src endCnum) in
leftOk && rightOk
end
Assume ` { ` consumed , advances the scanner towards the ends of quoted strings . ( for conversion )
* In { | foo bar | } the scanner will be advanced until after the ` | } `
* In {| foo bar |} the scanner will be advanced until after the `|}` *)
let tryAdvanceQuotedString scanner =
let rec scanContents tag =
match scanner.ch with
| '|' ->
next scanner;
(match scanner.ch with
| 'a'..'z' ->
let startOff = scanner.offset in
skipLowerCaseChars scanner;
let suffix =
(String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff)
in begin
if tag = suffix then (
if scanner.ch = '}' then
next scanner
else
scanContents tag
) else
scanContents tag
end
| '}' -> next scanner
| _ -> scanContents tag)
| ch when ch == hackyEOFChar ->
TODO : why is this place checking EOF and not others ?
()
| _ ->
next scanner;
scanContents tag
in
match scanner.ch with
| 'a'..'z' ->
let startOff = scanner.offset in
skipLowerCaseChars scanner;
let tag = (String.sub [@doesNotRaise]) scanner.src startOff (scanner.offset - startOff) in
if scanner.ch = '|' then scanContents tag
| '|' ->
scanContents ""
| _ -> ()
|
6ad679190f8bc4ee7efd977b0c1506214b788076946b57a91ce8fb17b935f349 | kirstin-rhys/nestedmap | ForestSpec.hs | # LANGUAGE NoImplicitPrelude , UnicodeSyntax #
module Data.Nested.ForestSpec (spec) where
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck.Modifiers
import qualified Data.List as L
import qualified Data.List.Ordered as OL
import Data.Function.Unicode ((∘))
import Prelude.Unicode ((⊥))
import Data.Int (Int)
import Data.Char (Char)
import Data.Maybe (Maybe(Just,Nothing))
import Data.Bool (not)
import Data.Bool.Unicode ((∨))
import Data.Eq.Unicode ((≡))
import Data.Function (($))
import Data.Foldable (all)
import Data.Tuple (uncurry, fst, snd)
import Data.Functor (fmap)
import Data.Bool (Bool(..))
import Data.Nested.Forest ( empty
, null
, size
, singleton
, fromList
, toList
, lookup
)
spec :: Spec
spec = describe "Forest" $ do
prop "null empty should always be true" prop_null_empty
prop "the size of a singleton forest should be the length of the argument" prop_singleton_size
prop "a unique ordered key set should be idempotent" prop_identity_unique_ordered
prop "the result of lookup should always be the same size as the query" prop_lookup_length_idempotent
prop "given non-overlapping keys, we should always find the input values" prop_lookup_true
prop_null_empty ∷ Bool
prop_null_empty = null empty ≡ True
prop_singleton_size ∷ [(Char,Int)] → Bool
prop_singleton_size xs = size (singleton xs) ≡ L.length xs
prop_identity_unique_ordered ∷ [[Int]] → Bool
prop_identity_unique_ordered vss = (L.null vss) ∨ (toList (fromList kvs) ≡ kvs)
where kvs = zipMV vss' ['a'..]
vss' = L.filter (not ∘ L.null) vss
prop_lookup_length_idempotent ∷ [[(Char,Int)]] → [Char] → Bool
prop_lookup_length_idempotent kvs ks = L.length ks ≡ L.length (lookup ks (fromList kvs))
prop_lookup_true ∷ [[Int]] → Bool
prop_lookup_true vss = all foo kvss
where tree = fromList kvss
foo ∷ [(Char, Int)] → Bool
foo kvs = fmap (Just ∘ snd) kvs ≡ lookup (fmap fst kvs) tree
kvss = zipMV vss ['a'..]
-- this is a hack. should be replace by a [[(k,v)]] generator which can guarantee non-overlapping keys
zipMV ∷ [[α]] → [β] → [[(β, α)]]
zipMV [] _ = []
zipMV (vs:vss) ks = L.zip ks' vs : zipMV vss ks''
where (ks', ks'') = L.splitAt (L.length vs) ks
| null | https://raw.githubusercontent.com/kirstin-rhys/nestedmap/04538d718f308bdeec8d1228cfe205e533b85ec9/test/Data/Nested/ForestSpec.hs | haskell | this is a hack. should be replace by a [[(k,v)]] generator which can guarantee non-overlapping keys | # LANGUAGE NoImplicitPrelude , UnicodeSyntax #
module Data.Nested.ForestSpec (spec) where
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck.Modifiers
import qualified Data.List as L
import qualified Data.List.Ordered as OL
import Data.Function.Unicode ((∘))
import Prelude.Unicode ((⊥))
import Data.Int (Int)
import Data.Char (Char)
import Data.Maybe (Maybe(Just,Nothing))
import Data.Bool (not)
import Data.Bool.Unicode ((∨))
import Data.Eq.Unicode ((≡))
import Data.Function (($))
import Data.Foldable (all)
import Data.Tuple (uncurry, fst, snd)
import Data.Functor (fmap)
import Data.Bool (Bool(..))
import Data.Nested.Forest ( empty
, null
, size
, singleton
, fromList
, toList
, lookup
)
spec :: Spec
spec = describe "Forest" $ do
prop "null empty should always be true" prop_null_empty
prop "the size of a singleton forest should be the length of the argument" prop_singleton_size
prop "a unique ordered key set should be idempotent" prop_identity_unique_ordered
prop "the result of lookup should always be the same size as the query" prop_lookup_length_idempotent
prop "given non-overlapping keys, we should always find the input values" prop_lookup_true
prop_null_empty ∷ Bool
prop_null_empty = null empty ≡ True
prop_singleton_size ∷ [(Char,Int)] → Bool
prop_singleton_size xs = size (singleton xs) ≡ L.length xs
prop_identity_unique_ordered ∷ [[Int]] → Bool
prop_identity_unique_ordered vss = (L.null vss) ∨ (toList (fromList kvs) ≡ kvs)
where kvs = zipMV vss' ['a'..]
vss' = L.filter (not ∘ L.null) vss
prop_lookup_length_idempotent ∷ [[(Char,Int)]] → [Char] → Bool
prop_lookup_length_idempotent kvs ks = L.length ks ≡ L.length (lookup ks (fromList kvs))
prop_lookup_true ∷ [[Int]] → Bool
prop_lookup_true vss = all foo kvss
where tree = fromList kvss
foo ∷ [(Char, Int)] → Bool
foo kvs = fmap (Just ∘ snd) kvs ≡ lookup (fmap fst kvs) tree
kvss = zipMV vss ['a'..]
zipMV ∷ [[α]] → [β] → [[(β, α)]]
zipMV [] _ = []
zipMV (vs:vss) ks = L.zip ks' vs : zipMV vss ks''
where (ks', ks'') = L.splitAt (L.length vs) ks
|
fdddc712cf0fb667dc12d5ffc8979a1738b73f1662e42d78281adf489918b6ea | hoplon/brew | simplemde.cljs | (ns hoplon.simplemde
(:require [hoplon.core :as hl]
[javelin.core :as j]
[cljsjs.simplemde]))
(defn editor!
"Creates a SimpleMDE editor."
[& [config]]
(if config (js/SimpleMDE. (clj->js config))
(js/SimpleMDE.)))
(defelem editor [attr kids]
(let [content (:content attr)
change (:change attr)
blur (:blur attr)
attr (dissoc attr :content :change :blur)
editor (j/cell nil)
textarea (hl/textarea :css {:display "none"} kids)]
(j/cell= (when (and content editor) (.value editor content)))
(hl/with-dom textarea
(let [sme (editor! (assoc attr :element textarea))
cm (.-codemirror sme)]
(reset! editor sme)
(when blur
(.on cm "blur" #(blur (.getValue %1))))
(when change
(.on cm "change" #(change (.getValue %1))))))
textarea))
| null | https://raw.githubusercontent.com/hoplon/brew/08d520fc43a14180dd52955af48dae63d0065add/src/hoplon/simplemde.cljs | clojure | (ns hoplon.simplemde
(:require [hoplon.core :as hl]
[javelin.core :as j]
[cljsjs.simplemde]))
(defn editor!
"Creates a SimpleMDE editor."
[& [config]]
(if config (js/SimpleMDE. (clj->js config))
(js/SimpleMDE.)))
(defelem editor [attr kids]
(let [content (:content attr)
change (:change attr)
blur (:blur attr)
attr (dissoc attr :content :change :blur)
editor (j/cell nil)
textarea (hl/textarea :css {:display "none"} kids)]
(j/cell= (when (and content editor) (.value editor content)))
(hl/with-dom textarea
(let [sme (editor! (assoc attr :element textarea))
cm (.-codemirror sme)]
(reset! editor sme)
(when blur
(.on cm "blur" #(blur (.getValue %1))))
(when change
(.on cm "change" #(change (.getValue %1))))))
textarea))
| |
1d92b907a51b98a5efc31f7168d7e477ac1484772328a1422d65d3d16a1f16c3 | talex5/mirage-trace-viewer | mtv_ctf_loader.mli | Copyright ( C ) 2014 ,
open Bigarray
type packet
type log_buffer = (char, int8_unsigned_elt, c_layout) Array1.t
(** Locate packets in a trace stream and return them (using [Array1.sub]) in the correct order. *)
val packets : log_buffer -> packet list
val packet_data : packet -> log_buffer
val from_bigarray : log_buffer -> Mtv_event.t list
| null | https://raw.githubusercontent.com/talex5/mirage-trace-viewer/8492c53c57778fd55474d37b34833129c5c1d5d3/lib/mtv_ctf_loader.mli | ocaml | * Locate packets in a trace stream and return them (using [Array1.sub]) in the correct order. | Copyright ( C ) 2014 ,
open Bigarray
type packet
type log_buffer = (char, int8_unsigned_elt, c_layout) Array1.t
val packets : log_buffer -> packet list
val packet_data : packet -> log_buffer
val from_bigarray : log_buffer -> Mtv_event.t list
|
84f0793ee16c1591ea660e965ebd42ef5a2b571945350f0a403338ee4210017c | xmonad/xmonad-contrib | WorkspaceCompare.hs | -----------------------------------------------------------------------------
-- |
Module : XMonad . Util . WorkspaceCompare
-- Description : Functions for examining, comparing, and sorting workspaces.
Copyright : ( c ) < >
-- License : BSD3-style (see LICENSE)
--
Maintainer : < >
-- Stability : unstable
-- Portability : unportable
--
-----------------------------------------------------------------------------
module XMonad.Util.WorkspaceCompare ( WorkspaceCompare, WorkspaceSort
, filterOutWs
, getWsIndex
, getWsCompare
, getWsCompareByTag
, getXineramaPhysicalWsCompare
, getXineramaWsCompare
, mkWsSort
, getSortByIndex
, getSortByTag
, getSortByXineramaPhysicalRule
, getSortByXineramaRule ) where
import XMonad
import qualified XMonad.StackSet as S
import XMonad.Prelude
import XMonad.Actions.PhysicalScreens (ScreenComparator(ScreenComparator), getScreenIdAndRectangle, screenComparatorById)
type WorkspaceCompare = WorkspaceId -> WorkspaceId -> Ordering
type WorkspaceSort = [WindowSpace] -> [WindowSpace]
-- | Transforms a workspace list by filtering out the workspaces that
-- correspond to the given 'tag's. Intended for use with 'logHook's (see
' XMonad . Hooks . StatusBar . PP.filterOutWsPP ' ) and " XMonad . Hooks . EwmhDesktops "
-- (see 'XMonad.Hooks.EwmhDesktops.addEwmhWorkspaceSort').
filterOutWs :: [WorkspaceId] -> WorkspaceSort
filterOutWs ws = filter (\S.Workspace{ S.tag = tag } -> tag `notElem` ws)
-- | Lookup the index of a workspace id in the user's config, return Nothing
-- if that workspace does not exist in the config.
getWsIndex :: X (WorkspaceId -> Maybe Int)
getWsIndex = do
spaces <- asks (workspaces . config)
return $ flip elemIndex spaces
-- | Compare Maybe's differently, so Nothing (i.e. workspaces without indexes)
-- come last in the order
indexCompare :: Maybe Int -> Maybe Int -> Ordering
indexCompare Nothing Nothing = EQ
indexCompare Nothing (Just _) = GT
indexCompare (Just _) Nothing = LT
indexCompare a b = compare a b
| A comparison function for WorkspaceId , based on the index of the
-- tags in the user's config.
getWsCompare :: X WorkspaceCompare
getWsCompare = do
wsIndex <- getWsIndex
return $ mconcat [indexCompare `on` wsIndex, compare]
-- | A simple comparison function that orders workspaces
-- lexicographically by tag.
getWsCompareByTag :: X WorkspaceCompare
getWsCompareByTag = return compare
| A comparison function for Xinerama based on visibility , workspace
-- and screen id. It produces the same ordering as
' XMonad . Hooks . StatusBar . PP.pprWindowSetXinerama ' .
getXineramaWsCompare :: X WorkspaceCompare
getXineramaWsCompare = getXineramaPhysicalWsCompare $ screenComparatorById compare
-- | A comparison function like 'getXineramaWsCompare', but uses physical locations for screens.
getXineramaPhysicalWsCompare :: ScreenComparator -> X WorkspaceCompare
getXineramaPhysicalWsCompare (ScreenComparator sc) = do
w <- gets windowset
return $ \ a b -> case (isOnScreen a w, isOnScreen b w) of
(True, True) -> compareUsingScreen w a b
(False, False) -> compare a b
(True, False) -> LT
(False, True) -> GT
where
onScreen w = S.current w : S.visible w
isOnScreen a w = a `elem` map (S.tag . S.workspace) (onScreen w)
tagToScreen s x = fromJust $ find ((== x) . S.tag . S.workspace) s
compareUsingScreen w = sc `on` getScreenIdAndRectangle . tagToScreen (onScreen w)
-- | Create a workspace sorting function from a workspace comparison
-- function.
mkWsSort :: X WorkspaceCompare -> X WorkspaceSort
mkWsSort cmpX = do
cmp <- cmpX
return $ sortBy (\a b -> cmp (S.tag a) (S.tag b))
-- | Sort several workspaces according to their tags' indices in the
-- user's config.
getSortByIndex :: X WorkspaceSort
getSortByIndex = mkWsSort getWsCompare
-- | Sort workspaces lexicographically by tag.
getSortByTag :: X WorkspaceSort
getSortByTag = mkWsSort getWsCompareByTag
| Sort serveral workspaces for xinerama displays , in the same order
produced by ' XMonad . Hooks . StatusBar . PP.pprWindowSetXinerama ' : first
-- visible workspaces, sorted by screen, then hidden workspaces,
-- sorted by tag.
getSortByXineramaRule :: X WorkspaceSort
getSortByXineramaRule = mkWsSort getXineramaWsCompare
-- | Like 'getSortByXineramaRule', but allow you to use physical locations for screens.
getSortByXineramaPhysicalRule :: ScreenComparator -> X WorkspaceSort
getSortByXineramaPhysicalRule sc = mkWsSort $ getXineramaPhysicalWsCompare sc
| null | https://raw.githubusercontent.com/xmonad/xmonad-contrib/e5b5ce74b203085998b9ca6f59f23be387e4aef2/XMonad/Util/WorkspaceCompare.hs | haskell | ---------------------------------------------------------------------------
|
Description : Functions for examining, comparing, and sorting workspaces.
License : BSD3-style (see LICENSE)
Stability : unstable
Portability : unportable
---------------------------------------------------------------------------
| Transforms a workspace list by filtering out the workspaces that
correspond to the given 'tag's. Intended for use with 'logHook's (see
(see 'XMonad.Hooks.EwmhDesktops.addEwmhWorkspaceSort').
| Lookup the index of a workspace id in the user's config, return Nothing
if that workspace does not exist in the config.
| Compare Maybe's differently, so Nothing (i.e. workspaces without indexes)
come last in the order
tags in the user's config.
| A simple comparison function that orders workspaces
lexicographically by tag.
and screen id. It produces the same ordering as
| A comparison function like 'getXineramaWsCompare', but uses physical locations for screens.
| Create a workspace sorting function from a workspace comparison
function.
| Sort several workspaces according to their tags' indices in the
user's config.
| Sort workspaces lexicographically by tag.
visible workspaces, sorted by screen, then hidden workspaces,
sorted by tag.
| Like 'getSortByXineramaRule', but allow you to use physical locations for screens. | Module : XMonad . Util . WorkspaceCompare
Copyright : ( c ) < >
Maintainer : < >
module XMonad.Util.WorkspaceCompare ( WorkspaceCompare, WorkspaceSort
, filterOutWs
, getWsIndex
, getWsCompare
, getWsCompareByTag
, getXineramaPhysicalWsCompare
, getXineramaWsCompare
, mkWsSort
, getSortByIndex
, getSortByTag
, getSortByXineramaPhysicalRule
, getSortByXineramaRule ) where
import XMonad
import qualified XMonad.StackSet as S
import XMonad.Prelude
import XMonad.Actions.PhysicalScreens (ScreenComparator(ScreenComparator), getScreenIdAndRectangle, screenComparatorById)
type WorkspaceCompare = WorkspaceId -> WorkspaceId -> Ordering
type WorkspaceSort = [WindowSpace] -> [WindowSpace]
' XMonad . Hooks . StatusBar . PP.filterOutWsPP ' ) and " XMonad . Hooks . EwmhDesktops "
filterOutWs :: [WorkspaceId] -> WorkspaceSort
filterOutWs ws = filter (\S.Workspace{ S.tag = tag } -> tag `notElem` ws)
getWsIndex :: X (WorkspaceId -> Maybe Int)
getWsIndex = do
spaces <- asks (workspaces . config)
return $ flip elemIndex spaces
indexCompare :: Maybe Int -> Maybe Int -> Ordering
indexCompare Nothing Nothing = EQ
indexCompare Nothing (Just _) = GT
indexCompare (Just _) Nothing = LT
indexCompare a b = compare a b
| A comparison function for WorkspaceId , based on the index of the
getWsCompare :: X WorkspaceCompare
getWsCompare = do
wsIndex <- getWsIndex
return $ mconcat [indexCompare `on` wsIndex, compare]
getWsCompareByTag :: X WorkspaceCompare
getWsCompareByTag = return compare
| A comparison function for Xinerama based on visibility , workspace
' XMonad . Hooks . StatusBar . PP.pprWindowSetXinerama ' .
getXineramaWsCompare :: X WorkspaceCompare
getXineramaWsCompare = getXineramaPhysicalWsCompare $ screenComparatorById compare
getXineramaPhysicalWsCompare :: ScreenComparator -> X WorkspaceCompare
getXineramaPhysicalWsCompare (ScreenComparator sc) = do
w <- gets windowset
return $ \ a b -> case (isOnScreen a w, isOnScreen b w) of
(True, True) -> compareUsingScreen w a b
(False, False) -> compare a b
(True, False) -> LT
(False, True) -> GT
where
onScreen w = S.current w : S.visible w
isOnScreen a w = a `elem` map (S.tag . S.workspace) (onScreen w)
tagToScreen s x = fromJust $ find ((== x) . S.tag . S.workspace) s
compareUsingScreen w = sc `on` getScreenIdAndRectangle . tagToScreen (onScreen w)
mkWsSort :: X WorkspaceCompare -> X WorkspaceSort
mkWsSort cmpX = do
cmp <- cmpX
return $ sortBy (\a b -> cmp (S.tag a) (S.tag b))
getSortByIndex :: X WorkspaceSort
getSortByIndex = mkWsSort getWsCompare
getSortByTag :: X WorkspaceSort
getSortByTag = mkWsSort getWsCompareByTag
| Sort serveral workspaces for xinerama displays , in the same order
produced by ' XMonad . Hooks . StatusBar . PP.pprWindowSetXinerama ' : first
getSortByXineramaRule :: X WorkspaceSort
getSortByXineramaRule = mkWsSort getXineramaWsCompare
getSortByXineramaPhysicalRule :: ScreenComparator -> X WorkspaceSort
getSortByXineramaPhysicalRule sc = mkWsSort $ getXineramaPhysicalWsCompare sc
|
0e2bde5befc1905759b4931c22b8a7b6b0c1b5a0ed32d1210fe9b95ed2344e0c | fukamachi/clozure-cl | l0-misc.lisp | -*- Mode : Lisp ; Package : CCL -*-
;;;
Copyright ( C ) 2009 Clozure Associates
Copyright ( C ) 1994 - 2001 Digitool , Inc
This file is part of Clozure CL .
;;;
Clozure CL is licensed under the terms of the Lisp Lesser GNU Public
License , known as the LLGPL and distributed with Clozure CL as the
;;; file "LICENSE". The LLGPL consists of a preamble and the LGPL,
which is distributed with Clozure CL as the file " LGPL " . Where these
;;; conflict, the preamble takes precedence.
;;;
;;; Clozure CL is referenced in the preamble as the "LIBRARY."
;;;
;;; The LLGPL is also available online at
;;;
(in-package "CCL")
;;; Bootstrapping for futexes
#+(and linux-target no (or x86-target arm-target))
(eval-when (:compile-toplevel :execute)
(pushnew :futex *features*))
#+futex
(eval-when (:compile-toplevel :execute)
;; We only need a few constants from <linux/futex.h>, which may
;; not have been included in the :libc .cdb files.
(defconstant FUTEX-WAIT 0)
(defconstant FUTEX-WAKE 1)
(defconstant futex-avail 0)
(defconstant futex-locked 1)
(defconstant futex-contended 2)
(declaim (inline %lock-futex %unlock-futex)))
;;; Miscellany.
(defun memq (item list)
(do* ((tail list (%cdr tail)))
((null tail))
(if (eq item (car tail))
(return tail))))
(defun %copy-u8-to-string (u8-vector source-idx string dest-idx n)
(declare (optimize (speed 3) (safety 0))
(fixnum source-idx dest-idx n)
(type (simple-array (unsigned-byte 8) (*)) u8-vector)
(simple-base-string string))
(do* ((i 0 (1+ i)))
((= i n) string)
(declare (fixnum i))
(setf (%scharcode string dest-idx) (aref u8-vector source-idx))
(incf source-idx)
(incf dest-idx)))
(defun %copy-string-to-u8 (string source-idx u8-vector dest-idx n)
(declare (optimize (speed 3) (safety 0))
(fixnum source-idx dest-idx n)
(type (simple-array (unsigned-byte 8) (*)) u8-vector)
(simple-base-string string))
(do* ((i 0 (1+ i)))
((= i n) u8-vector)
(declare (fixnum i))
(let* ((code (%scharcode string source-idx)))
(declare (type (mod #x11000) code))
(if (> code #xff)
(setq code (char-code #\Sub)))
(setf (aref u8-vector dest-idx) code)
(incf source-idx)
(incf dest-idx))))
(defun append-2 (y z)
(if (null y)
z
(let* ((new (cons (car y) nil))
(tail new))
(declare (list new tail))
(dolist (head (cdr y))
(setq tail (cdr (rplacd tail (cons head nil)))))
(rplacd tail z)
new)))
(defun dbg (&optional arg)
(dbg arg))
; This takes a simple-base-string and passes a C string into
the kernel " Bug " routine . Not too fancy , but neither is # _ DebugStr ,
; and there's a better chance that users would see this message.
(defun bug (arg)
(if (typep arg 'simple-base-string)
#+x86-target
(debug-trap-with-string arg)
#-x86-target
(let* ((len (length arg)))
(%stack-block ((buf (1+ len)))
(%cstr-pointer arg buf)
(ff-call
(%kernel-import target::kernel-import-lisp-bug)
:address buf
:void)))
(bug "Bug called with non-simple-base-string.")))
(defun total-bytes-allocated ()
(%heap-bytes-allocated)
#+not-any-more
(+ (unsignedwide->integer *total-bytes-freed*)
(%heap-bytes-allocated)))
(defun %freebytes ()
(with-macptrs (p)
(%setf-macptr-to-object p
(%fixnum-ref (%get-kernel-global 'all-areas)
target::area.succ))
(- (%get-natural p target::area.high)
(%get-natural p target::area.active))))
(defun %reservedbytes ()
(with-macptrs (p)
(%setf-macptr-to-object p (%get-kernel-global 'all-areas))
(- #+32-bit-target
(%get-unsigned-long p target::area.high)
#+64-bit-target
(%%get-unsigned-longlong p target::area.high)
#+32-bit-target
(%get-unsigned-long p target::area.low)
#+64-bit-target
(%%get-unsigned-longlong p target::area.low))))
(defun object-in-application-heap-p (address)
(declare (ignore address))
t)
(defun frozen-space-dnodes ()
"Returns the current size of the frozen area."
(%fixnum-ref-natural (%get-kernel-global 'tenured-area)
target::area.static-dnodes))
(defun %usedbytes ()
(with-lock-grabbed (*kernel-exception-lock*)
(with-lock-grabbed (*kernel-tcr-area-lock*)
(%normalize-areas)
(let ((static 0)
(dynamic 0)
(library 0))
(do-consing-areas (area)
(let* ((active (%fixnum-ref area target::area.active))
(bytes (ash (- active
(%fixnum-ref area target::area.low))
target::fixnumshift))
(code (%fixnum-ref area target::area.code)))
(when (object-in-application-heap-p active)
(if (eql code area-dynamic)
(incf dynamic bytes)
(if (eql code area-managed-static)
(incf library bytes)
(incf static bytes))))))
(let* ((frozen-size (ash (frozen-space-dnodes) target::dnode-shift)))
(decf dynamic frozen-size)
(values dynamic static library frozen-size))))))
(defun %stack-space ()
(%normalize-areas)
(let ((free 0)
(used 0))
(with-macptrs (p)
(do-gc-areas (area)
(when (member (%fixnum-ref area target::area.code)
'(#.area-vstack
#.area-cstack
#.area-tstack))
(%setf-macptr-to-object p area)
(let ((active
#+32-bit-target
(%get-unsigned-long p target::area.active)
#+64-bit-target
(%%get-unsigned-longlong p target::area.active))
(high
#+32-bit-target
(%get-unsigned-long p target::area.high)
#+64-bit-target
(%%get-unsigned-longlong p target::area.high))
(low
#+32-bit-target
(%get-unsigned-long p target::area.low)
#+64-bit-target
(%%get-unsigned-longlong p target::area.low)))
(incf used (- high active))
(incf free (- active low))))))
(values (+ free used) used free)))
; Returns an alist of the form:
; ((thread cstack-free cstack-used vstack-free vstack-used tstack-free tstack-used)
; ...)
(defun %stack-space-by-lisp-thread ()
(let* ((res nil))
(without-interrupts
(dolist (p (all-processes))
(let* ((thread (process-thread p)))
(when thread
(push (cons thread (multiple-value-list (%thread-stack-space thread))) res)))))
res))
Returns six values on most platforms , 4 on ARM .
;;; sp free
;;; sp used
free
used
;;; tsp free (not on ARM)
;;; tsp used (not on ARM)
(defun %thread-stack-space (&optional (thread *current-lisp-thread*))
(when (eq thread *current-lisp-thread*)
(%normalize-areas))
(labels ((free-and-used (area)
(with-macptrs (p)
(%setf-macptr-to-object p area)
(let* ((low
#+32-bit-target
(%get-unsigned-long p target::area.low)
#+64-bit-target
(%%get-unsigned-longlong p target::area.low))
(high
#+32-bit-target
(%get-unsigned-long p target::area.high)
#+64-bit-target
(%%get-unsigned-longlong p target::area.high))
(active
#+32-bit-target
(%get-unsigned-long p target::area.active)
#+64-bit-target
(%%get-unsigned-longlong p target::area.active))
(free (- active low))
(used (- high active)))
(loop
(setq area (%fixnum-ref area target::area.older))
(when (eql area 0) (return))
(%setf-macptr-to-object p area)
(let ((low
#+32-bit-target
(%get-unsigned-long p target::area.low)
#+64-bit-target
(%%get-unsigned-longlong p target::area.low))
(high
#+32-bit-target
(%get-unsigned-long p target::area.high)
#+64-bit-target
(%%get-unsigned-longlong p target::area.high)))
(declare (fixnum low high))
(incf used (- high low))))
(values free used)))))
(let* ((tcr (lisp-thread.tcr thread))
(cs-area #+(and windows-target x8632-target)
(%fixnum-ref (%fixnum-ref tcr (- target::tcr.aux
target::tcr-bias))
target::tcr-aux.cs-area)
#-(and windows-target x8632-target)
(%fixnum-ref tcr target::tcr.cs-area)))
(if (or (null tcr)
(zerop (%fixnum-ref cs-area)))
(values 0 0 0 0 0 0)
(multiple-value-bind (cf cu) (free-and-used cs-area)
(multiple-value-bind (vf vu)
(free-and-used (%fixnum-ref tcr (- target::tcr.vs-area
target::tcr-bias)))
#+arm-target
(values cf cu vf vu)
#-arm-target
(multiple-value-bind (tf tu)
(free-and-used (%fixnum-ref tcr (- target::tcr.ts-area
target::tcr-bias)))
(values cf cu vf vu tf tu))))))))
(defun room (&optional (verbose :default))
  "Print to *STANDARD-OUTPUT* information about the state of internal
storage and its management. The optional argument controls the
verbosity of output. If it is T, ROOM prints out a maximal amount of
information. If it is NIL, ROOM prints out a minimal amount of
information. If it is :DEFAULT or it is not supplied, ROOM prints out
an intermediate amount of information."
  (let* ((freebytes nil)
         (usedbytes nil)
         (static-used nil)
         (staticlib-used nil)
         (frozen-space-size nil)
         (lispheap nil)
         (reserved nil)
         (static nil)
         (stack-total)
         (stack-used)
         (stack-free)
         (static-cons-reserved nil)
         (stack-used-by-thread nil))
    ;; Collect all statistics first via the kernel/GC accessors
    ;; (%freebytes, %usedbytes, %reservedbytes, %stack-space); the
    ;; per-thread stack breakdown is only gathered for fully-verbose output.
    (progn
      (progn
        (setq freebytes (%freebytes))
        (when verbose
          (multiple-value-setq (usedbytes static-used staticlib-used frozen-space-size)
            (%usedbytes))
          (setq lispheap (+ freebytes usedbytes)
                reserved (%reservedbytes)
                static (+ static-used staticlib-used frozen-space-size))
          (multiple-value-setq (stack-total stack-used stack-free)
            (%stack-space))
          (unless (eq verbose :default)
            (setq stack-used-by-thread (%stack-space-by-lisp-thread))))))
    (format t "~&Approximately ~:D bytes of memory can be allocated ~%before the next full GC is triggered. ~%" freebytes)
    (when verbose
      ;; K converts a byte count to (rounded) kilobytes for display.
      (flet ((k (n) (round n 1024)))
        (princ "
Total Size Free Used")
        (format t "~&Lisp Heap:~15t~10D (~DK)~35t~10D (~DK)~55t~10D (~DK)"
                lispheap (k lispheap)
                freebytes (k freebytes)
                usedbytes (k usedbytes))
        (format t "~&Stacks:~15t~10D (~DK)~35t~10D (~DK)~55t~10D (~DK)"
                stack-total (k stack-total)
                stack-free (k stack-free)
                stack-used (k stack-used))
        (format t "~&Static:~15t~10D (~DK)~35t~10D (~DK)~55t~10D (~DK)"
                static (k static)
                0 0
                static (k static))
        ;; Break "frozen" (static-ized dynamic) memory into the part
        ;; reserved for static conses and the rest.
        (when (and frozen-space-size (not (zerop frozen-space-size)))
          (setq static-cons-reserved (ash (reserved-static-conses) target::dnode-shift)
                frozen-space-size (- frozen-space-size static-cons-reserved))
          (unless (zerop static-cons-reserved)
            (format t "~&~,3f MB of reserved static conses (~d free, ~d reserved)"
                    (/ static-cons-reserved (float (ash 1 20)))
                    (free-static-conses)
                    (reserved-static-conses)))
          (unless (zerop frozen-space-size)
            (format t "~&~,3f MB of static memory is \"frozen\" dynamic memory"
                    (/ frozen-space-size (float (ash 1 20))))))
        (format t "~&~,3f MB reserved for heap expansion."
                (/ reserved (float (ash 1 20))))
        ;; Fully-verbose mode: per-thread control/value (and, except on ARM,
        ;; temp) stack usage, matched to processes by their thread object.
        (unless (eq verbose :default)
          (terpri)
          (let* ((processes (all-processes)))
            (dolist (thread-info stack-used-by-thread)
              (destructuring-bind (thread sp-free sp-used vsp-free vsp-used #-arm-target tsp-free #-arm-target tsp-used)
                  thread-info
                (let* ((process (dolist (p processes)
                                  (when (eq (process-thread p) thread)
                                    (return p)))))
                  (when process
                    (let ((sp-total (+ sp-used sp-free))
                          (vsp-total (+ vsp-used vsp-free))
                          #-arm-target
                          (tsp-total (+ tsp-used tsp-free)))
                      (format t "~%~a(~d)~% cstack:~12T~10D (~DK) ~33T~10D (~DK) ~54T~10D (~DK)~
~% vstack:~12T~10D (~DK) ~33T~10D (~DK) ~54T~10D (~DK)"
                              (process-name process)
                              (process-serial-number process)
                              sp-total (k sp-total) sp-free (k sp-free) sp-used (k sp-used)
                              vsp-total (k vsp-total) vsp-free (k vsp-free) vsp-used (k vsp-used))
                      #-arm-target
                      (format t
                              "~% tstack:~12T~10D (~DK) ~33T~10D (~DK) ~54T~10D (~DK)"
                              tsp-total (k tsp-total) tsp-free (k tsp-free) tsp-used (k tsp-used)))))))))))))
(defun list-length (l)
  "Return the length of the given LIST, or NIL if the LIST is circular."
  ;; Tortoise-and-hare traversal: FAST advances two conses per step,
  ;; SLOW one; if FAST ever catches SLOW the list is circular.
  (let* ((fast l)
         (slow l)
         (n 0))
    (declare (fixnum n))
    (loop
      (when (null fast)
        (return n))
      (when (null (cdr fast))
        (return (the fixnum (1+ n))))
      (setq fast (cddr fast)
            slow (cdr slow))
      (incf n 2)
      (when (and (eq fast slow) (> n 0))
        (return nil)))))
(defun proper-list-p (l)
  ;; True iff L is a proper (finite, NIL-terminated) list.  Uses the same
  ;; two-pointer cycle detection as LIST-LENGTH; dotted tails and circular
  ;; structure both yield NIL.
  (when (typep l 'list)
    (let* ((fast l)
           (slow l)
           (n 0))
      (declare (fixnum n))
      (loop
        (when (null fast)
          (return n))
        (when (atom fast)
          (return nil))
        (when (null (cdr fast))
          (return t))
        (when (and (eq fast slow) (> n 0))
          (return nil))
        ;; Only step FAST by two when both cells are conses; a non-list
        ;; tail means the list is improper.
        (setq fast (if (and (listp fast) (listp (cdr fast)))
                     (cddr fast)
                     (return-from proper-list-p nil))
              slow (cdr slow))
        (incf n 2)))))
(defun proper-sequence-p (x)
  ;; A vector is always a proper sequence; a list is proper iff
  ;; LIST-LENGTH can measure it (i.e. it is not circular).
  (if (typep x 'vector)
    t
    (when (typep x 'list)
      (not (null (list-length x))))))
(defun length (seq)
  "Return an integer that is the length of SEQUENCE."
  (seq-dispatch
   seq
   ;; Lists: LIST-LENGTH returns NIL for circular lists; improper/circular
   ;; lists are reported through the kernel error dispatcher.
   (or (list-length seq)
       (%err-disp $XIMPROPERLIST seq))
   ;; Vectors: a vector-header (subtag-vectorH) stores its logical size in
   ;; a header cell (presumably the fill-pointer/displacement case --
   ;; confirm against the vectorH layout); other vectors use the raw
   ;; uvector size.
   (if (= (the fixnum (typecode seq)) target::subtag-vectorH)
     (%svref seq target::vectorH.logsize-cell)
     (uvsize seq))))
(defun %str-from-ptr (pointer len &optional (dest (make-string len)))
  "Copy LEN bytes starting at POINTER into DEST as character codes;
return DEST (a fresh string of length LEN unless one is supplied)."
  (declare (fixnum len)
           (optimize (speed 3) (safety 0)))
  (do* ((idx 0 (1+ idx)))
       ((= idx len) dest)
    (declare (fixnum idx))
    (setf (%scharcode dest idx) (%get-unsigned-byte pointer idx))))
(defun %get-cstring (pointer)
  "Return a lisp string containing the bytes of the NUL-terminated
C string at POINTER (one character per byte)."
  (let* ((end 0))
    (declare (fixnum end))
    (loop
      (when (zerop (the (unsigned-byte 8) (%get-unsigned-byte pointer end)))
        (return (%str-from-ptr pointer end)))
      (incf end))))
(defun %get-utf-8-cstring (pointer)
  ;; Return a lisp string decoded from the NUL-terminated, UTF-8-encoded
  ;; C string at POINTER: scan for the terminating zero byte, compute the
  ;; decoded character count, then decode into a fresh string.
  (do* ((end 0 (1+ end)))
       ((zerop (the (unsigned-byte 8) (%get-unsigned-byte pointer end)))
        (let* ((len (utf-8-length-of-memory-encoding pointer end 0))
               (string (make-string len)))
          (utf-8-memory-decode pointer end 0 string)
          string))
    (declare (fixnum end))))
;;; Assumes that pointer is terminated by a 0-valued 16-bit word
;;; and that it points to a valid utf-16 string with native endianness.
(defun %get-native-utf-16-cstring (pointer)
  ;; First pass: count characters up to the zero-valued 16-bit terminator
  ;; (a surrogate pair counts as one character -- see the extra INCF of I
  ;; in the loop body).  Second pass, in the exit form: allocate the
  ;; string and decode, combining high surrogates (#xd800-#xdbff) with
  ;; the following code unit.
  (do* ((nchars 0 (1+ nchars))
        (i 0 (+ i 2))
        (code (%get-unsigned-word pointer i) (%get-unsigned-word pointer i)))
       ((zerop code)
        (do* ((string (make-string nchars))
              (out 0 (1+ out))
              (i 0 (+ i 2)))
             ((= out nchars) string)
          (declare (fixnum i out))
          (let* ((code (%get-unsigned-word pointer i)))
            (declare (type (unsigned-byte 16) code))
            (cond ((and (>= code #xd800)
                        (< code #xdc00))
                   (incf i 2)
                   (let* ((code2 (%get-unsigned-word pointer i)))
                     (declare (type (unsigned-byte 16) code2))
                     (setf (schar string out)
                           (utf-16-combine-surrogate-pairs code code2))))
                  (t (setf (schar string out) (code-char code)))))))
    ;; A high surrogate consumes two 16-bit units but yields one character.
    (when (and (>= code #xd800) (< code #xdc00))
      (incf i 2))))
;;; This is mostly here so we can bootstrap shared libs without
;;; having to bootstrap #_strcmp.
;;; Return true if the cstrings are equal, false otherwise.
(defun %cstrcmp (x y)
  "Return T if the NUL-terminated C strings at X and Y are equal,
NIL otherwise.  (Exists so shared libraries can be bootstrapped
without #_strcmp.)"
  (do* ((i 0 (1+ i)))
       (nil)
    (declare (fixnum i))
    (let* ((bx (%get-byte x i))
           (by (%get-byte y i)))
      (declare (fixnum bx by))
      ;; First mismatch: unequal.  Matching NUL: both strings ended equal.
      (unless (= bx by)
        (return nil))
      (when (zerop bx)
        (return t)))))
(defun %cnstrcmp (x y n)
  "Return T if the first N bytes at X and Y are equal, NIL otherwise."
  (declare (fixnum n))
  (dotimes (i n t)
    (declare (fixnum i))
    (unless (= (the fixnum (%get-byte x i))
               (the fixnum (%get-byte y i)))
      (return nil))))
;; Weak EQ hash table mapping an object to an alist of
;; (doc-id . doc-string) entries, and the lock that guards all access.
(defvar %documentation nil)
(defvar %documentation-lock% nil)
(setq %documentation
      (make-hash-table :weak t :size 100 :test 'eq :rehash-threshold .95)
      %documentation-lock% (make-lock))
(defun %put-documentation (thing doc-id doc)
  ;; Record DOC (a string) as THING's documentation of kind DOC-ID, or
  ;; remove any existing entry when DOC is NIL.  Returns DOC.
  (with-lock-grabbed (%documentation-lock%)
    (let* ((info (gethash thing %documentation))
           (pair (assoc doc-id info)))
      (if doc
        (progn
          (unless (typep doc 'string)
            (report-bad-arg doc 'string))
          ;; Update the existing (doc-id . doc) pair in place, or push a
          ;; new one onto THING's alist.
          (if pair
            (setf (cdr pair) doc)
            (setf (gethash thing %documentation) (cons (cons doc-id doc) info))))
        ;; DOC is NIL: drop the entry, removing the hash entry entirely
        ;; when the alist becomes empty.
        (when pair
          (if (setq info (nremove pair info))
            (setf (gethash thing %documentation) info)
            (remhash thing %documentation))))))
  doc)
(defun %get-documentation (object doc-id)
  "Return the documentation string recorded for OBJECT under DOC-ID,
or NIL if there is none."
  (let* ((entries (gethash object %documentation)))
    (cdr (assoc doc-id entries))))
;;; This pretends to be (SETF DOCUMENTATION), until that generic function
;;; is defined. It handles a few common cases.
(defun %set-documentation (thing doc-id doc-string)
  ;; Stand-in for (SETF DOCUMENTATION) until that generic function is
  ;; defined; handles the FUNCTION and VARIABLE doc kinds specially and
  ;; stores anything else directly.  Returns DOC-STRING.
  (case doc-id
    (function
     ;; Functions are documented under the key T.  For a symbol, document
     ;; its current definition (if any); for a (SETF name) function name,
     ;; recurse on the internal setf function name.
     (if (typep thing 'function)
       (%put-documentation thing t doc-string)
       (if (typep thing 'symbol)
         (let* ((def (fboundp thing)))
           (if def
             (%put-documentation def t doc-string)))
         (if (setf-function-name-p thing)
           (%set-documentation
            (setf-function-name thing) doc-id doc-string)))))
    (variable
     (if (typep thing 'symbol)
       (%put-documentation thing doc-id doc-string)))
    (t (%put-documentation thing doc-id doc-string)))
  doc-string)
(%fhave 'set-documentation #'%set-documentation)
;;; This is intended for use by debugging tools. It's a horrible thing
;;; to do otherwise. The caller really needs to hold the heap-segment
;;; lock; this grabs the tcr queue lock as well.
;; Number of compare-and-swap attempts per round in %GET-SPIN-LOCK, and a
;; running count of rounds that failed and had to yield (diagnostics).
(defparameter *spin-lock-tries* 1)
(defparameter *spin-lock-timeouts* 0)
#+(and (not futex) (not x86-target))
(defun %get-spin-lock (p)
  ;; Acquire the spinlock word at P: spin trying to CAS it from 0 to the
  ;; current TCR; after *SPIN-LOCK-TRIES* failed attempts, atomically bump
  ;; the timeout counter and yield the CPU before retrying.
  (let* ((self (%current-tcr))
         (n *spin-lock-tries*))
    (declare (fixnum n))
    (loop
      (dotimes (i n)
        (when (eql 0 (%ptr-store-fixnum-conditional p 0 self))
          (return-from %get-spin-lock t)))
      (%atomic-incf-node 1 '*spin-lock-timeouts* target::symbol.vcell)
      (yield))))
;; Ask the compiler to inline these small lock helpers.
(eval-when (:compile-toplevel :execute)
  (declaim (inline note-lock-wait note-lock-held note-lock-released)))
(eval-when (:compile-toplevel)
  (declaim (inline %lock-recursive-lock-ptr %unlock-recursive-lock-ptr)))
#-futex
(defun %lock-recursive-lock-ptr (ptr lock flag)
  ;; Acquire the recursive lock at PTR for the current thread, blocking
  ;; on the lock's semaphore if necessary.  FLAG, if non-NIL, must be a
  ;; LOCK-ACQUISITION istruct whose STATUS is set to T once the lock is
  ;; held.  Returns T.
  (with-macptrs ((p)
                 (owner (%get-ptr ptr target::lockptr.owner))
                 (signal (%get-ptr ptr target::lockptr.signal))
                 (spin (%inc-ptr ptr target::lockptr.spinlock)))
    (%setf-macptr-to-object p (%current-tcr))
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (loop
      (without-interrupts
       ;; Recursive case: we already own the lock; just bump the count.
       (when (eql p owner)
         (incf (%get-natural ptr target::lockptr.count))
         (when flag
           (setf (lock-acquisition.status flag) t))
         (return t))
       (%get-spin-lock spin)
       ;; AVAIL going from 0 to 1 means the lock was free and is now ours.
       (when (eql 1 (incf (%get-natural ptr target::lockptr.avail)))
         (setf (%get-ptr ptr target::lockptr.owner) p
               (%get-natural ptr target::lockptr.count) 1)
         (setf (%get-natural spin 0) 0)
         (if flag
           (setf (lock-acquisition.status flag) t))
         (return t))
       (setf (%get-natural spin 0) 0))
      ;; Lock was busy: wait (interruptibly) on its semaphore, then retry.
      (%process-wait-on-semaphore-ptr signal 1 0 (recursive-lock-whostate lock)))))
#+futex
(defun %lock-recursive-lock-ptr (ptr lock flag)
  ;; Futex-based variant: if the current thread already owns the lock,
  ;; just increment the recursion count; otherwise acquire the futex,
  ;; record ownership and set the count to 1.  Returns T.
  (if (istruct-typep flag 'lock-acquisition)
    (setf (lock-acquisition.status flag) nil)
    (if flag (report-bad-arg flag 'lock-acquisition)))
  (let* ((self (%current-tcr))
         (level *interrupt-level*))
    (declare (fixnum self))
    (without-interrupts
     (cond ((eql self (%get-object ptr target::lockptr.owner))
            (incf (%get-natural ptr target::lockptr.count)))
           (t (%lock-futex ptr level lock #'recursive-lock-whostate)
              (%set-object ptr target::lockptr.owner self)
              (setf (%get-natural ptr target::lockptr.count) 1)))
     (when flag
       (setf (lock-acquisition.status flag) t))
     t)))
;; Lock a RECURSIVE-LOCK object via its underlying lock pointer.
(defun %lock-recursive-lock-object (lock &optional flag)
  (%lock-recursive-lock-ptr (recursive-lock-ptr lock) lock flag))
#+futex
(progn
  ;; FUTEX-WAIT: block in the kernel (via the lisp-futex kernel import)
  ;; until the word at P no longer contains VAL, displaying WHOSTATE while
  ;; waiting.  The #+monitor-futex-wait variant additionally counts calls
  ;; and accumulates elapsed real time for instrumentation.
  #-monitor-futex-wait
  (defun futex-wait (p val whostate)
    (with-process-whostate (whostate)
      (int-errno-ffcall
       (%kernel-import target::kernel-import-lisp-futex)
       :address p :int FUTEX-WAIT :int val :address (%null-ptr) :address (%null-ptr) :int 0 :int)))
  #+monitor-futex-wait
  (progn
    (defparameter *total-futex-wait-calls* 0)
    (defparameter *total-futex-wait-times* 0)
    (defun futex-wait (p val whostate)
      (with-process-whostate (whostate)
        (let* ((start (get-internal-real-time)))
          (incf *total-futex-wait-calls*)
          (int-errno-ffcall
           (%kernel-import target::kernel-import-lisp-futex)
           :address p :int FUTEX-WAIT :int val :address (%null-ptr) :address (%null-ptr) :int 0 :int)
          (incf *total-futex-wait-times* (- (get-internal-real-time) start)))))))
;; Wake up to N threads blocked in FUTEX-WAIT on the word at P.
#+futex
(defun futex-wake (p n)
  (int-errno-ffcall (%kernel-import target::kernel-import-lisp-futex)
                    :address p :int FUTEX-WAKE :int n :address (%null-ptr) :address (%null-ptr) :int 0 :int))
#+futex
(defun %lock-futex (p wait-level lock fwhostate)
  ;; Three-state futex mutex: FUTEX-AVAIL = free, FUTEX-LOCKED =
  ;; held/uncontended, FUTEX-CONTENDED = held with waiters.  Fast path:
  ;; CAS avail->locked.  Slow path: mark the lock contended and wait in
  ;; the kernel until it can be taken, waiting at interrupt-level
  ;; WAIT-LEVEL and showing FWHOSTATE applied to LOCK (if supplied).
  (let* ((val (%ptr-store-conditional p futex-avail futex-locked)))
    (declare (fixnum val))
    (or (eql val futex-avail)
        (loop
          (if (eql val futex-contended)
            (let* ((*interrupt-level* wait-level))
              (futex-wait p val (if fwhostate (funcall fwhostate lock) "futex wait")))
            (setq val futex-contended))
          ;; Swap in the "contended" state; if the previous value was
          ;; "available", we now own the lock.
          (when (eql futex-avail (xchgl val p))
            (return t))))))
#+futex
(defun %unlock-futex (p)
  ;; Atomically decrement the lock word; if it didn't return to
  ;; FUTEX-AVAIL the lock was contended, so force it available and wake
  ;; all waiters.
  (unless (eql futex-avail (%atomic-decf-ptr p))
    (setf (%get-natural p target::lockptr.avail) futex-avail)
    (futex-wake p #$INT_MAX)))
#-futex
(defun %try-recursive-lock-object (lock &optional flag)
  ;; Non-blocking acquisition attempt: return T (recording ownership and
  ;; setting FLAG's status) if the lock was free or already owned by the
  ;; calling thread, NIL otherwise.
  (let* ((ptr (recursive-lock-ptr lock)))
    (with-macptrs ((p)
                   (owner (%get-ptr ptr target::lockptr.owner))
                   (spin (%inc-ptr ptr target::lockptr.spinlock)))
      (%setf-macptr-to-object p (%current-tcr))
      (if flag
        (if (istruct-typep flag 'lock-acquisition)
          (setf (lock-acquisition.status flag) nil)
          (report-bad-arg flag 'lock-acquisition)))
      (without-interrupts
       (cond ((eql p owner)
              ;; Already ours: bump the recursion count.
              (incf (%get-natural ptr target::lockptr.count))
              (if flag (setf (lock-acquisition.status flag) t))
              t)
             (t
              (let* ((win nil))
                (%get-spin-lock spin)
                ;; AVAIL going 0->1 means we took the lock.
                (when (setq win (eql 1 (incf (%get-natural ptr target::lockptr.avail))))
                  (setf (%get-ptr ptr target::lockptr.owner) p
                        (%get-natural ptr target::lockptr.count) 1)
                  (if flag (setf (lock-acquisition.status flag) t)))
                (setf (%get-ptr spin) (%null-ptr))
                win)))))))
#+futex
(defun %try-recursive-lock-object (lock &optional flag)
  ;; Futex-based non-blocking acquisition: succeed if the caller already
  ;; owns the lock, or if the FUTEX-AVAIL -> FUTEX-LOCKED compare-and-swap
  ;; succeeds.  Returns T on success, NIL otherwise.
  (let* ((self (%current-tcr))
         (ptr (recursive-lock-ptr lock)))
    (declare (fixnum self))
    (if flag
      (if (istruct-typep flag 'lock-acquisition)
        (setf (lock-acquisition.status flag) nil)
        (report-bad-arg flag 'lock-acquisition)))
    (without-interrupts
     (cond ((eql (%get-object ptr target::lockptr.owner) self)
            (incf (%get-natural ptr target::lockptr.count))
            (if flag (setf (lock-acquisition.status flag) t))
            t)
           (t
            ;; CAS returns the previous value; 0 (= futex-avail) means we
            ;; won the lock.
            (when (eql 0 (%ptr-store-conditional ptr futex-avail futex-locked))
              (%set-object ptr target::lockptr.owner self)
              (setf (%get-natural ptr target::lockptr.count) 1)
              (if flag (setf (lock-acquisition.status flag) t))
              t))))))
#-futex
(defun %unlock-recursive-lock-ptr (ptr lock)
  ;; Release one level of the recursive lock; error if the calling thread
  ;; is not the owner.  When the count reaches 0 the lock is freed and a
  ;; semaphore signal is sent for every pending acquirer (the AVAIL
  ;; overcount plus the registered waiters).  Returns NIL.
  (with-macptrs ((signal (%get-ptr ptr target::lockptr.signal))
                 (spin (%inc-ptr ptr target::lockptr.spinlock)))
    (unless (eql (%get-object ptr target::lockptr.owner) (%current-tcr))
      (error 'not-lock-owner :lock lock))
    (without-interrupts
     (when (eql 0 (decf (the fixnum
                          (%get-natural ptr target::lockptr.count))))
       (%get-spin-lock spin)
       (setf (%get-ptr ptr target::lockptr.owner) (%null-ptr))
       (let* ((pending (+ (the fixnum
                            (1- (the fixnum (%get-fixnum ptr target::lockptr.avail))))
                          (the fixnum (%get-fixnum ptr target::lockptr.waiting)))))
         (declare (fixnum pending))
         (setf (%get-natural ptr target::lockptr.avail) 0
               (%get-natural ptr target::lockptr.waiting) 0)
         (setf (%get-ptr spin) (%null-ptr))
         ;; One wakeup per pending acquisition attempt.
         (dotimes (i pending)
           (%signal-semaphore-ptr signal)))))
    nil))
#+futex
(defun %unlock-recursive-lock-ptr (ptr lock)
  ;; Futex variant: error unless owned by the caller; on the final unlock
  ;; clear the owner and release the futex (which wakes any waiters).
  ;; Returns NIL.
  (unless (eql (%get-object ptr target::lockptr.owner) (%current-tcr))
    (error 'not-lock-owner :lock lock))
  (without-interrupts
   (when (eql 0 (decf (the fixnum
                        (%get-natural ptr target::lockptr.count))))
     (setf (%get-natural ptr target::lockptr.owner) 0)
     (%unlock-futex ptr)))
  nil)
;; Unlock a RECURSIVE-LOCK object via its underlying lock pointer.
(defun %unlock-recursive-lock-object (lock)
  (%unlock-recursive-lock-ptr (%svref lock target::lock._value-cell) lock))
(defun %%lock-owner (lock)
  "Intended for debugging only; ownership may change while this code
is running."
  ;; Map the owning TCR (0 means unowned) to its lisp process, if any.
  (let* ((tcr (%get-object (recursive-lock-ptr lock) target::lockptr.owner)))
    (unless (zerop tcr)
      (tcr->process tcr))))
;; Conditionally store NEW in the CAR of CONS-CELL if it currently holds OLD.
(defun %rplaca-conditional (cons-cell old new)
  (%store-node-conditional target::cons.car cons-cell old new))
;; Conditionally store NEW in the CDR of CONS-CELL if it currently holds OLD.
(defun %rplacd-conditional (cons-cell old new)
  (%store-node-conditional target::cons.cdr cons-cell old new))
;;; Atomically push NEW onto the list in the I'th cell of uvector V.
(defun atomic-push-uvector-cell (v i new)
  ;; Lock-free push: build the new cons once, then retry the conditional
  ;; store until no other thread has changed the cell between the read
  ;; and the store.  Returns the new cons cell.
  (let* ((cell (cons new nil))
         (offset (+ target::misc-data-offset (ash i target::word-shift))))
    (loop
      (let* ((old (%svref v i)))
        (rplacd cell old)
        (when (%store-node-conditional offset v old cell)
          (return cell))))))
(defun atomic-pop-uvector-cell (v i)
  ;; Atomically pop the list stored in the I'th cell of uvector V.
  ;; Returns (values element t) on success, (values nil nil) when the
  ;; list is empty.
  (let* ((offset (+ target::misc-data-offset (ash i target::word-shift))))
    (loop
      (let* ((old (%svref v i)))
        (if (null old)
          (return (values nil nil))
          (let* ((tail (cdr old)))
            (when (%store-node-conditional offset v old tail)
              (return (values (car old) t)))))))))
(defun store-gvector-conditional (index gvector old new)
  ;; Compare-and-swap on GVECTOR's INDEX'th element: store NEW if the
  ;; element currently contains OLD.
  (declare (index index))
  (%store-node-conditional (the fixnum
                             (+ target::misc-data-offset
                                (the fixnum (ash index target::word-shift))))
                           gvector
                           old
                           new))
;; Atomically add BY (a fixnum) to the CAR of CELL.
(defun %atomic-incf-car (cell &optional (by 1))
  (%atomic-incf-node (require-type by 'fixnum)
                     (require-type cell 'cons)
                     target::cons.car))
;; Atomically add BY (a fixnum) to the CDR of CELL.
(defun %atomic-incf-cdr (cell &optional (by 1))
  (%atomic-incf-node (require-type by 'fixnum)
                     (require-type cell 'cons)
                     target::cons.cdr))
;; Atomically add BY to element I of gvector V.
(defun %atomic-incf-gvector (v i &optional (by 1))
  (setq v (require-type v 'gvector))
  (setq i (require-type i 'fixnum))
  (%atomic-incf-node by v (+ target::misc-data-offset (ash i target::word-shift))))
(defun %atomic-incf-symbol-value (s &optional (by 1))
  ;; Atomically add BY to the value of symbol S at the address returned
  ;; by %SYMBOL-BINDING-ADDRESS (presumably the current dynamic binding
  ;; location -- confirm).
  (setq s (require-type s 'symbol))
  (multiple-value-bind (base offset) (%symbol-binding-address s)
    (%atomic-incf-node by base offset)))
;;; What happens if there are some pending readers and another writer,
;;; and we abort out of the semaphore wait ? If the writer semaphore is
;;; signaled before we abandon interest in it
#-futex
(defun %write-lock-rwlock-ptr (ptr lock &optional flag)
  ;; Acquire the write side of the rwlock at PTR.  STATE > 0 counts
  ;; recursive write holds by the owner; STATE < 0 counts active readers.
  ;; FLAG, if non-NIL, must be a LOCK-ACQUISITION istruct whose STATUS is
  ;; set to T once the lock is held.
  (with-macptrs ((write-signal (%get-ptr ptr target::rwlock.writer-signal)) )
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (let* ((level *interrupt-level*)
           (tcr (%current-tcr)))
      (declare (fixnum tcr))
      (without-interrupts
       ;; Restored: this line was corrupted in the source.  The rwlock's
       ;; spinlock must be taken before STATE is examined, matching the
       ;; (%get-spin-lock ptr) re-acquisition at the bottom of the wait
       ;; loop and the usage in %read-lock-rwlock-ptr/%unlock-rwlock-ptr.
       ;; (Equivalently (%get-spin-lock (%inc-ptr ptr target::rwlock.spin)).)
       (%get-spin-lock ptr)
       (if (eq (%get-object ptr target::rwlock.writer) tcr)
         ;; Recursive write lock: bump the count and release the spinlock.
         (progn
           (incf (%get-signed-natural ptr target::rwlock.state))
           (setf (%get-natural ptr target::rwlock.spin) 0)
           (if flag
             (setf (lock-acquisition.status flag) t))
           t)
         (do* ()
              ((eql 0 (%get-signed-natural ptr target::rwlock.state))
               ;; That wasn't so bad, was it ? We have the spinlock now.
               (setf (%get-signed-natural ptr target::rwlock.state) 1
                     (%get-natural ptr target::rwlock.spin) 0)
               (%set-object ptr target::rwlock.writer tcr)
               (if flag
                 (setf (lock-acquisition.status flag) t))
               t)
           ;; Lock busy: register interest, drop the spinlock, wait
           ;; (interruptibly) on the writer semaphore, then re-acquire.
           (incf (%get-natural ptr target::rwlock.blocked-writers))
           (setf (%get-natural ptr target::rwlock.spin) 0)
           (let* ((*interrupt-level* level))
             (%process-wait-on-semaphore-ptr write-signal 1 0 (rwlock-write-whostate lock)))
           (%get-spin-lock ptr)))))))
#+futex
(defun %write-lock-rwlock-ptr (ptr lock &optional flag)
  ;; Futex-based write lock.  STATE > 0 counts recursive write holds by
  ;; the owner; STATE < 0 counts active readers.  The rwlock's own word
  ;; at PTR is used as a mutex guarding STATE; WRITE-SIGNAL is a futex
  ;; word that blocked writers wait on.
  (with-macptrs ((write-signal (%INC-ptr ptr target::rwlock.writer-signal)) )
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (let* ((level *interrupt-level*)
           (tcr (%current-tcr)))
      (declare (fixnum tcr))
      (without-interrupts
       (%lock-futex ptr level lock nil)
       (if (eq (%get-object ptr target::rwlock.writer) tcr)
         ;; Recursive write lock: just bump the count.
         (progn
           (incf (%get-signed-natural ptr target::rwlock.state))
           (%unlock-futex ptr)
           (if flag
             (setf (lock-acquisition.status flag) t))
           t)
         (do* ()
              ((eql 0 (%get-signed-natural ptr target::rwlock.state))
               ;; That wasn't so bad, was it ? We have the spinlock now.
               (setf (%get-signed-natural ptr target::rwlock.state) 1)
               (setf (%get-signed-long write-signal) -1)
               (%unlock-futex ptr)
               (%set-object ptr target::rwlock.writer tcr)
               (if flag
                 (setf (lock-acquisition.status flag) t))
               t)
           ;; Busy: register as a blocked writer, release the mutex, wait
           ;; on the writer futex word, then retry under the mutex.
           (incf (%get-natural ptr target::rwlock.blocked-writers))
           (let* ((waitval -1))
             (%unlock-futex ptr)
             (with-process-whostate ((rwlock-write-whostate lock))
               (let* ((*interrupt-level* level))
                 (futex-wait write-signal waitval (rwlock-write-whostate lock)))))
           (%lock-futex ptr level lock nil)
           (decf (%get-natural ptr target::rwlock.blocked-writers))))))))
;; Public entry point: acquire the write side of a READ-WRITE-LOCK object.
(defun write-lock-rwlock (lock &optional flag)
  (%write-lock-rwlock-ptr (read-write-lock-ptr lock) lock flag))
#-futex
(defun %read-lock-rwlock-ptr (ptr lock &optional flag)
  ;; Acquire the read side of the rwlock at PTR (STATE < 0 counts active
  ;; readers).  Signals DEADLOCK if the calling thread already holds the
  ;; write lock.
  (with-macptrs ((read-signal (%get-ptr ptr target::rwlock.reader-signal)))
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (let* ((level *interrupt-level*)
           (tcr (%current-tcr)))
      (declare (fixnum tcr))
      (without-interrupts
       ;; Restored: this line was corrupted in the source.  Take the
       ;; rwlock's spinlock before examining STATE, matching the
       ;; (%get-spin-lock ptr) re-acquisition at the bottom of the wait
       ;; loop and the other rwlock operations.
       ;; (Equivalently (%get-spin-lock (%inc-ptr ptr target::rwlock.spin)).)
       (%get-spin-lock ptr)
       (if (eq (%get-object ptr target::rwlock.writer) tcr)
         (progn
           (setf (%get-natural ptr target::rwlock.spin) 0)
           (error 'deadlock :lock lock))
         (do* ((state
                (%get-signed-natural ptr target::rwlock.state)
                (%get-signed-natural ptr target::rwlock.state)))
              ((<= state 0)
               ;; Free or read-locked: record ourselves as one more reader
               ;; by decrementing STATE, then release the spinlock.
               (setf (%get-signed-natural ptr target::rwlock.state)
                     (the fixnum (1- state))
                     (%get-natural ptr target::rwlock.spin) 0)
               (if flag
                 (setf (lock-acquisition.status flag) t))
               t)
           (declare (fixnum state))
           ;; Write-locked: register interest, drop the spinlock, wait
           ;; (interruptibly) on the reader semaphore, then re-acquire.
           (incf (%get-natural ptr target::rwlock.blocked-readers))
           (setf (%get-natural ptr target::rwlock.spin) 0)
           (let* ((*interrupt-level* level))
             (%process-wait-on-semaphore-ptr read-signal 1 0 (rwlock-read-whostate lock)))
           (%get-spin-lock ptr)))))))
#+futex
(defun %read-lock-rwlock-ptr (ptr lock &optional flag)
  ;; Futex-based read lock: STATE < 0 counts active readers; DEADLOCK is
  ;; signalled if the caller already holds the write lock.  The rwlock's
  ;; word at PTR serves as a mutex guarding STATE; READER-SIGNAL is a
  ;; futex word blocked readers wait on.
  (with-macptrs ((reader-signal (%INC-ptr ptr target::rwlock.reader-signal)))
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (let* ((level *interrupt-level*)
           (tcr (%current-tcr)))
      (declare (fixnum tcr))
      (without-interrupts
       (%lock-futex ptr level lock nil)
       (if (eq (%get-object ptr target::rwlock.writer) tcr)
         (progn
           (%unlock-futex ptr)
           (error 'deadlock :lock lock))
         (do* ((state
                (%get-signed-natural ptr target::rwlock.state)
                (%get-signed-natural ptr target::rwlock.state)))
              ((<= state 0)
               ;; That wasn't so bad, was it ? We have the spinlock now.
               (setf (%get-signed-natural ptr target::rwlock.state)
                     (the fixnum (1- state)))
               (setf (%get-signed-long reader-signal) -1) ; can happen multiple times, but that's harmless
               (%unlock-futex ptr)
               (if flag
                 (setf (lock-acquisition.status flag) t))
               t)
           (declare (fixnum state))
           ;; Write-locked: register as a blocked reader, release the
           ;; mutex, wait on the reader futex word, then retry.
           (incf (%get-natural ptr target::rwlock.blocked-readers))
           (let* ((waitval -1))
             (%unlock-futex ptr)
             (let* ((*interrupt-level* level))
               (futex-wait reader-signal waitval (rwlock-read-whostate lock))))
           (%lock-futex ptr level lock nil)
           (decf (%get-natural ptr target::rwlock.blocked-readers))))))))
;; Public entry point: acquire the read side of a READ-WRITE-LOCK object.
(defun read-lock-rwlock (lock &optional flag)
  (%read-lock-rwlock-ptr (read-write-lock-ptr lock) lock flag))
#-futex
(defun %unlock-rwlock-ptr (ptr lock)
  ;; Release one read or write hold on the rwlock at PTR; errors if the
  ;; caller doesn't hold it.  When STATE returns to 0, all blocked
  ;; writers and readers are signalled (see the long comment below).
  (with-macptrs ((reader-signal (%get-ptr ptr target::rwlock.reader-signal))
                 (writer-signal (%get-ptr ptr target::rwlock.writer-signal)))
    (without-interrupts
     (%get-spin-lock ptr)
     (let* ((state (%get-signed-natural ptr target::rwlock.state))
            (tcr (%current-tcr)))
       (declare (fixnum state tcr))
       (cond ((> state 0)
              ;; Write-locked: only the owning thread may unlock.
              (unless (eql tcr (%get-object ptr target::rwlock.writer))
                (setf (%get-natural ptr target::rwlock.spin) 0)
                (error 'not-lock-owner :lock lock))
              (decf state))
             ((< state 0) (incf state))
             (t (setf (%get-natural ptr target::rwlock.spin) 0)
                (error 'not-locked :lock lock)))
       (setf (%get-signed-natural ptr target::rwlock.state) state)
       (when (zerop state)
         ;; (Comment block restored -- it had lost its ";;" markers in the
         ;; source, leaving bare words inside the function body.)
         ;; We want any thread waiting for a lock semaphore to
         ;; be able to wait interruptibly.  When a thread waits,
         ;; it increments either the "blocked-readers" or "blocked-writers"
         ;; field, but since it may get interrupted before obtaining
         ;; the semaphore that's more of "an expression of interest"
         ;; in taking the lock than it is "a firm commitment to take it."
         ;; It's generally (much) better to signal the semaphore(s)
         ;; too often than it would be to not signal them often
         ;; enough; spurious wakeups are better than deadlock.
         ;; So: if there are blocked writers, the writer-signal
         ;; is raised once for each apparent blocked writer.  (At most
         ;; one writer will actually succeed in taking the lock.)
         ;; If there are blocked readers, the reader-signal is raised
         ;; once for each of them.  (It's possible for both the
         ;; reader and writer semaphores to be raised on the same
         ;; unlock; the writer semaphore is raised first, so in that
         ;; sense, writers still have priority but it's not guaranteed.)
         ;; Both the "blocked-writers" and "blocked-readers" fields
         ;; are cleared here (they can't be changed from another thread
         ;; until this thread releases the spinlock.)
         (setf (%get-signed-natural ptr target::rwlock.writer) 0)
         (let* ((nwriters (%get-natural ptr target::rwlock.blocked-writers))
                (nreaders (%get-natural ptr target::rwlock.blocked-readers)))
           (declare (fixnum nreaders nwriters))
           (when (> nwriters 0)
             (setf (%get-natural ptr target::rwlock.blocked-writers) 0)
             (dotimes (i nwriters)
               (%signal-semaphore-ptr writer-signal)))
           (when (> nreaders 0)
             (setf (%get-natural ptr target::rwlock.blocked-readers) 0)
             (dotimes (i nreaders)
               (%signal-semaphore-ptr reader-signal)))))
       (setf (%get-natural ptr target::rwlock.spin) 0)
       t))))
#+futex
(defun %unlock-rwlock-ptr (ptr lock)
  ;; Futex variant of rwlock release.  When STATE returns to 0, wake one
  ;; blocked writer in preference to waking all blocked readers.
  (with-macptrs ((reader-signal (%INC-ptr ptr target::rwlock.reader-signal))
                 (writer-signal (%INC-ptr ptr target::rwlock.writer-signal)))
    (let* ((signal nil)
           (wakeup 0))
      (without-interrupts
       (%lock-futex ptr -1 lock nil)
       (let* ((state (%get-signed-natural ptr target::rwlock.state))
              (tcr (%current-tcr)))
         (declare (fixnum state tcr))
         (cond ((> state 0)
                ;; Write-locked: only the owning thread may unlock.
                (unless (eql tcr (%get-object ptr target::rwlock.writer))
                  (%unlock-futex ptr)
                  (error 'not-lock-owner :lock lock))
                (decf state))
               ((< state 0) (incf state))
               (t (%unlock-futex ptr)
                  (error 'not-locked :lock lock)))
         (setf (%get-signed-natural ptr target::rwlock.state) state)
         (when (zerop state)
           (setf (%get-signed-natural ptr target::rwlock.writer) 0)
           ;; Pick which futex word to wake: one writer, else all readers.
           (let* ((nwriters (%get-natural ptr target::rwlock.blocked-writers))
                  (nreaders (%get-natural ptr target::rwlock.blocked-readers)))
             (declare (fixnum nreaders nwriters))
             (if (> nwriters 0)
               (setq signal writer-signal wakeup 1)
               (if (> nreaders 0)
                 (setq signal reader-signal wakeup #$INT_MAX)))))
         (when signal (setf (%get-signed-long signal) 0))
         (%unlock-futex ptr)
         (when signal (futex-wake signal wakeup))
         t)))))
;; Public entry point: release one hold on a READ-WRITE-LOCK object.
(defun unlock-rwlock (lock)
  (%unlock-rwlock-ptr (read-write-lock-ptr lock) lock))
;;; There are all kinds of ways to lose here.
;;; The caller must have read access to the lock exactly once,
;;; or have write access.
;;; there's currently no way to detect whether the caller has
;;; read access at all.
;;; If we have to block and get interrupted, cleanup code may
;;; try to unlock a lock that we don't hold. (It might be possible
;;; to circumvent that if we use the same notifcation object here
;;; that controls that cleanup process.)
(defun %promote-rwlock (lock &optional flag)
  ;; Upgrade a read lock held by the calling thread to a write lock.
  ;; If this thread is the sole reader (STATE = -1) the upgrade is done
  ;; in place; otherwise the read hold is released and a write lock is
  ;; acquired, which may block (see the caveats in the comment above).
  (let* ((ptr (read-write-lock-ptr lock)))
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (let* ((level *interrupt-level*)
           (tcr (%current-tcr)))
      (without-interrupts
       #+futex
       (%lock-futex ptr level lock nil)
       #-futex
       (%get-spin-lock ptr)
       (let* ((state (%get-signed-natural ptr target::rwlock.state)))
         (declare (fixnum state))
         (cond ((> state 0)
                (unless (eql (%get-object ptr target::rwlock.writer) tcr)
                  #+futex
                  (%unlock-futex ptr)
                  #-futex
                  (setf (%get-natural ptr target::rwlock.spin) 0)
                  ;; Fixed: ERROR requires a condition-type designator;
                  ;; the keyword :NOT-LOCK-OWNER is not one.  Use the
                  ;; quoted condition names signalled by the other rwlock
                  ;; operations (%unlock-rwlock-ptr etc.).
                  (error 'not-lock-owner :lock lock)))
               ((= state 0)
                #+futex (%unlock-futex ptr)
                #-futex (setf (%get-natural ptr target::rwlock.spin) 0)
                ;; Fixed: was (error :not-locked ...) -- see above.
                (error 'not-locked :lock lock))
               (t
                (if (= state -1)
                  ;; Sole reader: flip directly to a single write hold.
                  (progn
                    (setf (%get-signed-natural ptr target::rwlock.state) 1)
                    (%set-object ptr target::rwlock.writer tcr)
                    #+futex
                    (%unlock-futex ptr)
                    #-futex
                    (setf (%get-natural ptr target::rwlock.spin) 0)
                    (if flag
                      (setf (lock-acquisition.status flag) t))
                    t)
                  ;; Other readers exist: give up the read hold and
                  ;; contend for the write lock normally.
                  (progn
                    #+futex
                    (%unlock-futex ptr)
                    #-futex
                    (setf (%get-natural ptr target::rwlock.spin) 0)
                    (%unlock-rwlock-ptr ptr lock)
                    (let* ((*interrupt-level* level))
                      (%write-lock-rwlock-ptr ptr lock flag)))))))))))
(defun safe-get-ptr (p &optional dest)
  ;; Copy the pointer in P into DEST (a fresh null macptr if none is
  ;; supplied, which must otherwise be a macptr) via the kernel's
  ;; %SAFE-GET-PTR, with interrupts disabled.
  (if (null dest)
    (setq dest (%null-ptr))
    (unless (typep dest 'macptr)
      (check-type dest macptr)))
  (without-interrupts ;reentrancy
   (%safe-get-ptr p dest)))
| null | https://raw.githubusercontent.com/fukamachi/clozure-cl/4b0c69452386ae57b08984ed815d9b50b4bcc8a2/level-0/l0-misc.lisp | lisp | Package : CCL -*-
file "LICENSE". The LLGPL consists of a preamble and the LGPL,
conflict, the preamble takes precedence.
Clozure CL is referenced in the preamble as the "LIBRARY."
The LLGPL is also available online at
Bootstrapping for futexes
We only need a few constants from <linux/futex.h>, which may
not have been included in the :libc .cdb files.
Miscellany.
This takes a simple-base-string and passes a C string into
and there's a better chance that users would see this message.
Returns an alist of the form:
((thread cstack-free cstack-used vstack-free vstack-used tstack-free tstack-used)
...)
sp free
sp used
tsp free (not on ARM)
tsp used (not on ARM)
and that it points to a valid utf-16 string with native endianness.
having to bootstrap #_strcmp.
Return true if the cstrings are equal, false otherwise.
is defined. It handles a few common cases.
This is intended for use by debugging tools. It's a horrible thing
to do otherwise. The caller really needs to hold the heap-segment
lock; this grabs the tcr queue lock as well.
ownership may change while this code
What happens if there are some pending readers and another writer,
and we abort out of the semaphore wait ? If the writer semaphore is
signaled before we abandon interest in it
That wasn't so bad, was it ? We have the spinlock now.
That wasn't so bad, was it ? We have the spinlock now.
That wasn't so bad, was it ? We have the spinlock now.
That wasn't so bad, was it ? We have the spinlock now.
can happen multiple times, but that's harmless
We want any thread waiting for a lock semaphore to
it increments either the "blocked-readers" or "blocked-writers"
field, but since it may get interrupted before obtaining
the semaphore that's more of "an expression of interest"
in taking the lock than it is "a firm commitment to take it."
too often than it would be to not signal them often
enough; spurious wakeups are better than deadlock.
So: if there are blocked writers, the writer-signal
is raised once for each apparent blocked writer. (At most
If there are blocked readers, the reader-signal is raised
once for each of them. (It's possible for both the
reader and writer semaphores to be raised on the same
the writer semaphore is raised first , so in that
sense, writers still have priority but it's not guaranteed.)
Both the "blocked-writers" and "blocked-readers" fields
are cleared here (they can't be changed from another thread
until this thread releases the spinlock.)
There are all kinds of ways to lose here.
The caller must have read access to the lock exactly once,
or have write access.
there's currently no way to detect whether the caller has
read access at all.
If we have to block and get interrupted, cleanup code may
try to unlock a lock that we don't hold. (It might be possible
to circumvent that if we use the same notifcation object here
that controls that cleanup process.)
reentrancy | Copyright ( C ) 2009 Clozure Associates
Copyright ( C ) 1994 - 2001 Digitool , Inc
This file is part of Clozure CL .
Clozure CL is licensed under the terms of the Lisp Lesser GNU Public
License , known as the LLGPL and distributed with Clozure CL as the
which is distributed with Clozure CL as the file " LGPL " . Where these
(in-package "CCL")
#+(and linux-target no (or x86-target arm-target))
(eval-when (:compile-toplevel :execute)
(pushnew :futex *features*))
#+futex
(eval-when (:compile-toplevel :execute)
(defconstant FUTEX-WAIT 0)
(defconstant FUTEX-WAKE 1)
(defconstant futex-avail 0)
(defconstant futex-locked 1)
(defconstant futex-contended 2)
(declaim (inline %lock-futex %unlock-futex)))
(defun memq (item list)
(do* ((tail list (%cdr tail)))
((null tail))
(if (eq item (car tail))
(return tail))))
(defun %copy-u8-to-string (u8-vector source-idx string dest-idx n)
(declare (optimize (speed 3) (safety 0))
(fixnum source-idx dest-idx n)
(type (simple-array (unsigned-byte 8) (*)) u8-vector)
(simple-base-string string))
(do* ((i 0 (1+ i)))
((= i n) string)
(declare (fixnum i))
(setf (%scharcode string dest-idx) (aref u8-vector source-idx))
(incf source-idx)
(incf dest-idx)))
(defun %copy-string-to-u8 (string source-idx u8-vector dest-idx n)
(declare (optimize (speed 3) (safety 0))
(fixnum source-idx dest-idx n)
(type (simple-array (unsigned-byte 8) (*)) u8-vector)
(simple-base-string string))
(do* ((i 0 (1+ i)))
((= i n) u8-vector)
(declare (fixnum i))
(let* ((code (%scharcode string source-idx)))
(declare (type (mod #x11000) code))
(if (> code #xff)
(setq code (char-code #\Sub)))
(setf (aref u8-vector dest-idx) code)
(incf source-idx)
(incf dest-idx))))
(defun append-2 (y z)
(if (null y)
z
(let* ((new (cons (car y) nil))
(tail new))
(declare (list new tail))
(dolist (head (cdr y))
(setq tail (cdr (rplacd tail (cons head nil)))))
(rplacd tail z)
new)))
(defun dbg (&optional arg)
(dbg arg))
the kernel " Bug " routine . Not too fancy , but neither is # _ DebugStr ,
;;; Report a fatal internal error via the kernel's "bug" entry point.
;;; ARG must be a SIMPLE-BASE-STRING (otherwise BUG recurses once with
;;; a canned message).
(defun bug (arg)
  (if (typep arg 'simple-base-string)
    ;; x86 can pass the message directly through a debug trap; other
    ;; targets copy it to a stack-allocated C string and ff-call the
    ;; kernel's lisp-bug import.
    #+x86-target
    (debug-trap-with-string arg)
    #-x86-target
    (let* ((len (length arg)))
      (%stack-block ((buf (1+ len)))
        (%cstr-pointer arg buf)
        (ff-call
         (%kernel-import target::kernel-import-lisp-bug)
         :address buf
         :void)))
    (bug "Bug called with non-simple-base-string.")))

;;; Total bytes ever allocated on the lisp heap.  The #+not-any-more
;;; form preserves an older definition that also counted freed bytes.
(defun total-bytes-allocated ()
  (%heap-bytes-allocated)
  #+not-any-more
  (+ (unsignedwide->integer *total-bytes-freed*)
     (%heap-bytes-allocated)))

;;; Bytes still free in the active dynamic area, i.e. how much can be
;;; allocated before the next full GC is triggered.
(defun %freebytes ()
  (with-macptrs (p)
    (%setf-macptr-to-object p
                            (%fixnum-ref (%get-kernel-global 'all-areas)
                                         target::area.succ))
    (- (%get-natural p target::area.high)
       (%get-natural p target::area.active))))

;;; Total address space reserved for the lisp heap (area.high -
;;; area.low of the all-areas record), using the natural word width
;;; of the target.
(defun %reservedbytes ()
  (with-macptrs (p)
    (%setf-macptr-to-object p (%get-kernel-global 'all-areas))
    (- #+32-bit-target
       (%get-unsigned-long p target::area.high)
       #+64-bit-target
       (%%get-unsigned-longlong p target::area.high)
       #+32-bit-target
       (%get-unsigned-long p target::area.low)
       #+64-bit-target
       (%%get-unsigned-longlong p target::area.low))))

;;; Stub: every address is considered part of the application heap.
;;; Kept as a function so callers (e.g. %USEDBYTES) have a single
;;; point to specialize.
(defun object-in-application-heap-p (address)
  (declare (ignore address))
  t)

;;; Returns the current size of the frozen ("static dnode") portion
;;; of the tenured area, in dnodes.
(defun frozen-space-dnodes ()
  "Returns the current size of the frozen area."
  (%fixnum-ref-natural (%get-kernel-global 'tenured-area)
                       target::area.static-dnodes))
;;; Total bytes in use across all consing areas, classified as
;;; dynamic, static, or managed-static ("library") memory.  Returns
;;; (VALUES DYNAMIC STATIC LIBRARY FROZEN-SIZE); frozen dynamic memory
;;; is reported separately and excluded from DYNAMIC.  Both kernel
;;; locks are held so the area list is stable while walking it.
(defun %usedbytes ()
  (with-lock-grabbed (*kernel-exception-lock*)
    (with-lock-grabbed (*kernel-tcr-area-lock*)
      (%normalize-areas)
      (let ((static 0)
            (dynamic 0)
            (library 0))
        (do-consing-areas (area)
          (let* ((active (%fixnum-ref area target::area.active))
                 (bytes (ash (- active
                                (%fixnum-ref area target::area.low))
                             target::fixnumshift))
                 (code (%fixnum-ref area target::area.code)))
            (when (object-in-application-heap-p active)
              (if (eql code area-dynamic)
                (incf dynamic bytes)
                (if (eql code area-managed-static)
                  (incf library bytes)
                  (incf static bytes))))))
        (let* ((frozen-size (ash (frozen-space-dnodes) target::dnode-shift)))
          (decf dynamic frozen-size)
          (values dynamic static library frozen-size))))))

;;; Aggregate free/used byte counts over all stack areas (value,
;;; control and temp stacks).  Returns (VALUES TOTAL USED FREE).
(defun %stack-space ()
  (%normalize-areas)
  (let ((free 0)
        (used 0))
    (with-macptrs (p)
      (do-gc-areas (area)
        (when (member (%fixnum-ref area target::area.code)
                      '(#.area-vstack
                        #.area-cstack
                        #.area-tstack))
          (%setf-macptr-to-object p area)
          ;; Read area bounds with the natural word width of the target.
          (let ((active
                 #+32-bit-target
                 (%get-unsigned-long p target::area.active)
                 #+64-bit-target
                 (%%get-unsigned-longlong p target::area.active))
                (high
                 #+32-bit-target
                 (%get-unsigned-long p target::area.high)
                 #+64-bit-target
                 (%%get-unsigned-longlong p target::area.high))
                (low
                 #+32-bit-target
                 (%get-unsigned-long p target::area.low)
                 #+64-bit-target
                 (%%get-unsigned-longlong p target::area.low)))
            (incf used (- high active))
            (incf free (- active low))))))
    (values (+ free used) used free)))

;;; Alist of (THREAD . per-thread-stack-space-values) for every lisp
;;; process, gathered with interrupts disabled so the process list
;;; stays stable.
(defun %stack-space-by-lisp-thread ()
  (let* ((res nil))
    (without-interrupts
     (dolist (p (all-processes))
       (let* ((thread (process-thread p)))
         (when thread
           (push (cons thread (multiple-value-list (%thread-stack-space thread))) res)))))
    res))
;;; Returns six values on most platforms, 4 on ARM:
;;;   control-stack free and used, value-stack free and used,
;;;   temp-stack free and used (the temp stack is omitted on ARM).
;;; Per-thread stack accounting.  For each of THREAD's stack areas the
;;; local helper FREE-AND-USED walks the area and its chain of older
;;; segments, summing free (active - low) and used (high - active)
;;; bytes.  Returns all-zero values if the thread has no TCR or its
;;; control-stack area pointer is 0 (thread exited).
(defun %thread-stack-space (&optional (thread *current-lisp-thread*))
  (when (eq thread *current-lisp-thread*)
    (%normalize-areas))
  (labels ((free-and-used (area)
             (with-macptrs (p)
               (%setf-macptr-to-object p area)
               (let* ((low
                       #+32-bit-target
                       (%get-unsigned-long p target::area.low)
                       #+64-bit-target
                       (%%get-unsigned-longlong p target::area.low))
                      (high
                       #+32-bit-target
                       (%get-unsigned-long p target::area.high)
                       #+64-bit-target
                       (%%get-unsigned-longlong p target::area.high))
                      (active
                       #+32-bit-target
                       (%get-unsigned-long p target::area.active)
                       #+64-bit-target
                       (%%get-unsigned-longlong p target::area.active))
                      (free (- active low))
                      (used (- high active)))
                 ;; Older (overflowed) segments are fully used.
                 (loop
                   (setq area (%fixnum-ref area target::area.older))
                   (when (eql area 0) (return))
                   (%setf-macptr-to-object p area)
                   (let ((low
                          #+32-bit-target
                          (%get-unsigned-long p target::area.low)
                          #+64-bit-target
                          (%%get-unsigned-longlong p target::area.low))
                         (high
                          #+32-bit-target
                          (%get-unsigned-long p target::area.high)
                          #+64-bit-target
                          (%%get-unsigned-longlong p target::area.high)))
                     (declare (fixnum low high))
                     (incf used (- high low))))
                 (values free used)))))
    (let* ((tcr (lisp-thread.tcr thread))
           ;; On win32/x8632 the cs-area lives in the aux vector.
           (cs-area #+(and windows-target x8632-target)
                    (%fixnum-ref (%fixnum-ref tcr (- target::tcr.aux
                                                     target::tcr-bias))
                                 target::tcr-aux.cs-area)
                    #-(and windows-target x8632-target)
                    (%fixnum-ref tcr target::tcr.cs-area)))
      (if (or (null tcr)
              (zerop (%fixnum-ref cs-area)))
        (values 0 0 0 0 0 0)
        (multiple-value-bind (cf cu) (free-and-used cs-area)
          (multiple-value-bind (vf vu)
              (free-and-used (%fixnum-ref tcr (- target::tcr.vs-area
                                                 target::tcr-bias)))
            ;; ARM has no temp stack, so only 4 values there.
            #+arm-target
            (values cf cu vf vu)
            #-arm-target
            (multiple-value-bind (tf tu)
                (free-and-used (%fixnum-ref tcr (- target::tcr.ts-area
                                                   target::tcr-bias)))
              (values cf cu vf vu tf tu))))))))
(defun room (&optional (verbose :default))
  "Print to *STANDARD-OUTPUT* information about the state of internal
storage and its management. The optional argument controls the
verbosity of output. If it is T, ROOM prints out a maximal amount of
information. If it is NIL, ROOM prints out a minimal amount of
information. If it is :DEFAULT or it is not supplied, ROOM prints out
an intermediate amount of information."
  (let* ((freebytes nil)
         (usedbytes nil)
         (static-used nil)
         (staticlib-used nil)
         (frozen-space-size nil)
         (lispheap nil)
         (reserved nil)
         (static nil)
         (stack-total)
         (stack-used)
         (stack-free)
         (static-cons-reserved nil)
         (stack-used-by-thread nil))
    ;; Gather all statistics up front; the per-thread breakdown is
    ;; only collected for fully-verbose output.
    (progn
      (progn
        (setq freebytes (%freebytes))
        (when verbose
          (multiple-value-setq (usedbytes static-used staticlib-used frozen-space-size)
            (%usedbytes))
          (setq lispheap (+ freebytes usedbytes)
                reserved (%reservedbytes)
                static (+ static-used staticlib-used frozen-space-size))
          (multiple-value-setq (stack-total stack-used stack-free)
            (%stack-space))
          (unless (eq verbose :default)
            (setq stack-used-by-thread (%stack-space-by-lisp-thread))))))
    (format t "~&Approximately ~:D bytes of memory can be allocated ~%before the next full GC is triggered. ~%" freebytes)
    (when verbose
      (flet ((k (n) (round n 1024)))
        (princ "
Total Size Free Used")
        (format t "~&Lisp Heap:~15t~10D (~DK)~35t~10D (~DK)~55t~10D (~DK)"
                lispheap (k lispheap)
                freebytes (k freebytes)
                usedbytes (k usedbytes))
        (format t "~&Stacks:~15t~10D (~DK)~35t~10D (~DK)~55t~10D (~DK)"
                stack-total (k stack-total)
                stack-free (k stack-free)
                stack-used (k stack-used))
        (format t "~&Static:~15t~10D (~DK)~35t~10D (~DK)~55t~10D (~DK)"
                static (k static)
                0 0
                static (k static))
        ;; Frozen space: report reserved static conses separately from
        ;; the rest of the frozen area.
        (when (and frozen-space-size (not (zerop frozen-space-size)))
          (setq static-cons-reserved (ash (reserved-static-conses) target::dnode-shift)
                frozen-space-size (- frozen-space-size static-cons-reserved))
          (unless (zerop static-cons-reserved)
            (format t "~&~,3f MB of reserved static conses (~d free, ~d reserved)"
                    (/ static-cons-reserved (float (ash 1 20)))
                    (free-static-conses)
                    (reserved-static-conses)))
          (unless (zerop frozen-space-size)
            (format t "~&~,3f MB of static memory is \"frozen\" dynamic memory"
                    (/ frozen-space-size (float (ash 1 20))))))
        (format t "~&~,3f MB reserved for heap expansion."
                (/ reserved (float (ash 1 20))))
        ;; Fully-verbose: per-thread stack breakdown.
        (unless (eq verbose :default)
          (terpri)
          (let* ((processes (all-processes)))
            (dolist (thread-info stack-used-by-thread)
              (destructuring-bind (thread sp-free sp-used vsp-free vsp-used #-arm-target tsp-free #-arm-target tsp-used)
                  thread-info
                (let* ((process (dolist (p processes)
                                  (when (eq (process-thread p) thread)
                                    (return p)))))
                  (when process
                    (let ((sp-total (+ sp-used sp-free))
                          (vsp-total (+ vsp-used vsp-free))
                          #-arm-target
                          (tsp-total (+ tsp-used tsp-free)))
                      (format t "~%~a(~d)~% cstack:~12T~10D (~DK) ~33T~10D (~DK) ~54T~10D (~DK)~
~% vstack:~12T~10D (~DK) ~33T~10D (~DK) ~54T~10D (~DK)"
                              (process-name process)
                              (process-serial-number process)
                              sp-total (k sp-total) sp-free (k sp-free) sp-used (k sp-used)
                              vsp-total (k vsp-total) vsp-free (k vsp-free) vsp-used (k vsp-used))
                      #-arm-target
                      (format t
                              "~% tstack:~12T~10D (~DK) ~33T~10D (~DK) ~54T~10D (~DK)"
                              tsp-total (k tsp-total) tsp-free (k tsp-free) tsp-used (k tsp-used)))))))))))))
;;; Tortoise-and-hare traversal: FAST advances two conses per
;;; iteration, SLOW one, and N counts the conses FAST has passed.
;;; If FAST ever catches SLOW (with N > 0) the list is circular.
(defun list-length (l)
  "Return the length of the given LIST, or NIL if the LIST is circular."
  (do* ((n 0 (+ n 2))
        (fast l (cddr fast))
        (slow l (cdr slow)))
       ((null fast) n)
    (declare (fixnum n))
    ;; Odd-length list: FAST's cdr is NIL one step early.
    (if (null (cdr fast))
      (return (the fixnum (1+ n)))
      (if (and (eq fast slow)
               (> n 0))
        (return nil)))))
;;; True iff L is a proper (finite, NIL-terminated) list.  Returns a
;;; generalized boolean: NIL for non-lists, dotted or circular lists;
;;; a non-NIL value (T or a cons count) otherwise.  Uses the
;;; tortoise-and-hare scheme to detect cycles without consing.
(defun proper-list-p (l)
  (and (typep l 'list)
       (do* ((n 0 (+ n 2))
             (fast l)
             (slow l (cdr slow)))
            (nil)
         (declare (fixnum n))
         (cond ((null fast) (return n))
               ((atom fast) (return nil))
               ((null (cdr fast)) (return t))
               ((and (eq fast slow) (> n 0)) (return nil))
               (t
                ;; Advance FAST two conses, bailing out if the list
                ;; turns out to be dotted.
                (if (listp (cdr fast))
                  (setq fast (cddr fast))
                  (return nil)))))))
;;; True for any vector and for any proper (finite) list; NIL for
;;; everything else, including circular lists.
(defun proper-sequence-p (x)
  (if (typep x 'vector)
    t
    (when (typep x 'list)
      (not (null (list-length x))))))
;;; CL LENGTH.  Lists are measured with LIST-LENGTH (signalling
;;; $XIMPROPERLIST for circular/improper lists); vector-header
;;; (adjustable/fill-pointered) vectors report their logical size;
;;; other vectors report their uvector size.
(defun length (seq)
  "Return an integer that is the length of SEQUENCE."
  (seq-dispatch
   seq
   (or (list-length seq)
       (%err-disp $XIMPROPERLIST seq))
   (if (= (the fixnum (typecode seq)) target::subtag-vectorH)
     (%svref seq target::vectorH.logsize-cell)
     (uvsize seq))))

;;; Copy LEN octets starting at foreign POINTER into DEST (a fresh
;;; string unless one is supplied), treating each octet as a char code.
(defun %str-from-ptr (pointer len &optional (dest (make-string len)))
  (declare (fixnum len)
           (optimize (speed 3) (safety 0)))
  (dotimes (i len dest)
    (setf (%scharcode dest i) (%get-unsigned-byte pointer i))))

;;; Read a NUL-terminated 8-bit C string at POINTER into a lisp string
;;; (one char per octet, no decoding).
(defun %get-cstring (pointer)
  (do* ((end 0 (1+ end)))
       ((zerop (the (unsigned-byte 8) (%get-unsigned-byte pointer end)))
        (%str-from-ptr pointer end))
    (declare (fixnum end))))

;;; Read a NUL-terminated C string at POINTER, decoding it as UTF-8.
;;; First finds the terminator, then sizes and decodes in two passes.
(defun %get-utf-8-cstring (pointer)
  (do* ((end 0 (1+ end)))
       ((zerop (the (unsigned-byte 8) (%get-unsigned-byte pointer end)))
        (let* ((len (utf-8-length-of-memory-encoding pointer end 0))
               (string (make-string len)))
          (utf-8-memory-decode pointer end 0 string)
          string))
    (declare (fixnum end))))
;;; Assumes that POINTER is terminated by a 0-valued 16-bit word.
;;; Decode a 0-terminated native-endian UTF-16 string at POINTER.
;;; Pass 1 (the outer DO*) counts characters, skipping the low half
;;; of each surrogate pair; pass 2 builds the string, combining
;;; surrogate pairs into single characters.
(defun %get-native-utf-16-cstring (pointer)
  (do* ((nchars 0 (1+ nchars))
        (i 0 (+ i 2))
        (code (%get-unsigned-word pointer i) (%get-unsigned-word pointer i)))
       ((zerop code)
        (do* ((string (make-string nchars))
              (out 0 (1+ out))
              (i 0 (+ i 2)))
             ((= out nchars) string)
          (declare (fixnum i out))
          (let* ((code (%get-unsigned-word pointer i)))
            (declare (type (unsigned-byte 16) code))
            ;; #xD800..#xDBFF is a high surrogate: consume the next
            ;; word too and combine the pair.
            (cond ((and (>= code #xd800)
                        (< code #xdc00))
                   (incf i 2)
                   (let* ((code2 (%get-unsigned-word pointer i)))
                     (declare (type (unsigned-byte 16) code2))
                     (setf (schar string out)
                           (utf-16-combine-surrogate-pairs code code2))))
                  (t (setf (schar string out) (code-char code)))))))
    ;; Counting pass: a surrogate pair occupies two words but only one
    ;; character, so skip the second word here.
    (when (and (>= code #xd800) (< code #xdc00))
      (incf i 2))))
;;; This is mostly here so we can bootstrap shared libs without ...
;;; Compare two NUL-terminated C strings for equality.  Returns T when
;;; they match through their terminators, NIL at the first mismatch.
(defun %cstrcmp (x y)
  (do* ((i 0 (1+ i))
        (bx (%get-byte x i) (%get-byte x i))
        (by (%get-byte y i) (%get-byte y i)))
       ((not (= bx by)))
    (declare (fixnum i bx by))
    (when (zerop bx)
      (return t))))

;;; Compare the first N bytes of two C strings.  Returns T if all N
;;; bytes match, NIL otherwise.  Does not stop at NUL.
(defun %cnstrcmp (x y n)
  (declare (fixnum n))
  (do* ((i 0 (1+ i))
        (bx (%get-byte x i) (%get-byte x i))
        (by (%get-byte y i) (%get-byte y i)))
       ((= i n) t)
    (declare (fixnum i bx by))
    (unless (= bx by)
      (return))))

;; Weak EQ hash table mapping documented objects to alists of
;; (DOC-ID . DOC-STRING), and the lock that guards it.
(defvar %documentation nil)
(defvar %documentation-lock% nil)

(setq %documentation
      (make-hash-table :weak t :size 100 :test 'eq :rehash-threshold .95)
      %documentation-lock% (make-lock))

;;; Install (or, when DOC is NIL, remove) the DOC-ID documentation
;;; entry for THING.  Returns DOC.  Holds the documentation lock for
;;; the whole update.
(defun %put-documentation (thing doc-id doc)
  (with-lock-grabbed (%documentation-lock%)
    (let* ((info (gethash thing %documentation))
           (pair (assoc doc-id info)))
      (if doc
        (progn
          (unless (typep doc 'string)
            (report-bad-arg doc 'string))
          (if pair
            (setf (cdr pair) doc)
            (setf (gethash thing %documentation) (cons (cons doc-id doc) info))))
        ;; DOC is NIL: delete the entry, dropping the hash entry
        ;; entirely when the alist becomes empty.
        (when pair
          (if (setq info (nremove pair info))
            (setf (gethash thing %documentation) info)
            (remhash thing %documentation))))))
  doc)

;;; Fetch the DOC-ID documentation string for OBJECT, or NIL.
(defun %get-documentation (object doc-id)
  (cdr (assoc doc-id (gethash object %documentation))))
;;; This pretends to be (SETF DOCUMENTATION), until that generic function is defined.
;;; Early (pre-CLOS) implementation of (SETF DOCUMENTATION).
;;; For FUNCTION doc: functions are documented directly; symbols are
;;; resolved through FBOUNDP; (SETF name) forms are recursed on.
;;; VARIABLE doc is only stored for symbols.  Everything else is
;;; stored against THING under DOC-ID as-is.  Returns DOC-STRING.
(defun %set-documentation (thing doc-id doc-string)
  (case doc-id
    (function
     (if (typep thing 'function)
       (%put-documentation thing t doc-string)
       (if (typep thing 'symbol)
         (let* ((def (fboundp thing)))
           (if def
             (%put-documentation def t doc-string)))
         (if (setf-function-name-p thing)
           (%set-documentation
            (setf-function-name thing) doc-id doc-string)))))
    (variable
     (if (typep thing 'symbol)
       (%put-documentation thing doc-id doc-string)))
    (t (%put-documentation thing doc-id doc-string)))
  doc-string)

;; Register the early implementation under the public name.
(%fhave 'set-documentation #'%set-documentation)
;; Spin-lock tuning/diagnostics: attempts per round before yielding,
;; and a running count of rounds that had to yield.
(defparameter *spin-lock-tries* 1)
(defparameter *spin-lock-timeouts* 0)

;; Portable spin-lock acquisition, used only when neither futexes nor
;; the x86-specific implementation is available.  Spins trying to
;; CAS the word at P from 0 to the current TCR, yielding between
;; rounds.
#+(and (not futex) (not x86-target))
(defun %get-spin-lock (p)
  (let* ((self (%current-tcr))
         (n *spin-lock-tries*))
    (declare (fixnum n))
    (loop
      (dotimes (i n)
        (when (eql 0 (%ptr-store-fixnum-conditional p 0 self))
          (return-from %get-spin-lock t)))
      (%atomic-incf-node 1 '*spin-lock-timeouts* target::symbol.vcell)
      (yield))))

;; Inline the small lock helpers in code compiled after this point.
(eval-when (:compile-toplevel :execute)
  (declaim (inline note-lock-wait note-lock-held note-lock-released)))

(eval-when (:compile-toplevel)
  (declaim (inline %lock-recursive-lock-ptr %unlock-recursive-lock-ptr)))
;;; Semaphore-based recursive lock acquisition.  Re-entry by the
;;; owning thread just bumps the count; otherwise the lockptr.avail
;;; word is incremented under the spinlock, and the first incrementer
;;; (result 1) becomes the owner.  Losers wait on the lock's semaphore
;;; and retry.  FLAG, when supplied, must be a LOCK-ACQUISITION whose
;;; status is set once the lock is held.
#-futex
(defun %lock-recursive-lock-ptr (ptr lock flag)
  (with-macptrs ((p)
                 (owner (%get-ptr ptr target::lockptr.owner))
                 (signal (%get-ptr ptr target::lockptr.signal))
                 (spin (%inc-ptr ptr target::lockptr.spinlock)))
    (%setf-macptr-to-object p (%current-tcr))
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (loop
      (without-interrupts
       (when (eql p owner)
         (incf (%get-natural ptr target::lockptr.count))
         (when flag
           (setf (lock-acquisition.status flag) t))
         (return t))
       (%get-spin-lock spin)
       (when (eql 1 (incf (%get-natural ptr target::lockptr.avail)))
         (setf (%get-ptr ptr target::lockptr.owner) p
               (%get-natural ptr target::lockptr.count) 1)
         (setf (%get-natural spin 0) 0)
         (if flag
           (setf (lock-acquisition.status flag) t))
         (return t))
       (setf (%get-natural spin 0) 0))
      (%process-wait-on-semaphore-ptr signal 1 0 (recursive-lock-whostate lock)))))

;;; Futex-based recursive lock acquisition: re-entry bumps the count,
;;; otherwise %LOCK-FUTEX acquires the lock word (waiting at
;;; WAIT-LEVEL interrupt level) and ownership is recorded.
#+futex
(defun %lock-recursive-lock-ptr (ptr lock flag)
  (if (istruct-typep flag 'lock-acquisition)
    (setf (lock-acquisition.status flag) nil)
    (if flag (report-bad-arg flag 'lock-acquisition)))
  (let* ((self (%current-tcr))
         (level *interrupt-level*))
    (declare (fixnum self))
    (without-interrupts
     (cond ((eql self (%get-object ptr target::lockptr.owner))
            (incf (%get-natural ptr target::lockptr.count)))
           (t (%lock-futex ptr level lock #'recursive-lock-whostate)
              (%set-object ptr target::lockptr.owner self)
              (setf (%get-natural ptr target::lockptr.count) 1)))
     (when flag
       (setf (lock-acquisition.status flag) t))
     t)))

;;; Lock a RECURSIVE-LOCK object (public entry point).
(defun %lock-recursive-lock-object (lock &optional flag)
  (%lock-recursive-lock-ptr (recursive-lock-ptr lock) lock flag))
;;; Futex primitives: thin wrappers over the kernel's lisp-futex
;;; import (FUTEX_WAIT / FUTEX_WAKE).  The lock word at P takes one
;;; of three values: FUTEX-AVAIL (0), FUTEX-LOCKED (1, uncontended),
;;; FUTEX-CONTENDED (2, at least one waiter).
#+futex
(progn
  #-monitor-futex-wait
  (defun futex-wait (p val whostate)
    (with-process-whostate (whostate)
      (int-errno-ffcall
       (%kernel-import target::kernel-import-lisp-futex)
       :address p :int FUTEX-WAIT :int val :address (%null-ptr) :address (%null-ptr) :int 0 :int)))
  ;; Instrumented variant: counts calls and total real time spent
  ;; blocked, for lock-contention profiling.
  #+monitor-futex-wait
  (progn
    (defparameter *total-futex-wait-calls* 0)
    (defparameter *total-futex-wait-times* 0)
    (defun futex-wait (p val whostate)
      (with-process-whostate (whostate)
        (let* ((start (get-internal-real-time)))
          (incf *total-futex-wait-calls*)
          (int-errno-ffcall
           (%kernel-import target::kernel-import-lisp-futex)
           :address p :int FUTEX-WAIT :int val :address (%null-ptr) :address (%null-ptr) :int 0 :int)
          (incf *total-futex-wait-times* (- (get-internal-real-time) start)))))))

;;; Wake up to N threads waiting on the futex word at P.
#+futex
(defun futex-wake (p n)
  (int-errno-ffcall (%kernel-import target::kernel-import-lisp-futex)
                    :address p :int FUTEX-WAKE :int n :address (%null-ptr) :address (%null-ptr) :int 0 :int))

;;; Acquire the futex at P.  Fast path: CAS avail->locked.  Slow path:
;;; mark the word contended (XCHGL) and FUTEX_WAIT (at WAIT-LEVEL
;;; interrupt level) until the exchange observes an available word.
;;; FWHOSTATE, if non-NIL, maps LOCK to a whostate string for display.
#+futex
(defun %lock-futex (p wait-level lock fwhostate)
  (let* ((val (%ptr-store-conditional p futex-avail futex-locked)))
    (declare (fixnum val))
    (or (eql val futex-avail)
        (loop
          (if (eql val futex-contended)
            (let* ((*interrupt-level* wait-level))
              (futex-wait p val (if fwhostate (funcall fwhostate lock) "futex wait")))
            (setq val futex-contended))
          (when (eql futex-avail (xchgl val p))
            (return t))))))

;;; Release the futex at P.  If the atomic decrement doesn't bring the
;;; word to FUTEX-AVAIL, the lock was contended: force it available
;;; and wake all waiters.
#+futex
(defun %unlock-futex (p)
  (unless (eql futex-avail (%atomic-decf-ptr p))
    (setf (%get-natural p target::lockptr.avail) futex-avail)
    (futex-wake p #$INT_MAX)))
;;; Non-blocking lock attempt (semaphore implementation): succeeds on
;;; re-entry or when this thread is the first to bump lockptr.avail;
;;; never waits.  Returns T on success, NIL otherwise.
#-futex
(defun %try-recursive-lock-object (lock &optional flag)
  (let* ((ptr (recursive-lock-ptr lock)))
    (with-macptrs ((p)
                   (owner (%get-ptr ptr target::lockptr.owner))
                   (spin (%inc-ptr ptr target::lockptr.spinlock)))
      (%setf-macptr-to-object p (%current-tcr))
      (if flag
        (if (istruct-typep flag 'lock-acquisition)
          (setf (lock-acquisition.status flag) nil)
          (report-bad-arg flag 'lock-acquisition)))
      (without-interrupts
       (cond ((eql p owner)
              (incf (%get-natural ptr target::lockptr.count))
              (if flag (setf (lock-acquisition.status flag) t))
              t)
             (t
              (let* ((win nil))
                (%get-spin-lock spin)
                (when (setq win (eql 1 (incf (%get-natural ptr target::lockptr.avail))))
                  (setf (%get-ptr ptr target::lockptr.owner) p
                        (%get-natural ptr target::lockptr.count) 1)
                  (if flag (setf (lock-acquisition.status flag) t)))
                (setf (%get-ptr spin) (%null-ptr))
                win)))))))

;;; Non-blocking lock attempt (futex implementation): succeeds on
;;; re-entry or when the CAS avail->locked wins; never waits.
#+futex
(defun %try-recursive-lock-object (lock &optional flag)
  (let* ((self (%current-tcr))
         (ptr (recursive-lock-ptr lock)))
    (declare (fixnum self))
    (if flag
      (if (istruct-typep flag 'lock-acquisition)
        (setf (lock-acquisition.status flag) nil)
        (report-bad-arg flag 'lock-acquisition)))
    (without-interrupts
     (cond ((eql (%get-object ptr target::lockptr.owner) self)
            (incf (%get-natural ptr target::lockptr.count))
            (if flag (setf (lock-acquisition.status flag) t))
            t)
           (t
            ;; 0 = FUTEX-AVAIL: the CAS succeeded and we now own it.
            (when (eql 0 (%ptr-store-conditional ptr futex-avail futex-locked))
              (%set-object ptr target::lockptr.owner self)
              (setf (%get-natural ptr target::lockptr.count) 1)
              (if flag (setf (lock-acquisition.status flag) t))
              t))))))

;;; Release one recursion level (semaphore implementation).  When the
;;; count reaches 0 the lock is freed and every pending acquirer is
;;; signalled; they re-race for ownership.  Signals NOT-LOCK-OWNER if
;;; the caller does not hold the lock.
#-futex
(defun %unlock-recursive-lock-ptr (ptr lock)
  (with-macptrs ((signal (%get-ptr ptr target::lockptr.signal))
                 (spin (%inc-ptr ptr target::lockptr.spinlock)))
    (unless (eql (%get-object ptr target::lockptr.owner) (%current-tcr))
      (error 'not-lock-owner :lock lock))
    (without-interrupts
     (when (eql 0 (decf (the fixnum
                          (%get-natural ptr target::lockptr.count))))
       (%get-spin-lock spin)
       (setf (%get-ptr ptr target::lockptr.owner) (%null-ptr))
       ;; Pending = extra avail increments plus registered waiters.
       (let* ((pending (+ (the fixnum
                            (1- (the fixnum (%get-fixnum ptr target::lockptr.avail))))
                          (the fixnum (%get-fixnum ptr target::lockptr.waiting)))))
         (declare (fixnum pending))
         (setf (%get-natural ptr target::lockptr.avail) 0
               (%get-natural ptr target::lockptr.waiting) 0)
         (setf (%get-ptr spin) (%null-ptr))
         (dotimes (i pending)
           (%signal-semaphore-ptr signal)))))
    nil))

;;; Release one recursion level (futex implementation).
#+futex
(defun %unlock-recursive-lock-ptr (ptr lock)
  (unless (eql (%get-object ptr target::lockptr.owner) (%current-tcr))
    (error 'not-lock-owner :lock lock))
  (without-interrupts
   (when (eql 0 (decf (the fixnum
                        (%get-natural ptr target::lockptr.count))))
     (setf (%get-natural ptr target::lockptr.owner) 0)
     (%unlock-futex ptr)))
  nil)

;;; Unlock a RECURSIVE-LOCK object (public entry point).
(defun %unlock-recursive-lock-object (lock)
  (%unlock-recursive-lock-ptr (%svref lock target::lock._value-cell) lock))
;;; The docstring of this function had been truncated to a stray
;;; fragment in the source; restored here.
(defun %%lock-owner (lock)
  "Return the process that currently owns the recursive lock LOCK, or
NIL if it is not held.  Intended for debugging only: ownership can
change at any time while this code is running."
  (let* ((tcr (%get-object (recursive-lock-ptr lock) target::lockptr.owner)))
    (unless (zerop tcr)
      (tcr->process tcr))))
;;; Compare-and-swap the CAR of CONS-CELL from OLD to NEW.
(defun %rplaca-conditional (cons-cell old new)
  (%store-node-conditional target::cons.car cons-cell old new))

;;; Compare-and-swap the CDR of CONS-CELL from OLD to NEW.
(defun %rplacd-conditional (cons-cell old new)
  (%store-node-conditional target::cons.cdr cons-cell old new))
;;; Atomically push NEW onto the list in the Ith cell of uvector V.
;;; CAS-loop push: cons a fresh cell, then retry storing it as the new
;;; head until the compare-and-swap of slot I succeeds.  Returns the
;;; new cell.
(defun atomic-push-uvector-cell (v i new)
  (let* ((cell (cons new nil))
         (offset (+ target::misc-data-offset (ash i target::word-shift))))
    (loop
      (let* ((old (%svref v i)))
        (rplacd cell old)
        (when (%store-node-conditional offset v old cell)
          (return cell))))))

;;; CAS-loop pop from the list in slot I of V.  Returns
;;; (VALUES ITEM FOUNDP); FOUNDP is NIL when the list was empty.
(defun atomic-pop-uvector-cell (v i)
  (let* ((offset (+ target::misc-data-offset (ash i target::word-shift))))
    (loop
      (let* ((old (%svref v i)))
        (if (null old)
          (return (values nil nil))
          (let* ((tail (cdr old)))
            (when (%store-node-conditional offset v old tail)
              (return (values (car old) t)))))))))

;;; Compare-and-swap slot INDEX of GVECTOR from OLD to NEW.
(defun store-gvector-conditional (index gvector old new)
  (declare (index index))
  (%store-node-conditional (the fixnum
                             (+ target::misc-data-offset
                                (the fixnum (ash index target::word-shift))))
                           gvector
                           old
                           new))

;;; Atomically increment the CAR of CELL by BY (a fixnum).
(defun %atomic-incf-car (cell &optional (by 1))
  (%atomic-incf-node (require-type by 'fixnum)
                     (require-type cell 'cons)
                     target::cons.car))

;;; Atomically increment the CDR of CELL by BY (a fixnum).
(defun %atomic-incf-cdr (cell &optional (by 1))
  (%atomic-incf-node (require-type by 'fixnum)
                     (require-type cell 'cons)
                     target::cons.cdr))

;;; Atomically increment slot I of gvector V by BY.
(defun %atomic-incf-gvector (v i &optional (by 1))
  (setq v (require-type v 'gvector))
  (setq i (require-type i 'fixnum))
  (%atomic-incf-node by v (+ target::misc-data-offset (ash i target::word-shift))))

;;; Atomically increment the (dynamic) value of symbol S by BY.
(defun %atomic-incf-symbol-value (s &optional (by 1))
  (setq s (require-type s 'symbol))
  (multiple-value-bind (base offset) (%symbol-binding-address s)
    (%atomic-incf-node by base offset)))
;;; Semaphore-based write-lock.  rwlock.state > 0 means write-locked
;;; (re-entrantly, by rwlock.writer); 0 means free; < 0 means
;;; read-locked.  Re-entry by the owning writer bumps the state;
;;; otherwise the caller spins/waits on the writer semaphore until the
;;; state is 0, then claims ownership.  FLAG, when supplied, must be a
;;; LOCK-ACQUISITION whose status is set once the lock is held.
#-futex
(defun %write-lock-rwlock-ptr (ptr lock &optional flag)
  (with-macptrs ((write-signal (%get-ptr ptr target::rwlock.writer-signal)) )
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (let* ((level *interrupt-level*)
           (tcr (%current-tcr)))
      (declare (fixnum tcr))
      (without-interrupts
       ;; Take the rwlock's spinlock before touching its state.  (This
       ;; line was garbled in the source; restored to match the release
       ;; sites below, which clear rwlock.spin.)
       (%get-spin-lock (%inc-ptr ptr target::rwlock.spin))
       (if (eq (%get-object ptr target::rwlock.writer) tcr)
         (progn
           (incf (%get-signed-natural ptr target::rwlock.state))
           (setf (%get-natural ptr target::rwlock.spin) 0)
           (if flag
             (setf (lock-acquisition.status flag) t))
           t)
         (do* ()
              ((eql 0 (%get-signed-natural ptr target::rwlock.state))
               (setf (%get-signed-natural ptr target::rwlock.state) 1
                     (%get-natural ptr target::rwlock.spin) 0)
               (%set-object ptr target::rwlock.writer tcr)
               (if flag
                 (setf (lock-acquisition.status flag) t))
               t)
           (incf (%get-natural ptr target::rwlock.blocked-writers))
           (setf (%get-natural ptr target::rwlock.spin) 0)
           (let* ((*interrupt-level* level))
             (%process-wait-on-semaphore-ptr write-signal 1 0 (rwlock-write-whostate lock)))
           (%get-spin-lock ptr)))))))
;;; Futex-based write-lock.  The rwlock's own futex word guards its
;;; state; blocked writers FUTEX_WAIT on the separate writer-signal
;;; word (set to -1 while the lock is held for writing).
#+futex
(defun %write-lock-rwlock-ptr (ptr lock &optional flag)
  (with-macptrs ((write-signal (%INC-ptr ptr target::rwlock.writer-signal)) )
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (let* ((level *interrupt-level*)
           (tcr (%current-tcr)))
      (declare (fixnum tcr))
      (without-interrupts
       (%lock-futex ptr level lock nil)
       (if (eq (%get-object ptr target::rwlock.writer) tcr)
         ;; Re-entrant write lock: just bump the state.
         (progn
           (incf (%get-signed-natural ptr target::rwlock.state))
           (%unlock-futex ptr)
           (if flag
             (setf (lock-acquisition.status flag) t))
           t)
         (do* ()
              ((eql 0 (%get-signed-natural ptr target::rwlock.state))
               (setf (%get-signed-natural ptr target::rwlock.state) 1)
               (setf (%get-signed-long write-signal) -1)
               (%unlock-futex ptr)
               (%set-object ptr target::rwlock.writer tcr)
               (if flag
                 (setf (lock-acquisition.status flag) t))
               t)
           ;; Lock busy: register as a blocked writer, drop the state
           ;; futex, and wait for the writer-signal word to change.
           (incf (%get-natural ptr target::rwlock.blocked-writers))
           (let* ((waitval -1))
             (%unlock-futex ptr)
             (with-process-whostate ((rwlock-write-whostate lock))
               (let* ((*interrupt-level* level))
                 (futex-wait write-signal waitval (rwlock-write-whostate lock)))))
           (%lock-futex ptr level lock nil)
           (decf (%get-natural ptr target::rwlock.blocked-writers)))))))) 

;;; Write-lock a READ-WRITE-LOCK object (public entry point).
(defun write-lock-rwlock (lock &optional flag)
  (%write-lock-rwlock-ptr (read-write-lock-ptr lock) lock flag))
;;; Semaphore-based read-lock.  rwlock.state <= 0 means free or
;;; read-locked; each reader decrements it further.  A thread that
;;; already holds the write lock signals DEADLOCK.  Waiting readers
;;; block on the reader semaphore.
#-futex
(defun %read-lock-rwlock-ptr (ptr lock &optional flag)
  (with-macptrs ((read-signal (%get-ptr ptr target::rwlock.reader-signal)))
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (let* ((level *interrupt-level*)
           (tcr (%current-tcr)))
      (declare (fixnum tcr))
      (without-interrupts
       ;; Take the rwlock's spinlock before touching its state.  (This
       ;; line was garbled in the source; restored to match the release
       ;; sites below, which clear rwlock.spin.)
       (%get-spin-lock (%inc-ptr ptr target::rwlock.spin))
       (if (eq (%get-object ptr target::rwlock.writer) tcr)
         (progn
           (setf (%get-natural ptr target::rwlock.spin) 0)
           (error 'deadlock :lock lock))
         (do* ((state
                (%get-signed-natural ptr target::rwlock.state)
                (%get-signed-natural ptr target::rwlock.state)))
              ((<= state 0)
               (setf (%get-signed-natural ptr target::rwlock.state)
                     (the fixnum (1- state))
                     (%get-natural ptr target::rwlock.spin) 0)
               (if flag
                 (setf (lock-acquisition.status flag) t))
               t)
           (declare (fixnum state))
           (incf (%get-natural ptr target::rwlock.blocked-readers))
           (setf (%get-natural ptr target::rwlock.spin) 0)
           (let* ((*interrupt-level* level))
             (%process-wait-on-semaphore-ptr read-signal 1 0 (rwlock-read-whostate lock)))
           (%get-spin-lock ptr)))))))
;;; Futex-based read-lock: each reader decrements rwlock.state below
;;; zero; blocked readers FUTEX_WAIT on the reader-signal word.
;;; Signals DEADLOCK if the caller already holds the write lock.
#+futex
(defun %read-lock-rwlock-ptr (ptr lock &optional flag)
  (with-macptrs ((reader-signal (%INC-ptr ptr target::rwlock.reader-signal)))
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (let* ((level *interrupt-level*)
           (tcr (%current-tcr)))
      (declare (fixnum tcr))
      (without-interrupts
       (%lock-futex ptr level lock nil)
       (if (eq (%get-object ptr target::rwlock.writer) tcr)
         (progn
           (%unlock-futex ptr)
           (error 'deadlock :lock lock))
         (do* ((state
                (%get-signed-natural ptr target::rwlock.state)
                (%get-signed-natural ptr target::rwlock.state)))
              ((<= state 0)
               (setf (%get-signed-natural ptr target::rwlock.state)
                     (the fixnum (1- state)))
               (%unlock-futex ptr)
               (if flag
                 (setf (lock-acquisition.status flag) t))
               t)
           (declare (fixnum state))
           ;; Write-locked: register as blocked reader, release the
           ;; state futex, and wait on the reader-signal word.
           (incf (%get-natural ptr target::rwlock.blocked-readers))
           (let* ((waitval -1))
             (%unlock-futex ptr)
             (let* ((*interrupt-level* level))
               (futex-wait reader-signal waitval (rwlock-read-whostate lock))))
           (%lock-futex ptr level lock nil)
           (decf (%get-natural ptr target::rwlock.blocked-readers))))))))

;;; Read-lock a READ-WRITE-LOCK object (public entry point).
(defun read-lock-rwlock (lock &optional flag)
  (%read-lock-rwlock-ptr (read-write-lock-ptr lock) lock flag))
;;; Semaphore-based unlock for a read-write lock: a writer's unlock
;;; decrements the (positive) state, a reader's increments the
;;; (negative) state.  Signals NOT-LOCK-OWNER / NOT-LOCKED for misuse.
;;; (A run of comment text inside this function had lost its ";;"
;;; markers in the source; restored as the comment below.)
#-futex
(defun %unlock-rwlock-ptr (ptr lock)
  (with-macptrs ((reader-signal (%get-ptr ptr target::rwlock.reader-signal))
                 (writer-signal (%get-ptr ptr target::rwlock.writer-signal)))
    (without-interrupts
     (%get-spin-lock ptr)
     (let* ((state (%get-signed-natural ptr target::rwlock.state))
            (tcr (%current-tcr)))
       (declare (fixnum state tcr))
       (cond ((> state 0)
              (unless (eql tcr (%get-object ptr target::rwlock.writer))
                (setf (%get-natural ptr target::rwlock.spin) 0)
                (error 'not-lock-owner :lock lock))
              (decf state))
             ((< state 0) (incf state))
             (t (setf (%get-natural ptr target::rwlock.spin) 0)
                (error 'not-locked :lock lock)))
       (setf (%get-signed-natural ptr target::rwlock.state) state)
       (when (zerop state)
         ;; The lock is now fully released: wake every blocked writer
         ;; and reader.  Waiters need to be able to wait interruptibly;
         ;; when a thread waits, it bumps the blocked-writers /
         ;; blocked-readers count.  It's generally (much) better to
         ;; signal the semaphore(s) here than to leave waiters blocked
         ;; -- at most one writer will actually succeed in taking the
         ;; lock.
         (setf (%get-signed-natural ptr target::rwlock.writer) 0)
         (let* ((nwriters (%get-natural ptr target::rwlock.blocked-writers))
                (nreaders (%get-natural ptr target::rwlock.blocked-readers)))
           (declare (fixnum nreaders nwriters))
           (when (> nwriters 0)
             (setf (%get-natural ptr target::rwlock.blocked-writers) 0)
             (dotimes (i nwriters)
               (%signal-semaphore-ptr writer-signal)))
           (when (> nreaders 0)
             (setf (%get-natural ptr target::rwlock.blocked-readers) 0)
             (dotimes (i nreaders)
               (%signal-semaphore-ptr reader-signal)))))
       (setf (%get-natural ptr target::rwlock.spin) 0)
       t))))
;;; Futex-based unlock for a read-write lock.  When the state reaches
;;; zero, wake blocked writers first (one can win the lock); only if
;;; there are none, wake all blocked readers.
#+futex
(defun %unlock-rwlock-ptr (ptr lock)
  (with-macptrs ((reader-signal (%INC-ptr ptr target::rwlock.reader-signal))
                 (writer-signal (%INC-ptr ptr target::rwlock.writer-signal)))
    (let* ((signal nil)
           (wakeup 0))
      (without-interrupts
       (%lock-futex ptr -1 lock nil)
       (let* ((state (%get-signed-natural ptr target::rwlock.state))
              (tcr (%current-tcr)))
         (declare (fixnum state tcr))
         (cond ((> state 0)
                (unless (eql tcr (%get-object ptr target::rwlock.writer))
                  (%unlock-futex ptr)
                  (error 'not-lock-owner :lock lock))
                (decf state))
               ((< state 0) (incf state))
               (t (%unlock-futex ptr)
                  (error 'not-locked :lock lock)))
         (setf (%get-signed-natural ptr target::rwlock.state) state)
         (when (zerop state)
           (setf (%get-signed-natural ptr target::rwlock.writer) 0)
           (let* ((nwriters (%get-natural ptr target::rwlock.blocked-writers))
                  (nreaders (%get-natural ptr target::rwlock.blocked-readers)))
             (declare (fixnum nreaders nwriters))
             ;; Prefer writers; wake one writer, else all readers.
             (if (> nwriters 0)
               (setq signal writer-signal wakeup 1)
               (if (> nreaders 0)
                 (setq signal reader-signal wakeup #$INT_MAX)))))
         (when signal (setf (%get-signed-long signal) 0))
         (%unlock-futex ptr)
         (when signal (futex-wake signal wakeup))
         t)))))

;;; Unlock a READ-WRITE-LOCK object (public entry point).
(defun unlock-rwlock (lock)
  (%unlock-rwlock-ptr (read-write-lock-ptr lock) lock))
;;; Try to promote LOCK from a read lock to a write lock.  If the
;;; caller is the only reader (state -1) the promotion is atomic;
;;; otherwise the read lock is released and a write lock re-acquired
;;; (not atomic -- other writers may run in between).  Signals
;;; NOT-LOCK-OWNER / NOT-LOCKED for misuse.
;;; Fix: ERROR was being called with keyword condition designators
;;; (:NOT-LOCK-OWNER / :NOT-LOCKED), which are not valid condition
;;; types; changed to the quoted symbols used by the other rwlock
;;; functions in this file.
(defun %promote-rwlock (lock &optional flag)
  (let* ((ptr (read-write-lock-ptr lock)))
    (if (istruct-typep flag 'lock-acquisition)
      (setf (lock-acquisition.status flag) nil)
      (if flag (report-bad-arg flag 'lock-acquisition)))
    (let* ((level *interrupt-level*)
           (tcr (%current-tcr)))
      (without-interrupts
       #+futex
       (%lock-futex ptr level lock nil)
       #-futex
       (%get-spin-lock ptr)
       (let* ((state (%get-signed-natural ptr target::rwlock.state)))
         (declare (fixnum state))
         (cond ((> state 0)
                (unless (eql (%get-object ptr target::rwlock.writer) tcr)
                  #+futex
                  (%unlock-futex ptr)
                  #-futex
                  (setf (%get-natural ptr target::rwlock.spin) 0)
                  (error 'not-lock-owner :lock lock)))
               ((= state 0)
                #+futex (%unlock-futex ptr)
                #-futex (setf (%get-natural ptr target::rwlock.spin) 0)
                (error 'not-locked :lock lock))
               (t
                (if (= state -1)
                  ;; Sole reader: flip directly to write-locked.
                  (progn
                    (setf (%get-signed-natural ptr target::rwlock.state) 1)
                    (%set-object ptr target::rwlock.writer tcr)
                    #+futex
                    (%unlock-futex ptr)
                    #-futex
                    (setf (%get-natural ptr target::rwlock.spin) 0)
                    (if flag
                      (setf (lock-acquisition.status flag) t))
                    t)
                  ;; Other readers present: drop our read lock and
                  ;; contend for the write lock normally.
                  (progn
                    #+futex
                    (%unlock-futex ptr)
                    #-futex
                    (setf (%get-natural ptr target::rwlock.spin) 0)
                    (%unlock-rwlock-ptr ptr lock)
                    (let* ((*interrupt-level* level))
                      (%write-lock-rwlock-ptr ptr lock flag)))))))))))
;;; Copy the pointer P into DEST -- a fresh null pointer when DEST is
;;; not supplied -- after verifying that a supplied DEST is a MACPTR.
;;; Returns DEST.  (The source had one extra, unbalanced closing paren
;;; on the final line; removed.)
(defun safe-get-ptr (p &optional dest)
  (if (null dest)
    (setq dest (%null-ptr))
    (unless (typep dest 'macptr)
      (check-type dest macptr)))
  (%safe-get-ptr p dest))
|
0532b910e1a55b0647217ca4c02c9b9ec10e4d823d4188c4f88512098faebe10 | svenpanne/EOPL3 | exercise-3-05.rkt | #lang eopl
; ------------------------------------------------------------------------------
Exercise 3.5
; A bit incomplete due to space restrictions:
;
( value - of < < x > > ( value - of < < 1 > >
[ y=(num - val 2),x=(num - val 7)]rho0 ) = ( 7 ) [ y=(num - val 2),x=(num - val 7)]rho0 ) = ( 1 )
; ----------------------------------------------------------------------------------------------------------------------
; (value-of <<-(x,1)>>
[ y=(num - val 2),x=(num - val 7)]rho0 ) = ( 6 ) ... ...
; -------------------------------------------------------------- -------------------------------------------------------------------------
; (value-of <<let x = -(x,1) in -(x,y)>> (value-of <<-(-(x,8),y)>>
[ y=(num - val 2),x=(num - val 7)]rho0 ) = ( 4 ) [ y=(num - val 4),y=(num - val 2),x=(num - val 7)]rho0 ) = ( -5 )
; ------------------------------------------------------------------------------------------------------------------------------------------
; (value-of <<let y = let x = -(x,1) in -(x,y)
( value - of < < 2 > > in -(-(x,8),y ) > >
[ 7)]rho0 ) = ( 2 ) [ y=(num - val 2),x=(num - val 7)]rho0 ) = ( -5 )
; ----------------------------------------------------------------------------------------------------------
( value - of < < let y = 2
; in let y = let x = -(x,1) in -(x,y)
( value - of < < 7 > > in -(-(x,8),y ) > >
rho0 ) = ( 7 ) [ 7)]rho0 ) = ( -5 )
; -----------------------------------------------------------------------------
( value - of < < let x = 7
in let y = 2
; in let y = let x = -(x,1) in -(x,y)
; in -(-(x,8),y)>>
rho0 ) = ( -5 )
| null | https://raw.githubusercontent.com/svenpanne/EOPL3/3fc14c4dbb1c53a37bd67399eba34cea8f8234cc/chapter3/exercise-3-05.rkt | racket | ------------------------------------------------------------------------------
A bit incomplete due to space restrictions:
----------------------------------------------------------------------------------------------------------------------
(value-of <<-(x,1)>>
-------------------------------------------------------------- -------------------------------------------------------------------------
(value-of <<let x = -(x,1) in -(x,y)>> (value-of <<-(-(x,8),y)>>
------------------------------------------------------------------------------------------------------------------------------------------
(value-of <<let y = let x = -(x,1) in -(x,y)
----------------------------------------------------------------------------------------------------------
in let y = let x = -(x,1) in -(x,y)
-----------------------------------------------------------------------------
in let y = let x = -(x,1) in -(x,y)
in -(-(x,8),y)>> | #lang eopl
Exercise 3.5
( value - of < < x > > ( value - of < < 1 > >
[ y=(num - val 2),x=(num - val 7)]rho0 ) = ( 7 ) [ y=(num - val 2),x=(num - val 7)]rho0 ) = ( 1 )
[ y=(num - val 2),x=(num - val 7)]rho0 ) = ( 6 ) ... ...
[ y=(num - val 2),x=(num - val 7)]rho0 ) = ( 4 ) [ y=(num - val 4),y=(num - val 2),x=(num - val 7)]rho0 ) = ( -5 )
( value - of < < 2 > > in -(-(x,8),y ) > >
[ 7)]rho0 ) = ( 2 ) [ y=(num - val 2),x=(num - val 7)]rho0 ) = ( -5 )
( value - of < < let y = 2
( value - of < < 7 > > in -(-(x,8),y ) > >
rho0 ) = ( 7 ) [ 7)]rho0 ) = ( -5 )
( value - of < < let x = 7
in let y = 2
rho0 ) = ( -5 )
|
099a0b5595daee59f7cbf14607411428354dc71530feee300d918b9dccd80c35 | clj-kafka/franzy | project.clj | (defproject clj-kafka.franzy/json "0.0.0"
:description "A Kafka Serializer/Deserializer supporting JSON, and an add-on for Franzy, a Clojure Kafka client."
:dependencies [[org.clojure/clojure "1.8.0"]
[org.apache.kafka/kafka-clients "0.11.0.0"]
[cheshire "5.5.0"]]
:monolith/inherit true
:middleware [leiningen.v/dependency-version-from-scm
leiningen.v/version-from-scm
leiningen.v/add-workspace-data]
:plugins
[[lein-monolith "1.0.1"]
[com.roomkey/lein-v "6.2.0"]
])
| null | https://raw.githubusercontent.com/clj-kafka/franzy/6c2e2e65ad137d2bcbc04ff6e671f97ea8c0e380/json/project.clj | clojure | (defproject clj-kafka.franzy/json "0.0.0"
:description "A Kafka Serializer/Deserializer supporting JSON, and an add-on for Franzy, a Clojure Kafka client."
:dependencies [[org.clojure/clojure "1.8.0"]
[org.apache.kafka/kafka-clients "0.11.0.0"]
[cheshire "5.5.0"]]
:monolith/inherit true
:middleware [leiningen.v/dependency-version-from-scm
leiningen.v/version-from-scm
leiningen.v/add-workspace-data]
:plugins
[[lein-monolith "1.0.1"]
[com.roomkey/lein-v "6.2.0"]
])
| |
cc39dd497256a9fc841784e1d72400a8df8a8306a75728bd7f6cd589c287d4d9 | staples-sparx/kits | log_consumer.clj | (ns ^{:doc "Internal namespace. This spawns agents that writes log messages to file and rotates them"}
kits.logging.log-consumer
(:require
[kits.runtime :as runtime]
[kits.queues :as q]
[kits.logging.log-generator :as log])
(:import
(java.util Calendar TimeZone)
(java.io FileWriter Writer IOException)))
(set! *warn-on-reflection* true)
(def utc-tz (TimeZone/getTimeZone "UTC"))
(defmacro _+ [a b] `(unchecked-add (long ~a) (long ~b)))
(defmacro _- [a b] `(unchecked-subtract (long ~a) (long ~b)))
(defmacro _* [a b] `(unchecked-multiply (long ~a) (long ~b)))
(defn ms-time
" Returns number of milli-seconds since the epoch"
[]
(System/currentTimeMillis))
(defn resilient-close
"Close a writer ensuring that no exception can be triggered and we do not write anything to disk."
[^Writer writer error-callback]
(when writer
(try
(.close writer)
(catch Exception e
(error-callback e)))))
(defn resilient-flush
"Flush 'writer', but ignore any java.io.Exception. Useful to avoid
killing a logging loop when file system is full for instance."
[^Writer writer error-callback]
(try
(.flush writer)
(catch IOException e
(error-callback e))))
(defn resilient-write
"Write 'data' using 'writer', but ignore any java.io.Exception. Useful
to avoid killing a logging loop when file system is full for
instance."
[^Writer writer ^String data error-callback]
(try
(.write writer data)
(catch IOException e
(error-callback e))))
(defn utc-cal-at
"Returns a UTC Java calendar set at a specific point-of-time (timestamp in ms)"
[ts]
(doto (Calendar/getInstance)
(.setTimeZone utc-tz)
(.setTimeInMillis ts)))
(defn round-up-ts
"Return a timestamp rounded up to the next 'n' minutes"
[ts n-minutes]
(let [c ^Calendar(utc-cal-at ts)
min (.get c Calendar/MINUTE)
n (Math/floor (/ min n-minutes))
rounded-down (.getTimeInMillis
(doto ^Calendar c
(.set Calendar/MINUTE (* n n-minutes))))]
(_+ rounded-down (_* n-minutes 60000))))
(defn stdout [log-line]
(print log-line)
(flush))
(defn make-log-rotate-loop
"Build a loop that can be used in a thread pool to log entry with a
very high-troughput rate. Code is quite ugly but by lazily rotating
and flushing the writer we achieve very troughput."
[{:keys [queue compute-file-name formatter io-error-handler conf]}]
(let [{:keys [queue-timeout-ms rotate-every-minute max-unflushed max-elapsed-unflushed-ms]} conf
compute-next-rotate-at (fn [now] (round-up-ts now rotate-every-minute))
log-file-for (fn [ts]
(let [path (compute-file-name conf (runtime/thread-id) ts)]
{:path path
:writer (FileWriter. ^String path true)}))]
(fn [thread-name args]
(try
(let [now (ms-time)
rotate-at (compute-next-rotate-at now)
{:keys [path writer]} (log-file-for rotate-at)]
(stdout (log/info "log-consumer::make-log-rotate-loop"
(str "Log file: " path)))
(stdout (log/info "log-consumer::make-log-rotate-loop"
"Starting fetch loop for logging queue..."))
(loop [last-flush-at now
unflushed 0
rotate-at rotate-at
writer writer]
(let [msg (q/fetch queue queue-timeout-ms)
now (ms-time)]
;; Check whether we should rotate the logs
(let [rotate? (> now rotate-at)
rotate-at (if-not rotate?
rotate-at
(compute-next-rotate-at now))
writer (if-not rotate?
writer
(do
(resilient-close writer io-error-handler)
(:writer (log-file-for rotate-at))))]
(if-not msg
;; Check whether we should flush and get back to business
(if (and
(pos? unflushed)
(> (- now last-flush-at) max-elapsed-unflushed-ms))
(do
(stdout (log/info "log-consumer::make-log-rotate-loop"
"Flush inactive"))
(resilient-flush ^FileWriter writer io-error-handler)
(recur (ms-time) 0 rotate-at writer))
(recur last-flush-at unflushed rotate-at writer))
;; Write log entry, flushing lazily
(do
(resilient-write writer (formatter msg) io-error-handler)
(if (or (> (- now last-flush-at) max-elapsed-unflushed-ms)
(> unflushed max-unflushed))
(do
(stdout (log/info "log-consumer::make-log-rotate-loop" "Flush"))
(resilient-flush writer io-error-handler)
(recur (ms-time) 0 rotate-at writer))
(recur last-flush-at (inc unflushed) rotate-at writer))))))))
(catch Exception e
(stdout (log/error
"log-consumer::make-log-rotate-loop" "Exception in logging" e)))))))
| null | https://raw.githubusercontent.com/staples-sparx/kits/66ae99bce83e8fd1248cc5c0da1f23673f221073/src/clojure/kits/logging/log_consumer.clj | clojure | Check whether we should rotate the logs
Check whether we should flush and get back to business
Write log entry, flushing lazily | (ns ^{:doc "Internal namespace. This spawns agents that writes log messages to file and rotates them"}
kits.logging.log-consumer
(:require
[kits.runtime :as runtime]
[kits.queues :as q]
[kits.logging.log-generator :as log])
(:import
(java.util Calendar TimeZone)
(java.io FileWriter Writer IOException)))
(set! *warn-on-reflection* true)
(def utc-tz (TimeZone/getTimeZone "UTC"))
(defmacro _+ [a b] `(unchecked-add (long ~a) (long ~b)))
(defmacro _- [a b] `(unchecked-subtract (long ~a) (long ~b)))
(defmacro _* [a b] `(unchecked-multiply (long ~a) (long ~b)))
(defn ms-time
" Returns number of milli-seconds since the epoch"
[]
(System/currentTimeMillis))
(defn resilient-close
"Close a writer ensuring that no exception can be triggered and we do not write anything to disk."
[^Writer writer error-callback]
(when writer
(try
(.close writer)
(catch Exception e
(error-callback e)))))
(defn resilient-flush
"Flush 'writer', but ignore any java.io.Exception. Useful to avoid
killing a logging loop when file system is full for instance."
[^Writer writer error-callback]
(try
(.flush writer)
(catch IOException e
(error-callback e))))
(defn resilient-write
"Write 'data' using 'writer', but ignore any java.io.Exception. Useful
to avoid killing a logging loop when file system is full for
instance."
[^Writer writer ^String data error-callback]
(try
(.write writer data)
(catch IOException e
(error-callback e))))
(defn utc-cal-at
"Returns a UTC Java calendar set at a specific point-of-time (timestamp in ms)"
[ts]
(doto (Calendar/getInstance)
(.setTimeZone utc-tz)
(.setTimeInMillis ts)))
(defn round-up-ts
"Return a timestamp rounded up to the next 'n' minutes"
[ts n-minutes]
(let [c ^Calendar(utc-cal-at ts)
min (.get c Calendar/MINUTE)
n (Math/floor (/ min n-minutes))
rounded-down (.getTimeInMillis
(doto ^Calendar c
(.set Calendar/MINUTE (* n n-minutes))))]
(_+ rounded-down (_* n-minutes 60000))))
(defn stdout [log-line]
(print log-line)
(flush))
(defn make-log-rotate-loop
"Build a loop that can be used in a thread pool to log entry with a
very high-troughput rate. Code is quite ugly but by lazily rotating
and flushing the writer we achieve very troughput."
[{:keys [queue compute-file-name formatter io-error-handler conf]}]
(let [{:keys [queue-timeout-ms rotate-every-minute max-unflushed max-elapsed-unflushed-ms]} conf
compute-next-rotate-at (fn [now] (round-up-ts now rotate-every-minute))
log-file-for (fn [ts]
(let [path (compute-file-name conf (runtime/thread-id) ts)]
{:path path
:writer (FileWriter. ^String path true)}))]
(fn [thread-name args]
(try
(let [now (ms-time)
rotate-at (compute-next-rotate-at now)
{:keys [path writer]} (log-file-for rotate-at)]
(stdout (log/info "log-consumer::make-log-rotate-loop"
(str "Log file: " path)))
(stdout (log/info "log-consumer::make-log-rotate-loop"
"Starting fetch loop for logging queue..."))
(loop [last-flush-at now
unflushed 0
rotate-at rotate-at
writer writer]
(let [msg (q/fetch queue queue-timeout-ms)
now (ms-time)]
(let [rotate? (> now rotate-at)
rotate-at (if-not rotate?
rotate-at
(compute-next-rotate-at now))
writer (if-not rotate?
writer
(do
(resilient-close writer io-error-handler)
(:writer (log-file-for rotate-at))))]
(if-not msg
(if (and
(pos? unflushed)
(> (- now last-flush-at) max-elapsed-unflushed-ms))
(do
(stdout (log/info "log-consumer::make-log-rotate-loop"
"Flush inactive"))
(resilient-flush ^FileWriter writer io-error-handler)
(recur (ms-time) 0 rotate-at writer))
(recur last-flush-at unflushed rotate-at writer))
(do
(resilient-write writer (formatter msg) io-error-handler)
(if (or (> (- now last-flush-at) max-elapsed-unflushed-ms)
(> unflushed max-unflushed))
(do
(stdout (log/info "log-consumer::make-log-rotate-loop" "Flush"))
(resilient-flush writer io-error-handler)
(recur (ms-time) 0 rotate-at writer))
(recur last-flush-at (inc unflushed) rotate-at writer))))))))
(catch Exception e
(stdout (log/error
"log-consumer::make-log-rotate-loop" "Exception in logging" e)))))))
|
e358dbea18f3dca93c89f9769c00b2f1dcbd1a3489bea70fa59328d2a8d59adb | simplex-chat/simplex-chat | Options.hs | # LANGUAGE ApplicativeDo #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
module Options where
import qualified Data.Attoparsec.ByteString.Char8 as A
import Data.Int (Int64)
import Data.Maybe (fromMaybe)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Options.Applicative
import Simplex.Chat.Controller (updateStr, versionNumber, versionString)
import Simplex.Chat.Options (ChatOpts (..), CoreChatOpts, coreChatOptsP)
import Simplex.Messaging.Parsers (parseAll)
import Simplex.Messaging.Util (safeDecodeUtf8)
data Publisher = Publisher
{ contactId :: Int64,
localDisplayName :: Text
}
deriving (Eq)
data BroadcastBotOpts = BroadcastBotOpts
{ coreOptions :: CoreChatOpts,
publishers :: [Publisher],
welcomeMessage :: String,
prohibitedMessage :: String
}
defaultWelcomeMessage :: [Publisher] -> String
defaultWelcomeMessage ps = "Hello! I am a broadcast bot.\nI broadcast messages to all connected users from " <> publisherNames ps <> "."
defaultProhibitedMessage :: [Publisher] -> String
defaultProhibitedMessage ps = "Sorry, only these users can broadcast messages: " <> publisherNames ps <> ". Your message is deleted."
publisherNames :: [Publisher] -> String
publisherNames = T.unpack . T.intercalate ", " . map (("@" <>) . localDisplayName)
broadcastBotOpts :: FilePath -> FilePath -> Parser BroadcastBotOpts
broadcastBotOpts appDir defaultDbFileName = do
coreOptions <- coreChatOptsP appDir defaultDbFileName
publishers <-
option
parsePublishers
( long "publishers"
<> metavar "PUBLISHERS"
<> help "Comma-separated list of publishers in the format CONTACT_ID:DISPLAY_NAME whose messages will be broadcasted"
<> value []
)
welcomeMessage_ <-
optional $
strOption
( long "welcome"
<> metavar "WELCOME"
<> help "Welcome message to be sent to all connecting users (default message will list allowed publishers)"
)
prohibitedMessage_ <-
optional $
strOption
( long "prohibited"
<> metavar "PROHIBITED"
<> help "Reply to non-publishers who try to send messages (default reply will list allowed publishers)"
<> showDefault
)
pure
BroadcastBotOpts
{ coreOptions,
publishers,
welcomeMessage = fromMaybe (defaultWelcomeMessage publishers) welcomeMessage_,
prohibitedMessage = fromMaybe (defaultProhibitedMessage publishers) prohibitedMessage_
}
parsePublishers :: ReadM [Publisher]
parsePublishers = eitherReader $ parseAll publishersP . encodeUtf8 . T.pack
publishersP :: A.Parser [Publisher]
publishersP = publisherP `A.sepBy1` A.char ','
where
publisherP = do
contactId <- A.decimal <* A.char ':'
localDisplayName <- safeDecodeUtf8 <$> A.takeTill (A.inClass ", ")
pure Publisher {contactId, localDisplayName}
getBroadcastBotOpts :: FilePath -> FilePath -> IO BroadcastBotOpts
getBroadcastBotOpts appDir defaultDbFileName =
execParser $
info
(helper <*> versionOption <*> broadcastBotOpts appDir defaultDbFileName)
(header versionStr <> fullDesc <> progDesc "Start chat bot with DB_FILE file and use SERVER as SMP server")
where
versionStr = versionString versionNumber
versionOption = infoOption versionAndUpdate (long "version" <> short 'v' <> help "Show version")
versionAndUpdate = versionStr <> "\n" <> updateStr
mkChatOpts :: BroadcastBotOpts -> ChatOpts
mkChatOpts BroadcastBotOpts {coreOptions} =
ChatOpts
{ coreOptions,
chatCmd = "",
chatCmdDelay = 3,
chatServerPort = Nothing,
optFilesFolder = Nothing,
allowInstantFiles = True,
maintenance = False
}
| null | https://raw.githubusercontent.com/simplex-chat/simplex-chat/01acbb970ae7762e1551e352131453e77a601764/apps/simplex-broadcast-bot/Options.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE ApplicativeDo #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
# LANGUAGE ScopedTypeVariables #
module Options where
import qualified Data.Attoparsec.ByteString.Char8 as A
import Data.Int (Int64)
import Data.Maybe (fromMaybe)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Options.Applicative
import Simplex.Chat.Controller (updateStr, versionNumber, versionString)
import Simplex.Chat.Options (ChatOpts (..), CoreChatOpts, coreChatOptsP)
import Simplex.Messaging.Parsers (parseAll)
import Simplex.Messaging.Util (safeDecodeUtf8)
data Publisher = Publisher
{ contactId :: Int64,
localDisplayName :: Text
}
deriving (Eq)
data BroadcastBotOpts = BroadcastBotOpts
{ coreOptions :: CoreChatOpts,
publishers :: [Publisher],
welcomeMessage :: String,
prohibitedMessage :: String
}
defaultWelcomeMessage :: [Publisher] -> String
defaultWelcomeMessage ps = "Hello! I am a broadcast bot.\nI broadcast messages to all connected users from " <> publisherNames ps <> "."
defaultProhibitedMessage :: [Publisher] -> String
defaultProhibitedMessage ps = "Sorry, only these users can broadcast messages: " <> publisherNames ps <> ". Your message is deleted."
publisherNames :: [Publisher] -> String
publisherNames = T.unpack . T.intercalate ", " . map (("@" <>) . localDisplayName)
broadcastBotOpts :: FilePath -> FilePath -> Parser BroadcastBotOpts
broadcastBotOpts appDir defaultDbFileName = do
coreOptions <- coreChatOptsP appDir defaultDbFileName
publishers <-
option
parsePublishers
( long "publishers"
<> metavar "PUBLISHERS"
<> help "Comma-separated list of publishers in the format CONTACT_ID:DISPLAY_NAME whose messages will be broadcasted"
<> value []
)
welcomeMessage_ <-
optional $
strOption
( long "welcome"
<> metavar "WELCOME"
<> help "Welcome message to be sent to all connecting users (default message will list allowed publishers)"
)
prohibitedMessage_ <-
optional $
strOption
( long "prohibited"
<> metavar "PROHIBITED"
<> help "Reply to non-publishers who try to send messages (default reply will list allowed publishers)"
<> showDefault
)
pure
BroadcastBotOpts
{ coreOptions,
publishers,
welcomeMessage = fromMaybe (defaultWelcomeMessage publishers) welcomeMessage_,
prohibitedMessage = fromMaybe (defaultProhibitedMessage publishers) prohibitedMessage_
}
parsePublishers :: ReadM [Publisher]
parsePublishers = eitherReader $ parseAll publishersP . encodeUtf8 . T.pack
publishersP :: A.Parser [Publisher]
publishersP = publisherP `A.sepBy1` A.char ','
where
publisherP = do
contactId <- A.decimal <* A.char ':'
localDisplayName <- safeDecodeUtf8 <$> A.takeTill (A.inClass ", ")
pure Publisher {contactId, localDisplayName}
getBroadcastBotOpts :: FilePath -> FilePath -> IO BroadcastBotOpts
getBroadcastBotOpts appDir defaultDbFileName =
execParser $
info
(helper <*> versionOption <*> broadcastBotOpts appDir defaultDbFileName)
(header versionStr <> fullDesc <> progDesc "Start chat bot with DB_FILE file and use SERVER as SMP server")
where
versionStr = versionString versionNumber
versionOption = infoOption versionAndUpdate (long "version" <> short 'v' <> help "Show version")
versionAndUpdate = versionStr <> "\n" <> updateStr
mkChatOpts :: BroadcastBotOpts -> ChatOpts
mkChatOpts BroadcastBotOpts {coreOptions} =
ChatOpts
{ coreOptions,
chatCmd = "",
chatCmdDelay = 3,
chatServerPort = Nothing,
optFilesFolder = Nothing,
allowInstantFiles = True,
maintenance = False
}
|
f66c36259141fc50fd6dbd6e3f6b1f268a04b42f7ee5a3f04cc59c885ef0f661 | manuel-serrano/bigloo | remove.scm | (module saw_remove
(import type_type ast_var ast_node
saw_lib
saw_defs
)
(export (remove::block b::block))
(include "SawMill/remove.sch")
(static (wide-class defcollect::block)
(wide-class rcollect::block)
(wide-class ucollect::block)
(wide-class removed::rtl_ins)
(wide-class visited::block)
(wide-class bremoved::block)
(wide-class creg::rtl_reg defs nbuses) ))
(define (remove::block b::block) ;()
(make-def-use b)
(fix-remove (get-first-unused b) (get-first-removable b))
(let dfs ( (b b) )
(widen!::visited b)
(let ( (l (block-first b)) )
(let ( (nl (filter! (lambda (ins) (not (removed? ins))) l)) )
(if (null? nl)
(block-remove b)
(block-first-set! b nl) )))
(for-each (lambda (s) (if (not (or (visited? s) (bremoved? s))) (dfs s)))
(block-succs b) ))
(let find-entry ( (b b) )
(if (not (bremoved? b))
b
(let ( (s (car (block-succs b))) )
(if (eq? b s)
b
(find-entry s) )))))
(define (block-remove b::block) ;()
(with-access::block b (preds succs)
(if (or (null? succs) (pair? (cdr succs)))
(error 'unlink! "must have only one successor" b) )
(let ( (s (car succs)) )
(if (eq? b s)
pathological case of ( L0 ( ) )
(block-first-set! b (list (instantiate::rtl_ins
(fun (instantiate::rtl_nop))
(args '()) )))
(begin (widen!::bremoved b)
(for-each (lambda (p) (with-access::block p (succs)
(set! succs (subst succs b s)) ))
preds )
(let ( (npreds (subst-append (block-preds s) b preds)) )
(block-preds-set! s npreds) ))))))
(define (make-def-use b::block) ;()
(define (reg->creg r)
(if (not (creg? r))
(widen!::creg r (defs '()) (nbuses '0)) )
r )
(define (reg/read r)
(with-access::creg (reg->creg r) (nbuses)
(set! nbuses (+fx 1 nbuses)) ))
(define (reg/write r ins)
(with-access::creg (reg->creg r) (defs)
(set! defs (cons ins defs)) ))
(let defcollect ( (b b) )
(widen!::defcollect b)
(for-each (lambda (ins)
(with-access::rtl_ins ins (dest fun args)
(if dest (reg/write dest ins))
(for-each reg/read args) ))
(block-first b))
(for-each (lambda (s) (if (not (defcollect? s)) (defcollect s)))
(block-succs b) )))
(define (get-first-removable b::block) ;()
(let ( (rm '()) )
(let rcollect ( (b b) )
(widen!::rcollect b)
(for-each (lambda (ins) (if (removable? ins) (set! rm (cons ins rm))))
(block-first b) )
(for-each (lambda (s) (if (not (rcollect? s)) (rcollect s)))
(block-succs b) ))
rm ))
(define (get-first-unused b::block) ;()
(let ( (unused '()) )
(let ucollect ( (b b) )
(widen!::ucollect b)
(for-each (lambda (ins)
(with-access::rtl_ins ins (dest fun args)
;; Collect unused regs
(if (and dest
(=fx (creg-nbuses dest) 0)
(not (memq dest unused)) )
(set! unused (cons dest unused)) )))
(block-first b))
(for-each (lambda (s) (if (not (ucollect? s)) (ucollect s)))
(block-succs b) ))
unused ))
;;
;; fixpoint between useless functional call and useless variable
;;
(define (fix-remove unused rm) ;()
;; remove the unused functional call
(for-each (lambda (ins)
(widen!::removed ins)
(for-each (lambda (r)
(let ( (n (creg-nbuses r)) )
(creg-nbuses-set! r (-fx n 1))
(if (=fx n 1)
(set! unused (cons r unused)) )))
(rtl_ins-args ins) ))
rm )
(set! rm '())
;; remove the affectation of unused variables
(for-each (lambda (r)
(for-each (lambda (ins)
(rtl_ins-dest-set! ins #f)
(if (removable? ins) (set! rm (cons ins rm))) )
(creg-defs r) ))
unused )
;; fixpoint
(if (not (null? rm)) (fix-remove '() rm)) )
;;
;;
;;
(define (removable?::bool ins::rtl_ins) ;()
(with-access::rtl_ins ins (fun dest)
(and (not dest)
(rtl_pure? fun) )))
| null | https://raw.githubusercontent.com/manuel-serrano/bigloo/1ae5b060fcfd05ad33440765b45add3a26ced5b4/comptime/SawMill/remove.scm | scheme | ()
()
()
()
()
Collect unused regs
fixpoint between useless functional call and useless variable
()
remove the unused functional call
remove the affectation of unused variables
fixpoint
() | (module saw_remove
(import type_type ast_var ast_node
saw_lib
saw_defs
)
(export (remove::block b::block))
(include "SawMill/remove.sch")
(static (wide-class defcollect::block)
(wide-class rcollect::block)
(wide-class ucollect::block)
(wide-class removed::rtl_ins)
(wide-class visited::block)
(wide-class bremoved::block)
(wide-class creg::rtl_reg defs nbuses) ))
(make-def-use b)
(fix-remove (get-first-unused b) (get-first-removable b))
(let dfs ( (b b) )
(widen!::visited b)
(let ( (l (block-first b)) )
(let ( (nl (filter! (lambda (ins) (not (removed? ins))) l)) )
(if (null? nl)
(block-remove b)
(block-first-set! b nl) )))
(for-each (lambda (s) (if (not (or (visited? s) (bremoved? s))) (dfs s)))
(block-succs b) ))
(let find-entry ( (b b) )
(if (not (bremoved? b))
b
(let ( (s (car (block-succs b))) )
(if (eq? b s)
b
(find-entry s) )))))
(with-access::block b (preds succs)
(if (or (null? succs) (pair? (cdr succs)))
(error 'unlink! "must have only one successor" b) )
(let ( (s (car succs)) )
(if (eq? b s)
pathological case of ( L0 ( ) )
(block-first-set! b (list (instantiate::rtl_ins
(fun (instantiate::rtl_nop))
(args '()) )))
(begin (widen!::bremoved b)
(for-each (lambda (p) (with-access::block p (succs)
(set! succs (subst succs b s)) ))
preds )
(let ( (npreds (subst-append (block-preds s) b preds)) )
(block-preds-set! s npreds) ))))))
(define (reg->creg r)
(if (not (creg? r))
(widen!::creg r (defs '()) (nbuses '0)) )
r )
(define (reg/read r)
(with-access::creg (reg->creg r) (nbuses)
(set! nbuses (+fx 1 nbuses)) ))
(define (reg/write r ins)
(with-access::creg (reg->creg r) (defs)
(set! defs (cons ins defs)) ))
(let defcollect ( (b b) )
(widen!::defcollect b)
(for-each (lambda (ins)
(with-access::rtl_ins ins (dest fun args)
(if dest (reg/write dest ins))
(for-each reg/read args) ))
(block-first b))
(for-each (lambda (s) (if (not (defcollect? s)) (defcollect s)))
(block-succs b) )))
(let ( (rm '()) )
(let rcollect ( (b b) )
(widen!::rcollect b)
(for-each (lambda (ins) (if (removable? ins) (set! rm (cons ins rm))))
(block-first b) )
(for-each (lambda (s) (if (not (rcollect? s)) (rcollect s)))
(block-succs b) ))
rm ))
(let ( (unused '()) )
(let ucollect ( (b b) )
(widen!::ucollect b)
(for-each (lambda (ins)
(with-access::rtl_ins ins (dest fun args)
(if (and dest
(=fx (creg-nbuses dest) 0)
(not (memq dest unused)) )
(set! unused (cons dest unused)) )))
(block-first b))
(for-each (lambda (s) (if (not (ucollect? s)) (ucollect s)))
(block-succs b) ))
unused ))
(for-each (lambda (ins)
(widen!::removed ins)
(for-each (lambda (r)
(let ( (n (creg-nbuses r)) )
(creg-nbuses-set! r (-fx n 1))
(if (=fx n 1)
(set! unused (cons r unused)) )))
(rtl_ins-args ins) ))
rm )
(set! rm '())
(for-each (lambda (r)
(for-each (lambda (ins)
(rtl_ins-dest-set! ins #f)
(if (removable? ins) (set! rm (cons ins rm))) )
(creg-defs r) ))
unused )
(if (not (null? rm)) (fix-remove '() rm)) )
(with-access::rtl_ins ins (fun dest)
(and (not dest)
(rtl_pure? fun) )))
|
edbf357c6debb5fe435de35de10984d6721b1516cefa0c319790e6565f880d24 | AccelerationNet/clsql-helper | date.lisp |
(in-package :clsql-helper)
(cl-interpol:enable-interpol-syntax)
(clsql:file-enable-sql-reader-syntax)
(defun current-sql-date ()
  "Returns the current date (no time component) as produced by
   CLSQL-SYS:GET-DATE."
  (clsql-sys:get-date))
(defun current-sql-time ()
  "Returns the current date and time as produced by CLSQL-SYS:GET-TIME."
  (clsql-sys:get-time))
(defun print-nullable-date (field)
  "If FIELD is non-NIL, formats it as m?m/d?d/yyyy; NIL passes through.
   Strings are returned unchanged; DATE and WALL-TIME values are rendered
   with CLSQL:PRINT-DATE at :DAY resolution."
  (when field
    (typecase field
      (string field)
      ;; PRINT-DATE wants a WALL-TIME, so DATEs are converted first.
      (T (clsql:print-date
          (typecase field
            (clsql-sys:date (clsql-sys::date->time field))
            (clsql-sys:wall-time field))
          :day)))))
;; Print CLSQL dates as mm/dd/yyyy; when *PRINT-ESCAPE* is true the date is
;; wrapped in an unreadable #<...> form (with type and identity) instead.
(defmethod print-object ((o clsql-sys:date) stream)
  (let ((date (print-nullable-date o)))
    (if *print-escape*
        (print-unreadable-object (o stream :type T :identity T)
          (format stream "~A" date))
        (format stream "~A" date))))
(defgeneric date-day (d)
  (:documentation "Given an object that encodes a date (DATE, WALL-TIME,
   string, or integer universal-time), return the day-of-month component;
   NIL maps to NIL.")
  (:method (d)
    (etypecase d
      (clsql-sys:date
       ;; DATE-YMD returns (values year month day); take the third value.
       (third (multiple-value-list (clsql-sys:date-ymd d))))
      (clsql-sys:wall-time
       (third (multiple-value-list (clsql-sys:time-ymd d))))
      ((or string integer)
       ;; parse/coerce first, then recurse on the resulting WALL-TIME
       (date-day (convert-to-clsql-datetime d)))
      (null nil))))
(defgeneric date-year (d )
  (:documentation "Given an object that encodes a date (DATE, WALL-TIME,
   string, or integer universal-time), return the year component; NIL maps
   to NIL.")
  (:method (d)
    (etypecase d
      ;; DATE-YMD / TIME-YMD return (values year month day); only the
      ;; primary (year) value is propagated here.
      (clsql-sys:date (clsql-sys:date-ymd d))
      (clsql-sys:wall-time (clsql-sys:time-ymd d))
      ((or string integer)
       (date-year (convert-to-clsql-datetime d)))
      (null nil))))
(defgeneric date-month (d)
  (:documentation "Given an object that encodes a date (DATE, WALL-TIME,
   string, or integer universal-time), return the month component (1-12);
   NIL maps to NIL.")
  (:method (d)
    (etypecase d
      (clsql-sys:date
       ;; DATE-YMD returns (values year month day); take the second value.
       (second (multiple-value-list (clsql-sys:date-ymd d))))
      (clsql-sys:wall-time
       (second (multiple-value-list (clsql-sys:time-ymd d))))
      ((or string integer)
       (date-month (convert-to-clsql-datetime d)))
      (null nil))))
(defun month-string (d)
  "Returns the full month name (January, February, ...) for the date D,
   or NIL when no month can be determined."
  (let ((month (date-month d)))
    (and month (clsql-sys:month-name month))))
(defun month-day-string (d)
  "Formats a date as e.g. \"January 3\"; NIL when either part is missing.
   Note: LET binds in parallel, so both init-forms below see the caller's
   original D -- M is the month name of D, not of the day number."
  (let ((d (date-day d))
        (m (month-string d)))
    (when (and d m) #?"${m} ${d}")))
(defun print-nullable-datetime (field)
  "If FIELD is non-NIL, formats it as zero-padded mm/dd/yyyy hh:mm:ss;
   NIL passes through.  Strings are returned unchanged; everything else is
   coerced with CONVERT-TO-CLSQL-DATETIME first.  *PRINT-PRETTY* is bound
   to NIL so FORMAT output is not line-wrapped."
  (let ((*print-pretty* nil))
    (when field
      (typecase field
        (string field)
        (T (multiple-value-bind (usec second minute hour day month year)
               (clsql-sys:decode-time (convert-to-clsql-datetime field))
             (declare (ignore usec))
             (format nil "~2,'0d/~2,'0d/~4,'0d ~2,'0d:~2,'0d:~2,'0d"
                     month day year hour minute second)))))))
(defun print-timestamp (field)
  "If FIELD is non-NIL, formats it as yyyy-mm-dd hh:mm:ss.mmm; NIL passes
   through.  Despite decoding microseconds, only millisecond precision is
   printed: USEC is floored by 1000 and rendered as three digits."
  (let ((*print-pretty* nil))
    (when field
      (typecase field
        (string field)
        (T (multiple-value-bind (usec second minute hour day month year)
               (clsql-sys:decode-time (convert-to-clsql-datetime field))
             (format nil "~4,'0d-~2,'0d-~2,'0d ~2,'0d:~2,'0d:~2,'0d.~3,'0d"
                     year month day hour minute second (floor usec 1000))))))))
;; Print CLSQL wall-times as mm/dd/yyyy hh:mm:ss; when *PRINT-ESCAPE* is
;; true the value is wrapped in an unreadable #<...> form instead.
(defmethod print-object ((o clsql:wall-time) stream)
  (let ((date (print-nullable-datetime o)))
    (if *print-escape*
        (print-unreadable-object (o stream :type T :identity T)
          (format stream "~A" date))
        (format stream "~A" date))))
(defun clsql-datetime-equal (x y)
  "Tries to handle full datetime equality regardless of the format
   (string datestamp, date, datetime, utime)"
  ;; Normalize every representation to a WALL-TIME before comparing:
  ;; integers are treated as universal times, DATEs widen to midnight,
  ;; strings are parsed; anything else is compared as-is.
  (flet ((cast (x)
           (typecase x
             (integer (clsql-sys:utime->time x))
             (clsql-sys:date (clsql-sys::date->time x))
             (string (convert-to-clsql-datetime x))
             (T x))))
    (equalp (cast x) (cast y))))
;; Output knobs for ISO8601-DATESTAMP / ISO8601-TIMESTAMP:
;;  *iso8601-timezone*    - T appends "Z", a string is appended verbatim,
;;                          NIL appends no zone designator.
;;  *iso8601-microseconds* - when true, ".uuuuuu" (6 digits) is appended.
;;  the three separators control date-time, hh:mm:ss and y-m-d punctuation.
(defvar *iso8601-timezone* nil)
(defvar *iso8601-microseconds* nil)
(defvar *iso8601-date-time-separator* " ")
(defvar *iso8601-time-separator* ":")
(defvar *iso8601-date-separator* "-")
(defgeneric iso8601-datestamp (d)
  (:documentation "Given an object that encodes a date, return an ISO-8601
   datestamp string (yyyy-mm-dd with *ISO8601-DATE-SEPARATOR* between the
   fields); NIL maps to NIL.")
  (:method (d)
    (typecase d
      ((or clsql-sys:wall-time clsql-sys:date)
       (format nil "~4,'0D~A~2,'0D~A~2,'0D"
               (date-year d) *iso8601-date-separator* (date-month d)
               *iso8601-date-separator* (date-day d)))
      ;; strings / universal times are parsed, then formatted recursively
      ((or string integer) (iso8601-datestamp (convert-to-clsql-datetime d)))
      (null nil))))
(defgeneric iso8601-timestamp (d)
  (:documentation
   "Return an ISO-8601-style timestamp string for D, honoring the
    *ISO8601-...* separator/timezone/microsecond variables; NIL maps to NIL.

    CLSQL's own CLSQL:ISO-TIMESTAMP exists but is used only to write to
    database backends, so it is strictly formatted; this version trades
    strictness for configurable output.")
  (:method (d)
    (typecase d
      ((or clsql-sys:wall-time clsql-sys:date string integer)
       (multiple-value-bind (usec second minute hour day month year)
           (clsql-sys:decode-time (convert-to-clsql-datetime d))
         ;; Recursive format processing: each trailing ~? consumes a
         ;; (format-string args) pair from the argument list, which lets
         ;; the microseconds and timezone suffixes be spliced in (or left
         ;; out via an empty "" directive) at runtime.
         (apply
          #'format nil "~4,'0D~A~2,'0D~A~2,'0D~A~2,'0D~a~2,'0D~A~2,'0D~?~?"
          (nconc
           (list year *iso8601-date-separator* month
                 *iso8601-date-separator* day
                 *iso8601-date-time-separator*
                 hour *iso8601-time-separator*
                 minute *iso8601-time-separator*
                 second)
           (if *iso8601-microseconds*
               (list ".~6,'0D" (list usec))
               (list "" ()))
           (cond
             ((eql *iso8601-timezone* T) (list "~A" (list 'Z)))
             ((stringp *iso8601-timezone*) (list "~A" (list *iso8601-timezone*)))
             (T (list "" ())))))))
      (null nil))))
;; Field separator accepted between date/time components: / - . or :
(defparameter +date-sep+ "(?:/|-|\\.|:)")

;; US-style dates: m/d/y with optional h:m[:s][.frac] and optional AM/PM,
;; optionally wrapped in single or double quotes; case-insensitive.
(defparameter +date-time-regex+
  (cl-ppcre:create-scanner
   #?r"^(?:'|\")?(\d{1,2})${ +date-sep+ }(\d{1,2})${ +date-sep+ }(\d{2,4})(?:\s*(\d{1,2})${ +date-sep+ }(\d{1,2})(?:${ +date-sep+ }(\d{1,2}))?(?:\.(\d+))?\s*((?:a|p)m\.?)?)?(?:'|\")?"
   :case-insensitive-mode t))

;; ISO-8601-ish dates: y-m-d, optional "T" or whitespace before the time,
;; optional fractional seconds, AM/PM, and a zone suffix (Z, ",,0", or a
;; +/-hh[:]mm offset); optionally quoted.
(defparameter +iso-8601-ish-regex-string+
  #?r"^(?:'|\")?(\d{2,4})${ +date-sep+ }(\d{1,2})${ +date-sep+ }(\d{1,2})(?:(?:\s*|T)(\d{1,2})${ +date-sep+ }(\d{1,2})(?:${ +date-sep+ }(\d{1,2}))?(?:\.(\d+))?\s*((?:a|p)m\.?)?(?:Z|,,0|(?:-|\+)\d{1,2}:?\d{2}?)?)?(?:'|\")?")

(defparameter +iso-8601-ish-regex+
  (cl-ppcre:create-scanner +iso-8601-ish-regex-string+ :case-insensitive-mode t))
(defgeneric convert-to-clsql-datetime (val)
  (:documentation
   "Converts a string timestamp into a clsql date time object
    Makes every possible effort to understand your date that will invariably
    be in some format it wont understand.")
  (:method (val)
    (macrolet ((regex-date-to-clsql-date ()
                 "Pretty fugly variable capture, but what are you gonna do.
                  I have the exact same code twice with like 6 vars to pass"
                 ;; Captures h/m/s/mon/d/y/usec/am-pm from the enclosing
                 ;; REGISTER-GROUPS-BIND and builds a WALL-TIME:
                 ;;  - PM pushes hours below 12 forward by 12
                 ;;    NOTE(review): "12 AM" stays hour 12 (never 0) --
                 ;;    confirm that midnight handling is intended.
                 ;;  - two-digit years: <50 -> 20xx, <100 -> 19xx
                 ;;  - fractional seconds are right-padded to microseconds
                 ;;    (x 10^(6 - digit-count))
                 `(let ((hour (if (and h (< h 12)
                                       (string-equal am/pm "PM"))
                                  (+ 12 h)
                                  h))
                        (year (and y
                                   (cond
                                     ((< y 50) (+ y 2000))
                                     ((< y 100) (+ y 1900))
                                     (T y))))
                        (usec (when usec
                                (* (parse-integer usec)
                                   (expt 10 (- 6 (length usec)))))))
                    (clsql:make-time
                     :year year :month mon :day d
                     :hour (or hour 0) :minute (or m 0) :second (or s 0)
                     :usec (or usec 0)))))
      (typecase val
        (clsql:date (clsql-sys::date->time val))
        (clsql:wall-time val)
        (integer (clsql-sys::utime->time val))
        (string
         (or ; as best I can tell these just suck
          ;; (ignore-errors (clsql-sys:parse-date-time val))
          ;; (ignore-errors (clsql-sys:parse-timestring val))
          (cl-ppcre:register-groups-bind
              ((#'parse-integer mon d y h m s ) usec am/pm)
              (+date-time-regex+ val)
            (regex-date-to-clsql-date))
          (cl-ppcre:register-groups-bind
              ((#'parse-integer y mon d h m s) usec am/pm)
              (+iso-8601-ish-regex+ val)
            (regex-date-to-clsql-date)
            )))))))
(defmacro convert-to-clsql-datetime! (&rest places)
  "SETFs each PLACE to (CONVERT-TO-CLSQL-DATETIME place), in order."
  `(setf ,@(iter (for p in places)
                 (collect p)
                 (collect `(convert-to-clsql-datetime ,p)))))
(defgeneric convert-to-clsql-date (val)
  (:documentation "Convert your value into a clsql:date structure")
  (:method (val)
    (typecase val
      (null nil)
      (clsql:date val)
      (clsql-sys::wall-time (clsql-sys::time->date val))
      ;; anything else (string, integer, ...) goes through the datetime
      ;; parser first, then recurses to truncate the time component
      (t (convert-to-clsql-date (convert-to-clsql-datetime val))))))
(defmacro convert-to-clsql-date! (&rest places)
  "SETFs each PLACE to (CONVERT-TO-CLSQL-DATE place), in order."
  `(setf ,@(iter (for p in places)
                 (collect p)
                 (collect `(convert-to-clsql-date ,p)))))
(defun clsql-date/times->utime (obj &optional (timezone 0))
  "obj is either a wall-time or a date in local time. Converts to UTC and returns a utime.
   pass timezone nil to skip UTC conversion.

   if you are looking for the other it is clsql-sys:utime->time
  "
  ;; Common Lisp time zones are hours WEST of GMT, so the default 0 means
  ;; GMT/UTC; TIMEZONE = NIL makes ENCODE-UNIVERSAL-TIME use the host's
  ;; local zone instead.  Sub-second precision is discarded.
  (apply #'encode-universal-time
         (multiple-value-bind (usec second minute hour day month year)
             (clsql-sys:decode-time (convert-to-clsql-datetime obj))
           (declare (ignore usec))
           (list second minute hour day month year timezone))))
(defun last-of-the-month (start-date &aux (month (clsql-helper:date-month start-date)))
"Returns the first of next month eg: 2/14/2012->2/29/2012"
(iter
(for date from-date start-date)
(for yesterday previous date)
(while (eql month (clsql-helper:date-month date)))
(finally (return yesterday))))
(defun last-of-the-month? (start-date)
"Returns T if its the last day of the month"
(convert-to-clsql-date! start-date)
(not (eql
(clsql-helper:date-month start-date)
(clsql-helper:date-month
(clsql-sys:date+ start-date +a-day+)))))
(defun first-of-the-month (&optional (date (clsql-helper:current-sql-date)))
"returns the first of the month for the month/year of the date passed in"
(convert-to-clsql-date! date)
(clsql-sys:make-date :year (date-year date) :month (date-month date) :day 1))
(defun first-of-the-month? (&optional (date (clsql-helper:current-sql-date)))
"returns whether or not the date passed in is the first of the month"
(convert-to-clsql-date! date)
(= 1 (date-day date)))
(defun days-in-month (&optional (date (clsql-helper:current-sql-date)))
"Return the number of days in the month of the date passed in"
(date-day (last-of-the-month date)))
(defun day-before (&optional (date (clsql-helper:current-sql-date)))
(convert-to-clsql-date! date)
(when date (clsql:date- date +a-day+)))
(defun day-after (&optional (date (clsql-helper:current-sql-date)))
(convert-to-clsql-date! date)
(when date (clsql:date+ date +a-day+)))
(defun next-month (&optional (date (clsql-helper:current-sql-date))
&aux orig)
(convert-to-clsql-date! date)
(when date
(setf orig (clsql-helper:date-month date)
date (clsql-sys:date+ date +a-month+))
make sure we only go one month ( 1/31 + 1 - month = 3/3 )
(iter (while (= (clsql-helper:date-month date)
(+ 2 orig)))
(setf date (clsql-sys:date- date +a-day+)))
date))
(defun last-month (&optional (date (clsql-helper:current-sql-date))
&aux orig)
(convert-to-clsql-date! date)
(when date
(setf
orig (clsql-helper:date-month date)
date (clsql-sys:date- date +a-month+))
make sure we got into last month ( 3/31 - 1 - month = 3/3 )
(iter (while (= orig (clsql-helper:date-month date)))
(setf date (clsql-sys:date- date +a-day+)))
date))
(defun first-of-next-month (&optional (date (clsql-helper:current-sql-date)))
(convert-to-clsql-date! date)
(next-month (first-of-the-month date)))
(defun after-day-of-month (date day)
"Are we past a specific day of the month"
(> (date-day date) day))
(defun before-day-of-month (date day)
"Are we past a specific day of the month"
(< (date-day date) day))
(defun date-diff (d1 d2)
"Gets the difference in days between two dates
returns a negative number to indicate that d1 is after d2
returns a positive number to indicate that d2 is after d1"
(convert-to-clsql-date! d1 d2)
date - diff returns only days and seconds ( for times )
(let ((days (clsql-sys:duration-day (clsql-sys:date-difference d1 d2))))
(if (clsql:date< d1 d2)
days
(- days))))
(defun date-add (d dur)
(convert-to-clsql-date! d)
(when d
(clsql:date+ d (etypecase dur
(clsql:duration dur)
(integer (clsql:make-duration :day dur))))))
(defun dt< (&rest d)
(apply #'clsql:time< (mapcar #'convert-to-clsql-datetime d)))
(defun dt<= (&rest d)
(apply #'clsql:time<= (mapcar #'convert-to-clsql-datetime d)))
(defun dt> (&rest d)
(apply #'clsql:time> (mapcar #'convert-to-clsql-datetime d)))
(defun dt>= (&rest d)
(apply #'clsql:time>= (mapcar #'convert-to-clsql-datetime d)))
| null | https://raw.githubusercontent.com/AccelerationNet/clsql-helper/846b67a26906da5ea2cff790a36a4cb2f496a528/date.lisp | lisp | oh yeah, we love recursive format processing
as best I can tell these just suck |
(in-package :clsql-helper)
(cl-interpol:enable-interpol-syntax)
(clsql:file-enable-sql-reader-syntax)
(defun current-sql-date ()
  "Return the current date (no time component) as a CLSQL date object."
  (clsql-sys:get-date))
(defun current-sql-time ()
  "Return the current date and time as a CLSQL wall-time object."
  (clsql-sys:get-time))
(defun print-nullable-date (field)
  "if the date exists, prints m?m/d?d/yyyy; NIL and strings pass through unchanged"
  (when field
    (typecase field
      (string field) ; assume already formatted
      (T (clsql:print-date
          ;; clsql:print-date wants a wall-time, so promote a date first
          (typecase field
            (clsql-sys:date (clsql-sys::date->time field))
            (clsql-sys:wall-time field))
          :day)))))
;; Make clsql dates print as mm/dd/yyyy; when *print-escape* is true the
;; date is wrapped in the usual unreadable #<...> shell.
(defmethod print-object ((o clsql-sys:date) stream)
  (let ((date (print-nullable-date o)))
    (if *print-escape*
        (print-unreadable-object (o stream :type T :identity T)
          (format stream "~A" date))
        (format stream "~A" date))))
(defgeneric date-day (d)
  (:documentation "Given an object that encodes a date, return the day component")
  (:method (d)
    (etypecase d
      (clsql-sys:date
       ;; date-ymd returns (values year month day); pick the third value
       (third (multiple-value-list (clsql-sys:date-ymd d))))
      (clsql-sys:wall-time
       (third (multiple-value-list (clsql-sys:time-ymd d))))
      ((or string integer)
       ;; parse other representations first, then re-dispatch
       (date-day (convert-to-clsql-datetime d)))
      (null nil))))
(defgeneric date-year (d)
  (:documentation "Given an object that encodes a date, return the year component")
  (:method (d)
    (etypecase d
      ;; date-ymd/time-ymd return (values year month day); the year is the
      ;; primary value, so no multiple-value-list is needed here
      (clsql-sys:date (clsql-sys:date-ymd d))
      (clsql-sys:wall-time (clsql-sys:time-ymd d))
      ((or string integer)
       (date-year (convert-to-clsql-datetime d)))
      (null nil))))
(defgeneric date-month (d)
  (:documentation "Given an object that encodes a date, return the month component")
  (:method (d)
    (etypecase d
      (clsql-sys:date
       ;; date-ymd returns (values year month day); pick the second value
       (second (multiple-value-list (clsql-sys:date-ymd d))))
      (clsql-sys:wall-time
       (second (multiple-value-list (clsql-sys:time-ymd d))))
      ((or string integer)
       (date-month (convert-to-clsql-datetime d)))
      (null nil))))
(defun month-string (d)
  "Converts the date to the full month name: January, February, etc.
   Returns NIL when D has no month (e.g. D is NIL)."
  (let ((d (date-month d)))
    (when d (clsql-sys:month-name d))))
(defun month-day-string (d)
  "prints dates as January 3"
  ;; LET binds in parallel, so both DATE-DAY and MONTH-STRING see the
  ;; original D, not each other's bindings.
  (let ((d (date-day d))
        (m (month-string d)))
    (when (and d m) #?"${m} ${d}")))
(defun print-nullable-datetime (field)
  "if the date exists, prints mm/dd/yyyy hh:mm:ss; strings pass through unchanged"
  ;; *print-pretty* is disabled so FORMAT output is not line-wrapped
  (let ((*print-pretty* nil))
    (when field
      (typecase field
        (string field)
        (T (multiple-value-bind (usec second minute hour day month year)
               (clsql-sys:decode-time (convert-to-clsql-datetime field))
             (declare (ignore usec))
             (format nil "~2,'0d/~2,'0d/~4,'0d ~2,'0d:~2,'0d:~2,'0d"
                     month day year hour minute second)))))))
(defun print-timestamp (field)
  "if the date exists, prints yyyy-mm-dd hh:mm:ss.mmm
   (note: the fractional part is milliseconds, three digits, not full usec)"
  (let ((*print-pretty* nil))
    (when field
      (typecase field
        (string field)
        (T (multiple-value-bind (usec second minute hour day month year)
               (clsql-sys:decode-time (convert-to-clsql-datetime field))
             ;; usec is truncated to milliseconds and printed 3 wide
             (format nil "~4,'0d-~2,'0d-~2,'0d ~2,'0d:~2,'0d:~2,'0d.~3,'0d"
                     year month day hour minute second (floor usec 1000))))))))
;; Make clsql wall-times print as mm/dd/yyyy hh:mm:ss, honoring
;; *print-escape* the same way as the date method above.
(defmethod print-object ((o clsql:wall-time) stream)
  (let ((date (print-nullable-datetime o)))
    (if *print-escape*
        (print-unreadable-object (o stream :type T :identity T)
          (format stream "~A" date))
        (format stream "~A" date))))
(defun clsql-datetime-equal (x y)
  "Tries to handle full datetime equality regardless of the format
   (string datestamp, date, datetime, utime)"
  ;; Normalize both operands to wall-times, then compare structurally.
  (flet ((cast (x)
           (typecase x
             (integer (clsql-sys:utime->time x))
             (clsql-sys:date (clsql-sys::date->time x))
             (string (convert-to-clsql-datetime x))
             (T x))))
    (equalp (cast x) (cast y))))
;; Output knobs for the iso8601-* formatters below.
(defvar *iso8601-timezone* nil)   ; T => append "Z"; a string => append it verbatim
(defvar *iso8601-microseconds* nil) ; when true, append .uuuuuu microseconds
(defvar *iso8601-date-time-separator* " ")
(defvar *iso8601-time-separator* ":")
(defvar *iso8601-date-separator* "-")
(defgeneric iso8601-datestamp (d)
  (:documentation "Given an object that encodes a date
   return an iso8601-datestamp representation of it")
  (:method (d)
    (typecase d
      ((or clsql-sys:wall-time clsql-sys:date)
       ;; zero-padded YYYY-MM-DD using the configurable separator
       (format nil "~4,'0D~A~2,'0D~A~2,'0D"
               (date-year d) *iso8601-date-separator* (date-month d)
               *iso8601-date-separator* (date-day d)))
      ((or string integer) (iso8601-datestamp (convert-to-clsql-datetime d)))
      (null nil))))
(defgeneric iso8601-timestamp (d)
  (:documentation
   "CLSQL has a function (I wrote) to do this, but I wanted more flexibility in output
    so that I could use this in more situations

    clsql:iso-timestamp is used only to write to database backends, so a very strict ISO
    is fine")
  (:method (d)
    (typecase d
      ((or clsql-sys:wall-time clsql-sys:date string integer)
       (multiple-value-bind (usec second minute hour day month year)
           (clsql-sys:decode-time (convert-to-clsql-datetime d))
         ;; The two trailing ~? directives recursively process optional
         ;; sub-format strings: one for microseconds, one for the timezone
         ;; suffix; each contributes ("" ()) when its feature is disabled.
         (apply
          #'format nil "~4,'0D~A~2,'0D~A~2,'0D~A~2,'0D~a~2,'0D~A~2,'0D~?~?"
          (nconc
           (list year *iso8601-date-separator* month
                 *iso8601-date-separator* day
                 *iso8601-date-time-separator*
                 hour *iso8601-time-separator*
                 minute *iso8601-time-separator*
                 second)
           (if *iso8601-microseconds*
               (list ".~6,'0D" (list usec))
               (list "" ()))
           (cond
             ((eql *iso8601-timezone* T) (list "~A" (list 'Z)))
             ((stringp *iso8601-timezone*) (list "~A" (list *iso8601-timezone*)))
             (T (list "" ())))))))
      (null nil))))
;; Separator accepted between date (and time) components.
(defparameter +date-sep+ "(?:/|-|\\.|:)")

;; US-style dates: M/D/Y with optional H:M[:S][.frac] [AM/PM], optionally
;; wrapped in single or double quotes.
(defparameter +date-time-regex+
  (cl-ppcre:create-scanner
   #?r"^(?:'|\")?(\d{1,2})${ +date-sep+ }(\d{1,2})${ +date-sep+ }(\d{2,4})(?:\s*(\d{1,2})${ +date-sep+ }(\d{1,2})(?:${ +date-sep+ }(\d{1,2}))?(?:\.(\d+))?\s*((?:a|p)m\.?)?)?(?:'|\")?"
   :case-insensitive-mode t))

;; ISO-8601-ish dates: Y-M-D with optional time (space or T separator),
;; fractional seconds, AM/PM, and a Z / ,,0 / +-HH:MM timezone suffix.
(defparameter +iso-8601-ish-regex-string+
  #?r"^(?:'|\")?(\d{2,4})${ +date-sep+ }(\d{1,2})${ +date-sep+ }(\d{1,2})(?:(?:\s*|T)(\d{1,2})${ +date-sep+ }(\d{1,2})(?:${ +date-sep+ }(\d{1,2}))?(?:\.(\d+))?\s*((?:a|p)m\.?)?(?:Z|,,0|(?:-|\+)\d{1,2}:?\d{2}?)?)?(?:'|\")?")
(defparameter +iso-8601-ish-regex+
  (cl-ppcre:create-scanner +iso-8601-ish-regex-string+ :case-insensitive-mode t))
(defgeneric convert-to-clsql-datetime (val)
  (:documentation
   "Converts a string timestamp into a clsql date time object
    Makes every possible effort to understand your date that will invariably
    be in some format it wont understand.")
  (:method (val)
    (macrolet ((regex-date-to-clsql-date ()
                 "Pretty fugly variable capture, but what are you gonna do.
                  I have the exact same code twice with like 6 vars to pass"
                 ;; Captures mon d y h m s usec am/pm from the enclosing
                 ;; register-groups-bind.  12-hour PM times are shifted to
                 ;; 24-hour; 2-digit years are windowed (<50 => 20xx,
                 ;; <100 => 19xx); usec digits are scaled up to microseconds.
                 `(let ((hour (if (and h (< h 12)
                                       (string-equal am/pm "PM"))
                                  (+ 12 h)
                                  h))
                        (year (and y
                                   (cond
                                     ((< y 50) (+ y 2000))
                                     ((< y 100) (+ y 1900))
                                     (T y))))
                        (usec (when usec
                                (* (parse-integer usec)
                                   (expt 10 (- 6 (length usec)))))))
                    (clsql:make-time
                     :year year :month mon :day d
                     :hour (or hour 0) :minute (or m 0) :second (or s 0)
                     :usec (or usec 0)))))
      (typecase val
        (clsql:date (clsql-sys::date->time val))
        (clsql:wall-time val)
        (integer (clsql-sys::utime->time val))
        (string
         ;; fix: restore the OR that was lost (the two regex attempts are
         ;; alternatives -- first match wins); the clsql parsers stay
         ;; disabled, as in the original
         (or ;; as best I can tell these just suck
          ;; (ignore-errors (clsql-sys:parse-date-time val))
          ;; (ignore-errors (clsql-sys:parse-timestring val))
          (cl-ppcre:register-groups-bind
              ((#'parse-integer mon d y h m s) usec am/pm)
              (+date-time-regex+ val)
            (regex-date-to-clsql-date))
          (cl-ppcre:register-groups-bind
              ((#'parse-integer y mon d h m s) usec am/pm)
              (+iso-8601-ish-regex+ val)
            (regex-date-to-clsql-date))))))))
(defmacro convert-to-clsql-datetime! (&rest places)
  "Destructively coerce each PLACE to a clsql wall-time via
   CONVERT-TO-CLSQL-DATETIME; expands to a single SETF over all places."
  `(setf ,@(iter (for p in places)
                 (collect p)
                 (collect `(convert-to-clsql-datetime ,p)))))
(defgeneric convert-to-clsql-date (val)
  (:documentation "Convert your value into a clsql:date structure")
  (:method (val)
    (typecase val
      (null nil)
      (clsql:date val)
      (clsql-sys::wall-time (clsql-sys::time->date val))
      ;; anything else: parse to a wall-time first, then truncate to a date
      (t (convert-to-clsql-date (convert-to-clsql-datetime val))))))
(defmacro convert-to-clsql-date! (&rest places)
  "Destructively coerce each PLACE to a clsql date via
   CONVERT-TO-CLSQL-DATE; expands to a single SETF over all places."
  `(setf ,@(iter (for p in places)
                 (collect p)
                 (collect `(convert-to-clsql-date ,p)))))
(defun clsql-date/times->utime (obj &optional (timezone 0))
  "obj is either a wall-time or a date in local time. Converts to UTC and returns a utime.
   pass timezone nil to skip UTC conversion.
   if you are looking for the other it is clsql-sys:utime->time"
  (apply #'encode-universal-time
         (multiple-value-bind (usec second minute hour day month year)
             (clsql-sys:decode-time (convert-to-clsql-datetime obj))
           (declare (ignore usec))
           ;; NOTE(review): a NIL timezone is passed through to
           ;; encode-universal-time's time-zone argument; this relies on the
           ;; implementation treating an explicit NIL as "local" -- confirm.
           (list second minute hour day month year timezone))))
(defun last-of-the-month (start-date &aux (month (clsql-helper:date-month start-date)))
  "Return the last day of START-DATE's month, eg: 2/14/2012->2/29/2012"
  ;; Walk forward one day at a time until the month rolls over, then return
  ;; the previous date.  NOTE(review): FROM-DATE looks like a custom iterate
  ;; driver defined elsewhere in this project -- confirm.
  (iter
    (for date from-date start-date)
    (for yesterday previous date)
    (while (eql month (clsql-helper:date-month date)))
    (finally (return yesterday))))
(defun last-of-the-month? (start-date)
  "Returns T if its the last day of the month"
  (convert-to-clsql-date! start-date)
  ;; it is the last day iff tomorrow falls in a different month
  (not (eql
        (clsql-helper:date-month start-date)
        (clsql-helper:date-month
         (clsql-sys:date+ start-date +a-day+)))))
(defun first-of-the-month (&optional (date (clsql-helper:current-sql-date)))
  "returns the first of the month for the month/year of the date passed in"
  (convert-to-clsql-date! date)
  ;; same year and month, day forced to 1
  (clsql-sys:make-date :year (date-year date) :month (date-month date) :day 1))
(defun first-of-the-month? (&optional (date (clsql-helper:current-sql-date)))
  "True when DATE (default: today) falls on day 1 of its month."
  (convert-to-clsql-date! date)
  (= (date-day date) 1))
(defun days-in-month (&optional (date (clsql-helper:current-sql-date)))
  "Return the number of days in the month of the date passed in"
  ;; the day-of-month of the month's last day *is* the month length
  (date-day (last-of-the-month date)))
(defun day-before (&optional (date (clsql-helper:current-sql-date)))
  "Return the date one day before DATE (default: today); NIL stays NIL."
  (convert-to-clsql-date! date)
  (and date (clsql:date- date +a-day+)))
(defun day-after (&optional (date (clsql-helper:current-sql-date)))
  "Return the date one day after DATE (default: today); NIL stays NIL."
  (convert-to-clsql-date! date)
  (and date (clsql:date+ date +a-day+)))
(defun next-month (&optional (date (clsql-helper:current-sql-date))
                   &aux orig)
  "Return DATE advanced by one calendar month (NIL stays NIL)."
  (convert-to-clsql-date! date)
  (when date
    (setf orig (clsql-helper:date-month date)
          date (clsql-sys:date+ date +a-month+))
    ;; make sure we only go one month (1/31 + 1-month = 3/3): if the add
    ;; overflowed into the month after next, back up day by day.
    ;; NOTE(review): (+ 2 orig) is not taken mod 12, but a Dec->Jan hop
    ;; cannot overflow (Jan has 31 days), so this appears safe -- confirm.
    (iter (while (= (clsql-helper:date-month date)
                    (+ 2 orig)))
          (setf date (clsql-sys:date- date +a-day+)))
    date))
(defun last-month (&optional (date (clsql-helper:current-sql-date))
                   &aux orig)
  "Return DATE moved back by one calendar month (NIL stays NIL)."
  (convert-to-clsql-date! date)
  (when date
    (setf
     orig (clsql-helper:date-month date)
     date (clsql-sys:date- date +a-month+))
    ;; make sure we got into last month (3/31 - 1-month = 3/3): if the
    ;; subtraction landed back in the original month, keep stepping back
    (iter (while (= orig (clsql-helper:date-month date)))
          (setf date (clsql-sys:date- date +a-day+)))
    date))
(defun first-of-next-month (&optional (date (clsql-helper:current-sql-date)))
  "Return the first day of the month after DATE's month."
  (convert-to-clsql-date! date)
  (next-month (first-of-the-month date)))
(defun after-day-of-month (date day)
  "True when DATE's day-of-month is strictly greater than DAY."
  (< day (date-day date)))
(defun before-day-of-month (date day)
  "Are we before a specific day of the month"
  (< (date-day date) day))
(defun date-diff (d1 d2)
  "Gets the difference in days between two dates
   returns a negative number to indicate that d1 is after d2
   returns a positive number to indicate that d2 is after d1"
  (convert-to-clsql-date! d1 d2)
  ;; date-difference returns only days and seconds (for times); the day
  ;; count is unsigned, so the sign is reattached from the comparison
  (let ((days (clsql-sys:duration-day (clsql-sys:date-difference d1 d2))))
    (if (clsql:date< d1 d2)
        days
        (- days))))
(defun date-add (d dur)
  "Add DUR to date D.  DUR is either a clsql duration or an integer
   number of days.  NIL dates pass through as NIL."
  (convert-to-clsql-date! d)
  (when d
    (clsql:date+ d (etypecase dur
                     (clsql:duration dur)
                     (integer (clsql:make-duration :day dur))))))
;; Variadic comparison wrappers: every argument is coerced with
;; CONVERT-TO-CLSQL-DATETIME, then the corresponding clsql time
;; comparison is applied across the whole argument list.
(defun dt< (&rest d)
  (apply #'clsql:time< (mapcar #'convert-to-clsql-datetime d)))
(defun dt<= (&rest d)
  (apply #'clsql:time<= (mapcar #'convert-to-clsql-datetime d)))
(defun dt> (&rest d)
  (apply #'clsql:time> (mapcar #'convert-to-clsql-datetime d)))
(defun dt>= (&rest d)
  (apply #'clsql:time>= (mapcar #'convert-to-clsql-datetime d)))
|
d9ff15a6161c3fc8e61831f0bf0304ba869830487d2eda1ad906447715ddca3c | cfpb/qu | cache.clj | (ns ^:integration integration.test.cache
(:require [clojure.test :refer :all]
[clojure.core.cache :as cache]
[qu.test-util :refer :all]
[qu.data :as data]
[qu.query :as q]
[qu.cache :as c]
[qu.loader :as loader]
[monger.core :as mongo]
[monger.collection :as coll]
[monger.conversion :as conv]
[monger.db :as db]
[qu.main :as main]
[qu.app :as app]
[qu.app.mongo :refer [new-mongo]]
[com.stuartsierra.component :as component]))
;; Fixture constants: the integration dataset/slice and a grouping query.
(def db "integration_test")
(def coll "incomes")
(def qmap {:dataset db
           :slice coll
           :select "state_abbr, COUNT()"
           :group "state_abbr"})
;; Dynamic vars rebound per test run by mongo-setup.
(def ^:dynamic cache nil)
(def ^:dynamic worker nil)
(def ^:dynamic query nil)
(def ^:dynamic agg nil)
(def ^:dynamic dbo nil)
;; Holds the running worker's agent so run-all-jobs can await it.
(def worker-agent (atom nil))
(defn run-all-jobs
  "Start WORKER, block until its agent has drained every queued job,
  then stop it. Used so tests force asynchronous cache work to finish."
  [worker]
  (reset! worker-agent (c/start-worker worker))
  (await @worker-agent)
  (swap! worker-agent c/stop-worker))
(defn mongo-setup
  "clojure.test fixture: start mongo, load the integration dataset, and run
  TEST with cache/dbo/query/worker/agg freshly bound; the query-cache db is
  dropped first so each run starts clean. Note: no try/finally, so a
  throwing test leaves mongo running."
  [test]
  (let [mongo (new-mongo (main/default-mongo-options))]
    (component/start mongo)
    (loader/load-dataset db)
    (binding [cache (c/create-query-cache)
              dbo (mongo/get-db db)]
      (db/drop-db (:database cache))
      (binding [query (q/prepare (q/make-query qmap))]
        (binding [worker (c/create-worker cache)
                  agg (q/mongo-aggregation query)]
          (test))))
    (component/stop mongo)))
(use-fixtures :once mongo-setup)
(deftest ^:integration test-cache
  ;; Covers cache construction defaults and the wipe-cache operation.
  (testing "the default cache uses the query-cache db"
    (does= (str (:database cache)) "query_cache"))
  (testing "you can use other databases"
    (does= (str (:database (c/create-query-cache "cashhhh")))
           "cashhhh"))
  (testing "it can be wiped"
    ;; running the aggregation + draining the worker materializes the
    ;; cached collection (:to agg); wiping must drop it
    (data/get-aggregation db coll agg)
    (run-all-jobs worker)
    (is (coll/exists? dbo (:to agg)))
    (c/wipe-cache cache)
    (is (not (coll/exists? dbo (:to agg))))))
(deftest ^:integration test-cleaning-cache
  ;; clean-cache takes a function from cache state to the collection names
  ;; to drop; the default cleaner selects nothing.
  (testing "it can be cleaned"
    (data/get-aggregation db coll agg)
    (run-all-jobs worker)
    (is (coll/exists? dbo (:to agg)))
    (c/clean-cache cache (constantly [(:to agg)]))
    (is (not (coll/exists? dbo (:to agg)))))
  (testing "by default, it cleans nothing"
    (data/get-aggregation db coll agg)
    (run-all-jobs worker)
    (is (coll/exists? dbo (:to agg)))
    (c/clean-cache cache)
    (is (coll/exists? dbo (:to agg))))
  (testing "it runs cleaning operations as part of the worker cycle"
    ;; a counting cleaner proves the worker invokes it at least once
    (let [cleanups (atom 0)
          cache (c/create-query-cache "query_cache" (fn [_] (swap! cleanups inc) []))
          worker (c/create-worker cache)]
      (run-all-jobs worker)
      (is (>= @cleanups 1)))))
(deftest ^:integration test-add-to-queue
  ;; enqueued jobs are keyed by the aggregation's target collection and
  ;; start in the "unprocessed" state
  (testing "it adds a document to jobs"
    (c/clean-cache cache (constantly [(:to agg)]))
    (does-contain (conv/from-db-object (c/add-to-queue cache agg) true)
                  {:_id (:to agg) :status "unprocessed"})))
(deftest ^:integration test-add-to-cache
  ;; add-to-cache runs the aggregation synchronously into (:to agg)
  (testing "it puts the aggregation results into the cache"
    (c/add-to-cache cache agg)
    (is (coll/exists? dbo (:to agg)))))
(deftest ^:integration test-lookup
  ;; cache/lookup implements the core.cache protocol over stored results
  (testing "it returns an empty result if not in the cache"
    (c/clean-cache cache (constantly [(:to agg)]))
    (is (nil? (cache/lookup cache query))))
  (testing "it returns the cached results if they exist"
    (c/add-to-cache cache agg)
    (let [result (cache/lookup cache query)]
      (is (not (nil? result)))
      (is (= 4 (:total result))))))
(deftest ^:integration test-worker
  ;; a miss marks the aggregation :computing until the worker fills it in
  (testing "it will process jobs"
    (c/clean-cache cache (constantly [(:to agg)]))
    (is (:computing (data/get-aggregation db coll agg)))
    (run-all-jobs worker)
    (is (not (:computing (data/get-aggregation db coll agg))))))
;; (run-tests)
| null | https://raw.githubusercontent.com/cfpb/qu/f460d9ab2f05ac22f6d68a98a9641daf0f7c7ba4/test/integration/test/cache.clj | clojure | (run-tests) | (ns ^:integration integration.test.cache
(:require [clojure.test :refer :all]
[clojure.core.cache :as cache]
[qu.test-util :refer :all]
[qu.data :as data]
[qu.query :as q]
[qu.cache :as c]
[qu.loader :as loader]
[monger.core :as mongo]
[monger.collection :as coll]
[monger.conversion :as conv]
[monger.db :as db]
[qu.main :as main]
[qu.app :as app]
[qu.app.mongo :refer [new-mongo]]
[com.stuartsierra.component :as component]))
(def db "integration_test")
(def coll "incomes")
(def qmap {:dataset db
:slice coll
:select "state_abbr, COUNT()"
:group "state_abbr"})
(def ^:dynamic cache nil)
(def ^:dynamic worker nil)
(def ^:dynamic query nil)
(def ^:dynamic agg nil)
(def ^:dynamic dbo nil)
(def worker-agent (atom nil))
(defn run-all-jobs [worker]
(reset! worker-agent (c/start-worker worker))
(await @worker-agent)
(swap! worker-agent c/stop-worker))
(defn mongo-setup
[test]
(let [mongo (new-mongo (main/default-mongo-options))]
(component/start mongo)
(loader/load-dataset db)
(binding [cache (c/create-query-cache)
dbo (mongo/get-db db)]
(db/drop-db (:database cache))
(binding [query (q/prepare (q/make-query qmap))]
(binding [worker (c/create-worker cache)
agg (q/mongo-aggregation query)]
(test))))
(component/stop mongo)))
(use-fixtures :once mongo-setup)
(deftest ^:integration test-cache
(testing "the default cache uses the query-cache db"
(does= (str (:database cache)) "query_cache"))
(testing "you can use other databases"
(does= (str (:database (c/create-query-cache "cashhhh")))
"cashhhh"))
(testing "it can be wiped"
(data/get-aggregation db coll agg)
(run-all-jobs worker)
(is (coll/exists? dbo (:to agg)))
(c/wipe-cache cache)
(is (not (coll/exists? dbo (:to agg))))))
(deftest ^:integration test-cleaning-cache
(testing "it can be cleaned"
(data/get-aggregation db coll agg)
(run-all-jobs worker)
(is (coll/exists? dbo (:to agg)))
(c/clean-cache cache (constantly [(:to agg)]))
(is (not (coll/exists? dbo (:to agg)))))
(testing "by default, it cleans nothing"
(data/get-aggregation db coll agg)
(run-all-jobs worker)
(is (coll/exists? dbo (:to agg)))
(c/clean-cache cache)
(is (coll/exists? dbo (:to agg))))
(testing "it runs cleaning operations as part of the worker cycle"
(let [cleanups (atom 0)
cache (c/create-query-cache "query_cache" (fn [_] (swap! cleanups inc) []))
worker (c/create-worker cache)]
(run-all-jobs worker)
(is (>= @cleanups 1)))))
(deftest ^:integration test-add-to-queue
(testing "it adds a document to jobs"
(c/clean-cache cache (constantly [(:to agg)]))
(does-contain (conv/from-db-object (c/add-to-queue cache agg) true)
{:_id (:to agg) :status "unprocessed"})))
(deftest ^:integration test-add-to-cache
(testing "it puts the aggregation results into the cache"
(c/add-to-cache cache agg)
(is (coll/exists? dbo (:to agg)))))
(deftest ^:integration test-lookup
(testing "it returns an empty result if not in the cache"
(c/clean-cache cache (constantly [(:to agg)]))
(is (nil? (cache/lookup cache query))))
(testing "it returns the cached results if they exist"
(c/add-to-cache cache agg)
(let [result (cache/lookup cache query)]
(is (not (nil? result)))
(is (= 4 (:total result))))))
(deftest ^:integration test-worker
(testing "it will process jobs"
(c/clean-cache cache (constantly [(:to agg)]))
(is (:computing (data/get-aggregation db coll agg)))
(run-all-jobs worker)
(is (not (:computing (data/get-aggregation db coll agg))))))
|
810dd5f0b1ef0befd1b37c43f07d7fe25a5fc61b5d639010b0be0e7d361cc9fc | ekmett/unboxed | FloatRep.hs | {-# Language DataKinds #-}
-- | Instantiation marker fixing the runtime representation to 'FloatRep'
-- (unboxed single-precision floats).
-- NOTE(review): presumably consumed by rep-parameterized internal modules;
-- confirm against the build setup.
module FloatRep where

import GHC.Types

-- | The 'RuntimeRep' this instantiation is specialized to.
type Rep = 'FloatRep
| null | https://raw.githubusercontent.com/ekmett/unboxed/e4c6ca80fb8946b1abfe595ba1c36587a33931db/internal/FloatRep.hs | haskell | # Language DataKinds # | module FloatRep where
import GHC.Types
type Rep = 'FloatRep
|
fd29974f98fbc31301939abc4ede2cf1ae897bf9ed99b876431d3fce3f0d9b5f | eashanhatti/peridot | Common.hs | module Syntax.Common where
import Numeric.Natural
import Data.Text
import Data.Hashable
import GHC.Generics
import Data.Map qualified as Map
import Data.Sequence
-- | How an argument position is filled during elaboration: written
-- explicitly, solved by unification, or irrelevant.
data PassMethod = Explicit | Unification | DontCare
  deriving (Eq, Show)

-- | The two universes terms can live in.
data Universe = Meta | Obj
  deriving (Eq, Show)

-- | A record field label.
newtype Field = Field { unField :: Text }
  deriving (Eq, Show)

-- Conversions between user-facing names and field labels.
nameToField (UserName name) = Field name
fieldToName (Field name) = UserName name

-- | De Bruijn index (distance to the binder).
newtype Index = Index { unIndex :: Natural }
  deriving (Num, Eq, Ord, Enum, Real, Integral, Show)

-- | De Bruijn level (distance from the context root).
newtype Level = Level { unLevel :: Natural }
  deriving (Num, Eq, Enum, Show, Ord)

-- | Unique identifier for declarations.
newtype Id = Id { unId :: Natural }
  deriving (Eq, Ord, Generic, Num, Enum, Real, Integral, Show)

-- | A global variable tagged by kind.
-- NOTE(review): the UV/LV distinction is inferred from the constructor
-- prefixes only -- confirm against use sites.
data Global
  = UVGlobal Natural
  | LVGlobal Natural
  deriving (Eq, Ord, Show)

-- Project the raw number out of either kind of global.
unGlobal (UVGlobal n) = n
unGlobal (LVGlobal n) = n

instance Hashable Id

-- | Surface-level names: user-written, machine-generated, or absent.
data Name = UserName Text | MachineName Natural | Unbound
  deriving (Eq, Ord, Show)

-- | Match a name introduction while ignoring the 'Name' itself.
pattern NameIntro univ did <- RNameIntro _ univ did

-- | Rigid (non-reducible) term formers, parameterized over subterms.
data RigidTerm a
  -- Object level
  = TwoType
  | TwoIntro0
  | TwoIntro1
  | SingType a a
  | SingIntro a
  | ObjIdType a a
  | ObjIdIntro a
  -- Meta level
  | NameType Universe a
  | RNameIntro Name Universe Id
  | MetaConstIntro Id
  | CodeObjType a
  | CodeObjIntro a
  | TextType
  | TextIntroNil
  | TextIntroCons Char a
  -- Propositions
  | PropConstIntro Id
  | ImplType a a
  | ConjType a a
  | DisjType a a
  | AllType a
  | SomeType a
  | PropIdType a a
  | Iterate a a a
  -- Other
  | TypeType Universe
  | ElabError
  | Dummy Text
  deriving (Eq, Show, Functor, Foldable, Traversable, Generic)
Propositions
Other | module Syntax.Common where
import Numeric.Natural
import Data.Text
import Data.Hashable
import GHC.Generics
import Data.Map qualified as Map
import Data.Sequence
data PassMethod = Explicit | Unification | DontCare
deriving (Eq, Show)
data Universe = Meta | Obj
deriving (Eq, Show)
newtype Field = Field { unField :: Text }
deriving (Eq, Show)
nameToField (UserName name) = Field name
fieldToName (Field name) = UserName name
newtype Index = Index { unIndex :: Natural }
deriving (Num, Eq, Ord, Enum, Real, Integral, Show)
newtype Level = Level { unLevel :: Natural }
deriving (Num, Eq, Enum, Show, Ord)
newtype Id = Id { unId :: Natural }
deriving (Eq, Ord, Generic, Num, Enum, Real, Integral, Show)
data Global
= UVGlobal Natural
| LVGlobal Natural
deriving (Eq, Ord, Show)
unGlobal (UVGlobal n) = n
unGlobal (LVGlobal n) = n
instance Hashable Id
data Name = UserName Text | MachineName Natural | Unbound
deriving (Eq, Ord, Show)
pattern NameIntro univ did <- RNameIntro _ univ did
data RigidTerm a
= TwoType
| TwoIntro0
| TwoIntro1
| SingType a a
| SingIntro a
| ObjIdType a a
| ObjIdIntro a
Meta level
| NameType Universe a
| RNameIntro Name Universe Id
| MetaConstIntro Id
| CodeObjType a
| CodeObjIntro a
| TextType
| TextIntroNil
| TextIntroCons Char a
| PropConstIntro Id
| ImplType a a
| ConjType a a
| DisjType a a
| AllType a
| SomeType a
| PropIdType a a
| Iterate a a a
| TypeType Universe
| ElabError
| Dummy Text
deriving (Eq, Show, Functor, Foldable, Traversable, Generic)
|
fbd7f284e5319c2454ad8d959984756822511bd2ddab9f4d36e9b54b629fc06e | vii/teepeedee2 | webapp.lisp | (in-package #:tpd2.webapp)
(declaim (inline webapp-default-page-footer webapp-default-page-head-contents))
(defun webapp-default-page-footer ()
(with-ml-output
(output-raw-ml
(js-library-footer))))
(defun webapp-default-page-head-contents ()
(with-ml-output
(output-raw-ml (js-library))
(js-html-script
(setf *channels-url*
(+ (unquote +channel-page-name+)
(unquote-splice
(when (webapp-frame-available-p)
(list
(strcat "?" +webapp-frame-id-param+ "=")
(force-string (frame-id (webapp-frame)))))))))))
(defmacro ml-to-byte-vector (ml)
`(sendbuf-to-byte-vector (with-ml-output-start ,ml)))
(defmacro webapp-ml (title-and-options &body body)
(with-unique-names (title-ml)
(destructuring-bind (title &key head-contents)
(typecase title-and-options
(null (list nil))
(list title-and-options)
(t (list title-and-options)))
`(let ((,title-ml
(ml-to-byte-vector ,title)))
(when (webapp-frame-available-p)
(setf (webapp-frame-var 'actions) nil))
(values
(with-frame-site
(with-ml-output-start
(output-raw-ml "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\""
" \"\">")
(<html
(<head
(current-site-call page-head ,title-ml)
,head-contents)
(<body
(current-site-call page-body-start ,title-ml)
,@body
(current-site-call page-body-footer ,title-ml)))))
+http-header-html-content-type+)))))
(defmacro webapp (title-and-options &body body)
(with-unique-names (l)
`(labels ((,l ()
(when (webapp-frame-available-p)
(setf (frame-current-page (webapp-frame))
#',l))
(webapp-ml ,title-and-options ,@body)))
(,l))))
(defmacro link-to-webapp (title &body body)
(with-unique-names (title-ml)
`(let ((,title-ml (ml-to-byte-vector ,title)))
(html-replace-link (output-raw-ml ,title-ml)
(webapp ((output-raw-ml ,title-ml)) ,@body)))))
(defmacro webapp-section (title &body body)
`(<div :class "webapp-section"
(<h3 ,@(force-list title))
,@body))
(defmacro webapp-select-one (title list-generation-form &key action replace display (describe '(lambda (x) (declare (ignore x)))))
(with-unique-names (i)
`(webapp-section ,title
(<ul
(loop for ,i in ,list-generation-form
do (let-current-values (,i)
,(cond
(action
`(<li (html-action-link (,display ,i)
(,action ,i))
(,describe ,i)
))
(replace
`(<li (html-replace-link (,display ,i) (,replace ,i)) (,describe ,i)))
(t (error "Please specify an action or a replacement")))))))))
(defmacro webapp-display (object)
`(output-object-to-ml ,object))
| null | https://raw.githubusercontent.com/vii/teepeedee2/a2ed78c51d782993591c3284562daeed3aba3d40/src/webapp/webapp.lisp | lisp | (in-package #:tpd2.webapp)
(declaim (inline webapp-default-page-footer webapp-default-page-head-contents))
(defun webapp-default-page-footer ()
(with-ml-output
(output-raw-ml
(js-library-footer))))
(defun webapp-default-page-head-contents ()
(with-ml-output
(output-raw-ml (js-library))
(js-html-script
(setf *channels-url*
(+ (unquote +channel-page-name+)
(unquote-splice
(when (webapp-frame-available-p)
(list
(strcat "?" +webapp-frame-id-param+ "=")
(force-string (frame-id (webapp-frame)))))))))))
(defmacro ml-to-byte-vector (ml)
`(sendbuf-to-byte-vector (with-ml-output-start ,ml)))
(defmacro webapp-ml (title-and-options &body body)
(with-unique-names (title-ml)
(destructuring-bind (title &key head-contents)
(typecase title-and-options
(null (list nil))
(list title-and-options)
(t (list title-and-options)))
`(let ((,title-ml
(ml-to-byte-vector ,title)))
(when (webapp-frame-available-p)
(setf (webapp-frame-var 'actions) nil))
(values
(with-frame-site
(with-ml-output-start
(output-raw-ml "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\""
" \"\">")
(<html
(<head
(current-site-call page-head ,title-ml)
,head-contents)
(<body
(current-site-call page-body-start ,title-ml)
,@body
(current-site-call page-body-footer ,title-ml)))))
+http-header-html-content-type+)))))
(defmacro webapp (title-and-options &body body)
(with-unique-names (l)
`(labels ((,l ()
(when (webapp-frame-available-p)
(setf (frame-current-page (webapp-frame))
#',l))
(webapp-ml ,title-and-options ,@body)))
(,l))))
(defmacro link-to-webapp (title &body body)
(with-unique-names (title-ml)
`(let ((,title-ml (ml-to-byte-vector ,title)))
(html-replace-link (output-raw-ml ,title-ml)
(webapp ((output-raw-ml ,title-ml)) ,@body)))))
(defmacro webapp-section (title &body body)
`(<div :class "webapp-section"
(<h3 ,@(force-list title))
,@body))
(defmacro webapp-select-one (title list-generation-form &key action replace display (describe '(lambda (x) (declare (ignore x)))))
(with-unique-names (i)
`(webapp-section ,title
(<ul
(loop for ,i in ,list-generation-form
do (let-current-values (,i)
,(cond
(action
`(<li (html-action-link (,display ,i)
(,action ,i))
(,describe ,i)
))
(replace
`(<li (html-replace-link (,display ,i) (,replace ,i)) (,describe ,i)))
(t (error "Please specify an action or a replacement")))))))))
(defmacro webapp-display (object)
`(output-object-to-ml ,object))
| |
fabced02436be5868ed0411937d0e6d1f35d25fbc8e3e08c16a144f872769a03 | stumpwm/stumpwm-contrib | package.lisp | package.lisp
;; Package for the "mem" StumpWM modeline module.
(defpackage #:mem
  (:use #:cl :common-lisp :stumpwm :cl-ppcre))
| null | https://raw.githubusercontent.com/stumpwm/stumpwm-contrib/a7dc1c663d04e6c73a4772c8a6ad56a34381096a/modeline/mem/package.lisp | lisp | package.lisp
(defpackage #:mem
(:use #:cl :common-lisp :stumpwm :cl-ppcre))
| |
eca601c082631e6a23c34b8af3768f84f0d55ae2f26c6045a8110a862fda69ca | eareese/htdp-exercises | 091-happy-cat-direction.rkt | #lang htdp/bsl
(require 2htdp/image)
(require 2htdp/universe)
;; Exercise 91
;; Extend your structure type definition and data definition from exercise 88 to
;; include a direction field. Adjust your happy-cat program so that the cat
;; moves in the specified direction. The program should move the cat in the
;; current direction, and it should turn the cat around when it reaches either
;; end of the scene.
(define-struct cat [x hap dir])
; Cat = (make-cat Number Number String)
; ex: (make-cat 10 1 "left")
; interpretation (make-cat x n) describes a Cat that is at x-position 'x', is
; moving left (x decreases with each tick) and has happiness level 'n'.
; VCat
; The world state VCat is represented by a Cat with a Number for x-position and
; a Number for "happiness score" and a String for direction.
; interpretation The world state of a cat, represented by Cat values to denote
; the cat's position along x-axis, its happiness level, and its direction. Both
;; position and happiness level change on every clock tick, and direction
;; changes when the cat's position reaches either boundary of the scene.
; sprite frames for the walking animation (render alternates between them)
(define cat1 (bitmap "images/cat1.png"))
(define cat2 (bitmap "images/cat2.png"))
; fixed y-position of the cat in the scene
(define CAT-Y 250)
; happiness lost per clock tick
(define HAP-DECAY 0.1)
; scene size, derived from the sprite dimensions
(define WORLD-WIDTH (* 10 (image-width cat1)))
(define WORLD-HEIGHT (* 3 (image-height cat1)))
(define BACKGROUND
  (rectangle WORLD-WIDTH WORLD-HEIGHT "outline" "black"))
; happiness guage: a HAPG-WIDTH-wide bar drawn at the current happiness
; height, at most HAPG-HEIGHT tall; happiness itself is capped at HAPG-MAX
(define HAPG-WIDTH 50)
(define HAPG-HEIGHT 100)
(define HAPG-MAX 100)
; VCat -> Image
; places the cat in the world, using cat-x as the x position.
; also renders the happiness guage, using the value of cat-hap
; TODO: examples
; draws the world: the current animation frame placed at 3x the cat's
; logical x (scaling logical steps to pixels) at height CAT-Y, on top of
; the background-plus-guage image.
(define (render vc)
  (place-image (if (odd? (cat-x vc)) cat1 cat2) ; alternate frames while walking
               (* 3 (cat-x vc))
               CAT-Y
               (draw-guage-on-bg (cat-hap vc))))
; TODO: examples
; VCat -> Image
; draw the guage part only
; h is the current happiness, used directly as the bar height in pixels;
; the bar is anchored by its bottom-middle at x = WORLD-WIDTH - HAPG-WIDTH/2,
; y = HAPG-HEIGHT, so it grows upward near the right edge of the scene.
; NOTE(review): a negative h would make rectangle error -- callers must
; keep happiness >= 0.
(define (draw-guage-on-bg h)
  (place-image/align
   (rectangle HAPG-WIDTH h "solid" "blue")
   (- WORLD-WIDTH (/ HAPG-WIDTH 2)) HAPG-HEIGHT
   "middle" "bottom"
   BACKGROUND))
; VCat -> VCat
; on each clock tick, cat-x should be incremented by 1, and cat-hap
; should fall by HAP-DECAY, but with a minimum possible value of 0.
; UNLESS! if happiness should fall to 0, the cat will stop moving, i.e.
; maintain the same cat-x value.
; update:
; if direction is "right", increment cat-x
; if direction is "left", decrement cat-x
; also recognize bounds for R and L movement and switch accordingly
; TODO: implement
; TODO: examples
(define (tock vc)
  (cond
    [(<= (cat-hap vc) 0)
     ; fully sad: freeze in place with happiness pinned at exactly 0
     (make-cat (cat-x vc) 0 (cat-dir vc))]
    [else
     (make-cat
      ; string=? instead of eq?: eq? on string literals is unreliable,
      ; and hyper already compares strings with string=?
      (if (string=? "left" (cat-dir vc)) (- (cat-x vc) 1) (+ (cat-x vc) 1))
      ; clamp below at 0: a 0.1 decay step can overshoot past zero, and a
      ; negative happiness would give the guage rectangle a negative height
      (max 0 (min (- (cat-hap vc) HAP-DECAY) HAPG-MAX))
      ; turn around at either edge of the scene (render draws at 3x)
      (cond
        [(<= (cat-x vc) 0) "right"]
        [(>= (* 3 (cat-x vc)) (- WORLD-WIDTH (image-width cat1))) "left"]
        [else (cat-dir vc)]))]))
; event handler
; VCat KeyEvent -> VCat
; down arrow
; hap inc by 1/5
; up arrow
; hap up by 1/3
(define (hyper vc ke)
  (cond
    [(string=? "down" ke)
     ; +1/5: exact 6/5 instead of the decimal approximation 1.2;
     ; cap with HAPG-MAX (the happiness bound), not HAPG-HEIGHT
     (make-cat (cat-x vc)
               (min HAPG-MAX (* 6/5 (cat-hap vc)))
               (cat-dir vc))]
    [(string=? "up" ke)
     ; +1/3: exact 4/3 instead of the approximation 1.3333
     (make-cat (cat-x vc)
               (min HAPG-MAX (* 4/3 (cat-hap vc)))
               (cat-dir vc))]
    ; any other key leaves the world unchanged
    [else vc]))
; Number -> VCat
; launches the program, given a Number value for x, and assuming that
; happiness level starts at 100.
(define (main x)
  (big-bang (make-cat x 100 "right") ; start fully happy, heading right
            [on-tick tock]           ; movement + happiness decay
            [to-draw render]         ; scene + happiness guage
            [on-key hyper]))         ; arrow keys pet the cat
(main 130)
| null | https://raw.githubusercontent.com/eareese/htdp-exercises/a85ff3111d459dda0e94d9b463d01a09accbf9bf/part01-fixed-size-data/091-happy-cat-direction.rkt | racket | include a direction field. Adjust your happy-cat program so that the cat
moves in the specified direction. The program should move the cat in the
current direction, and it should turn the cat around when it reaches either
end of the scene.
Cat = (make-cat Number Number String)
interpretation (make-cat x n) describes a Cat that is at x-position 'x', is
moving left (x decreases with each tick) and has happiness level 'n'.
a Number for "happiness score" and a String for direction.
interpretation The world state of a cat, represented by Cat values to denote
the cat's position along x-axis, its happiness level, and its direction. Both
position and happiness level change on every clock tick, and direction
changes when the cat's position reaches either boundary of the scene.
places the cat in the world, using cat-x as the x position.
also renders the happiness guage, using the value of cat-hap
TODO: examples
animation: alternate cat1/cat2 images
TODO: examples
draw the guage part only
UNLESS! if happiness should fall to 0, the cat will stop moving, i.e.
maintain the same cat-x value.
update:
if direction is "right", increment cat-x
if direction is "left", decrement cat-x
also recognize bounds for R and L movement and switch accordingly
TODO: implement
TODO: examples
event handler
up arrow
launches the program, given a Number value for x, and assuming that | #lang htdp/bsl
(require 2htdp/image)
(require 2htdp/universe)
Exercise 91
Extend your structure type definition and data definition from exercise 88 to
(define-struct cat [x hap dir])
ex : ( make - cat 10 1 " left " )
VCat
The world state VCat is represented by a Cat with a Number for x - position and
(define cat1 (bitmap "images/cat1.png"))
(define cat2 (bitmap "images/cat2.png"))
(define CAT-Y 250)
(define HAP-DECAY 0.1)
(define WORLD-WIDTH (* 10 (image-width cat1)))
(define WORLD-HEIGHT (* 3 (image-height cat1)))
(define BACKGROUND
(rectangle WORLD-WIDTH WORLD-HEIGHT "outline" "black"))
(define HAPG-WIDTH 50)
(define HAPG-HEIGHT 100)
(define HAPG-MAX 100)
VCat - > Image
(define (render vc)
(place-image
(cond [(odd? (cat-x vc)) cat1] [else cat2])
(* 3 (cat-x vc))
CAT-Y
(draw-guage-on-bg (cat-hap vc))))
VCat - > Image
(define (draw-guage-on-bg h)
(place-image/align
(rectangle HAPG-WIDTH h "solid" "blue")
(- WORLD-WIDTH (/ HAPG-WIDTH 2)) HAPG-HEIGHT
"middle" "bottom"
BACKGROUND))
VCat - > VCat
on each clock tick , cat - x should be incremented by 1 , and cat - hap
should fall by HAP - DECAY , but with a minimum possible value of 0 .
(define (tock vc)
(cond [(<= (cat-hap vc) 0)
(make-cat (cat-x vc) 0 (cat-dir vc))]
[else
(make-cat
(if (eq? "left" (cat-dir vc)) (- (cat-x vc) 1) (+ (cat-x vc) 1))
(min (- (cat-hap vc) HAP-DECAY) HAPG-MAX)
(cond
[(<= (cat-x vc) 0) "right"]
[(>= (* 3 (cat-x vc)) (- WORLD-WIDTH (image-width cat1))) "left"]
[else (cat-dir vc)]))]))
VCat
down
hap inc by 1/5
hap up by 1/3
(define (hyper vc ke)
(cond
[(string=? "down" ke)
(make-cat
(cat-x vc)
(min HAPG-HEIGHT (* 1.2 (cat-hap vc)))
(cat-dir vc))]
[(string=? "up" ke)
(make-cat
(cat-x vc)
(min HAPG-HEIGHT (* 1.3333 (cat-hap vc)))
(cat-dir vc))]
[else vc]))
VCat - > VCat
happiness level starts at .
(define (main x)
(big-bang (make-cat x 100 "right")
[on-tick tock]
[to-draw render]
[on-key hyper]))
(main 130)
|
293347ca03bc949fe6b8ea7cf12750640df73de7978fd6b53c46726c92210465 | ghc/ghc | deriving-via-compile.hs | # LANGUAGE DerivingStrategies #
{-# LANGUAGE DerivingVia #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
module DerivingViaCompile where
import Data.Void
import Data.Complex
import Data.Functor.Const
import Data.Functor.Identity
import Data.Ratio
import Control.Monad (liftM, ap)
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Writer
import Control.Applicative hiding (WrappedMonad(..))
import Data.Bifunctor
import Data.Monoid
import Data.Kind
type f ~> g = forall xx. f xx -> g xx
-----
-- Simple example
-----
data Foo a = MkFoo a a
deriving Show
via (Identity (Foo a))
-----
-- Eta reduction at work
-----
newtype Flip p a b = Flip { runFlip :: p b a }
instance Bifunctor p => Bifunctor (Flip p) where
bimap f g = Flip . bimap g f . runFlip
instance Bifunctor p => Functor (Flip p a) where
fmap f = Flip . first f . runFlip
newtype Bar a = MkBar (Either a Int)
deriving Functor
via (Flip Either Int)
-----
-- Monad transformers
-----
type MTrans = (Type -> Type) -> (Type -> Type)
-- From `constraints'
-- Reified constraint dictionary (mirrors Data.Constraint.Dict from the
-- `constraints' package).
data Dict c where
  Dict :: c => Dict c

-- Entailment: @a :- b@ is evidence that constraint @a@ implies @b@.
newtype a :- b = Sub (a => Dict b)

infixl 1 \\
-- Discharge an entailment: given @a@ in scope and @a :- b@, run a
-- computation that requires @b@.
(\\) :: a => (b => r) -> (a :- b) -> r
r \\ Sub Dict = r
-- With `-XQuantifiedConstraints' this just becomes
--
--    type Lifting cls trans = forall mm. cls mm => cls (trans mm)
--
--    type LiftingMonad trans = Lifting Monad trans
--
-- Proof-carrying class: applying @trans@ to any 'Monad' yields a
-- 'Monad'.  (With -XQuantifiedConstraints this class would be
-- unnecessary.)
class LiftingMonad (trans :: MTrans) where
  proof :: Monad m :- Monad (trans m)

instance LiftingMonad (StateT s :: MTrans) where
  proof :: Monad m :- Monad (StateT s m)
  proof = Sub Dict

instance Monoid w => LiftingMonad (WriterT w :: MTrans) where
  proof :: Monad m :- Monad (WriterT w m)
  proof = Sub Dict

instance (LiftingMonad trans, LiftingMonad trans') => LiftingMonad (ComposeT trans trans' :: MTrans) where
  proof :: forall m. Monad m :- Monad (ComposeT trans trans' m)
  -- chain the two entailments: Monad m entails Monad (trans' m), which
  -- in turn entails Monad (trans (trans' m)) = Monad (ComposeT trans trans' m)
  proof = Sub (Dict \\ proof @trans @(trans' m) \\ proof @trans' @m)
newtype Stack :: MTrans where
Stack :: ReaderT Int (StateT Bool (WriterT String m)) a -> Stack m a
deriving newtype
( Functor
, Applicative
, Monad
, MonadReader Int
, MonadState Bool
, MonadWriter String
)
deriving (MonadTrans, MFunctor)
via (ReaderT Int `ComposeT` StateT Bool `ComposeT` WriterT String)
class MFunctor (trans :: MTrans) where
hoist :: Monad m => (m ~> m') -> (trans m ~> trans m')
instance MFunctor (ReaderT r :: MTrans) where
hoist :: Monad m => (m ~> m') -> (ReaderT r m ~> ReaderT r m')
hoist nat = ReaderT . fmap nat . runReaderT
instance MFunctor (StateT s :: MTrans) where
hoist :: Monad m => (m ~> m') -> (StateT s m ~> StateT s m')
hoist nat = StateT . fmap nat . runStateT
instance MFunctor (WriterT w :: MTrans) where
hoist :: Monad m => (m ~> m') -> (WriterT w m ~> WriterT w m')
hoist nat = WriterT . nat . runWriterT
infixr 9 `ComposeT`
newtype ComposeT :: MTrans -> MTrans -> MTrans where
ComposeT :: { getComposeT :: f (g m) a } -> ComposeT f g m a
deriving newtype (Functor, Applicative, Monad)
instance (MonadTrans f, MonadTrans g, LiftingMonad g) => MonadTrans (ComposeT f g) where
lift :: forall m. Monad m => m ~> ComposeT f g m
lift = ComposeT . lift . lift
\\ proof @g @m
instance (MFunctor f, MFunctor g, LiftingMonad g) => MFunctor (ComposeT f g) where
hoist :: forall m m'. Monad m => (m ~> m') -> (ComposeT f g m ~> ComposeT f g m')
hoist f = ComposeT . hoist (hoist f) . getComposeT
\\ proof @g @m
-----
-- Using tuples in a `via` type
-----
newtype X a = X (a, a)
deriving (Semigroup, Monoid)
via (Product a, Sum a)
deriving (Show, Eq)
via (a, a)
-----
-- Abstract data types
-----
class C f where
c :: f a -> Int
newtype X2 f a = X2 (f a)
instance C (X2 f) where
c = const 0
deriving via (X2 IO) instance C IO
----
-- Testing parser
----
newtype P0 a = P0 a deriving Show via a
newtype P1 a = P1 [a] deriving Show via [a]
newtype P2 a = P2 (a, a) deriving Show via (a, a)
newtype P3 a = P3 (Maybe a) deriving Show via (First a)
newtype P4 a = P4 (Maybe a) deriving Show via (First $ a)
newtype P5 a = P5 a deriving Show via (Identity $ a)
newtype P6 a = P6 [a] deriving Show via ([] $ a)
newtype P7 a = P7 (a, a) deriving Show via (Identity $ (a, a))
newtype P8 a = P8 (Either () a) deriving Functor via (($) (Either ()))
newtype f $ a = APP (f a) deriving newtype Show deriving newtype Functor
----
-- From Baldur's notes
----
----
-- 1
----
newtype WrapApplicative f a = WrappedApplicative (f a)
deriving (Functor, Applicative)
instance (Applicative f, Num a) => Num (WrapApplicative f a) where
(+) = liftA2 (+)
(*) = liftA2 (*)
negate = fmap negate
fromInteger = pure . fromInteger
abs = fmap abs
signum = fmap signum
instance (Applicative f, Fractional a) => Fractional (WrapApplicative f a) where
recip = fmap recip
fromRational = pure . fromRational
instance (Applicative f, Floating a) => Floating (WrapApplicative f a) where
pi = pure pi
sqrt = fmap sqrt
exp = fmap exp
log = fmap log
sin = fmap sin
cos = fmap cos
asin = fmap asin
atan = fmap atan
acos = fmap acos
sinh = fmap sinh
cosh = fmap cosh
asinh = fmap asinh
atanh = fmap atanh
acosh = fmap acosh
instance (Applicative f, Semigroup s) => Semigroup (WrapApplicative f s) where
(<>) = liftA2 (<>)
instance (Applicative f, Monoid m) => Monoid (WrapApplicative f m) where
mempty = pure mempty
----
-- 2
----
class Pointed p where
pointed :: a -> p a
newtype WrapMonad f a = WrappedMonad (f a)
deriving newtype (Pointed, Monad)
instance (Monad m, Pointed m) => Functor (WrapMonad m) where
fmap = liftM
instance (Monad m, Pointed m) => Applicative (WrapMonad m) where
pure = pointed
(<*>) = ap
-- data
data Sorted a = Sorted a a a
deriving (Functor, Applicative)
via (WrapMonad Sorted)
deriving (Num, Fractional, Floating, Semigroup, Monoid)
via (WrapApplicative Sorted a)
instance Monad Sorted where
(>>=) :: Sorted a -> (a -> Sorted b) -> Sorted b
Sorted a b c >>= f = Sorted a' b' c' where
Sorted a' _ _ = f a
Sorted _ b' _ = f b
Sorted _ _ c' = f c
instance Pointed Sorted where
pointed :: a -> Sorted a
pointed a = Sorted a a a
----
-- 3
----
class IsZero a where
isZero :: a -> Bool
newtype WrappedNumEq a = WrappedNumEq a
newtype WrappedShow a = WrappedShow a
newtype WrappedNumEq2 a = WrappedNumEq2 a
instance (Num a, Eq a) => IsZero (WrappedNumEq a) where
isZero :: WrappedNumEq a -> Bool
isZero (WrappedNumEq a) = 0 == a
instance Show a => IsZero (WrappedShow a) where
isZero :: WrappedShow a -> Bool
isZero (WrappedShow a) = "0" == show a
instance (Num a, Eq a) => IsZero (WrappedNumEq2 a) where
isZero :: WrappedNumEq2 a -> Bool
isZero (WrappedNumEq2 a) = a + a == a
newtype INT = INT Int
deriving newtype Show
deriving IsZero via (WrappedNumEq Int)
newtype VOID = VOID Void deriving IsZero via (WrappedShow Void)
----
-- 4
----
class Bifunctor p => Biapplicative p where
bipure :: a -> b -> p a b
biliftA2
:: (a -> b -> c)
-> (a' -> b' -> c')
-> p a a'
-> p b b'
-> p c c'
instance Biapplicative (,) where
bipure = (,)
biliftA2 f f' (a, a') (b, b') =
(f a b, f' a' b')
newtype WrapBiapp p a b = WrapBiap (p a b)
deriving newtype (Functor, Bifunctor, Biapplicative, Eq)
instance (Biapplicative p, Num a, Num b) => Num (WrapBiapp p a b) where
(+) = biliftA2 (+) (+)
(-) = biliftA2 (*) (*)
(*) = biliftA2 (*) (*)
negate = bimap negate negate
abs = bimap abs abs
signum = bimap signum signum
fromInteger n = fromInteger n `bipure` fromInteger n
newtype INT2 = INT2 (Int, Int)
deriving IsZero via (WrappedNumEq2 (WrapBiapp (,) Int Int))
----
-- 5
----
class Monoid a => MonoidNull a where
null :: a -> Bool
newtype WrpMonNull a = WRM a deriving (Eq, Semigroup, Monoid)
instance (Eq a, Monoid a) => MonoidNull (WrpMonNull a) where
null :: WrpMonNull a -> Bool
null = (== mempty)
deriving via (WrpMonNull Any) instance MonoidNull Any
deriving via () instance MonoidNull ()
deriving via Ordering instance MonoidNull Ordering
----
-- 6
----
-- (upstream source link lost in extraction; it pointed at line #L635)
class Lattice a where
sup :: a -> a -> a
(.>=) :: a -> a -> Bool
(.>) :: a -> a -> Bool
newtype WrapOrd a = WrappedOrd a
deriving newtype (Eq, Ord)
instance Ord a => Lattice (WrapOrd a) where
sup = max
(.>=) = (>=)
(.>) = (>)
deriving via [a] instance Ord a => Lattice [a]
deriving via (a, b) instance (Ord a, Ord b) => Lattice (a, b)
--mkLattice_(Bool)
deriving via Bool instance Lattice Bool
--mkLattice_(Char)
deriving via Char instance Lattice Char
--mkLattice_(Int)
deriving via Int instance Lattice Int
--mkLattice_(Integer)
deriving via Integer instance Lattice Integer
--mkLattice_(Float)
deriving via Float instance Lattice Float
--mkLattice_(Double)
deriving via Double instance Lattice Double
--mkLattice_(Rational)
deriving via Rational instance Lattice Rational
----
-- 7
----
-- https://hackage.haskell.org/package/linear-1.20.7/docs/src/Linear-Affine.html
class Functor f => Additive f where
zero :: Num a => f a
(^+^) :: Num a => f a -> f a -> f a
(^+^) = liftU2 (+)
(^-^) :: Num a => f a -> f a -> f a
x ^-^ y = x ^+^ fmap negate y
liftU2 :: (a -> a -> a) -> f a -> f a -> f a
instance Additive [] where
zero = []
liftU2 f = go where
go (x:xs) (y:ys) = f x y : go xs ys
go [] ys = ys
go xs [] = xs
instance Additive Maybe where
zero = Nothing
liftU2 f (Just a) (Just b) = Just (f a b)
liftU2 _ Nothing ys = ys
liftU2 _ xs Nothing = xs
instance Applicative f => Additive (WrapApplicative f) where
zero = pure 0
liftU2 = liftA2
deriving via (WrapApplicative ((->) a)) instance Additive ((->) a)
deriving via (WrapApplicative Complex) instance Additive Complex
deriving via (WrapApplicative Identity) instance Additive Identity
instance Additive ZipList where
zero = ZipList []
liftU2 f (ZipList xs) (ZipList ys) = ZipList (liftU2 f xs ys)
class Additive (Diff p) => Affine p where
type Diff p :: Type -> Type
(.-.) :: Num a => p a -> p a -> Diff p a
(.+^) :: Num a => p a -> Diff p a -> p a
(.-^) :: Num a => p a -> Diff p a -> p a
p .-^ v = p .+^ fmap negate v
-- #define ADDITIVEC(CTX,T) instance CTX => Affine T where type Diff T = T ; \
--   (.-.) = (^-^) ; {-# INLINE (.-.) #-} ; (.+^) = (^+^) ; {-# INLINE (.+^) #-} ; \
--   (.-^) = (^-^) ; {-# INLINE (.-^) #-}
-- #define ADDITIVE(T) ADDITIVEC((), T)
newtype WrapAdditive f a = WrappedAdditive (f a)
instance Additive f => Affine (WrapAdditive f) where
type Diff (WrapAdditive f) = f
WrappedAdditive a .-. WrappedAdditive b = a ^-^ b
WrappedAdditive a .+^ b = WrappedAdditive (a ^+^ b)
WrappedAdditive a .-^ b = WrappedAdditive (a ^-^ b)
-- ADDITIVE(((->) a))
deriving via (WrapAdditive ((->) a)) instance Affine ((->) a)
-- ADDITIVE([])
deriving via (WrapAdditive []) instance Affine []
-- ADDITIVE(Complex)
deriving via (WrapAdditive Complex) instance Affine Complex
-- ADDITIVE(Maybe)
deriving via (WrapAdditive Maybe) instance Affine Maybe
-- ADDITIVE(ZipList)
deriving via (WrapAdditive ZipList) instance Affine ZipList
-- ADDITIVE(Identity)
deriving via (WrapAdditive Identity) instance Affine Identity
----
-- 8
----
class C2 a b c where
c2 :: a -> b -> c
instance C2 a b (Const a b) where
c2 x _ = Const x
newtype Fweemp a = Fweemp a
deriving (C2 a b)
via (Const a (b :: Type))
| null | https://raw.githubusercontent.com/ghc/ghc/aacf616df0b4059e6b177ecb64624ae6fb1d1c87/testsuite/tests/deriving/should_compile/deriving-via-compile.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE GADTs #
# LANGUAGE ConstraintKinds #
# LANGUAGE MultiWayIf #
---
Simple example
---
---
---
---
---
From `constraints'
With `-XQuantifiedConstraints' this just becomes
---
Using tuples in a `via` type
---
---
Abstract data types
---
--
Testing parser
--
--
--
--
--
--
--
data
--
--
--
--
--
--
--
--
mkLattice_(Bool)
mkLattice_(Char)
mkLattice_(Int)
mkLattice_(Integer)
mkLattice_(Float)
mkLattice_(Double)
mkLattice_(Rational)
--
--
-1.20.7/docs/src/Linear-Affine.html
#define ADDITIVE(T) ADDITIVEC((), T)
ADDITIVE(((->) a))
ADDITIVE([])
ADDITIVE(Maybe)
ADDITIVE(ZipList)
ADDITIVE(Identity)
--
-- | # LANGUAGE DerivingStrategies #
# LANGUAGE DerivingVia #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE KindSignatures #
# LANGUAGE StandaloneDeriving #
# LANGUAGE InstanceSigs #
# LANGUAGE PolyKinds #
# LANGUAGE DataKinds #
# LANGUAGE TypeApplications #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeOperators #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE FlexibleInstances #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts #
# LANGUAGE UndecidableInstances #
module DerivingViaCompile where
import Data.Void
import Data.Complex
import Data.Functor.Const
import Data.Functor.Identity
import Data.Ratio
import Control.Monad (liftM, ap)
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Writer
import Control.Applicative hiding (WrappedMonad(..))
import Data.Bifunctor
import Data.Monoid
import Data.Kind
type f ~> g = forall xx. f xx -> g xx
data Foo a = MkFoo a a
deriving Show
via (Identity (Foo a))
Eta reduction at work
newtype Flip p a b = Flip { runFlip :: p b a }
instance Bifunctor p => Bifunctor (Flip p) where
bimap f g = Flip . bimap g f . runFlip
instance Bifunctor p => Functor (Flip p a) where
fmap f = Flip . first f . runFlip
newtype Bar a = MkBar (Either a Int)
deriving Functor
via (Flip Either Int)
Monad transformers
type MTrans = (Type -> Type) -> (Type -> Type)
data Dict c where
Dict :: c => Dict c
newtype a :- b = Sub (a => Dict b)
infixl 1 \\
(\\) :: a => (b => r) -> (a :- b) -> r
r \\ Sub Dict = r
type Lifting cls trans = forall . cls mm = > cls ( trans mm )
type LiftingMonad trans = Lifting Monad trans
class LiftingMonad (trans :: MTrans) where
proof :: Monad m :- Monad (trans m)
instance LiftingMonad (StateT s :: MTrans) where
proof :: Monad m :- Monad (StateT s m)
proof = Sub Dict
instance Monoid w => LiftingMonad (WriterT w :: MTrans) where
proof :: Monad m :- Monad (WriterT w m)
proof = Sub Dict
instance (LiftingMonad trans, LiftingMonad trans') => LiftingMonad (ComposeT trans trans' :: MTrans) where
proof :: forall m. Monad m :- Monad (ComposeT trans trans' m)
proof = Sub (Dict \\ proof @trans @(trans' m) \\ proof @trans' @m)
newtype Stack :: MTrans where
Stack :: ReaderT Int (StateT Bool (WriterT String m)) a -> Stack m a
deriving newtype
( Functor
, Applicative
, Monad
, MonadReader Int
, MonadState Bool
, MonadWriter String
)
deriving (MonadTrans, MFunctor)
via (ReaderT Int `ComposeT` StateT Bool `ComposeT` WriterT String)
class MFunctor (trans :: MTrans) where
hoist :: Monad m => (m ~> m') -> (trans m ~> trans m')
instance MFunctor (ReaderT r :: MTrans) where
hoist :: Monad m => (m ~> m') -> (ReaderT r m ~> ReaderT r m')
hoist nat = ReaderT . fmap nat . runReaderT
instance MFunctor (StateT s :: MTrans) where
hoist :: Monad m => (m ~> m') -> (StateT s m ~> StateT s m')
hoist nat = StateT . fmap nat . runStateT
instance MFunctor (WriterT w :: MTrans) where
hoist :: Monad m => (m ~> m') -> (WriterT w m ~> WriterT w m')
hoist nat = WriterT . nat . runWriterT
infixr 9 `ComposeT`
newtype ComposeT :: MTrans -> MTrans -> MTrans where
ComposeT :: { getComposeT :: f (g m) a } -> ComposeT f g m a
deriving newtype (Functor, Applicative, Monad)
instance (MonadTrans f, MonadTrans g, LiftingMonad g) => MonadTrans (ComposeT f g) where
lift :: forall m. Monad m => m ~> ComposeT f g m
lift = ComposeT . lift . lift
\\ proof @g @m
instance (MFunctor f, MFunctor g, LiftingMonad g) => MFunctor (ComposeT f g) where
hoist :: forall m m'. Monad m => (m ~> m') -> (ComposeT f g m ~> ComposeT f g m')
hoist f = ComposeT . hoist (hoist f) . getComposeT
\\ proof @g @m
newtype X a = X (a, a)
deriving (Semigroup, Monoid)
via (Product a, Sum a)
deriving (Show, Eq)
via (a, a)
class C f where
c :: f a -> Int
newtype X2 f a = X2 (f a)
instance C (X2 f) where
c = const 0
deriving via (X2 IO) instance C IO
newtype P0 a = P0 a deriving Show via a
newtype P1 a = P1 [a] deriving Show via [a]
newtype P2 a = P2 (a, a) deriving Show via (a, a)
newtype P3 a = P3 (Maybe a) deriving Show via (First a)
newtype P4 a = P4 (Maybe a) deriving Show via (First $ a)
newtype P5 a = P5 a deriving Show via (Identity $ a)
newtype P6 a = P6 [a] deriving Show via ([] $ a)
newtype P7 a = P7 (a, a) deriving Show via (Identity $ (a, a))
newtype P8 a = P8 (Either () a) deriving Functor via (($) (Either ()))
newtype f $ a = APP (f a) deriving newtype Show deriving newtype Functor
From Baldur 's notes
1
newtype WrapApplicative f a = WrappedApplicative (f a)
deriving (Functor, Applicative)
instance (Applicative f, Num a) => Num (WrapApplicative f a) where
(+) = liftA2 (+)
(*) = liftA2 (*)
negate = fmap negate
fromInteger = pure . fromInteger
abs = fmap abs
signum = fmap signum
instance (Applicative f, Fractional a) => Fractional (WrapApplicative f a) where
recip = fmap recip
fromRational = pure . fromRational
instance (Applicative f, Floating a) => Floating (WrapApplicative f a) where
pi = pure pi
sqrt = fmap sqrt
exp = fmap exp
log = fmap log
sin = fmap sin
cos = fmap cos
asin = fmap asin
atan = fmap atan
acos = fmap acos
sinh = fmap sinh
cosh = fmap cosh
asinh = fmap asinh
atanh = fmap atanh
acosh = fmap acosh
instance (Applicative f, Semigroup s) => Semigroup (WrapApplicative f s) where
(<>) = liftA2 (<>)
instance (Applicative f, Monoid m) => Monoid (WrapApplicative f m) where
mempty = pure mempty
2
class Pointed p where
pointed :: a -> p a
newtype WrapMonad f a = WrappedMonad (f a)
deriving newtype (Pointed, Monad)
instance (Monad m, Pointed m) => Functor (WrapMonad m) where
fmap = liftM
instance (Monad m, Pointed m) => Applicative (WrapMonad m) where
pure = pointed
(<*>) = ap
data Sorted a = Sorted a a a
deriving (Functor, Applicative)
via (WrapMonad Sorted)
deriving (Num, Fractional, Floating, Semigroup, Monoid)
via (WrapApplicative Sorted a)
instance Monad Sorted where
(>>=) :: Sorted a -> (a -> Sorted b) -> Sorted b
Sorted a b c >>= f = Sorted a' b' c' where
Sorted a' _ _ = f a
Sorted _ b' _ = f b
Sorted _ _ c' = f c
instance Pointed Sorted where
pointed :: a -> Sorted a
pointed a = Sorted a a a
3
class IsZero a where
isZero :: a -> Bool
newtype WrappedNumEq a = WrappedNumEq a
newtype WrappedShow a = WrappedShow a
newtype WrappedNumEq2 a = WrappedNumEq2 a
instance (Num a, Eq a) => IsZero (WrappedNumEq a) where
isZero :: WrappedNumEq a -> Bool
isZero (WrappedNumEq a) = 0 == a
instance Show a => IsZero (WrappedShow a) where
isZero :: WrappedShow a -> Bool
isZero (WrappedShow a) = "0" == show a
instance (Num a, Eq a) => IsZero (WrappedNumEq2 a) where
isZero :: WrappedNumEq2 a -> Bool
isZero (WrappedNumEq2 a) = a + a == a
newtype INT = INT Int
deriving newtype Show
deriving IsZero via (WrappedNumEq Int)
newtype VOID = VOID Void deriving IsZero via (WrappedShow Void)
4
class Bifunctor p => Biapplicative p where
bipure :: a -> b -> p a b
biliftA2
:: (a -> b -> c)
-> (a' -> b' -> c')
-> p a a'
-> p b b'
-> p c c'
instance Biapplicative (,) where
bipure = (,)
biliftA2 f f' (a, a') (b, b') =
(f a b, f' a' b')
newtype WrapBiapp p a b = WrapBiap (p a b)
deriving newtype (Functor, Bifunctor, Biapplicative, Eq)
instance (Biapplicative p, Num a, Num b) => Num (WrapBiapp p a b) where
(+) = biliftA2 (+) (+)
(-) = biliftA2 (*) (*)
(*) = biliftA2 (*) (*)
negate = bimap negate negate
abs = bimap abs abs
signum = bimap signum signum
fromInteger n = fromInteger n `bipure` fromInteger n
newtype INT2 = INT2 (Int, Int)
deriving IsZero via (WrappedNumEq2 (WrapBiapp (,) Int Int))
5
class Monoid a => MonoidNull a where
null :: a -> Bool
newtype WrpMonNull a = WRM a deriving (Eq, Semigroup, Monoid)
instance (Eq a, Monoid a) => MonoidNull (WrpMonNull a) where
null :: WrpMonNull a -> Bool
null = (== mempty)
deriving via (WrpMonNull Any) instance MonoidNull Any
deriving via () instance MonoidNull ()
deriving via Ordering instance MonoidNull Ordering
6
#L635
class Lattice a where
sup :: a -> a -> a
(.>=) :: a -> a -> Bool
(.>) :: a -> a -> Bool
newtype WrapOrd a = WrappedOrd a
deriving newtype (Eq, Ord)
instance Ord a => Lattice (WrapOrd a) where
sup = max
(.>=) = (>=)
(.>) = (>)
deriving via [a] instance Ord a => Lattice [a]
deriving via (a, b) instance (Ord a, Ord b) => Lattice (a, b)
deriving via Bool instance Lattice Bool
deriving via Char instance Lattice Char
deriving via Int instance Lattice Int
deriving via Integer instance Lattice Integer
deriving via Float instance Lattice Float
deriving via Double instance Lattice Double
deriving via Rational instance Lattice Rational
7
class Functor f => Additive f where
zero :: Num a => f a
(^+^) :: Num a => f a -> f a -> f a
(^+^) = liftU2 (+)
(^-^) :: Num a => f a -> f a -> f a
x ^-^ y = x ^+^ fmap negate y
liftU2 :: (a -> a -> a) -> f a -> f a -> f a
instance Additive [] where
zero = []
liftU2 f = go where
go (x:xs) (y:ys) = f x y : go xs ys
go [] ys = ys
go xs [] = xs
instance Additive Maybe where
zero = Nothing
liftU2 f (Just a) (Just b) = Just (f a b)
liftU2 _ Nothing ys = ys
liftU2 _ xs Nothing = xs
instance Applicative f => Additive (WrapApplicative f) where
zero = pure 0
liftU2 = liftA2
deriving via (WrapApplicative ((->) a)) instance Additive ((->) a)
deriving via (WrapApplicative Complex) instance Additive Complex
deriving via (WrapApplicative Identity) instance Additive Identity
instance Additive ZipList where
zero = ZipList []
liftU2 f (ZipList xs) (ZipList ys) = ZipList (liftU2 f xs ys)
class Additive (Diff p) => Affine p where
type Diff p :: Type -> Type
(.-.) :: Num a => p a -> p a -> Diff p a
(.+^) :: Num a => p a -> Diff p a -> p a
(.-^) :: Num a => p a -> Diff p a -> p a
p .-^ v = p .+^ fmap negate v
# define ADDITIVEC(CTX , T ) instance CTX = > Affine T where type Diff T = T ; \
( .- . ) = ( ^-^ ) ; { - # INLINE ( .- . ) # - } ; ( .+^ ) = ( ^+^ ) ; { - # INLINE ( .+^ ) # - } ; \
( .-^ ) = ( ^-^ ) ; { - # INLINE ( .-^ ) # - }
newtype WrapAdditive f a = WrappedAdditive (f a)
instance Additive f => Affine (WrapAdditive f) where
type Diff (WrapAdditive f) = f
WrappedAdditive a .-. WrappedAdditive b = a ^-^ b
WrappedAdditive a .+^ b = WrappedAdditive (a ^+^ b)
WrappedAdditive a .-^ b = WrappedAdditive (a ^-^ b)
deriving via (WrapAdditive ((->) a)) instance Affine ((->) a)
deriving via (WrapAdditive []) instance Affine []
ADDITIVE(Complex )
deriving via (WrapAdditive Complex) instance Affine Complex
deriving via (WrapAdditive Maybe) instance Affine Maybe
deriving via (WrapAdditive ZipList) instance Affine ZipList
deriving via (WrapAdditive Identity) instance Affine Identity
8
class C2 a b c where
c2 :: a -> b -> c
instance C2 a b (Const a b) where
c2 x _ = Const x
newtype Fweemp a = Fweemp a
deriving (C2 a b)
via (Const a (b :: Type))
|
a9f2398c7d6e5720381bba7df8f5b6d64a54bf5f1920d794a4255f2ca1f1ec20 | debug-ito/net-spider | Internal.hs | # LANGUAGE DeriveGeneric #
-- |
-- Module: NetSpider.Snapshot.Internal
-- Description: Implementation of Snapshot graph types
-- Maintainer: Toshio Ito <debug.ito@gmail.com>
--
-- __this module is internal. End-users should not use this.__
--
-- Implementation of Snapshot graph types. This module is for internal
-- and testing purposes only.
--
-- @since 0.3.0.0
module NetSpider.Snapshot.Internal
( SnapshotGraph,
SnapshotLink(..),
linkNodeTuple,
linkNodePair,
SnapshotNode(..)
) where
import Control.Applicative (many, (*>))
import Data.Aeson (ToJSON(..), FromJSON(..))
import qualified Data.Aeson as Aeson
import Data.Bifunctor (Bifunctor(..))
import Data.Char (isUpper, toLower)
import GHC.Generics (Generic)
import NetSpider.Pair (Pair(..))
import NetSpider.Timestamp (Timestamp)
import qualified Text.Regex.Applicative as RE
-- | The snapshot graph, which is a collection of nodes and links.
--
-- @since 0.3.1.0
type SnapshotGraph n na la = ([SnapshotNode n na], [SnapshotLink n la])
-- | A link in the snapshot graph.
--
-- 'SnapshotLink' is a summary of one or more link observations by
-- different subject nodes. Basically the latest of these observations
-- is used to make 'SnapshotLink'.
--
-- - type @n@: node ID.
-- - type @la@: link attributes.
data SnapshotLink n la =
  SnapshotLink
  { _sourceNode :: n,
    _destinationNode :: n,
    _isDirected :: Bool,
    _linkTimestamp :: Timestamp,
    _linkAttributes :: la
    -- Maybe it's a good idea to include 'observationLogs', which can
    -- contain warnings or other logs about making this SnapshotLink.
  }
  deriving (Show,Eq,Generic)
-- | Comparison by node-tuple (source node, destination node).
--
-- The link attributes never affect the ordering; @Eq la@ is only needed
-- to satisfy the 'Eq' superclass of 'Ord'.
instance (Ord n, Eq la) => Ord (SnapshotLink n la) where
  compare l r = compare (linkNodeTuple l) (linkNodeTuple r)
-- | Maps over '_linkAttributes'.
--
-- @since 0.3.0.0
instance Functor (SnapshotLink n) where
  fmap f l = l { _linkAttributes = f $ _linkAttributes l }
-- | 'first' maps over the node IDs (source and destination); 'second'
-- maps over the link attributes.
--
-- @since 0.3.0.0
instance Bifunctor SnapshotLink where
  bimap fn fla l = l { _linkAttributes = fla $ _linkAttributes l,
                       _sourceNode = fn $ _sourceNode l,
                       _destinationNode = fn $ _destinationNode l
                     }
-- JSON field-name options shared by every To/FromJSON instance in this
-- module.  The modifier rewrites a record selector such as
-- @_destinationNode@ to @dest_node@: "destination" -> "dest",
-- "Attributes" -> "Attrs", a selector ending in "Timestamp" collapses
-- (with its whole prefix) to just "timestamp", and finally underscores
-- are dropped and camelCase becomes snake_case.  Composition order
-- matters: 'reSnake' is leftmost, so it runs after the word-level
-- rewrites.
aesonOpt :: Aeson.Options
aesonOpt = Aeson.defaultOptions
           { Aeson.fieldLabelModifier = modifier
           }
  where
    modifier = RE.replace reSnake . RE.replace reAttr . RE.replace reDest . RE.replace reTime
    reDest = fmap (const "dest") $ RE.string "destination"
    reAttr = fmap (const "Attrs") $ RE.string "Attributes"
    reTime = fmap (const "timestamp") (many RE.anySym *> RE.string "Timestamp")
    -- drop '_' and turn each upper-case letter into '_' + its lower case
    reSnake = RE.msym $ \c ->
      if c == '_'
      then Just ""
      else if isUpper c
           then Just ['_', toLower c]
           else Nothing
-- | JSON decoding using the field-name conventions of 'aesonOpt'.
-- (NOTE(review): the original @since version annotation was lost in
-- extraction; restore it from upstream.)
instance (FromJSON n, FromJSON la) => FromJSON (SnapshotLink n la) where
  parseJSON = Aeson.genericParseJSON aesonOpt

-- | JSON encoding using the field-name conventions of 'aesonOpt'.
-- (NOTE(review): the original @since version annotation was lost in
-- extraction; restore it from upstream.)
instance (ToJSON n, ToJSON la) => ToJSON (SnapshotLink n la) where
  toJSON = Aeson.genericToJSON aesonOpt
  toEncoding = Aeson.genericToEncoding aesonOpt
-- | Node-tuple (source node, destination node) of the link.
linkNodeTuple :: SnapshotLink n la -> (n, n)
linkNodeTuple link = (_sourceNode link, _destinationNode link)

-- | Like 'linkNodeTuple', but this returns a 'Pair'.
linkNodePair :: SnapshotLink n la -> Pair n
linkNodePair = Pair . linkNodeTuple
-- | A node in the snapshot graph.
data SnapshotNode n na =
  SnapshotNode
  { _nodeId :: n,
    -- NOTE(review): _isOnBoundary presumably marks a node that was only
    -- observed as a link neighbor, never directly visited -- confirm
    -- against upstream documentation.
    _isOnBoundary :: Bool,
    _nodeTimestamp :: Maybe Timestamp,
    _nodeAttributes :: Maybe na
  }
  deriving (Show,Eq,Generic)
-- | Comparison by node ID.
instance (Ord n, Eq na) => Ord (SnapshotNode n na) where
  compare l r = compare (_nodeId l) (_nodeId r)

-- | Maps over the node attributes (when present).
--
-- @since 0.3.0.0
instance Functor (SnapshotNode n) where
  fmap f n = n { _nodeAttributes = fmap f $ _nodeAttributes n }

-- | 'first' maps over the node ID; 'second' maps over the node
-- attributes (when present).
--
-- @since 0.3.0.0
instance Bifunctor SnapshotNode where
  bimap fn fna n = n { _nodeAttributes = fmap fna $ _nodeAttributes n,
                       _nodeId = fn $ _nodeId n
                     }

-- | JSON decoding using the field-name conventions of 'aesonOpt'.
-- (NOTE(review): the original @since version annotation was lost in
-- extraction; restore it from upstream.)
instance (FromJSON n, FromJSON na) => FromJSON (SnapshotNode n na) where
  parseJSON = Aeson.genericParseJSON aesonOpt

-- | JSON encoding using the field-name conventions of 'aesonOpt'.
-- (NOTE(review): the original @since version annotation was lost in
-- extraction; restore it from upstream.)
instance (ToJSON n, ToJSON na) => ToJSON (SnapshotNode n na) where
  toJSON = Aeson.genericToJSON aesonOpt
  toEncoding = Aeson.genericToEncoding aesonOpt
| null | https://raw.githubusercontent.com/debug-ito/net-spider/82dfbdca1add1edfd54ef36cb1ca5129d528b814/net-spider/src/NetSpider/Snapshot/Internal.hs | haskell | |
Description: Implementation of Snapshot graph types
__this module is internal. End-users should not use this.__
Implementation of Snapshot graph types. This module is for internal
and testing purposes only.
| The snapshot graph, which is a collection nodes and links.
| A link in the snapshot graph.
different subject nodes. Basically the latest of these observations
is used to make 'SnapshotLink'.
- type @n@: node ID.
- type @la@: link attributes.
Maybe it's a good idea to include 'observationLogs', which can
| Comparison by node-tuple (source node, destination node).
| Node-tuple (source node, destination node) of the link.
| Like 'linkNodeTuple', but this returns a 'Pair'.
| A node in the snapshot graph.
| Comparison by node ID. | # LANGUAGE DeriveGeneric #
Module : NetSpider . Snapshot . Internal
Maintainer : < >
@since 0.3.0.0
module NetSpider.Snapshot.Internal
( SnapshotGraph,
SnapshotLink(..),
linkNodeTuple,
linkNodePair,
SnapshotNode(..)
) where
import Control.Applicative (many, (*>))
import Data.Aeson (ToJSON(..), FromJSON(..))
import qualified Data.Aeson as Aeson
import Data.Bifunctor (Bifunctor(..))
import Data.Char (isUpper, toLower)
import GHC.Generics (Generic)
import NetSpider.Pair (Pair(..))
import NetSpider.Timestamp (Timestamp)
import qualified Text.Regex.Applicative as RE
@since 0.3.1.0
type SnapshotGraph n na la = ([SnapshotNode n na], [SnapshotLink n la])
' SnapshotLink ' is summary of one or more link observations by
data SnapshotLink n la =
SnapshotLink
{ _sourceNode :: n,
_destinationNode :: n,
_isDirected :: Bool,
_linkTimestamp :: Timestamp,
_linkAttributes :: la
contain warnings or other logs about making this SnapshotLink .
}
deriving (Show,Eq,Generic)
instance (Ord n, Eq la) => Ord (SnapshotLink n la) where
compare l r = compare (linkNodeTuple l) (linkNodeTuple r)
| @since 0.3.0.0
instance Functor (SnapshotLink n) where
fmap f l = l { _linkAttributes = f $ _linkAttributes l }
| @since 0.3.0.0
instance Bifunctor SnapshotLink where
bimap fn fla l = l { _linkAttributes = fla $ _linkAttributes l,
_sourceNode = fn $ _sourceNode l,
_destinationNode = fn $ _destinationNode l
}
aesonOpt :: Aeson.Options
aesonOpt = Aeson.defaultOptions
{ Aeson.fieldLabelModifier = modifier
}
where
modifier = RE.replace reSnake . RE.replace reAttr . RE.replace reDest . RE.replace reTime
reDest = fmap (const "dest") $ RE.string "destination"
reAttr = fmap (const "Attrs") $ RE.string "Attributes"
reTime = fmap (const "timestamp") (many RE.anySym *> RE.string "Timestamp")
reSnake = RE.msym $ \c ->
if c == '_'
then Just ""
else if isUpper c
then Just ['_', toLower c]
else Nothing
| @since
instance (FromJSON n, FromJSON la) => FromJSON (SnapshotLink n la) where
parseJSON = Aeson.genericParseJSON aesonOpt
| @since
instance (ToJSON n, ToJSON la) => ToJSON (SnapshotLink n la) where
toJSON = Aeson.genericToJSON aesonOpt
toEncoding = Aeson.genericToEncoding aesonOpt
linkNodeTuple :: SnapshotLink n la -> (n, n)
linkNodeTuple link = (_sourceNode link, _destinationNode link)
linkNodePair :: SnapshotLink n la -> Pair n
linkNodePair = Pair . linkNodeTuple
data SnapshotNode n na =
SnapshotNode
{ _nodeId :: n,
_isOnBoundary :: Bool,
_nodeTimestamp :: Maybe Timestamp,
_nodeAttributes :: Maybe na
}
deriving (Show,Eq,Generic)
instance (Ord n, Eq na) => Ord (SnapshotNode n na) where
compare l r = compare (_nodeId l) (_nodeId r)
| @since 0.3.0.0
instance Functor (SnapshotNode n) where
fmap f n = n { _nodeAttributes = fmap f $ _nodeAttributes n }
| @since 0.3.0.0
instance Bifunctor SnapshotNode where
bimap fn fna n = n { _nodeAttributes = fmap fna $ _nodeAttributes n,
_nodeId = fn $ _nodeId n
}
| @since
instance (FromJSON n, FromJSON na) => FromJSON (SnapshotNode n na) where
parseJSON = Aeson.genericParseJSON aesonOpt
| @since
instance (ToJSON n, ToJSON na) => ToJSON (SnapshotNode n na) where
toJSON = Aeson.genericToJSON aesonOpt
toEncoding = Aeson.genericToEncoding aesonOpt
|
516f43d7c383e3542150b1538857bb3bc37df32f7f1c93d122279d4bd12c9f44 | generateme/inferme | anglican.clj | (ns anglican.anglican
(:require [fastmath.core :as m]
[inferme.core :refer :all]
[inferme.plot :as plot]
[fastmath.stats :as stats]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
;; from presentation
(defn generate-observations
[p n]
(repeatedly n #(flip p)))
(def observations-a (generate-observations 0.07 150))
(def observations-b (generate-observations 0.04 250))
(defmodel ab-test
[p-a (:uniform-real)
p-b (:uniform-real)]
(model-result [(observe (distr :bernoulli {:p p-a}) observations-a)
(observe (distr :bernoulli {:p p-b}) observations-b)]
{:p-delta (- p-a p-b)}))
(def posterior (time (infer :metropolis-hastings ab-test {:samples 30000 :burn 5000 :thin 10 :step-scale 0.02})))
(def posterior (time (infer :metropolis-within-gibbs ab-test {:samples 10000 :burn 1000 :thin 3 :step-scale 0.04})))
(def posterior (time (infer :elliptical-slice-sampling ab-test {:samples 30000})))
(:acceptance-ratio posterior)
(:steps posterior)
(stats/mean observations-a)
(stats/mean (trace posterior :p-a))
(stats/mean observations-b)
(stats/mean (trace posterior :p-b))
(plot/histogram (trace posterior :p-a))
(plot/histogram (trace posterior :p-b))
(plot/histogram (trace posterior :p-delta))
(plot/lag (trace posterior :p-a))
| null | https://raw.githubusercontent.com/generateme/inferme/3cee4695bdf2c9c4d79d5257184f5db5e4774bdf/notebooks/anglican/anglican.clj | clojure | from presentation | (ns anglican.anglican
(:require [fastmath.core :as m]
[inferme.core :refer :all]
[inferme.plot :as plot]
[fastmath.stats :as stats]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defn generate-observations
[p n]
(repeatedly n #(flip p)))
(def observations-a (generate-observations 0.07 150))
(def observations-b (generate-observations 0.04 250))
(defmodel ab-test
[p-a (:uniform-real)
p-b (:uniform-real)]
(model-result [(observe (distr :bernoulli {:p p-a}) observations-a)
(observe (distr :bernoulli {:p p-b}) observations-b)]
{:p-delta (- p-a p-b)}))
(def posterior (time (infer :metropolis-hastings ab-test {:samples 30000 :burn 5000 :thin 10 :step-scale 0.02})))
(def posterior (time (infer :metropolis-within-gibbs ab-test {:samples 10000 :burn 1000 :thin 3 :step-scale 0.04})))
(def posterior (time (infer :elliptical-slice-sampling ab-test {:samples 30000})))
(:acceptance-ratio posterior)
(:steps posterior)
(stats/mean observations-a)
(stats/mean (trace posterior :p-a))
(stats/mean observations-b)
(stats/mean (trace posterior :p-b))
(plot/histogram (trace posterior :p-a))
(plot/histogram (trace posterior :p-b))
(plot/histogram (trace posterior :p-delta))
(plot/lag (trace posterior :p-a))
|
b153a22a00a2cb642b0632a242f382b677ae0c786a15e830334e0b9e14526a6d | kind2-mc/kind2 | invs.mli | This file is part of the Kind 2 model checker .
Copyright ( c ) 2015 by the Board of Trustees of the University of Iowa
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you
may not use this file except in compliance with the License . You
may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or
implied . See the License for the specific language governing
permissions and limitations under the License .
Copyright (c) 2015 by the Board of Trustees of the University of Iowa
Licensed under the Apache License, Version 2.0 (the "License"); you
may not use this file except in compliance with the License. You
may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
*)
* Invariants are stored in two hash tables mapping them to their certificate .
One table is for one - state invariants , the other is for two - state invariants .
One table is for one-state invariants, the other is for two-state invariants.
*)
(** Stores invariants. *)
type t
val copy: t -> t
(** The empty collection of invariants. *)
val empty : unit -> t
(** True if no invariants. *)
val is_empty : t -> bool
* Number of invariants ( one - state , two - state ) .
val len : t -> int * int
* Bumps invariants .
If second parameter is [ true ] , include two - state invariants .
If second parameter is [true], include two-state invariants. *)
val of_bound : t -> bool -> Numeral.t -> Term.t list
* Filters some invariants .
Function takes a boolean flag indicating if the invariant is two state .
Function takes a boolean flag indicating if the invariant is two state. *)
val filter : (bool -> Term.t -> Certificate.t -> bool) -> t -> t
* Adds a one - state invariant .
val add_os : t -> Term.t -> Certificate.t -> unit
* Adds a two - state invariant .
val add_ts : t -> Term.t -> Certificate.t -> unit
(** Remove all the invariants. *)
val clear : t -> unit
* The one - state invariants .
val get_os : t -> Term.TermSet.t
* The two - state invariants .
val get_ts : t -> Term.TermSet.t
(** Checks if a term is a known invariant. *)
val mem : t -> Term.t -> bool
(** Returns [Some cert] if [term] is a known invariant, or [None] otherwise. *)
val find : t -> Term.t -> Certificate.t option
(** {e Temporary.} Flattens some invariants into a list. *)
val flatten : t -> (Term.t * Certificate.t) list
* Merges two collections of invariants ( non - destructive ) .
val merge : t -> t -> t
(** Formats some invariants. *)
val fmt : Format.formatter -> t -> unit
Local Variables :
compile - command : " make -C .. -k "
tuareg - interactive - program : " ./kind2.top -I ./_build -I / SExpr "
indent - tabs - mode : nil
End :
Local Variables:
compile-command: "make -C .. -k"
tuareg-interactive-program: "./kind2.top -I ./_build -I ./_build/SExpr"
indent-tabs-mode: nil
End:
*)
| null | https://raw.githubusercontent.com/kind2-mc/kind2/c601470eb68af9bd3b88828b04dbcdbd6bd6bbf5/src/terms/invs.mli | ocaml | * Stores invariants.
* The empty collection of invariants.
* True if no invariants.
* Remove all the invariants.
* Checks if a term is a known invariant.
* Returns [Some cert] if [term] is a known invariant, or [None] otherwise.
* {e Temporary.} Flattens some invariants into a list.
* Formats some invariants. | This file is part of the Kind 2 model checker .
Copyright ( c ) 2015 by the Board of Trustees of the University of Iowa
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you
may not use this file except in compliance with the License . You
may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or
implied . See the License for the specific language governing
permissions and limitations under the License .
Copyright (c) 2015 by the Board of Trustees of the University of Iowa
Licensed under the Apache License, Version 2.0 (the "License"); you
may not use this file except in compliance with the License. You
may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
*)
* Invariants are stored in two hash tables mapping them to their certificate .
One table is for one - state invariants , the other is for two - state invariants .
One table is for one-state invariants, the other is for two-state invariants.
*)
type t
val copy: t -> t
val empty : unit -> t
val is_empty : t -> bool
* Number of invariants ( one - state , two - state ) .
val len : t -> int * int
* Bumps invariants .
If second parameter is [ true ] , include two - state invariants .
If second parameter is [true], include two-state invariants. *)
val of_bound : t -> bool -> Numeral.t -> Term.t list
* Filters some invariants .
Function takes a boolean flag indicating if the invariant is two state .
Function takes a boolean flag indicating if the invariant is two state. *)
val filter : (bool -> Term.t -> Certificate.t -> bool) -> t -> t
* Adds a one - state invariant .
val add_os : t -> Term.t -> Certificate.t -> unit
* Adds a two - state invariant .
val add_ts : t -> Term.t -> Certificate.t -> unit
val clear : t -> unit
* The one - state invariants .
val get_os : t -> Term.TermSet.t
* The two - state invariants .
val get_ts : t -> Term.TermSet.t
val mem : t -> Term.t -> bool
val find : t -> Term.t -> Certificate.t option
val flatten : t -> (Term.t * Certificate.t) list
* Merges two collections of invariants ( non - destructive ) .
val merge : t -> t -> t
val fmt : Format.formatter -> t -> unit
Local Variables :
compile - command : " make -C .. -k "
tuareg - interactive - program : " ./kind2.top -I ./_build -I / SExpr "
indent - tabs - mode : nil
End :
Local Variables:
compile-command: "make -C .. -k"
tuareg-interactive-program: "./kind2.top -I ./_build -I ./_build/SExpr"
indent-tabs-mode: nil
End:
*)
|
0446207efa005a1142af854c8d5631a0c45a807f19f777135a1bf26fcf637ec5 | input-output-hk/offchain-metadata-tools | Types.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE QuasiQuotes #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
module Test.Cardano.Metadata.Types
( tests
) where
import Data.Aeson ( FromJSON, ToJSON )
import qualified Data.Aeson as Aeson
import qualified Data.Bifunctor as Bifunctor
import Data.ByteArray.Encoding ( Base (Base16), convertToBase )
import qualified Data.ByteString.Lazy.Char8 as BLC
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Hedgehog ( Gen, forAll, property, (===) )
import qualified Hedgehog as H ( Property )
import Hedgehog.Internal.Property ( forAllT )
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( Assertion, testCase, (@?=) )
import Test.Tasty.Hedgehog
import Text.RawString.QQ ( r )
import Text.Read ( readEither )
import Cardano.Crypto.DSIGN
import Test.Cardano.Helpers
( prop_json_only_has_keys, prop_json_roundtrips, prop_read_show_roundtrips )
import qualified Test.Cardano.Metadata.Generators as Gen
import Cardano.Metadata.Types.Common
( AttestedProperty (AttestedProperty)
, HashFn (Blake2b224, Blake2b256, SHA256)
, asAttestationSignature
, asPublicKey
, seqZero
, unPropertyName
, unSubject
)
import qualified Cardano.Metadata.Types.Weakly as Weakly
tests :: TestTree
tests = testGroup "Metadata type tests"
[ testGroup "Parsers and printers"
[
testProperty "Metadata/json/roundtrips" (prop_json_roundtrips Gen.weaklyMetadata)
, testProperty "Metadata/json/matches-spec" prop_json_metadata_spec
, testProperty "AttestedProperty/json/roundtrips" (prop_json_roundtrips Gen.attestedProperty')
, testCase "AttestedProperty/json/missing-signatures-ok" unit_attested_property_missing_annotatedSignatures
, testProperty "Name/json/roundtrips" (prop_json_roundtrips Gen.name)
, testProperty "Description/json/roundtrips" (prop_json_roundtrips Gen.description)
, testProperty "Subject/json/roundtrips" (prop_json_roundtrips Gen.subject)
, testProperty "Subject/json/matches-spec" prop_json_subject_spec
, testProperty "PropertyName/json/roundtrips" (prop_json_roundtrips Gen.propertyName)
, testProperty "PropertyName/json/matches-spec" prop_json_propertyName_spec
, testProperty "AnnotatedSignature/json/roundtrips" (prop_json_roundtrips Gen.annotatedSignature')
, testProperty "AnnotatedSignature/json/matches-spec" prop_json_annotatedSignature_spec
, testProperty "PreImage/json/roundtrips" (prop_json_roundtrips Gen.preImage)
, testProperty "PreImage/json/matches-spec-keys" (prop_json_only_has_keys Gen.preImage ["value", "hashFn"])
, testProperty "Owner/json/roundtrips" (prop_json_roundtrips Gen.owner)
, testProperty "Owner/json/matches-spec-keys" (prop_json_only_has_keys Gen.owner ["publicKey", "signature"])
, testProperty "HashFn/read/show/roundtrips" (prop_read_show_roundtrips Gen.hashFn)
, testCase "HashFn/show/matches-spec" unit_hashfn_show_spec
, testProperty "HashFn/json/roundtrips" (prop_json_roundtrips Gen.hashFn)
, testProperty "HashFn/json/matches-spec-keys" (prop_json_read_show_align_spec Gen.hashFn)
]
]
-- | The from/to JSON instances should simply match the show/read
-- instances.
prop_json_read_show_align_spec :: forall a. (Eq a, Show a, Read a, ToJSON a, FromJSON a) => Gen a -> H.Property
prop_json_read_show_align_spec gen = property $ do
a <- forAll gen
(Aeson.String $ T.pack $ show a) === (Aeson.toJSON a)
(Aeson.eitherDecode $ BLC.pack $ "\"" <> show a <> "\"" :: Either String a) === (readEither $ show a)
unit_hashfn_show_spec :: Assertion
unit_hashfn_show_spec = do
show Blake2b256 @?= "blake2b-256"
show Blake2b224 @?= "blake2b-224"
show SHA256 @?= "sha256"
-- | Attested properties without a signature key are treated as an
-- attested property with no signatures.
unit_attested_property_missing_annotatedSignatures :: Assertion
unit_attested_property_missing_annotatedSignatures = do
let
json = [r|
{
"value": "string",
"sequenceNumber": 0
}
|]
Aeson.eitherDecode json @?= Right (AttestedProperty (Aeson.String "string") [] seqZero)
prop_json_subject_spec :: H.Property
prop_json_subject_spec = property $ do
subj <- forAll Gen.subject
Aeson.toJSON subj === Aeson.String (unSubject subj)
prop_json_propertyName_spec :: H.Property
prop_json_propertyName_spec = property $ do
p <- forAll Gen.propertyName
Aeson.toJSON p === Aeson.String (unPropertyName p)
prop_json_annotatedSignature_spec :: H.Property
prop_json_annotatedSignature_spec = property $ do
as <- forAllT Gen.annotatedSignature'
Aeson.toJSON as === Aeson.Object (HM.fromList
[ ("signature", Aeson.String $ T.decodeUtf8 $ convertToBase Base16 $ rawSerialiseSigDSIGN $ asAttestationSignature as)
, ("publicKey", Aeson.String $ T.decodeUtf8 $ convertToBase Base16 $ rawSerialiseVerKeyDSIGN $ asPublicKey as)
]
)
prop_json_metadata_spec :: H.Property
prop_json_metadata_spec = property $ do
m <- forAllT Gen.weaklyMetadata
Aeson.toJSON m === Aeson.Object (HM.fromList $
[ ("subject", Aeson.String $ unSubject $ Weakly.metaSubject m) ]
<> (fmap (Bifunctor.first unPropertyName) $ HM.toList $ fmap Aeson.toJSON $ Weakly.metaProperties m)
)
| null | https://raw.githubusercontent.com/input-output-hk/offchain-metadata-tools/794f08cedbf555e9d207bccc45c08abbcf98add9/metadata-lib/test/Test/Cardano/Metadata/Types.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
| The from/to JSON instances should simply match the show/read
instances.
| Attested properties without a signature key are treated as an
attested property with no signatures. | # LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
module Test.Cardano.Metadata.Types
( tests
) where
import Data.Aeson ( FromJSON, ToJSON )
import qualified Data.Aeson as Aeson
import qualified Data.Bifunctor as Bifunctor
import Data.ByteArray.Encoding ( Base (Base16), convertToBase )
import qualified Data.ByteString.Lazy.Char8 as BLC
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Hedgehog ( Gen, forAll, property, (===) )
import qualified Hedgehog as H ( Property )
import Hedgehog.Internal.Property ( forAllT )
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( Assertion, testCase, (@?=) )
import Test.Tasty.Hedgehog
import Text.RawString.QQ ( r )
import Text.Read ( readEither )
import Cardano.Crypto.DSIGN
import Test.Cardano.Helpers
( prop_json_only_has_keys, prop_json_roundtrips, prop_read_show_roundtrips )
import qualified Test.Cardano.Metadata.Generators as Gen
import Cardano.Metadata.Types.Common
( AttestedProperty (AttestedProperty)
, HashFn (Blake2b224, Blake2b256, SHA256)
, asAttestationSignature
, asPublicKey
, seqZero
, unPropertyName
, unSubject
)
import qualified Cardano.Metadata.Types.Weakly as Weakly
tests :: TestTree
tests = testGroup "Metadata type tests"
[ testGroup "Parsers and printers"
[
testProperty "Metadata/json/roundtrips" (prop_json_roundtrips Gen.weaklyMetadata)
, testProperty "Metadata/json/matches-spec" prop_json_metadata_spec
, testProperty "AttestedProperty/json/roundtrips" (prop_json_roundtrips Gen.attestedProperty')
, testCase "AttestedProperty/json/missing-signatures-ok" unit_attested_property_missing_annotatedSignatures
, testProperty "Name/json/roundtrips" (prop_json_roundtrips Gen.name)
, testProperty "Description/json/roundtrips" (prop_json_roundtrips Gen.description)
, testProperty "Subject/json/roundtrips" (prop_json_roundtrips Gen.subject)
, testProperty "Subject/json/matches-spec" prop_json_subject_spec
, testProperty "PropertyName/json/roundtrips" (prop_json_roundtrips Gen.propertyName)
, testProperty "PropertyName/json/matches-spec" prop_json_propertyName_spec
, testProperty "AnnotatedSignature/json/roundtrips" (prop_json_roundtrips Gen.annotatedSignature')
, testProperty "AnnotatedSignature/json/matches-spec" prop_json_annotatedSignature_spec
, testProperty "PreImage/json/roundtrips" (prop_json_roundtrips Gen.preImage)
, testProperty "PreImage/json/matches-spec-keys" (prop_json_only_has_keys Gen.preImage ["value", "hashFn"])
, testProperty "Owner/json/roundtrips" (prop_json_roundtrips Gen.owner)
, testProperty "Owner/json/matches-spec-keys" (prop_json_only_has_keys Gen.owner ["publicKey", "signature"])
, testProperty "HashFn/read/show/roundtrips" (prop_read_show_roundtrips Gen.hashFn)
, testCase "HashFn/show/matches-spec" unit_hashfn_show_spec
, testProperty "HashFn/json/roundtrips" (prop_json_roundtrips Gen.hashFn)
, testProperty "HashFn/json/matches-spec-keys" (prop_json_read_show_align_spec Gen.hashFn)
]
]
prop_json_read_show_align_spec :: forall a. (Eq a, Show a, Read a, ToJSON a, FromJSON a) => Gen a -> H.Property
prop_json_read_show_align_spec gen = property $ do
a <- forAll gen
(Aeson.String $ T.pack $ show a) === (Aeson.toJSON a)
(Aeson.eitherDecode $ BLC.pack $ "\"" <> show a <> "\"" :: Either String a) === (readEither $ show a)
unit_hashfn_show_spec :: Assertion
unit_hashfn_show_spec = do
show Blake2b256 @?= "blake2b-256"
show Blake2b224 @?= "blake2b-224"
show SHA256 @?= "sha256"
unit_attested_property_missing_annotatedSignatures :: Assertion
unit_attested_property_missing_annotatedSignatures = do
let
json = [r|
{
"value": "string",
"sequenceNumber": 0
}
|]
Aeson.eitherDecode json @?= Right (AttestedProperty (Aeson.String "string") [] seqZero)
prop_json_subject_spec :: H.Property
prop_json_subject_spec = property $ do
subj <- forAll Gen.subject
Aeson.toJSON subj === Aeson.String (unSubject subj)
prop_json_propertyName_spec :: H.Property
prop_json_propertyName_spec = property $ do
p <- forAll Gen.propertyName
Aeson.toJSON p === Aeson.String (unPropertyName p)
prop_json_annotatedSignature_spec :: H.Property
prop_json_annotatedSignature_spec = property $ do
as <- forAllT Gen.annotatedSignature'
Aeson.toJSON as === Aeson.Object (HM.fromList
[ ("signature", Aeson.String $ T.decodeUtf8 $ convertToBase Base16 $ rawSerialiseSigDSIGN $ asAttestationSignature as)
, ("publicKey", Aeson.String $ T.decodeUtf8 $ convertToBase Base16 $ rawSerialiseVerKeyDSIGN $ asPublicKey as)
]
)
prop_json_metadata_spec :: H.Property
prop_json_metadata_spec = property $ do
m <- forAllT Gen.weaklyMetadata
Aeson.toJSON m === Aeson.Object (HM.fromList $
[ ("subject", Aeson.String $ unSubject $ Weakly.metaSubject m) ]
<> (fmap (Bifunctor.first unPropertyName) $ HM.toList $ fmap Aeson.toJSON $ Weakly.metaProperties m)
)
|
98fab2256acb13cf90ba93250fcd60a342205a40a154c798d17a04bd29c2612c | cracauer/sbcl-ita | ucd.lisp | (in-package "SB-COLD")
;;; Common functions
(defparameter *output-directory*
(merge-pathnames
(make-pathname :directory '(:relative :up "output"))
(make-pathname :directory (pathname-directory *load-truename*))))
(defparameter *unicode-character-database*
(make-pathname :directory (pathname-directory *load-truename*)))
(defmacro with-input-txt-file ((s name) &body body)
`(with-open-file (,s (make-pathname :name ,name :type "txt"
:defaults *unicode-character-database*))
,@body))
(defmacro with-output-dat-file ((s name) &body body)
`(with-open-file (,s (make-pathname :name ,name :type "dat"
:defaults *output-directory*)
:direction :output :element-type '(unsigned-byte 8)
:if-exists :supersede :if-does-not-exist :create)
,@body))
(defmacro with-ucd-output-syntax (&body body)
`(with-standard-io-syntax
(let ((*readtable* (copy-readtable))
(*print-readably* nil)
(*print-pretty* t))
,@body)))
(defmacro with-output-lisp-expr-file ((s name) &body body)
`(with-open-file (,s (make-pathname :name ,name :type "lisp-expr"
:defaults *output-directory*)
:direction :output :element-type 'character
:if-exists :supersede :if-does-not-exist :create)
(with-ucd-output-syntax
,@body)))
(defun split-string (line character)
(loop for prev-position = 0 then (1+ position)
for position = (position character line :start prev-position)
collect (subseq line prev-position position)
do (unless position
(loop-finish))))
(defun parse-codepoints (string &key (singleton-list t))
"Gets a list of codepoints out of 'aaaa bbbb cccc', stripping surrounding space"
(let ((list (mapcar
(lambda (s) (parse-integer s :radix 16))
(remove "" (split-string string #\Space) :test #'string=))))
(if (not (or (cdr list) singleton-list)) (car list) list)))
(defun parse-codepoint-range (string)
"Parse the Unicode syntax DDDD|DDDD..DDDD into an inclusive range (start end)"
(destructuring-bind (start &optional empty end) (split-string string #\.)
(declare (ignore empty))
(let* ((head (parse-integer start :radix 16))
(tail (if end
(parse-integer end :radix 16 :end (position #\Space end))
head)))
(list head tail))))
(defun init-indices (strings)
(let ((hash (make-hash-table :test #'equal)))
(loop for string in strings
for index from 0
do (setf (gethash string hash) index))
hash))
(defun clear-flag (bit integer)
(logandc2 integer (ash 1 bit)))
;;; Output storage globals
(defstruct ucd misc decomp)
(defparameter *unicode-names* (make-hash-table))
(defparameter *unicode-1-names* (make-hash-table))
(defparameter *decompositions*
(make-array 10000 :element-type '(unsigned-byte 24) :fill-pointer 0
10000 is not a significant number
(defparameter *decomposition-corrections*
(with-input-txt-file (s "NormalizationCorrections")
(loop with result = nil
for line = (read-line s nil nil) while line
do (when (position #\; line)
(destructuring-bind (cp old-decomp correction version)
(split-string line #\;)
(declare (ignore old-decomp version))
(push (cons (parse-integer cp :radix 16)
(parse-integer correction :radix 16))
result)))
finally (return result)))
"List of decompsotions that were amended in Unicode corrigenda")
(defparameter *compositions* (make-hash-table :test #'equal))
(defparameter *composition-exclusions*
(with-input-txt-file (s "CompositionExclusions")
(loop with result = nil
for line = (read-line s nil nil) while line
when (and (> (length line) 0) (char/= (char line 0) #\#))
do (push (parse-integer line :end (position #\Space line) :radix 16)
result) finally (return result)))
"Characters that are excluded from composition according to UAX#15")
(defparameter *different-titlecases* nil)
(defparameter *different-casefolds* nil)
(defparameter *case-mapping*
(with-input-txt-file (s "SpecialCasing")
(loop with hash = (make-hash-table)
for line = (read-line s nil nil) while line
unless (or (not (position #\# line)) (= 0 (position #\# line)))
do (destructuring-bind (%cp %lower %title %upper &optional context comment)
(split-string line #\;)
(unless (and context comment)
(let ((cp (parse-integer %cp :radix 16))
(lower (parse-codepoints %lower :singleton-list nil))
(title (parse-codepoints %title :singleton-list nil))
(upper (parse-codepoints %upper :singleton-list nil)))
(setf (gethash cp hash) (cons upper lower))
(unless (equal title upper) (push (cons cp title) *different-titlecases*)))))
finally (return hash)))
"Maps cp -> (cons uppercase|(uppercase ...) lowercase|(lowercase ...))")
(defparameter *misc-table* (make-array 3000 :fill-pointer 0)
"Holds the entries in the Unicode database's miscellanious array, stored as lists.
These lists have the form (gc-index bidi-index ccc digit decomposition-info
flags script line-break age). Flags is a bit-bashed integer containing
cl-both-case-p, has-case-p, and bidi-mirrored-p, and an east asian width.
Length should be adjusted when the standard changes.")
(defparameter *misc-hash* (make-hash-table :test #'equal)
"Maps a misc list to its position in the misc table.")
(defparameter *different-numerics* nil)
(defparameter *ucd-entries* (make-hash-table))
;; Mappings of the general categories and bidi classes to integers
Letter classes go first to optimize certain cl character type checks
BN is the first BIDI class so that unallocated characters are BN
Uppercase in the CL sense must have GC = 0 , lowercase must GC = 1
(defparameter *general-categories*
(init-indices '("Lu" "Ll" "Lt" "Lm" "Lo" "Cc" "Cf" "Co" "Cs" "Cn"
"Mc" "Me" "Mn" "Nd" "Nl" "No" "Pc" "Pd" "Pe" "Pf"
"Pi" "Po" "Ps" "Sc" "Sk" "Sm" "So" "Zl" "Zp" "Zs")))
(defparameter *bidi-classes*
(init-indices '("BN" "AL" "AN" "B" "CS" "EN" "ES" "ET" "L" "LRE" "LRO"
"NSM" "ON" "PDF" "R" "RLE" "RLO" "S" "WS" "LRI" "RLI"
"FSI" "PDI")))
(defparameter *east-asian-widths* (init-indices '("N" "A" "H" "W" "F" "Na")))
(defparameter *scripts*
(init-indices
'("Unknown" "Common" "Latin" "Greek" "Cyrillic" "Armenian" "Hebrew" "Arabic"
"Syriac" "Thaana" "Devanagari" "Bengali" "Gurmukhi" "Gujarati" "Oriya"
"Tamil" "Telugu" "Kannada" "Malayalam" "Sinhala" "Thai" "Lao" "Tibetan"
"Myanmar" "Georgian" "Hangul" "Ethiopic" "Cherokee" "Canadian_Aboriginal"
"Ogham" "Runic" "Khmer" "Mongolian" "Hiragana" "Katakana" "Bopomofo" "Han"
"Yi" "Old_Italic" "Gothic" "Deseret" "Inherited" "Tagalog" "Hanunoo" "Buhid"
"Tagbanwa" "Limbu" "Tai_Le" "Linear_B" "Ugaritic" "Shavian" "Osmanya"
"Cypriot" "Braille" "Buginese" "Coptic" "New_Tai_Lue" "Glagolitic"
"Tifinagh" "Syloti_Nagri" "Old_Persian" "Kharoshthi" "Balinese" "Cuneiform"
"Phoenician" "Phags_Pa" "Nko" "Sundanese" "Lepcha" "Ol_Chiki" "Vai"
"Saurashtra" "Kayah_Li" "Rejang" "Lycian" "Carian" "Lydian" "Cham"
"Tai_Tham" "Tai_Viet" "Avestan" "Egyptian_Hieroglyphs" "Samaritan" "Lisu"
"Bamum" "Javanese" "Meetei_Mayek" "Imperial_Aramaic" "Old_South_Arabian"
"Inscriptional_Parthian" "Inscriptional_Pahlavi" "Old_Turkic" "Kaithi"
"Batak" "Brahmi" "Mandaic" "Chakma" "Meroitic_Cursive"
"Meroitic_Hieroglyphs" "Miao" "Sharada" "Sora_Sompeng" "Takri"
"Bassa_Vah" "Mahajani" "Pahawh_Hmong" "Caucasian_Albanian" "Manichaean"
"Palmyrene" "Duployan" "Mende_Kikakui" "Pau_Cin_Hau" "Elbasan" "Modi"
"Psalter_Pahlavi" "Grantha" "Mro" "Siddham" "Khojki" "Nabataean" "Tirhuta"
"Khudawadi" "Old_North_Arabian" "Warang_Citi" "Linear_A" "Old_Permic")))
(defparameter *line-break-classes*
(init-indices
'("XX" "AI" "AL" "B2" "BA" "BB" "BK" "CB" "CJ" "CL" "CM" "CP" "CR" "EX" "GL"
"HL" "HY" "ID" "IN" "IS" "LF" "NL" "NS" "NU" "OP" "PO" "PR" "QU" "RI" "SA"
"SG" "SP" "SY" "WJ" "ZW")))
(defparameter *east-asian-width-table*
(with-input-txt-file (s "EastAsianWidth")
(loop with hash = (make-hash-table)
for line = (read-line s nil nil) while line
unless (or (not (position #\# line)) (= 0 (position #\# line)))
do (destructuring-bind (codepoints value)
(split-string
(string-right-trim " " (subseq line 0 (position #\# line))) #\;)
(let ((range (parse-codepoint-range codepoints))
(index (gethash value *east-asian-widths*)))
(loop for i from (car range) to (cadr range)
do (setf (gethash i hash) index))))
finally (return hash)))
"Table of East Asian Widths. Used in the creation of misc entries.")
(defparameter *script-table*
(with-input-txt-file (s "Scripts")
(loop with hash = (make-hash-table)
for line = (read-line s nil nil) while line
unless (or (not (position #\# line)) (= 0 (position #\# line)))
do (destructuring-bind (codepoints value)
(split-string
(string-right-trim " " (subseq line 0 (position #\# line))) #\;)
(let ((range (parse-codepoint-range codepoints))
(index (gethash (subseq value 1) *scripts*)))
(loop for i from (car range) to (cadr range)
do (setf (gethash i hash) index))))
finally (return hash)))
"Table of scripts. Used in the creation of misc entries.")
(defparameter *line-break-class-table*
(with-input-txt-file (s "LineBreakProperty")
(loop with hash = (make-hash-table)
for line = (read-line s nil nil) while line
unless (or (not (position #\# line)) (= 0 (position #\# line)))
do (destructuring-bind (codepoints value)
(split-string
(string-right-trim " " (subseq line 0 (position #\# line))) #\;)
(let ((range (parse-codepoint-range codepoints))
Hangul syllables temporarily go to " Unkwown "
(index (gethash value *line-break-classes* 0)))
(loop for i from (car range) to (cadr range)
do (setf (gethash i hash) index))))
finally (return hash)))
"Table of line break classes. Used in the creation of misc entries.")
(defparameter *age-table*
(with-input-txt-file (s "DerivedAge")
(loop with hash = (make-hash-table)
for line = (read-line s nil nil) while line
unless (or (not (position #\# line)) (= 0 (position #\# line)))
do (destructuring-bind (codepoints value)
(split-string
(string-right-trim " " (subseq line 0 (position #\# line))) #\;)
(let* ((range (parse-codepoint-range codepoints))
(age-parts (mapcar #'parse-integer (split-string value #\.)))
(age (logior (ash (car age-parts) 3) (cadr age-parts))))
(loop for i from (car range) to (cadr range)
do (setf (gethash i hash) age))))
finally (return hash)))
"Table of character ages. Used in the creation of misc entries.")
(defvar *block-first* nil)
Unicode data file parsing
(defun hash-misc (gc-index bidi-index ccc digit decomposition-info flags
script line-break age)
(let* ((list (list gc-index bidi-index ccc digit decomposition-info flags
script line-break age))
(index (gethash list *misc-hash*)))
(or index
(progn
(setf (gethash list *misc-hash*)
(fill-pointer *misc-table*))
(when (eql nil (vector-push list *misc-table*))
(error "Misc table too small."))
(gethash list *misc-hash*)))))
(defun ordered-ranges-member (item vector)
(labels ((recurse (start end)
(when (< start end)
(let* ((i (+ start (truncate (- end start) 2)))
(index (* 2 i))
(elt1 (svref vector index))
(elt2 (svref vector (1+ index))))
(cond ((< item elt1)
(recurse start i))
((> item elt2)
(recurse (+ 1 i) end))
(t
item))))))
(recurse 0 (/ (length vector) 2))))
(defun unallocated-bidi-class (code-point)
  "Return the default bidi class index for an unallocated CODE-POINT,
which depends only on the block the codepoint falls in."
  ;; See tests/data/DerivedBidiClass.txt for more information
  (flet ((in (vector class)
           (when (ordered-ranges-member code-point vector)
             (gethash class *bidi-classes*))))
    (cond
      ((in
        #(#x0600 #x07BF #x08A0 #x08FF #xFB50 #xFDCF #xFDF0 #xFDFF #xFE70 #xFEFF
          #x1EE00 #x1EEFF) "AL"))
      ((in
        #(#x0590 #x05FF #x07C0 #x089F #xFB1D #xFB4F #x10800 #x10FFF #x1E800 #x1EDFF
          #x1EF00 #x1EFFF) "R"))
      ((in #(#x20A0 #x20CF) "ET"))
      ;; BN is non-characters and default-ignorable.
      ;; Default-ignorable will be dealt with elsewhere
      ((in #(#xFDD0 #xFDEF #xFFFE #xFFFF #x1FFFE #x1FFFF #x2FFFE #x2FFFF
             #x3FFFE #x3FFFF #x4FFFE #x4FFFF #x5FFFE #x5FFFF #x6FFFE #x6FFFF
             #x7FFFE #x7FFFF #x8FFFE #x8FFFF #x9FFFE #x9FFFF #xAFFFE #xAFFFF
             #xBFFFE #xBFFFF #xCFFFE #xCFFFF #xDFFFE #xDFFFF #xEFFFE #xEFFFF
             #xFFFFE #xFFFFF #x10FFFE #x10FFFF)
           "BN"))
      ;; Everything else defaults to L.
      ((in #(#x0 #x10FFFF) "L"))
      (t (error "Somehow we've gone too far in unallocated bidi determination")))))
(defun complete-misc-table ()
  "Assign a misc entry and UCD record to every codepoint left unallocated
after UnicodeData.txt has been read."
  (loop for code-point from 0 to #x10FFFF do ; Flood-fill unallocated codepoints
        (unless (second (multiple-value-list (gethash code-point *ucd-entries*)))
          (let* ((unallocated-misc
                  ;; unallocated characters have a GC of "Cn", aren't digits
                  ;; (digit = 128), have a bidi that depends on their block, and
                  ;; don't decompose, combine, or have case. They have an East
                  ;; Asian Width (eaw) of "N" (0), and a script, line breaking
                  ;; class, and age of 0 ("Unknown"), unless some of those
                  ;; properties are otherwise assigned.
                  `(,(gethash "Cn" *general-categories*)
                    ,(unallocated-bidi-class code-point) 0 128 0
                    ,(gethash code-point *east-asian-width-table* 0)
                    0 ,(gethash code-point *line-break-class-table* 0)
                    ,(gethash code-point *age-table* 0)))
                 (unallocated-index (apply #'hash-misc unallocated-misc))
                 (unallocated-ucd (make-ucd :misc unallocated-index)))
            (setf (gethash code-point *ucd-entries*) unallocated-ucd)))))
(defun expand-decomposition (decomposition)
  "Recursively replace each codepoint in DECOMPOSITION that itself has a
canonical decomposition with that decomposition, fully expanded."
  (loop for cp in decomposition
        for ucd = (gethash cp *ucd-entries*)
        for length = (elt (aref *misc-table* (ucd-misc ucd)) 4)
        ;; Bit 7 of the decomposition-info field marks a compatibility
        ;; decomposition; only canonical (bit 7 clear) entries are expanded.
        if (and (not (logbitp 7 length))
                (plusp length))
        append (expand-decomposition (ucd-decomp ucd))
        else
        collect cp))
;;; Recursively expand canonical decompositions
(defun fixup-decompositions ()
  "Expand all canonical decompositions to a fixed point, then convert each
UCD entry's decomposition list into an index into *DECOMPOSITIONS*."
  ;; Keep expanding until no entry changes (decompositions can nest).
  (loop for did-something = nil
        do
        (loop for ucd being each hash-value of *ucd-entries*
              when (and (ucd-decomp ucd)
                        (not (logbitp 7 (elt (aref *misc-table* (ucd-misc ucd)) 4))))
              do
              (let ((expanded (expand-decomposition (ucd-decomp ucd))))
                (unless (equal expanded (ucd-decomp ucd))
                  (setf (ucd-decomp ucd) expanded
                        did-something t))))
        while did-something)
  (loop for i below (hash-table-count *ucd-entries*)
        for ucd = (gethash i *ucd-entries*)
        for decomp = (ucd-decomp ucd)
        do
        (setf (ucd-decomp ucd)
              (cond ((not (consp decomp)) 0)
                    ((logbitp 7 (elt (aref *misc-table* (ucd-misc ucd)) 4))
                     ;; Compatibility decomposition: store it as-is.
                     (prog1 (length *decompositions*)
                       (loop for cp in decomp
                             do (vector-push-extend cp *decompositions*))))
                    (t
                     ;; Canonical decomposition: expansion may have changed
                     ;; its length, so re-intern the misc entry first.
                     (let ((misc-entry (copy-list (aref *misc-table* (ucd-misc ucd)))))
                       (setf (elt misc-entry 4) (length decomp)
                             (ucd-misc ucd) (apply #'hash-misc misc-entry))
                       (prog1 (length *decompositions*)
                         (loop for cp in decomp
                               do (vector-push-extend cp *decompositions*)))))))))
(defun fixup-compositions ()
  "Remove from *COMPOSITIONS* every pair whose first character is not a
starter (combining class /= 0)."
  (flet ((fixup (k v)
           (declare (ignore v))
           (let* ((cp (car k))
                  (ucd (gethash cp *ucd-entries*))
                  (misc (aref *misc-table* (ucd-misc ucd)))
                  (ccc (third misc)))
             ;; We can do everything in the first pass except for
             ;; accounting for decompositions where the first
             ;; character of the decomposition is not a starter.
             (when (/= ccc 0)
               (remhash k *compositions*)))))
    (maphash #'fixup *compositions*)))
(defun add-jamo-information (line table)
  "Parse one Jamo.txt data LINE (\"CODE; NAME # comment\") and record the
short name for that Jamo codepoint in TABLE."
  (let* ((semicolon (position #\; line))
         (code (parse-integer (subseq line 0 semicolon) :radix 16))
         (field (subseq line (1+ semicolon)
                        (position #\; line :start (1+ semicolon))))
         (syllable (string-trim
                    " "
                    (subseq field 0 (position #\# field)))))
    (setf (gethash code table) syllable)))
(defun fixup-hangul-syllables ()
" Hangul Syllable Composition , Unicode 5.1 section 3 - 12 "
(let* ((sbase #xac00)
(lbase #x1100)
(vbase #x1161)
(tbase #x11a7)
(scount 11172)
(lcount 19)
(vcount 21)
(tcount 28)
(ncount (* vcount tcount))
(table (make-hash-table)))
(declare (ignore lcount))
(with-input-txt-file (*standard-input* "Jamo")
(loop for line = (read-line nil nil)
while line
if (position #\; line)
do (add-jamo-information line table)))
(dotimes (sindex scount)
(let* ((l (+ lbase (floor sindex ncount)))
(v (+ vbase (floor (mod sindex ncount) tcount)))
(tee (+ tbase (mod sindex tcount)))
(code-point (+ sbase sindex))
(name (format nil "HANGUL_SYLLABLE_~A~A~:[~A~;~]"
(gethash l table) (gethash v table)
(= tee tbase) (gethash tee table))))
(setf (gethash code-point *unicode-names*) name)))))
(defun normalize-character-name (name)
  "Convert a UnicodeData character NAME into the form used for Lisp
character names: spaces become underscores.  Returns NIL for empty names
and for the <...> placeholder names."
  (when (find #\_ name)
    (error "Bad name for a character: ~A" name))
  ;; U+1F5CF (PAGE)'s name conflicts with the ANSI CL-assigned
  ;; name for form feed (^L, #\Page). To avoid a case where
  ;; more than one character has a particular name while remaining
  ;; standards-compliant, we remove U+1F5CF's name here.
  (when (string= name "PAGE")
    (return-from normalize-character-name "UNICODE_PAGE"))
  (unless (or (zerop (length name)) (find #\< name) (find #\> name))
    (substitute #\_ #\Space name)))
;;; 3400 -- 4DB5 : cjk ideograph extension a ;Lo;0;L;;;;;N;;;;;
;;; AC00 -- D7A3 : hangul syllables ;Lo;0;L;;;;;N;;;;;
;;; D800 -- F8FF : surrogates and private use
;;; 20000 -- 2A6D6 : cjk ideograph extension b ;Lo;0;L;;;;;N;;;;;
;;; F0000 -- FFFFD : private use
;;; 100000 -- 10FFFD : private use
(defun encode-ucd-line (line code-point)
  "Encode one parsed UnicodeData.txt LINE (a list of its fields, minus the
codepoint) for CODE-POINT.  Returns (values ucd-entry name unicode-1-name),
or NIL for a <..., First> sentinel line (which only records *BLOCK-FIRST*)."
  (destructuring-bind (name general-category canonical-combining-class
                       bidi-class decomposition-type-and-mapping
                       decimal-digit digit numeric bidi-mirrored
                       unicode-1-name iso-10646-comment simple-uppercase
                       simple-lowercase simple-titlecase)
      line
    (declare (ignore iso-10646-comment))
    (if (and (> (length name) 8)
             (string= ", First>" name :start2 (- (length name) 8)))
        (progn
          (setf *block-first* code-point)
          nil)
        (let* ((gc-index (or (gethash general-category *general-categories*)
                             (error "unknown general category ~A"
                                    general-category)))
               (bidi-index (or (gethash bidi-class *bidi-classes*)
                               (error "unknown bidirectional class ~A"
                                      bidi-class)))
               (ccc (parse-integer canonical-combining-class))
               (digit-index (if (string= "" digit) 128 ; non-digits have high bit
                                (let ((%digit (parse-integer digit)))
                                  (if (string= digit decimal-digit)
                                      ;; decimal-digit-p is in bit 6
                                      (logior (ash 1 6) %digit) %digit))))
               (upper-index (unless (string= "" simple-uppercase)
                              (parse-integer simple-uppercase :radix 16)))
               (lower-index (unless (string= "" simple-lowercase)
                              (parse-integer simple-lowercase :radix 16)))
               (title-index (unless (string= "" simple-titlecase)
                              (parse-integer simple-titlecase :radix 16)))
               (cl-both-case-p (or (and (= gc-index 0) lower-index)
                                   (and (= gc-index 1) upper-index)))
               (bidi-mirrored-p (string= bidi-mirrored "Y"))
               (decomposition-info 0)
               ;; NOTE(review): no default value here, unlike the other
               ;; tables -- assumes every listed codepoint has an EAW
               ;; entry; confirm against EastAsianWidth.txt coverage.
               (eaw-index (gethash code-point *east-asian-width-table*))
               (script-index (gethash code-point *script-table* 0))
               (line-break-index (gethash code-point *line-break-class-table* 0))
               (age-index (gethash code-point *age-table* 0))
               decomposition)
          #+nil
          (when (and (not cl-both-case-p)
                     (< gc-index 2))
            (format t "~A~%" name))
          (when (string/= "" decomposition-type-and-mapping)
            (let* ((compatibility-p (position #\> decomposition-type-and-mapping)))
              (setf decomposition
                    (parse-codepoints
                     (subseq decomposition-type-and-mapping
                             (if compatibility-p (1+ compatibility-p) 0))))
              (when (assoc code-point *decomposition-corrections*)
                (setf decomposition
                      (list (cdr (assoc code-point *decomposition-corrections*)))))
              (setf decomposition-info
                    (logior (length decomposition) (if compatibility-p 128 0)))
              (unless compatibility-p
                ;; Primary composition excludes:
                ;; * singleton decompositions;
                ;; * decompositions of non-starters;
                ;; * script-specific decompositions;
                ;; * later-version decompositions;
                ;; * decompositions whose first character is a
                ;;   non-starter.
                ;; All but the last case can be handled here;
                ;; for the fixup, see FIXUP-COMPOSITIONS
                (when (and (> decomposition-info 1)
                           (= ccc 0)
                           (not (member code-point *composition-exclusions*)))
                  (unless (= decomposition-info 2)
                    (error "canonical decomposition unexpectedly long"))
                  (setf (gethash (cons (first decomposition)
                                       (second decomposition))
                                 *compositions*)
                        code-point)))))
          ;; Hangul decomposition; see Unicode 6.2 section 3-12
          (when (= code-point #xd7a3)
            ;; KLUDGE: The decomposition-length for Hangul syllables in the
            ;; misc database will be a bit of a lie. It doesn't really matter
            ;; since the only purpose of the length is to index into the
            ;; decompositions array (which Hangul decomposition doesn't use).
            ;; The decomposition index is 0 because we won't be going into the
            ;; array
            (setf decomposition-info 3))
          (unless (gethash code-point *case-mapping*) ; Exclude codepoints from SpecialCasing
            (when (string/= simple-uppercase simple-titlecase)
              (push (cons code-point title-index) *different-titlecases*))
            (and (or upper-index lower-index)
                 (setf (gethash code-point *case-mapping*)
                       (cons
                        (or upper-index code-point)
                        (or lower-index code-point)))))
          (when (string/= digit numeric)
            (push (cons code-point numeric) *different-numerics*))
          (when (> ccc 255)
            (error "canonical combining class too large ~A" ccc))
          (let* ((flags (logior
                         (if cl-both-case-p (ash 1 7) 0)
                         (if (gethash code-point *case-mapping*) (ash 1 6) 0)
                         (if bidi-mirrored-p (ash 1 5) 0)
                         eaw-index))
                 (misc-index (hash-misc gc-index bidi-index ccc digit-index
                                        decomposition-info flags script-index
                                        line-break-index age-index))
                 (result (make-ucd :misc misc-index
                                   :decomp decomposition)))
            (when (and (> (length name) 7)
                       (string= ", Last>" name :start2 (- (length name) 7)))
              ;; We can still do this despite East Asian Width being in the
              ;; database since each of the UCD <First><Last> blocks
              ;; has a consistent East Asian Width
              (loop for point from *block-first* to code-point do
                    (setf (gethash point *ucd-entries*) result)))
            (values result (normalize-character-name name)
                    (normalize-character-name unicode-1-name)))))))
(defun slurp-ucd-line (line)
  "Parse one raw UnicodeData.txt LINE and record its UCD entry and names."
  (let* ((split-line (split-string line #\;))
         (code-point (parse-integer (first split-line) :radix 16)))
    (multiple-value-bind (encoding name unicode-1-name)
        (encode-ucd-line (cdr split-line) code-point)
      (setf (gethash code-point *ucd-entries*) encoding
            (gethash code-point *unicode-names*) name)
      (when unicode-1-name
        (setf (gethash code-point *unicode-1-names*) unicode-1-name)))))
;;; this fixes up the case conversion discrepancy between CL and
;;; Unicode: CL operators depend on char-downcase / char-upcase being
;;; inverses, which is not true in general in Unicode even for
;;; characters which change case to single characters.
;;; Also, fix misassigned age values, which are not constant across blocks
(defun second-pass ()
  "Clear the cl-both-case flag (bit 7 of the misc flags) on every
character whose case conversions are not inverses, as CL requires."
  (let ((case-mapping
         (sort (loop for code-point being the hash-keys in *case-mapping*
                     using (hash-value value)
                     collect (cons code-point value))
               #'< :key #'car)))
    (loop for (code-point upper . lower) in case-mapping
          for misc-index = (ucd-misc (gethash code-point *ucd-entries*))
          for (gc bidi ccc digit decomp flags script lb age) = (aref *misc-table* misc-index)
          when (logbitp 7 flags) do
          ;; Disqualify multi-character mappings and round trips that do
          ;; not return to the original character.
          (when (or (not (atom upper)) (not (atom lower))
                    (and (= gc 0)
                         (not (equal (car (gethash lower *case-mapping*)) code-point)))
                    (and (= gc 1)
                         (not (equal (cdr (gethash upper *case-mapping*)) code-point))))
            (let* ((new-flags (clear-flag 7 flags))
                   (new-misc (hash-misc gc bidi ccc digit decomp new-flags script lb age)))
              (setf (ucd-misc (gethash code-point *ucd-entries*)) new-misc))))))
(defun fixup-casefolding ()
  "Record in *DIFFERENT-CASEFOLDS* every codepoint whose case fold
differs from its simple lowercase mapping (from CaseFolding.txt)."
  (with-input-txt-file (s "CaseFolding")
    (loop for line = (read-line s nil nil)
          while line
          ;; Skip comment lines and lines with no data fields.
          unless (or (not (position #\; line)) (equal (position #\# line) 0))
          do (destructuring-bind (original type mapping comment)
                 (split-string line #\;)
               (declare (ignore comment))
               (let ((cp (parse-integer original :radix 16))
                     (fold (parse-codepoints mapping :singleton-list nil)))
                 ;; Skip simple (S) and Turkic (T) foldings; only the
                 ;; common (C) and full (F) statuses are of interest.
                 ;; Fields keep their leading space after splitting.
                 (unless (or (string= type " S") (string= type " T"))
                   (when (not (equal (cdr (gethash cp *case-mapping*)) fold))
                     (push (cons cp fold) *different-casefolds*))))))))
(defun fixup-ages ()
  "Correct the age field of each codepoint's misc entry to the true age
from *AGE-TABLE* (block-copied entries may carry a stale age)."
  (let ((age (sort
              (loop for code-point being the hash-keys in *age-table*
                    using (hash-value true-age)
                    collect (cons code-point true-age))
              #'< :key #'car)))
    (loop for (code-point . true-age) in age
          for misc-index = (ucd-misc (gethash code-point *ucd-entries*))
          for (gc bidi ccc digit decomp flags script lb age) = (aref *misc-table* misc-index)
          unless (= age true-age) do
          (let* ((new-misc (hash-misc gc bidi ccc digit decomp flags script lb true-age))
                 (new-ucd (make-ucd
                           :misc new-misc
                           :decomp (ucd-decomp (gethash code-point *ucd-entries*)))))
            (setf (gethash code-point *ucd-entries*) new-ucd)))))
(defun slurp-ucd ()
  "Read UnicodeData.txt and then run all the fixup passes, populating the
in-memory UCD tables."
  (with-input-txt-file (*standard-input* "UnicodeData")
    (format t "~%//slurp-ucd~%")
    (loop for line = (read-line nil nil)
          while line
          do (slurp-ucd-line line)))
  (second-pass)
  (fixup-compositions)
  (fixup-hangul-syllables)
  (complete-misc-table)
  (fixup-casefolding)
  (fixup-ages)
  (fixup-decompositions)
  nil)
;;;; PropList.txt
(defparameter **proplist-properties** nil
  "A plist-style list (name ranges-vector ...) of properties extracted
from PropList.txt and DerivedNormalizationProps.txt.")
(defun parse-property (stream &optional name)
  "Read one property block from STREAM into a flat adjustable vector of
range starts and ends; when NAME is given, record the pair in
**PROPLIST-PROPERTIES**."
  (let ((result (make-array 1 :fill-pointer 0 :adjustable t)))
    ;; FIXME: something in this loop provokes a warning from CLISP
    (loop for line = (read-line stream nil nil)
          ;; Deal with Blah=Blah in DerivedNormalizationProps.txt
          while (and line (not (position #\= (substitute #\Space #\= line :count 1))))
          for entry = (subseq line 0 (position #\# line))
          when (and entry (string/= entry ""))
          do
          (destructuring-bind (start end)
              (parse-codepoint-range (car (split-string entry #\;)))
            (vector-push-extend start result)
            (vector-push-extend end result)))
    (when name
      (push name **proplist-properties**)
      (push result **proplist-properties**))))
(defun slurp-proplist ()
  "Read PropList.txt and DerivedNormalizationProps.txt, populating
**PROPLIST-PROPERTIES** in file order."
  (with-input-txt-file (s "PropList")
    (parse-property s) ;; Initial comments
    (parse-property s :white-space)
    (parse-property s :bidi-control)
    (parse-property s :join-control)
    (parse-property s :dash)
    (parse-property s :hyphen)
    (parse-property s :quotation-mark)
    (parse-property s :terminal-punctuation)
    (parse-property s :other-math)
    (parse-property s :hex-digit)
    (parse-property s :ascii-hex-digit)
    (parse-property s :other-alphabetic)
    (parse-property s :ideographic)
    (parse-property s :diacritic)
    (parse-property s :extender)
    (parse-property s :other-lowercase)
    (parse-property s :other-uppercase)
    (parse-property s :noncharacter-code-point)
    (parse-property s :other-grapheme-extend)
    (parse-property s :ids-binary-operator)
    (parse-property s :ids-trinary-operator)
    (parse-property s :radical)
    (parse-property s :unified-ideograph)
    (parse-property s :other-default-ignorable-code-point)
    (parse-property s :deprecated)
    (parse-property s :soft-dotted)
    (parse-property s :logical-order-exception)
    (parse-property s :other-id-start)
    (parse-property s :other-id-continue)
    (parse-property s :sterm)
    (parse-property s :variation-selector)
    (parse-property s :pattern-white-space)
    (parse-property s :pattern-syntax))
  (with-input-txt-file (s "DerivedNormalizationProps")
    (parse-property s) ;; Initial comments
    (parse-property s) ;; FC_NFKC_Closure
    (parse-property s) ;; FC_NFKC_Closure
    (parse-property s) ;; Full_Composition_Exclusion
    (parse-property s) ;; NFD_QC Comments
    (parse-property s :nfd-qc)
    (parse-property s) ;; NFC_QC Comments
    (parse-property s :nfc-qc)
    (parse-property s :nfc-qc-maybe)
    (parse-property s) ;; NFKD_QC Comments
    (parse-property s :nfkd-qc)
    (parse-property s) ;; NFKC_QC Comments
    (parse-property s :nfkc-qc)
    (parse-property s :nfkc-qc-maybe))
  (setf **proplist-properties** (nreverse **proplist-properties**))
  (values))
;;; Collation keys
(defvar *maximum-variable-key* 1)
(defun bitpack-collation-key (primary secondary tertiary)
  "Pack the three collation weights into a single non-negative integer."
  ;; 0 <= primary <= #xFFFD (default table)
  ;; 0 <= secondary <= #x10C [9 bits]
  ;; 0 <= tertiary <= #x1E (#x1F allowed) [5 bits]
  ;; Because of this, the bit packs don't overlap
  (logior (ash primary 16) (ash secondary 5) tertiary))
(defun parse-collation-line (line)
  "Parse one Allkeys data LINE, returning (values codepoint-list
packed-collation-keys)."
  (destructuring-bind (%code-points %keys) (split-string line #\;)
    (let* ((code-points (parse-codepoints %code-points))
           ;; Strip spaces and brackets, leaving one "p.s.t" (or "p*s.t")
           ;; string per collation element.
           (keys
            (remove
             ""
             (split-string (remove #\[ (remove #\Space %keys)) #\]) :test #'string=))
           (ret
            (loop for key in keys
                  for variable-p = (position #\* key)
                  for parsed =
                  ;; Don't need first value, it's always just ""
                  (cdr (mapcar (lambda (x) (parse-integer x :radix 16 :junk-allowed t))
                               (split-string (substitute #\. #\* key) #\.)))
                  collect
                  (destructuring-bind (primary secondary tertiary) parsed
                    ;; Track the largest primary weight of any variable
                    ;; ("*"-marked) collation element.
                    (when variable-p (setf *maximum-variable-key*
                                           (max primary *maximum-variable-key*)))
                    (bitpack-collation-key primary secondary tertiary)))))
      (values code-points ret))))
(defparameter *collation-table*
  (with-input-txt-file (stream "Allkeys70")
    (loop with hash = (make-hash-table :test #'equal)
          for line = (read-line stream nil nil) while line
          ;; Skip comment lines (those starting with #).
          unless (eql 0 (position #\# line))
          do (multiple-value-bind (codepoints keys) (parse-collation-line line)
               (setf (gethash codepoints hash) keys))
          finally (return hash)))
  "Maps a list of codepoints to its list of packed collation keys.")
;;; Other properties
(defparameter *confusables*
  (with-input-txt-file (s "ConfusablesEdited")
    (loop for line = (read-line s nil nil) while line
          ;; Skip comment lines; each data line's fields are separated by <.
          unless (eql 0 (position #\# line))
          collect (mapcar #'parse-codepoints (split-string line #\<))))
  "List of confusable codepoint sets")
(defparameter *bidi-mirroring-glyphs*
  (with-input-txt-file (s "BidiMirroring")
    (loop for line = (read-line s nil nil) while line
          ;; Skip blank lines and comment lines.
          when (and (plusp (length line))
                    (char/= (char line 0) #\#))
          collect
          (mapcar
           #'(lambda (c) (parse-codepoints c :singleton-list nil))
           (split-string (subseq line 0 (position #\# line)) #\;))))
  "List of BIDI mirroring glyph pairs")
(defparameter *block-ranges*
  (with-input-txt-file (stream "Blocks")
    (loop ;; 252 presumably matches the block count of the Unicode version
          ;; in use -- TODO confirm against Blocks.txt before upgrading.
          with result = (make-array (* 252 2) :fill-pointer 0)
          for line = (read-line stream nil nil) while line
          unless (or (string= line "") (position #\# line))
          do
          (map nil #'(lambda (x) (vector-push x result))
               (parse-codepoint-range (car (split-string line #\;))))
          finally (return result)))
  "Vector of block starts and ends in a form acceptable to `ordered-ranges-position`.
Used to look up block data.")
;;; Output code
(defun write-codepoint (code-point stream)
  "Write CODE-POINT to STREAM as three bytes, most significant first."
  (declare (type (unsigned-byte 32) code-point))
  (loop for position from 16 downto 0 by 8
        do (write-byte (ldb (byte 8 position) code-point) stream)))
(defun write-4-byte (value stream)
  "Write VALUE to STREAM as four bytes, most significant first."
  (declare (type (unsigned-byte 32) value))
  (loop for position from 24 downto 0 by 8
        do (write-byte (ldb (byte 8 position) value) stream)))
(defun output-misc-data ()
  "Write *MISC-TABLE* to ucdmisc.dat, one byte per field, nine bytes per
entry."
  (with-output-dat-file (stream "ucdmisc")
    (loop for (gc-index bidi-index ccc digit decomposition-info flags
               script line-break age)
          across *misc-table*
          ;; three bits spare here
          do (write-byte gc-index stream)
             ;; three bits spare here
             (write-byte bidi-index stream)
             (write-byte ccc stream)
             ;; bits 0-3 encode [0,9], bit 7 is for non-digit status,
             ;; bit 6 is the decimal-digit flag. Two bits spare
             (write-byte digit stream)
             (write-byte decomposition-info stream)
             (write-byte flags stream) ; includes EAW in bits 0-3, bit 4 is free
             (write-byte script stream)
             (write-byte line-break stream)
             (write-byte age stream))))
(defun output-ucd-data ()
  "Write the two-level codepoint lookup tables to ucdhigh.dat and
ucdlow.dat."
  (with-output-dat-file (high-pages "ucdhigh")
    (with-output-dat-file (low-pages "ucdlow")
      ;; Output either the index into the misc array (if all the points in the
      ;; high-page have the same misc value) or an index into the low-pages
      ;; array / 256. For indexes into the misc array, set bit 15 (high bit).
      ;; We should never have that many misc entries, so that's not a problem.
      ;; If Unicode ever allocates an all-decomposing <First>/<Last> block (the
      ;; only way to get a high page that outputs as the same and has a
      ;; non-zero decomposition-index, which there's nowhere to store now),
      ;; find me, slap me with a fish, and have fun fixing this mess.
      (loop with low-pages-index = 0
            for high-page from 0 to (ash #x10FFFF -8)
            for uniq-ucd-entries = nil do
            (loop for low-page from 0 to #xFF do
                  (pushnew
                   (gethash (logior low-page (ash high-page 8)) *ucd-entries*)
                   uniq-ucd-entries :test #'equalp))
            (flet ((write-2-byte (int stream)
                     (declare (type (unsigned-byte 16) int))
                     (write-byte (ldb (byte 8 8) int) stream)
                     (write-byte (ldb (byte 8 0) int) stream)))
              (case (length uniq-ucd-entries)
                (0 (error "Somehow, a high page has no codepoints in it."))
                (1 (write-2-byte (logior
                                  (ash 1 15)
                                  (ucd-misc (car uniq-ucd-entries)))
                                 high-pages))
                (t (loop for low-page from 0 to #xFF
                         for cp = (logior low-page (ash high-page 8))
                         for entry = (gethash cp *ucd-entries*) do
                         (write-2-byte (ucd-misc entry) low-pages)
                         (write-2-byte (ucd-decomp entry) low-pages)
                         finally (write-2-byte low-pages-index high-pages)
                                 (incf low-pages-index)))))
            finally (assert (< low-pages-index (ash 1 15))) (print low-pages-index)))))
(defun output-decomposition-data ()
  "Write the flattened decomposition codepoints to decomp.dat and print
the total count."
  (with-output-dat-file (stream "decomp")
    (loop for cp across *decompositions* do
          (write-codepoint cp stream)))
  (print (length *decompositions*)))
(defun output-composition-data ()
  "Write the primary composition pairs to comp.dat as triples
(first second composite), sorted by composite codepoint."
  (with-output-dat-file (stream "comp")
    (let (comp)
      (maphash (lambda (k v) (push (cons k v) comp)) *compositions*)
      (setq comp (sort comp #'< :key #'cdr))
      (loop for (k . v) in comp
            do (write-codepoint (car k) stream)
               (write-codepoint (cdr k) stream)
               (write-codepoint v stream)))))
(defun output-case-data ()
  "Write the case mappings to case.dat and the 64-codepoint page index to
casepages.dat and casepages.lisp-expr."
  (let (casing-pages points-with-case)
    (with-output-dat-file (stream "case")
      (loop for cp being the hash-keys in *case-mapping*
            do (push cp points-with-case))
      (setf points-with-case (sort points-with-case #'<))
      (loop for cp in points-with-case
            for (upper . lower) = (gethash cp *case-mapping*) do
            (pushnew (ash cp -6) casing-pages)
            (write-codepoint cp stream)
            ;; A length byte of 0 means a single codepoint follows;
            ;; otherwise that many codepoints follow.
            (write-byte (if (atom upper) 0 (length upper)) stream)
            (if (atom upper) (write-codepoint upper stream)
                (map 'nil (lambda (c) (write-codepoint c stream)) upper))
            (write-byte (if (atom lower) 0 (length lower)) stream)
            (if (atom lower) (write-codepoint lower stream)
                (map 'nil (lambda (c) (write-codepoint c stream)) lower))))
    (setf casing-pages (sort casing-pages #'<))
    (assert (< (length casing-pages) 256))
    ;; 255 marks a page with no cased characters.
    (let* ((size (1+ (reduce #'max casing-pages)))
           (array (make-array size :initial-element 255))
           (page -1))
      (dolist (entry casing-pages)
        (setf (aref array entry) (incf page)))
      (with-output-dat-file (stream "casepages")
        (dotimes (i size)
          (write-byte (aref array i) stream))))
    (with-output-lisp-expr-file (stream "casepages")
      (print casing-pages stream))))
(defun output-collation-data ()
  "Write the collation table to collation.dat, sorted lexicographically by
codepoint list, plus the maximum variable key to a lisp-expr file."
  (with-output-dat-file (stream "collation")
    (flet ((length-tag (list1 list2)
             ;; takes two lists of UB32 (with the caveat that list1[0]
             ;; needs its high 8 bits free (codepoints always have
             ;; that) and writes them out prefixed by a length tag
             (let* ((l1 (length list1)) (l2 (length list2))
                    (tag (dpb l1 (byte 4 28) (dpb l2 (byte 5 23) (car list1)))))
               (assert (<= l1 3))
               (write-4-byte tag stream)
               (map nil #'(lambda (l) (write-4-byte l stream)) (append (cdr list1) list2)))))
      (let (coll)
        (maphash (lambda (k v) (push (cons k v) coll)) *collation-table*)
        ;; Sort lexicographically on the codepoint lists so the reader
        ;; can binary-search the output.
        (labels ((sorter (o1 o2)
                   (cond
                     ((null o1) t)
                     ((null o2) nil)
                     (t (or (< (car o1) (car o2))
                            (and (= (car o1) (car o2))
                                 (sorter (cdr o1) (cdr o2))))))))
          (setq coll (sort coll #'sorter :key #'car)))
        (loop for (k . v) in coll
              do (length-tag k v)))))
  (with-output-lisp-expr-file (*standard-output* "other-collation-info")
    (write-string ";;; The highest primary variable collation index")
    (terpri)
    (prin1 *maximum-variable-key*) (terpri)))
(defun output ()
  "Emit all generated data files: the binary tables plus the lisp-expr
sidecar files consumed by the SBCL build."
  (output-misc-data)
  (output-ucd-data)
  (output-decomposition-data)
  (output-composition-data)
  (output-case-data)
  (output-collation-data)
  (with-output-lisp-expr-file (*standard-output* "misc-properties")
    (prin1 **proplist-properties**))
  (with-output-lisp-expr-file (f "ucd-names")
    (write-string ";;; Do not edit by hand: generated by ucd.lisp" f)
    (maphash (lambda (code name)
               (when name
                 (print code f)
                 (prin1 name f)))
             *unicode-names*)
    ;; Drop the table once written; it is not needed afterwards.
    (setf *unicode-names* nil))
  (with-output-lisp-expr-file (f "ucd1-names")
    (write-string ";;; Do not edit by hand: generated by ucd.lisp" f)
    (maphash (lambda (code name)
               (when name
                 (print code f)
                 (prin1 name f)))
             *unicode-1-names*)
    (setf *unicode-1-names* nil))
  (with-output-lisp-expr-file (*standard-output* "numerics")
    ;; Flat vector of alternating codepoint / numeric value.
    (let ((result (make-array (* (length *different-numerics*) 2))))
      (loop for (code . value) in (sort *different-numerics* #'< :key #'car)
            for i by 2
            do (setf (aref result i) code
                     (aref result (1+ i)) (read-from-string value)))
      (prin1 result)))
  (with-output-lisp-expr-file (*standard-output* "titlecases")
    (prin1 *different-titlecases*))
  (with-output-lisp-expr-file (*standard-output* "foldcases")
    (prin1 *different-casefolds*))
  (with-output-lisp-expr-file (*standard-output* "confusables")
    (prin1 *confusables*))
  (with-output-lisp-expr-file (*standard-output* "bidi-mirrors")
    (prin1 *bidi-mirroring-glyphs*))
  (with-output-lisp-expr-file (*standard-output* "blocks")
    (prin1 *block-ranges*))
  (values))
| null | https://raw.githubusercontent.com/cracauer/sbcl-ita/f85a8cf0d1fb6e8c7b258e898b7af3233713e0b9/tools-for-build/ucd.lisp | lisp | Common functions
Output storage globals
line)
)
)
Mappings of the general categories and bidi classes to integers
)
)
)
)
See tests/data/DerivedBidiClass.txt for more information
Default-ignorable will be dealt with elsewhere
Flood-fil unallocated codepoints
class, and age of 0 ("Unknown"), unless some of those
properties are otherwise assigned.
Recursively expand canonical decompositions
character of the decomposition is not a starter.
))
line)
Lo;0;L;;;;;N ; ; ; ; ;
Lo;0;L;;;;;N ; ; ; ; ;
Lo;0;L;;;;;N ; ; ; ; ;
non-digits have high bit
Primary composition excludes:
* singleton decompositions;
* decompositions of non-starters;
* script-specific decompositions;
* later-version decompositions;
non-starter.
All but the last case can be handled here;
see Unicode 6.2 section 3 - 12
misc database will be a bit of a lie. It doesn't really matter
since the only purpose of the length is to index into the
The decomposition index is 0 because we won't be going into the
array
))
characters which change case to single characters.
Also, fix misassigned age values, which are not constant across blocks
)
)))
Initial comments
Initial comments
FC_NFKC_Closure
FC_NFKC_Closure
Full_Composition_Exclusion
NFD_QC Comments
NFC_QC Comments
NFKD_QC Comments
Collation keys
0 <= primary <= #xFFFD (default table)
Because of this, the bit packs don't overlap
)
Other properties
))))
))))
Output code
Output either the index into the misc array (if all the points in the
high-page have the same misc value) or an index into the law-pages
We should never have that many misc entries, so that's not a problem.
If Unicode ever allocates an all-decomposing <First>/<Last> block (the
only way to get a high page that outputs as the same and has a
non-zero decomposition-index, which there's nowhere to store now),
find me, slap me with a fish, and have fun fixing this mess.
that) and do | (in-package "SB-COLD")
(defparameter *output-directory*
(merge-pathnames
(make-pathname :directory '(:relative :up "output"))
(make-pathname :directory (pathname-directory *load-truename*))))
(defparameter *unicode-character-database*
(make-pathname :directory (pathname-directory *load-truename*)))
(defmacro with-input-txt-file ((s name) &body body)
`(with-open-file (,s (make-pathname :name ,name :type "txt"
:defaults *unicode-character-database*))
,@body))
(defmacro with-output-dat-file ((s name) &body body)
`(with-open-file (,s (make-pathname :name ,name :type "dat"
:defaults *output-directory*)
:direction :output :element-type '(unsigned-byte 8)
:if-exists :supersede :if-does-not-exist :create)
,@body))
(defmacro with-ucd-output-syntax (&body body)
`(with-standard-io-syntax
(let ((*readtable* (copy-readtable))
(*print-readably* nil)
(*print-pretty* t))
,@body)))
(defmacro with-output-lisp-expr-file ((s name) &body body)
`(with-open-file (,s (make-pathname :name ,name :type "lisp-expr"
:defaults *output-directory*)
:direction :output :element-type 'character
:if-exists :supersede :if-does-not-exist :create)
(with-ucd-output-syntax
,@body)))
(defun split-string (line character)
(loop for prev-position = 0 then (1+ position)
for position = (position character line :start prev-position)
collect (subseq line prev-position position)
do (unless position
(loop-finish))))
(defun parse-codepoints (string &key (singleton-list t))
"Gets a list of codepoints out of 'aaaa bbbb cccc', stripping surrounding space"
(let ((list (mapcar
(lambda (s) (parse-integer s :radix 16))
(remove "" (split-string string #\Space) :test #'string=))))
(if (not (or (cdr list) singleton-list)) (car list) list)))
(defun parse-codepoint-range (string)
"Parse the Unicode syntax DDDD|DDDD..DDDD into an inclusive range (start end)"
(destructuring-bind (start &optional empty end) (split-string string #\.)
(declare (ignore empty))
(let* ((head (parse-integer start :radix 16))
(tail (if end
(parse-integer end :radix 16 :end (position #\Space end))
head)))
(list head tail))))
(defun init-indices (strings)
(let ((hash (make-hash-table :test #'equal)))
(loop for string in strings
for index from 0
do (setf (gethash string hash) index))
hash))
(defun clear-flag (bit integer)
(logandc2 integer (ash 1 bit)))
(defstruct ucd misc decomp)
(defparameter *unicode-names* (make-hash-table))
(defparameter *unicode-1-names* (make-hash-table))
(defparameter *decompositions*
(make-array 10000 :element-type '(unsigned-byte 24) :fill-pointer 0
10000 is not a significant number
(defparameter *decomposition-corrections*
(with-input-txt-file (s "NormalizationCorrections")
(loop with result = nil
for line = (read-line s nil nil) while line
(destructuring-bind (cp old-decomp correction version)
(declare (ignore old-decomp version))
(push (cons (parse-integer cp :radix 16)
(parse-integer correction :radix 16))
result)))
finally (return result)))
"List of decompsotions that were amended in Unicode corrigenda")
(defparameter *compositions* (make-hash-table :test #'equal))
(defparameter *composition-exclusions*
(with-input-txt-file (s "CompositionExclusions")
(loop with result = nil
for line = (read-line s nil nil) while line
when (and (> (length line) 0) (char/= (char line 0) #\#))
do (push (parse-integer line :end (position #\Space line) :radix 16)
result) finally (return result)))
"Characters that are excluded from composition according to UAX#15")
(defparameter *different-titlecases* nil)
(defparameter *different-casefolds* nil)
(defparameter *case-mapping*
(with-input-txt-file (s "SpecialCasing")
(loop with hash = (make-hash-table)
for line = (read-line s nil nil) while line
unless (or (not (position #\# line)) (= 0 (position #\# line)))
do (destructuring-bind (%cp %lower %title %upper &optional context comment)
(unless (and context comment)
(let ((cp (parse-integer %cp :radix 16))
(lower (parse-codepoints %lower :singleton-list nil))
(title (parse-codepoints %title :singleton-list nil))
(upper (parse-codepoints %upper :singleton-list nil)))
(setf (gethash cp hash) (cons upper lower))
(unless (equal title upper) (push (cons cp title) *different-titlecases*)))))
finally (return hash)))
"Maps cp -> (cons uppercase|(uppercase ...) lowercase|(lowercase ...))")
(defparameter *misc-table* (make-array 3000 :fill-pointer 0)
"Holds the entries in the Unicode database's miscellanious array, stored as lists.
These lists have the form (gc-index bidi-index ccc digit decomposition-info
flags script line-break age). Flags is a bit-bashed integer containing
cl-both-case-p, has-case-p, and bidi-mirrored-p, and an east asian width.
Length should be adjusted when the standard changes.")
(defparameter *misc-hash* (make-hash-table :test #'equal)
"Maps a misc list to its position in the misc table.")
(defparameter *different-numerics* nil)
(defparameter *ucd-entries* (make-hash-table))
Letter classes go first to optimize certain cl character type checks
BN is the first BIDI class so that unallocated characters are BN
Uppercase in the CL sense must have GC = 0 , lowercase must GC = 1
(defparameter *general-categories*
(init-indices '("Lu" "Ll" "Lt" "Lm" "Lo" "Cc" "Cf" "Co" "Cs" "Cn"
"Mc" "Me" "Mn" "Nd" "Nl" "No" "Pc" "Pd" "Pe" "Pf"
"Pi" "Po" "Ps" "Sc" "Sk" "Sm" "So" "Zl" "Zp" "Zs")))
(defparameter *bidi-classes*
(init-indices '("BN" "AL" "AN" "B" "CS" "EN" "ES" "ET" "L" "LRE" "LRO"
"NSM" "ON" "PDF" "R" "RLE" "RLO" "S" "WS" "LRI" "RLI"
"FSI" "PDI")))
(defparameter *east-asian-widths* (init-indices '("N" "A" "H" "W" "F" "Na")))
(defparameter *scripts*
(init-indices
'("Unknown" "Common" "Latin" "Greek" "Cyrillic" "Armenian" "Hebrew" "Arabic"
"Syriac" "Thaana" "Devanagari" "Bengali" "Gurmukhi" "Gujarati" "Oriya"
"Tamil" "Telugu" "Kannada" "Malayalam" "Sinhala" "Thai" "Lao" "Tibetan"
"Myanmar" "Georgian" "Hangul" "Ethiopic" "Cherokee" "Canadian_Aboriginal"
"Ogham" "Runic" "Khmer" "Mongolian" "Hiragana" "Katakana" "Bopomofo" "Han"
"Yi" "Old_Italic" "Gothic" "Deseret" "Inherited" "Tagalog" "Hanunoo" "Buhid"
"Tagbanwa" "Limbu" "Tai_Le" "Linear_B" "Ugaritic" "Shavian" "Osmanya"
"Cypriot" "Braille" "Buginese" "Coptic" "New_Tai_Lue" "Glagolitic"
"Tifinagh" "Syloti_Nagri" "Old_Persian" "Kharoshthi" "Balinese" "Cuneiform"
"Phoenician" "Phags_Pa" "Nko" "Sundanese" "Lepcha" "Ol_Chiki" "Vai"
"Saurashtra" "Kayah_Li" "Rejang" "Lycian" "Carian" "Lydian" "Cham"
"Tai_Tham" "Tai_Viet" "Avestan" "Egyptian_Hieroglyphs" "Samaritan" "Lisu"
"Bamum" "Javanese" "Meetei_Mayek" "Imperial_Aramaic" "Old_South_Arabian"
"Inscriptional_Parthian" "Inscriptional_Pahlavi" "Old_Turkic" "Kaithi"
"Batak" "Brahmi" "Mandaic" "Chakma" "Meroitic_Cursive"
"Meroitic_Hieroglyphs" "Miao" "Sharada" "Sora_Sompeng" "Takri"
"Bassa_Vah" "Mahajani" "Pahawh_Hmong" "Caucasian_Albanian" "Manichaean"
"Palmyrene" "Duployan" "Mende_Kikakui" "Pau_Cin_Hau" "Elbasan" "Modi"
"Psalter_Pahlavi" "Grantha" "Mro" "Siddham" "Khojki" "Nabataean" "Tirhuta"
"Khudawadi" "Old_North_Arabian" "Warang_Citi" "Linear_A" "Old_Permic")))
(defparameter *line-break-classes*
(init-indices
'("XX" "AI" "AL" "B2" "BA" "BB" "BK" "CB" "CJ" "CL" "CM" "CP" "CR" "EX" "GL"
"HL" "HY" "ID" "IN" "IS" "LF" "NL" "NS" "NU" "OP" "PO" "PR" "QU" "RI" "SA"
"SG" "SP" "SY" "WJ" "ZW")))
(defparameter *east-asian-width-table*
(with-input-txt-file (s "EastAsianWidth")
(loop with hash = (make-hash-table)
for line = (read-line s nil nil) while line
unless (or (not (position #\# line)) (= 0 (position #\# line)))
do (destructuring-bind (codepoints value)
(split-string
(let ((range (parse-codepoint-range codepoints))
(index (gethash value *east-asian-widths*)))
(loop for i from (car range) to (cadr range)
do (setf (gethash i hash) index))))
finally (return hash)))
"Table of East Asian Widths. Used in the creation of misc entries.")
(defparameter *script-table*
(with-input-txt-file (s "Scripts")
(loop with hash = (make-hash-table)
for line = (read-line s nil nil) while line
unless (or (not (position #\# line)) (= 0 (position #\# line)))
do (destructuring-bind (codepoints value)
(split-string
(let ((range (parse-codepoint-range codepoints))
(index (gethash (subseq value 1) *scripts*)))
(loop for i from (car range) to (cadr range)
do (setf (gethash i hash) index))))
finally (return hash)))
"Table of scripts. Used in the creation of misc entries.")
(defparameter *line-break-class-table*
(with-input-txt-file (s "LineBreakProperty")
(loop with hash = (make-hash-table)
for line = (read-line s nil nil) while line
unless (or (not (position #\# line)) (= 0 (position #\# line)))
do (destructuring-bind (codepoints value)
(split-string
(let ((range (parse-codepoint-range codepoints))
Hangul syllables temporarily go to " Unkwown "
(index (gethash value *line-break-classes* 0)))
(loop for i from (car range) to (cadr range)
do (setf (gethash i hash) index))))
finally (return hash)))
"Table of line break classes. Used in the creation of misc entries.")
(defparameter *age-table*
(with-input-txt-file (s "DerivedAge")
(loop with hash = (make-hash-table)
for line = (read-line s nil nil) while line
unless (or (not (position #\# line)) (= 0 (position #\# line)))
do (destructuring-bind (codepoints value)
(split-string
(let* ((range (parse-codepoint-range codepoints))
(age-parts (mapcar #'parse-integer (split-string value #\.)))
(age (logior (ash (car age-parts) 3) (cadr age-parts))))
(loop for i from (car range) to (cadr range)
do (setf (gethash i hash) age))))
finally (return hash)))
"Table of character ages. Used in the creation of misc entries.")
(defvar *block-first* nil)
Unicode data file parsing
;; Intern the misc entry with these nine fields, returning its index in
;; *MISC-TABLE*.  An already-seen field list reuses its existing index
;; (via *MISC-HASH*); a new one is pushed onto the table.
(defun hash-misc (gc-index bidi-index ccc digit decomposition-info flags
                  script line-break age)
  (let* ((list (list gc-index bidi-index ccc digit decomposition-info flags
                     script line-break age))
         (index (gethash list *misc-hash*)))
    (or index
        (progn
          (setf (gethash list *misc-hash*)
                (fill-pointer *misc-table*))
          ;; VECTOR-PUSH returns NIL when the fill pointer has reached the
          ;; end of the preallocated vector, i.e. the table is full.
          (when (eql nil (vector-push list *misc-table*))
            (error "Misc table too small."))
          (gethash list *misc-hash*)))))
;; Binary-search ITEM in VECTOR, a flat simple-vector of inclusive
;; (start end) range pairs sorted in increasing order.  Return ITEM when
;; some range contains it, NIL otherwise.
(defun ordered-ranges-member (item vector)
  (labels ((recurse (start end)
             ;; START/END index range *pairs*, not vector slots.
             (when (< start end)
               (let* ((i (+ start (truncate (- end start) 2)))
                      (index (* 2 i))   ; pair I lives in slots 2i and 2i+1
                      (elt1 (svref vector index))
                      (elt2 (svref vector (1+ index))))
                 (cond ((< item elt1)
                        (recurse start i))
                       ((> item elt2)
                        (recurse (+ 1 i) end))
                       (t
                        item))))))
    (recurse 0 (/ (length vector) 2))))
(defun unallocated-bidi-class (code-point)
(flet ((in (vector class)
(when (ordered-ranges-member code-point vector)
(gethash class *bidi-classes*))))
(cond
((in
#(#x0600 #x07BF #x08A0 #x08FF #xFB50 #xFDCF #xFDF0 #xFDFF #xFE70 #xFEFF
#x1EE00 #x1EEFF) "AL"))
((in
#(#x0590 #x05FF #x07C0 #x089F #xFB1D #xFB4F #x10800 #x10FFF #x1E800 #x1EDFF
#x1EF00 #x1EFFF) "R"))
((in #(#x20A0 #x20CF) "ET"))
BN is non - characters and default - ignorable .
((in #(#xFDD0 #xFDEF #xFFFE #xFFFF #x1FFFE #x1FFFF #x2FFFE #x2FFFF
#x3FFFE #x3FFFF #x4FFFE #x4FFFF #x5FFFE #x5FFFF #x6FFFE #x6FFFF
#x7FFFE #x7FFFF #x8FFFE #x8FFFF #x9FFFE #x9FFFF #xAFFFE #xAFFFF
#xBFFFE #xBFFFF #xCFFFE #xCFFFF #xDFFFE #xDFFFF #xEFFFE #xEFFFF
#xFFFFE #xFFFFF #x10FFFE #x10FFFF)
"BN"))
((in #(#x0 #x10FFFF) "L"))
(t (error "Somehow we've gone too far in unallocated bidi determination")))))
(defun complete-misc-table ()
(unless (second (multiple-value-list (gethash code-point *ucd-entries*)))
(let* ((unallocated-misc
unallocated characters have a GC of " Cn " , are n't digits
( digit = 128 ) , have a bidi that depends on their block , and
do n't decompose , combine , or have case . They have an East
Asian Width ( eaw ) of " N " ( 0 ) , and a script , line breaking
`(,(gethash "Cn" *general-categories*)
,(unallocated-bidi-class code-point) 0 128 0
,(gethash code-point *east-asian-width-table* 0)
0 ,(gethash code-point *line-break-class-table* 0)
,(gethash code-point *age-table* 0)))
(unallocated-index (apply #'hash-misc unallocated-misc))
(unallocated-ucd (make-ucd :misc unallocated-index)))
(setf (gethash code-point *ucd-entries*) unallocated-ucd)))))
;; Recursively flatten DECOMPOSITION (a list of code points): any
;; constituent whose own decomposition is canonical (bit 7 of the
;; length byte clear) and nonempty is replaced by its expansion.
(defun expand-decomposition (decomposition)
  (loop for cp in decomposition
        for ucd = (gethash cp *ucd-entries*)
        ;; Misc element 4 is decomposition-info: the decomposition length,
        ;; with bit 7 set for compatibility decompositions.
        for length = (elt (aref *misc-table* (ucd-misc ucd)) 4)
        if (and (not (logbitp 7 length))
                (plusp length))
        append (expand-decomposition (ucd-decomp ucd))
        else
        collect cp))
(defun fixup-decompositions ()
(loop for did-something = nil
do
(loop for ucd being each hash-value of *ucd-entries*
when (and (ucd-decomp ucd)
(not (logbitp 7 (elt (aref *misc-table* (ucd-misc ucd)) 4))))
do
(let ((expanded (expand-decomposition (ucd-decomp ucd))))
(unless (equal expanded (ucd-decomp ucd))
(setf (ucd-decomp ucd) expanded
did-something t))))
while did-something)
(loop for i below (hash-table-count *ucd-entries*)
for ucd = (gethash i *ucd-entries*)
for decomp = (ucd-decomp ucd)
do
(setf (ucd-decomp ucd)
(cond ((not (consp decomp)) 0)
((logbitp 7 (elt (aref *misc-table* (ucd-misc ucd)) 4))
(prog1 (length *decompositions*)
(loop for cp in decomp
do (vector-push-extend cp *decompositions*))))
(t
(let ((misc-entry (copy-list (aref *misc-table* (ucd-misc ucd)))))
(setf (elt misc-entry 4) (length decomp)
(ucd-misc ucd) (apply #'hash-misc misc-entry))
(prog1 (length *decompositions*)
(loop for cp in decomp
do (vector-push-extend cp *decompositions*)))))))))
(defun fixup-compositions ()
(flet ((fixup (k v)
(declare (ignore v))
(let* ((cp (car k))
(ucd (gethash cp *ucd-entries*))
(misc (aref *misc-table* (ucd-misc ucd)))
(ccc (third misc)))
we can do everything in the first pass except for
accounting for decompositions where the first
(when (/= ccc 0)
(remhash k *compositions*)))))
(maphash #'fixup *compositions*)))
(defun add-jamo-information (line table)
(code (parse-integer (first split) :radix 16))
(syllable (string-trim
" "
(subseq (second split) 0 (position #\# (second split))))))
(setf (gethash code table) syllable)))
(defun fixup-hangul-syllables ()
" Hangul Syllable Composition , Unicode 5.1 section 3 - 12 "
(let* ((sbase #xac00)
(lbase #x1100)
(vbase #x1161)
(tbase #x11a7)
(scount 11172)
(lcount 19)
(vcount 21)
(tcount 28)
(ncount (* vcount tcount))
(table (make-hash-table)))
(declare (ignore lcount))
(with-input-txt-file (*standard-input* "Jamo")
(loop for line = (read-line nil nil)
while line
do (add-jamo-information line table)))
(dotimes (sindex scount)
(let* ((l (+ lbase (floor sindex ncount)))
(v (+ vbase (floor (mod sindex ncount) tcount)))
(tee (+ tbase (mod sindex tcount)))
(code-point (+ sbase sindex))
(name (format nil "HANGUL_SYLLABLE_~A~A~:[~A~;~]"
(gethash l table) (gethash v table)
(= tee tbase) (gethash tee table))))
(setf (gethash code-point *unicode-names*) name)))))
(defun normalize-character-name (name)
(when (find #\_ name)
(error "Bad name for a character: ~A" name))
U+1F5CF ( PAGE ) 's name conflicts with the ANSI CL - assigned
name for form feed ( ^L , ) . To avoid a case where
more than one character has a particular name while remaining
standards - compliant , we remove U+1F5CF 's name here .
(when (string= name "PAGE")
(return-from normalize-character-name "UNICODE_PAGE"))
(unless (or (zerop (length name)) (find #\< name) (find #\> name))
(substitute #\_ #\Space name)))
D800 -- F8FF : surrogates and private use
F0000 -- FFFFD : private use
100000 -- 10FFFD : private use
(defun encode-ucd-line (line code-point)
(destructuring-bind (name general-category canonical-combining-class
bidi-class decomposition-type-and-mapping
decimal-digit digit numeric bidi-mirrored
unicode-1-name iso-10646-comment simple-uppercase
simple-lowercase simple-titlecase)
line
(declare (ignore iso-10646-comment))
(if (and (> (length name) 8)
(string= ", First>" name :start2 (- (length name) 8)))
(progn
(setf *block-first* code-point)
nil)
(let* ((gc-index (or (gethash general-category *general-categories*)
(error "unknown general category ~A"
general-category)))
(bidi-index (or (gethash bidi-class *bidi-classes*)
(error "unknown bidirectional class ~A"
bidi-class)))
(ccc (parse-integer canonical-combining-class))
(let ((%digit (parse-integer digit)))
(if (string= digit decimal-digit)
decimal - digit - p is in bit 6
(logior (ash 1 6) %digit) %digit))))
(upper-index (unless (string= "" simple-uppercase)
(parse-integer simple-uppercase :radix 16)))
(lower-index (unless (string= "" simple-lowercase)
(parse-integer simple-lowercase :radix 16)))
(title-index (unless (string= "" simple-titlecase)
(parse-integer simple-titlecase :radix 16)))
(cl-both-case-p (or (and (= gc-index 0) lower-index)
(and (= gc-index 1) upper-index)))
(bidi-mirrored-p (string= bidi-mirrored "Y"))
(decomposition-info 0)
(eaw-index (gethash code-point *east-asian-width-table*))
(script-index (gethash code-point *script-table* 0))
(line-break-index (gethash code-point *line-break-class-table* 0))
(age-index (gethash code-point *age-table* 0))
decomposition)
#+nil
(when (and (not cl-both-case-p)
(< gc-index 2))
(format t "~A~%" name))
(when (string/= "" decomposition-type-and-mapping)
(let* ((compatibility-p (position #\> decomposition-type-and-mapping)))
(setf decomposition
(parse-codepoints
(subseq decomposition-type-and-mapping
(if compatibility-p (1+ compatibility-p) 0))))
(when (assoc code-point *decomposition-corrections*)
(setf decomposition
(list (cdr (assoc code-point *decomposition-corrections*)))))
(setf decomposition-info
(logior (length decomposition) (if compatibility-p 128 0)))
(unless compatibility-p
* decompositions whose first character is a
for the fixup , see FIXUP - COMPOSITIONS
(when (and (> decomposition-info 1)
(= ccc 0)
(not (member code-point *composition-exclusions*)))
(unless (= decomposition-info 2)
(error "canonical decomposition unexpectedly long"))
(setf (gethash (cons (first decomposition)
(second decomposition))
*compositions*)
code-point)))))
(when (= code-point #xd7a3)
: The decomposition - length for Hangul syllables in the
decompositions array ( which Hangul decomposition does n't use ) .
(setf decomposition-info 3))
Exclude codepoints from SpecialCasing
(when (string/= simple-uppercase simple-titlecase)
(push (cons code-point title-index) *different-titlecases*))
(and (or upper-index lower-index)
(setf (gethash code-point *case-mapping*)
(cons
(or upper-index code-point)
(or lower-index code-point)))))
(when (string/= digit numeric)
(push (cons code-point numeric) *different-numerics*))
(when (> ccc 255)
(error "canonical combining class too large ~A" ccc))
(let* ((flags (logior
(if cl-both-case-p (ash 1 7) 0)
(if (gethash code-point *case-mapping*) (ash 1 6) 0)
(if bidi-mirrored-p (ash 1 5) 0)
eaw-index))
(misc-index (hash-misc gc-index bidi-index ccc digit-index
decomposition-info flags script-index
line-break-index age-index))
(result (make-ucd :misc misc-index
:decomp decomposition)))
(when (and (> (length name) 7)
(string= ", Last>" name :start2 (- (length name) 7)))
We can still do this despite East Asian Width being in the
databasce since each of the UCD < First><Last > blocks
has a consistent East Asian Width
(loop for point from *block-first* to code-point do
(setf (gethash point *ucd-entries*) result)))
(values result (normalize-character-name name)
(normalize-character-name unicode-1-name)))))))
(defun slurp-ucd-line (line)
(code-point (parse-integer (first split-line) :radix 16)))
(multiple-value-bind (encoding name unicode-1-name)
(encode-ucd-line (cdr split-line) code-point)
(setf (gethash code-point *ucd-entries*) encoding
(gethash code-point *unicode-names*) name)
(when unicode-1-name
(setf (gethash code-point *unicode-1-names*) unicode-1-name)))))
this fixes up the case conversion discrepancy between CL and
Unicode : CL operators depend on char - downcase / char - upcase being
inverses , which is not true in general in Unicode even for
;; Clear the "CL both-case" flag (bit 7 of the misc flags) for every
;; code point whose case mapping is not a clean involution: an upper or
;; lower image that is a list (multi-character), or a partner whose own
;; mapping does not lead back to this code point.  CL requires
;; char-upcase/char-downcase to be inverses.
(defun second-pass ()
  (let ((case-mapping
         (sort (loop for code-point being the hash-keys in *case-mapping*
                     using (hash-value value)
                     collect (cons code-point value))
               #'< :key #'car)))
    (loop for (code-point upper . lower) in case-mapping
          for misc-index = (ucd-misc (gethash code-point *ucd-entries*))
          for (gc bidi ccc digit decomp flags script lb age) = (aref *misc-table* misc-index)
          when (logbitp 7 flags) do
          ;; gc 0 = "Lu", gc 1 = "Ll" (see the *general-categories* ordering)
          (when (or (not (atom upper)) (not (atom lower))
                    (and (= gc 0)
                         (not (equal (car (gethash lower *case-mapping*)) code-point)))
                    (and (= gc 1)
                         (not (equal (cdr (gethash upper *case-mapping*)) code-point))))
            (let* ((new-flags (clear-flag 7 flags))
                   (new-misc (hash-misc gc bidi ccc digit decomp new-flags script lb age)))
              (setf (ucd-misc (gethash code-point *ucd-entries*)) new-misc))))))
(defun fixup-casefolding ()
(with-input-txt-file (s "CaseFolding")
(loop for line = (read-line s nil nil)
while line
line ) ) ( equal ( position # \ # line ) 0 ) )
do (destructuring-bind (original type mapping comment)
(declare (ignore comment))
(let ((cp (parse-integer original :radix 16))
(fold (parse-codepoints mapping :singleton-list nil)))
(unless (or (string= type " S") (string= type " T"))
(when (not (equal (cdr (gethash cp *case-mapping*)) fold))
(push (cons cp fold) *different-casefolds*))))))))
;; Re-derive each code point's misc entry so its age field matches the
;; true age recorded in *AGE-TABLE*, sharing entries through HASH-MISC
;; and replacing the UCD struct wholesale.
(defun fixup-ages ()
  (let ((age (sort
              (loop for code-point being the hash-keys in *age-table*
                    using (hash-value true-age)
                    collect (cons code-point true-age))
              #'< :key #'car)))
    (loop for (code-point . true-age) in age
          for misc-index = (ucd-misc (gethash code-point *ucd-entries*))
          for (gc bidi ccc digit decomp flags script lb age) = (aref *misc-table* misc-index)
          unless (= age true-age) do
          (let* ((new-misc (hash-misc gc bidi ccc digit decomp flags script lb true-age))
                 (new-ucd (make-ucd
                           :misc new-misc
                           :decomp (ucd-decomp (gethash code-point *ucd-entries*)))))
            (setf (gethash code-point *ucd-entries*) new-ucd)))))
;; Top-level driver: parse UnicodeData.txt line by line into
;; *UCD-ENTRIES* and the name tables, then run every fixup pass.
;; Order matters: case flags, compositions and Hangul names are fixed
;; before unallocated code points are filled in, and decompositions are
;; flattened last.
(defun slurp-ucd ()
  (with-input-txt-file (*standard-input* "UnicodeData")
    (format t "~%//slurp-ucd~%")
    (loop for line = (read-line nil nil)
          while line
          do (slurp-ucd-line line)))
  (second-pass)
  (fixup-compositions)
  (fixup-hangul-syllables)
  (complete-misc-table)
  (fixup-casefolding)
  (fixup-ages)
  (fixup-decompositions)
  nil)
PropList.txt
(defparameter **proplist-properties** nil
"A list of properties extracted from PropList.txt")
(defun parse-property (stream &optional name)
(let ((result (make-array 1 :fill-pointer 0 :adjustable t)))
FIXME : something in this loop provokes a warning from CLISP
(loop for line = (read-line stream nil nil)
Deal with Blah = Blah in DerivedNormalizationProps.txt
while (and line (not (position #\= (substitute #\Space #\= line :count 1))))
for entry = (subseq line 0 (position #\# line))
when (and entry (string/= entry ""))
do
(destructuring-bind (start end)
(vector-push-extend start result)
(vector-push-extend end result)))
(when name
(push name **proplist-properties**)
(push result **proplist-properties**))))
(defun slurp-proplist ()
(with-input-txt-file (s "PropList")
(parse-property s :white-space)
(parse-property s :bidi-control)
(parse-property s :join-control)
(parse-property s :dash)
(parse-property s :hyphen)
(parse-property s :quotation-mark)
(parse-property s :terminal-punctuation)
(parse-property s :other-math)
(parse-property s :hex-digit)
(parse-property s :ascii-hex-digit)
(parse-property s :other-alphabetic)
(parse-property s :ideographic)
(parse-property s :diacritic)
(parse-property s :extender)
(parse-property s :other-lowercase)
(parse-property s :other-uppercase)
(parse-property s :noncharacter-code-point)
(parse-property s :other-grapheme-extend)
(parse-property s :ids-binary-operator)
(parse-property s :ids-trinary-operator)
(parse-property s :radical)
(parse-property s :unified-ideograph)
(parse-property s :other-default-ignorable-code-point)
(parse-property s :deprecated)
(parse-property s :soft-dotted)
(parse-property s :logical-order-exception)
(parse-property s :other-id-start)
(parse-property s :other-id-continue)
(parse-property s :sterm)
(parse-property s :variation-selector)
(parse-property s :pattern-white-space)
(parse-property s :pattern-syntax))
(with-input-txt-file (s "DerivedNormalizationProps")
(parse-property s :nfd-qc)
(parse-property s :nfc-qc)
(parse-property s :nfc-qc-maybe)
(parse-property s :nfkd-qc)
NFKC_QC Comments
(parse-property s :nfkc-qc)
(parse-property s :nfkc-qc-maybe))
(setf **proplist-properties** (nreverse **proplist-properties**))
(values))
(defvar *maximum-variable-key* 1)
(defun bitpack-collation-key (primary secondary tertiary)
0 < = secondary < = # x10C [ 9 bits ]
0 < = tertiary < = # x1E ( # x1F allowed ) [ 5 bits ]
(logior (ash primary 16) (ash secondary 5) tertiary))
(defun parse-collation-line (line)
(let* ((code-points (parse-codepoints %code-points))
(keys
(remove
""
(split-string (remove #\[ (remove #\Space %keys)) #\]) :test #'string=))
(ret
(loop for key in keys
for variable-p = (position #\* key)
for parsed =
Do n't need first value , it 's always just " "
(cdr (mapcar (lambda (x) (parse-integer x :radix 16 :junk-allowed t))
(split-string (substitute #\. #\* key) #\.)))
collect
(destructuring-bind (primary secondary tertiary) parsed
(when variable-p (setf *maximum-variable-key*
(max primary *maximum-variable-key*)))
(bitpack-collation-key primary secondary tertiary)))))
(values code-points ret))))
(defparameter *collation-table*
(with-input-txt-file (stream "Allkeys70")
(loop with hash = (make-hash-table :test #'equal)
for line = (read-line stream nil nil) while line
unless (eql 0 (position #\# line))
do (multiple-value-bind (codepoints keys) (parse-collation-line line)
(setf (gethash codepoints hash) keys))
finally (return hash))))
(defparameter *confusables*
(with-input-txt-file (s "ConfusablesEdited")
(loop for line = (read-line s nil nil) while line
unless (eql 0 (position #\# line))
collect (mapcar #'parse-codepoints (split-string line #\<))))
"List of confusable codepoint sets")
(defparameter *bidi-mirroring-glyphs*
(with-input-txt-file (s "BidiMirroring")
(loop for line = (read-line s nil nil) while line
when (and (plusp (length line))
(char/= (char line 0) #\#))
collect
(mapcar
#'(lambda (c) (parse-codepoints c :singleton-list nil))
"List of BIDI mirroring glyph pairs")
(defparameter *block-ranges*
(with-input-txt-file (stream "Blocks")
(loop with result = (make-array (* 252 2) :fill-pointer 0)
for line = (read-line stream nil nil) while line
unless (or (string= line "") (position #\# line))
do
(map nil #'(lambda (x) (vector-push x result))
finally (return result)))
"Vector of block starts and ends in a form acceptable to `ordered-ranges-position`.
Used to look up block data.")
;; Write CODE-POINT to STREAM as three bytes, big-endian (Unicode code
;; points fit in 21 bits).
(defun write-codepoint (code-point stream)
  (declare (type (unsigned-byte 32) code-point))
  (write-byte (ldb (byte 8 16) code-point) stream)
  (write-byte (ldb (byte 8 8) code-point) stream)
  (write-byte (ldb (byte 8 0) code-point) stream))
;; Write VALUE to STREAM as four bytes, big-endian.
(defun write-4-byte (value stream)
  (declare (type (unsigned-byte 32) value))
  (write-byte (ldb (byte 8 24) value) stream)
  (write-byte (ldb (byte 8 16) value) stream)
  (write-byte (ldb (byte 8 8) value) stream)
  (write-byte (ldb (byte 8 0) value) stream))
(defun output-misc-data ()
(with-output-dat-file (stream "ucdmisc")
(loop for (gc-index bidi-index ccc digit decomposition-info flags
script line-break age)
across *misc-table*
three bits spare here
do (write-byte gc-index stream)
three bits spare here
(write-byte bidi-index stream)
(write-byte ccc stream)
bits 0 - 3 encode [ 0,9 ] , bit 7 is for non - digit status ,
bit 6 is the decimal - digit flag . Two bits spare
(write-byte digit stream)
(write-byte decomposition-info stream)
includes EAW in bits 0 - 3 , bit 4 is free
(write-byte script stream)
(write-byte line-break stream)
(write-byte age stream))))
(defun output-ucd-data ()
(with-output-dat-file (high-pages "ucdhigh")
(with-output-dat-file (low-pages "ucdlow")
array / 256 . For indexes into the misc array , set bit 15 ( high bit ) .
(loop with low-pages-index = 0
for high-page from 0 to (ash #x10FFFF -8)
for uniq-ucd-entries = nil do
(loop for low-page from 0 to #xFF do
(pushnew
(gethash (logior low-page (ash high-page 8)) *ucd-entries*)
uniq-ucd-entries :test #'equalp))
(flet ((write-2-byte (int stream)
(declare (type (unsigned-byte 16) int))
(write-byte (ldb (byte 8 8) int) stream)
(write-byte (ldb (byte 8 0) int) stream)))
(case (length uniq-ucd-entries)
(0 (error "Somehow, a high page has no codepoints in it."))
(1 (write-2-byte (logior
(ash 1 15)
(ucd-misc (car uniq-ucd-entries)))
high-pages))
(t (loop for low-page from 0 to #xFF
for cp = (logior low-page (ash high-page 8))
for entry = (gethash cp *ucd-entries*) do
(write-2-byte (ucd-misc entry) low-pages)
(write-2-byte (ucd-decomp entry) low-pages)
finally (write-2-byte low-pages-index high-pages)
(incf low-pages-index)))))
finally (assert (< low-pages-index (ash 1 15))) (print low-pages-index)))))
;; Emit decomp.dat: the flat *DECOMPOSITIONS* array, one 3-byte
;; big-endian code point per element.  Prints the length for the build log.
(defun output-decomposition-data ()
  (with-output-dat-file (stream "decomp")
    (loop for cp across *decompositions* do
          (write-codepoint cp stream)))
  (print (length *decompositions*)))
;; Emit comp.dat: triples <cp1, cp2, composed>, each a 3-byte big-endian
;; code point, sorted by the composed code point.
(defun output-composition-data ()
  (with-output-dat-file (stream "comp")
    (let (comp)
      (maphash (lambda (k v) (push (cons k v) comp)) *compositions*)
      (setq comp (sort comp #'< :key #'cdr))
      (loop for (k . v) in comp
            do (write-codepoint (car k) stream)
            (write-codepoint (cdr k) stream)
            (write-codepoint v stream)))))
(defun output-case-data ()
(let (casing-pages points-with-case)
(with-output-dat-file (stream "case")
(loop for cp being the hash-keys in *case-mapping*
do (push cp points-with-case))
(setf points-with-case (sort points-with-case #'<))
(loop for cp in points-with-case
for (upper . lower) = (gethash cp *case-mapping*) do
(pushnew (ash cp -6) casing-pages)
(write-codepoint cp stream)
(write-byte (if (atom upper) 0 (length upper)) stream)
(if (atom upper) (write-codepoint upper stream)
(map 'nil (lambda (c) (write-codepoint c stream)) upper))
(write-byte (if (atom lower) 0 (length lower)) stream)
(if (atom lower) (write-codepoint lower stream)
(map 'nil (lambda (c) (write-codepoint c stream)) lower))))
(setf casing-pages (sort casing-pages #'<))
(assert (< (length casing-pages) 256))
(let* ((size (1+ (reduce #'max casing-pages)))
(array (make-array size :initial-element 255))
(page -1))
(dolist (entry casing-pages)
(setf (aref array entry) (incf page)))
(with-output-dat-file (stream "casepages")
(dotimes (i size)
(write-byte (aref array i) stream))))
(with-output-lisp-expr-file (stream "casepages")
(print casing-pages stream))))
(defun output-collation-data ()
(with-output-dat-file (stream "collation")
(flet ((length-tag (list1 list2)
takes two lists of UB32 ( with the caveat that list1[0 ]
needs its high 8 bits free ( codepoints always have
(let* ((l1 (length list1)) (l2 (length list2))
(tag (dpb l1 (byte 4 28) (dpb l2 (byte 5 23) (car list1)))))
(assert (<= l1 3))
(write-4-byte tag stream)
(map nil #'(lambda (l) (write-4-byte l stream)) (append (cdr list1) list2)))))
(let (coll)
(maphash (lambda (k v) (push (cons k v) coll)) *collation-table*)
(labels ((sorter (o1 o2)
(cond
((null o1) t)
((null o2) nil)
(t (or (< (car o1) (car o2))
(and (= (car o1) (car o2))
(sorter (cdr o1) (cdr o2))))))))
(setq coll (sort coll #'sorter :key #'car)))
(loop for (k . v) in coll
do (length-tag k v)))))
(with-output-lisp-expr-file (*standard-output* "other-collation-info")
(write-string ";;; The highest primary variable collation index")
(terpri)
(prin1 *maximum-variable-key*) (terpri)))
(defun output ()
(output-misc-data)
(output-ucd-data)
(output-decomposition-data)
(output-composition-data)
(output-case-data)
(output-collation-data)
(with-output-lisp-expr-file (*standard-output* "misc-properties")
(prin1 **proplist-properties**))
(with-output-lisp-expr-file (f "ucd-names")
(write-string ";;; Do not edit by hand: generated by ucd.lisp" f)
(maphash (lambda (code name)
(when name
(print code f)
(prin1 name f)))
*unicode-names*)
(setf *unicode-names* nil))
(with-output-lisp-expr-file (f "ucd1-names")
(write-string ";;; Do not edit by hand: generated by ucd.lisp" f)
(maphash (lambda (code name)
(when name
(print code f)
(prin1 name f)))
*unicode-1-names*)
(setf *unicode-1-names* nil))
(with-output-lisp-expr-file (*standard-output* "numerics")
(let ((result (make-array (* (length *different-numerics*) 2))))
(loop for (code . value) in (sort *different-numerics* #'< :key #'car)
for i by 2
do (setf (aref result i) code
(aref result (1+ i)) (read-from-string value)))
(prin1 result)))
(with-output-lisp-expr-file (*standard-output* "titlecases")
(prin1 *different-titlecases*))
(with-output-lisp-expr-file (*standard-output* "foldcases")
(prin1 *different-casefolds*))
(with-output-lisp-expr-file (*standard-output* "confusables")
(prin1 *confusables*))
(with-output-lisp-expr-file (*standard-output* "bidi-mirrors")
(prin1 *bidi-mirroring-glyphs*))
(with-output-lisp-expr-file (*standard-output* "blocks")
(prin1 *block-ranges*))
(values))
|
e711e8f2d668b4d49410fe79e83c3983ea96ab9fda9b1c21ab0351f329fcb992 | Tclv/HaskellBook | Main.hs | module Main where
import Control.Monad (forever)
import Data.Char (toLower)
import Data.Maybe (isJust)
import Data.List (intersperse)
import System.Exit (exitSuccess)
import System.Random (randomRIO)
-- type WordList = [String]   (superseded by the newtype below)
newtype WordList =
WordList [String]
deriving (Eq, Show)
-- | Read the whole dictionary from @data/words@, one word per line.
allWords :: IO WordList
allWords =
  WordList . lines <$> readFile "data/words"
-- Only words strictly longer than this are eligible (see 'gameWords').
minWordLength :: Int
minWordLength = 5

-- Upper bound on eligible word length; the comparison in 'gameWords'
-- is strict, so this length itself is excluded.
maxWordLength :: Int
maxWordLength = 9

-- Wrong guesses allowed before the game is lost (see 'gameOver').
numberOfGuesses :: Int
numberOfGuesses = 7
-- | Candidate words: those whose length lies strictly between
-- 'minWordLength' and 'maxWordLength'.
-- NOTE(review): both bounds are exclusive, so 5- and 9-letter words are
-- rejected despite the constants' names — confirm that is intended.
gameWords :: IO WordList
gameWords = do
  (WordList ws) <- allWords
  let eligible w =
        let n = length w
        in n > minWordLength && n < maxWordLength
  return (WordList (filter eligible ws))
-- | Pick one word uniformly at random (IO for the global RNG).
randomWord :: WordList -> IO String
randomWord (WordList ws) = do
  i <- randomRIO (0, length ws - 1)
  pure (ws !! i)
-- | A random word drawn from the game-eligible dictionary.
randomWord' :: IO String
randomWord' = do
  wl <- gameWords
  randomWord wl
data Puzzle = Puzzle String [Maybe Char] [Char] Int
-- Render the blanks-and-letters view plus the bookkeeping counters.
instance Show Puzzle where
  show (Puzzle _ discovered guessed guessesLeft) =
    concat
      [ intersperse ' ' (map renderPuzzleChar discovered)
      , " Guessed so far: ", guessed
      , " Guesses left: ", show guessesLeft
      ]
-- | Start a puzzle: nothing discovered, nothing guessed, the full
-- allowance of guesses remaining.
freshPuzzle :: String -> Puzzle
freshPuzzle word =
  Puzzle word [Nothing | _ <- word] [] numberOfGuesses
-- | Does the secret word contain this character?
charInWord :: Puzzle -> Char -> Bool
charInWord (Puzzle word _ _ _) c = c `elem` word
-- | Has this character been guessed before (rightly or wrongly)?
alreadyGuessed :: Puzzle -> Char -> Bool
alreadyGuessed (Puzzle _ _ previous _) c = c `elem` previous
-- | An undiscovered slot prints as an underscore; a discovered one as itself.
renderPuzzleChar :: Maybe Char -> Char
renderPuzzleChar = maybe '_' id
-- | Record a guess: reveal every slot whose word letter matches C and
-- prepend C to the guessed-so-far list.  The guess count is untouched.
fillInCharacter :: Puzzle -> Char -> Puzzle
fillInCharacter (Puzzle word slots seen noG) c =
  Puzzle word (zipWith reveal word slots) (c : seen) noG
  where
    reveal wc slot
      | wc == c   = Just wc
      | otherwise = slot
-- | A wrong guess: record it like any other, then spend one remaining guess.
missedGuess :: Puzzle -> Char -> Puzzle
missedGuess puzzle c =
  case fillInCharacter puzzle c of
    Puzzle w slots seen noG -> Puzzle w slots seen (noG - 1)
-- | Process one guessed character: repeats are rejected, hits are
-- filled in, misses cost a guess.  Returns the updated puzzle.
handleGuess :: Puzzle -> Char -> IO Puzzle
handleGuess puzzle guess = do
  putStrLn ("Your guess was: " ++ [guess])
  if alreadyGuessed puzzle guess
    then do
      putStrLn "Your already guessed that character, pick something else"
      return puzzle
    else if charInWord puzzle guess
      then do
        putStrLn "This character was in the word, filling in the word accordingly"
        return (fillInCharacter puzzle guess)
      else do
        putStrLn "This character wasn't in the word, try again"
        return (missedGuess puzzle guess)
-- | When no guesses remain, announce the loss (and the word) and exit.
gameOver :: Puzzle -> IO ()
gameOver (Puzzle wordToGuess _ _ noGuesses)
  | noGuesses <= 0 = do
      putStrLn "You lose!"
      putStrLn ("The word was: " ++ wordToGuess)
      exitSuccess
  | otherwise = return ()
-- | When every slot is revealed, announce the win and exit.
gameWin :: Puzzle -> IO ()
gameWin (Puzzle _ slots _ _)
  | all isJust slots = putStrLn "You win!" >> exitSuccess
  | otherwise        = return ()
-- | Main loop: check the end conditions (which exit the program), show
-- the puzzle, then read and apply a single-character guess.
runGame :: Puzzle -> IO ()
runGame puzzle = forever $ do
  gameWin puzzle
  gameOver puzzle
  putStrLn ("Current puzzle is " ++ show puzzle)
  putStr "Guess a letter: "
  line <- getLine
  case line of
    [c] -> handleGuess puzzle c >>= runGame
    _   -> putStrLn "Your guess must be a single character"
-- | Pick a random word, lowercase it, and play.
main :: IO ()
main = do
  word <- randomWord'
  runGame (freshPuzzle (map toLower word))
| null | https://raw.githubusercontent.com/Tclv/HaskellBook/78eaa5c67579526b0f00f85a10be3156bc304c14/ch13/hangman/src/Main.hs | haskell | module Main where
import Control.Monad (forever)
import Data.Char (toLower)
import Data.Maybe (isJust)
import Data.List (intersperse)
import System.Exit (exitSuccess)
import System.Random (randomRIO)
type WordList = [ String ]
newtype WordList =
WordList [String]
deriving (Eq, Show)
allWords :: IO WordList
allWords = do
dict <- readFile "data/words"
return $ WordList (lines dict)
minWordLength :: Int
minWordLength = 5
maxWordLength :: Int
maxWordLength = 9
numberOfGuesses :: Int
numberOfGuesses = 7
gameWords :: IO WordList
gameWords = do
(WordList aw) <- allWords
return $ WordList (filter gameLength aw)
where gameLength w =
let l = length w
in l > minWordLength && l < maxWordLength
randomWord :: WordList -> IO String
randomWord (WordList wl) = do
randomIndex <- randomRIO (0, length wl - 1)
return $ wl !! randomIndex
randomWord' :: IO String
randomWord' = gameWords >>= randomWord
data Puzzle = Puzzle String [Maybe Char] [Char] Int
instance Show Puzzle where
show (Puzzle _ discovered guessed guessesLeft) =
(intersperse ' ' $ fmap renderPuzzleChar discovered)
++ " Guessed so far: " ++ guessed ++ " Guesses left: " ++ show guessesLeft
freshPuzzle :: String -> Puzzle
freshPuzzle word = Puzzle word (map (const Nothing) word) [] numberOfGuesses
charInWord :: Puzzle -> Char -> Bool
charInWord (Puzzle word _ _ _) guess = guess `elem` word
alreadyGuessed :: Puzzle -> Char -> Bool
alreadyGuessed (Puzzle _ _ guessedSoFar _) guess = guess `elem` guessedSoFar
renderPuzzleChar :: Maybe Char -> Char
renderPuzzleChar Nothing = '_'
renderPuzzleChar (Just a) = a
fillInCharacter :: Puzzle -> Char -> Puzzle
fillInCharacter (Puzzle word filledInSoFar s noG) c =
Puzzle word newFilledInSoFar (c : s) noG
where
zipper guessed wordChar guessChar =
if wordChar == guessed
then Just wordChar
else guessChar
newFilledInSoFar = zipWith (zipper c) word filledInSoFar
missedGuess :: Puzzle -> Char -> Puzzle
missedGuess puzzle guess = Puzzle w f m (noG - 1)
where
(Puzzle w f m noG) = fillInCharacter puzzle guess
handleGuess :: Puzzle -> Char -> IO Puzzle
handleGuess puzzle guess = do
putStrLn $ "Your guess was: " ++ [guess]
case (charInWord puzzle guess, alreadyGuessed puzzle guess) of
(_, True) -> do
putStrLn "Your already guessed that character, pick something else"
return puzzle
(True, _) -> do
putStrLn "This character was in the word, filling in the word accordingly"
return (fillInCharacter puzzle guess)
(False, _) -> do
putStrLn "This character wasn't in the word, try again"
return (missedGuess puzzle guess)
gameOver :: Puzzle -> IO ()
gameOver (Puzzle wordToGuess _ guessed noGuesses) =
if (noGuesses) <= 0 then
do putStrLn "You lose!"
putStrLn $ "The word was: " ++ wordToGuess
exitSuccess
else
return ()
gameWin :: Puzzle -> IO ()
gameWin (Puzzle _ filledInSoFar _ _) =
if all isJust filledInSoFar then
do putStrLn "You win!"
exitSuccess
else return ()
runGame :: Puzzle -> IO ()
runGame puzzle = forever $ do
gameWin puzzle
gameOver puzzle
putStrLn $ "Current puzzle is " ++ show puzzle
putStr "Guess a letter: "
guess <- getLine
case guess of
[c] -> handleGuess puzzle c >>= runGame
_ -> putStrLn "Your guess must be a single character"
main :: IO ()
main = do
word <- randomWord'
let puzzle = freshPuzzle (fmap toLower word)
runGame puzzle
| |
d8c3500906f6cb91d83f0af02a4170a39dedbdf3f552de7e3f860eb3cdb6ab0e | racket/redex | delivered-tue-mor.rkt | #lang racket
(require redex "common.rkt" (only-in "mon-aft.rkt" fv))
(provide eval-value)
-- reductions for LC_v
-- standard reductions for LC_v
-- semantics for LC_v
;; -- cross-testing Racket with LC_v
(define-extended-language Lambda-calculus Lambda
(e ::= .... n)
(v ::= n (lambda (x ...) e))
(n ::= number)
(C ::= hole (e ... C e ...) (lambda (x_!_ ...) C)))
(define lambda? (redex-match? Lambda-calculus e))
(define context? (redex-match? Lambda-calculus C))
;; a metafunction that acts like a macro in Lambda-calculus
exercise 3 from Monday afternoon
(define-metafunction Lambda-calculus
;; let : ((x e) ...) e -> e but e plus hole
let : ((x any) ...) any -> any
[(let ([x_lhs any_rhs] ...) any_body)
((lambda (x_lhs ...) any_body) any_rhs ...)])
(module+ test
(define C1 (term ((lambda (x y) x) hole 1)))
(define C2 (term ((lambda (x y) hole) 0 1)))
(define C3 (term (let ([x hole][y 3]) (lambda (a) (a (x 1 y 2))))))
(test-equal (context? C1) #true)
(test-equal (context? C2) #true)
(test-equal (context? C3) #true)
(define e1 (term (in-hole ,C1 1)))
(define e2 (term (in-hole ,C2 x)))
(define e3 (term (in-hole ,C3 (lambda (x y z) x))))
(test-equal (lambda? e1) #true)
(test-equal (lambda? e2) #true)
(test-equal (lambda? e3) #true))
;; model the λβv calculus, reductions only
(module+ test
;; transitive closure testing
(test-->> -->βv #:equiv =α/racket e1 1)
(test-->> -->βv #:equiv =α/racket e3 (term (lambda (a) (a 1))))
one - step reduction testing
reduces to TWO expressions
a term that contains TWO βv redexes
(term
((lambda (x y)
[(lambda (f) (f (x 1 y 2)))
(lambda (w) 42)])
[(lambda (x) x) (lambda (a b c) a)]
3)))
(define e4-one-step
(term
((lambda (x y)
((lambda (f) (f (x 1 y 2)))
(lambda (w) 42)))
(lambda (a b c) a)
3)))
(define e4-other-step
(term
((lambda (x y)
((lambda (w) 42) (x 1 y 2)))
((lambda (x) x) (lambda (a b c) a))
3)))
(test--> -->βv #:equiv =α/racket e4 e4-other-step e4-one-step)
(test-->> -->βv #:equiv =α/racket e4 42))
(define -->βv
(reduction-relation
Lambda-calculus
(--> (in-hole C ((lambda (x_1 ..._n) e) v_1 ..._n))
(in-hole C (subst ([v_1 x_1] ...) e))
βv)))
#;
(module+ test
(traces -->βv e4))
;; model standard reduction for by-name and by-value calculus
(define-extended-language Standard Lambda-calculus
(E ::= hole (v ... E e ...)))
(module+ test
yields only one term , leftmost - outermost
(test--> s-->βv e4 e4-one-step))
(define s-->βv
(reduction-relation
Standard
(--> (in-hole E ((lambda (x_1 ..._n) e) v_1 ..._n))
(in-hole E (subst ((v_1 x_1) ...) e)))))
#;
(module+ test
(traces s-->βv e4))
;; -----------------------------------------------------------------------------
;; a semantics
(module+ test
(test-equal (term (eval-value ,e4)) 42)
(test-equal (term (eval-value ,e4-one-step)) 42)
(test-equal (term (eval-value ,e3)) 'closure))
(define-metafunction Standard
eval-value : e -> v or closure or stuck
[(eval-value e) any_1 (where any_1 (run-value e))])
(define-metafunction Standard
run-value : e -> v or closure or stuck
[(run-value n) n]
[(run-value v) closure]
[(run-value e)
(run-value e_again)
(where (e_again) ,(apply-reduction-relation s-->βv (term e)))]
[(run-value any) stuck])
;; ---------------------------------------------------------
;; testing against Racket
;; --- this is all Racket ---
(define-namespace-anchor A)
(define N (namespace-anchor->namespace A))
Lambda.e - >
(define (racket-evaluator t0)
(define result
(with-handlers ((exn:fail? values))
(eval t0 N)))
(cond
[(number? result) result]
[(procedure? result) (term closure)]
[else 'stuck]))
;; --- end of Racket magic
(module+ test
(test-equal (term (theorem:racket=eval-value ,e1)) #true)
(test-equal (term (theorem:racket=eval-value ,e2)) #true)
(test-equal (term (theorem:racket=eval-value ,e3)) #true)
(test-equal (term (theorem:racket=eval-value ,e4)) #true))
(define-metafunction Standard
theorem:racket=eval-value : e -> boolean
[(theorem:racket=eval-value e)
,(equal? (racket-evaluator (term e)) (term (run-value e)))])
(module+ test
(require "close.rkt")
(redex-check Standard e
(begin (displayln (term e))
(term (theorem:racket=eval-value e)))
#:prepare (close-over-fv-with lambda?)
#:attempts 12))
(module+ test
(test-results)) | null | https://raw.githubusercontent.com/racket/redex/4c2dc96d90cedeb08ec1850575079b952c5ad396/redex-doc/redex/scribblings/long-tut/code/delivered-tue-mor.rkt | racket | -- cross-testing Racket with LC_v
a metafunction that acts like a macro in Lambda-calculus
let : ((x e) ...) e -> e but e plus hole
model the λβv calculus, reductions only
transitive closure testing
model standard reduction for by-name and by-value calculus
-----------------------------------------------------------------------------
a semantics
---------------------------------------------------------
testing against Racket
--- this is all Racket ---
--- end of Racket magic | #lang racket
(require redex "common.rkt" (only-in "mon-aft.rkt" fv))
(provide eval-value)
-- reductions for LC_v
-- standard reductions for LC_v
-- semantics for LC_v
(define-extended-language Lambda-calculus Lambda
(e ::= .... n)
(v ::= n (lambda (x ...) e))
(n ::= number)
(C ::= hole (e ... C e ...) (lambda (x_!_ ...) C)))
(define lambda? (redex-match? Lambda-calculus e))
(define context? (redex-match? Lambda-calculus C))
exercise 3 from Monday afternoon
(define-metafunction Lambda-calculus
let : ((x any) ...) any -> any
[(let ([x_lhs any_rhs] ...) any_body)
((lambda (x_lhs ...) any_body) any_rhs ...)])
(module+ test
(define C1 (term ((lambda (x y) x) hole 1)))
(define C2 (term ((lambda (x y) hole) 0 1)))
(define C3 (term (let ([x hole][y 3]) (lambda (a) (a (x 1 y 2))))))
(test-equal (context? C1) #true)
(test-equal (context? C2) #true)
(test-equal (context? C3) #true)
(define e1 (term (in-hole ,C1 1)))
(define e2 (term (in-hole ,C2 x)))
(define e3 (term (in-hole ,C3 (lambda (x y z) x))))
(test-equal (lambda? e1) #true)
(test-equal (lambda? e2) #true)
(test-equal (lambda? e3) #true))
(module+ test
(test-->> -->βv #:equiv =α/racket e1 1)
(test-->> -->βv #:equiv =α/racket e3 (term (lambda (a) (a 1))))
one - step reduction testing
reduces to TWO expressions
a term that contains TWO βv redexes
(term
((lambda (x y)
[(lambda (f) (f (x 1 y 2)))
(lambda (w) 42)])
[(lambda (x) x) (lambda (a b c) a)]
3)))
(define e4-one-step
(term
((lambda (x y)
((lambda (f) (f (x 1 y 2)))
(lambda (w) 42)))
(lambda (a b c) a)
3)))
(define e4-other-step
(term
((lambda (x y)
((lambda (w) 42) (x 1 y 2)))
((lambda (x) x) (lambda (a b c) a))
3)))
(test--> -->βv #:equiv =α/racket e4 e4-other-step e4-one-step)
(test-->> -->βv #:equiv =α/racket e4 42))
(define -->βv
(reduction-relation
Lambda-calculus
(--> (in-hole C ((lambda (x_1 ..._n) e) v_1 ..._n))
(in-hole C (subst ([v_1 x_1] ...) e))
βv)))
(module+ test
(traces -->βv e4))
(define-extended-language Standard Lambda-calculus
(E ::= hole (v ... E e ...)))
(module+ test
yields only one term , leftmost - outermost
(test--> s-->βv e4 e4-one-step))
(define s-->βv
(reduction-relation
Standard
(--> (in-hole E ((lambda (x_1 ..._n) e) v_1 ..._n))
(in-hole E (subst ((v_1 x_1) ...) e)))))
(module+ test
(traces s-->βv e4))
(module+ test
(test-equal (term (eval-value ,e4)) 42)
(test-equal (term (eval-value ,e4-one-step)) 42)
(test-equal (term (eval-value ,e3)) 'closure))
(define-metafunction Standard
eval-value : e -> v or closure or stuck
[(eval-value e) any_1 (where any_1 (run-value e))])
(define-metafunction Standard
run-value : e -> v or closure or stuck
[(run-value n) n]
[(run-value v) closure]
[(run-value e)
(run-value e_again)
(where (e_again) ,(apply-reduction-relation s-->βv (term e)))]
[(run-value any) stuck])
(define-namespace-anchor A)
(define N (namespace-anchor->namespace A))
Lambda.e - >
(define (racket-evaluator t0)
(define result
(with-handlers ((exn:fail? values))
(eval t0 N)))
(cond
[(number? result) result]
[(procedure? result) (term closure)]
[else 'stuck]))
(module+ test
(test-equal (term (theorem:racket=eval-value ,e1)) #true)
(test-equal (term (theorem:racket=eval-value ,e2)) #true)
(test-equal (term (theorem:racket=eval-value ,e3)) #true)
(test-equal (term (theorem:racket=eval-value ,e4)) #true))
(define-metafunction Standard
theorem:racket=eval-value : e -> boolean
[(theorem:racket=eval-value e)
,(equal? (racket-evaluator (term e)) (term (run-value e)))])
(module+ test
(require "close.rkt")
(redex-check Standard e
(begin (displayln (term e))
(term (theorem:racket=eval-value e)))
#:prepare (close-over-fv-with lambda?)
#:attempts 12))
(module+ test
(test-results)) |
3d8e4de37d63c6ea92a071059671bc1640e88a966e206a57eed8a822d1db5c67 | ocaml/odoc | test.mli | (** Testing some canonical edge cases *)
(** This is the simplest case *)
module A__ : sig
(** @canonical Test.A *)
type t
end
module A = A__
type test = A__.t
(** Dune-style wrapped library *)
module Wrapped__X : sig
type t
end
module Wrapper : sig
(** @canonical Test.Wrapper.X *)
module X = Wrapped__X
end
type test2 = Wrapper.X.t
(** Dune-style wrapped library with hand-written wrapper module *)
module Wrapped2__X : sig
type t
end
module Wrapper2__ : sig
* @canonical Test . Wrapper2.X
module X = Wrapped2__X
end
module Wrapper2 : sig
open Wrapper2__
module X = X
end
type test3 = Wrapper2__.X.t
(** Dune-style wrapped library with hand-written wrapper module, but wrong *)
module Wrapped3__X : sig
type t
end
module Wrapper3__ : sig
(** @canonical Test.Wrapper3.X *)
module X = Wrapped3__X
end
module Wrapper3 : sig
open Wrapper3__
module X : module type of struct include X end with type t := X.t
end
val test3a : Wrapper3__.X.t
(** Non-hidden *)
module B_ : sig
(** @canonical Test.B *)
type t
end
module B = B_
type test4 = B_.t
* does n't know it 's canonical
module C_ : sig
type t
end
module C = C_
(** @canonical Test.C *)
module D = C
type test5 = D.t
| null | https://raw.githubusercontent.com/ocaml/odoc/bbc00f9c75d4e855bca69124ccc947949da808f7/test/xref2/canonical_alias.t/test.mli | ocaml | * Testing some canonical edge cases
* This is the simplest case
* @canonical Test.A
* Dune-style wrapped library
* @canonical Test.Wrapper.X
* Dune-style wrapped library with hand-written wrapper module
* Dune-style wrapped library with hand-written wrapper module, but wrong
* @canonical Test.Wrapper3.X
* Non-hidden
* @canonical Test.B
* @canonical Test.C |
module A__ : sig
type t
end
module A = A__
type test = A__.t
module Wrapped__X : sig
type t
end
module Wrapper : sig
module X = Wrapped__X
end
type test2 = Wrapper.X.t
module Wrapped2__X : sig
type t
end
module Wrapper2__ : sig
* @canonical Test . Wrapper2.X
module X = Wrapped2__X
end
module Wrapper2 : sig
open Wrapper2__
module X = X
end
type test3 = Wrapper2__.X.t
module Wrapped3__X : sig
type t
end
module Wrapper3__ : sig
module X = Wrapped3__X
end
module Wrapper3 : sig
open Wrapper3__
module X : module type of struct include X end with type t := X.t
end
val test3a : Wrapper3__.X.t
module B_ : sig
type t
end
module B = B_
type test4 = B_.t
* does n't know it 's canonical
module C_ : sig
type t
end
module C = C_
module D = C
type test5 = D.t
|
27afab9ef6e008457f622614cbecc9f1dbba00fff1dae15901410b1064483795 | joelburget/lvca | Nonbinding.mli | open Lvca_util
(** Lots of interesting domains have no binding. At that point they're not really
languages, just data types. This module gives a tighter representation for such types
and allows conversion to / from binding types. *)
type t =
| Operator of Provenance.t * string * t list
| Primitive of Primitive.All.t
val equivalent : ?info_eq:(Provenance.t -> Provenance.t -> bool) -> t -> t -> bool
val ( = ) : t -> t -> bool
* { 1 info }
* { 1 de Bruijn conversion }
type de_bruijn_conversion_error =
| Scope_encountered of DeBruijn.scope
| Var_encountered of DeBruijn.term
val of_de_bruijn : DeBruijn.term -> (t, de_bruijn_conversion_error) Result.t
val to_de_bruijn : t -> DeBruijn.term
* { 1 Nominal conversion }
val of_nominal : Nominal.Term.t -> (t, Nominal.Conversion_error.t) Result.t
val to_nominal : t -> Nominal.Term.t
(** {1 Printing} *)
val pp : t Fmt.t
(** {1 Parsing} *)
val parse : t Lvca_parsing.t
(** {1 Misc} *)
val hash : t -> string
val select_path : path:int list -> t -> (t, string) Result.t
(** {1 Serialization} *)
val jsonify : t Json.serializer
val unjsonify : t Json.deserializer
type nonbinding = t
module type Convertible_s = sig
include Language_object_intf.S with type t = t
val of_nonbinding : nonbinding -> (t, nonbinding) Result.t
val to_nonbinding : t -> nonbinding
end
| null | https://raw.githubusercontent.com/joelburget/lvca/80f202eae45a9d383104bc750be70216b81d8864/syntax/Nonbinding.mli | ocaml | * Lots of interesting domains have no binding. At that point they're not really
languages, just data types. This module gives a tighter representation for such types
and allows conversion to / from binding types.
* {1 Printing}
* {1 Parsing}
* {1 Misc}
* {1 Serialization} | open Lvca_util
type t =
| Operator of Provenance.t * string * t list
| Primitive of Primitive.All.t
val equivalent : ?info_eq:(Provenance.t -> Provenance.t -> bool) -> t -> t -> bool
val ( = ) : t -> t -> bool
* { 1 info }
* { 1 de Bruijn conversion }
type de_bruijn_conversion_error =
| Scope_encountered of DeBruijn.scope
| Var_encountered of DeBruijn.term
val of_de_bruijn : DeBruijn.term -> (t, de_bruijn_conversion_error) Result.t
val to_de_bruijn : t -> DeBruijn.term
* { 1 Nominal conversion }
val of_nominal : Nominal.Term.t -> (t, Nominal.Conversion_error.t) Result.t
val to_nominal : t -> Nominal.Term.t
val pp : t Fmt.t
val parse : t Lvca_parsing.t
val hash : t -> string
val select_path : path:int list -> t -> (t, string) Result.t
val jsonify : t Json.serializer
val unjsonify : t Json.deserializer
type nonbinding = t
module type Convertible_s = sig
include Language_object_intf.S with type t = t
val of_nonbinding : nonbinding -> (t, nonbinding) Result.t
val to_nonbinding : t -> nonbinding
end
|
3113eb72839a1c783669ce8acf8590ff27f3c3b6f3e00b2b7ed71e868e4e957a | matlux/game-of-life | project.clj | (defproject game-of-life "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/algo.monads "0.1.4"]
[org.clojure/math.numeric-tower "0.0.3"]
[quil "2.2.2"]
]
:main game-of-life.core)
| null | https://raw.githubusercontent.com/matlux/game-of-life/5e95e2fc2de266e784a35415f5b4bcde8ffd6838/project.clj | clojure | (defproject game-of-life "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/algo.monads "0.1.4"]
[org.clojure/math.numeric-tower "0.0.3"]
[quil "2.2.2"]
]
:main game-of-life.core)
| |
1c1cbcf137e41d5499ba94a4427186d851eae08559109fc7247ee9caa04e13ba | orbitz/ocaml-riakc | list_buckets.ml | open Core.Std
open Async.Std
let option_to_string = function
| Some v -> v
| None -> "<none>"
let fail s =
printf "%s\n" s;
shutdown 1
let exec () =
let host = Sys.argv.(1) in
let port = Int.of_string Sys.argv.(2) in
Riakc.Conn.with_conn
~host
~port
Riakc.Conn.list_buckets
let eval () =
exec () >>| function
| Ok buckets -> begin
List.iter
~f:(printf "%s\n")
buckets;
shutdown 0
end
| Error `Bad_conn -> fail "Bad_conn"
| Error `Bad_payload -> fail "Bad_payload"
| Error `Incomplete_payload -> fail "Incomplete_payload"
| Error `Notfound -> fail "Notfound"
| Error `Incomplete -> fail "Incomplete"
| Error `Overflow -> fail "Overflow"
| Error `Unknown_type -> fail "Unknown_type"
| Error `Wrong_type -> fail "Wrong_type"
let () =
ignore (eval ());
never_returns (Scheduler.go ())
| null | https://raw.githubusercontent.com/orbitz/ocaml-riakc/507241edbdb26d65961e5a90d69c219b2674db9d/src/examples/list_buckets.ml | ocaml | open Core.Std
open Async.Std
let option_to_string = function
| Some v -> v
| None -> "<none>"
let fail s =
printf "%s\n" s;
shutdown 1
let exec () =
let host = Sys.argv.(1) in
let port = Int.of_string Sys.argv.(2) in
Riakc.Conn.with_conn
~host
~port
Riakc.Conn.list_buckets
let eval () =
exec () >>| function
| Ok buckets -> begin
List.iter
~f:(printf "%s\n")
buckets;
shutdown 0
end
| Error `Bad_conn -> fail "Bad_conn"
| Error `Bad_payload -> fail "Bad_payload"
| Error `Incomplete_payload -> fail "Incomplete_payload"
| Error `Notfound -> fail "Notfound"
| Error `Incomplete -> fail "Incomplete"
| Error `Overflow -> fail "Overflow"
| Error `Unknown_type -> fail "Unknown_type"
| Error `Wrong_type -> fail "Wrong_type"
let () =
ignore (eval ());
never_returns (Scheduler.go ())
| |
248db047af1fb8896219eb2283ad2d76d8262de243684218520ef4cf63e03c44 | conscell/hugs-android | SrcDist.hs | # OPTIONS_GHC -cpp #
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.SrcDist
Copyright : 2004
--
Maintainer : < >
-- Stability : alpha
-- Portability : portable
--
Copyright ( c ) 2003 - 2004 ,
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are
met :
* Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
* Redistributions in binary form must reproduce the above
copyright notice , this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution .
* Neither the name of nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
-- NOTE: FIX: we don't have a great way of testing this module, since
-- we can't easily look inside a tarball once its created.
module Distribution.Simple.SrcDist (
sdist
) where
import Distribution.PackageDescription
(PackageDescription(..), BuildInfo(..), Executable(..), Library(..),
setupMessage, libModules)
import Distribution.Package (showPackageId, PackageIdentifier(pkgVersion))
import Distribution.Version (Version(versionBranch))
import Distribution.Simple.Utils
(smartCopySources, die, findPackageDesc, findFile, copyFileVerbose)
import Distribution.Setup (SDistFlags(..))
import Distribution.PreProcess (PPSuffixHandler, ppSuffixes, removePreprocessed)
import Control.Monad(when)
import Data.Char (isSpace, toLower)
import Data.List (isPrefixOf)
import System.Cmd (system)
import System.Time (getClockTime, toCalendarTime, CalendarTime(..))
import Distribution.Compat.Directory (doesFileExist, doesDirectoryExist,
getCurrentDirectory, createDirectoryIfMissing, removeDirectoryRecursive)
import Distribution.Compat.FilePath (joinFileName, splitFileName)
-- |Create a source distribution. FIX: Calls tar directly (won't work
-- on windows).
sdist :: PackageDescription
-> SDistFlags -- verbose & snapshot
-> FilePath -- ^build prefix (temp dir)
-> FilePath -- ^TargetPrefix
-> [PPSuffixHandler] -- ^ extra preprocessors (includes suffixes)
-> IO ()
sdist pkg_descr_orig (SDistFlags snapshot verbose) tmpDir targetPref pps = do
time <- getClockTime
ct <- toCalendarTime time
let date = ctYear ct*10000 + (fromEnum (ctMonth ct) + 1)*100 + ctDay ct
let pkg_descr
| snapshot = updatePackage (updatePkgVersion
(updateVersionBranch (++ [date]))) pkg_descr_orig
| otherwise = pkg_descr_orig
setupMessage "Building source dist for" pkg_descr
ex <- doesDirectoryExist tmpDir
when ex (die $ "Source distribution already in place. please move: " ++ tmpDir)
let targetDir = tmpDir `joinFileName` (nameVersion pkg_descr)
createDirectoryIfMissing True targetDir
-- maybe move the library files into place
maybe (return ()) (\l -> prepareDir verbose targetDir pps (libModules pkg_descr) (libBuildInfo l))
(library pkg_descr)
-- move the executables into place
flip mapM_ (executables pkg_descr) $ \ (Executable _ mainPath exeBi) -> do
prepareDir verbose targetDir pps [] exeBi
srcMainFile <- findFile (hsSourceDirs exeBi) mainPath
copyFileTo verbose targetDir srcMainFile
flip mapM_ (dataFiles pkg_descr) $ \ file -> do
let (dir, _) = splitFileName file
createDirectoryIfMissing True (targetDir `joinFileName` dir)
copyFileVerbose verbose file (targetDir `joinFileName` file)
when (not (null (licenseFile pkg_descr))) $
copyFileTo verbose targetDir (licenseFile pkg_descr)
flip mapM_ (extraSrcFiles pkg_descr) $ \ fpath -> do
copyFileTo verbose targetDir fpath
-- setup isn't listed in the description file.
hsExists <- doesFileExist "Setup.hs"
lhsExists <- doesFileExist "Setup.lhs"
if hsExists then copyFileTo verbose targetDir "Setup.hs"
else if lhsExists then copyFileTo verbose targetDir "Setup.lhs"
else writeFile (targetDir `joinFileName` "Setup.hs") $ unlines [
"import Distribution.Simple",
"main = defaultMainWithHooks defaultUserHooks"]
-- the description file itself
descFile <- getCurrentDirectory >>= findPackageDesc
let targetDescFile = targetDir `joinFileName` descFile
-- We could just writePackageDescription targetDescFile pkg_descr,
-- but that would lose comments and formatting.
if snapshot then do
contents <- readFile descFile
writeFile targetDescFile $
unlines $ map (appendVersion date) $ lines $ contents
else copyFileVerbose verbose descFile targetDescFile
let tarBallFilePath = targetPref `joinFileName` tarBallName pkg_descr
system $ "(cd " ++ tmpDir
++ ";tar cf - " ++ (nameVersion pkg_descr) ++ ") | gzip -9 >"
++ tarBallFilePath
removeDirectoryRecursive tmpDir
putStrLn $ "Source tarball created: " ++ tarBallFilePath
where
updatePackage f pd = pd { package = f (package pd) }
updatePkgVersion f pkg = pkg { pkgVersion = f (pkgVersion pkg) }
updateVersionBranch f v = v { versionBranch = f (versionBranch v) }
appendVersion :: Int -> String -> String
appendVersion n line
| "version:" `isPrefixOf` map toLower line =
trimTrailingSpace line ++ "." ++ show n
| otherwise = line
trimTrailingSpace :: String -> String
trimTrailingSpace = reverse . dropWhile isSpace . reverse
-- |Move the sources into place based on buildInfo
prepareDir :: Int -- ^verbose
-> FilePath -- ^TargetPrefix
-> [PPSuffixHandler] -- ^ extra preprocessors (includes suffixes)
-> [String] -- ^Exposed modules
-> BuildInfo
-> IO ()
prepareDir verbose inPref pps mods BuildInfo{hsSourceDirs=srcDirs, otherModules=mods', cSources=cfiles}
= do let suff = ppSuffixes pps ++ ["hs", "lhs"]
smartCopySources verbose srcDirs inPref (mods++mods') suff True True
removePreprocessed (map (joinFileName inPref) srcDirs) mods suff
mapM_ (copyFileTo verbose inPref) cfiles
copyFileTo :: Int -> FilePath -> FilePath -> IO ()
copyFileTo verbose dir file = do
let targetFile = dir `joinFileName` file
createDirectoryIfMissing True (fst (splitFileName targetFile))
copyFileVerbose verbose file targetFile
------------------------------------------------------------
-- |The file name of the tarball
tarBallName :: PackageDescription -> FilePath
tarBallName p = (nameVersion p) ++ ".tar.gz"
nameVersion :: PackageDescription -> String
nameVersion = showPackageId . package
-- ------------------------------------------------------------
-- * Testing
-- ------------------------------------------------------------
| null | https://raw.githubusercontent.com/conscell/hugs-android/31e5861bc1a1dd9931e6b2471a9f45c14e3c6c7e/hugs/lib/hugs/packages/Cabal/Distribution/Simple/SrcDist.hs | haskell | ---------------------------------------------------------------------------
|
Module : Distribution.Simple.SrcDist
Stability : alpha
Portability : portable
NOTE: FIX: we don't have a great way of testing this module, since
we can't easily look inside a tarball once its created.
|Create a source distribution. FIX: Calls tar directly (won't work
on windows).
verbose & snapshot
^build prefix (temp dir)
^TargetPrefix
^ extra preprocessors (includes suffixes)
maybe move the library files into place
move the executables into place
setup isn't listed in the description file.
the description file itself
We could just writePackageDescription targetDescFile pkg_descr,
but that would lose comments and formatting.
|Move the sources into place based on buildInfo
^verbose
^TargetPrefix
^ extra preprocessors (includes suffixes)
^Exposed modules
----------------------------------------------------------
|The file name of the tarball
------------------------------------------------------------
* Testing
------------------------------------------------------------ | # OPTIONS_GHC -cpp #
Copyright : 2004
Maintainer : < >
Copyright ( c ) 2003 - 2004 ,
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are
met :
* Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
* Redistributions in binary form must reproduce the above
copyright notice , this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution .
* Neither the name of nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.SrcDist (
sdist
) where
import Distribution.PackageDescription
(PackageDescription(..), BuildInfo(..), Executable(..), Library(..),
setupMessage, libModules)
import Distribution.Package (showPackageId, PackageIdentifier(pkgVersion))
import Distribution.Version (Version(versionBranch))
import Distribution.Simple.Utils
(smartCopySources, die, findPackageDesc, findFile, copyFileVerbose)
import Distribution.Setup (SDistFlags(..))
import Distribution.PreProcess (PPSuffixHandler, ppSuffixes, removePreprocessed)
import Control.Monad(when)
import Data.Char (isSpace, toLower)
import Data.List (isPrefixOf)
import System.Cmd (system)
import System.Time (getClockTime, toCalendarTime, CalendarTime(..))
import Distribution.Compat.Directory (doesFileExist, doesDirectoryExist,
getCurrentDirectory, createDirectoryIfMissing, removeDirectoryRecursive)
import Distribution.Compat.FilePath (joinFileName, splitFileName)
sdist :: PackageDescription
-> IO ()
sdist pkg_descr_orig (SDistFlags snapshot verbose) tmpDir targetPref pps = do
time <- getClockTime
ct <- toCalendarTime time
let date = ctYear ct*10000 + (fromEnum (ctMonth ct) + 1)*100 + ctDay ct
let pkg_descr
| snapshot = updatePackage (updatePkgVersion
(updateVersionBranch (++ [date]))) pkg_descr_orig
| otherwise = pkg_descr_orig
setupMessage "Building source dist for" pkg_descr
ex <- doesDirectoryExist tmpDir
when ex (die $ "Source distribution already in place. please move: " ++ tmpDir)
let targetDir = tmpDir `joinFileName` (nameVersion pkg_descr)
createDirectoryIfMissing True targetDir
maybe (return ()) (\l -> prepareDir verbose targetDir pps (libModules pkg_descr) (libBuildInfo l))
(library pkg_descr)
flip mapM_ (executables pkg_descr) $ \ (Executable _ mainPath exeBi) -> do
prepareDir verbose targetDir pps [] exeBi
srcMainFile <- findFile (hsSourceDirs exeBi) mainPath
copyFileTo verbose targetDir srcMainFile
flip mapM_ (dataFiles pkg_descr) $ \ file -> do
let (dir, _) = splitFileName file
createDirectoryIfMissing True (targetDir `joinFileName` dir)
copyFileVerbose verbose file (targetDir `joinFileName` file)
when (not (null (licenseFile pkg_descr))) $
copyFileTo verbose targetDir (licenseFile pkg_descr)
flip mapM_ (extraSrcFiles pkg_descr) $ \ fpath -> do
copyFileTo verbose targetDir fpath
hsExists <- doesFileExist "Setup.hs"
lhsExists <- doesFileExist "Setup.lhs"
if hsExists then copyFileTo verbose targetDir "Setup.hs"
else if lhsExists then copyFileTo verbose targetDir "Setup.lhs"
else writeFile (targetDir `joinFileName` "Setup.hs") $ unlines [
"import Distribution.Simple",
"main = defaultMainWithHooks defaultUserHooks"]
descFile <- getCurrentDirectory >>= findPackageDesc
let targetDescFile = targetDir `joinFileName` descFile
if snapshot then do
contents <- readFile descFile
writeFile targetDescFile $
unlines $ map (appendVersion date) $ lines $ contents
else copyFileVerbose verbose descFile targetDescFile
let tarBallFilePath = targetPref `joinFileName` tarBallName pkg_descr
system $ "(cd " ++ tmpDir
++ ";tar cf - " ++ (nameVersion pkg_descr) ++ ") | gzip -9 >"
++ tarBallFilePath
removeDirectoryRecursive tmpDir
putStrLn $ "Source tarball created: " ++ tarBallFilePath
where
updatePackage f pd = pd { package = f (package pd) }
updatePkgVersion f pkg = pkg { pkgVersion = f (pkgVersion pkg) }
updateVersionBranch f v = v { versionBranch = f (versionBranch v) }
appendVersion :: Int -> String -> String
appendVersion n line
| "version:" `isPrefixOf` map toLower line =
trimTrailingSpace line ++ "." ++ show n
| otherwise = line
trimTrailingSpace :: String -> String
trimTrailingSpace = reverse . dropWhile isSpace . reverse
-> BuildInfo
-> IO ()
prepareDir verbose inPref pps mods BuildInfo{hsSourceDirs=srcDirs, otherModules=mods', cSources=cfiles}
= do let suff = ppSuffixes pps ++ ["hs", "lhs"]
smartCopySources verbose srcDirs inPref (mods++mods') suff True True
removePreprocessed (map (joinFileName inPref) srcDirs) mods suff
mapM_ (copyFileTo verbose inPref) cfiles
copyFileTo :: Int -> FilePath -> FilePath -> IO ()
copyFileTo verbose dir file = do
let targetFile = dir `joinFileName` file
createDirectoryIfMissing True (fst (splitFileName targetFile))
copyFileVerbose verbose file targetFile
tarBallName :: PackageDescription -> FilePath
tarBallName p = (nameVersion p) ++ ".tar.gz"
nameVersion :: PackageDescription -> String
nameVersion = showPackageId . package
|
eb9ae66ea4f560bf2e21c65598d55ef15c1374b23ef0531c6a95cf993d48aca9 | binaryage/chromex | webview_tag.cljs | (ns chromex.app.webview-tag (:require-macros [chromex.app.webview-tag :refer [gen-wrap]])
(:require [chromex.core]))
; -- properties -------------------------------------------------------------------------------------------------------------
(defn content-window* [config]
(gen-wrap :property ::content-window config))
(defn request* [config]
(gen-wrap :property ::request config))
(defn context-menus* [config]
(gen-wrap :property ::context-menus config))
-- functions --------------------------------------------------------------------------------------------------------------
(defn get-audio-state* [config]
(gen-wrap :function ::get-audio-state config))
(defn set-audio-muted* [config mute]
(gen-wrap :function ::set-audio-muted config mute))
(defn is-audio-muted* [config]
(gen-wrap :function ::is-audio-muted config))
(defn capture-visible-region* [config options]
(gen-wrap :function ::capture-visible-region config options))
(defn add-content-scripts* [config content-script-list]
(gen-wrap :function ::add-content-scripts config content-script-list))
(defn back* [config]
(gen-wrap :function ::back config))
(defn can-go-back* [config]
(gen-wrap :function ::can-go-back config))
(defn can-go-forward* [config]
(gen-wrap :function ::can-go-forward config))
(defn clear-data* [config options types]
(gen-wrap :function ::clear-data config options types))
(defn execute-script* [config details]
(gen-wrap :function ::execute-script config details))
(defn find* [config search-text options]
(gen-wrap :function ::find config search-text options))
(defn forward* [config]
(gen-wrap :function ::forward config))
(defn get-process-id* [config]
(gen-wrap :function ::get-process-id config))
(defn get-user-agent* [config]
(gen-wrap :function ::get-user-agent config))
(defn get-zoom* [config]
(gen-wrap :function ::get-zoom config))
(defn get-zoom-mode* [config]
(gen-wrap :function ::get-zoom-mode config))
(defn go* [config relative-index]
(gen-wrap :function ::go config relative-index))
(defn insert-css* [config details]
(gen-wrap :function ::insert-css config details))
(defn is-user-agent-overridden* [config]
(gen-wrap :function ::is-user-agent-overridden config))
(defn print* [config]
(gen-wrap :function ::print config))
(defn reload* [config]
(gen-wrap :function ::reload config))
(defn remove-content-scripts* [config script-name-list]
(gen-wrap :function ::remove-content-scripts config script-name-list))
(defn set-user-agent-override* [config user-agent]
(gen-wrap :function ::set-user-agent-override config user-agent))
(defn set-zoom* [config zoom-factor]
(gen-wrap :function ::set-zoom config zoom-factor))
(defn set-zoom-mode* [config zoom-mode]
(gen-wrap :function ::set-zoom-mode config zoom-mode))
(defn stop* [config]
(gen-wrap :function ::stop config))
(defn stop-finding* [config action]
(gen-wrap :function ::stop-finding config action))
(defn load-data-with-base-url* [config data-url base-url virtual-url]
(gen-wrap :function ::load-data-with-base-url config data-url base-url virtual-url))
(defn set-spatial-navigation-enabled* [config enabled]
(gen-wrap :function ::set-spatial-navigation-enabled config enabled))
(defn is-spatial-navigation-enabled* [config]
(gen-wrap :function ::is-spatial-navigation-enabled config))
(defn terminate* [config]
(gen-wrap :function ::terminate config))
| null | https://raw.githubusercontent.com/binaryage/chromex/33834ba5dd4f4238a3c51f99caa0416f30c308c5/src/apps/chromex/app/webview_tag.cljs | clojure | -- properties ------------------------------------------------------------------------------------------------------------- | (ns chromex.app.webview-tag (:require-macros [chromex.app.webview-tag :refer [gen-wrap]])
(:require [chromex.core]))
(defn content-window* [config]
(gen-wrap :property ::content-window config))
(defn request* [config]
(gen-wrap :property ::request config))
(defn context-menus* [config]
(gen-wrap :property ::context-menus config))
-- functions --------------------------------------------------------------------------------------------------------------
(defn get-audio-state* [config]
(gen-wrap :function ::get-audio-state config))
(defn set-audio-muted* [config mute]
(gen-wrap :function ::set-audio-muted config mute))
(defn is-audio-muted* [config]
(gen-wrap :function ::is-audio-muted config))
(defn capture-visible-region* [config options]
(gen-wrap :function ::capture-visible-region config options))
(defn add-content-scripts* [config content-script-list]
(gen-wrap :function ::add-content-scripts config content-script-list))
(defn back* [config]
(gen-wrap :function ::back config))
(defn can-go-back* [config]
(gen-wrap :function ::can-go-back config))
(defn can-go-forward* [config]
(gen-wrap :function ::can-go-forward config))
(defn clear-data* [config options types]
(gen-wrap :function ::clear-data config options types))
(defn execute-script* [config details]
(gen-wrap :function ::execute-script config details))
(defn find* [config search-text options]
(gen-wrap :function ::find config search-text options))
(defn forward* [config]
(gen-wrap :function ::forward config))
(defn get-process-id* [config]
(gen-wrap :function ::get-process-id config))
(defn get-user-agent* [config]
(gen-wrap :function ::get-user-agent config))
(defn get-zoom* [config]
(gen-wrap :function ::get-zoom config))
(defn get-zoom-mode* [config]
(gen-wrap :function ::get-zoom-mode config))
(defn go* [config relative-index]
(gen-wrap :function ::go config relative-index))
(defn insert-css* [config details]
(gen-wrap :function ::insert-css config details))
(defn is-user-agent-overridden* [config]
(gen-wrap :function ::is-user-agent-overridden config))
(defn print* [config]
(gen-wrap :function ::print config))
(defn reload* [config]
(gen-wrap :function ::reload config))
(defn remove-content-scripts* [config script-name-list]
(gen-wrap :function ::remove-content-scripts config script-name-list))
(defn set-user-agent-override* [config user-agent]
(gen-wrap :function ::set-user-agent-override config user-agent))
(defn set-zoom* [config zoom-factor]
(gen-wrap :function ::set-zoom config zoom-factor))
(defn set-zoom-mode* [config zoom-mode]
(gen-wrap :function ::set-zoom-mode config zoom-mode))
(defn stop* [config]
(gen-wrap :function ::stop config))
(defn stop-finding* [config action]
(gen-wrap :function ::stop-finding config action))
(defn load-data-with-base-url* [config data-url base-url virtual-url]
(gen-wrap :function ::load-data-with-base-url config data-url base-url virtual-url))
(defn set-spatial-navigation-enabled* [config enabled]
(gen-wrap :function ::set-spatial-navigation-enabled config enabled))
(defn is-spatial-navigation-enabled* [config]
(gen-wrap :function ::is-spatial-navigation-enabled config))
(defn terminate* [config]
(gen-wrap :function ::terminate config))
|
a77e47d97bfb5f36f8ff8459446f0cf92c66a8d19d081c7e13444b063fe7f22f | sjl/temperance | 6-optimization.lisp | (in-package :temperance)
;;;; ,,--. . .
;;;; |`, | ,-. |- . ,-,-. . ,_, ,-. |- . ,-. ,-.
;;;; | | | | | | | | | | / ,-| | | | | | |
;;;; `---' |-' `' ' ' ' ' ' '"' `-^ `' ' `-' ' '
;;;; |
;;;; '
Optimization of the WAM instructions happens between the precompilation
;;; phase and the rendering phase. We perform a number of passes over the
circle of instructions , doing one optimization each time .
(defun optimize-get-constant (node constant register)
1 . get_structure c/0 , Ai - > get_constant c , Ai
(circle-replace node `(:get-constant ,constant ,register)))
(defun optimize-put-constant (node constant register)
2 . put_structure c/0 , Ai - > put_constant c , Ai
(circle-replace node `(:put-constant ,constant ,register)))
(defun optimize-subterm-constant-query (node constant register)
3 . put_structure c/0 , Xi * * * WE ARE HERE
;; ...
(loop
:with previous = (circle-prev node)
;; Search for the corresponding set-value instruction
:for n = (circle-forward-remove node) :then (circle-forward n)
:while n
:for (opcode . arguments) = (circle-value n)
:when (and (eql opcode :subterm-value-local)
(register= register (first arguments)))
:do
(circle-replace n `(:subterm-constant ,constant))
(return previous)))
(defun optimize-subterm-constant-program (node constant register)
4 . subterm_variable
;; ...
get_structure c/0 , Xi * * * WE ARE HERE
(loop
;; Search backward for the corresponding subterm-variable instruction
:for n = (circle-backward node) :then (circle-backward n)
:while n
:for (opcode . arguments) = (circle-value n)
:when (and (eql opcode :subterm-variable-local)
(register= register (first arguments)))
:do
(circle-replace n `(:subterm-constant ,constant))
(return (circle-backward-remove node))))
(defun optimize-constants (instructions)
From the book and the erratum , there are four optimizations we can do for
;; constants (0-arity structures).
(flet ((optimize-put (node functor register)
(if (register-argument-p register)
(optimize-put-constant node functor register)
(optimize-subterm-constant-query node functor register)))
(optimize-get (node functor register)
(if (register-argument-p register)
(optimize-get-constant node functor register)
(optimize-subterm-constant-program node functor register))))
(loop
:for node = (circle-forward instructions) :then (circle-forward node)
:while node :do
(destructuring-bind (opcode . arguments) (circle-value node)
(when (member opcode '(:put-structure :get-structure))
(destructuring-bind (functor arity register) arguments
(when (zerop arity)
(setf node
(case opcode
(:put-structure (optimize-put node functor register))
(:get-structure (optimize-get node functor register))))))))))
instructions)
(defun optimize-void-runs (instructions)
We can optimize runs of N (: unify - void 1 ) instructions into a single one
;; that does all N at once.
(loop
:for node = (circle-forward instructions) :then (circle-forward node)
:while node
:for opcode = (car (circle-value node))
:when (eq opcode :subterm-void)
:do
(loop
:with beginning = (circle-backward node)
:for run-node = node :then (circle-forward run-node)
:for run-opcode = (car (circle-value run-node))
:while (eq opcode run-opcode)
:do (circle-remove run-node)
:sum 1 :into run-length fixnum ; lol
:finally
(progn
(setf node (circle-forward beginning))
(circle-insert-after beginning
`(,opcode ,run-length)))))
instructions)
(defun optimize-instructions (instructions)
(-<> instructions
optimize-constants
optimize-void-runs))
| null | https://raw.githubusercontent.com/sjl/temperance/f7e68f46b7afaeecf643c009eb2e130500556e31/src/compiler/6-optimization.lisp | lisp | ,,--. . .
|`, | ,-. |- . ,-,-. . ,_, ,-. |- . ,-. ,-.
| | | | | | | | | | / ,-| | | | | | |
`---' |-' `' ' ' ' ' ' '"' `-^ `' ' `-' ' '
|
'
phase and the rendering phase. We perform a number of passes over the
...
Search for the corresponding set-value instruction
...
Search backward for the corresponding subterm-variable instruction
constants (0-arity structures).
that does all N at once.
lol | (in-package :temperance)
Optimization of the WAM instructions happens between the precompilation
circle of instructions , doing one optimization each time .
(defun optimize-get-constant (node constant register)
1 . get_structure c/0 , Ai - > get_constant c , Ai
(circle-replace node `(:get-constant ,constant ,register)))
(defun optimize-put-constant (node constant register)
2 . put_structure c/0 , Ai - > put_constant c , Ai
(circle-replace node `(:put-constant ,constant ,register)))
(defun optimize-subterm-constant-query (node constant register)
3 . put_structure c/0 , Xi * * * WE ARE HERE
(loop
:with previous = (circle-prev node)
:for n = (circle-forward-remove node) :then (circle-forward n)
:while n
:for (opcode . arguments) = (circle-value n)
:when (and (eql opcode :subterm-value-local)
(register= register (first arguments)))
:do
(circle-replace n `(:subterm-constant ,constant))
(return previous)))
(defun optimize-subterm-constant-program (node constant register)
4 . subterm_variable
get_structure c/0 , Xi * * * WE ARE HERE
(loop
:for n = (circle-backward node) :then (circle-backward n)
:while n
:for (opcode . arguments) = (circle-value n)
:when (and (eql opcode :subterm-variable-local)
(register= register (first arguments)))
:do
(circle-replace n `(:subterm-constant ,constant))
(return (circle-backward-remove node))))
(defun optimize-constants (instructions)
From the book and the erratum , there are four optimizations we can do for
(flet ((optimize-put (node functor register)
(if (register-argument-p register)
(optimize-put-constant node functor register)
(optimize-subterm-constant-query node functor register)))
(optimize-get (node functor register)
(if (register-argument-p register)
(optimize-get-constant node functor register)
(optimize-subterm-constant-program node functor register))))
(loop
:for node = (circle-forward instructions) :then (circle-forward node)
:while node :do
(destructuring-bind (opcode . arguments) (circle-value node)
(when (member opcode '(:put-structure :get-structure))
(destructuring-bind (functor arity register) arguments
(when (zerop arity)
(setf node
(case opcode
(:put-structure (optimize-put node functor register))
(:get-structure (optimize-get node functor register))))))))))
instructions)
(defun optimize-void-runs (instructions)
We can optimize runs of N (: unify - void 1 ) instructions into a single one
(loop
:for node = (circle-forward instructions) :then (circle-forward node)
:while node
:for opcode = (car (circle-value node))
:when (eq opcode :subterm-void)
:do
(loop
:with beginning = (circle-backward node)
:for run-node = node :then (circle-forward run-node)
:for run-opcode = (car (circle-value run-node))
:while (eq opcode run-opcode)
:do (circle-remove run-node)
:finally
(progn
(setf node (circle-forward beginning))
(circle-insert-after beginning
`(,opcode ,run-length)))))
instructions)
(defun optimize-instructions (instructions)
(-<> instructions
optimize-constants
optimize-void-runs))
|
b321c2963848a692e07608b8e6a64f0fa1cbedabb0e035f51b3fab46ef900ef5 | eugeneia/athens | util.lisp | (in-package :cl-user)
(defpackage jonathan.util
(:use :cl
:jonathan.error)
(:export :+impl-comma-p+
:my-plist-p
:integer-char-p
:make-keyword
:comma-p
:comma-expr
:*quasiquote*))
(in-package :jonathan.util)
(defparameter +impl-comma-p+ (and (find-package :sb-impl)
(find-symbol "COMMA-P" :sb-impl)
(find-symbol "COMMA-EXPR" :sb-impl)
t))
(defun my-plist-p (list)
(typecase list
(null t)
(cons (loop for (key val next) on list by #'cddr
if (not (keywordp key))
return nil
else
unless next return t))))
(declaim (inline integer-char-p))
(defun integer-char-p (char)
(or (char<= #\0 char #\9)
(char= char #\-)))
(defun make-keyword (str)
(intern str #.(find-package :keyword)))
(defun comma-p (comma)
(if +impl-comma-p+
(when (uiop:symbol-call :sb-impl "COMMA-P" comma)
(if (= (uiop:symbol-call :sb-impl "COMMA-KIND" comma) 0)
;; (comma-kind comma
;; => 0: just only comma
1 : with dot
2 : with at
t
(error '<jonathan-not-supported-error> :object "Comma with dot or at")))
(error '<jonathan-not-supported-error> :object " Comma")))
(defun comma-expr (comma)
(if +impl-comma-p+
(uiop:symbol-call :sb-impl "COMMA-EXPR" comma)
nil))
(defvar *quasiquote* (if +impl-comma-p+
(find-symbol "QUASIQUOTE" :sb-int)
nil))
| null | https://raw.githubusercontent.com/eugeneia/athens/cc9d456edd3891b764b0fbf0202a3e2f58865cbf/quicklisp/dists/quicklisp/software/jonathan-20180430-git/src/util.lisp | lisp | (comma-kind comma
=> 0: just only comma | (in-package :cl-user)
(defpackage jonathan.util
(:use :cl
:jonathan.error)
(:export :+impl-comma-p+
:my-plist-p
:integer-char-p
:make-keyword
:comma-p
:comma-expr
:*quasiquote*))
(in-package :jonathan.util)
(defparameter +impl-comma-p+ (and (find-package :sb-impl)
(find-symbol "COMMA-P" :sb-impl)
(find-symbol "COMMA-EXPR" :sb-impl)
t))
(defun my-plist-p (list)
(typecase list
(null t)
(cons (loop for (key val next) on list by #'cddr
if (not (keywordp key))
return nil
else
unless next return t))))
(declaim (inline integer-char-p))
(defun integer-char-p (char)
(or (char<= #\0 char #\9)
(char= char #\-)))
(defun make-keyword (str)
(intern str #.(find-package :keyword)))
(defun comma-p (comma)
(if +impl-comma-p+
(when (uiop:symbol-call :sb-impl "COMMA-P" comma)
(if (= (uiop:symbol-call :sb-impl "COMMA-KIND" comma) 0)
1 : with dot
2 : with at
t
(error '<jonathan-not-supported-error> :object "Comma with dot or at")))
(error '<jonathan-not-supported-error> :object " Comma")))
(defun comma-expr (comma)
(if +impl-comma-p+
(uiop:symbol-call :sb-impl "COMMA-EXPR" comma)
nil))
(defvar *quasiquote* (if +impl-comma-p+
(find-symbol "QUASIQUOTE" :sb-int)
nil))
|
422223eba6af5696a23ad04c9a2b1a730a0ae8520df27e83405f28edd9739308 | nasa/Common-Metadata-Repository | system.clj | (ns cmr.metadata-db.system
"Defines functions for creating, starting, and stopping the application. Applications are
represented as a map of components. Design based on
-composition and related posts."
(:require
[cmr.acl.core :as acl]
[cmr.common-app.api.health :as common-health]
[cmr.common-app.services.cache-info :as cache-info]
[cmr.common-app.services.jvm-info :as jvm-info]
[cmr.common.api.web-server :as web]
[cmr.common.config :as cfg :refer [defconfig]]
[cmr.common.jobs :as jobs]
[cmr.common.lifecycle :as lifecycle]
[cmr.common.log :as log :refer [debug info warn error]]
[cmr.common.nrepl :as nrepl]
[cmr.common.system :as common-sys]
[cmr.message-queue.config :as queue-config]
[cmr.message-queue.queue.queue-broker :as queue-broker]
[cmr.metadata-db.api.routes :as routes]
[cmr.metadata-db.config :as config]
[cmr.metadata-db.services.jobs :as mdb-jobs]
[cmr.oracle.config :as oracle-config]
[cmr.oracle.connection :as oracle]
[cmr.transmit.config :as transmit-config]))
;; Design based on -composition and related posts
(def ^:private component-order
"Defines the order to start the components."
[:log :caches :db :queue-broker :scheduler :unclustered-scheduler :web :nrepl])
(def system-holder
"Required for jobs"
(atom nil))
(defconfig log-level
"App logging level"
{:default "info"})
(defn create-system
"Returns a new instance of the whole application."
([]
(create-system "metadata-db"))
([connection-pool-name]
(let [sys {:db (assoc (oracle/create-db (config/db-spec connection-pool-name))
:result-set-fetch-size
(config/result-set-fetch-size))
:log (log/create-logger-with-log-level (log-level))
:web (web/create-web-server (transmit-config/metadata-db-port) routes/make-api)
:nrepl (nrepl/create-nrepl-if-configured (config/metadata-db-nrepl-port))
:parallel-chunk-size (config/parallel-chunk-size)
:caches {acl/token-imp-cache-key (acl/create-token-imp-cache)
common-health/health-cache-key (common-health/create-health-cache)}
:scheduler (jobs/create-clustered-scheduler `system-holder :db mdb-jobs/jobs)
:unclustered-scheduler (jobs/create-scheduler
`system-holder [jvm-info/log-jvm-statistics-job
(cache-info/create-log-cache-info-job "metadata-db")])
:queue-broker (queue-broker/create-queue-broker (config/queue-config))
:relative-root-url (transmit-config/metadata-db-relative-root-url)}]
(transmit-config/system-with-connections sys [:access-control :echo-rest]))))
(def start
"Performs side effects to initialize the system, acquire resources,
and start it running. Returns an updated instance of the system."
(common-sys/start-fn "Metadata DB" component-order))
(def stop
"Performs side effects to shut down the system and release its
resources. Returns an updated instance of the system."
(common-sys/stop-fn "Metadata DB" component-order))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/7ea1e3700c641b07ad262f3237c539c807380ade/metadata-db-app/src/cmr/metadata_db/system.clj | clojure | Design based on -composition and related posts | (ns cmr.metadata-db.system
"Defines functions for creating, starting, and stopping the application. Applications are
represented as a map of components. Design based on
-composition and related posts."
(:require
[cmr.acl.core :as acl]
[cmr.common-app.api.health :as common-health]
[cmr.common-app.services.cache-info :as cache-info]
[cmr.common-app.services.jvm-info :as jvm-info]
[cmr.common.api.web-server :as web]
[cmr.common.config :as cfg :refer [defconfig]]
[cmr.common.jobs :as jobs]
[cmr.common.lifecycle :as lifecycle]
[cmr.common.log :as log :refer [debug info warn error]]
[cmr.common.nrepl :as nrepl]
[cmr.common.system :as common-sys]
[cmr.message-queue.config :as queue-config]
[cmr.message-queue.queue.queue-broker :as queue-broker]
[cmr.metadata-db.api.routes :as routes]
[cmr.metadata-db.config :as config]
[cmr.metadata-db.services.jobs :as mdb-jobs]
[cmr.oracle.config :as oracle-config]
[cmr.oracle.connection :as oracle]
[cmr.transmit.config :as transmit-config]))
(def ^:private component-order
"Defines the order to start the components."
[:log :caches :db :queue-broker :scheduler :unclustered-scheduler :web :nrepl])
(def system-holder
"Required for jobs"
(atom nil))
(defconfig log-level
"App logging level"
{:default "info"})
(defn create-system
"Returns a new instance of the whole application."
([]
(create-system "metadata-db"))
([connection-pool-name]
(let [sys {:db (assoc (oracle/create-db (config/db-spec connection-pool-name))
:result-set-fetch-size
(config/result-set-fetch-size))
:log (log/create-logger-with-log-level (log-level))
:web (web/create-web-server (transmit-config/metadata-db-port) routes/make-api)
:nrepl (nrepl/create-nrepl-if-configured (config/metadata-db-nrepl-port))
:parallel-chunk-size (config/parallel-chunk-size)
:caches {acl/token-imp-cache-key (acl/create-token-imp-cache)
common-health/health-cache-key (common-health/create-health-cache)}
:scheduler (jobs/create-clustered-scheduler `system-holder :db mdb-jobs/jobs)
:unclustered-scheduler (jobs/create-scheduler
`system-holder [jvm-info/log-jvm-statistics-job
(cache-info/create-log-cache-info-job "metadata-db")])
:queue-broker (queue-broker/create-queue-broker (config/queue-config))
:relative-root-url (transmit-config/metadata-db-relative-root-url)}]
(transmit-config/system-with-connections sys [:access-control :echo-rest]))))
(def start
"Performs side effects to initialize the system, acquire resources,
and start it running. Returns an updated instance of the system."
(common-sys/start-fn "Metadata DB" component-order))
(def stop
"Performs side effects to shut down the system and release its
resources. Returns an updated instance of the system."
(common-sys/stop-fn "Metadata DB" component-order))
|
dac3b34e9f67eaf045b2c7c3e259a8bfd9d3e0da2a28fc1368c9b89b661fdca9 | bobzhang/fan | test_sig_parser.ml | open Format
#camlp4o;;
let of_file filename =
let chan = open_in filename in
Stream.of_channel chan
let stream = of_file "_build/camlp4ast_signature.inferred.mli" ;;
open Camlp4.PreCast
open Fan_camlp4
< : < type .$typ : type$. > >
let f = wrap_stream_parser Syntax.parse_interf;;
let a = f stream;;
| null | https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/todoml/test/test_sig_parser.ml | ocaml | open Format
#camlp4o;;
let of_file filename =
let chan = open_in filename in
Stream.of_channel chan
let stream = of_file "_build/camlp4ast_signature.inferred.mli" ;;
open Camlp4.PreCast
open Fan_camlp4
< : < type .$typ : type$. > >
let f = wrap_stream_parser Syntax.parse_interf;;
let a = f stream;;
| |
1507837daff58289868947ac1d6c497a339789b12ef97ba31aa0adc6cbb7359b | jeapostrophe/opencl | oclVectorAdd.rkt | #lang racket
(require opencl/c)
(require "../utils/utils.rkt")
(require ffi/cvector)
(require ffi/unsafe/cvector)
(require ffi/unsafe)
(define (vectorAddHost data1 data2 result numElements)
(for ([i (in-range numElements)])
(ptr-set! result _cl_float i (+ (ptr-ref data1 _cl_float i)
(ptr-ref data2 _cl_float i)))))
(define event #f)
(define iNumElements 11444777) ;Length of float arrays to process (odd # for illustration)
(define cSourceFile "VectorAdd.cl")
(display "Starting...\n\n")
(printf "# of float elements per Array \t= ~a~n" iNumElements)
set and log Global and Local work size dimensions
(define szLocalWorkSize 128)
(define szGlobalWorkSize (roundUp szLocalWorkSize iNumElements)) ; rounded up to the nearest multiple of the LocalWorkSize
(printf "Global Work Size \t\t= ~a~nLocal Work Size \t\t= ~a~n# of Work Groups \t\t= ~a~n~n"
szGlobalWorkSize szLocalWorkSize (/ szGlobalWorkSize szLocalWorkSize))
(display "Allocate and Init Host Mem...\n")
(define srcA (malloc _cl_float szGlobalWorkSize 'raw))
(define srcB (malloc _cl_float szGlobalWorkSize 'raw))
(define dst (malloc _cl_float szGlobalWorkSize 'raw))
(define Golden (malloc _cl_float iNumElements 'raw))
(fillArray srcA iNumElements)
(fillArray srcB iNumElements)
;get platform
(display "clGetPlatformID...\n")
(define platform (cvector-ref (clGetPlatformIDs:vector) 0))
get gpu
(display "clGetDeviceIDs...\n")
(define devices (clGetDeviceIDs:vector platform 'CL_DEVICE_TYPE_GPU))
;create context
(display "clCreateContext...\n")
(define context (clCreateContext #f (cvector->vector devices)))
;create command queue
(display "clCreateCommandQueue...\n")
(define commandQueue (clCreateCommandQueue context (cvector-ref devices 0) '()))
Allocate the OpenCL buffer memory objects for source and result on the device GMEM
(display "clCreateBuffer...\n")
(define cmDevSrcA (clCreateBuffer context 'CL_MEM_READ_ONLY (* (ctype-sizeof _cl_float) szGlobalWorkSize) #f))
(define cmDevSrcB (clCreateBuffer context 'CL_MEM_READ_ONLY (* (ctype-sizeof _cl_float) szGlobalWorkSize) #f))
(define cmDevDst (clCreateBuffer context 'CL_MEM_WRITE_ONLY (* (ctype-sizeof _cl_float) szGlobalWorkSize) #f))
;Set up program
(printf "oclLoadProgSource (~a)...~n" cSourceFile)
(define sourceBytes (file->bytes cSourceFile))
(display "clCreateProgramWithSource...\n")
(define program (clCreateProgramWithSource context (make-vector 1 sourceBytes)))
(display "clBuildProgram...\n")
(clBuildProgram program (make-vector 0) (make-bytes 0))
;Set up kernal
(display "clCreateKernel (VectorAdd)...\n")
(define kernel (clCreateKernel program #"VectorAdd"))
(display "clSetKernelArg 0 - 3...\n\n")
(clSetKernelArg:_cl_mem kernel 0 cmDevSrcA)
(clSetKernelArg:_cl_mem kernel 1 cmDevSrcB)
(clSetKernelArg:_cl_mem kernel 2 cmDevDst)
(clSetKernelArg:_cl_int kernel 3 iNumElements)
;Asynchronous write of data to GPU
(display "clEnqueueWriteBuffer (SrcA and SrcB)...\n")
(set! event (clEnqueueWriteBuffer commandQueue cmDevSrcA 'CL_FALSE 0 (* (ctype-sizeof _cl_float) szGlobalWorkSize) srcA (make-vector 0)))
(set! event (clEnqueueWriteBuffer commandQueue cmDevSrcB 'CL_FALSE 0 (* (ctype-sizeof _cl_float) szGlobalWorkSize) srcB (make-vector 0)))
;Launch Kernel
(display "clEnqueueNDRangeKernel (VectorAdd)...\n")
(set! event (clEnqueueNDRangeKernel commandQueue kernel 1 (make-vector 1 szGlobalWorkSize) (make-vector 1 szLocalWorkSize) (make-vector 0)))
;Synchronous/blocking read of results, and check accumulated errors
(display "clEnqueueReadBuffer (Dst)...\n\n")
(set! event (clEnqueueReadBuffer commandQueue cmDevDst 'CL_TRUE 0 (* (ctype-sizeof _cl_float) szGlobalWorkSize) dst (make-vector 0)))
;Compute and compare results for golden-host and report errors and pass/fail
(display "Comparing against Host/C++ computation...\n\n")
(vectorAddHost srcA srcB Golden iNumElements)
(if (compareArrays dst Golden iNumElements)
(display "Passed\n\n")
(display "Failed\n\n"))
;Cleanup
(display "Starting Cleanup...\n\n")
(when kernel (clReleaseKernel kernel))
(when program (clReleaseProgram program))
(when commandQueue (clReleaseCommandQueue commandQueue))
(when context (clReleaseContext context))
(when cmDevSrcA (clReleaseMemObject cmDevSrcA))
(when cmDevSrcB (clReleaseMemObject cmDevSrcB))
(when cmDevDst (clReleaseMemObject cmDevDst))
(free srcA)
(free srcB)
(free dst)
(free Golden)
(display "oclVectorAdd Exiting...\n")
| null | https://raw.githubusercontent.com/jeapostrophe/opencl/f984050b0c02beb6df186d1d531c4a92a98df1a1/tests/opencl/samples/nvidiaSamples/oclVectorAdd/oclVectorAdd.rkt | racket | Length of float arrays to process (odd # for illustration)
rounded up to the nearest multiple of the LocalWorkSize
get platform
create context
create command queue
Set up program
Set up kernal
Asynchronous write of data to GPU
Launch Kernel
Synchronous/blocking read of results, and check accumulated errors
Compute and compare results for golden-host and report errors and pass/fail
Cleanup | #lang racket
(require opencl/c)
(require "../utils/utils.rkt")
(require ffi/cvector)
(require ffi/unsafe/cvector)
(require ffi/unsafe)
(define (vectorAddHost data1 data2 result numElements)
(for ([i (in-range numElements)])
(ptr-set! result _cl_float i (+ (ptr-ref data1 _cl_float i)
(ptr-ref data2 _cl_float i)))))
(define event #f)
(define cSourceFile "VectorAdd.cl")
(display "Starting...\n\n")
(printf "# of float elements per Array \t= ~a~n" iNumElements)
set and log Global and Local work size dimensions
(define szLocalWorkSize 128)
(printf "Global Work Size \t\t= ~a~nLocal Work Size \t\t= ~a~n# of Work Groups \t\t= ~a~n~n"
szGlobalWorkSize szLocalWorkSize (/ szGlobalWorkSize szLocalWorkSize))
(display "Allocate and Init Host Mem...\n")
(define srcA (malloc _cl_float szGlobalWorkSize 'raw))
(define srcB (malloc _cl_float szGlobalWorkSize 'raw))
(define dst (malloc _cl_float szGlobalWorkSize 'raw))
(define Golden (malloc _cl_float iNumElements 'raw))
(fillArray srcA iNumElements)
(fillArray srcB iNumElements)
(display "clGetPlatformID...\n")
(define platform (cvector-ref (clGetPlatformIDs:vector) 0))
get gpu
(display "clGetDeviceIDs...\n")
(define devices (clGetDeviceIDs:vector platform 'CL_DEVICE_TYPE_GPU))
(display "clCreateContext...\n")
(define context (clCreateContext #f (cvector->vector devices)))
(display "clCreateCommandQueue...\n")
(define commandQueue (clCreateCommandQueue context (cvector-ref devices 0) '()))
Allocate the OpenCL buffer memory objects for source and result on the device GMEM
(display "clCreateBuffer...\n")
(define cmDevSrcA (clCreateBuffer context 'CL_MEM_READ_ONLY (* (ctype-sizeof _cl_float) szGlobalWorkSize) #f))
(define cmDevSrcB (clCreateBuffer context 'CL_MEM_READ_ONLY (* (ctype-sizeof _cl_float) szGlobalWorkSize) #f))
(define cmDevDst (clCreateBuffer context 'CL_MEM_WRITE_ONLY (* (ctype-sizeof _cl_float) szGlobalWorkSize) #f))
(printf "oclLoadProgSource (~a)...~n" cSourceFile)
(define sourceBytes (file->bytes cSourceFile))
(display "clCreateProgramWithSource...\n")
(define program (clCreateProgramWithSource context (make-vector 1 sourceBytes)))
(display "clBuildProgram...\n")
(clBuildProgram program (make-vector 0) (make-bytes 0))
(display "clCreateKernel (VectorAdd)...\n")
(define kernel (clCreateKernel program #"VectorAdd"))
(display "clSetKernelArg 0 - 3...\n\n")
(clSetKernelArg:_cl_mem kernel 0 cmDevSrcA)
(clSetKernelArg:_cl_mem kernel 1 cmDevSrcB)
(clSetKernelArg:_cl_mem kernel 2 cmDevDst)
(clSetKernelArg:_cl_int kernel 3 iNumElements)
(display "clEnqueueWriteBuffer (SrcA and SrcB)...\n")
(set! event (clEnqueueWriteBuffer commandQueue cmDevSrcA 'CL_FALSE 0 (* (ctype-sizeof _cl_float) szGlobalWorkSize) srcA (make-vector 0)))
(set! event (clEnqueueWriteBuffer commandQueue cmDevSrcB 'CL_FALSE 0 (* (ctype-sizeof _cl_float) szGlobalWorkSize) srcB (make-vector 0)))
(display "clEnqueueNDRangeKernel (VectorAdd)...\n")
(set! event (clEnqueueNDRangeKernel commandQueue kernel 1 (make-vector 1 szGlobalWorkSize) (make-vector 1 szLocalWorkSize) (make-vector 0)))
(display "clEnqueueReadBuffer (Dst)...\n\n")
(set! event (clEnqueueReadBuffer commandQueue cmDevDst 'CL_TRUE 0 (* (ctype-sizeof _cl_float) szGlobalWorkSize) dst (make-vector 0)))
(display "Comparing against Host/C++ computation...\n\n")
(vectorAddHost srcA srcB Golden iNumElements)
(if (compareArrays dst Golden iNumElements)
(display "Passed\n\n")
(display "Failed\n\n"))
(display "Starting Cleanup...\n\n")
(when kernel (clReleaseKernel kernel))
(when program (clReleaseProgram program))
(when commandQueue (clReleaseCommandQueue commandQueue))
(when context (clReleaseContext context))
(when cmDevSrcA (clReleaseMemObject cmDevSrcA))
(when cmDevSrcB (clReleaseMemObject cmDevSrcB))
(when cmDevDst (clReleaseMemObject cmDevDst))
(free srcA)
(free srcB)
(free dst)
(free Golden)
(display "oclVectorAdd Exiting...\n")
|
51a2bc28273be497af820c7f577fe6bc33f7c48bc01cf3b12014eb0da5b9429f | orbitz/ocaml-protobuf | parser.ml | open Core.Std
type error = [ `Incomplete | `Overflow | `Unknown_type | `Wrong_type ]
module State = struct
type t = { tags : Protocol.Value.t list Int.Map.t
}
let tags_of_bits bits =
let append field map =
let module F = Protocol.Field in
match Int.Map.find map (F.tag field) with
| Some v ->
Int.Map.add ~key:(F.tag field) ~data:((F.value field)::v) map
| None ->
Int.Map.add ~key:(F.tag field) ~data:([F.value field]) map
in
let rec tags_of_bits' acc bits =
if Bitstring.bitstring_length bits > 0 then begin
let open Result.Monad_infix in
Protocol.next bits >>= fun (field, bits) ->
tags_of_bits' (append field acc) bits
end
else
Ok acc
in
tags_of_bits' (Int.Map.empty) bits
let create bits =
let open Result.Monad_infix in
tags_of_bits bits >>= fun tags ->
Ok { tags }
end
type 'a t = { run : State.t -> (('a * State.t), error) Result.t }
type tag = int
type 'a _t = 'a t
let fail err = { run = fun _ -> Error err }
include (Monad.Make (struct
type 'a t = 'a _t
let return a = { run = fun s -> Ok (a, s) }
let bind t f =
{ run = fun s ->
match t.run s with
| Ok (a, s') ->
let t' = f a in
t'.run s'
| Error `Incomplete ->
let t' = fail `Incomplete in
t'.run s
| Error `Overflow ->
let t' = fail `Overflow in
t'.run s
| Error `Unknown_type ->
let t' = fail `Unknown_type in
t'.run s
| Error `Wrong_type ->
let t' = fail `Wrong_type in
t'.run s
}
let map = `Define_using_bind
end) : Monad.S with type 'a t := 'a _t)
let rec break_foldl ~f ~init = function
| [] ->
Ok init
| x::xs ->
let open Result.Monad_infix in
f init x >>= fun init ->
break_foldl ~f ~init xs
let check_type f l =
let open Result.Monad_infix in
let check acc v =
f v >>= fun v ->
Ok (v::acc)
in
(*
* The values are put in the tags map in reverse order
* that they are seen, this foldl will put it back in the
* correct order
*)
break_foldl ~f:check ~init:[] l >>= fun l ->
Ok l
let rec consume_type d f bits =
if Bitstring.bitstring_length bits = 0 then
Ok []
else begin
let open Result.Monad_infix in
d bits >>= fun (v, rest) ->
f v >>= fun v ->
consume_type d f rest >>= fun r ->
Ok (v::r)
end
let check_pkd_type d f l =
match List.last l with
| None ->
Ok []
| Some (Protocol.Value.Sequence last) ->
consume_type d f last
| Some _ ->
Error `Wrong_type
let extract_opt l =
return (List.last l)
let required = function
| Some x ->
return x
| None ->
fail `Incomplete
* Handling all the errors is ugly but it 's so we
* can get a n open polymorphic vairant in the type
* of [ run ] , to make it work nicely in a monad
* Handling all the errors is ugly but it's so we
* can get a n open polymorphic vairant in the type
* of [run], to make it work nicely in a monad
*)
let run t s =
match t.run s with
| Ok (v, s) ->
Ok (v, s)
| Error `Incomplete ->
Error `Incomplete
| Error `Overflow ->
Error `Overflow
| Error `Unknown_type ->
Error `Unknown_type
| Error `Wrong_type ->
Error `Wrong_type
let read tag f s =
let module S = State in
let open Result.Monad_infix in
match Int.Map.find s.S.tags tag with
| Some values ->
let s = { S.tags = Int.Map.remove s.S.tags tag } in
f values >>= fun a ->
Ok (a, s)
| None ->
f [] >>= fun a ->
Ok (a, s)
let make_t tag f =
{ run = read tag f }
let enum_rep tag c =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Varint v -> begin
match Int64.to_int v with
| Some v -> c v
| None -> Error `Overflow
end
| _ -> Error `Wrong_type))
let enum_opt tag c =
enum_rep tag c >>= extract_opt
let enum tag c =
enum_opt tag c >>= required
let enum_pkd tag c =
make_t
tag
(check_pkd_type
Varint.of_bitstring
(fun v ->
match Int64.to_int v with
| Some v -> c v
| None -> Error `Overflow))
let bool_conv = function
| 0 -> Ok false
| 1 -> Ok true
| _ -> Error `Overflow
let bool_rep tag =
let open Protocol.Value in
let open Int64 in
enum_rep
tag
bool_conv
let bool_opt tag =
bool_rep tag >>= extract_opt
let bool tag =
bool_opt tag >>= required
let bool_pkd tag =
let open Protocol.Value in
let open Int64 in
enum_pkd
tag
bool_conv
let int32_rep tag =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Varint v -> begin
match Int32.of_int64 v with
| Some v -> Ok v
| None -> Error `Overflow
end
| Fixed32 v ->
Ok v
| _ ->
Error `Wrong_type))
let int32_opt tag =
int32_rep tag >>= extract_opt
let int32 tag =
int32_opt tag >>= required
let int32_pkd tag =
make_t
tag
(check_pkd_type
Varint.of_bitstring
(fun v ->
match Int32.of_int64 v with
| Some v -> Ok v
| None -> Error `Overflow))
let sint32_rep tag =
int32_rep tag >>= fun ints ->
return (List.map ~f:Sint32.decode ints)
let sint32_opt tag =
sint32_rep tag >>= extract_opt
let sint32 tag =
sint32_opt tag >>= required
let sint32_pkd tag =
int32_pkd tag >>= fun ints ->
return (List.map ~f:Sint32.decode ints)
let int64_rep tag =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Varint v ->
Ok v
| Fixed64 v ->
Ok v
| _ ->
Error `Wrong_type))
let int64_opt tag =
int64_rep tag >>= extract_opt
let int64 tag =
int64_opt tag >>= required
let int64_pkd tag =
make_t
tag
(check_pkd_type
Varint.of_bitstring
(fun v -> Ok v))
let sint64_rep tag =
int64_rep tag >>= fun ints ->
return (List.map ~f:Sint64.decode ints)
let sint64_opt tag =
sint64_rep tag >>= extract_opt
let sint64 tag =
sint64_opt tag >>= required
let sint64_pkd tag =
int64_pkd tag >>= fun ints ->
return (List.map ~f:Sint64.decode ints)
let float_rep tag =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Fixed32 v ->
Ok (Int32.float_of_bits v)
| _ ->
Error `Wrong_type))
let float_opt tag =
float_rep tag >>= extract_opt
let float tag =
float_opt tag >>= required
let float_pkd tag =
make_t
tag
(check_pkd_type
Fixed32.of_bitstring
(fun v -> Ok (Int32.float_of_bits v)))
let double_rep tag =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Fixed64 v ->
Ok (Int64.float_of_bits v)
| _ ->
Error `Wrong_type))
let double_opt tag =
double_rep tag >>= extract_opt
let double tag =
double_opt tag >>= required
let double_pkd tag =
make_t
tag
(check_pkd_type
Fixed64.of_bitstring
(fun v -> Ok (Int64.float_of_bits v)))
let bytes_rep tag =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Sequence bits ->
Ok (Bitstring.string_of_bitstring bits)
| _ ->
Error `Wrong_type))
let bytes_opt tag =
bytes_rep tag >>= extract_opt
let bytes tag =
bytes_opt tag >>= required
let string_rep = bytes_rep
let string_opt = bytes_opt
let string = bytes
let embd_msg_rep tag r =
let open Protocol.Value in
let open Result.Monad_infix in
make_t
tag
(check_type
(function
| Sequence bits ->
State.create bits >>= fun s ->
run r s >>= fun (a, _) ->
Ok a
| _ ->
Error `Wrong_type))
let embd_msg_opt tag r =
embd_msg_rep tag r >>= extract_opt
let embd_msg tag r =
embd_msg_opt tag r >>= required
| null | https://raw.githubusercontent.com/orbitz/ocaml-protobuf/66228bbde1aba144b5b6b4b016df0d48e8092744/lib/protobuf/parser.ml | ocaml |
* The values are put in the tags map in reverse order
* that they are seen, this foldl will put it back in the
* correct order
| open Core.Std
type error = [ `Incomplete | `Overflow | `Unknown_type | `Wrong_type ]
module State = struct
type t = { tags : Protocol.Value.t list Int.Map.t
}
let tags_of_bits bits =
let append field map =
let module F = Protocol.Field in
match Int.Map.find map (F.tag field) with
| Some v ->
Int.Map.add ~key:(F.tag field) ~data:((F.value field)::v) map
| None ->
Int.Map.add ~key:(F.tag field) ~data:([F.value field]) map
in
let rec tags_of_bits' acc bits =
if Bitstring.bitstring_length bits > 0 then begin
let open Result.Monad_infix in
Protocol.next bits >>= fun (field, bits) ->
tags_of_bits' (append field acc) bits
end
else
Ok acc
in
tags_of_bits' (Int.Map.empty) bits
let create bits =
let open Result.Monad_infix in
tags_of_bits bits >>= fun tags ->
Ok { tags }
end
type 'a t = { run : State.t -> (('a * State.t), error) Result.t }
type tag = int
type 'a _t = 'a t
let fail err = { run = fun _ -> Error err }
include (Monad.Make (struct
type 'a t = 'a _t
let return a = { run = fun s -> Ok (a, s) }
let bind t f =
{ run = fun s ->
match t.run s with
| Ok (a, s') ->
let t' = f a in
t'.run s'
| Error `Incomplete ->
let t' = fail `Incomplete in
t'.run s
| Error `Overflow ->
let t' = fail `Overflow in
t'.run s
| Error `Unknown_type ->
let t' = fail `Unknown_type in
t'.run s
| Error `Wrong_type ->
let t' = fail `Wrong_type in
t'.run s
}
let map = `Define_using_bind
end) : Monad.S with type 'a t := 'a _t)
let rec break_foldl ~f ~init = function
| [] ->
Ok init
| x::xs ->
let open Result.Monad_infix in
f init x >>= fun init ->
break_foldl ~f ~init xs
let check_type f l =
let open Result.Monad_infix in
let check acc v =
f v >>= fun v ->
Ok (v::acc)
in
break_foldl ~f:check ~init:[] l >>= fun l ->
Ok l
let rec consume_type d f bits =
if Bitstring.bitstring_length bits = 0 then
Ok []
else begin
let open Result.Monad_infix in
d bits >>= fun (v, rest) ->
f v >>= fun v ->
consume_type d f rest >>= fun r ->
Ok (v::r)
end
let check_pkd_type d f l =
match List.last l with
| None ->
Ok []
| Some (Protocol.Value.Sequence last) ->
consume_type d f last
| Some _ ->
Error `Wrong_type
let extract_opt l =
return (List.last l)
let required = function
| Some x ->
return x
| None ->
fail `Incomplete
* Handling all the errors is ugly but it 's so we
* can get a n open polymorphic vairant in the type
* of [ run ] , to make it work nicely in a monad
* Handling all the errors is ugly but it's so we
* can get a n open polymorphic vairant in the type
* of [run], to make it work nicely in a monad
*)
let run t s =
match t.run s with
| Ok (v, s) ->
Ok (v, s)
| Error `Incomplete ->
Error `Incomplete
| Error `Overflow ->
Error `Overflow
| Error `Unknown_type ->
Error `Unknown_type
| Error `Wrong_type ->
Error `Wrong_type
let read tag f s =
let module S = State in
let open Result.Monad_infix in
match Int.Map.find s.S.tags tag with
| Some values ->
let s = { S.tags = Int.Map.remove s.S.tags tag } in
f values >>= fun a ->
Ok (a, s)
| None ->
f [] >>= fun a ->
Ok (a, s)
let make_t tag f =
{ run = read tag f }
let enum_rep tag c =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Varint v -> begin
match Int64.to_int v with
| Some v -> c v
| None -> Error `Overflow
end
| _ -> Error `Wrong_type))
let enum_opt tag c =
enum_rep tag c >>= extract_opt
let enum tag c =
enum_opt tag c >>= required
let enum_pkd tag c =
make_t
tag
(check_pkd_type
Varint.of_bitstring
(fun v ->
match Int64.to_int v with
| Some v -> c v
| None -> Error `Overflow))
let bool_conv = function
| 0 -> Ok false
| 1 -> Ok true
| _ -> Error `Overflow
let bool_rep tag =
let open Protocol.Value in
let open Int64 in
enum_rep
tag
bool_conv
let bool_opt tag =
bool_rep tag >>= extract_opt
let bool tag =
bool_opt tag >>= required
let bool_pkd tag =
let open Protocol.Value in
let open Int64 in
enum_pkd
tag
bool_conv
let int32_rep tag =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Varint v -> begin
match Int32.of_int64 v with
| Some v -> Ok v
| None -> Error `Overflow
end
| Fixed32 v ->
Ok v
| _ ->
Error `Wrong_type))
let int32_opt tag =
int32_rep tag >>= extract_opt
let int32 tag =
int32_opt tag >>= required
let int32_pkd tag =
make_t
tag
(check_pkd_type
Varint.of_bitstring
(fun v ->
match Int32.of_int64 v with
| Some v -> Ok v
| None -> Error `Overflow))
let sint32_rep tag =
int32_rep tag >>= fun ints ->
return (List.map ~f:Sint32.decode ints)
let sint32_opt tag =
sint32_rep tag >>= extract_opt
let sint32 tag =
sint32_opt tag >>= required
let sint32_pkd tag =
int32_pkd tag >>= fun ints ->
return (List.map ~f:Sint32.decode ints)
let int64_rep tag =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Varint v ->
Ok v
| Fixed64 v ->
Ok v
| _ ->
Error `Wrong_type))
let int64_opt tag =
int64_rep tag >>= extract_opt
let int64 tag =
int64_opt tag >>= required
let int64_pkd tag =
make_t
tag
(check_pkd_type
Varint.of_bitstring
(fun v -> Ok v))
let sint64_rep tag =
int64_rep tag >>= fun ints ->
return (List.map ~f:Sint64.decode ints)
let sint64_opt tag =
sint64_rep tag >>= extract_opt
let sint64 tag =
sint64_opt tag >>= required
let sint64_pkd tag =
int64_pkd tag >>= fun ints ->
return (List.map ~f:Sint64.decode ints)
let float_rep tag =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Fixed32 v ->
Ok (Int32.float_of_bits v)
| _ ->
Error `Wrong_type))
let float_opt tag =
float_rep tag >>= extract_opt
let float tag =
float_opt tag >>= required
let float_pkd tag =
make_t
tag
(check_pkd_type
Fixed32.of_bitstring
(fun v -> Ok (Int32.float_of_bits v)))
let double_rep tag =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Fixed64 v ->
Ok (Int64.float_of_bits v)
| _ ->
Error `Wrong_type))
let double_opt tag =
double_rep tag >>= extract_opt
let double tag =
double_opt tag >>= required
let double_pkd tag =
make_t
tag
(check_pkd_type
Fixed64.of_bitstring
(fun v -> Ok (Int64.float_of_bits v)))
let bytes_rep tag =
let open Protocol.Value in
make_t
tag
(check_type
(function
| Sequence bits ->
Ok (Bitstring.string_of_bitstring bits)
| _ ->
Error `Wrong_type))
let bytes_opt tag =
bytes_rep tag >>= extract_opt
let bytes tag =
bytes_opt tag >>= required
let string_rep = bytes_rep
let string_opt = bytes_opt
let string = bytes
let embd_msg_rep tag r =
let open Protocol.Value in
let open Result.Monad_infix in
make_t
tag
(check_type
(function
| Sequence bits ->
State.create bits >>= fun s ->
run r s >>= fun (a, _) ->
Ok a
| _ ->
Error `Wrong_type))
let embd_msg_opt tag r =
embd_msg_rep tag r >>= extract_opt
let embd_msg tag r =
embd_msg_opt tag r >>= required
|
cd9c867c4fd5582ad25b60d583c9f33893f330d1facbfa82b293a5b028aecb55 | kmi/irs | common-concepts.lisp | -*- Mode : LISP ; Syntax : Common - lisp ; Base : 10 ; Package : OCML ; -*-
(in-package "OCML")
(in-ontology common-concepts)
(def-relation has-address (?x ?c)
:constraint (and (or (organization ?x)
(person ?x))
(postal-address ?c))
)
(def-relation HAS-WEB-ADDRESS (?x ?C)
:constraint (URL ?c))
(def-relation has-author (?x ?C)
"?C has been produced by ?x")
(def-class generic-agent ())
(def-class generic-agent-type () ?x
:iff-def (subclass-of ?X generic-agent))
(def-class person (generic-agent temporal-thing)
((full-name :type string)
(has-gender :type gender)
(has-address :type postal-address)
(has-web-address :type web-page)
(has-email-address :type email-address)))
(def-class URL (string))
(def-class email-address ())
(def-class man (person)
((has-gender :value male)))
(def-class woman (person)
((has-gender :value female)))
(def-class child (person))
(def-class male-child (child)
((has-gender :value male)))
(def-class female-child (child)
((has-gender :value female)))
(def-class gender () ?x
()
:iff-def (member ?x (male-gender female-gender)))
(def-instance male gender)
(def-instance female gender)
(def-class technology (temporal-thing)
((has-full-name :type string)
(has-author :type person)
(made-by :type organization)
(technology-builds-on :type technology)))
(def-class uk-location ())
(def-class uk-county (uk-location)
((has-name :type string)
(has-alternative-name :type string)
(has-town :type uk-town)))
(def-class uk-town (uk-location)
((has-name :type string)
(has-county :type uk-county)))
(def-class organization (temporal-thing)
((has-full-name :type string)
(has-web-address :type URL)
(has-address :type (or postal-address uk-address))
(affiliated-people :type affiliated-person)
(organization-part-of :type organization)
(has-organization-unit :type organization-unit)
(headed-by :type affiliated-person)
(has-organization-size :type organization-size-type)
(in-economic-sector :type economic-sector-type)))
(def-class affiliated-person (person)
((has-affiliation :type organization :min-cardinality 1)))
(def-class postal-address ()
((address-street :type string)
(address-area :type local-district)
(address-number :type integer)
(address-building :type string)
(address-city-or-village :type municipal-unit)
(address-postcode :type string)
(address-region :type geographical-region)
(address-country :type country)))
(def-class uk-address (postal-address)
((address-country :value united-kingdom)
(address-county :type uk-county)
(address-city-or-village :type uk-town)))
(def-class human-settlement ())
(def-class geo-political-region (human-settlement )
((has-name :type string)))
(def-class location ()
((has-name :type string)))
(def-class geographical-region (location ))
(def-class Geopolitical-Entity (Geographical-Region Generic-Agent))
(def-class city (geopolitical-entity)
((located-in-country :type country)))
(def-class village (geopolitical-entity))
(def-class local-district (geopolitical-entity))
(def-class country (Geopolitical-Entity)
((has-capital :type capital-city)))
(def-class document ()
((has-author :type person)))
(def-class lecture ()
((has-title :type string)
(has-author :type person)))
(def-class demonstration ()
((has-title :type string)
(has-author :type person)
(thing-demoed)))
(def-class degree ())
(def-class academic-degree (degree))
(def-instance phd academic-degree)
(def-instance msc academic-degree)
(def-instance ba academic-degree)
;;;;;;;;;;
(def-class communication-technology ())
(def-class communication-medium (communication-technology))
(def-class computing-technology (technology)
())
(def-class hardware-technology (computing-technology))
(def-class hardware-platform (hardware-technology))
(def-class software-technology (computing-technology)
(
(hardware-platforms :type hardware-platform)
(runs-on-operating-system :type operating-system)
(software-requirements :type software-technology)
(status :type software-status
:documentation "Whether the software is finished, alpha or beta")))
(def-class internet-technology (software-technology hardware-technology))
(def-class web-technology (internet-technology )
((technology-builds-on :value web)))
(def-instance web internet-technology)
;;;;;;;;;;;;;;;;;;;;
(def-class publishing-medium (communication-medium))
(def-class electronic-publishing-medium (publishing-medium ))
(def-class paper-based-publishing-medium (publishing-medium ))
(def-class news ()
"News is a collection of news-item"
((has-news-items :type news-item
:min-cardinality 1)))
(def-relation has-contents (?x ?c)
:constraint (and (news-item ?X)
(string ?c)))
(def-class news-item (temporal-thing)
((has-author :type person)
(has-headline :type string)
(has-contents :type string)
(expressed-in-medium :type publishing-medium)
(published-date :type calendar-date)
(relates-events :min-cardinality 1 :type event))
:slot-renaming ((published-date start-time)))
| null | https://raw.githubusercontent.com/kmi/irs/e1b8d696f61c6b6878c0e92d993ed549fee6e7dd/ontologies/domains/common-concepts/common-concepts.lisp | lisp | Syntax : Common - lisp ; Base : 10 ; Package : OCML ; -*-
|
(in-package "OCML")
(in-ontology common-concepts)
(def-relation has-address (?x ?c)
:constraint (and (or (organization ?x)
(person ?x))
(postal-address ?c))
)
(def-relation HAS-WEB-ADDRESS (?x ?C)
:constraint (URL ?c))
(def-relation has-author (?x ?C)
"?C has been produced by ?x")
(def-class generic-agent ())
(def-class generic-agent-type () ?x
:iff-def (subclass-of ?X generic-agent))
(def-class person (generic-agent temporal-thing)
((full-name :type string)
(has-gender :type gender)
(has-address :type postal-address)
(has-web-address :type web-page)
(has-email-address :type email-address)))
(def-class URL (string))
(def-class email-address ())
(def-class man (person)
((has-gender :value male)))
(def-class woman (person)
((has-gender :value female)))
(def-class child (person))
(def-class male-child (child)
((has-gender :value male)))
(def-class female-child (child)
((has-gender :value female)))
(def-class gender () ?x
()
:iff-def (member ?x (male-gender female-gender)))
(def-instance male gender)
(def-instance female gender)
(def-class technology (temporal-thing)
((has-full-name :type string)
(has-author :type person)
(made-by :type organization)
(technology-builds-on :type technology)))
(def-class uk-location ())
(def-class uk-county (uk-location)
((has-name :type string)
(has-alternative-name :type string)
(has-town :type uk-town)))
(def-class uk-town (uk-location)
((has-name :type string)
(has-county :type uk-county)))
(def-class organization (temporal-thing)
((has-full-name :type string)
(has-web-address :type URL)
(has-address :type (or postal-address uk-address))
(affiliated-people :type affiliated-person)
(organization-part-of :type organization)
(has-organization-unit :type organization-unit)
(headed-by :type affiliated-person)
(has-organization-size :type organization-size-type)
(in-economic-sector :type economic-sector-type)))
(def-class affiliated-person (person)
((has-affiliation :type organization :min-cardinality 1)))
(def-class postal-address ()
((address-street :type string)
(address-area :type local-district)
(address-number :type integer)
(address-building :type string)
(address-city-or-village :type municipal-unit)
(address-postcode :type string)
(address-region :type geographical-region)
(address-country :type country)))
(def-class uk-address (postal-address)
((address-country :value united-kingdom)
(address-county :type uk-county)
(address-city-or-village :type uk-town)))
(def-class human-settlement ())
(def-class geo-political-region (human-settlement )
((has-name :type string)))
(def-class location ()
((has-name :type string)))
(def-class geographical-region (location ))
(def-class Geopolitical-Entity (Geographical-Region Generic-Agent))
(def-class city (geopolitical-entity)
((located-in-country :type country)))
(def-class village (geopolitical-entity))
(def-class local-district (geopolitical-entity))
(def-class country (Geopolitical-Entity)
((has-capital :type capital-city)))
(def-class document ()
((has-author :type person)))
(def-class lecture ()
((has-title :type string)
(has-author :type person)))
(def-class demonstration ()
((has-title :type string)
(has-author :type person)
(thing-demoed)))
(def-class degree ())
(def-class academic-degree (degree))
(def-instance phd academic-degree)
(def-instance msc academic-degree)
(def-instance ba academic-degree)
(def-class communication-technology ())
(def-class communication-medium (communication-technology))
(def-class computing-technology (technology)
())
(def-class hardware-technology (computing-technology))
(def-class hardware-platform (hardware-technology))
(def-class software-technology (computing-technology)
(
(hardware-platforms :type hardware-platform)
(runs-on-operating-system :type operating-system)
(software-requirements :type software-technology)
(status :type software-status
:documentation "Whether the software is finished, alpha or beta")))
(def-class internet-technology (software-technology hardware-technology))
(def-class web-technology (internet-technology )
((technology-builds-on :value web)))
(def-instance web internet-technology)
(def-class publishing-medium (communication-medium))
(def-class electronic-publishing-medium (publishing-medium ))
(def-class paper-based-publishing-medium (publishing-medium ))
(def-class news ()
"News is a collection of news-item"
((has-news-items :type news-item
:min-cardinality 1)))
(def-relation has-contents (?x ?c)
:constraint (and (news-item ?X)
(string ?c)))
(def-class news-item (temporal-thing)
((has-author :type person)
(has-headline :type string)
(has-contents :type string)
(expressed-in-medium :type publishing-medium)
(published-date :type calendar-date)
(relates-events :min-cardinality 1 :type event))
:slot-renaming ((published-date start-time)))
|
85725b0366e781f0a0901789e6c8ab18d8213be73ace8f52eac8bcab33323081 | franks42/clj-ns-browser | web.clj | ;; This file was created by copying src/clj/clojure/java/browse.clj
;; from the Clojure source code, then applying the patch file
;; clj-896-browse-url-uses-xdg-open-patch2.txt to it (available at the
;; URL below), and then changing the namespace name.
;; The intent is that if and when Clojure's
;; clojure.java.browser/browse-url has been updated appropriately,
;; this file and namespace can be removed completely.
;; Until then, there are platforms, like Lubuntu, where the built-in
;; browse-url uses a very poor HTML renderer.
Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns
^{:author "Christophe Grand",
:doc "Start a web browser from Clojure"}
clj-ns-browser.web
(:require [clojure.java.shell :as sh]
[clojure.string :as str])
(:import (java.net URI)))
(defn- macosx?
  "True when the JVM's os.name system property, lower-cased, starts
  with \"mac os x\"."
  []
  (.startsWith (.toLowerCase (System/getProperty "os.name"))
               "mac os x"))
(defn- xdg-open-loc
  "Returns the path to the xdg-open executable as reported by `which`,
  with the trailing newline stripped, or nil when `which` prints
  nothing. The try/catch masks the exception thrown on platforms
  (e.g. Windows) that have no `which` command."
  []
  (let [out (try
              (:out (sh/sh "which" "xdg-open"))
              (catch Exception _ ""))]
    (when-not (= out "")
      (str/trim-newline out))))
(defn- open-url-script-val
  "Chooses the platform launcher used to open URLs: /usr/bin/open on
  Mac OS X, otherwise wherever `which` found xdg-open (nil if absent)."
  []
  (cond
    (macosx?) "/usr/bin/open"
    :else     (xdg-open-loc)))
;; We could assign (open-url-script-val) to *open-url-script* right
;; away in the def below, but clojure.java.shell/sh creates a future
;; that causes a long wait for the JVM to exit during Clojure compiles
;; (unless we can somehow here make it call (shutdown-agents) later).
;; Better to initialize it when we first need it, in browse-url.
;; Holds :uninitialized until browse-url first runs, then the launcher
;; path chosen by open-url-script-val (possibly nil).
(def ^:dynamic *open-url-script* (atom :uninitialized))
(defn- open-url-in-browser
  "Opens url (a string) in the default system web browser via
  java.awt.Desktop, invoked reflectively so AWT is only touched when
  present. Returns url on success, nil if not supported (Desktop
  reports no support, or the class cannot be found)."
  [url]
  (try
    (if (clojure.lang.Reflector/invokeStaticMethod
         "java.awt.Desktop" "isDesktopSupported" (to-array nil))
      (let [desktop (clojure.lang.Reflector/invokeStaticMethod
                     "java.awt.Desktop" "getDesktop" (to-array nil))]
        (.browse desktop (URI. url))
        url)
      nil)
    (catch ClassNotFoundException _
      nil)))
(defn- open-url-in-swing
  "Opens url (a string) in a Swing window.
  The implementation lives in clojure.java.browse-ui and is loaded on
  demand, which avoids turning the process into a GUI app at load time
  (clojure-contrib issue #32)."
  [url]
  (require 'clojure.java.browse-ui)
  (let [open-fn (find-var 'clojure.java.browse-ui/open-url-in-swing)]
    (open-fn url)))
(defn browse-url
  "Open url in a browser.
  Tries, in order: the platform launcher script (resolved once and
  cached in *open-url-script*), java.awt.Desktop, and finally a Swing
  window."
  {:added "1.2"}
  [url]
  (let [script @*open-url-script*
        script (if (= :uninitialized script)
                 (reset! *open-url-script* (open-url-script-val))
                 script)]
    (or (when script
          ;; Only count the launcher as a success when it exits 0;
          ;; previously the exit status was ignored, so a failing
          ;; script suppressed the Desktop/Swing fallbacks.
          (zero? (:exit (sh/sh script (str url)))))
        (open-url-in-browser url)
        (open-url-in-swing url))))
| null | https://raw.githubusercontent.com/franks42/clj-ns-browser/c5fc570f9c8aeb6dd1cc9383fcf32bdaa9b936e3/src/clj_ns_browser/web.clj | clojure | This file was created by copying src/clj/clojure/java/browse.clj
clj-896-browse-url-uses-xdg-open-patch2.txt to it (available at the
URL below), and then changing the namespace name.
clojure.java.browser/browse-url has been updated appropriately,
this file and namespace can be removed completely.
browse-url uses a very poor HTML renderer.
The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
We could assign (open-url-script-val) to *open-url-script* right
away in the def below, but clojure.java.shell/sh creates a future
(unless we can somehow here make it call (shutdown-agents) later).
the implementation of this function resides in another namespace to be loaded "on demand"
see -contrib/issues/detail?id=32 | from the Clojure source code , then applying the patch file
The intent is that if and when Clojure 's
Until then , there are platforms , like Lubuntu , where the built - in
Copyright ( c ) . All rights reserved .
(ns
^{:author "Christophe Grand",
:doc "Start a web browser from Clojure"}
clj-ns-browser.web
(:require [clojure.java.shell :as sh]
[clojure.string :as str])
(:import (java.net URI)))
(defn- macosx? []
(-> "os.name" System/getProperty .toLowerCase
(.startsWith "mac os x")))
(defn- xdg-open-loc []
try / catch needed to mask exception on Windows without
(let [which-out (try (:out (sh/sh "which" "xdg-open"))
(catch Exception e ""))]
(if (= which-out "")
nil
(str/trim-newline which-out))))
(defn- open-url-script-val []
(if (macosx?)
"/usr/bin/open"
(xdg-open-loc)))
that causes a long wait for the JVM to exit during Clojure compiles
Better to initialize it when we first need it , in browse - url .
(def ^:dynamic *open-url-script* (atom :uninitialized))
(defn- open-url-in-browser
"Opens url (a string) in the default system web browser. May not
work on all platforms. Returns url on success, nil if not
supported."
[url]
(try
(when (clojure.lang.Reflector/invokeStaticMethod "java.awt.Desktop"
"isDesktopSupported" (to-array nil))
(-> (clojure.lang.Reflector/invokeStaticMethod "java.awt.Desktop"
"getDesktop" (to-array nil))
(.browse (URI. url)))
url)
(catch ClassNotFoundException e
nil)))
(defn- open-url-in-swing
"Opens url (a string) in a Swing window."
[url]
this fixes a bug on where the process turns into a GUI app
(require 'clojure.java.browse-ui)
((find-var 'clojure.java.browse-ui/open-url-in-swing) url))
(defn browse-url
"Open url in a browser"
{:added "1.2"}
[url]
(let [script @*open-url-script*
script (if (= :uninitialized script)
(reset! *open-url-script* (open-url-script-val))
script)]
(or (when script (sh/sh script (str url)) true)
(open-url-in-browser url)
(open-url-in-swing url))))
|
329a19a0b8d1407ac4edce740332e8a90264cb1468438fe4b26622d5c781cb3d | exoscale/seql | crud_integration_test.clj | (ns seql.crud-integration-test
(:require [seql.mutation :refer [mutate!]]
[clojure.spec.alpha :as s]
[seql.env :as env]
[seql.query :as q]
[seql.helpers :refer [make-schema entity-from-spec add-create-mutation
add-update-by-id-mutation add-delete-by-id-mutation]]
[db.fixtures :refer [jdbc-config with-db-fixtures]]
[clojure.test :refer [use-fixtures testing deftest is]]))
;; Load the :small database fixture around every test in this namespace.
(use-fixtures :each (with-db-fixtures :small))
;; Create the entity namespaces up front so the ::account/... keywords
;; below resolve without real namespace files on the classpath.
(create-ns 'my.entities)
(create-ns 'my.entities.account)
(alias 'account 'my.entities.account)
;; Specs describing the account entity and its CRUD payloads.
(s/def ::account/name string?)
(s/def ::account/id nat-int?)
(s/def ::account/state #{:active :suspended :terminated})
;; A stored account: id optional (assigned by the database), name and
;; state required.
(s/def ::account/account (s/keys :opt [::account/id] :req [::account/name ::account/state]))
(s/def ::account/create ::account/account)
(s/def ::account/update (s/keys :opt [::account/name ::account/state]))
(s/def ::account/delete (s/keys :req [::account/id]))
(def schema
  "As gradually explained in the project's README"
  ;; Derive the entity from the ::account/account spec and attach the
  ;; three CRUD mutations (create, update-by-id, delete-by-id), the
  ;; latter two keyed on ::account/id.
  (make-schema
   (entity-from-spec ::account/account
                     (add-create-mutation)
                     (add-update-by-id-mutation ::account/id)
                     (add-delete-by-id-mutation ::account/id))))
;; SEQL environment combining the JDBC test configuration with the
;; schema above; every query and mutation below runs against it.
(def env
  (env/make-env jdbc-config schema))
(deftest crud-test
  ;; Exercises the full create -> read -> update -> delete cycle for
  ;; the account entity. The steps are order-dependent: the id the
  ;; database assigns at create time (captured in stored-id) drives the
  ;; later update and delete.
  (let [stored-id (atom nil)]
    (testing "adding a new account"
      (mutate! env ::account/create #::account{:name "foo" :state :active}))
    (testing "retrieving newly created account by name"
      (let [{::account/keys [id state] :as account} (q/execute env [::account/name "foo"])]
        (is (some? account))
        (is (= state :active))
        (is (pos? id))
        ;; Remember the database-assigned id for the steps below.
        (reset! stored-id id)))
    (testing "updating account state"
      (when (some? @stored-id)
        (mutate! env ::account/update #::account{:id @stored-id :state :suspended}))
      ;; The full record should now carry the new state, same id/name.
      (is (= #::account{:id @stored-id :state :suspended :name "foo"}
             (q/execute env [::account/name "foo"]))))
    (testing "deleting account"
      (when (some? @stored-id)
        (mutate! env ::account/delete {::account/id @stored-id}))
      ;; After deletion the lookup by name must come back empty.
      (is (nil? (q/execute env [::account/name "foo"]))))))
| null | https://raw.githubusercontent.com/exoscale/seql/7142132a5c364baf201936052095589489320e99/test/seql/crud_integration_test.clj | clojure | (ns seql.crud-integration-test
(:require [seql.mutation :refer [mutate!]]
[clojure.spec.alpha :as s]
[seql.env :as env]
[seql.query :as q]
[seql.helpers :refer [make-schema entity-from-spec add-create-mutation
add-update-by-id-mutation add-delete-by-id-mutation]]
[db.fixtures :refer [jdbc-config with-db-fixtures]]
[clojure.test :refer [use-fixtures testing deftest is]]))
(use-fixtures :each (with-db-fixtures :small))
(create-ns 'my.entities)
(create-ns 'my.entities.account)
(alias 'account 'my.entities.account)
(s/def ::account/name string?)
(s/def ::account/id nat-int?)
(s/def ::account/state #{:active :suspended :terminated})
(s/def ::account/account (s/keys :opt [::account/id] :req [::account/name ::account/state]))
(s/def ::account/create ::account/account)
(s/def ::account/update (s/keys :opt [::account/name ::account/state]))
(s/def ::account/delete (s/keys :req [::account/id]))
(def schema
"As gradually explained in the project's README"
(make-schema
(entity-from-spec ::account/account
(add-create-mutation)
(add-update-by-id-mutation ::account/id)
(add-delete-by-id-mutation ::account/id))))
(def env
(env/make-env jdbc-config schema))
(deftest crud-test
(let [stored-id (atom nil)]
(testing "adding a new account"
(mutate! env ::account/create #::account{:name "foo" :state :active}))
(testing "retrieving newly created account by name"
(let [{::account/keys [id state] :as account} (q/execute env [::account/name "foo"])]
(is (some? account))
(is (= state :active))
(is (pos? id))
(reset! stored-id id)))
(testing "updating account state"
(when (some? @stored-id)
(mutate! env ::account/update #::account{:id @stored-id :state :suspended}))
(is (= #::account{:id @stored-id :state :suspended :name "foo"}
(q/execute env [::account/name "foo"]))))
(testing "deleting account"
(when (some? @stored-id)
(mutate! env ::account/delete {::account/id @stored-id}))
(is (nil? (q/execute env [::account/name "foo"]))))))
| |
8d20f8ac4ca9e87b15a1510f74cafd664abf29e885393288c1aa6cea7a4c7d1a | HaskellZhangSong/Introduction_to_Haskell_2ed_source | derive.hs | -- derive.hs
# LANGUAGE TemplateHaskell #
# OPTIONS_GHC -ddump - splices #
import Data.Derive.Class.Arities
import Data.Derive.Arities
import Data.Derive.Show
import Data.Derive.Eq
import Data.DeriveTH
-- | Sample type exercised by the derive splices below: a circle with a
-- radius, or a triangle with three side lengths.
data Shape = Circle Double | Triangle Double Double Double

-- Template Haskell splices from Data.DeriveTH generating an Eq
-- instance, a Show instance and an arities definition for Shape
-- (the -ddump-splices option above prints the generated code).
derive makeEq ''Shape
derive makeShow ''Shape
derive makeArities ''Shape
| # LANGUAGE TemplateHaskell #
# OPTIONS_GHC -ddump - splices #
import Data.Derive.Class.Arities
import Data.Derive.Arities
import Data.Derive.Show
import Data.Derive.Eq
import Data.DeriveTH
data Shape = Circle Double | Triangle Double Double Double
derive makeEq ''Shape
derive makeShow ''Shape
derive makeArities ''Shape |
4b7d532c561e45d2de8f92338e49e78e120f121df893242b65c9e3561e0ae7b8 | yzh44yzh/erma | returning_tests.erl | -module(returning_tests).
-include("erma.hrl").
-include_lib("eunit/include/eunit.hrl").
%% Table-driven eunit generator: test_utils:generate/1 turns each
%% {QuerySpec, ExpectedSQL} pair below into a test asserting that erma
%% renders the spec to exactly that SQL, RETURNING clause included.
returning_test_() ->
    test_utils:generate(
      [{
        %% INSERT with a column list, returning a single column
        {insert, "users", ["first", "last"], ["Chris", "Granger"], [{returning, id}]},
        %% expected SQL
        <<"INSERT INTO users (\"first\", \"last\") VALUES ('Chris', 'Granger') RETURNING id">>
       },
       {
        %% INSERT returning several columns
        {insert, "users", ["first", "last", "age"], ["Bob", "Dou", 25],
         [{returning, ["id", "first", "last"]}]},
        %% expected SQL
        <<"INSERT INTO users (\"first\", \"last\", age) VALUES ('Bob', 'Dou', 25) ",
          "RETURNING id, \"first\", \"last\"">>
       },
       {
        %% INSERT with placeholder values
        {insert, "users", ["first", "last"], ["?", "?"], [{returning, id}]},
        %% expected SQL
        <<"INSERT INTO users (\"first\", \"last\") VALUES (?, ?) RETURNING id">>
       },
       {
        %% multi-row INSERT (insert_rows)
        {insert_rows, "users", ["first", "last", "age"],
         [["Bill", "Foo", 24], ["Bob", "Dou", 25], ["Helen", "Rice", 21]],
         [{returning, id}]},
        %% expected SQL
        <<"INSERT INTO users (\"first\", \"last\", age) ",
          "VALUES ('Bill', 'Foo', 24), ('Bob', 'Dou', 25), ('Helen', 'Rice', 21) ",
          "RETURNING id">>
       },
       {
        %% INSERT without a column list; returning mixes string and
        %% binary column names
        {insert, <<"users">>, [],
         [5, "Bob", "Dou", 25],
         [{returning, ["name", <<"age">>, "id"]}]},
        %% expected SQL
        <<"INSERT INTO users VALUES (5, 'Bob', 'Dou', 25) ",
          "RETURNING \"name\", age, id">>
       },
       {
        %% multi-row INSERT without a column list
        {insert_rows, "users", [],
         [[1, "Bob", "Dou", 65], [6, "Bill", "Foo", 31]],
         [{returning, id}]},
        %% expected SQL
        <<"INSERT INTO users VALUES (1, 'Bob', 'Dou', 65), (6, 'Bill', 'Foo', 31) ",
          "RETURNING id">>
       },
       {
        %% UPDATE without a WHERE clause
        {update, "users", [{"first", "Chris"}], [{returning, id}]},
        %% expected SQL
        <<"UPDATE users SET \"first\" = 'Chris' RETURNING id">>
       },
       {
        %% UPDATE with placeholders, WHERE and binary returning names
        {update, <<"users">>, [{"first", "?"}],
         [{where, [{"id", "?"}]},
          {returning, [<<"id">>, <<"first">>]}]},
        %% expected SQL
        <<"UPDATE users SET \"first\" = ? WHERE id = ? RETURNING id, \"first\"">>
       },
       {
        %% UPDATE mixing literal and placeholder values, long
        %% returning list
        {update, "users", [{"first", "Chris"}, {"last", "?"}],
         [{where, [{"id", "?"}]},
          {returning, ["id", "name", <<"first">>, "last", <<"age">>]}]},
        %% expected SQL
        <<"UPDATE users SET \"first\" = 'Chris', \"last\" = ? WHERE id = ? ",
          "RETURNING id, \"name\", \"first\", \"last\", age">>
       },
       {
        %% UPDATE of two placeholder columns, no WHERE
        {update, "users",
         [{"first", "?"}, {"last", "?"}],
         [{returning, ["id"]}]},
        %% expected SQL
        <<"UPDATE users SET \"first\" = ?, \"last\" = ? RETURNING id">>
       },
       {
        %% UPDATE of two literal columns with WHERE
        {update, "users",
         [{"first", "Chris"},
          {"last", "Granger"}],
         [{where, [{"id", 3}]},
          {returning, id}]},
        %% expected SQL
        <<"UPDATE users SET \"first\" = 'Chris', \"last\" = 'Granger' ",
          "WHERE id = 3 RETURNING id">>
       },
       {
        %% DELETE with a WHERE clause
        {delete, "users", [{where, [{"id", 3}]}, {returning, id}]},
        %% expected SQL
        <<"DELETE FROM users WHERE id = 3 RETURNING id">>
       },
       {
        %% DELETE with an empty WHERE list (clause omitted)
        {delete, "users", [{where, []}, {returning, id}]},
        %% expected SQL
        <<"DELETE FROM users RETURNING id">>
       },
       {
        %% DELETE returning two columns
        {delete, "users", [{returning, ["name", "age"]}]},
        %% expected SQL
        <<"DELETE FROM users RETURNING \"name\", age">>
       },
       {
        %% DELETE returning a single column
        {delete, "users", [{returning, id}]},
        %% expected SQL
        <<"DELETE FROM users RETURNING id">>
       }
      ]).
| null | https://raw.githubusercontent.com/yzh44yzh/erma/454f127fcf6df0407de2f357f40a1d57f016d694/test/returning_tests.erl | erlang | -module(returning_tests).
-include("erma.hrl").
-include_lib("eunit/include/eunit.hrl").
returning_test_() ->
test_utils:generate(
[{
{insert, "users", ["first", "last"], ["Chris", "Granger"], [{returning, id}]},
<<"INSERT INTO users (\"first\", \"last\") VALUES ('Chris', 'Granger') RETURNING id">>
},
{
{insert, "users", ["first", "last", "age"], ["Bob", "Dou", 25],
[{returning, ["id", "first", "last"]}]},
<<"INSERT INTO users (\"first\", \"last\", age) VALUES ('Bob', 'Dou', 25) ",
"RETURNING id, \"first\", \"last\"">>
},
{
{insert, "users", ["first", "last"], ["?", "?"], [{returning, id}]},
<<"INSERT INTO users (\"first\", \"last\") VALUES (?, ?) RETURNING id">>
},
{
{insert_rows, "users", ["first", "last", "age"],
[["Bill", "Foo", 24], ["Bob", "Dou", 25], ["Helen", "Rice", 21]],
[{returning, id}]},
<<"INSERT INTO users (\"first\", \"last\", age) ",
"VALUES ('Bill', 'Foo', 24), ('Bob', 'Dou', 25), ('Helen', 'Rice', 21) ",
"RETURNING id">>
},
{
{insert, <<"users">>, [],
[5, "Bob", "Dou", 25],
[{returning, ["name", <<"age">>, "id"]}]},
<<"INSERT INTO users VALUES (5, 'Bob', 'Dou', 25) ",
"RETURNING \"name\", age, id">>
},
{
{insert_rows, "users", [],
[[1, "Bob", "Dou", 65], [6, "Bill", "Foo", 31]],
[{returning, id}]},
<<"INSERT INTO users VALUES (1, 'Bob', 'Dou', 65), (6, 'Bill', 'Foo', 31) ",
"RETURNING id">>
},
{
{update, "users", [{"first", "Chris"}], [{returning, id}]},
<<"UPDATE users SET \"first\" = 'Chris' RETURNING id">>
},
{
{update, <<"users">>, [{"first", "?"}],
[{where, [{"id", "?"}]},
{returning, [<<"id">>, <<"first">>]}]},
<<"UPDATE users SET \"first\" = ? WHERE id = ? RETURNING id, \"first\"">>
},
{
{update, "users", [{"first", "Chris"}, {"last", "?"}],
[{where, [{"id", "?"}]},
{returning, ["id", "name", <<"first">>, "last", <<"age">>]}]},
<<"UPDATE users SET \"first\" = 'Chris', \"last\" = ? WHERE id = ? ",
"RETURNING id, \"name\", \"first\", \"last\", age">>
},
{
{update, "users",
[{"first", "?"}, {"last", "?"}],
[{returning, ["id"]}]},
<<"UPDATE users SET \"first\" = ?, \"last\" = ? RETURNING id">>
},
{
{update, "users",
[{"first", "Chris"},
{"last", "Granger"}],
[{where, [{"id", 3}]},
{returning, id}]},
<<"UPDATE users SET \"first\" = 'Chris', \"last\" = 'Granger' ",
"WHERE id = 3 RETURNING id">>
},
{
{delete, "users", [{where, [{"id", 3}]}, {returning, id}]},
<<"DELETE FROM users WHERE id = 3 RETURNING id">>
},
{
{delete, "users", [{where, []}, {returning, id}]},
<<"DELETE FROM users RETURNING id">>
},
{
{delete, "users", [{returning, ["name", "age"]}]},
<<"DELETE FROM users RETURNING \"name\", age">>
},
{
{delete, "users", [{returning, id}]},
<<"DELETE FROM users RETURNING id">>
}
]).
| |
a58ad09b54b7cf25ac2b83f00a3bb75dc6d50fe50d9bdb0d072a09bfa11a9513 | skanev/playground | 45.scm | SICP exercise 5.45
;
; By comparing the stack operations used by compiled code to the stack
; operations used by the evaluator for the same computation, we can determine
; the extent to which the compiler optimizes use of the stack, both in speed
; (reducing the total number of stack operations) and in space (reducing the
; maximum stack depth). Comparing this optimized stack use to performance of a
; special-purpose machine for the same computation gives some indication of
; the quality of the compiler.
;
; a. Exercise 5.27 asked you to determine, as a function of n, the number of
; pushes and the number of maximum stack depth needed by the evaluator to
; compute n! using the recursive factorial procedure given above. Exercise
5.14 asked you to do the same measurements for the special - purpose factorial
machine shown in figure 5.11 . Now perform the same analysis using the
; compiled factorial procedure.
;
; Take the ratio of the number of pushes in the compiled version to the number
; of pushes in the interpreted version, and do the same for the maximum stack
; depth. Since the number of operations and the stack depth used to compute n!
; are linear in n, these ratios should approach constants as n becomes large.
; What are these constants? Similarly, find the ratios of the stack usage in a
; special-purpose machine to the usage in the interpreted version.
;
; Compare the ratios for the special-purpose versus interpreted code to the
; ratios for compiled versus interpreted code. You should find that the
; special-purpose machine does much better than the compiled code, since the
; hand-tailored controller code should be much better than what is produced by
; our rudimentary general-purpose compiler.
;
; b. Can you suggest improvements to the compiler that would help it generate
; code that would come closer in performance to the hand-tailored version?
; a. Let's compare both the open-coding compiler and the simpler one.
;
; Without open-coding optimizations:
1 ! takes ( total - pushes = 7 maximum - depth = 3 )
2 ! takes ( total - pushes = 13 maximum - depth = 5 )
3 ! takes ( total - pushes = 19 maximum - depth = 8)
4 ! takes ( total - pushes = 25 maximum - depth = 11 )
5 ! takes ( total - pushes = 31 maximum - depth = 14 )
6 ! takes ( total - pushes = 37 maximum - depth = 17 )
7 ! takes ( total - pushes = 43 maximum - depth = 20 )
8 ! takes ( total - pushes = 49 maximum - depth = 23 )
9 ! takes ( total - pushes = 55 maximum - depth = 26 )
; With open-coding optimizations:
1 ! takes ( total - pushes = 5 maximum - depth = 3 )
2 ! takes ( total - pushes = 7 maximum - depth = 3 )
3 ! takes ( total - pushes = 9 maximum - depth = 4 )
4 ! takes ( total - pushes = 11 maximum - depth = 6 )
5 ! takes ( total - pushes = 13 maximum - depth = 8)
6 ! takes ( total - pushes = 15 maximum - depth = 10 )
7 ! takes ( total - pushes = 17 maximum - depth = 12 )
8 ! takes ( total - pushes = 19 maximum - depth = 14 )
9 ! takes ( total - pushes = 21 maximum - depth = 16 )
;
; As usual, code to reproduce is below.
;
; Now we can do a table
; +----+-----------------------+-----------------------+
; | | total-pushes | maximum-depth |
; | +-----+-----+-----+-----+-----+-----+-----+-----+
; | | int | cmp | opc | sht | int | cmp | opc | sht |
; +----+-----+-----+-----+-----+-----+-----+-----+-----+
| 1 ! | 16 | 7 | 5 | 0 | 8 | 3 | 3 | 0 |
| 2 ! | 48 | 13 | 7 | 2 | 13 | 5 | 3 | 2 |
| 3 ! | 80 | 19 | 9 | 4 | 18 | 8 | 4 | 4 |
| 4 ! | 112 | 25 | 11 | 6 | 23 | 11 | 6 | 6 |
| 5 ! | 144 | 31 | 13 | 8 | 28 | 14 | 8 | 8 |
| 6 ! | 176 | 37 | 15 | 10 | 33 | 17 | 10 | 10 |
| 7 ! | 208 | 43 | 17 | 12 | 38 | 20 | 12 | 12 |
| 8 ! | 240 | 49 | 19 | 14 | 43 | 23 | 14 | 14 |
| 9 ! | 272 | 55 | 21 | 16 | 48 | 26 | 16 | 16 |
; +----+-----+-----+-----+-----+-----+-----+-----+-----+
; Legend: * int - interpreted
* cmp - compiled with the 5.5 compiler
; * opc - compiled with open-coding primitives
; * sht - special hand-tailored version
;
; We can compare ratios by comparing the ratio of the differences between
; computing n! and (n + 1)!
;
; total pushes:
int / cmp is 32 / 6 ≈ 5.333
int / opc is 32 / 2 = 16.0
cmp / sht is 6 / 2 = 3.0
opc / sht is 2 / 2 = 1.0
;
; That is, the compiled code is 5.3 times faster than the interpreted (16
; times if open-coding instructions) and the hand-tailored version is 3 times
; faster than the compiled (or as fast with the hand-tailored version).
;
; maximum-depth
int / cmp is 5 / 3 ≈ 1.666
int / opc is 5 / 2 = 2.5
cmp / sht is 3 / 2 = 1.5
opc / sht is 2 / 2 = 1.0
;
; That is, the compiled code uses 1.66 less space than the interpreted (2.5
; times less if open-coding instructions) and the hand-tailored version uses
1.5 less space than the compiled ( or as much if open - coding instructions ) .
;
; Note that we're speaking asymptotically and we're ignoring the number of
; performed instructions as opposed to checking stack pushes.
;
; b. Open-coding comes pretty near. Of course, this assumes that the
; instruction count does not matter. There are two things we can do to get
; even closer.
;
First , we can do away with storing variables in environments and just use
; the registers. That way we will eliminate environment lookup for n and
; factorial.
;
Second , we can replace the check if factorial is a primitive procedure with
; a jump to the beginning of the function.
;
Those two along with open - coding will come to pretty much the same code as
; the hand-tailored version.
(load-relative "showcase/compiler/helpers.scm")
(load-relative "tests/helpers/monitored-stack.scm")
;; The recursive factorial definition, kept as quoted data so it can be
;; compiled into the machine by both compiler variants below.
(define code
  '(define (factorial n)
     (if (= n 1)
         1
         (* (factorial (- n 1)) n))))
;; Builds a combined explicit-control/compiled-code machine, compiles
;; `code` into it, and prints the stack statistics (total pushes and
;; maximum depth) for n! with n from 1 to 9.
(define (report-stats)
  (define machine (make-machine (append '(arg1 arg2) ec-registers)
                                (append `((+ ,+) (- ,-) (* ,*) (= ,=)) cm-operations)
                                explicit+compile-text))
  (compile-in-machine machine code)
  (for ([n (in-range 1 10)])
    ;; Reset the flag register between runs — NOTE(review): presumably
    ;; so a previous run's branch decision does not leak into the next;
    ;; confirm against the machine simulator.
    (set-register-contents! machine 'flag false)
    (printf " ~a! takes ~a\n" n (stack-stats-for machine (list 'factorial n)))))
(printf "Without open-coding optimizations:\n")
(report-stats)
;; Reload with the open-coding compiler from exercise 5.38 and repeat.
(load-relative "38.scm")
(printf "With open-coding optimizations:\n")
(report-stats)
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/sicp/05/45.scm | scheme |
By comparing the stack operations used by compiled code to the stack
operations used by the evaluator for the same computation, we can determine
the extent to which the compiler optimizes use of the stack, both in speed
(reducing the total number of stack operations) and in space (reducing the
maximum stack depth). Comparing this optimized stack use to performance of a
special-purpose machine for the same computation gives some indication of
the quality of the compiler.
a. Exercise 5.27 asked you to determine, as a function of n, the number of
pushes and the number of maximum stack depth needed by the evaluator to
compute n! using the recursive factorial procedure given above. Exercise
compiled factorial procedure.
Take the ratio of the number of pushes in the compiled version to the number
of pushes in the interpreted version, and do the same for the maximum stack
depth. Since the number of operations and the stack depth used to compute n!
are linear in n, these ratios should approach constants as n becomes large.
What are these constants? Similarly, find the ratios of the stack usage in a
special-purpose machine to the usage in the interpreted version.
Compare the ratios for the special-purpose versus interpreted code to the
ratios for compiled versus interpreted code. You should find that the
special-purpose machine does much better than the compiled code, since the
hand-tailored controller code should be much better than what is produced by
our rudimentary general-purpose compiler.
b. Can you suggest improvements to the compiler that would help it generate
code that would come closer in performance to the hand-tailored version?
a. Let's compare both the open-coding compiler and the simpler one.
Without open-coding optimizations:
With open-coding optimizations:
As usual, code to reproduce is below.
Now we can do a table
+----+-----------------------+-----------------------+
| | total-pushes | maximum-depth |
| +-----+-----+-----+-----+-----+-----+-----+-----+
| | int | cmp | opc | sht | int | cmp | opc | sht |
+----+-----+-----+-----+-----+-----+-----+-----+-----+
+----+-----+-----+-----+-----+-----+-----+-----+-----+
Legend: * int - interpreted
* opc - compiled with open-coding primitives
* sht - special hand-tailored version
We can compare ratios by comparing the ratio of the differences between
computing n! and (n + 1)!
total pushes:
faster than the copmiled (or as fast with the hand-tailored version).
maximum-depth
times less if open-coding instructions) and the hand-tailored version uses
Note that we're speaking asymptotically and we're ignoring the number of
performed instructions as opposed to checking stack pushes.
b. Open-coding comes pretty near. Of course, this assumes that the
even closer.
the registers. That way we will eliminate environment lookup for n and
factorial.
a jump to the beginning of the function.
the hand-tailored version. | SICP exercise 5.45
5.14 asked you to do the same measurements for the special - purpose factorial
machine shown in figure 5.11 . Now perform the same analysis using the
1 ! takes ( total - pushes = 7 maximum - depth = 3 )
2 ! takes ( total - pushes = 13 maximum - depth = 5 )
3 ! takes ( total - pushes = 19 maximum - depth = 8)
4 ! takes ( total - pushes = 25 maximum - depth = 11 )
5 ! takes ( total - pushes = 31 maximum - depth = 14 )
6 ! takes ( total - pushes = 37 maximum - depth = 17 )
7 ! takes ( total - pushes = 43 maximum - depth = 20 )
8 ! takes ( total - pushes = 49 maximum - depth = 23 )
9 ! takes ( total - pushes = 55 maximum - depth = 26 )
1 ! takes ( total - pushes = 5 maximum - depth = 3 )
2 ! takes ( total - pushes = 7 maximum - depth = 3 )
3 ! takes ( total - pushes = 9 maximum - depth = 4 )
4 ! takes ( total - pushes = 11 maximum - depth = 6 )
5 ! takes ( total - pushes = 13 maximum - depth = 8)
6 ! takes ( total - pushes = 15 maximum - depth = 10 )
7 ! takes ( total - pushes = 17 maximum - depth = 12 )
8 ! takes ( total - pushes = 19 maximum - depth = 14 )
9 ! takes ( total - pushes = 21 maximum - depth = 16 )
| 1 ! | 16 | 7 | 5 | 0 | 8 | 3 | 3 | 0 |
| 2 ! | 48 | 13 | 7 | 2 | 13 | 5 | 3 | 2 |
| 3 ! | 80 | 19 | 9 | 4 | 18 | 8 | 4 | 4 |
| 4 ! | 112 | 25 | 11 | 6 | 23 | 11 | 6 | 6 |
| 5 ! | 144 | 31 | 13 | 8 | 28 | 14 | 8 | 8 |
| 6 ! | 176 | 37 | 15 | 10 | 33 | 17 | 10 | 10 |
| 7 ! | 208 | 43 | 17 | 12 | 38 | 20 | 12 | 12 |
| 8 ! | 240 | 49 | 19 | 14 | 43 | 23 | 14 | 14 |
| 9 ! | 272 | 55 | 21 | 16 | 48 | 26 | 16 | 16 |
* cmp - compiled with the 5.5 compiler
int / cmp is 32 / 6 ≈ 5.333
int / opc is 32 / 2 = 16.0
cmp / sht is 6 / 2 = 3.0
opc / sht is 2 / 2 = 1.0
That is , the compiled code is 5.3 times faster than the interpreted ( 16
times if open - coding instructions ) and the hand - tailored version is 3 times
int / cmp is 5 / 3 ≈ 1.666
int / opc is 5 / 2 = 2.5
cmp / sht is 3 / 2 = 1.5
opc / sht is 2 / 2 = 1.0
That is , the compiled code uses 1.66 less space than the interpreted ( 2.5
1.5 less space than the compiled ( or as much if open - coding instructions ) .
instruction count does not matter . There are two thinks we can do to get
First , we can do away with storing variables in environments and just use
Second , we can replace the check if factorial is a primitive procedure with
Those two along with open - coding will come to pretty much the same code as
(load-relative "showcase/compiler/helpers.scm")
(load-relative "tests/helpers/monitored-stack.scm")
(define code
'(define (factorial n)
(if (= n 1)
1
(* (factorial (- n 1)) n))))
(define (report-stats)
(define machine (make-machine (append '(arg1 arg2) ec-registers)
(append `((+ ,+) (- ,-) (* ,*) (= ,=)) cm-operations)
explicit+compile-text))
(compile-in-machine machine code)
(for ([n (in-range 1 10)])
(set-register-contents! machine 'flag false)
(printf " ~a! takes ~a\n" n (stack-stats-for machine (list 'factorial n)))))
(printf "Without open-coding optimizations:\n")
(report-stats)
(load-relative "38.scm")
(printf "With open-coding optimizations:\n")
(report-stats)
|
b04f694255a9197bb8ef9c96412fffc31a2af41d3ac59bffd2e8bc881aeb78f6 | lemmaandrew/CodingBatHaskell | lastChars.hs | From
Given 2 strings , a and b , return a new string made of the first char of a and the
last char of b , so \"yo\ " and \"java\ " yields \"ya\ " . If either string is length 0 , use
' @ ' for its missing char .
Given 2 strings, a and b, return a new string made of the first char of a and the
last char of b, so \"yo\" and \"java\" yields \"ya\". If either string is length 0, use
'@' for its missing char.
-}
import Test.Hspec ( hspec, describe, it, shouldBe )
-- | Builds a two-character string from the first char of the first
-- argument and the last char of the second, substituting @\'\@\'@ for
-- the missing char whenever either string is empty.
lastChars :: String -> String -> String
lastChars a b = [firstOf a, lastOf b]
  where
    -- first element, or '@' for an empty string
    firstOf []      = '@'
    firstOf (c : _) = c
    -- last element, or '@' for an empty string
    lastOf []       = '@'
    lastOf cs       = last cs
-- | Hspec suite pinning the expected behaviour of 'lastChars',
-- including the empty-string cases that substitute \'@\'.
main :: IO ()
main = hspec $ describe "Tests:" $ do
    it "\"ls\"" $
        lastChars "last" "chars" `shouldBe` "ls"
    it "\"ya\"" $
        lastChars "yo" "java" `shouldBe` "ya"
    it "\"h@\"" $
        lastChars "hi" "" `shouldBe` "h@"
    it "\"@o\"" $
        lastChars "" "hello" `shouldBe` "@o"
    it "\"@@\"" $
        lastChars "" "" `shouldBe` "@@"
    it "\"ki\"" $
        lastChars "kitten" "hi" `shouldBe` "ki"
    it "\"kp\"" $
        lastChars "k" "zip" `shouldBe` "kp"
    it "\"k@\"" $
        lastChars "kitten" "" `shouldBe` "k@"
    it "\"kp\"" $
        lastChars "kitten" "zip" `shouldBe` "kp"
| null | https://raw.githubusercontent.com/lemmaandrew/CodingBatHaskell/d839118be02e1867504206657a0664fd79d04736/CodingBat/String-1/lastChars.hs | haskell | From
Given 2 strings , a and b , return a new string made of the first char of a and the
last char of b , so \"yo\ " and \"java\ " yields \"ya\ " . If either string is length 0 , use
' @ ' for its missing char .
Given 2 strings, a and b, return a new string made of the first char of a and the
last char of b, so \"yo\" and \"java\" yields \"ya\". If either string is length 0, use
'@' for its missing char.
-}
import Test.Hspec ( hspec, describe, it, shouldBe )
lastChars :: String -> String -> String
lastChars a b = undefined
main :: IO ()
main = hspec $ describe "Tests:" $ do
it "\"ls\"" $
lastChars "last" "chars" `shouldBe` "ls"
it "\"ya\"" $
lastChars "yo" "java" `shouldBe` "ya"
it "\"h@\"" $
lastChars "hi" "" `shouldBe` "h@"
it "\"@o\"" $
lastChars "" "hello" `shouldBe` "@o"
it "\"@@\"" $
lastChars "" "" `shouldBe` "@@"
it "\"ki\"" $
lastChars "kitten" "hi" `shouldBe` "ki"
it "\"kp\"" $
lastChars "k" "zip" `shouldBe` "kp"
it "\"k@\"" $
lastChars "kitten" "" `shouldBe` "k@"
it "\"kp\"" $
lastChars "kitten" "zip" `shouldBe` "kp"
| |
c7565215ab43e78fe43e642ba4a84e1577727f0fd4090f86f3cfe9dff9b55577 | open-telemetry/opentelemetry-erlang | opentelemetry_experimental_app.erl | %%%-------------------------------------------------------------------
%% @doc opentelemetry_experimental public API
%% @end
%%%-------------------------------------------------------------------
-module(opentelemetry_experimental_app).
-behaviour(application).
-export([start/2,
prep_stop/1,
stop/1]).
-include_lib("opentelemetry_api_experimental/include/otel_meter.hrl").
%% Application start callback: reads the SDK configuration from the
%% application environment, boots the supervision tree, then registers
%% the global meter provider against it.
start(_StartType, _StartArgs) ->
    Cfg = otel_configuration:merge_with_os(
            application:get_all_env(opentelemetry_experimental)),
    {ok, SupPid} = opentelemetry_experimental_sup:start_link(Cfg),
    {ok, _} = opentelemetry_experimental:start_meter_provider(
                ?GLOBAL_METER_PROVIDER_NAME, Cfg),
    {ok, SupPid}.
%% Called before the supervision tree is shut down.
prep_stop(_State) ->
    %% on application stop set meter to the noop implementation.
    %% This is to ensure no crashes if the sdk isn't the last
    %% thing to shutdown or if the opentelemetry application crashed.
    opentelemetry_experimental:set_default_meter({otel_meter_noop, []}),
    ok.
%% Application stop callback; cleanup already happened in prep_stop/1,
%% so there is nothing left to do here.
stop(_State) ->
    ok.
%% internal functions
| null | https://raw.githubusercontent.com/open-telemetry/opentelemetry-erlang/89c4b00b592050b35a4791f2ddecf7651fc083b0/apps/opentelemetry_experimental/src/opentelemetry_experimental_app.erl | erlang | -------------------------------------------------------------------
@doc opentelemetry_experimental public API
@end
-------------------------------------------------------------------
called before the supervision tree is shutdown.
on application stop set meter to the noop implementation.
This is to ensure no crashes if the sdk isn't the last
thing to shutdown or if the opentelemetry application crashed.
internal functions |
-module(opentelemetry_experimental_app).
-behaviour(application).
-export([start/2,
prep_stop/1,
stop/1]).
-include_lib("opentelemetry_api_experimental/include/otel_meter.hrl").
start(_StartType, _StartArgs) ->
Config = otel_configuration:merge_with_os(
application:get_all_env(opentelemetry_experimental)),
{ok, Pid} = opentelemetry_experimental_sup:start_link(Config),
{ok, _} = opentelemetry_experimental:start_meter_provider(?GLOBAL_METER_PROVIDER_NAME, Config),
{ok, Pid}.
prep_stop(_State) ->
opentelemetry_experimental:set_default_meter({otel_meter_noop, []}),
ok.
stop(_State) ->
ok.
|
ced423ab9a8bb9492016c94634131d7c13c671e838333d0d3cb1d09e1d6edeff | ocaml/ocamlbuild | aa.mli | (***********************************************************************)
(* *)
(* ocamlbuild *)
(* *)
, , projet Gallium , INRIA Rocquencourt
(* *)
Copyright 2007 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
(* the special exception on linking described in file ../LICENSE. *)
(* *)
(***********************************************************************)
val bar : int
| null | https://raw.githubusercontent.com/ocaml/ocamlbuild/792b7c8abdbc712c98ed7e69469ed354b87e125b/test/test4/a/aa.mli | ocaml | *********************************************************************
ocamlbuild
the special exception on linking described in file ../LICENSE.
********************************************************************* | , , projet Gallium , INRIA Rocquencourt
Copyright 2007 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
val bar : int
|
f550c509a3f382c9f28a5a084486cc8b5b0c348c1cb9f1e7341371225b50f790 | aysylu/loom | alg.cljc | (ns ^{:doc "Graph algorithms. Any graph record/type that satisfies the
Graph, Digraph, or WeightedGraph protocols (as appropriate per algorithm)
can use these functions."
:author "Justin Kramer"}
loom.alg
(:require [loom.alg-generic :as gen]
[loom.flow :as flow]
[loom.graph
:refer [add-nodes add-edges nodes edges successors weight predecessors
out-degree in-degree weighted? directed? graph digraph transpose]
:as graph]
[loom.alg-generic :refer [trace-path preds->span]]
#?(:clj [clojure.data.priority-map :as pm]
:cljs [tailrecursion.priority-map :as pm])
[clojure.set :as clj.set]))
;;;
;;; Convenience wrappers for loom.alg-generic functions
;;;
(defn- traverse-all
[nodes traverse]
(persistent! (second
(reduce
(fn [[seen trav] n]
(if (seen n)
[seen trav]
(let [ctrav (traverse n :seen seen)]
[(into seen ctrav) (reduce conj! trav ctrav)])))
[#{} (transient [])]
nodes))))
(defn pre-traverse
"Traverses graph g depth-first from start. Returns a lazy seq of nodes.
When no starting node is provided, traverses the entire graph, connected
or not."
([g]
(traverse-all (nodes g) (partial gen/pre-traverse (graph/successors g))))
([g start]
(gen/pre-traverse (graph/successors g) start)))
(defn pre-span
"Returns a depth-first spanning tree of the form {node [successors]}"
([g]
(second
(reduce
(fn [[seen span] n]
(if (seen n)
[seen span]
(let [[cspan seen] (gen/pre-span
(graph/successors g)
n :seen seen :return-seen true)]
[seen (merge span {n []} cspan)])))
[#{} {}]
(nodes g))))
([g start]
(gen/pre-span (graph/successors g) start)))
(defn post-traverse
"Traverses graph g depth-first, post-order from start. Returns a
vector of the nodes."
([g]
(traverse-all (nodes g) (partial gen/post-traverse (graph/successors g))))
([g start & opts]
(apply gen/post-traverse (graph/successors g) start opts)))
(defn topsort
"Topological sort of a directed acyclic graph (DAG). Returns nil if
g contains any cycles."
([g]
(loop [seen #{}
result ()
[n & ns] (seq (nodes g))]
(if-not n
result
(if (seen n)
(recur seen result ns)
(when-let [cresult (gen/topsort-component
(graph/successors g) n seen seen)]
(recur (into seen cresult) (concat cresult result) ns))))))
([g start]
(gen/topsort-component (graph/successors g) start)))
(defn bf-traverse
"Traverses graph g breadth-first from start. When option :f is provided,
returns a lazy seq of (f node predecessor-map depth) for each node traversed.
Otherwise, returns a lazy seq of the nodes. When option :when is provided,
filters successors with (f neighbor predecessor depth)."
([g]
(first
(reduce
(fn [[cc predmap] n]
(if (contains? predmap n)
[cc predmap]
(reduce
(fn [[cc _] [n pm _]]
[(conj cc n) pm])
[cc predmap]
(gen/bf-traverse (graph/successors g) n :f vector :seen predmap))))
[[] {}]
(nodes g))))
([g start]
(gen/bf-traverse (graph/successors g) start))
([g start & opts]
(apply gen/bf-traverse (graph/successors g) start opts)))
(defn bf-span
"Returns a breadth-first spanning tree of the form {node [successors]}"
([g]
(preds->span
(reduce
(fn [predmap n]
(if (contains? predmap n)
predmap
(last (gen/bf-traverse (graph/successors g) n
:f (fn [_ pm _] pm)
:seen predmap))))
{}
(nodes g))))
([g start]
(gen/bf-span (graph/successors g) start)))
(defn bf-path
"Returns a path from start to end with the fewest hops (i.e. irrespective
of edge weights)"
[g start end & opts]
(apply gen/bf-path (graph/successors g) start end opts))
(defn bf-path-bi
"Using a bidirectional breadth-first search, finds a path from start to
end with the fewest hops (i.e. irrespective of edge weights). Can be much
faster than a unidirectional search on certain types of graphs"
[g start end]
(if (directed? g)
(gen/bf-path-bi (graph/successors g) (predecessors g) start end)
(gen/bf-path-bi (graph/successors g) (graph/successors g) start end)))
(defn dijkstra-traverse
"Returns a lazy-seq of [current-node state] where state is a map in
the format {node [distance predecessor]}. When f is provided,
returns a lazy-seq of (f node state) for each node"
([g]
(gen/dijkstra-traverse
(graph/successors g) (graph/weight g) (first (nodes g))))
([g start]
(gen/dijkstra-traverse (graph/successors g) (graph/weight g) start vector))
([g start f]
(gen/dijkstra-traverse (graph/successors g) (graph/weight g) start f)))
(defn dijkstra-span
"Finds all shortest distances from start. Returns a map in the
format {node {successor distance}}"
([g]
(gen/dijkstra-span
(graph/successors g) (graph/weight g) (first (nodes g))))
([g start]
(gen/dijkstra-span (graph/successors g) (graph/weight g) start)))
(defn dijkstra-path-dist
"Finds the shortest path from start to end. Returns a vector:
[path distance]"
[g start end]
(gen/dijkstra-path-dist (graph/successors g) (graph/weight g) start end))
(defn dijkstra-path
"Finds the shortest path from start to end"
[g start end]
(first (dijkstra-path-dist g start end)))
(defn- can-relax-edge?
"Tests for whether we can improve the shortest path to v found so far
by going through u."
[[u v :as edge] weight costs]
(let [vd (get costs v)
ud (get costs u)
sum (+ ud weight)]
(> vd sum)))
(defn- relax-edge
"If there's a shorter path from s to v via u,
update our map of estimated path costs and
map of paths from source to vertex v"
[[u v :as edge] weight [costs paths :as estimates]]
(let [ud (get costs u)
sum (+ ud weight)]
(if (can-relax-edge? edge weight costs)
[(assoc costs v sum) (assoc paths v u)]
estimates)))
(defn- relax-edges
"Performs edge relaxation on all edges in weighted directed graph"
[g start estimates]
(->> (edges g)
(reduce (fn [estimates [u v :as edge]]
(relax-edge edge (graph/weight g u v) estimates))
estimates)))
(defn- init-estimates
"Initializes path cost estimates and paths from source to all vertices,
for Bellman-Ford algorithm"
[graph start]
(let [nodes (disj (nodes graph) start)
path-costs {start 0}
paths {start nil}
infinities (repeat #?(:clj Double/POSITIVE_INFINITY
:cljs js/Infinity))
nils (repeat nil)
init-costs (interleave nodes infinities)
init-paths (interleave nodes nils)]
[(apply assoc path-costs init-costs)
(apply assoc paths init-paths)]))
;;;
;;; Graph algorithms
;;;
(defn bellman-ford
"Given a weighted, directed graph G = (V, E) with source start,
the Bellman-Ford algorithm produces map of single source shortest
paths and their costs if no negative-weight cycle that is reachable
from the source exists, and false otherwise, indicating that no
solution exists."
[g start]
(let [initial-estimates (init-estimates g start)
relax - edges is calculated for all edges V-1 times
[costs paths] (reduce (fn [estimates _]
(relax-edges g start estimates))
initial-estimates
(-> g nodes count dec range))
edges (edges g)]
(if (some
(fn [[u v :as edge]]
(can-relax-edge? edge (graph/weight g u v) costs))
edges)
false
[costs
(->> (keys paths)
;;remove vertices that are unreachable from source
(remove #(= #?(:clj Double/POSITIVE_INFINITY
:cljs js/Infinity)
(get costs %)))
(reduce
(fn [final-paths v]
(assoc final-paths v
;; follows the parent pointers
;; to construct path from source to node v
(loop [node v
path ()]
(if node
(recur (get paths node) (cons node path))
path))))
{}))])))
(defn dag?
"Returns true if g is a directed acyclic graph"
[g]
(boolean (topsort g)))
(defn shortest-path
"Finds the shortest path from start to end in graph g, using Dijkstra's
algorithm if the graph is weighted, breadth-first search otherwise."
[g start end]
(if (weighted? g)
(dijkstra-path g start end)
(bf-path g start end)))
(defn longest-shortest-path
"Finds the longest shortest path beginning at start, using Dijkstra's
algorithm if the graph is weighted, breadth-first search otherwise."
[g start]
(reverse
(if (weighted? g)
(reduce
(fn [path1 [n state]]
(let [path2 (trace-path (comp second state) n)]
(if (< (count path1) (count path2)) path2 path1)))
[start]
(dijkstra-traverse g start vector))
(reduce
(fn [path1 [n predmap _]]
(let [path2 (trace-path predmap n)]
(if (< (count path1) (count path2)) path2 path1)))
[start]
(bf-traverse g start :f vector)))))
(defn- bellman-ford-transform
"Helper function for Johnson's algorithm. Uses Bellman-Ford to remove negative weights."
[wg]
(let [q (first (drop-while (partial graph/has-node? wg) (repeatedly gensym)))
es (for [v (graph/nodes wg)] [q v 0])
bf-results (bellman-ford (graph/add-edges* wg es) q)]
(if bf-results
(let [[dist-q _] bf-results
new-es (map (juxt first second (fn [[u v]]
(+ (weight wg u v) (- (dist-q u)
(dist-q v)))))
(graph/edges wg))]
(graph/add-edges* wg new-es))
false)))
(defn johnson
"Finds all-pairs shortest paths using Bellman-Ford to remove any negative edges before
using Dijkstra's algorithm to find the shortest paths from each vertex to every other.
This algorithm is efficient for sparse graphs.
If the graph is unweighted, a default weight of 1 will be used. Note that it is more efficient
to use breadth-first spans for a graph with a uniform edge weight rather than Dijkstra's algorithm.
Most callers should use shortest-paths and allow the most efficient implementation be selected
for the graph."
[g]
(let [g (if (and (weighted? g) (some (partial > 0) (map (graph/weight g) (graph/edges g))))
(bellman-ford-transform g)
g)]
(if (false? g)
false
(let [dist (if (weighted? g)
(weight g)
(fn [u v] (when (graph/has-edge? g u v) 1)))]
(reduce (fn [acc node]
(assoc acc node (gen/dijkstra-span (successors g) dist node)))
{}
(nodes g))))))
(defn bf-all-pairs-shortest-paths
"Uses bf-span on each node in the graph."
[g]
(reduce (fn [spans node]
(assoc spans node (bf-span g node)))
{}
(nodes g)))
(defn all-pairs-shortest-paths
"Finds all-pairs shortest paths in a graph. Uses Johnson's algorithm for weighted graphs
which is efficient for sparse graphs. Breadth-first spans are used for unweighted graphs."
[g]
(if (weighted? g)
(johnson g)
(bf-all-pairs-shortest-paths g)))
(defn connected-components
"Returns the connected components of graph g as a vector of vectors. If g
is directed, returns the weakly-connected components."
[g]
(let [nb (if-not (directed? g) (graph/successors g)
#(concat (graph/successors g %) (predecessors g %)))]
(first
(reduce
(fn [[cc predmap] n]
(if (contains? predmap n)
[cc predmap]
(let [[c pm] (reduce
(fn [[c _] [n pm _]]
[(conj c n) pm])
[[] nil]
(gen/bf-traverse nb n :f vector :seen predmap))]
[(conj cc c) pm])))
[[] {}]
(nodes g)))))
(defn connected?
"Returns true if g is connected"
[g]
(== (count (first (connected-components g))) (count (nodes g))))
(defn scc
"Returns the strongly-connected components of directed graph g as a vector of
vectors. Uses Kosaraju's algorithm."
[g]
(let [gt (transpose g)]
(loop [stack (reverse (post-traverse g))
seen #{}
cc (transient [])]
(if (empty? stack)
(persistent! cc)
(if (seen (first stack))
(recur (rest stack) seen cc)
(let [[c seen] (post-traverse gt (first stack)
:seen seen :return-seen true)]
(recur (rest stack)
seen
(conj! cc c))))
))))
(defn strongly-connected?
[g]
(== (count (first (scc g))) (count (nodes g))))
(defn connect
"Returns graph g with all connected components connected to each other"
[g]
(reduce add-edges g (partition 2 1 (map first (connected-components g)))))
(defn density
"Return the density of graph g"
[g & {:keys [loops] :or {loops false}}]
(let [order (count (nodes g))]
(/ (count (edges g))
(* order (if loops
order
(dec order))))))
(defn loners
"Returns nodes with no connections to other nodes (i.e., isolated nodes)"
[g]
(let [degree-total (if (directed? g)
#(+ (in-degree g %) (out-degree g %))
#(out-degree g %))]
(filter (comp zero? degree-total) (nodes g))))
(defn distinct-edges
"Returns the distinct edges of g. Only useful for undirected graphs"
[g]
(if (directed? g)
(edges g)
(second
(reduce
(fn [[seen es] e]
(let [eset (set (take 2 e))]
(if (seen eset)
[seen es]
[(conj seen eset)
(conj es e)])))
[#{} []]
(edges g)))))
(defn bipartite-color
"Attempts a two-coloring of graph g. When successful, returns a map of
nodes to colors (1 or 0). Otherwise, returns nil."
[g]
(letfn [(color-component [coloring start]
(loop [coloring (assoc coloring start 1)
queue (conj #?(:clj clojure.lang.PersistentQueue/EMPTY
:cljs cljs.core/PersistentQueue.EMPTY) start)]
(if (empty? queue)
coloring
(let [v (peek queue)
color (- 1 (coloring v))
nbrs (graph/successors g v)]
;; TODO: could be better
(if (some #(and (coloring %) (= (coloring v) (coloring %)))
nbrs)
nil ; graph is not bipartite
(let [nbrs (remove coloring nbrs)]
(recur (into coloring (for [nbr nbrs] [nbr color]))
(into (pop queue) nbrs))))))))]
(loop [[node & nodes] (seq (nodes g))
coloring {}]
(when coloring
(if (nil? node)
coloring
(if (coloring node)
(recur nodes coloring)
(recur nodes (color-component coloring node))))))))
(defn bipartite?
"Returns true if g is bipartite"
[g]
(boolean (bipartite-color g)))
(defn bipartite-sets
"Returns two sets of nodes, one for each color of the bipartite coloring,
or nil if g is not bipartite"
[g]
(when-let [coloring (bipartite-color g)]
(reduce
(fn [[s1 s2] [node color]]
(if (zero? color)
[(conj s1 node) s2]
[s1 (conj s2 node)]))
[#{} #{}]
coloring)))
(defn- neighbor-colors
"Given a putative coloring of a graph, returns the colors of all the
neighbors of a given node."
[g node coloring]
(let [successors (graph/successors g node)
neighbors (if-not (directed? g)
successors
(concat successors
(graph/predecessors g node)))]
(set (remove nil?
(map #(get coloring %)
neighbors)))))
(defn coloring?
"Returns true if a map of nodes to colors is a proper coloring of a graph."
[g coloring]
(letfn [(different-colors? [node]
(not (contains? (neighbor-colors g node coloring)
(coloring node))))]
(and (every? different-colors? (nodes g))
(every? (complement nil?) (map #(get coloring %)
(nodes g))))))
(defn greedy-coloring
"Greedily color the vertices of a graph using the first-fit heuristic.
Returns a map of nodes to colors (0, 1, ...)."
[g]
(loop [node-seq (bf-traverse g)
coloring {}
colors #{}]
(if (empty? node-seq)
coloring
(let [node (first node-seq)
possible-colors (clj.set/difference colors
(neighbor-colors g
node
coloring))
node-color (if (empty? possible-colors)
(count colors)
(apply min possible-colors))]
(recur (rest node-seq)
(conj coloring [node node-color])
(conj colors node-color))))))
(defn max-flow
"Returns [flow-map flow-value], where flow-map is a weighted adjacency map
representing the maximum flow. The argument should be a weighted digraph,
where the edge weights are flow capacities. Source and sink are the vertices
representing the flow source and sink vertices. Optionally, pass in
:method :algorithm to use. Currently, the only option is :edmonds-karp ."
[g source sink & {:keys [method] :or {method :edmonds-karp}}]
(let [method-set #{:edmonds-karp}
n (graph/successors g),
i (predecessors g),
c (graph/weight g),
s source,
t sink
[flow-map flow-value] (case method
:edmonds-karp (flow/edmonds-karp n i c s t)
(throw
(ex-info
(str "Method not found. Choose from: "
method-set)
{:method-set method-set})))]
[flow-map flow-value]))
;; mst algorithms
convenience functions for mst algo
(defn- edge-weights
"Wrapper function to return edges along with weights for a given graph.
For un-weighted graphs a default value of one is produced. The function
returns values of the form [[[u v] 10] [[x y] 20] ...]"
[wg v]
(let [edge-weight (fn [u v]
(if (weighted? wg) (weight wg u v) 1))]
(map #(vec [%1 [v (edge-weight v %1)] ])
(successors wg v)))
)
(defn prim-mst-edges
"An edge-list of an minimum spanning tree along with weights that
represents an MST of the given graph. Returns the MST edge-list
for un-weighted graphs."
([wg]
(cond
(directed? wg) (throw (#?(:clj Exception. :cljs js/Error)
"Spanning tree only defined for undirected graphs"))
:else (let [mst (prim-mst-edges wg (nodes wg) nil #{} [])]
(if (weighted? wg)
mst
(map #(vec [(first %1) (second %1)]) mst)))))
([wg n h visited acc]
(cond
(empty? n) acc
(empty? h) (let [v (first n)
h (into (pm/priority-map-keyfn second) (edge-weights wg v))]
(recur wg (disj n v) h (conj visited v) acc))
:else (let [next_edge (peek h)
u (first (second next_edge))
v (first next_edge)
update-dist (fn [h [v [u wt]]]
(cond
(nil? (get h v)) (assoc h v [u wt])
(> (second (get h v)) wt) (assoc h v [u wt])
:else h))]
(let [wt (second (second next_edge))
visited (conj visited v)
h (reduce update-dist (pop h)
(filter #((complement visited) (first %) )
(edge-weights wg v)))]
(recur wg (disj n v) h (conj visited v)(conj acc [u v wt])))))))
(defn prim-mst
"Minimum spanning tree of given graph. If the graph contains more than one
component then returns a spanning forest of minimum spanning trees."
[wg]
(let [mst (apply graph/weighted-graph (prim-mst-edges wg))
]
(cond
(= ((comp count nodes) wg) ((comp count nodes) mst)) mst
:else (apply add-nodes mst (filter #(zero? (out-degree wg %)) (nodes wg)))
)))
(defn astar-path
"Returns the shortest path using A* algorithm. Returns a map of predecessors."
([g src target heur]
(let [heur (if (nil? heur) (fn [x y] 0) heur)
;; store in q => {u [heur+dist parent act est]}
q (pm/priority-map-keyfn first src [0 nil 0 0])
explored (hash-map)]
(astar-path g src target heur q explored))
)
([g src target heur q explored]
(cond
;; queue empty, target not reachable
(empty? q) (throw (ex-info "Target not reachable from source" {}))
;; target found, build path and return
(= (first (peek q)) target) (let [u (first (peek q))
parent ((second (peek q)) 1)
explored(assoc explored target parent)
path (loop [s target acc {}]
(cond
(nil? s) acc
(= s src) (assoc acc s nil)
:else (recur (explored s)
(assoc acc s (explored s)))))
]
path
)
;; continue searching
:else (let
[curr-node (first (peek q))
curr-dist ((second (peek q)) 2)
;; update path
explored (assoc explored curr-node ((second (peek q)) 1))
nbrs (remove (into #{} (keys explored)) (successors g curr-node))
;; we do this for following reasons
;; a. avoiding duplicate heuristics computation
;; b. duplicate entries for nodes, which needs to be removed later
;; TODO: this could be sped up if we priority-map supported transients
update-dist (fn [curr-node curr-dist q v]
(let [act (+ curr-dist
(if (weighted? g) (weight g curr-node v) 1))
est (if (nil? (get q v))
(heur v target) ((get q v) 3))
]
(cond
(or (nil? (get q v))
(> ((get q v) 2) act))
(assoc q v [(+ act est ) curr-node act est])
:else q)))
q (reduce (partial update-dist curr-node curr-dist) (pop q)
nbrs)]
(recur g src target heur q explored)))))
(defn astar-dist
"Returns the length of the shortest path between src and target using
the A* algorithm"
[g src target heur]
(let [path (astar-path g src target heur)
dist (reduce (fn [c [u v]]
(if (nil? v)
c
(+ c (if (weighted? g) (weight g v u) 1))
)
) 0 path)]
dist))
(defn degeneracy-ordering
"Returns sequence of vertices in degeneracy order."
[g]
(loop [ordered-nodes []
node-degs (->> (zipmap (nodes g)
(map (partial out-degree g) (nodes g)))
(into (pm/priority-map)))
k 0]
(if (empty? node-degs)
ordered-nodes
(let [[n deg] (first node-degs)
This will be the adjacent nodes still in node - degs ( not in ordered - nodes ) decr'd by 1
updated-degs (->> (map (juxt identity node-degs) (successors g n))
(filter second)
(map (juxt first (comp dec second)))
(into {}))]
(recur (conj ordered-nodes n)
(reduce (fn [n-ds [n d]] ;; Update this assoc'ing the updated-degs found above
(assoc n-ds n d))
(dissoc node-degs n)
updated-degs)
(max k deg))))))
(defn- bk-gen [g [r p x] stack]
(let [v-pivot (reduce (partial max-key (partial out-degree g)) p)]
(loop [v v-pivot
p (set p)
x (set x)
stack stack]
(if (nil? v)
stack
(let [succ-v (set (successors g v))]
(recur (-> (clj.set/difference (disj p v)
(set (successors g v-pivot)))
first)
(disj p v)
(conj x v)
(conj stack [(conj r v)
(clj.set/intersection p succ-v)
(clj.set/intersection x succ-v)])))))))
(defn- bk
"An iterative implementation of Bron-Kerbosch using degeneracy ordering
at the outer loop and max-degree vertex pivoting in the inner loop."
[g]
(loop [vs (degeneracy-ordering g)
max-clqs (seq [])
p (set (nodes g))
x #{}
stack []]
(cond
;; Done
(and (empty? stack) (empty? vs))
max-clqs
;; Empty stack, create a seed to generate stack items
(empty? stack)
(let [v (first vs)
succ-v (set (successors g v))]
(recur (rest vs)
max-clqs
(disj p v)
(conj x v)
[[#{v}
(clj.set/intersection p succ-v)
(clj.set/intersection x succ-v)]]))
;; Pull the next request off the stack
:else
(let [[r s-p s-x] (peek stack)]
(cond
Maximal clique found
(and (empty? s-p) (empty? s-x))
(recur vs
(cons r max-clqs)
p
x
(pop stack))
;; No maximal clique that excludes x exists
(empty? s-p)
(recur vs
max-clqs
p
x
(pop stack))
;; Use this state to generate more states
:else
(recur vs
max-clqs
p
x
(bk-gen g [r s-p s-x] (pop stack))))))))
(defn maximal-cliques
"Enumerate the maximal cliques using Bron-Kerbosch."
[g]
(bk g))
;;;
;;; Compare graphs
;;;
(defn subgraph?
"Returns true iff g1 is a subgraph of g2. An undirected graph is never
considered as a subgraph of a directed graph and vice versa."
[g1 g2]
(and (= (directed? g1) (directed? g2))
(let [edge-test-fn (if (directed? g1)
graph/has-edge?
(fn [g x y]
(or (graph/has-edge? g x y)
(graph/has-edge? g y x))))]
(and (every? #(graph/has-node? g2 %) (nodes g1))
(every? (fn [[x y]] (edge-test-fn g2 x y))
(edges g1))))))
(defn eql?
"Returns true iff g1 is a subgraph of g2 and g2 is a subgraph of g1"
[g1 g2]
(and (subgraph? g1 g2)
(subgraph? g2 g1)))
(defn isomorphism?
"Given a mapping phi between the vertices of two graphs, determine
if the mapping is an isomorphism, e.g., {(phi x), (phi y)} connected
in g2 iff {x, y} are connected in g1."
[g1 g2 phi]
(eql? g2 (-> (if (directed? g1) (digraph) (graph))
(graph/add-nodes* (map phi (nodes g1)))
(graph/add-edges* (map (fn [[x y]] [(phi x) (phi y)])
(edges g1))))))
; ; : MST , coloring , matching , etc etc
| null | https://raw.githubusercontent.com/aysylu/loom/bb9068c5c5f4c3cf66344bf1b966cfd730e92693/src/loom/alg.cljc | clojure |
Convenience wrappers for loom.alg-generic functions
Graph algorithms
remove vertices that are unreachable from source
follows the parent pointers
to construct path from source to node v
TODO: could be better
graph is not bipartite
mst algorithms
store in q => {u [heur+dist parent act est]}
queue empty, target not reachable
target found, build path and return
continue searching
update path
we do this for following reasons
a. avoiding duplicate heuristics computation
b. duplicate entries for nodes, which needs to be removed later
TODO: this could be sped up if we priority-map supported transients
Update this assoc'ing the updated-degs found above
Done
Empty stack, create a seed to generate stack items
Pull the next request off the stack
No maximal clique that excludes x exists
Use this state to generate more states
Compare graphs
; : MST , coloring , matching , etc etc | (ns ^{:doc "Graph algorithms. Any graph record/type that satisfies the
Graph, Digraph, or WeightedGraph protocols (as appropriate per algorithm)
can use these functions."
:author "Justin Kramer"}
loom.alg
(:require [loom.alg-generic :as gen]
[loom.flow :as flow]
[loom.graph
:refer [add-nodes add-edges nodes edges successors weight predecessors
out-degree in-degree weighted? directed? graph digraph transpose]
:as graph]
[loom.alg-generic :refer [trace-path preds->span]]
#?(:clj [clojure.data.priority-map :as pm]
:cljs [tailrecursion.priority-map :as pm])
[clojure.set :as clj.set]))
(defn- traverse-all
[nodes traverse]
(persistent! (second
(reduce
(fn [[seen trav] n]
(if (seen n)
[seen trav]
(let [ctrav (traverse n :seen seen)]
[(into seen ctrav) (reduce conj! trav ctrav)])))
[#{} (transient [])]
nodes))))
(defn pre-traverse
"Traverses graph g depth-first from start. Returns a lazy seq of nodes.
When no starting node is provided, traverses the entire graph, connected
or not."
([g]
(traverse-all (nodes g) (partial gen/pre-traverse (graph/successors g))))
([g start]
(gen/pre-traverse (graph/successors g) start)))
(defn pre-span
"Returns a depth-first spanning tree of the form {node [successors]}"
([g]
(second
(reduce
(fn [[seen span] n]
(if (seen n)
[seen span]
(let [[cspan seen] (gen/pre-span
(graph/successors g)
n :seen seen :return-seen true)]
[seen (merge span {n []} cspan)])))
[#{} {}]
(nodes g))))
([g start]
(gen/pre-span (graph/successors g) start)))
(defn post-traverse
"Traverses graph g depth-first, post-order from start. Returns a
vector of the nodes."
([g]
(traverse-all (nodes g) (partial gen/post-traverse (graph/successors g))))
([g start & opts]
(apply gen/post-traverse (graph/successors g) start opts)))
(defn topsort
"Topological sort of a directed acyclic graph (DAG). Returns nil if
g contains any cycles."
([g]
(loop [seen #{}
result ()
[n & ns] (seq (nodes g))]
(if-not n
result
(if (seen n)
(recur seen result ns)
(when-let [cresult (gen/topsort-component
(graph/successors g) n seen seen)]
(recur (into seen cresult) (concat cresult result) ns))))))
([g start]
(gen/topsort-component (graph/successors g) start)))
(defn bf-traverse
"Traverses graph g breadth-first from start. When option :f is provided,
returns a lazy seq of (f node predecessor-map depth) for each node traversed.
Otherwise, returns a lazy seq of the nodes. When option :when is provided,
filters successors with (f neighbor predecessor depth)."
([g]
(first
(reduce
(fn [[cc predmap] n]
(if (contains? predmap n)
[cc predmap]
(reduce
(fn [[cc _] [n pm _]]
[(conj cc n) pm])
[cc predmap]
(gen/bf-traverse (graph/successors g) n :f vector :seen predmap))))
[[] {}]
(nodes g))))
([g start]
(gen/bf-traverse (graph/successors g) start))
([g start & opts]
(apply gen/bf-traverse (graph/successors g) start opts)))
(defn bf-span
"Returns a breadth-first spanning tree of the form {node [successors]}"
([g]
(preds->span
(reduce
(fn [predmap n]
(if (contains? predmap n)
predmap
(last (gen/bf-traverse (graph/successors g) n
:f (fn [_ pm _] pm)
:seen predmap))))
{}
(nodes g))))
([g start]
(gen/bf-span (graph/successors g) start)))
(defn bf-path
"Returns a path from start to end with the fewest hops (i.e. irrespective
of edge weights)"
[g start end & opts]
(apply gen/bf-path (graph/successors g) start end opts))
(defn bf-path-bi
"Using a bidirectional breadth-first search, finds a path from start to
end with the fewest hops (i.e. irrespective of edge weights). Can be much
faster than a unidirectional search on certain types of graphs"
[g start end]
(if (directed? g)
(gen/bf-path-bi (graph/successors g) (predecessors g) start end)
(gen/bf-path-bi (graph/successors g) (graph/successors g) start end)))
(defn dijkstra-traverse
"Returns a lazy-seq of [current-node state] where state is a map in
the format {node [distance predecessor]}. When f is provided,
returns a lazy-seq of (f node state) for each node"
([g]
(gen/dijkstra-traverse
(graph/successors g) (graph/weight g) (first (nodes g))))
([g start]
(gen/dijkstra-traverse (graph/successors g) (graph/weight g) start vector))
([g start f]
(gen/dijkstra-traverse (graph/successors g) (graph/weight g) start f)))
(defn dijkstra-span
"Finds all shortest distances from start. Returns a map in the
format {node {successor distance}}"
([g]
(gen/dijkstra-span
(graph/successors g) (graph/weight g) (first (nodes g))))
([g start]
(gen/dijkstra-span (graph/successors g) (graph/weight g) start)))
(defn dijkstra-path-dist
"Finds the shortest path from start to end. Returns a vector:
[path distance]"
[g start end]
(gen/dijkstra-path-dist (graph/successors g) (graph/weight g) start end))
(defn dijkstra-path
"Finds the shortest path from start to end"
[g start end]
(first (dijkstra-path-dist g start end)))
(defn- can-relax-edge?
"Tests for whether we can improve the shortest path to v found so far
by going through u."
[[u v :as edge] weight costs]
(let [vd (get costs v)
ud (get costs u)
sum (+ ud weight)]
(> vd sum)))
(defn- relax-edge
"If there's a shorter path from s to v via u,
update our map of estimated path costs and
map of paths from source to vertex v"
[[u v :as edge] weight [costs paths :as estimates]]
(let [ud (get costs u)
sum (+ ud weight)]
(if (can-relax-edge? edge weight costs)
[(assoc costs v sum) (assoc paths v u)]
estimates)))
(defn- relax-edges
"Performs edge relaxation on all edges in weighted directed graph"
[g start estimates]
(->> (edges g)
(reduce (fn [estimates [u v :as edge]]
(relax-edge edge (graph/weight g u v) estimates))
estimates)))
(defn- init-estimates
"Initializes path cost estimates and paths from source to all vertices,
for Bellman-Ford algorithm"
[graph start]
(let [nodes (disj (nodes graph) start)
path-costs {start 0}
paths {start nil}
infinities (repeat #?(:clj Double/POSITIVE_INFINITY
:cljs js/Infinity))
nils (repeat nil)
init-costs (interleave nodes infinities)
init-paths (interleave nodes nils)]
[(apply assoc path-costs init-costs)
(apply assoc paths init-paths)]))
(defn bellman-ford
  "Given a weighted, directed graph G = (V, E) with source start,
  the Bellman-Ford algorithm produces map of single source shortest
  paths and their costs if no negative-weight cycle that is reachable
  from the source exists, and false otherwise, indicating that no
  solution exists."
  [g start]
  (let [initial-estimates (init-estimates g start)
        ;; relax-edges is calculated for all edges V-1 times
        ;; (this comment had lost its ";;" marker, which corrupted the
        ;; binding vector below)
        [costs paths] (reduce (fn [estimates _]
                                (relax-edges g start estimates))
                              initial-estimates
                              (-> g nodes count dec range))
        edges (edges g)]
    (if (some
         (fn [[u v :as edge]]
           ;; Any still-relaxable edge implies a reachable negative cycle.
           (can-relax-edge? edge (graph/weight g u v) costs))
         edges)
      false
      [costs
       (->> (keys paths)
            ;; Drop nodes that remained unreachable (cost still infinite).
            (remove #(= #?(:clj Double/POSITIVE_INFINITY
                           :cljs js/Infinity)
                        (get costs %)))
            (reduce
             (fn [final-paths v]
               (assoc final-paths v
                      ;; Walk the predecessor chain back to start.
                      (loop [node v
                             path ()]
                        (if node
                          (recur (get paths node) (cons node path))
                          path))))
             {}))])))
(defn dag?
  "Returns true if g is a directed acyclic graph"
  [g]
  ;; topsort yields an ordering only for acyclic graphs, nil otherwise.
  (if (topsort g) true false))
(defn shortest-path
  "Finds the shortest path from start to end in graph g, using Dijkstra's
  algorithm if the graph is weighted, breadth-first search otherwise."
  [g start end]
  (cond
    (weighted? g) (dijkstra-path g start end)
    :else         (bf-path g start end)))
(defn longest-shortest-path
  "Finds the longest shortest path beginning at start, using Dijkstra's
  algorithm if the graph is weighted, breadth-first search otherwise."
  [g start]
  ;; The traversal yields, per visited node, enough state to reconstruct the
  ;; shortest path back to start; keep the longest such path. trace-path
  ;; produces the path end->start, hence the final reverse.
  (reverse
   (if (weighted? g)
     (reduce
      (fn [path1 [n state]]
        ;; state maps node -> [dist predecessor]; (comp second state) is the
        ;; predecessor-lookup function trace-path expects.
        (let [path2 (trace-path (comp second state) n)]
          (if (< (count path1) (count path2)) path2 path1)))
      [start]
      (dijkstra-traverse g start vector))
     (reduce
      (fn [path1 [n predmap _]]
        (let [path2 (trace-path predmap n)]
          (if (< (count path1) (count path2)) path2 path1)))
      [start]
      (bf-traverse g start :f vector)))))
(defn- bellman-ford-transform
  "Helper function for Johnson's algorithm. Uses Bellman-Ford to remove negative weights."
  [wg]
  ;; q: a fresh node name guaranteed not to collide with any existing node.
  (let [q (first (drop-while (partial graph/has-node? wg) (repeatedly gensym)))
        ;; Connect q to every node with a zero-weight edge and run
        ;; Bellman-Ford from q to obtain the reweighting potentials h = dist-q.
        es (for [v (graph/nodes wg)] [q v 0])
        bf-results (bellman-ford (graph/add-edges* wg es) q)]
    (if bf-results
      (let [[dist-q _] bf-results
            ;; Reweight: w'(u,v) = w(u,v) + h(u) - h(v); non-negative and
            ;; shortest-path preserving.
            new-es (map (juxt first second (fn [[u v]]
                                             (+ (weight wg u v) (- (dist-q u)
                                                                   (dist-q v)))))
                        (graph/edges wg))]
        (graph/add-edges* wg new-es))
      ;; Bellman-Ford found a negative-weight cycle: no valid reweighting.
      false)))
(defn johnson
  "Finds all-pairs shortest paths using Bellman-Ford to remove any negative edges before
  using Dijkstra's algorithm to find the shortest paths from each vertex to every other.
  This algorithm is efficient for sparse graphs.
  If the graph is unweighted, a default weight of 1 will be used. Note that it is more efficient
  to use breadth-first spans for a graph with a uniform edge weight rather than Dijkstra's algorithm.
  Most callers should use shortest-paths and allow the most efficient implementation be selected
  for the graph."
  [g]
  ;; Only reweight when some edge is negative; bellman-ford-transform
  ;; returns false when a negative-weight cycle makes that impossible.
  (let [g (if (and (weighted? g) (some (partial > 0) (map (graph/weight g) (graph/edges g))))
            (bellman-ford-transform g)
            g)]
    (if (false? g)
      false
      (let [dist (if (weighted? g)
                   (weight g)
                   ;; unweighted: every existing edge costs 1
                   (fn [u v] (when (graph/has-edge? g u v) 1)))]
        ;; One Dijkstra span per source node.
        (reduce (fn [acc node]
                  (assoc acc node (gen/dijkstra-span (successors g) dist node)))
                {}
                (nodes g))))))
(defn bf-all-pairs-shortest-paths
  "Uses bf-span on each node in the graph."
  [g]
  (into {}
        (map (fn [node] [node (bf-span g node)]))
        (nodes g)))
(defn all-pairs-shortest-paths
  "Finds all-pairs shortest paths in a graph. Uses Johnson's algorithm for weighted graphs
  which is efficient for sparse graphs. Breadth-first spans are used for unweighted graphs."
  [g]
  (cond
    (weighted? g) (johnson g)
    :else         (bf-all-pairs-shortest-paths g)))
(defn connected-components
  "Returns the connected components of graph g as a vector of vectors. If g
  is directed, returns the weakly-connected components."
  [g]
  ;; nb: neighbor function. For directed graphs, follow edges in both
  ;; directions so connectivity is *weak*.
  (let [nb (if-not (directed? g) (graph/successors g)
             #(concat (graph/successors g %) (predecessors g %)))]
    (first
     (reduce
      (fn [[cc predmap] n]
        (if (contains? predmap n)
          ;; n already belongs to a previously discovered component
          [cc predmap]
          ;; Traverse n's component; thread the cumulative seen/pred map so
          ;; later nodes of the same component are skipped.
          (let [[c pm] (reduce
                        (fn [[c _] [n pm _]]
                          [(conj c n) pm])
                        [[] nil]
                        (gen/bf-traverse nb n :f vector :seen predmap))]
            [(conj cc c) pm])))
      [[] {}]
      (nodes g)))))
(defn connected?
  "Returns true if g is connected"
  [g]
  ;; A graph is connected iff its first component spans every node.
  (let [first-component (first (connected-components g))]
    (== (count first-component) (count (nodes g)))))
(defn scc
  "Returns the strongly-connected components of directed graph g as a vector of
  vectors. Uses Kosaraju's algorithm."
  [g]
  ;; Kosaraju: take a post-order of g, then traverse the transposed graph in
  ;; reverse post-order; each traversal from an unseen node yields one SCC.
  (let [gt (transpose g)]
    (loop [stack (reverse (post-traverse g))
           seen #{}
           cc (transient [])]
      (if (empty? stack)
        (persistent! cc)
        (if (seen (first stack))
          (recur (rest stack) seen cc)
          ;; Traverse gt from this root, threading the accumulated seen set.
          (let [[c seen] (post-traverse gt (first stack)
                                        :seen seen :return-seen true)]
            (recur (rest stack)
                   seen
                   (conj! cc c))))))))
(defn strongly-connected?
  "Returns true when directed graph g consists of a single
  strongly-connected component."
  [g]
  (let [components (scc g)]
    (== (count (first components)) (count (nodes g)))))
(defn connect
  "Returns graph g with all connected components connected to each other"
  [g]
  ;; Link consecutive component representatives with an edge.
  (->> (connected-components g)
       (map first)
       (partition 2 1)
       (reduce add-edges g)))
(defn density
  "Return the density of graph g"
  [g & {:keys [loops] :or {loops false}}]
  (let [order (count (nodes g))
        ;; Number of possible edges; with loops allowed it is order^2,
        ;; otherwise order * (order - 1).
        possible (if loops
                   (* order order)
                   (* order (dec order)))]
    (/ (count (edges g)) possible)))
(defn loners
  "Returns nodes with no connections to other nodes (i.e., isolated nodes)"
  [g]
  (let [degree-total (if (directed? g)
                       (fn [n] (+ (in-degree g n) (out-degree g n)))
                       (fn [n] (out-degree g n)))]
    (filter #(zero? (degree-total %)) (nodes g))))
(defn distinct-edges
  "Returns the distinct edges of g. Only useful for undirected graphs"
  [g]
  (if (directed? g)
    (edges g)
    ;; In an undirected graph each edge appears once per direction; keep the
    ;; first occurrence of each unordered node pair.
    (loop [es (seq (edges g))
           seen #{}
           acc []]
      (if es
        (let [e (first es)
              eset (set (take 2 e))]
          (if (seen eset)
            (recur (next es) seen acc)
            (recur (next es) (conj seen eset) (conj acc e))))
        acc))))
(defn bipartite-color
  "Attempts a two-coloring of graph g. When successful, returns a map of
  nodes to colors (1 or 0). Otherwise, returns nil."
  [g]
  (letfn [(color-component [coloring start]
            (loop [coloring (assoc coloring start 1)
                   queue (conj #?(:clj clojure.lang.PersistentQueue/EMPTY
                                  :cljs cljs.core/PersistentQueue.EMPTY) start)]
              (if (empty? queue)
                coloring
                (let [v (peek queue)
                      color (- 1 (coloring v))
                      nbrs (graph/successors g v)]
                  (if (some #(and (coloring %) (= (coloring v) (coloring %)))
                            nbrs)
                    ;; Conflict: a neighbor already carries v's color, so the
                    ;; graph is not two-colorable. (This nil branch had been
                    ;; lost, which inverted the test.)
                    nil
                    (let [nbrs (remove coloring nbrs)]
                      (recur (into coloring (for [nbr nbrs] [nbr color]))
                             (into (pop queue) nbrs))))))))]
    ;; Color each component in turn; bail out with nil on the first conflict.
    (loop [[node & nodes] (seq (nodes g))
           coloring {}]
      (when coloring
        (if (nil? node)
          coloring
          (if (coloring node)
            (recur nodes coloring)
            (recur nodes (color-component coloring node))))))))
(defn bipartite?
  "Returns true if g is bipartite"
  [g]
  ;; bipartite-color yields a coloring map on success, nil on failure.
  (some? (bipartite-color g)))
(defn bipartite-sets
  "Returns two sets of nodes, one for each color of the bipartite coloring,
  or nil if g is not bipartite"
  [g]
  (when-let [coloring (bipartite-color g)]
    ;; Partition the coloring map into the color-0 and color-1 node sets.
    (reduce-kv
     (fn [[s1 s2] node color]
       (if (zero? color)
         [(conj s1 node) s2]
         [s1 (conj s2 node)]))
     [#{} #{}]
     coloring)))
(defn- neighbor-colors
  "Given a putative coloring of a graph, returns the colors of all the
  neighbors of a given node."
  [g node coloring]
  (let [succs (graph/successors g node)
        ;; For directed graphs, a neighbor is either a successor or a
        ;; predecessor.
        nbrs (if (directed? g)
               (concat succs (graph/predecessors g node))
               succs)]
    (set (keep #(get coloring %) nbrs))))
(defn coloring?
  "Returns true if a map of nodes to colors is a proper coloring of a graph."
  [g coloring]
  (let [ns (nodes g)
        ;; proper: no node shares its color with a neighbor
        proper? (every? (fn [node]
                          (not (contains? (neighbor-colors g node coloring)
                                          (coloring node))))
                        ns)
        ;; total: every node has been assigned some color
        all-colored? (every? #(some? (get coloring %)) ns)]
    (and proper? all-colored?)))
(defn greedy-coloring
  "Greedily color the vertices of a graph using the first-fit heuristic.
  Returns a map of nodes to colors (0, 1, ...)."
  [g]
  ;; Visit nodes in breadth-first order; give each node the smallest color
  ;; not used by an already-colored neighbor, minting a new color when all
  ;; existing ones are taken.
  (loop [node-seq (bf-traverse g)
         coloring {}
         colors #{}]
    (if (empty? node-seq)
      coloring
      (let [node (first node-seq)
            possible-colors (clj.set/difference colors
                                                (neighbor-colors g
                                                                 node
                                                                 coloring))
            node-color (if (empty? possible-colors)
                         (count colors) ;; next unused color index
                         (apply min possible-colors))]
        (recur (rest node-seq)
               (conj coloring [node node-color])
               (conj colors node-color))))))
(defn max-flow
  "Returns [flow-map flow-value], where flow-map is a weighted adjacency map
  representing the maximum flow. The argument should be a weighted digraph,
  where the edge weights are flow capacities. Source and sink are the vertices
  representing the flow source and sink vertices. Optionally, pass in
  :method :algorithm to use. Currently, the only option is :edmonds-karp ."
  [g source sink & {:keys [method] :or {method :edmonds-karp}}]
  ;; Adapt the graph to the successor/predecessor/capacity functions the
  ;; flow namespace expects, then dispatch on the requested algorithm.
  (let [method-set #{:edmonds-karp}
        n (graph/successors g),   ;; neighbor (successor) fn
        i (predecessors g),       ;; incoming (predecessor) fn
        c (graph/weight g),       ;; capacity fn
        s source,
        t sink
        [flow-map flow-value] (case method
                                :edmonds-karp (flow/edmonds-karp n i c s t)
                                (throw
                                 (ex-info
                                  (str "Method not found. Choose from: "
                                       method-set)
                                  {:method-set method-set})))]
    [flow-map flow-value]))
;; convenience functions for mst algo
(defn- edge-weights
  "Wrapper function to return edges along with weights for a given graph.
  For un-weighted graphs a default value of one is produced. Each element
  has the shape [neighbor [v weight]]."
  [wg v]
  (let [w (if (weighted? wg)
            (fn [from to] (weight wg from to))
            (constantly 1))]
    (map (fn [u] [u [v (w v u)]])
         (successors wg v))))
(defn prim-mst-edges
  "An edge-list of an minimum spanning tree along with weights that
  represents an MST of the given graph. Returns the MST edge-list
  for un-weighted graphs."
  ([wg]
   ;; Entry point: run the accumulating arity over all nodes, then strip the
   ;; (uniform) weights off again for unweighted graphs.
   (cond
     (directed? wg) (throw (#?(:clj Exception. :cljs js/Error)
                            "Spanning tree only defined for undirected graphs"))
     :else (let [mst (prim-mst-edges wg (nodes wg) nil #{} [])]
             (if (weighted? wg)
               mst
               (map #(vec [(first %1) (second %1)]) mst)))))
  ;; n: nodes not yet spanned; h: priority map frontier-node -> [parent wt]
  ;; ordered by the cheapest connecting edge; visited: nodes already in the
  ;; tree; acc: accumulated [u v wt] MST edges.
  ([wg n h visited acc]
   (cond
     (empty? n) acc
     ;; Frontier exhausted: start a new tree from an arbitrary remaining
     ;; node (disconnected graphs thus yield a spanning forest).
     (empty? h) (let [v (first n)
                      h (into (pm/priority-map-keyfn second) (edge-weights wg v))]
                  (recur wg (disj n v) h (conj visited v) acc))
     :else (let [next_edge (peek h)
                 u (first (second next_edge))
                 v (first next_edge)
                 ;; Keep, per frontier node, the lightest connecting edge
                 ;; seen so far.
                 update-dist (fn [h [v [u wt]]]
                               (cond
                                 (nil? (get h v)) (assoc h v [u wt])
                                 (> (second (get h v)) wt) (assoc h v [u wt])
                                 :else h))]
             (let [wt (second (second next_edge))
                   visited (conj visited v)
                   ;; Relax the frontier with v's edges to unvisited nodes.
                   h (reduce update-dist (pop h)
                             (filter #((complement visited) (first %) )
                                     (edge-weights wg v)))]
               (recur wg (disj n v) h (conj visited v) (conj acc [u v wt])))))))
(defn prim-mst
  "Minimum spanning tree of given graph. If the graph contains more than one
  component then returns a spanning forest of minimum spanning trees."
  [wg]
  (let [mst (apply graph/weighted-graph (prim-mst-edges wg))
        ;; Isolated nodes never appear in an edge list, so they must be
        ;; re-added explicitly.
        isolated (filter #(zero? (out-degree wg %)) (nodes wg))]
    (if (= (count (nodes wg)) (count (nodes mst)))
      mst
      (apply add-nodes mst isolated))))
(defn astar-path
  "Returns the shortest path using A* algorithm. Returns a map of predecessors."
  ([g src target heur]
   ;; q: priority map node -> [f parent g h], ordered by f = g + h.
   ;; explored: node -> parent for settled nodes.
   (let [heur (if (nil? heur) (fn [x y] 0) heur) ;; no heuristic => Dijkstra
         q (pm/priority-map-keyfn first src [0 nil 0 0])
         explored (hash-map)]
     (astar-path g src target heur q explored)))
  ([g src target heur q explored]
   (cond
     (empty? q) (throw (ex-info "Target not reachable from source" {}))
     ;; Target settled: rebuild the predecessor map by walking parents back
     ;; to src (src maps to nil).
     (= (first (peek q)) target) (let [u (first (peek q))
                                       parent ((second (peek q)) 1)
                                       explored (assoc explored target parent)
                                       path (loop [s target acc {}]
                                              (cond
                                                (nil? s) acc
                                                (= s src) (assoc acc s nil)
                                                :else (recur (explored s)
                                                             (assoc acc s (explored s)))))]
                                   path)
     :else (let
            [curr-node (first (peek q))
             curr-dist ((second (peek q)) 2) ;; g-score of current node
             explored (assoc explored curr-node ((second (peek q)) 1))
             nbrs (remove (into #{} (keys explored)) (successors g curr-node))
             ;; Relax neighbor v through curr-node when that lowers its
             ;; g-score; h is computed once and cached in the queue entry.
             update-dist (fn [curr-node curr-dist q v]
                           (let [act (+ curr-dist
                                        (if (weighted? g) (weight g curr-node v) 1))
                                 est (if (nil? (get q v))
                                       (heur v target) ((get q v) 3))]
                             (cond
                               (or (nil? (get q v))
                                   (> ((get q v) 2) act))
                               (assoc q v [(+ act est) curr-node act est])
                               :else q)))
             q (reduce (partial update-dist curr-node curr-dist) (pop q)
                       nbrs)]
             (recur g src target heur q explored)))))
(defn astar-dist
  "Returns the length of the shortest path between src and target using
  the A* algorithm"
  [g src target heur]
  (let [path (astar-path g src target heur)
        ;; v is u's predecessor in the path map, so the traversed edge is
        ;; v -> u.
        edge-w (fn [u v]
                 (if (weighted? g) (weight g v u) 1))]
    (reduce (fn [total [u v]]
              (if (nil? v)
                total
                (+ total (edge-w u v))))
            0
            path)))
(defn degeneracy-ordering
  "Returns sequence of vertices in degeneracy order."
  [g]
  ;; Repeatedly remove a minimum-degree node (priority map keeps the
  ;; remaining nodes ordered by current degree). The recur form here had
  ;; been garbled (referencing undefined names); restored to update the
  ;; priority map and the running degeneracy bound k.
  (loop [ordered-nodes []
         node-degs (->> (zipmap (nodes g)
                                (map (partial out-degree g) (nodes g)))
                        (into (pm/priority-map)))
         k 0]
    (if (empty? node-degs)
      ordered-nodes
      (let [[n deg] (first node-degs)
            ;; Adjacent nodes still in node-degs (not yet ordered), each with
            ;; its remaining degree decremented by 1.
            updated-degs (->> (map (juxt identity node-degs) (successors g n))
                              (filter second)
                              (map (juxt first (comp dec second)))
                              (into {}))]
        (recur (conj ordered-nodes n)
               (into (dissoc node-degs n) updated-degs)
               (max k deg))))))
(defn- bk-gen
  "Pushes the child states of Bron-Kerbosch node [r p x] onto stack,
  pivoting on the max-degree vertex of p so that neighbors of the pivot
  are skipped as branch candidates."
  [g [r p x] stack]
  (let [v-pivot (reduce (partial max-key (partial out-degree g)) p)]
    (loop [v v-pivot
           p (set p)
           x (set x)
           stack stack]
      (if (nil? v)
        stack
        (let [succ-v (set (successors g v))]
          ;; Next candidate: a member of p (minus v) that is not a neighbor
          ;; of the pivot.
          (recur (-> (clj.set/difference (disj p v)
                                         (set (successors g v-pivot)))
                     first)
                 (disj p v)
                 (conj x v)
                 ;; Child state restricted to v's neighborhood.
                 (conj stack [(conj r v)
                              (clj.set/intersection p succ-v)
                              (clj.set/intersection x succ-v)])))))))
(defn- bk
  "An iterative implementation of Bron-Kerbosch using degeneracy ordering
  at the outer loop and max-degree vertex pivoting in the inner loop."
  [g]
  (loop [vs (degeneracy-ordering g)
         max-clqs (seq [])
         p (set (nodes g))
         x #{}
         stack []]
    (cond
      ;; All outer vertices processed and no pending states: done.
      (and (empty? stack) (empty? vs))
      max-clqs

      ;; Stack empty: seed it with the next outer vertex's initial state.
      (empty? stack)
      (let [v (first vs)
            succ-v (set (successors g v))]
        (recur (rest vs)
               max-clqs
               (disj p v)
               (conj x v)
               [[#{v}
                 (clj.set/intersection p succ-v)
                 (clj.set/intersection x succ-v)]]))

      :else
      (let [[r s-p s-x] (peek stack)]
        (cond
          ;; Maximal clique found
          ;; (this comment had lost its ";;" marker, which shifted the cond
          ;; clauses below it)
          (and (empty? s-p) (empty? s-x))
          (recur vs
                 (cons r max-clqs)
                 p
                 x
                 (pop stack))

          ;; No candidates left but x non-empty: dead end, discard.
          (empty? s-p)
          (recur vs
                 max-clqs
                 p
                 x
                 (pop stack))

          ;; Expand this state's children onto the stack.
          :else
          (recur vs
                 max-clqs
                 p
                 x
                 (bk-gen g [r s-p s-x] (pop stack))))))))
(defn maximal-cliques
  "Enumerate the maximal cliques using Bron-Kerbosch."
  [g]
  ;; Thin public wrapper over the private iterative implementation; yields a
  ;; seq of node sets, one per maximal clique.
  (bk g))
(defn subgraph?
  "Returns true iff g1 is a subgraph of g2. An undirected graph is never
  considered as a subgraph of a directed graph and vice versa."
  [g1 g2]
  (let [same-directedness? (= (directed? g1) (directed? g2))
        ;; For undirected graphs an edge may be stored in either direction.
        edge-present? (if (directed? g1)
                        (fn [x y] (graph/has-edge? g2 x y))
                        (fn [x y] (or (graph/has-edge? g2 x y)
                                      (graph/has-edge? g2 y x))))]
    (and same-directedness?
         (every? #(graph/has-node? g2 %) (nodes g1))
         (every? (fn [[x y]] (edge-present? x y))
                 (edges g1)))))
(defn eql?
  "Returns true iff g1 is a subgraph of g2 and g2 is a subgraph of g1"
  [g1 g2]
  ;; Mutual inclusion: the graphs have the same nodes and the same
  ;; (direction-aware) edges.
  (and (subgraph? g1 g2)
       (subgraph? g2 g1)))
(defn isomorphism?
  "Given a mapping phi between the vertices of two graphs, determine
  if the mapping is an isomorphism, e.g., {(phi x), (phi y)} connected
  in g2 iff {x, y} are connected in g1."
  [g1 g2 phi]
  ;; Build the image of g1 under phi (preserving g1's directedness) and test
  ;; it for graph equality with g2.
  (eql? g2 (-> (if (directed? g1) (digraph) (graph))
               (graph/add-nodes* (map phi (nodes g1)))
               (graph/add-edges* (map (fn [[x y]] [(phi x) (phi y)])
                                      (edges g1))))))
|
fb27be4b4f34aa705b49a9596c3a1664201113731472addc3a5f042d45f91957 | expipiplus1/vulkan | VK_GOOGLE_decorate_string.hs | {-# language CPP #-}
-- | = Name
--
-- VK_GOOGLE_decorate_string - device extension
--
-- == VK_GOOGLE_decorate_string
--
-- [__Name String__]
-- @VK_GOOGLE_decorate_string@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 225
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- -   Requires support for Vulkan 1.0
--
-- [__Contact__]
--
-
-- <-Docs/issues/new?body=[VK_GOOGLE_decorate_string] @chaoticbob%0A*Here describe the issue or question you have about the VK_GOOGLE_decorate_string extension* >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2018-07-09
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
< >
--
-- [__Contributors__]
--
-- -   Hai Nguyen, Google
--
-- -   Neil Henning, AMD
--
-- == Description
--
-- The @VK_GOOGLE_decorate_string@ extension allows use of the
-- @SPV_GOOGLE_decorate_string@ extension in SPIR-V shader modules.
--
-- == New Enum Constants
--
-- - 'GOOGLE_DECORATE_STRING_EXTENSION_NAME'
--
-- - 'GOOGLE_DECORATE_STRING_SPEC_VERSION'
--
-- == Version History
--
-- -   Revision 1, 2018-07-09
--
-- - Initial draft
--
-- == See Also
--
-- No cross-references are available
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_GOOGLE_decorate_string Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_GOOGLE_decorate_string ( GOOGLE_DECORATE_STRING_SPEC_VERSION
                                                   , pattern GOOGLE_DECORATE_STRING_SPEC_VERSION
                                                   , GOOGLE_DECORATE_STRING_EXTENSION_NAME
                                                   , pattern GOOGLE_DECORATE_STRING_EXTENSION_NAME
                                                   ) where

import Data.String (IsString)

type GOOGLE_DECORATE_STRING_SPEC_VERSION = 1

-- No documentation found for TopLevel "VK_GOOGLE_DECORATE_STRING_SPEC_VERSION"
pattern GOOGLE_DECORATE_STRING_SPEC_VERSION :: forall a . Integral a => a
pattern GOOGLE_DECORATE_STRING_SPEC_VERSION = 1

type GOOGLE_DECORATE_STRING_EXTENSION_NAME = "VK_GOOGLE_decorate_string"

-- No documentation found for TopLevel "VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME"
pattern GOOGLE_DECORATE_STRING_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern GOOGLE_DECORATE_STRING_EXTENSION_NAME = "VK_GOOGLE_decorate_string"
| null | https://raw.githubusercontent.com/expipiplus1/vulkan/b1e33d1031779b4740c279c68879d05aee371659/src/Vulkan/Extensions/VK_GOOGLE_decorate_string.hs | haskell | # language CPP #
| = Name
VK_GOOGLE_decorate_string - device extension
== VK_GOOGLE_decorate_string
[__Name String__]
@VK_GOOGLE_decorate_string@
[__Extension Type__]
Device extension
[__Registered Extension Number__]
[__Revision__]
[__Extension and Version Dependencies__]
[__Contact__]
<-Docs/issues/new?body=[VK_GOOGLE_decorate_string] @chaoticbob%0A*Here describe the issue or question you have about the VK_GOOGLE_decorate_string extension* >
== Other Extension Metadata
[__Last Modified Date__]
[__IP Status__]
No known IP claims.
[__Interactions and External Dependencies__]
- This extension requires
[__Contributors__]
== Description
The @VK_GOOGLE_decorate_string@ extension allows use of the
@SPV_GOOGLE_decorate_string@ extension in SPIR-V shader modules.
== New Enum Constants
- 'GOOGLE_DECORATE_STRING_EXTENSION_NAME'
- 'GOOGLE_DECORATE_STRING_SPEC_VERSION'
== Version History
- Initial draft
== See Also
No cross-references are available
== Document Notes
For more information, see the
This page is a generated document. Fixes and changes should be made to
the generator scripts, not directly. | 225
1
- Requires support for Vulkan 1.0
-
2018 - 07 - 09
< >
- , Google
- , AMD
- Revision 1 , 2018 - 07 - 09 ( )
< -extensions/html/vkspec.html#VK_GOOGLE_decorate_string Vulkan Specification >
module Vulkan.Extensions.VK_GOOGLE_decorate_string ( GOOGLE_DECORATE_STRING_SPEC_VERSION
, pattern GOOGLE_DECORATE_STRING_SPEC_VERSION
, GOOGLE_DECORATE_STRING_EXTENSION_NAME
, pattern GOOGLE_DECORATE_STRING_EXTENSION_NAME
) where
import Data.String (IsString)
type GOOGLE_DECORATE_STRING_SPEC_VERSION = 1
No documentation found for TopLevel " VK_GOOGLE_DECORATE_STRING_SPEC_VERSION "
pattern GOOGLE_DECORATE_STRING_SPEC_VERSION :: forall a . Integral a => a
pattern GOOGLE_DECORATE_STRING_SPEC_VERSION = 1
type GOOGLE_DECORATE_STRING_EXTENSION_NAME = "VK_GOOGLE_decorate_string"
No documentation found for TopLevel " VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "
pattern GOOGLE_DECORATE_STRING_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern GOOGLE_DECORATE_STRING_EXTENSION_NAME = "VK_GOOGLE_decorate_string"
|
e9aa7ed1b3228d8561160826459375e755af5dd53ffb48c2f09f1bc5ff7502c8 | milankinen/cuic | todomvc_tests_without_local_server.clj | (ns cuic-examples.todomvc-tests-without-local-server
(:require [clojure.test :refer :all]
[cuic.core :as c]
[cuic.test :refer [deftest* is* browser-test-fixture]]))
;; Launch one shared browser instance for every test in this namespace
;; (:once fixture) instead of starting a fresh browser per test.
(use-fixtures
  :once
  (browser-test-fixture))
(defn todos
  "Returns the text content of each rendered todo item, in list order."
  []
  (map c/text-content (c/query ".todo-list li")))
(defn add-todo
  "Types text into the new-todo input and submits it with Enter."
  [text]
  (let [input (c/find ".new-todo")]
    (c/fill input text)
    (c/press 'Enter)))
(deftest* creating-new-todos
  ;; NOTE(review): the goto URL is empty here — the target address appears
  ;; to have been lost; confirm against the original example (TodoMVC app).
  (c/goto "")
  ;; Start empty, then verify the rendered list after each insertion.
  (is* (= [] (todos)))
  (add-todo "Hello world!")
  (is* (= ["Hello world!"] (todos)))
  (add-todo "Tsers!")
  (is* (= ["Hello world!" "Tsers!"] (todos))))
| null | https://raw.githubusercontent.com/milankinen/cuic/94718c0580da2aa127d967207f163c7a546b6fb1/examples/test/cuic_examples/todomvc_tests_without_local_server.clj | clojure | (ns cuic-examples.todomvc-tests-without-local-server
(:require [clojure.test :refer :all]
[cuic.core :as c]
[cuic.test :refer [deftest* is* browser-test-fixture]]))
(use-fixtures
:once
(browser-test-fixture))
(defn todos []
(->> (c/query ".todo-list li")
(map c/text-content)))
(defn add-todo [text]
(doto (c/find ".new-todo")
(c/fill text))
(c/press 'Enter))
(deftest* creating-new-todos
(c/goto "")
(is* (= [] (todos)))
(add-todo "Hello world!")
(is* (= ["Hello world!"] (todos)))
(add-todo "Tsers!")
(is* (= ["Hello world!" "Tsers!"] (todos))))
| |
e3d6152dcf98f469b778cf4941985188a3915ab139a048d3aa2ed4b931a4c4ae | google/haskell-trainings | ColorSolution.hs | Copyright 2021 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
--     http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
module ColorSolution where
import Internal (codelab)
-- SECTION 1: Color (and Bounded)
--
-- Our game, being a coding exercise, works with the six dev colors: red,
-- yellow, green, cyan, blue, and magenta.
-- The six colors used by the game.
data Color
  = Red     -- this is a constructor, of type Color
            -- (the "= Red" itself had been lost in extraction, leaving the
            -- type without a first constructor)
  | Yellow  -- same here
  | Green
  | Cyan
  | Blue
  | Magenta
  deriving
    ( Ord -- the compiler automagically generates the instances for these
    , Eq
    , Enum
    , Bounded
    )
-- We want to have a list of all the colors. We could write such a list
-- manually, but that'd be cumbersome and error-prone. Thankfully, lists
-- support interpolation! The [a .. b] syntax is translated into a call to
-- enumFromTo (defined in the Prelude). Here, all you have to do is
-- figure out which color is the minimum color, and which is the max. Some
-- other functions might help you?
-- Every color, from the smallest to the largest, via list interpolation.
allColors :: [Color]
allColors = [minColor .. maxColor] -- this is enumFromTo
                                   -- (the defining equation had been lost
                                   -- in extraction; restored here)
  where
    minColor = Red
    maxColor = Magenta
-- We should also provide a way to display values of type Color.
-- Let's make `show` return only the first letter of the color's name.
-- Render a color as the first letter of its name.
instance Show Color where
  show c = case c of
    Red     -> "R"
    Yellow  -> "Y"
    Green   -> "G"
    Cyan    -> "C"
    Blue    -> "B"
    Magenta -> "M"
-- We will not write the Read instance to convert a String to a Color because
-- read is partial and we want to handle the error case ourselves (see section
-- 3).
| null | https://raw.githubusercontent.com/google/haskell-trainings/214013fc324fd6c8f63b874a58ead0c1d3e6788c/haskell_102/codelab/01_mastermind/src/ColorSolution.hs | haskell |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
yellow, green, cyan, blue, and magenta.
same here
the compiler automagically generates the instances for these
We want to have a list of all the colors. We could write such a list
manually, but that'd be cumbersome and error-prone. Thankfully, lists
support interpolation! The [a .. b] syntax is translated into a call to
figure out which color is the minimum color, and which is the max. Some
read is partial and we want to handle the error case ourselves (see section | Copyright 2021 Google LLC
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - type - defaults #
module ColorSolution where
import Internal (codelab)
SECTION 1 : Color ( and Bounded )
Our game , being a coding exercise , works with the six dev colors : red ,
data Color
this is a constructor , of type Color
| Green
| Cyan
| Blue
| Magenta
deriving
, Eq
, Enum
, Bounded
)
enumFromTo ( defined in the ) . Here , all you have to do is
other might help you ?
allColors :: [Color]
this is enumFromTo
where
minColor = Red
maxColor = Magenta
We should also provide a way to display values of type Color .
Let 's make ` show ` return only the first letter of the color 's name .
instance Show Color where
show Red = "R"
show Yellow = "Y"
show Green = "G"
show Cyan = "C"
show Blue = "B"
show Magenta = "M"
We will not write the Read instance to convert a String to a Color because
3 ) .
|
d4ca0a81264b193fda34371bc9d1b7051f4adeae6ee0f1361feb80ac8bf55890 | amccausl/Swish | RDFQueryTest.hs | --------------------------------------------------------------------------------
--  $Id: RDFQueryTest.hs,v 1.23 2004/01/07 19:49:13 graham Exp $
--
--  Copyright (c) 2003, Graham Klyne.  All rights reserved.
--  See end of this file for licence information.
--------------------------------------------------------------------------------
-- |
--  Module      :  RDFQueryTest
--  Copyright   :  (c) 2003, Graham Klyne
--  License     :  GPL V2
--
--  Maintainer  :  Graham Klyne
--  Stability   :  provisional
--  Portability :  H98 + multi-parameter classes
--
--  This module defines test cases for querying an RDF graph to obtain
--  a set of variable substitutions, and to apply a set of variable
--  substitutions to a query pattern to obtain a new graph.
--
--  It also tests some primitive graph access functions.
--
--------------------------------------------------------------------------------
-- WNH RIP OUT
-- (the comment text above had been fused into the module header, making it
-- unparseable; restored as a separate comment)
module Swish.HaskellRDF.RDFQueryTest
where
import Swish.HaskellRDF.RDFQuery
( rdfQueryFind, rdfQueryFilter
, rdfQueryBack, rdfQueryBackFilter, rdfQueryBackModify
, rdfQueryInstance
, rdfQuerySubs, rdfQueryBackSubs
, rdfQuerySubsAll
, rdfQuerySubsBlank, rdfQueryBackSubsBlank
, rdfFindArcs, rdfSubjEq, rdfPredEq, rdfObjEq, rdfFindPredVal
, rdfFindValSubj, rdfFindPredVal, rdfFindPredInt, rdfFindList
-- debug
, rdfQuerySubs2
)
import Swish.HaskellRDF.RDFVarBinding
( RDFVarBinding, nullRDFVarBinding
, RDFVarBindingModify, RDFVarBindingFilter
, rdfVarBindingUriRef, rdfVarBindingBlank
, rdfVarBindingLiteral
, rdfVarBindingUntypedLiteral, rdfVarBindingTypedLiteral
, rdfVarBindingXMLLiteral, rdfVarBindingDatatyped
, rdfVarBindingMemberProp
)
import Swish.HaskellRDF.RDFGraph
( Arc(..), arcSubj
, RDFGraph, RDFLabel(..)
, isLiteral, isBlank, isQueryVar, makeBlank
, setArcs, getArcs, addArc, add, delete, extract, labels, merge
, allLabels, remapLabels
, mapnode, maplist
, res_rdf_type, res_rdf_first, res_rdf_rest, res_rdf_nil
)
import Swish.HaskellRDF.VarBinding
( VarBinding(..), nullVarBinding
, boundVars, subBinding, makeVarBinding
, applyVarBinding, joinVarBindings
, VarBindingModify(..)
, vbmCompatibility, vbmCompose
, findCompositions, findComposition
, VarBindingFilter(..)
, makeVarFilterModify
, makeVarTestFilter, makeVarCompareFilter
, varBindingId, varFilterDisjunction, varFilterConjunction
, varFilterEQ, varFilterNE
)
import Swish.HaskellUtils.Namespace
( Namespace(..)
, makeNamespaceQName
, ScopedName(..)
, getQName
, makeScopedName
)
import Swish.HaskellRDF.Vocabulary
( namespaceRDF
, namespaceXSD
, namespaceLang, langName
, swishName
, rdf_type, rdf_XMLLiteral
, xsd_boolean, xsd_integer
)
import Swish.HaskellRDF.N3Parser
( ParseResult(..), parseN3fromString )
import Swish.HaskellUtils.QName
( QName(..) )
import Swish.HaskellUtils.ListHelpers
( equiv )
import Swish.HaskellUtils.ErrorM
( ErrorM(Error,Result) )
import Test.HUnit
( Test(TestCase,TestList,TestLabel)
, assertBool, assertEqual, assertString
, runTestTT, runTestText, putTextToHandle )
import System.IO
( Handle, IOMode(WriteMode)
, openFile, hClose, hPutStr, hPutStrLn )
import Data.Maybe
( isJust, fromJust )
------------------------------------------------------------
-- misc helpers
------------------------------------------------------------
-- | List wrapper compared by set-equivalence: two 'Set's are equal
--   when each list's members all occur in the other (via 'equiv'),
--   ignoring order and duplicates.  'Show' is derived, so failure
--   messages print the raw list.
newtype Set a = Set [a] deriving Show

instance (Eq a) => Eq (Set a) where
    (Set xs) == (Set ys) = xs `equiv` ys
-- | Build a labelled HUnit test case from a Boolean assertion.
test :: String -> Bool -> Test
test lab = TestCase . assertBool lab
-- | Build a labelled HUnit test asserting that the actual value
--   equals the expected value.
testEq :: (Eq a, Show a) => String -> a -> a -> Test
testEq lab expected actual = TestCase (assertEqual lab expected actual)
-- | Build a labelled HUnit test comparing two lists as sets
--   (order- and duplicate-insensitive, via the 'Set' wrapper).
testLs :: (Eq a, Show a) => String -> [a] -> [a] -> Test
testLs lab expected actual =
    TestCase (assertEqual lab (Set expected) (Set actual))
-- | Assert that the graph parsed from the N3 string @e@ occurs
--   somewhere among the result graphs @a@.
testGr :: String -> String -> [RDFGraph] -> Test
testGr lab e a =
    let eg = graphFromString e
    in  TestCase $ assertBool lab (eg `elem` a)
-- | Parse an N3 string into an 'RDFGraph'; calls 'error' on a
--   parse failure (acceptable in test fixture code).
graphFromString :: String -> RDFGraph
graphFromString str =
    case parseN3fromString str of
        Error msg -> error msg
        Result gr -> gr
-- Compare lists for set equivalence:
-- | Another set-equivalence list wrapper; unlike 'Set' it carries a
--   custom 'Show' instance that prints the underlying list directly.
data ListTest a = ListTest [a]

instance (Eq a) => Eq (ListTest a) where
    ListTest xs == ListTest ys = xs `equiv` ys

instance (Show a) => Show (ListTest a) where
    show (ListTest xs) = show xs
-- | Labelled set-equivalence comparison of two lists; the label is
--   prefixed with \"testEqv:\" in failure reports.
testEqv :: (Eq a, Show a) => String -> [a] -> [a] -> Test
testEqv lab a1 a2 =
    TestCase (assertEqual ("testEqv:" ++ lab) (ListTest a1) (ListTest a2))
------------------------------------------------------------
-- test1: simple query with URI, literal and blank nodes.
------------------------------------------------------------
-- test1 fixtures: a graph with a URI object, a plain literal and a
-- blank-node subject; the query matches ?s ex:p ?o and the result
-- template rewrites the predicate ex:p to ex:r.
-- NOTE(review): the prefix URI "</>" looks truncated by text
-- extraction -- confirm against the original source file.
prefix1 =
    "@prefix ex: </> . \n" ++
    " \n"
graph1 = graphFromString graph1str
graph1str = prefix1 ++
    "ex:s1 ex:p ex:o1 . \n" ++
    "ex:s2 ex:p \"lit1\" . \n" ++
    "[ ex:p ex:o3 ] . \n"
query11 = graphFromString query11str
query11str = prefix1 ++
    "?s ex:p ?o . \n"
result11 = graphFromString result11str
result11str = prefix1 ++
    "?s ex:r ?o . \n"
result11a = prefix1 ++
    "ex:s1 ex:r ex:o1 . \n"
result11b = prefix1 ++
    "ex:s2 ex:r \"lit1\" . \n"
result11c = prefix1 ++
    "[ ex:r ex:o3 ] . \n"
-- one variable binding per matching arc; three arcs match
var11 = rdfQueryFind query11 graph1
testQuery11 = test "testQuery11" (not $ null var11)
res11 = rdfQuerySubs var11 result11
testResult11 = testEq "testResult11" 3 (length res11)
testResult11a = testGr "testResult11a" result11a res11
testResult11b = testGr "testResult11b" result11b res11
testResult11c = testGr "testResult11c" result11c res11
test1 = TestList
    [ testQuery11, testResult11
    , testResult11a, testResult11b, testResult11c
    ]
------------------------------------------------------------
-- test2: a range of more complex queries based on a
-- single relationship graph.
------------------------------------------------------------
-- test2 fixtures: a three-generation family-relationship graph
-- (wife/son/daughter arcs) shared by queries 21..25 below and by
-- the instance-query tests in test6.
prefix2 =
    "@prefix pers: <urn:pers:> . \n" ++
    "@prefix rel: <urn:rel:> . \n" ++
    " \n"
graph2 = graphFromString graph2str
graph2str = prefix2 ++
    "pers:St1 rel:wife pers:Do1 ; \n" ++
    " rel:daughter pers:Ma2 ; \n" ++
    " rel:daughter pers:An2 . \n" ++
    "pers:Pa2 rel:wife pers:Ma2 ; \n" ++
    " rel:son pers:Gr3 ; \n" ++
    " rel:son pers:La3 ; \n" ++
    " rel:son pers:Si3 ; \n" ++
    " rel:son pers:Al3 . \n" ++
    "pers:Br2 rel:wife pers:Ri2 ; \n" ++
    " rel:daughter pers:Ma3 ; \n" ++
    " rel:son pers:Wi3 . \n" ++
    "pers:Gr3 rel:wife pers:Ma3 ; \n" ++
    " rel:son pers:Ro4 ; \n" ++
    " rel:daughter pers:Rh4 . \n" ++
    "pers:Si3 rel:wife pers:Jo3 ; \n" ++
    " rel:son pers:Ol4 ; \n" ++
    " rel:son pers:Lo4 . \n" ++
    "pers:Al3 rel:wife pers:Su3 ; \n" ++
    " rel:son pers:Ha4 ; \n" ++
    " rel:son pers:El4 . \n"
-- query21: invert wife arcs into husband arcs; six wife arcs in
-- graph2, so six result graphs are expected.
query21 = graphFromString query21str
query21str = prefix2 ++
    "?a rel:wife ?b . \n"
result21 = graphFromString result21str
result21str = prefix2 ++
    "?b rel:husband ?a . \n"
result21a = prefix2 ++
    "pers:Do1 rel:husband pers:St1 . \n"
result21b = prefix2 ++
    "pers:Ma2 rel:husband pers:Pa2 . \n"
result21c = prefix2 ++
    "pers:Ri2 rel:husband pers:Br2 . \n"
result21d = prefix2 ++
    "pers:Ma3 rel:husband pers:Gr3 . \n"
result21e = prefix2 ++
    "pers:Jo3 rel:husband pers:Si3 . \n"
result21f = prefix2 ++
    "pers:Su3 rel:husband pers:Al3 . \n"
var21 = rdfQueryFind query21 graph2
testQuery21 = test "testQuery21" (not $ null var21)
res21 = rdfQuerySubs var21 result21
testResult21 = testEq "testResult21" 6 (length res21)
testResult21a = testGr "testResult21a" result21a res21
testResult21b = testGr "testResult21b" result21b res21
testResult21c = testGr "testResult21c" result21c res21
testResult21d = testGr "testResult21d" result21d res21
testResult21e = testGr "testResult21e" result21e res21
testResult21f = testGr "testResult21f" result21f res21
-- query22: a two-arc join (son of son) deriving grandparent arcs;
-- only Pa2 has grandsons via sons, giving five results.
query22 = graphFromString query22str
query22str = prefix2 ++
    "?a rel:son ?b . \n" ++
    "?b rel:son ?c . \n"
result22 = graphFromString result22str
result22str = prefix2 ++
    "?a rel:grandparent ?c . \n"
result22a = prefix2 ++
    "pers:Pa2 rel:grandparent pers:Ro4 . \n"
result22b = prefix2 ++
    "pers:Pa2 rel:grandparent pers:Ol4 . \n"
result22c = prefix2 ++
    "pers:Pa2 rel:grandparent pers:Lo4 . \n"
result22d = prefix2 ++
    "pers:Pa2 rel:grandparent pers:Ha4 . \n"
result22e = prefix2 ++
    "pers:Pa2 rel:grandparent pers:El4 . \n"
var22 = rdfQueryFind query22 graph2
testQuery22 = test "testQuery22" (not $ null var22)
res22 = rdfQuerySubs var22 result22
testResult22 = testEq "testResult22" 5 (length res22)
testResult22a = testGr "testResult22a" result22a res22
testResult22b = testGr "testResult22b" result22b res22
testResult22c = testGr "testResult22c" result22c res22
testResult22d = testGr "testResult22d" result22d res22
testResult22e = testGr "testResult22e" result22e res22
-- query23: sons of a common parent are brothers.  With no
-- distinctness constraint on ?b/?c this also yields self-pairs
-- (e.g. Gr3 brother Gr3), giving 26 results; the filtered variant
-- below removes them.
query23 = graphFromString query23str
query23str = prefix2 ++
    "?a rel:son ?b . \n" ++
    "?a rel:son ?c . \n"
result23 = graphFromString result23str
result23str = prefix2 ++
    "?b rel:brother ?c . \n"
result23a = prefix2 ++
    "pers:Gr3 rel:brother pers:Gr3 . \n"
result23b = prefix2 ++
    "pers:Gr3 rel:brother pers:La3 . \n"
result23c = prefix2 ++
    "pers:Gr3 rel:brother pers:Si3 . \n"
result23d = prefix2 ++
    "pers:Gr3 rel:brother pers:Al3 . \n"
result23e = prefix2 ++
    "pers:La3 rel:brother pers:Gr3 . \n"
result23f = prefix2 ++
    "pers:La3 rel:brother pers:La3 . \n"
result23g = prefix2 ++
    "pers:La3 rel:brother pers:Si3 . \n"
result23h = prefix2 ++
    "pers:La3 rel:brother pers:Al3 . \n"
result23i = prefix2 ++
    "pers:Si3 rel:brother pers:Gr3 . \n"
result23j = prefix2 ++
    "pers:Si3 rel:brother pers:La3 . \n"
result23k = prefix2 ++
    "pers:Si3 rel:brother pers:Si3 . \n"
result23l = prefix2 ++
    "pers:Si3 rel:brother pers:Al3 . \n"
result23m = prefix2 ++
    "pers:Al3 rel:brother pers:Gr3 . \n"
result23n = prefix2 ++
    "pers:Al3 rel:brother pers:La3 . \n"
result23o = prefix2 ++
    "pers:Al3 rel:brother pers:Si3 . \n"
result23p = prefix2 ++
    "pers:Al3 rel:brother pers:Al3 . \n"
result23q = prefix2 ++
    "pers:Wi3 rel:brother pers:Wi3 . \n"
result23r = prefix2 ++
    "pers:Ro4 rel:brother pers:Ro4 . \n"
result23s = prefix2 ++
    "pers:Ol4 rel:brother pers:Lo4 . \n"
result23t = prefix2 ++
    "pers:Ol4 rel:brother pers:Ol4 . \n"
result23u = prefix2 ++
    "pers:Lo4 rel:brother pers:Lo4 . \n"
result23v = prefix2 ++
    "pers:Lo4 rel:brother pers:Ol4 . \n"
result23w = prefix2 ++
    "pers:Ha4 rel:brother pers:El4 . \n"
result23x = prefix2 ++
    "pers:Ha4 rel:brother pers:Ha4 . \n"
result23y = prefix2 ++
    "pers:El4 rel:brother pers:El4 . \n"
result23z = prefix2 ++
    "pers:El4 rel:brother pers:Ha4 . \n"
var23 = rdfQueryFind query23 graph2
testQuery23 = test "testQuery23" (not $ null var23)
res23 = rdfQuerySubs var23 result23
testResult23 = testEq "testResult23" 26 (length res23)
testResult23a = testGr "testResult23a" result23a res23
testResult23b = testGr "testResult23b" result23b res23
testResult23c = testGr "testResult23c" result23c res23
testResult23d = testGr "testResult23d" result23d res23
testResult23e = testGr "testResult23e" result23e res23
testResult23f = testGr "testResult23f" result23f res23
testResult23g = testGr "testResult23g" result23g res23
testResult23h = testGr "testResult23h" result23h res23
testResult23i = testGr "testResult23i" result23i res23
testResult23j = testGr "testResult23j" result23j res23
testResult23k = testGr "testResult23k" result23k res23
testResult23l = testGr "testResult23l" result23l res23
testResult23m = testGr "testResult23m" result23m res23
testResult23n = testGr "testResult23n" result23n res23
testResult23o = testGr "testResult23o" result23o res23
testResult23p = testGr "testResult23p" result23p res23
testResult23q = testGr "testResult23q" result23q res23
testResult23r = testGr "testResult23r" result23r res23
testResult23s = testGr "testResult23s" result23s res23
testResult23t = testGr "testResult23t" result23t res23
testResult23u = testGr "testResult23u" result23u res23
testResult23v = testGr "testResult23v" result23v res23
testResult23w = testGr "testResult23w" result23w res23
testResult23x = testGr "testResult23x" result23x res23
testResult23y = testGr "testResult23y" result23y res23
testResult23z = testGr "testResult23z" result23z res23
-- apply filtering to result: require ?b /= ?c, which drops the ten
-- self-pair bindings from the 26 above, leaving 16.
filter23 = varFilterNE (Var "b") (Var "c") :: RDFVarBindingFilter
var23F = rdfQueryFilter filter23 var23
res23F = rdfQuerySubs var23F result23
-- Tests on the filtered (?b /= ?c) query23 results.
-- FIX: the HUnit labels were copy-pasted from the unfiltered tests
-- ("testResult23", "testResult23b", ...), so failures reported the
-- wrong test name; labels now match the binding names.
testResult23F = testEq "testResult23F" 16 (length res23F)
testResult23bF = testGr "testResult23bF" result23b res23F
testResult23cF = testGr "testResult23cF" result23c res23F
testResult23dF = testGr "testResult23dF" result23d res23F
testResult23eF = testGr "testResult23eF" result23e res23F
testResult23gF = testGr "testResult23gF" result23g res23F
testResult23hF = testGr "testResult23hF" result23h res23F
testResult23iF = testGr "testResult23iF" result23i res23F
testResult23jF = testGr "testResult23jF" result23j res23F
testResult23lF = testGr "testResult23lF" result23l res23F
testResult23mF = testGr "testResult23mF" result23m res23F
testResult23nF = testGr "testResult23nF" result23n res23F
testResult23oF = testGr "testResult23oF" result23o res23F
testResult23sF = testGr "testResult23sF" result23s res23F
testResult23vF = testGr "testResult23vF" result23v res23F
testResult23wF = testGr "testResult23wF" result23w res23F
testResult23zF = testGr "testResult23zF" result23z res23F
-- query24: daughters of a common parent are sisters (self-pairs
-- included, as in query23); six results expected.
query24 = graphFromString query24str
query24str = prefix2 ++
    "?a rel:daughter ?b . \n" ++
    "?a rel:daughter ?c . \n"
result24 = graphFromString result24str
result24str = prefix2 ++
    "?b rel:sister ?c . \n"
result24a = prefix2 ++
    "pers:Ma2 rel:sister pers:Ma2 . \n"
result24b = prefix2 ++
    "pers:Ma2 rel:sister pers:An2 . \n"
result24c = prefix2 ++
    "pers:An2 rel:sister pers:Ma2 . \n"
result24d = prefix2 ++
    "pers:An2 rel:sister pers:An2 . \n"
result24e = prefix2 ++
    "pers:Ma3 rel:sister pers:Ma3 . \n"
result24f = prefix2 ++
    "pers:Rh4 rel:sister pers:Rh4 . \n"
var24 = rdfQueryFind query24 graph2
testQuery24 = test "testQuery24" (not $ null var24)
res24 = rdfQuerySubs var24 result24
testResult24 = testEq "testResult24" 6 (length res24)
testResult24a = testGr "testResult24a" result24a res24
testResult24b = testGr "testResult24b" result24b res24
testResult24c = testGr "testResult24c" result24c res24
testResult24d = testGr "testResult24d" result24d res24
testResult24e = testGr "testResult24e" result24e res24
testResult24f = testGr "testResult24f" result24f res24
-- query25: a parent's son and daughter yield both a sister and a
-- brother arc in one result graph; two parents qualify.
query25 = graphFromString query25str
query25str = prefix2 ++
    "?a rel:son ?b . \n" ++
    "?a rel:daughter ?c . \n"
result25 = graphFromString result25str
result25str = prefix2 ++
    "?b rel:sister ?c . \n" ++
    "?c rel:brother ?b . \n"
result25a = prefix2 ++
    "pers:Wi3 rel:sister pers:Ma3 . \n" ++
    "pers:Ma3 rel:brother pers:Wi3 . \n"
result25b = prefix2 ++
    "pers:Ro4 rel:sister pers:Rh4 . \n" ++
    "pers:Rh4 rel:brother pers:Ro4 . \n"
var25 = rdfQueryFind query25 graph2
testQuery25 = test "testQuery25" (not $ null var25)
res25 = rdfQuerySubs var25 result25
testResult25 = testEq "testResult25" 2 (length res25)
testResult25a = testGr "testResult25a" result25a res25
testResult25b = testGr "testResult25b" result25b res25
-- Full test2 suite.
-- FIX: the query25 tests were defined above but never added to the
-- suite, so they silently never ran; they are included here.
test2 = TestList
    [ testQuery21, testResult21
    , testResult21a, testResult21b, testResult21c
    , testResult21d, testResult21e, testResult21f
    , testQuery22, testResult22
    , testResult22a, testResult22b, testResult22c
    , testResult22d, testResult22e
    , testQuery23, testResult23
    , testResult23a, testResult23b, testResult23c
    , testResult23d, testResult23e, testResult23f
    , testResult23g, testResult23h, testResult23i
    , testResult23j, testResult23k, testResult23l
    , testResult23m, testResult23n, testResult23o
    , testResult23p, testResult23q, testResult23r
    , testResult23s, testResult23t, testResult23u
    , testResult23v, testResult23w, testResult23x
    , testResult23y, testResult23z
    , testResult23F
    , testResult23bF, testResult23cF
    , testResult23dF, testResult23eF
    , testResult23gF, testResult23hF, testResult23iF
    , testResult23jF, testResult23lF
    , testResult23mF, testResult23nF, testResult23oF
    , testResult23sF
    , testResult23vF, testResult23wF
    , testResult23zF
    , testQuery24, testResult24
    , testResult24a, testResult24b, testResult24c
    , testResult24d, testResult24e, testResult24f
    , testQuery25, testResult25
    , testResult25a, testResult25b
    ]
------------------------------------------------------------
-- test handling of unsubstituted variables, and
-- rdfQuerySubsAll, rdfQuerySubsBlank
------------------------------------------------------------
-- test3 fixtures: querying grandparent facts against a rule body
-- that introduces a variable (?b) with no binding, exercising
-- rdfQuerySubsAll (reports unbound vars) and rdfQuerySubsBlank
-- (maps unbound vars to blank nodes).
graph3 = graphFromString graph3str
graph3str = prefix2 ++
    "pers:Pa2 rel:grandparent pers:Ro4 . \n" ++
    "pers:Pa2 rel:grandparent pers:Ol4 . \n"
query31 = graphFromString query31str
query31str = prefix2 ++
    "?a rel:grandparent ?c . \n"
result31 = graphFromString result31str
result31str = prefix2 ++
    "?a rel:son ?b . \n" ++
    "?b rel:son ?c . \n"
result31a = prefix2 ++
    "pers:Pa2 rel:son ?b . \n" ++
    "?b rel:son pers:Ro4 . \n"
result31b = prefix2 ++
    "pers:Pa2 rel:son ?b . \n" ++
    "?b rel:son pers:Ol4 . \n"
var31 = rdfQueryFind query31 graph3
testQuery31 = test "testQuery31" (not $ null var31)
-- rdfQuerySubsAll pairs each substituted graph with its unbound vars
res31pairs = rdfQuerySubsAll var31 result31
(res31,res31v) = unzip res31pairs
testUnsubs31 = testEq "testUnsubs31" 2 (length res31v)
testUnsubs31a = testEq "testUnsubs31a" [(Var "b")] (head res31v)
testUnsubs31b = testEq "testUnsubs31a" [(Var "b")] (head . tail $ res31v)
testResult31 = testEq "testResult31" 2 (length res31)
testResult31a = testGr "testResult31a" result31a res31
testResult31b = testGr "testResult31b" result31b res31
-- result32 mixes query variables with explicit blank nodes; with
-- rdfQuerySubsBlank the unbound ?b and ?d become fresh blank nodes.
query32 = graphFromString query32str
query32str = prefix2 ++
    "?a rel:grandparent ?c . \n"
result32 = graphFromString result32str
result32str = prefix2 ++
    "?a rel:wife _:b . \n" ++
    "?d rel:any _:b0 . \n" ++
    "?a rel:son ?b . \n" ++
    "?b rel:son ?c . \n"
result32a = prefix2 ++
    "pers:Pa2 rel:wife _:b . \n" ++
    "_:d0 rel:any _:b0 . \n" ++
    "pers:Pa2 rel:son _:b1 . \n" ++
    "_:b1 rel:son pers:Ro4 . \n"
result32b = prefix2 ++
    "pers:Pa2 rel:wife _:b . \n" ++
    "_:d0 rel:any _:b0 . \n" ++
    "pers:Pa2 rel:son _:b1 . \n" ++
    "_:b1 rel:son pers:Ol4 . \n"
res32 = rdfQuerySubsBlank var31 result32
testResult32 = testEq "testResult32" 2 (length res32)
testResult32a = testGr "testResult32a" result32a res32
testResult32b = testGr "testResult32b" result32b res32
-- plain rdfQuerySubs rejects graphs with unbound variables entirely
res33 = rdfQuerySubs var31 result32
testResult33 = testEq "testResult33" 0 (length res33)
test3 = TestList
    [ testQuery31
    , testUnsubs31, testUnsubs31a, testUnsubs31b
    , testResult31, testResult31a, testResult31b
    , testResult32, testResult32a, testResult32b
    , testResult33
    ]
-- Debug sequence for rdfQuerySubsBlank
-- (using internals of rdfQuerySubsBlank implementation)
-- res32 = rdfQuerySubsBlank (fromJust var31) result32
-- Step-by-step trace of the rdfQuerySubsBlank pipeline (debug aid):
-- substitute, collect blank labels, then remap residual variables.
d1 = result32
d2 = rdfQuerySubs2 (head $ var31) d1
d3 = allLabels isBlank (fst d2)
d4 = remapLabels (snd d2) d3 makeBlank (fst d2)
------------------------------------------------------------
-- test4: test of backward-chaining query
------------------------------------------------------------
-- test41: backward-chain a single husband fact to the wife rule;
-- one binding set, no unbound variables.
prefix4 =
    "@prefix pers: <urn:pers:> . \n" ++
    "@prefix rel: <urn:rel:> . \n" ++
    " \n"
graph41 = graphFromString graph41str
graph41str = prefix4 ++
    "pers:St1 rel:wife pers:Do1 . \n"
query41 = graphFromString query41str
query41str = prefix4 ++
    "?a rel:wife ?b . \n"
result41 = graphFromString result41str
result41str = prefix4 ++
    "?b rel:husband ?a . \n"
result41a = prefix4 ++
    "pers:Do1 rel:husband pers:St1 . \n"
var41 = rdfQueryBack query41 graph41
testQuery41 = test "testQuery41" (not $ null var41)
testQuery41a = testEq "testQuery41a" 1 (length var41)
res41 = rdfQueryBackSubs var41 result41
testResult41 = testEq "testResult41" 1 (length res41)
testResult41a = testGr "testResult41a" result41a (fst $ unzip $ head res41)
testUnbound41a = testLs "testUnbound41a" [] (snd $ head $ head res41)
-- test42: backward-chain grandparent to son-of-son; the
-- intermediate ?b remains unbound in the antecedent.
graph42 = graphFromString graph42str
graph42str = prefix4 ++
    "pers:Pa2 rel:grandparent pers:Ro4 . \n"
query42 = graphFromString query42str
query42str = prefix4 ++
    "?a rel:grandparent ?c . \n"
result42 = graphFromString result42str
result42str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?b rel:son ?c . \n"
result42a = prefix4 ++
    "pers:Pa2 rel:son ?b . \n" ++
    "?b rel:son pers:Ro4 . \n"
var42 = rdfQueryBack query42 graph42
testQuery42 = test "testQuery42" (not $ null var42)
testQuery42a = testEq "testQuery42a" 1 (length var42)
res42 = rdfQueryBackSubs var42 result42
testResult42 = testEq "testResult42" 1 (length res42)
testResult42a = testGr "testResult42a" result42a (fst $ unzip $ head res42)
testUnbound42a = testLs "testUnbound42a" [(Var "b")] (snd $ head $ head res42)
-- test43: backward-chain brother to common-parent sons; the shared
-- parent ?a is the unbound variable.
graph43 = graphFromString graph43str
graph43str = prefix4 ++
    "pers:Gr3 rel:brother pers:La3 . \n"
query43 = graphFromString query43str
query43str = prefix4 ++
    "?b rel:brother ?c . \n"
result43 = graphFromString result43str
result43str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:son ?c . \n"
result43a = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:son pers:La3 . \n"
var43 = rdfQueryBack query43 graph43
testQuery43 = test "testQuery43" (not $ null var43)
testQuery43a = testEq "testQuery43a" 1 (length var43)
res43 = rdfQueryBackSubs var43 result43
testResult43 = testEq "testResult43" 1 (length res43)
testResult43a = testGr "testResult43a" result43a (fst $ unzip $ head res43)
testUnbound43a = testLs "testUnbound43a" [(Var "a")] (snd $ head $ head res43)
-- test44: a two-pattern query against one fact; the fact can match
-- either query pattern, so two alternative binding sets result.
graph44 = graphFromString graph44str
graph44str = prefix4 ++
    "pers:Pa2 rel:grandson pers:Ro4 . \n"
query44 = graphFromString query44str
query44str = prefix4 ++
    "?a rel:grandson ?b . \n" ++
    "?c rel:grandson ?d . \n"
result44 = graphFromString result44str
result44str = prefix4 ++
    "?a rel:son ?m . \n" ++
    "?m rel:son ?b . \n" ++
    "?c rel:daughter ?n . \n" ++
    "?n rel:son ?d . \n"
result44a = prefix4 ++
    "pers:Pa2 rel:son ?m . \n" ++
    "?m rel:son pers:Ro4 . \n" ++
    "?c rel:daughter ?n . \n" ++
    "?n rel:son ?d . \n"
unbound44a = [(Var "m"),(Var "c"),(Var "n"),(Var "d")]
result44b = prefix4 ++
    "?a rel:son ?m . \n" ++
    "?m rel:son ?b . \n" ++
    "pers:Pa2 rel:daughter ?n . \n" ++
    "?n rel:son pers:Ro4 . \n"
unbound44b = [(Var "a"),(Var "m"),(Var "b"),(Var "n")]
var44 = rdfQueryBack query44 graph44
testQuery44 = test "testQuery44" (not $ null var44)
testQuery44a = testEq "testQuery44a" 2 (length var44)
res44 = rdfQueryBackSubs var44 result44
testResult44 = testEq "testResult44" 2 (length res44)
-- note: the "a" expectations correspond to the second solution
[res44_1,res44_2] = res44
testResult44a = testGr "testResult44a" result44a (fst $ unzip res44_2)
testUnbound44a = testLs "testUnbound44a" unbound44a (snd $ head res44_2)
testResult44b = testGr "testResult44b" result44b (fst $ unzip res44_1)
testUnbound44b = testLs "testUnbound44b" unbound44b (snd $ head res44_1)
-- test45: multiple substitutions used together
--
-- (?a daughter ?b, ?a son ?c) => ?b brother ?c
--
-- (b1 brother c1, b2 brother c2) if
-- (?a daughter b1, ?a son c1) && (?a daughter b2, ?a son c2)
-- test45: two brother facts must be explained together, so a single
-- solution contains two substitutions of the antecedent.
graph45 = graphFromString graph45str
graph45str = prefix4 ++
    "pers:Rh4 rel:brother pers:Ro4 . \n" ++
    "pers:Ma3 rel:brother pers:Wi3 . \n"
query45 = graphFromString query45str
query45str = prefix4 ++
    "?b rel:brother ?c . \n"
result45 = graphFromString result45str
result45str = prefix4 ++
    "?a rel:daughter ?b . \n" ++
    "?a rel:son ?c . \n"
result45a1 = prefix4 ++
    "?a rel:daughter pers:Rh4 . \n" ++
    "?a rel:son pers:Ro4 . \n"
unbound45a1 = [(Var "a")]
result45a2 = prefix4 ++
    "?a rel:daughter pers:Ma3 . \n" ++
    "?a rel:son pers:Wi3 . \n"
unbound45a2 = [(Var "a")]
var45 = rdfQueryBack query45 graph45
testQuery45 = test "testQuery45" (not $ null var45)
testQuery45a = testEq "testQuery45a" 1 (length var45)
res45 = rdfQueryBackSubs var45 result45
testResult45 = testEq "testResult45" 1 (length res45)
[res45_1] = res45
testResult45_1 = testEq "testResult45_1" 2 (length res45_1)
[res45_11,res45_12] = res45_1
testResult45a1 = testGr "testResult45a1" result45a1 [fst res45_11]
testUnbound45a1 = testLs "testUnbound45a1" unbound45a1 (snd res45_11)
testResult45a2 = testGr "testResult45a2" result45a2 [fst res45_12]
testUnbound45a2 = testLs "testUnbound45a2" unbound45a2 (snd res45_12)
-- test46: multiple ways to get solution
--
-- (?c son ?a, ?c stepSon ?b) => (?a stepBrother ?b, ?b stepBrother ?a)
--
-- (a stepBrother b) if
-- (_:c1 son a, _:c1 stepSon b) || (_:c2 stepSon a, _:c2 son b)
-- test46: a symmetric query matched one way or the other, giving
-- two alternative solutions of one substitution each.
graph46 = graphFromString graph46str
graph46str = prefix4 ++
    "pers:Gr3 rel:stepbrother pers:St3 . \n"
query46 = graphFromString query46str
query46str = prefix4 ++
    "?b rel:stepbrother ?c . \n" ++
    "?c rel:stepbrother ?b . \n"
result46 = graphFromString result46str
result46str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:stepson ?c . \n"
result46a = prefix4 ++
    "?a rel:son pers:St3 . \n" ++
    "?a rel:stepson pers:Gr3 . \n"
unbound46a = [(Var "a")]
result46b = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:stepson pers:St3 . \n"
unbound46b = [(Var "a")]
var46 = rdfQueryBack query46 graph46
testQuery46 = test "testQuery46" (not $ null var46)
testQuery46a = testEq "testQuery46a" 2 (length var46)
res46 = rdfQueryBackSubs var46 result46
testResult46 = testEq "testResult46" 2 (length res46)
[res46_1,res46_2] = res46
testResult46_1 = testEq "testResult46_1" 1 (length res46_1)
testResult46_2 = testEq "testResult46_2" 1 (length res46_2)
[res46_11] = res46_1
[res46_21] = res46_2
testResult46a = testGr "testResult46a" result46a [fst res46_11]
testUnbound46a = testLs "testUnbound46a" unbound46a (snd res46_11)
testResult46b = testGr "testResult46b" result46b [fst res46_21]
testUnbound46b = testLs "testUnbound46b" unbound46b (snd res46_21)
-- test47: multiple ways to multiple solutions
--
-- (?c son ?a, ?c stepSon ?b) => (?a stepBrother ?b, ?b stepBrother ?a)
--
-- (a stepBrother b, c stepBrother d) if
-- ((_:e son a, _:e stepSon b) && (_:f son a, _:f stepSon b)) ||
-- ((_:e son a, _:e stepSon b) && (_:f stepSon a, _:f son b)) ||
-- ((_:e stepSon a, _:e son b) && (_:f son a, _:f stepSon b)) ||
-- ((_:e stepSon a, _:e son b) && (_:f stepSon a, _:f son b))
-- test47: two symmetric facts, each explainable two ways, give
-- 2x2 = 4 solutions of two substitutions each.
graph47 = graphFromString graph47str
graph47str = prefix4 ++
    "pers:Gr3 rel:stepbrother pers:St3 . \n" ++
    "pers:St3 rel:stepbrother pers:Gr3 . \n"
query47 = graphFromString query47str
query47str = prefix4 ++
    "?b rel:stepbrother ?c . \n" ++
    "?c rel:stepbrother ?b . \n"
result47 = graphFromString result47str
result47str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:stepson ?c . \n"
result47a1 = prefix4 ++
    "?a rel:son pers:St3 . \n" ++
    "?a rel:stepson pers:Gr3 . \n"
unbound47a1 = [(Var "a")]
result47a2 = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:stepson pers:St3 . \n"
unbound47a2 = [(Var "a")]
result47b1 = prefix4 ++
    "?a rel:stepson pers:St3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound47b1 = [(Var "a")]
result47b2 = prefix4 ++
    "?a rel:stepson pers:St3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound47b2 = [(Var "a")]
result47c1 = prefix4 ++
    "?a rel:son pers:St3 . \n" ++
    "?a rel:stepson pers:Gr3 . \n"
unbound47c1 = [(Var "a")]
result47c2 = prefix4 ++
    "?a rel:son pers:St3 . \n" ++
    "?a rel:stepson pers:Gr3 . \n"
unbound47c2 = [(Var "a")]
result47d1 = prefix4 ++
    "?a rel:stepson pers:St3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound47d1 = [(Var "a")]
result47d2 = prefix4 ++
    "?a rel:son pers:St3 . \n" ++
    "?a rel:stepson pers:Gr3 . \n"
unbound47d2 = [(Var "a")]
var47 = rdfQueryBack query47 graph47
testQuery47 = test "testQuery47" (not $ null var47)
testQuery47a = testEq "testQuery47a" 4 (length var47)
res47 = rdfQueryBackSubs var47 result47
testResult47 = testEq "testResult47" 4 (length res47)
[res47_1,res47_2,res47_3,res47_4] = res47
testResult47_1 = testEq "testResult47_1" 2 (length res47_1)
testResult47_2 = testEq "testResult47_2" 2 (length res47_2)
testResult47_3 = testEq "testResult47_3" 2 (length res47_3)
testResult47_4 = testEq "testResult47_4" 2 (length res47_4)
[res47_11,res47_12] = res47_1
[res47_21,res47_22] = res47_2
[res47_31,res47_32] = res47_3
[res47_41,res47_42] = res47_4
testResult47a1 = testGr "testResult47a1" result47a1 [fst res47_11]
testUnbound47a1 = testLs "testUnbound47a1" unbound47a1 (snd res47_11)
testResult47a2 = testGr "testResult47a2" result47a2 [fst res47_12]
testUnbound47a2 = testLs "testUnbound47a2" unbound47a2 (snd res47_12)
testResult47b1 = testGr "testResult47b1" result47b1 [fst res47_21]
testUnbound47b1 = testLs "testUnbound47b1" unbound47b1 (snd res47_21)
testResult47b2 = testGr "testResult47b2" result47b2 [fst res47_22]
testUnbound47b2 = testLs "testUnbound47b2" unbound47b2 (snd res47_22)
testResult47c1 = testGr "testResult47c1" result47c1 [fst res47_31]
testUnbound47c1 = testLs "testUnbound47c1" unbound47c1 (snd res47_31)
testResult47c2 = testGr "testResult47c2" result47c2 [fst res47_32]
testUnbound47c2 = testLs "testUnbound47c2" unbound47c2 (snd res47_32)
testResult47d1 = testGr "testResult47d1" result47d1 [fst res47_41]
testUnbound47d1 = testLs "testUnbound47d1" unbound47d1 (snd res47_41)
testResult47d2 = testGr "testResult47d2" result47d2 [fst res47_42]
testUnbound47d2 = testLs "testUnbound47d2" unbound47d2 (snd res47_42)
-- test48: redundant multiple ways to get solution
--
-- (?a son ?b, ?a son ?c) => (?b brother ?c, ?c brother ?b)
--
-- (a brother b) if
-- (_:c1 son a, _:c1 son b) || (_:c2 son b, _:c2 son a)
-- test48: the symmetric brother rule explains one fact two
-- (redundant) ways; both solutions are reported.
graph48 = graphFromString graph48str
graph48str = prefix4 ++
    "pers:Gr3 rel:brother pers:La3 . \n"
query48 = graphFromString query48str
query48str = prefix4 ++
    "?b rel:brother ?c . \n" ++
    "?c rel:brother ?b . \n"
result48 = graphFromString result48str
result48str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:son ?c . \n"
result48a = prefix4 ++
    "?a rel:son pers:La3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound48a = [(Var "a")]
result48b = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:son pers:La3 . \n"
unbound48b = [(Var "a")]
var48 = rdfQueryBack query48 graph48
testQuery48 = test "testQuery48" (not $ null var48)
testQuery48a = testEq "testQuery48a" 2 (length var48)
res48 = rdfQueryBackSubs var48 result48
testResult48 = testEq "testResult48" 2 (length res48)
[res48_1,res48_2] = res48
testResult48_1 = testEq "testResult48_1" 1 (length res48_1)
testResult48_2 = testEq "testResult48_2" 1 (length res48_2)
[res48_11] = res48_1
[res48_21] = res48_2
testResult48a = testGr "testResult48a" result48a [fst res48_11]
testUnbound48a = testLs "testUnbound48a" unbound48a (snd res48_11)
testResult48b = testGr "testResult48b" result48b [fst res48_21]
testUnbound48b = testLs "testUnbound48b" unbound48b (snd res48_21)
-- test49: goal not satisfiable by rule
--
-- (?a foo ?b, ?b foo ?a) => (?a bar ?a)
--
-- (a bar b) cannot be deduced directly
-- test49: the goal (?a bar ?a) cannot match the rule consequent,
-- so backward chaining must produce no bindings at all.
graph49 = graphFromString graph49str
graph49str = prefix4 ++
    "pers:Gr3 rel:foo pers:La3 . \n"
query49 = graphFromString query49str
query49str = prefix4 ++
    "?a rel:bar ?a . \n"
result49 = graphFromString result49str
result49str = prefix4 ++
    "?a rel:foo ?b . \n" ++
    "?b rel:foo ?a . \n"
var49 = rdfQueryBack query49 graph49
testQuery49 = test "testQuery49" (null var49)
testQuery49a = testEq "testQuery49a" 0 (length var49)
res49 = rdfQueryBackSubs var49 result49
testResult49 = testEq "testResult49" 0 (length res49)
-- test50: back-chaining with filter
--
-- (?a son ?b, ?a son ?c) => (?b brother ?c, ?c brother ?b)
--
-- (a brother b) if
( _ : c1 son a , _ : ) || ( _ : c2 son b , _ : c2 son a )
-- test50: a self-brother fact yields two solutions, both of which
-- are then removed by the ?b /= ?c back-chaining filter.
graph50 = graphFromString graph50str
graph50str = prefix4 ++
    "pers:Gr3 rel:brother pers:Gr3 . \n"
query50 = graphFromString query50str
query50str = prefix4 ++
    "?b rel:brother ?c . \n" ++
    "?c rel:brother ?b . \n"
result50 = graphFromString result50str
result50str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:son ?c . \n"
result50a = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound50a = [(Var "a")]
result50b = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound50b = [(Var "a")]
var50 = rdfQueryBack query50 graph50
testQuery50 = test "testQuery50" (not $ null var50)
testQuery50a = testEq "testQuery50a" 2 (length var50)
res50 = rdfQueryBackSubs var50 result50
testResult50 = testEq "testResult50" 2 (length res50)
[res50_1,res50_2] = res50
testResult50_1 = testEq "testResult50_1" 1 (length res50_1)
testResult50_2 = testEq "testResult50_2" 1 (length res50_2)
[res50_11] = res50_1
[res50_21] = res50_2
testResult50a = testGr "testResult50a" result50a [fst res50_11]
testUnbound50a = testLs "testUnbound50a" unbound50a (snd res50_11)
testResult50b = testGr "testResult50b" result50b [fst res50_21]
testUnbound50b = testLs "testUnbound50b" unbound50b (snd res50_21)
filter50 = varFilterNE (Var "b") (Var "c") :: RDFVarBindingFilter
var50F = rdfQueryBackFilter filter50 var50
res50F = rdfQueryBackSubs var50F result50
testResult50F = testEq "testResult50F" 0 (length res50F)
-- Backward substitution query test suite
-- Full backward-chaining (test4x) suite.
test4 = TestList
    [ testQuery41, testQuery41a, testResult41
    , testResult41a, testUnbound41a
    , testQuery42, testQuery42a, testResult42
    , testResult42a, testUnbound42a
    , testQuery43, testQuery43a, testResult43
    , testResult43a, testUnbound43a
    , testQuery44, testQuery44a, testResult44
    , testResult44a, testUnbound44a
    , testResult44b, testUnbound44b
    , testQuery45, testQuery45a, testResult45
    , testResult45_1
    , testResult45a1, testUnbound45a1
    , testResult45a2, testUnbound45a2
    , testQuery46, testQuery46a, testResult46
    , testResult46_1, testResult46_2
    , testResult46a, testUnbound46a
    , testResult46b, testUnbound46b
    , testQuery47, testQuery47a, testResult47
    , testResult47_1, testResult47_2, testResult47_3, testResult47_4
    , testResult47a1, testUnbound47a1
    , testResult47a2, testUnbound47a2
    , testResult47b1, testUnbound47b1
    , testResult47b2, testUnbound47b2
    , testResult47c1, testUnbound47c1
    , testResult47c2, testUnbound47c2
    , testResult47d1, testUnbound47d1
    , testResult47d2, testUnbound47d2
    , testQuery48, testQuery48a, testResult48
    , testResult48_1, testResult48_2
    , testResult48a, testUnbound48a
    , testResult48b, testUnbound48b
    , testQuery49, testQuery49a, testResult49
    , testQuery50, testQuery50a, testResult50
    , testResult50_1, testResult50_2
    , testResult50a, testUnbound50a
    , testResult50b, testUnbound50b
    , testResult50F
    ]
------------------------------------------------------------
-- Instance query test suite
------------------------------------------------------------
--
-- The test plan is this:
-- (1) perform a backward chaining query against some desired result.
-- ?f father ?a, ?f father ?b, ?a /= ?b => ?a brother ?b
-- against
-- Gr3 brother La3, Gr3 brother Si3
-- should yield:
-- _:a father Gr3
-- _:a father La3
-- _:b father Gr3
-- _:b father Si3
-- (2) Perform instance query of result against 'graph2' (see above)
-- should yield:
-- _:a = Pa2
-- _:b = Pa2
-- (3) Substitute this into query, should yield:
-- Pa2 father Gr3
-- Pa2 father La3
-- Pa2 father Gr3
-- Pa2 father Si3
-- (4) Use this result in an instance query against 'graph2': it should
-- match without any variable substitutions, indicating that it is
-- a subgraph
-- test6 fixtures: two brother facts, the common-parent-son rule
-- body, and the expected graphs after blank substitution (61a) and
-- after instance binding against graph2 (63a).
graph61 = graphFromString graph61str
graph61str = prefix4 ++
    "pers:Gr3 rel:brother pers:La3 . \n" ++
    "pers:Gr3 rel:brother pers:Si3 . \n"
query61 = graphFromString query61str
query61str = prefix4 ++
    "?b rel:brother ?c . \n"
result61 = graphFromString result61str
result61str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:son ?c . \n"
result61a = prefix4 ++
    "_:a1 rel:son pers:Gr3 . \n" ++
    "_:a1 rel:son pers:La3 . \n" ++
    "_:a2 rel:son pers:Gr3 . \n" ++
    "_:a2 rel:son pers:Si3 . \n"
result63a = prefix4 ++
    "pers:Pa2 rel:son pers:Gr3 . \n" ++
    "pers:Pa2 rel:son pers:La3 . \n" ++
    "pers:Pa2 rel:son pers:Gr3 . \n" ++
    "pers:Pa2 rel:son pers:Si3 . \n"
-- 1. Backchain query with blank substitutions
-- backward-chain, then turn unbound vars into blank nodes and merge
-- the two substitutions into a single graph
var61 = rdfQueryBack query61 graph61
testQuery61 = test "testQuery61" (not $ null var61)
testQuery61a = testEq "testQuery61a" 1 (length var61)
res61 = rdfQueryBackSubsBlank var61 result61
testResult61 = testEq "testResult61" 1 (length res61)
[[res61a1,res61a2]] = res61
res61a = merge res61a1 res61a2
testResult61a = testGr "testResult61a" result61a [res61a]
2 . Instance query against ' '
var62 = rdfQueryInstance res61a graph2
testQuery62 = test "testQuery62" (not $ null var62)
testQuery62a = testEq "testQuery62a" 1 (length var62)
3 . Substitute into instance query graph
res63 = rdfQuerySubs var62 res61a
testQuery63 = test "testQuery63" (not $ null res63)
testQuery63a = testEq "testQuery63a" 1 (length res63)
[res63a] = res63
testResult63a = testGr "testResult63a" result63a [res63a]
4 . Repeat instance query against ' '
-- Query bindings should be null.
var64 = rdfQueryInstance res63a graph2
testQuery64 = test "testQuery64" (not $ null var64)
testQuery64a = testEq "testQuery64a" 1 (length var64)
[var64a] = var64
testQuery64b = test "testQuery64b" (null $ vbEnum var64a)
test6 = TestList
[ testQuery61, testQuery61a, testResult61, testResult61a
, testQuery62, testQuery62a
, testQuery63, testQuery63a, testResult63a
, testQuery64, testQuery64a, testQuery64b
]
------------------------------------------------------------
-- Specific test cases
------------------------------------------------------------
-- Back-chaining query binding modifier
-- Set up call of rdfQueryBackModify
--  (1) simple filter
--  (2) allocate new binding
{-
rdfQueryBackModify ::
    RDFVarBindingModify -> [[RDFVarBinding]] -> [[RDFVarBinding]]
rdfQueryBackModify qbm qbss = concatMap (rdfQueryBackModify1 qbm) qbss
-}
baseex = "/"
baserdf = nsURI namespaceRDF
q_dattyp = (makeScopedName "" baseex "datatype")
v_a = Var "a"
v_b = Var "b"
v_c = Var "c"
v_x = Var "x"
v_y = Var "y"
v_z = Var "z"
u_s = Res (makeScopedName "" baseex "s")
u_o = Res (makeScopedName "" baseex "o")
u_p = Res (makeScopedName "" baseex "p")
u_p1 = Res (makeScopedName "" baseex "p1")
u_p2a = Res (makeScopedName "" baseex "p2a")
u_p2b = Res (makeScopedName "" baseex "p2b")
u_m1 = Res (makeScopedName "" baserdf "_1")
u_m2 = Res (makeScopedName "" baserdf "_2")
u_rt = Res rdf_type
u_xt = Res rdf_XMLLiteral
u_dt = Res q_dattyp
l_1 = Lit "l1" Nothing
l_2 = Lit "l2" (Just $ langName "fr")
l_3 = Lit "l3" (Just q_dattyp)
was : ( " fr " )
l_5 = Lit "l5" (Just rdf_XMLLiteral)
b_1 = Blank "1"
b_2 = Blank "2"
b_3 = Blank "3"
b_l1 = Blank "l1"
b_l2 = Blank "l2"
vbss01a = -- ?a is uri, ?b is uri
[ makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,u_o) ]
, makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,b_1) ]
, makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_1) ]
]
vbss01b = -- ?c is blank
[ makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,b_1) ]
]
vbss01c = -- ?c is literal
[ makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_1) ]
, makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_2) ]
, makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_3) ]
]
vbss01d = -- ?c is untyped literal
[ makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_1) ]
, makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_2) ]
]
vbss01e = -- ?c is typed literal
[ makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_3) ]
, makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_4) ]
, makeVarBinding [ (v_a,b_3), (v_b,u_p), (v_c,l_5) ]
]
vbss01f = -- ?c is XML literal
[ makeVarBinding [ (v_a,b_1), (v_b,u_p), (v_c,l_5) ]
]
vbss01g = -- ?b is member property
[ makeVarBinding [ (v_a,b_1), (v_b,u_m1), (v_c,u_o) ]
, makeVarBinding [ (v_a,u_s), (v_b,u_m2), (v_c,b_1) ]
]
? c is datatyped with ? x
[ makeVarBinding [ (v_a,b_1), (v_b,u_p), (v_c,l_3), (v_x,u_dt) ]
, makeVarBinding [ (v_a,b_2), (v_b,u_p), (v_c,l_4), (v_x,u_dt) ]
, makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_5), (v_x,u_xt) ]
]
vbss01i = -- ?c is not datatyped with ?x
[ makeVarBinding [ (v_a,b_1), (v_b,u_p), (v_c,l_3), (v_x,u_dt) ]
, makeVarBinding [ (v_a,b_2), (v_b,u_p), (v_c,l_4), (v_x,u_xt) ]
, makeVarBinding [ (v_a,b_3), (v_b,u_p), (v_c,l_5), (v_x,u_xt) ]
]
vbss01 = [ vbss01a -- ?a is uri, ?b is uri
, vbss01b -- ?c is blank
, vbss01c -- ?c is literal
, vbss01d -- ?c is untyped literal
, vbss01e -- ?c is typed literal
, vbss01f -- ?c is XML literal
, vbss01g -- ?b is member property
? c is datatyped with ? x
, vbss01i -- ?c is not datatyped with ?x
]
testBackMod01 = testEq "testBackMod01" vbss01 $
rdfQueryBackModify varBindingId vbss01
testBackMod02 = testEq "testBackMod02" [vbss01a,vbss01b,vbss01c,vbss01d] $
rdfQueryBackModify
(makeVarFilterModify $ rdfVarBindingUriRef v_a)
vbss01
testBackMod03 = testEq "testBackMod03" [vbss01f,vbss01i] $
rdfQueryBackModify
(makeVarFilterModify $ rdfVarBindingBlank v_a)
vbss01
testBackMod04 = testEq "testBackMod04" vbss01 $
rdfQueryBackModify
(makeVarFilterModify $ rdfVarBindingUriRef v_b)
vbss01
testBackMod05 = testEq "testBackMod05"
[vbss01c,vbss01d,vbss01e,vbss01f,vbss01h,vbss01i] $
rdfQueryBackModify
(makeVarFilterModify $ rdfVarBindingLiteral v_c)
vbss01
testBackMod06 = testEq "testBackMod06" [vbss01d] $
rdfQueryBackModify
(makeVarFilterModify $ rdfVarBindingUntypedLiteral v_c)
vbss01
testBackMod07 = testEq "testBackMod07" [vbss01e,vbss01f,vbss01h,vbss01i] $
rdfQueryBackModify
(makeVarFilterModify $ rdfVarBindingTypedLiteral v_c)
vbss01
testBackMod08 = testEq "testBackMod08" [vbss01f] $
rdfQueryBackModify
(makeVarFilterModify $ rdfVarBindingXMLLiteral v_c)
vbss01
testBackMod09 = testEq "testBackMod09" [vbss01g] $
rdfQueryBackModify
(makeVarFilterModify $ rdfVarBindingMemberProp v_b)
vbss01
testBackMod10 = testEq "testBackMod10" [vbss01h] $
rdfQueryBackModify
(makeVarFilterModify $ rdfVarBindingDatatyped v_x v_c)
vbss01
vbss02a = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2) ]
, makeVarBinding [ (v_b,b_l1) ]
, makeVarBinding [ (v_b,b_l2) ]
]
vbss02b = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2) ]
, makeVarBinding [ (v_x,b_l1), (v_a,u_rt), (v_b,u_xt) ]
, makeVarBinding [ (v_b,b_l2) ]
]
vbss02c = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2) ]
, makeVarBinding [ (v_b,b_l1) ]
, makeVarBinding [ (v_x,b_l2), (v_a,u_rt), (v_b,u_xt) ]
]
vbss02d = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2) ]
, makeVarBinding [ (v_x,b_l1), (v_a,u_rt), (v_b,u_xt) ]
, makeVarBinding [ (v_x,b_l2), (v_a,u_rt), (v_b,u_xt) ]
]
vbss02 = [ vbss02a
, vbss02b
, vbss02c
, vbss02d
]
testBackMod20 = testEq "testBackMod20" vbss02 $
rdfQueryBackModify varBindingId vbss02
testBackMod21 = testEq "testBackMod21" [vbss02d] $
rdfQueryBackModify
(makeVarFilterModify $ rdfVarBindingUriRef v_a)
vbss02
-- Variable binding modifier that adds new bindings, if certain
-- others are present.
vbm22 = VarBindingModify
{ vbmName = swishName "vbm22"
, vbmApply = concatMap apply1
, vbmVocab = [v_a,v_b,v_x,v_y]
, vbmUsage = [[v_y]]
}
where
apply1 :: RDFVarBinding -> [RDFVarBinding]
apply1 vb = apply2 vb (vbMap vb v_a) (vbMap vb v_b) (vbMap vb v_x)
apply2 vb (Just a) (Just b) (Just _) =
[ joinVarBindings nva vb, joinVarBindings nvb vb ]
where
nva = makeVarBinding [(v_y,a)]
nvb = makeVarBinding [(v_y,b)]
apply2 _ _ _ _ = []
vbss02dy = sequence
[ [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1), (v_y,u_p1) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1), (v_y,b_l1) ]
]
, [ makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2), (v_y,u_p2a) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2), (v_y,b_l2) ]
]
, [ makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2), (v_y,u_p2b) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2), (v_y,b_l2) ]
]
, [ makeVarBinding [ (v_x,b_l1), (v_a,u_rt), (v_b,u_xt), (v_y,u_rt) ]
, makeVarBinding [ (v_x,b_l1), (v_a,u_rt), (v_b,u_xt), (v_y,u_xt) ]
]
, [ makeVarBinding [ (v_x,b_l2), (v_a,u_rt), (v_b,u_xt), (v_y,u_rt) ]
, makeVarBinding [ (v_x,b_l2), (v_a,u_rt), (v_b,u_xt), (v_y,u_xt) ]
]
]
testBackMod22 = testEq "testBackMod22" vbss02dy $
rdfQueryBackModify vbm22 vbss02
-- simplified version of above for debugging --
vbss03a = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
, makeVarBinding [ (v_b,b_l1) ]
]
vbss03b = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2) ]
]
vbss03 = [ vbss03a
, vbss03b
]
vbss03by = sequence
[ [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1), (v_y,u_p1) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1), (v_y,b_l1) ]
]
, [ makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2), (v_y,u_p2a) ]
, makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2), (v_y,b_l2) ]
]
]
testBackMod30 = testEq "testBackMod30" vbss03by $
rdfQueryBackModify vbm22 vbss03
test7 = TestList
[ testBackMod01, testBackMod02, testBackMod03, testBackMod04
, testBackMod05, testBackMod06, testBackMod07, testBackMod08
, testBackMod09, testBackMod10
, testBackMod20, testBackMod21, testBackMod22
, testBackMod30
]
------------------------------------------------------------
-- Test simple value and list queries
------------------------------------------------------------
namespacetest =
Namespace "test" "urn:test:"
namespacelist =
Namespace "list" "urn:list:"
qntest loc = ScopedName namespacetest loc
qnlist loc = ScopedName namespacelist loc
prefixlist =
"@prefix rdf : <" ++ nsURI namespaceRDF ++ "> . \n" ++
"@prefix xsd : <" ++ nsURI namespaceXSD ++ "> . \n" ++
"@prefix test : <" ++ nsURI namespacetest ++ "> . \n" ++
"@prefix list : <" ++ nsURI namespacelist ++ "> . \n" ++
" \n"
graphlist = graphFromString graphliststr
graphliststr = prefixlist ++
"test:a rdf:type test:C1 ; " ++
" test:p test:item1 ; " ++
" test:p test:item2 . " ++
"test:b rdf:type test:C1 ; " ++
" test:p \"1\"^^xsd:integer ; " ++
" test:p \"2\"^^xsd:integer ; " ++
" test:p \"3\"^^xsd:integer . " ++
"test:c rdf:type test:C1 ; " ++
" test:q \"1\"^^xsd:integer ; " ++
" test:q \"2\"^^xsd:boolean ; " ++
" test:q \"3\" . " ++
"list:three :- (list:_1 list:_2 list:_3) . \n" ++
"list:empty :- () . \n"
testC1 = Res (qntest "C1")
testabc = [ Res (qntest "a"),Res (qntest "b"),Res (qntest "c") ]
testp = Res (qntest "p")
testq = Res (qntest "q")
testi12 = [ Res (qntest "item1"),Res (qntest "item2") ]
test123 = [ Lit "1" (Just xsd_integer)
, Lit "2" (Just xsd_integer)
, Lit "3" (Just xsd_integer)
]
test1fp = [ Lit "1" (Just xsd_integer)
, Lit "2" (Just xsd_boolean)
, Lit "3" Nothing
]
list01 = [Res (qnlist "_1"),Res (qnlist "_2"),Res (qnlist "_3")]
list02 = []
testVal01 = testEqv "testVal01" testabc $
rdfFindValSubj res_rdf_type testC1 graphlist
testVal02 = testEqv "testVal02" testi12 $
rdfFindPredVal (testabc!!0) testp graphlist
testVal03 = testEqv "testVal03" test123 $
rdfFindPredVal (testabc!!1) testp graphlist
testVal04 = testEqv "testVal04" test1fp $
rdfFindPredVal (testabc!!2) testq graphlist
testVal05 = testEqv "testVal05" [] $
rdfFindPredVal (testabc!!2) testp graphlist
testVal06 = testEqv "testVal06" [] $
rdfFindPredInt (testabc!!0) testp graphlist
testVal07 = testEqv "testVal07" [1,2,3] $
rdfFindPredInt (testabc!!1) testp graphlist
testVal08 = testEqv "testVal08" [1] $
rdfFindPredInt (testabc!!2) testq graphlist
testlist01 = testEq "testlist01" list01 $
rdfFindList graphlist (Res $ qnlist "three")
testlist02 = testEq "testlist02" list02 $
rdfFindList graphlist (Res $ qnlist "empty")
test8 = TestList
[ testVal01, testVal02, testVal03, testVal04
, testVal05, testVal06, testVal07, testVal08
, testlist01, testlist02
]
{-----
queryList :: RDFGraph -> RDFLabel -> [RDFLabel]
-- queryList gr res_rdf_nil = []
-- queryList gr hd = findhead g:rdfQueryList gr (findrest g)
queryList gr hd
    | hd == res_rdf_nil = []
    | otherwise         = (findhead g):(queryList gr (findrest g))
    where
        g = subgr gr hd
        findhead g = headOrNil [ ob | Arc _ sb ob <- g, sb == res_rdf_first ]
        findrest g = headOrNil [ ob | Arc _ sb ob <- g, sb == res_rdf_rest ]
        subgr g h  = filter ((==) h . arcSubj) $ getArcs g
        headOrNil  = foldr const res_rdf_nil

th1 = (Res $ qnlist "empty")
th3 = (Res $ qnlist "three")
th3a = subgr graphlist th3
th3b = findhead th3a
th3c = findrest th3a
tl3c = queryList graphlist th3c
th3d = subgr graphlist th3c
th3e = findhead th3d
th3f = findrest th3d
tl3 = queryList graphlist th3
-----}
------------------------------------------------------------
-- Full test suite, main program,
-- and useful expressions for interactive use
------------------------------------------------------------
allTests = TestList
[ test1
, test2
, test3
, test4
, test6
, test7
, test8
]
main = runTestTT allTests
runTestFile t = do
h <- openFile "a.tmp" WriteMode
runTestText (putTextToHandle h False) t
hClose h
tf = runTestFile
tt = runTestTT
shres32 = TestCase $ assertString (show res32)
--------------------------------------------------------------------------------
--
--  Copyright (c) 2003, Graham Klyne.  All rights reserved.
--
--  This file is part of Swish.
--
--  Swish is free software; you can redistribute it and/or modify
--  it under the terms of the GNU General Public License as published by
--  the Free Software Foundation; either version 2 of the License, or
--  (at your option) any later version.
--
--  Swish is distributed in the hope that it will be useful,
--  but WITHOUT ANY WARRANTY; without even the implied warranty of
--  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
--  GNU General Public License for more details.
--
--  You should have received a copy of the GNU General Public License
--  along with Swish; if not, write to:
--    The Free Software Foundation, Inc.,
--    59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
--
--------------------------------------------------------------------------------
-- $Source: /file/HaskellRDF/RDFQueryTest.hs,v $
-- $Author: graham $
-- $Revision: 1.23 $
-- $Log: RDFQueryTest.hs,v $
-- Revision 1.23  2004/01/07 19:49:13  graham
-- Reorganized RDFLabel details to eliminate separate language field,
-- and to use ScopedName rather than QName.
-- Removed some duplicated functions from module Namespace.
--
-- Revision 1.22  2004/01/06 13:53:10  graham
-- Created consolidated test harness (SwishTestAll.hs)
--
-- Revision 1.21  2003/12/20 12:53:40  graham
-- Fix up code to compile and test with GHC 5.04.3
--
-- Revision 1.20  2003/12/08 23:55:36  graham
-- Various enhancements to variable bindings and proof structure.
-- New module BuiltInMap coded and tested.
-- Script processor is yet to be completed.
--
-- Revision 1.19  2003/11/24 17:20:35  graham
-- Separate module Vocabulary from module Namespace.
--
-- Revision 1.18  2003/11/24 15:46:03  graham
-- Rationalize N3Parser and N3Formatter to use revised vocabulary
-- terms defined in Namespace.hs
--
-- Revision 1.17  2003/11/14 16:04:43  graham
-- Add primitive query to get integer values from a graph.
--
-- Revision 1.16  2003/11/14 16:01:30  graham
-- Separate RDFVarBinding from module RDFQuery.
--
-- Revision 1.15  2003/11/13 01:13:48  graham
-- Reworked ruleset to use ScopedName lookup.
-- Various minor fixes.
--
-- Revision 1.14  2003/10/16 16:01:49  graham
-- Reworked RDFProof and RDFProofContext to use new query binding
-- framework.  Also fixed a bug in the variable binding filter code that
-- caused failures when a variable used was not bound.
--
-- Revision 1.13  2003/10/15 16:40:52  graham
-- Reworked RDFQuery to use new query binding framework.
-- (Note: still uses VarBindingFilter rather than VarBindingModify.
-- The intent is to incorproate the VarBindingModify logic into RDFProof,
-- displaying the existing use of BindingFilter.)
--
-- Revision 1.12  2003/09/24 18:50:52  graham
-- Revised module format to be Haddock compatible.
--
-- Revision 1.11  2003/07/02 22:39:36  graham
-- Subgraph entailment and Graph closure instance entailment rules
-- now tested.  RDF forward chaining revised to combine output graphs,
-- to preserve blank node relationships.
--
-- Revision 1.10  2003/06/26 15:37:23  graham
-- Added rdfQueryInstance, and tests, all works.
--
-- Revision 1.9  2003/06/19 00:26:29  graham
-- Query binding filter methods tested.
--
-- Revision 1.8  2003/06/18 14:59:27  graham
-- Augmented query variable binding structure.
-- RDFQuery tests OK.
--
-- Revision 1.7  2003/06/18 13:47:33  graham
-- Backchaining query tests complete.
--
-- Revision 1.6  2003/06/18 01:29:29  graham
-- Fixed up some problems with backward chaining queries.
-- Query test cases still to complete.
-- Proof incomplete.
--
-- Revision 1.5  2003/06/17 17:53:08  graham
-- Added backward chaining query primitive.
--
-- Revision 1.4  2003/06/17 16:29:20  graham
-- Eliminate redundant Maybe in return type of rdfQueryPrim.
-- (A null list suffices for the Nothing case.)
--
-- Revision 1.3  2003/06/17 15:59:09  graham
-- Update to use revised version of remapNodes, which accepts a
-- node-mapping function rather than just a Boolean to control conversion
-- of query variable nodes to blank
-- nodes.
--
-- Revision 1.2  2003/06/13 21:40:08  graham
-- Graph closure forward chaining works.
-- Backward chaining generates existentials.
-- Some problems with query logic for backward chaining.
--
-- Revision 1.1  2003/06/12 00:49:06  graham
-- Basic query processor runs test cases OK.
-- Proof framework compiles, not yet tested.
--
| null | https://raw.githubusercontent.com/amccausl/Swish/9a7356300960c62e3f0468067bda0c34ee3606bd/Swish/HaskellRDF/RDFQueryTest.hs | haskell | ------------------------------------------------------------------------------
See end of this file for licence information.
------------------------------------------------------------------------------
|
Module : RDFQueryTest
Stability : provisional
Portability : H98 + multi-parameter classes
a set of variable substitutions, and to apply a set of variable
substitutions to a query pattern to obtain a new graph.
It also tests some primitive graph access functions.
------------------------------------------------------------------------------
debug
----------------------------------------------------------
misc helpers
----------------------------------------------------------
Compare lists for set equivalence:
----------------------------------------------------------
----------------------------------------------------------
----------------------------------------------------------
single relationship graph.
----------------------------------------------------------
apply filtering to result:
----------------------------------------------------------
test handling of unsubstituted variables, and
----------------------------------------------------------
Debug sequence for rdfQuerySubsBlank
(using internals of rdfQuerySubsBlank implementation)
res32 = rdfQuerySubsBlank (fromJust var31) result32
----------------------------------------------------------
----------------------------------------------------------
test45: multiple substitutions used together
(?a daughter ?b, ?a son ?c) => ?b brother ?c
(b1 brother c1, b2 brother c2) if
(?a daughter b1, ?a son c1) && (?a daughter b2, ?a son c2)
(_:c1 son a, _:c1 stepSon b) || (_:c2 stepSon a, _:c2 son b)
test47: multiple ways to multiple solutions
((_:e son a, _:e stepSon b) && (_:f son a, _:f stepSon b)) ||
((_:e son a, _:e stepSon b) && (_:f stepSon a, _:f son b)) ||
((_:e stepSon a, _:e son b) && (_:f son a, _:f stepSon b)) ||
((_:e stepSon a, _:e son b) && (_:f stepSon a, _:f son b))
test48: redundant multiple ways to get solution
(?a son ?b, ?a son ?c) => (?b brother ?c, ?c brother ?b)
(a brother b) if
test49: goal not satisfiable by rule
(?a foo ?b, ?b foo ?a) => (?a bar ?a)
(a bar b) cannot be deduced directly
test50: back-chaining with filter
(?a son ?b, ?a son ?c) => (?b brother ?c, ?c brother ?b)
(a brother b) if
Backward substitution query test suite
----------------------------------------------------------
Instance query test suite
----------------------------------------------------------
The test plan is this:
?f father ?a, ?f father ?b, ?a /= ?b => ?a brother ?b
against
Gr3 brother La3, Gr3 brother Si3
should yield:
_:a father Gr3
_:a father La3
_:b father Si3
should yield:
_:a = Pa2
_:b = Pa2
Pa2 father Gr3
Pa2 father La3
Pa2 father Gr3
Pa2 father Si3
match without any variable substitutions, indicating that it is
a subgraph
Query bindings should be null.
----------------------------------------------------------
Specific test cases
----------------------------------------------------------
Back-chaining query binding modifier
Set up call of rdfQueryBackModify
?a is uri, ?b is uri
?c is blank
?c is literal
?c is untyped literal
?c is typed literal
?c is XML literal
?b is member property
?c is not datatyped with ?x
?a is uri, ?b is uri
?c is blank
?c is literal
?c is untyped literal
?c is typed literal
?c is XML literal
?b is member property
?c is not datatyped with ?x
Variable binding modifier that adds new bindings, if certain
others are present.
simplified version of above for debugging --
----------------------------------------------------------
Test simple value and list queries
----------------------------------------------------------
--
queryList gr = [ ]
queryList gr hd = : rdfQueryList gr ( findrest g )
--
queryList gr res_rdf_nil = []
queryList gr hd = findhead g:rdfQueryList gr (findrest g)
---}
----------------------------------------------------------
Full test suite, main program,
and useful expressions for interactive use
----------------------------------------------------------
------------------------------------------------------------------------------
(at your option) any later version.
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
------------------------------------------------------------------------------
$Log: RDFQueryTest.hs,v $
and to use ScopedName rather than QName.
Various enhancements to variable bindings and proof structure.
New module BuiltInMap coded and tested.
Script processor is yet to be completed.
Rationalize N3Parser and N3Formatter to use revised vocabulary
Add primitive query to get integer values from a graph.
Reworked ruleset to use ScopedName lookup.
Various minor fixes.
framework. Also fixed a bug in the variable binding filter code that
caused failures when a variable used was not bound.
Reworked RDFQuery to use new query binding framework.
Revision 1.11 2003/07/02 22:39:36 graham
to preserve blank node relationships.
Added rdfQueryInstance, and tests, all works.
Revision 1.9 2003/06/19 00:26:29 graham
Query binding filter methods tested.
Augmented query variable binding structure.
Backchaining query tests complete.
Fixed up some problems with backward chaining queries.
Query test cases still to complete.
Proof incomplete.
Added backward chaining query primitive.
(A null list suffices for the Nothing case.)
Update to use revised version of remapNodes, which accepts a
of query variable nodes to blank
nodes.
Revision 1.2 2003/06/13 21:40:08 graham
Backward chaining generates existentials.
Some problems with query logic for backward chaining.
Basic query processor runs test cases OK.
Proof framework compiles, not yet tested.
| $ I d : RDFQueryTest.hs , v 1.23 2004/01/07 19:49:13 graham Exp $
Copyright ( c ) 2003 , . All rights reserved .
Copyright : ( c ) 2003 ,
License : GPL V2
Maintainer :
This module defines test cases for querying an RDF graph to obtain
WNH RIP OUT module Swish . HaskellRDF.RDFQueryTest
where
import Swish.HaskellRDF.RDFQuery
( rdfQueryFind, rdfQueryFilter
, rdfQueryBack, rdfQueryBackFilter, rdfQueryBackModify
, rdfQueryInstance
, rdfQuerySubs, rdfQueryBackSubs
, rdfQuerySubsAll
, rdfQuerySubsBlank, rdfQueryBackSubsBlank
, rdfFindArcs, rdfSubjEq, rdfPredEq, rdfObjEq, rdfFindPredVal
, rdfFindValSubj, rdfFindPredVal, rdfFindPredInt, rdfFindList
, rdfQuerySubs2
)
import Swish.HaskellRDF.RDFVarBinding
( RDFVarBinding, nullRDFVarBinding
, RDFVarBindingModify, RDFVarBindingFilter
, rdfVarBindingUriRef, rdfVarBindingBlank
, rdfVarBindingLiteral
, rdfVarBindingUntypedLiteral, rdfVarBindingTypedLiteral
, rdfVarBindingXMLLiteral, rdfVarBindingDatatyped
, rdfVarBindingMemberProp
)
import Swish.HaskellRDF.RDFGraph
( Arc(..), arcSubj
, RDFGraph, RDFLabel(..)
, isLiteral, isBlank, isQueryVar, makeBlank
, setArcs, getArcs, addArc, add, delete, extract, labels, merge
, allLabels, remapLabels
, mapnode, maplist
, res_rdf_type, res_rdf_first, res_rdf_rest, res_rdf_nil
)
import Swish.HaskellRDF.VarBinding
( VarBinding(..), nullVarBinding
, boundVars, subBinding, makeVarBinding
, applyVarBinding, joinVarBindings
, VarBindingModify(..)
, vbmCompatibility, vbmCompose
, findCompositions, findComposition
, VarBindingFilter(..)
, makeVarFilterModify
, makeVarTestFilter, makeVarCompareFilter
, varBindingId, varFilterDisjunction, varFilterConjunction
, varFilterEQ, varFilterNE
)
import Swish.HaskellUtils.Namespace
( Namespace(..)
, makeNamespaceQName
, ScopedName(..)
, getQName
, makeScopedName
)
import Swish.HaskellRDF.Vocabulary
( namespaceRDF
, namespaceXSD
, namespaceLang, langName
, swishName
, rdf_type, rdf_XMLLiteral
, xsd_boolean, xsd_integer
)
import Swish.HaskellRDF.N3Parser
( ParseResult(..), parseN3fromString )
import Swish.HaskellUtils.QName
( QName(..) )
import Swish.HaskellUtils.ListHelpers
( equiv )
import Swish.HaskellUtils.ErrorM
( ErrorM(Error,Result) )
import Test.HUnit
( Test(TestCase,TestList,TestLabel)
, assertBool, assertEqual, assertString
, runTestTT, runTestText, putTextToHandle )
import System.IO
( Handle, IOMode(WriteMode)
, openFile, hClose, hPutStr, hPutStrLn )
import Data.Maybe
( isJust, fromJust )
-- | Wrapper whose equality compares list contents as unordered collections,
-- using 'equiv' (order and duplication are ignored).  Used so that test
-- assertions on result lists are insensitive to result ordering.
newtype Set a = Set [a] deriving Show

instance (Eq a) => Eq (Set a) where
    Set v1 == Set v2 = v1 `equiv` v2
-- | Build a labelled HUnit test case from a Boolean condition.
test :: String -> Bool -> Test
test lab = TestCase . assertBool lab
-- | Build a labelled test case asserting that an actual value equals
-- the expected value.
testEq :: (Eq a, Show a) => String -> a -> a -> Test
testEq lab expected actual = TestCase (assertEqual lab expected actual)
-- | Build a labelled test case comparing two lists as unordered sets
-- (wrapped in 'Set', so ordering and duplication are ignored).
testLs :: (Eq a, Show a) => String -> [a] -> [a] -> Test
testLs lab expected actual = TestCase (assertEqual lab (Set expected) (Set actual))
-- | Test that the graph denoted by a Notation3 string is an element of
-- the supplied list of graphs.
testGr :: String -> String -> [RDFGraph] -> Test
testGr lab n3str grs =
    let expectedGraph = graphFromString n3str
    in  TestCase $ assertBool lab (expectedGraph `elem` grs)
-- | Parse a Notation3 string into an 'RDFGraph'; calls 'error' with the
-- parser's message if the string does not parse.
graphFromString :: String -> RDFGraph
graphFromString s =
    case parseN3fromString s of
        Error err -> error err
        Result g  -> g
-- | Wrapper whose equality compares lists via 'equiv' (unordered), while
-- 'show' displays the underlying list for readable assertion messages.
data ListTest a = ListTest [a]

instance (Eq a) => Eq (ListTest a) where
    (ListTest a1) == (ListTest a2) = a1 `equiv` a2

instance (Show a) => Show (ListTest a) where
    show (ListTest a) = show a
-- | Build a test case comparing two lists for set-equivalence
-- (order and duplicates ignored), via the 'ListTest' wrapper.
testEqv :: (Eq a, Show a) => String -> [a] -> [a] -> Test
testEqv lab xs ys =
    TestCase (assertEqual ("testEqv:"++lab) (ListTest xs) (ListTest ys))
: simple query qith URI , literal and blank nodes .
-- Common N3 prefix declarations for the test-1 fixtures.
prefix1 =
    "@prefix ex: </> . \n" ++
    " \n"

-- Source graph: two ground statements plus one with a blank-node subject.
graph1 = graphFromString graph1str
graph1str = prefix1 ++
    "ex:s1 ex:p ex:o1 . \n" ++
    "ex:s2 ex:p \"lit1\" . \n" ++
    "[ ex:p ex:o3 ] . \n"

-- Query pattern: any statement with predicate ex:p.
query11 = graphFromString query11str
query11str = prefix1 ++
    "?s ex:p ?o . \n"

-- Result template: rewrite each match with predicate ex:r.
result11 = graphFromString result11str
result11str = prefix1 ++
    "?s ex:r ?o . \n"

-- Expected substituted results, one per match in graph1.
result11a = prefix1 ++
    "ex:s1 ex:r ex:o1 . \n"
result11b = prefix1 ++
    "ex:s2 ex:r \"lit1\" . \n"
result11c = prefix1 ++
    "[ ex:r ex:o3 ] . \n"

var11 = rdfQueryFind query11 graph1
testQuery11 = test "testQuery11" (not $ null var11)

res11 = rdfQuerySubs var11 result11
testResult11 = testEq "testResult11" 3 (length res11)
testResult11a = testGr "testResult11a" result11a res11
testResult11b = testGr "testResult11b" result11b res11
testResult11c = testGr "testResult11c" result11c res11

test1 = TestList
    [ testQuery11, testResult11
    , testResult11a, testResult11b, testResult11c
    ]
-- test2: a range of more complex queries based on a family-tree graph.
-- NOTE(review): tail of this comment line was garbled; verify against upstream.
-- Common N3 prefix declarations for the family-tree fixtures.
prefix2 =
    "@prefix pers: <urn:pers:> . \n" ++
    "@prefix rel: <urn:rel:> . \n" ++
    " \n"

-- Family-tree source graph: wife/son/daughter relations over three
-- generations (suffix digit on each name indicates the generation).
graph2 = graphFromString graph2str
graph2str = prefix2 ++
    "pers:St1 rel:wife pers:Do1 ; \n" ++
    " rel:daughter pers:Ma2 ; \n" ++
    " rel:daughter pers:An2 . \n" ++
    "pers:Pa2 rel:wife pers:Ma2 ; \n" ++
    " rel:son pers:Gr3 ; \n" ++
    " rel:son pers:La3 ; \n" ++
    " rel:son pers:Si3 ; \n" ++
    " rel:son pers:Al3 . \n" ++
    "pers:Br2 rel:wife pers:Ri2 ; \n" ++
    " rel:daughter pers:Ma3 ; \n" ++
    " rel:son pers:Wi3 . \n" ++
    "pers:Gr3 rel:wife pers:Ma3 ; \n" ++
    " rel:son pers:Ro4 ; \n" ++
    " rel:daughter pers:Rh4 . \n" ++
    "pers:Si3 rel:wife pers:Jo3 ; \n" ++
    " rel:son pers:Ol4 ; \n" ++
    " rel:son pers:Lo4 . \n" ++
    "pers:Al3 rel:wife pers:Su3 ; \n" ++
    " rel:son pers:Ha4 ; \n" ++
    " rel:son pers:El4 . \n"

-- test21: wife => husband (single-statement query, 6 matches expected).
query21 = graphFromString query21str
query21str = prefix2 ++
    "?a rel:wife ?b . \n"
result21 = graphFromString result21str
result21str = prefix2 ++
    "?b rel:husband ?a . \n"
result21a = prefix2 ++
    "pers:Do1 rel:husband pers:St1 . \n"
result21b = prefix2 ++
    "pers:Ma2 rel:husband pers:Pa2 . \n"
result21c = prefix2 ++
    "pers:Ri2 rel:husband pers:Br2 . \n"
result21d = prefix2 ++
    "pers:Ma3 rel:husband pers:Gr3 . \n"
result21e = prefix2 ++
    "pers:Jo3 rel:husband pers:Si3 . \n"
result21f = prefix2 ++
    "pers:Su3 rel:husband pers:Al3 . \n"
var21 = rdfQueryFind query21 graph2
testQuery21 = test "testQuery21" (not $ null var21)
res21 = rdfQuerySubs var21 result21
testResult21 = testEq "testResult21" 6 (length res21)
testResult21a = testGr "testResult21a" result21a res21
testResult21b = testGr "testResult21b" result21b res21
testResult21c = testGr "testResult21c" result21c res21
testResult21d = testGr "testResult21d" result21d res21
testResult21e = testGr "testResult21e" result21e res21
testResult21f = testGr "testResult21f" result21f res21

-- test22: son-of-son => grandparent (two-statement join, 5 matches).
query22 = graphFromString query22str
query22str = prefix2 ++
    "?a rel:son ?b . \n" ++
    "?b rel:son ?c . \n"
result22 = graphFromString result22str
result22str = prefix2 ++
    "?a rel:grandparent ?c . \n"
result22a = prefix2 ++
    "pers:Pa2 rel:grandparent pers:Ro4 . \n"
result22b = prefix2 ++
    "pers:Pa2 rel:grandparent pers:Ol4 . \n"
result22c = prefix2 ++
    "pers:Pa2 rel:grandparent pers:Lo4 . \n"
result22d = prefix2 ++
    "pers:Pa2 rel:grandparent pers:Ha4 . \n"
result22e = prefix2 ++
    "pers:Pa2 rel:grandparent pers:El4 . \n"
var22 = rdfQueryFind query22 graph2
testQuery22 = test "testQuery22" (not $ null var22)
res22 = rdfQuerySubs var22 result22
testResult22 = testEq "testResult22" 5 (length res22)
testResult22a = testGr "testResult22a" result22a res22
testResult22b = testGr "testResult22b" result22b res22
testResult22c = testGr "testResult22c" result22c res22
testResult22d = testGr "testResult22d" result22d res22
testResult22e = testGr "testResult22e" result22e res22

-- test23: sons of the same parent => brother.  Note that ?b and ?c can
-- bind to the same node, so each son is also his own "brother" here
-- (26 results in total; the self-pairs are filtered out in test23F below).
query23 = graphFromString query23str
query23str = prefix2 ++
    "?a rel:son ?b . \n" ++
    "?a rel:son ?c . \n"
result23 = graphFromString result23str
result23str = prefix2 ++
    "?b rel:brother ?c . \n"
result23a = prefix2 ++
    "pers:Gr3 rel:brother pers:Gr3 . \n"
result23b = prefix2 ++
    "pers:Gr3 rel:brother pers:La3 . \n"
result23c = prefix2 ++
    "pers:Gr3 rel:brother pers:Si3 . \n"
result23d = prefix2 ++
    "pers:Gr3 rel:brother pers:Al3 . \n"
result23e = prefix2 ++
    "pers:La3 rel:brother pers:Gr3 . \n"
result23f = prefix2 ++
    "pers:La3 rel:brother pers:La3 . \n"
result23g = prefix2 ++
    "pers:La3 rel:brother pers:Si3 . \n"
result23h = prefix2 ++
    "pers:La3 rel:brother pers:Al3 . \n"
result23i = prefix2 ++
    "pers:Si3 rel:brother pers:Gr3 . \n"
result23j = prefix2 ++
    "pers:Si3 rel:brother pers:La3 . \n"
result23k = prefix2 ++
    "pers:Si3 rel:brother pers:Si3 . \n"
result23l = prefix2 ++
    "pers:Si3 rel:brother pers:Al3 . \n"
result23m = prefix2 ++
    "pers:Al3 rel:brother pers:Gr3 . \n"
result23n = prefix2 ++
    "pers:Al3 rel:brother pers:La3 . \n"
result23o = prefix2 ++
    "pers:Al3 rel:brother pers:Si3 . \n"
result23p = prefix2 ++
    "pers:Al3 rel:brother pers:Al3 . \n"
result23q = prefix2 ++
    "pers:Wi3 rel:brother pers:Wi3 . \n"
result23r = prefix2 ++
    "pers:Ro4 rel:brother pers:Ro4 . \n"
result23s = prefix2 ++
    "pers:Ol4 rel:brother pers:Lo4 . \n"
result23t = prefix2 ++
    "pers:Ol4 rel:brother pers:Ol4 . \n"
result23u = prefix2 ++
    "pers:Lo4 rel:brother pers:Lo4 . \n"
result23v = prefix2 ++
    "pers:Lo4 rel:brother pers:Ol4 . \n"
result23w = prefix2 ++
    "pers:Ha4 rel:brother pers:El4 . \n"
result23x = prefix2 ++
    "pers:Ha4 rel:brother pers:Ha4 . \n"
result23y = prefix2 ++
    "pers:El4 rel:brother pers:El4 . \n"
result23z = prefix2 ++
    "pers:El4 rel:brother pers:Ha4 . \n"
var23 = rdfQueryFind query23 graph2
testQuery23 = test "testQuery23" (not $ null var23)
res23 = rdfQuerySubs var23 result23
testResult23 = testEq "testResult23" 26 (length res23)
testResult23a = testGr "testResult23a" result23a res23
testResult23b = testGr "testResult23b" result23b res23
testResult23c = testGr "testResult23c" result23c res23
testResult23d = testGr "testResult23d" result23d res23
testResult23e = testGr "testResult23e" result23e res23
testResult23f = testGr "testResult23f" result23f res23
testResult23g = testGr "testResult23g" result23g res23
testResult23h = testGr "testResult23h" result23h res23
testResult23i = testGr "testResult23i" result23i res23
testResult23j = testGr "testResult23j" result23j res23
testResult23k = testGr "testResult23k" result23k res23
testResult23l = testGr "testResult23l" result23l res23
testResult23m = testGr "testResult23m" result23m res23
testResult23n = testGr "testResult23n" result23n res23
testResult23o = testGr "testResult23o" result23o res23
testResult23p = testGr "testResult23p" result23p res23
testResult23q = testGr "testResult23q" result23q res23
testResult23r = testGr "testResult23r" result23r res23
testResult23s = testGr "testResult23s" result23s res23
testResult23t = testGr "testResult23t" result23t res23
testResult23u = testGr "testResult23u" result23u res23
testResult23v = testGr "testResult23v" result23v res23
testResult23w = testGr "testResult23w" result23w res23
testResult23x = testGr "testResult23x" result23x res23
testResult23y = testGr "testResult23y" result23y res23
testResult23z = testGr "testResult23z" result23z res23
-- test23F: repeat test23 with a filter requiring ?b /= ?c, which removes
-- the ten self-brother bindings, leaving 16 of the 26 results.
filter23 = varFilterNE (Var "b") (Var "c") :: RDFVarBindingFilter

var23F = rdfQueryFilter filter23 var23
res23F = rdfQuerySubs var23F result23

-- Labels fixed: these previously reused the unfiltered test names
-- (e.g. "testResult23"), so filtered failures were indistinguishable from
-- unfiltered ones in test reports.
testResult23F  = testEq "testResult23F" 16 (length res23F)
testResult23bF = testGr "testResult23bF" result23b res23F
testResult23cF = testGr "testResult23cF" result23c res23F
testResult23dF = testGr "testResult23dF" result23d res23F
testResult23eF = testGr "testResult23eF" result23e res23F
testResult23gF = testGr "testResult23gF" result23g res23F
testResult23hF = testGr "testResult23hF" result23h res23F
testResult23iF = testGr "testResult23iF" result23i res23F
testResult23jF = testGr "testResult23jF" result23j res23F
testResult23lF = testGr "testResult23lF" result23l res23F
testResult23mF = testGr "testResult23mF" result23m res23F
testResult23nF = testGr "testResult23nF" result23n res23F
testResult23oF = testGr "testResult23oF" result23o res23F
testResult23sF = testGr "testResult23sF" result23s res23F
testResult23vF = testGr "testResult23vF" result23v res23F
testResult23wF = testGr "testResult23wF" result23w res23F
testResult23zF = testGr "testResult23zF" result23z res23F
-- test24: daughters of the same parent => sister (self-pairs included,
-- as in test23; 6 results expected).
query24 = graphFromString query24str
query24str = prefix2 ++
    "?a rel:daughter ?b . \n" ++
    "?a rel:daughter ?c . \n"
result24 = graphFromString result24str
result24str = prefix2 ++
    "?b rel:sister ?c . \n"
result24a = prefix2 ++
    "pers:Ma2 rel:sister pers:Ma2 . \n"
result24b = prefix2 ++
    "pers:Ma2 rel:sister pers:An2 . \n"
result24c = prefix2 ++
    "pers:An2 rel:sister pers:Ma2 . \n"
result24d = prefix2 ++
    "pers:An2 rel:sister pers:An2 . \n"
result24e = prefix2 ++
    "pers:Ma3 rel:sister pers:Ma3 . \n"
result24f = prefix2 ++
    "pers:Rh4 rel:sister pers:Rh4 . \n"
var24 = rdfQueryFind query24 graph2
testQuery24 = test "testQuery24" (not $ null var24)
res24 = rdfQuerySubs var24 result24
testResult24 = testEq "testResult24" 6 (length res24)
testResult24a = testGr "testResult24a" result24a res24
testResult24b = testGr "testResult24b" result24b res24
testResult24c = testGr "testResult24c" result24c res24
testResult24d = testGr "testResult24d" result24d res24
testResult24e = testGr "testResult24e" result24e res24
testResult24f = testGr "testResult24f" result24f res24

-- test25: son and daughter of the same parent => mutual sister/brother
-- statements in a two-statement result template (2 matches expected).
query25 = graphFromString query25str
query25str = prefix2 ++
    "?a rel:son ?b . \n" ++
    "?a rel:daughter ?c . \n"
result25 = graphFromString result25str
result25str = prefix2 ++
    "?b rel:sister ?c . \n" ++
    "?c rel:brother ?b . \n"
result25a = prefix2 ++
    "pers:Wi3 rel:sister pers:Ma3 . \n" ++
    "pers:Ma3 rel:brother pers:Wi3 . \n"
result25b = prefix2 ++
    "pers:Ro4 rel:sister pers:Rh4 . \n" ++
    "pers:Rh4 rel:brother pers:Ro4 . \n"
var25 = rdfQueryFind query25 graph2
testQuery25 = test "testQuery25" (not $ null var25)
res25 = rdfQuerySubs var25 result25
testResult25 = testEq "testResult25" 2 (length res25)
testResult25a = testGr "testResult25a" result25a res25
testResult25b = testGr "testResult25b" result25b res25
-- All test-2 cases (family-tree queries 21-25, including the filtered
-- variants of test23).
test2 = TestList
    [ testQuery21, testResult21
    , testResult21a, testResult21b, testResult21c
    , testResult21d, testResult21e, testResult21f
    , testQuery22, testResult22
    , testResult22a, testResult22b, testResult22c
    , testResult22d, testResult22e
    , testQuery23, testResult23
    , testResult23a, testResult23b, testResult23c
    , testResult23d, testResult23e, testResult23f
    , testResult23g, testResult23h, testResult23i
    , testResult23j, testResult23k, testResult23l
    , testResult23m, testResult23n, testResult23o
    , testResult23p, testResult23q, testResult23r
    , testResult23s, testResult23t, testResult23u
    , testResult23v, testResult23w, testResult23x
    , testResult23y, testResult23z
    , testResult23F
    , testResult23bF, testResult23cF
    , testResult23dF, testResult23eF
    , testResult23gF, testResult23hF, testResult23iF
    , testResult23jF, testResult23lF
    , testResult23mF, testResult23nF, testResult23oF
    , testResult23sF
    , testResult23vF, testResult23wF
    , testResult23zF
    , testQuery24, testResult24
    , testResult24a, testResult24b, testResult24c
    , testResult24d, testResult24e, testResult24f
    ]
-- test3: rdfQuerySubsAll, rdfQuerySubsBlank
-- test31: query a grandparent statement, substitute into a two-statement
-- template that mentions a variable (?b) the query never binds.
graph3 = graphFromString graph3str
graph3str = prefix2 ++
    "pers:Pa2 rel:grandparent pers:Ro4 . \n" ++
    "pers:Pa2 rel:grandparent pers:Ol4 . \n"
query31 = graphFromString query31str
query31str = prefix2 ++
    "?a rel:grandparent ?c . \n"
result31 = graphFromString result31str
result31str = prefix2 ++
    "?a rel:son ?b . \n" ++
    "?b rel:son ?c . \n"
-- Expected partial substitutions: ?b remains as an unbound variable.
result31a = prefix2 ++
    "pers:Pa2 rel:son ?b . \n" ++
    "?b rel:son pers:Ro4 . \n"
result31b = prefix2 ++
    "pers:Pa2 rel:son ?b . \n" ++
    "?b rel:son pers:Ol4 . \n"
var31 = rdfQueryFind query31 graph3
testQuery31 = test "testQuery31" (not $ null var31)
-- rdfQuerySubsAll pairs each substituted graph with its unbound variables.
res31pairs = rdfQuerySubsAll var31 result31
(res31,res31v) = unzip res31pairs
-- Two matches, each leaving exactly ?b unbound in the substituted result.
testUnsubs31  = testEq "testUnsubs31" 2 (length res31v)
testUnsubs31a = testEq "testUnsubs31a" [(Var "b")] (head res31v)
-- Label fixed: this assertion previously reported as "testUnsubs31a"
-- (copy-paste error), hiding which of the two cases failed.
testUnsubs31b = testEq "testUnsubs31b" [(Var "b")] (head . tail $ res31v)
testResult31  = testEq "testResult31" 2 (length res31)
testResult31a = testGr "testResult31a" result31a res31
testResult31b = testGr "testResult31b" result31b res31
-- test32/33: the same query, but the result template contains explicit
-- blank nodes and an unbound variable ?d.
query32 = graphFromString query32str
query32str = prefix2 ++
    "?a rel:grandparent ?c . \n"
result32 = graphFromString result32str
result32str = prefix2 ++
    "?a rel:wife _:b . \n" ++
    "?d rel:any _:b0 . \n" ++
    "?a rel:son ?b . \n" ++
    "?b rel:son ?c . \n"
-- Expected results: unbound variables ?b and ?d become fresh blank nodes.
result32a = prefix2 ++
    "pers:Pa2 rel:wife _:b . \n" ++
    "_:d0 rel:any _:b0 . \n" ++
    "pers:Pa2 rel:son _:b1 . \n" ++
    "_:b1 rel:son pers:Ro4 . \n"
result32b = prefix2 ++
    "pers:Pa2 rel:wife _:b . \n" ++
    "_:d0 rel:any _:b0 . \n" ++
    "pers:Pa2 rel:son _:b1 . \n" ++
    "_:b1 rel:son pers:Ol4 . \n"
-- rdfQuerySubsBlank maps unbound variables to blank nodes ...
res32 = rdfQuerySubsBlank var31 result32
testResult32 = testEq "testResult32" 2 (length res32)
testResult32a = testGr "testResult32a" result32a res32
testResult32b = testGr "testResult32b" result32b res32
-- ... whereas rdfQuerySubs discards incompletely substituted results.
res33 = rdfQuerySubs var31 result32
testResult33 = testEq "testResult33" 0 (length res33)
-- All test-3 cases (partial and blank-node substitution behaviour).
test3 = TestList
    [ testQuery31
    , testUnsubs31, testUnsubs31a, testUnsubs31b
    , testResult31, testResult31a, testResult31b
    , testResult32, testResult32a, testResult32b
    , testResult33
    ]
-- Scratch bindings for interactive debugging of the blank-node remapping
-- used by rdfQuerySubsBlank; not referenced by any TestList.
d1 = result32
d2 = rdfQuerySubs2 (head $ var31) d1
d3 = allLabels isBlank (fst d2)
d4 = remapLabels (snd d2) d3 makeBlank (fst d2)
: test of backward - chaining query
-- Common N3 prefix declarations for the backward-chaining fixtures.
prefix4 =
    "@prefix pers: <urn:pers:> . \n" ++
    "@prefix rel: <urn:rel:> . \n" ++
    " \n"

-- test41: backward query with no unbound variables in the result.
graph41 = graphFromString graph41str
graph41str = prefix4 ++
    "pers:St1 rel:wife pers:Do1 . \n"
query41 = graphFromString query41str
query41str = prefix4 ++
    "?a rel:wife ?b . \n"
result41 = graphFromString result41str
result41str = prefix4 ++
    "?b rel:husband ?a . \n"
result41a = prefix4 ++
    "pers:Do1 rel:husband pers:St1 . \n"
var41 = rdfQueryBack query41 graph41
testQuery41 = test "testQuery41" (not $ null var41)
testQuery41a = testEq "testQuery41a" 1 (length var41)
-- rdfQueryBackSubs yields, per alternative, (substituted graph, unbound vars).
res41 = rdfQueryBackSubs var41 result41
testResult41 = testEq "testResult41" 1 (length res41)
testResult41a = testGr "testResult41a" result41a (fst $ unzip $ head res41)
testUnbound41a = testLs "testUnbound41a" [] (snd $ head $ head res41)

-- test42: backward query leaving one variable (?b) unbound.
graph42 = graphFromString graph42str
graph42str = prefix4 ++
    "pers:Pa2 rel:grandparent pers:Ro4 . \n"
query42 = graphFromString query42str
query42str = prefix4 ++
    "?a rel:grandparent ?c . \n"
result42 = graphFromString result42str
result42str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?b rel:son ?c . \n"
result42a = prefix4 ++
    "pers:Pa2 rel:son ?b . \n" ++
    "?b rel:son pers:Ro4 . \n"
var42 = rdfQueryBack query42 graph42
testQuery42 = test "testQuery42" (not $ null var42)
testQuery42a = testEq "testQuery42a" 1 (length var42)
res42 = rdfQueryBackSubs var42 result42
testResult42 = testEq "testResult42" 1 (length res42)
testResult42a = testGr "testResult42a" result42a (fst $ unzip $ head res42)
testUnbound42a = testLs "testUnbound42a" [(Var "b")] (snd $ head $ head res42)

-- test43: unbound variable (?a) appears in both result statements.
graph43 = graphFromString graph43str
graph43str = prefix4 ++
    "pers:Gr3 rel:brother pers:La3 . \n"
query43 = graphFromString query43str
query43str = prefix4 ++
    "?b rel:brother ?c . \n"
result43 = graphFromString result43str
result43str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:son ?c . \n"
result43a = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:son pers:La3 . \n"
var43 = rdfQueryBack query43 graph43
testQuery43 = test "testQuery43" (not $ null var43)
testQuery43a = testEq "testQuery43a" 1 (length var43)
res43 = rdfQueryBackSubs var43 result43
testResult43 = testEq "testResult43" 1 (length res43)
testResult43a = testGr "testResult43a" result43a (fst $ unzip $ head res43)
testUnbound43a = testLs "testUnbound43a" [(Var "a")] (snd $ head $ head res43)

-- test44: a two-statement query matching one statement two ways gives
-- two alternative binding sets.
graph44 = graphFromString graph44str
graph44str = prefix4 ++
    "pers:Pa2 rel:grandson pers:Ro4 . \n"
query44 = graphFromString query44str
query44str = prefix4 ++
    "?a rel:grandson ?b . \n" ++
    "?c rel:grandson ?d . \n"
result44 = graphFromString result44str
result44str = prefix4 ++
    "?a rel:son ?m . \n" ++
    "?m rel:son ?b . \n" ++
    "?c rel:daughter ?n . \n" ++
    "?n rel:son ?d . \n"
result44a = prefix4 ++
    "pers:Pa2 rel:son ?m . \n" ++
    "?m rel:son pers:Ro4 . \n" ++
    "?c rel:daughter ?n . \n" ++
    "?n rel:son ?d . \n"
unbound44a = [(Var "m"),(Var "c"),(Var "n"),(Var "d")]
result44b = prefix4 ++
    "?a rel:son ?m . \n" ++
    "?m rel:son ?b . \n" ++
    "pers:Pa2 rel:daughter ?n . \n" ++
    "?n rel:son pers:Ro4 . \n"
unbound44b = [(Var "a"),(Var "m"),(Var "b"),(Var "n")]
var44 = rdfQueryBack query44 graph44
testQuery44 = test "testQuery44" (not $ null var44)
testQuery44a = testEq "testQuery44a" 2 (length var44)
res44 = rdfQueryBackSubs var44 result44
testResult44 = testEq "testResult44" 2 (length res44)
[res44_1,res44_2] = res44
testResult44a = testGr "testResult44a" result44a (fst $ unzip res44_2)
testUnbound44a = testLs "testUnbound44a" unbound44a (snd $ head res44_2)
testResult44b = testGr "testResult44b" result44b (fst $ unzip res44_1)
testUnbound44b = testLs "testUnbound44b" unbound44b (snd $ head res44_1)
-- test45: two goal statements matched by one query statement give a
-- single alternative containing two substituted results.
graph45 = graphFromString graph45str
graph45str = prefix4 ++
    "pers:Rh4 rel:brother pers:Ro4 . \n" ++
    "pers:Ma3 rel:brother pers:Wi3 . \n"
query45 = graphFromString query45str
query45str = prefix4 ++
    "?b rel:brother ?c . \n"
result45 = graphFromString result45str
result45str = prefix4 ++
    "?a rel:daughter ?b . \n" ++
    "?a rel:son ?c . \n"
result45a1 = prefix4 ++
    "?a rel:daughter pers:Rh4 . \n" ++
    "?a rel:son pers:Ro4 . \n"
unbound45a1 = [(Var "a")]
result45a2 = prefix4 ++
    "?a rel:daughter pers:Ma3 . \n" ++
    "?a rel:son pers:Wi3 . \n"
unbound45a2 = [(Var "a")]
var45 = rdfQueryBack query45 graph45
testQuery45 = test "testQuery45" (not $ null var45)
testQuery45a = testEq "testQuery45a" 1 (length var45)
res45 = rdfQueryBackSubs var45 result45
testResult45 = testEq "testResult45" 1 (length res45)
[res45_1] = res45
testResult45_1 = testEq "testResult45_1" 2 (length res45_1)
[res45_11,res45_12] = res45_1
testResult45a1 = testGr "testResult45a1" result45a1 [fst res45_11]
testUnbound45a1 = testLs "testUnbound45a1" unbound45a1 (snd res45_11)
testResult45a2 = testGr "testResult45a2" result45a2 [fst res45_12]
testUnbound45a2 = testLs "testUnbound45a2" unbound45a2 (snd res45_12)
-- test46: multiple ways to get a solution
-- (?c son ?a, ?c stepSon ?b) => (?a stepBrother ?b, ?b stepBrother ?a)
-- NOTE(review): the last line of this comment was garbled ("a b if"); verify upstream.
-- test46: one goal statement, two-statement symmetric query: two
-- alternatives, each with a single result.
graph46 = graphFromString graph46str
graph46str = prefix4 ++
    "pers:Gr3 rel:stepbrother pers:St3 . \n"
query46 = graphFromString query46str
query46str = prefix4 ++
    "?b rel:stepbrother ?c . \n" ++
    "?c rel:stepbrother ?b . \n"
result46 = graphFromString result46str
result46str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:stepson ?c . \n"
result46a = prefix4 ++
    "?a rel:son pers:St3 . \n" ++
    "?a rel:stepson pers:Gr3 . \n"
unbound46a = [(Var "a")]
result46b = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:stepson pers:St3 . \n"
unbound46b = [(Var "a")]
var46 = rdfQueryBack query46 graph46
testQuery46 = test "testQuery46" (not $ null var46)
testQuery46a = testEq "testQuery46a" 2 (length var46)
res46 = rdfQueryBackSubs var46 result46
testResult46 = testEq "testResult46" 2 (length res46)
[res46_1,res46_2] = res46
testResult46_1 = testEq "testResult46_1" 1 (length res46_1)
testResult46_2 = testEq "testResult46_2" 1 (length res46_2)
[res46_11] = res46_1
[res46_21] = res46_2
testResult46a = testGr "testResult46a" result46a [fst res46_11]
testUnbound46a = testLs "testUnbound46a" unbound46a (snd res46_11)
testResult46b = testGr "testResult46b" result46b [fst res46_21]
testUnbound46b = testLs "testUnbound46b" unbound46b (snd res46_21)
-- (?c son ?a, ?c stepSon ?b) => (?a stepBrother ?b, ?b stepBrother ?a)
-- NOTE(review): garbled continuation read "(a stepBrother b, d) if"; verify upstream.
-- test47: two symmetric goal statements against a symmetric query:
-- four alternatives of two results each.
graph47 = graphFromString graph47str
graph47str = prefix4 ++
    "pers:Gr3 rel:stepbrother pers:St3 . \n" ++
    "pers:St3 rel:stepbrother pers:Gr3 . \n"
query47 = graphFromString query47str
query47str = prefix4 ++
    "?b rel:stepbrother ?c . \n" ++
    "?c rel:stepbrother ?b . \n"
result47 = graphFromString result47str
result47str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:stepson ?c . \n"
result47a1 = prefix4 ++
    "?a rel:son pers:St3 . \n" ++
    "?a rel:stepson pers:Gr3 . \n"
unbound47a1 = [(Var "a")]
result47a2 = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:stepson pers:St3 . \n"
unbound47a2 = [(Var "a")]
result47b1 = prefix4 ++
    "?a rel:stepson pers:St3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound47b1 = [(Var "a")]
result47b2 = prefix4 ++
    "?a rel:stepson pers:St3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound47b2 = [(Var "a")]
result47c1 = prefix4 ++
    "?a rel:son pers:St3 . \n" ++
    "?a rel:stepson pers:Gr3 . \n"
unbound47c1 = [(Var "a")]
result47c2 = prefix4 ++
    "?a rel:son pers:St3 . \n" ++
    "?a rel:stepson pers:Gr3 . \n"
unbound47c2 = [(Var "a")]
result47d1 = prefix4 ++
    "?a rel:stepson pers:St3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound47d1 = [(Var "a")]
result47d2 = prefix4 ++
    "?a rel:son pers:St3 . \n" ++
    "?a rel:stepson pers:Gr3 . \n"
unbound47d2 = [(Var "a")]
var47 = rdfQueryBack query47 graph47
testQuery47 = test "testQuery47" (not $ null var47)
testQuery47a = testEq "testQuery47a" 4 (length var47)
res47 = rdfQueryBackSubs var47 result47
testResult47 = testEq "testResult47" 4 (length res47)
[res47_1,res47_2,res47_3,res47_4] = res47
testResult47_1 = testEq "testResult47_1" 2 (length res47_1)
testResult47_2 = testEq "testResult47_2" 2 (length res47_2)
testResult47_3 = testEq "testResult47_3" 2 (length res47_3)
testResult47_4 = testEq "testResult47_4" 2 (length res47_4)
[res47_11,res47_12] = res47_1
[res47_21,res47_22] = res47_2
[res47_31,res47_32] = res47_3
[res47_41,res47_42] = res47_4
testResult47a1 = testGr "testResult47a1" result47a1 [fst res47_11]
testUnbound47a1 = testLs "testUnbound47a1" unbound47a1 (snd res47_11)
testResult47a2 = testGr "testResult47a2" result47a2 [fst res47_12]
testUnbound47a2 = testLs "testUnbound47a2" unbound47a2 (snd res47_12)
testResult47b1 = testGr "testResult47b1" result47b1 [fst res47_21]
testUnbound47b1 = testLs "testUnbound47b1" unbound47b1 (snd res47_21)
testResult47b2 = testGr "testResult47b2" result47b2 [fst res47_22]
testUnbound47b2 = testLs "testUnbound47b2" unbound47b2 (snd res47_22)
testResult47c1 = testGr "testResult47c1" result47c1 [fst res47_31]
testUnbound47c1 = testLs "testUnbound47c1" unbound47c1 (snd res47_31)
testResult47c2 = testGr "testResult47c2" result47c2 [fst res47_32]
testUnbound47c2 = testLs "testUnbound47c2" unbound47c2 (snd res47_32)
testResult47d1 = testGr "testResult47d1" result47d1 [fst res47_41]
testUnbound47d1 = testLs "testUnbound47d1" unbound47d1 (snd res47_41)
testResult47d2 = testGr "testResult47d2" result47d2 [fst res47_42]
testUnbound47d2 = testLs "testUnbound47d2" unbound47d2 (snd res47_42)
-- (_:c1 son ?a, _:c1 son ?b) || (_:c2 son ?b, _:c2 son ?a)
-- NOTE(review): reconstructed from garbled comment text; verify upstream.
-- test48: symmetric brother query against one statement: two
-- alternatives, one result each.
graph48 = graphFromString graph48str
graph48str = prefix4 ++
    "pers:Gr3 rel:brother pers:La3 . \n"
query48 = graphFromString query48str
query48str = prefix4 ++
    "?b rel:brother ?c . \n" ++
    "?c rel:brother ?b . \n"
result48 = graphFromString result48str
result48str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:son ?c . \n"
result48a = prefix4 ++
    "?a rel:son pers:La3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound48a = [(Var "a")]
result48b = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:son pers:La3 . \n"
unbound48b = [(Var "a")]
var48 = rdfQueryBack query48 graph48
testQuery48 = test "testQuery48" (not $ null var48)
testQuery48a = testEq "testQuery48a" 2 (length var48)
res48 = rdfQueryBackSubs var48 result48
testResult48 = testEq "testResult48" 2 (length res48)
[res48_1,res48_2] = res48
testResult48_1 = testEq "testResult48_1" 1 (length res48_1)
testResult48_2 = testEq "testResult48_2" 1 (length res48_2)
[res48_11] = res48_1
[res48_21] = res48_2
testResult48a = testGr "testResult48a" result48a [fst res48_11]
testUnbound48a = testLs "testUnbound48a" unbound48a (snd res48_11)
testResult48b = testGr "testResult48b" result48b [fst res48_21]
testUnbound48b = testLs "testUnbound48b" unbound48b (snd res48_21)

-- test49: query predicate does not occur in the goal graph: no solutions.
graph49 = graphFromString graph49str
graph49str = prefix4 ++
    "pers:Gr3 rel:foo pers:La3 . \n"
query49 = graphFromString query49str
query49str = prefix4 ++
    "?a rel:bar ?a . \n"
result49 = graphFromString result49str
result49str = prefix4 ++
    "?a rel:foo ?b . \n" ++
    "?b rel:foo ?a . \n"
var49 = rdfQueryBack query49 graph49
testQuery49 = test "testQuery49" (null var49)
testQuery49a = testEq "testQuery49a" 0 (length var49)
res49 = rdfQueryBackSubs var49 result49
testResult49 = testEq "testResult49" 0 (length res49)
-- (_:c1 son ?a, _:c1 son ?b) || (_:c2 son ?b, _:c2 son ?a)
-- NOTE(review): reconstructed from garbled comment text; verify upstream.
-- test50: self-brother statement matches the symmetric query two ways,
-- and a ?b /= ?c filter then eliminates all alternatives.
graph50 = graphFromString graph50str
graph50str = prefix4 ++
    "pers:Gr3 rel:brother pers:Gr3 . \n"
query50 = graphFromString query50str
query50str = prefix4 ++
    "?b rel:brother ?c . \n" ++
    "?c rel:brother ?b . \n"
result50 = graphFromString result50str
result50str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:son ?c . \n"
result50a = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound50a = [(Var "a")]
result50b = prefix4 ++
    "?a rel:son pers:Gr3 . \n" ++
    "?a rel:son pers:Gr3 . \n"
unbound50b = [(Var "a")]
var50 = rdfQueryBack query50 graph50
testQuery50 = test "testQuery50" (not $ null var50)
testQuery50a = testEq "testQuery50a" 2 (length var50)
res50 = rdfQueryBackSubs var50 result50
testResult50 = testEq "testResult50" 2 (length res50)
[res50_1,res50_2] = res50
testResult50_1 = testEq "testResult50_1" 1 (length res50_1)
testResult50_2 = testEq "testResult50_2" 1 (length res50_2)
[res50_11] = res50_1
[res50_21] = res50_2
testResult50a = testGr "testResult50a" result50a [fst res50_11]
testUnbound50a = testLs "testUnbound50a" unbound50a (snd res50_11)
testResult50b = testGr "testResult50b" result50b [fst res50_21]
testUnbound50b = testLs "testUnbound50b" unbound50b (snd res50_21)
-- Filtering ?b /= ?c removes both (self-pair) alternatives.
filter50 = varFilterNE (Var "b") (Var "c") :: RDFVarBindingFilter
var50F = rdfQueryBackFilter filter50 var50
res50F = rdfQueryBackSubs var50F result50
testResult50F = testEq "testResult50F" 0 (length res50F)
-- All test-4 cases (backward-chaining queries 41-50).
test4 = TestList
    [ testQuery41, testQuery41a, testResult41
    , testResult41a, testUnbound41a
    , testQuery42, testQuery42a, testResult42
    , testResult42a, testUnbound42a
    , testQuery43, testQuery43a, testResult43
    , testResult43a, testUnbound43a
    , testQuery44, testQuery44a, testResult44
    , testResult44a, testUnbound44a
    , testResult44b, testUnbound44b
    , testQuery45, testQuery45a, testResult45
    , testResult45_1
    , testResult45a1, testUnbound45a1
    , testResult45a2, testUnbound45a2
    , testQuery46, testQuery46a, testResult46
    , testResult46_1, testResult46_2
    , testResult46a, testUnbound46a
    , testResult46b, testUnbound46b
    , testQuery47, testQuery47a, testResult47
    , testResult47_1, testResult47_2, testResult47_3, testResult47_4
    , testResult47a1, testUnbound47a1
    , testResult47a2, testUnbound47a2
    , testResult47b1, testUnbound47b1
    , testResult47b2, testUnbound47b2
    , testResult47c1, testUnbound47c1
    , testResult47c2, testUnbound47c2
    , testResult47d1, testUnbound47d1
    , testResult47d2, testUnbound47d2
    , testQuery48, testQuery48a, testResult48
    , testResult48_1, testResult48_2
    , testResult48a, testUnbound48a
    , testResult48b, testUnbound48b
    , testQuery49, testQuery49a, testResult49
    , testQuery50, testQuery50a, testResult50
    , testResult50_1, testResult50_2
    , testResult50a, testUnbound50a
    , testResult50b, testUnbound50b
    , testResult50F
    ]
-- test6: chained backward/instance queries:
-- (1) perform a backward chaining query against some desired result
-- (2) perform an instance query of that result against 'graph2' (see above)
-- (3) substitute this into the query; should yield a fully ground graph
-- (4) use this result in an instance query against 'graph2': it should
--     yield an empty variable binding
-- NOTE(review): steps reconstructed from garbled comment text; verify upstream.
-- Goal graph and fixtures for the chained query of test6.
graph61 = graphFromString graph61str
graph61str = prefix4 ++
    "pers:Gr3 rel:brother pers:La3 . \n" ++
    "pers:Gr3 rel:brother pers:Si3 . \n"
query61 = graphFromString query61str
query61str = prefix4 ++
    "?b rel:brother ?c . \n"
result61 = graphFromString result61str
result61str = prefix4 ++
    "?a rel:son ?b . \n" ++
    "?a rel:son ?c . \n"
-- Expected backward result with unbound ?a as fresh blank nodes.
result61a = prefix4 ++
    "_:a1 rel:son pers:Gr3 . \n" ++
    "_:a1 rel:son pers:La3 . \n" ++
    "_:a2 rel:son pers:Gr3 . \n" ++
    "_:a2 rel:son pers:Si3 . \n"
-- Expected ground graph after instance query against graph2.
result63a = prefix4 ++
    "pers:Pa2 rel:son pers:Gr3 . \n" ++
    "pers:Pa2 rel:son pers:La3 . \n" ++
    "pers:Pa2 rel:son pers:Gr3 . \n" ++
    "pers:Pa2 rel:son pers:Si3 . \n"
-- 1. Backchain query with blank substitutions
-- Step 1: backward query, substituting blanks for unbound variables,
-- then merge the two partial results into a single graph.
var61 = rdfQueryBack query61 graph61
testQuery61 = test "testQuery61" (not $ null var61)
testQuery61a = testEq "testQuery61a" 1 (length var61)
res61 = rdfQueryBackSubsBlank var61 result61
testResult61 = testEq "testResult61" 1 (length res61)
[[res61a1,res61a2]] = res61
res61a = merge res61a1 res61a2
testResult61a = testGr "testResult61a" result61a [res61a]
-- 2. Instance query against 'graph2'
-- Step 2: instance query of the merged backward result against graph2.
var62 = rdfQueryInstance res61a graph2
testQuery62 = test "testQuery62" (not $ null var62)
testQuery62a = testEq "testQuery62a" 1 (length var62)
-- 3. Substitute into instance query graph
-- Step 3: substituting the instance bindings should fully ground the graph.
res63 = rdfQuerySubs var62 res61a
testQuery63 = test "testQuery63" (not $ null res63)
testQuery63a = testEq "testQuery63a" 1 (length res63)
[res63a] = res63
testResult63a = testGr "testResult63a" result63a [res63a]
-- 4. Repeat instance query against 'graph2'
-- Step 4: repeating the instance query on the ground graph should give a
-- single binding with no enumerable variables left.
var64 = rdfQueryInstance res63a graph2
testQuery64 = test "testQuery64" (not $ null var64)
testQuery64a = testEq "testQuery64a" 1 (length var64)
[var64a] = var64
testQuery64b = test "testQuery64b" (null $ vbEnum var64a)
-- All test-6 cases (chained backward/instance query consistency).
test6 = TestList
    [ testQuery61, testQuery61a, testResult61, testResult61a
    , testQuery62, testQuery62a
    , testQuery63, testQuery63a, testResult63a
    , testQuery64, testQuery64a, testQuery64b
    ]
-- Tests for rdfQueryBackModify:
-- (1) simple filter
-- (2) allocate new binding
{- NOTE(review): restored block-comment opener that was lost in this
   revision (the closing delimiter below was previously orphaned).
rdfQueryBackModify ::
    RDFVarBindingModify -> [[RDFVarBinding]] -> [[RDFVarBinding]]
rdfQueryBackModify qbm qbss = concatMap (rdfQueryBackModify1 qbm) qbss
-}
-- Shared labels (variables, URI references, literals, blank nodes) used
-- by the variable-binding modifier tests below.
baseex = "/"
baserdf = nsURI namespaceRDF
q_dattyp = (makeScopedName "" baseex "datatype")

-- Query variables.
v_a = Var "a"
v_b = Var "b"
v_c = Var "c"
v_x = Var "x"
v_y = Var "y"
v_z = Var "z"

-- URI references, including RDF container-membership properties _1/_2.
u_s = Res (makeScopedName "" baseex "s")
u_o = Res (makeScopedName "" baseex "o")
u_p = Res (makeScopedName "" baseex "p")
u_p1 = Res (makeScopedName "" baseex "p1")
u_p2a = Res (makeScopedName "" baseex "p2a")
u_p2b = Res (makeScopedName "" baseex "p2b")
u_m1 = Res (makeScopedName "" baserdf "_1")
u_m2 = Res (makeScopedName "" baserdf "_2")
u_rt = Res rdf_type
u_xt = Res rdf_XMLLiteral
u_dt = Res q_dattyp

-- Literals: plain, language-tagged, and datatyped.
l_1 = Lit "l1" Nothing
l_2 = Lit "l2" (Just $ langName "fr")
l_3 = Lit "l3" (Just q_dattyp)
-- NOTE(review): the l_4 definition was lost here; the residual text
-- 'was: ("fr")' suggests it was a datatyped literal whose type was
-- changed from a language tag.  l_4 is referenced below; restore its
-- definition before compiling.
-- XML literal and blank-node labels.
l_5 = Lit "l5" (Just rdf_XMLLiteral)
b_1 = Blank "1"
b_2 = Blank "2"
b_3 = Blank "3"
b_l1 = Blank "l1"
b_l2 = Blank "l2"
-- Grouped variable bindings used by the backward-modify tests below.
-- The group names vbss01a..vbss01i were lost in extraction; they are
-- reconstructed here from the expected-result lists of testBackMod01..10.
vbss01a = [ makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,u_o) ]
          , makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,b_1) ]
          , makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_1) ]
          ]
vbss01b = [ makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,b_1) ]
          ]
vbss01c = [ makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_1) ]
          , makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_2) ]
          , makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_3) ]
          ]
vbss01d = [ makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_1) ]
          , makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_2) ]
          ]
vbss01e = [ makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_3) ]
          , makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_4) ]
          , makeVarBinding [ (v_a,b_3), (v_b,u_p), (v_c,l_5) ]
          ]
vbss01f = [ makeVarBinding [ (v_a,b_1), (v_b,u_p), (v_c,l_5) ]
          ]
vbss01g = [ makeVarBinding [ (v_a,b_1), (v_b,u_m1), (v_c,u_o) ]
          , makeVarBinding [ (v_a,u_s), (v_b,u_m2), (v_c,b_1) ]
          ]
-- ?c is datatyped with ?x
vbss01h = [ makeVarBinding [ (v_a,b_1), (v_b,u_p), (v_c,l_3), (v_x,u_dt) ]
          , makeVarBinding [ (v_a,b_2), (v_b,u_p), (v_c,l_4), (v_x,u_dt) ]
          , makeVarBinding [ (v_a,u_s), (v_b,u_p), (v_c,l_5), (v_x,u_xt) ]
          ]
-- ?c is datatyped with ?x (second binding does not match: l_4 vs xt)
vbss01i = [ makeVarBinding [ (v_a,b_1), (v_b,u_p), (v_c,l_3), (v_x,u_dt) ]
          , makeVarBinding [ (v_a,b_2), (v_b,u_p), (v_c,l_4), (v_x,u_xt) ]
          , makeVarBinding [ (v_a,b_3), (v_b,u_p), (v_c,l_5), (v_x,u_xt) ]
          ]
vbss01 = [ vbss01a, vbss01b, vbss01c, vbss01d, vbss01e
         , vbss01f, vbss01g, vbss01h, vbss01i
         ]
-- Backward-chaining modifier tests over vbss01: each test applies a
-- variable-binding filter and checks which binding groups survive with
-- all of their member bindings intact.
-- identity modifier leaves everything untouched
testBackMod01 = testEq "testBackMod01" vbss01 $
    rdfQueryBackModify varBindingId vbss01
-- ?a bound to a URI reference in every binding
testBackMod02 = testEq "testBackMod02" [vbss01a,vbss01b,vbss01c,vbss01d] $
    rdfQueryBackModify
        (makeVarFilterModify $ rdfVarBindingUriRef v_a)
        vbss01
-- ?a bound to a blank node in every binding
testBackMod03 = testEq "testBackMod03" [vbss01f,vbss01i] $
    rdfQueryBackModify
        (makeVarFilterModify $ rdfVarBindingBlank v_a)
        vbss01
-- ?b is a URI reference in every group, so nothing is removed
testBackMod04 = testEq "testBackMod04" vbss01 $
    rdfQueryBackModify
        (makeVarFilterModify $ rdfVarBindingUriRef v_b)
        vbss01
-- ?c bound to a literal (typed or untyped)
testBackMod05 = testEq "testBackMod05"
    [vbss01c,vbss01d,vbss01e,vbss01f,vbss01h,vbss01i] $
    rdfQueryBackModify
        (makeVarFilterModify $ rdfVarBindingLiteral v_c)
        vbss01
-- ?c bound to an untyped literal
testBackMod06 = testEq "testBackMod06" [vbss01d] $
    rdfQueryBackModify
        (makeVarFilterModify $ rdfVarBindingUntypedLiteral v_c)
        vbss01
-- ?c bound to a typed literal
testBackMod07 = testEq "testBackMod07" [vbss01e,vbss01f,vbss01h,vbss01i] $
    rdfQueryBackModify
        (makeVarFilterModify $ rdfVarBindingTypedLiteral v_c)
        vbss01
-- ?c bound to an XML literal
testBackMod08 = testEq "testBackMod08" [vbss01f] $
    rdfQueryBackModify
        (makeVarFilterModify $ rdfVarBindingXMLLiteral v_c)
        vbss01
-- ?b bound to a container-membership property (rdf:_1, rdf:_2, ...)
testBackMod09 = testEq "testBackMod09" [vbss01g] $
    rdfQueryBackModify
        (makeVarFilterModify $ rdfVarBindingMemberProp v_b)
        vbss01
-- ?c is a literal datatyped with the datatype bound to ?x
testBackMod10 = testEq "testBackMod10" [vbss01h] $
    rdfQueryBackModify
        (makeVarFilterModify $ rdfVarBindingDatatyped v_x v_c)
        vbss01
-- Binding groups for the binding-allocation tests (vbm22).
vbss02a = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
          , makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2) ]
          , makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2) ]
          , makeVarBinding [ (v_b,b_l1) ]
          , makeVarBinding [ (v_b,b_l2) ]
          ]
vbss02b = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
          , makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2) ]
          , makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2) ]
          , makeVarBinding [ (v_x,b_l1), (v_a,u_rt), (v_b,u_xt) ]
          , makeVarBinding [ (v_b,b_l2) ]
          ]
vbss02c = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
          , makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2) ]
          , makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2) ]
          , makeVarBinding [ (v_b,b_l1) ]
          , makeVarBinding [ (v_x,b_l2), (v_a,u_rt), (v_b,u_xt) ]
          ]
vbss02d = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
          , makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2) ]
          , makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2) ]
          , makeVarBinding [ (v_x,b_l1), (v_a,u_rt), (v_b,u_xt) ]
          , makeVarBinding [ (v_x,b_l2), (v_a,u_rt), (v_b,u_xt) ]
          ]
vbss02 = [ vbss02a
         , vbss02b
         , vbss02c
         , vbss02d
         ]
-- identity modifier leaves everything untouched
testBackMod20 = testEq "testBackMod20" vbss02 $
    rdfQueryBackModify varBindingId vbss02
-- only vbss02d binds ?a to a URI in every member binding
testBackMod21 = testEq "testBackMod21" [vbss02d] $
    rdfQueryBackModify
        (makeVarFilterModify $ rdfVarBindingUriRef v_a)
        vbss02
-- vbm22: a binding modifier that, for each binding carrying ?a, ?b and ?x,
-- yields two extended bindings: one adding ?y = (value of ?a), one adding
-- ?y = (value of ?b).  Bindings lacking any of ?a/?b/?x yield nothing.
vbm22 = VarBindingModify
        { vbmName = swishName "vbm22"
        , vbmApply = concatMap apply1
        , vbmVocab = [v_a,v_b,v_x,v_y]
        , vbmUsage = [[v_y]]     -- ?y is the (only) newly allocated variable
        }
    where
        apply1 :: RDFVarBinding -> [RDFVarBinding]
        apply1 vb = apply2 vb (vbMap vb v_a) (vbMap vb v_b) (vbMap vb v_x)
        apply2 vb (Just a) (Just b) (Just _) =
            [ joinVarBindings nva vb, joinVarBindings nvb vb ]
            where
                nva = makeVarBinding [(v_y,a)]
                nvb = makeVarBinding [(v_y,b)]
        apply2 _ _ _ _ = []
-- Expected result of applying vbm22 to vbss02d: 'sequence' forms the
-- cross-product of the two ?y-alternatives produced for each binding.
vbss02dy = sequence
    [ [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1), (v_y,u_p1) ]
      , makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1), (v_y,b_l1) ]
      ]
    , [ makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2), (v_y,u_p2a) ]
      , makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2), (v_y,b_l2) ]
      ]
    , [ makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2), (v_y,u_p2b) ]
      , makeVarBinding [ (v_x,u_s), (v_a,u_p2b), (v_b,b_l2), (v_y,b_l2) ]
      ]
    , [ makeVarBinding [ (v_x,b_l1), (v_a,u_rt), (v_b,u_xt), (v_y,u_rt) ]
      , makeVarBinding [ (v_x,b_l1), (v_a,u_rt), (v_b,u_xt), (v_y,u_xt) ]
      ]
    , [ makeVarBinding [ (v_x,b_l2), (v_a,u_rt), (v_b,u_xt), (v_y,u_rt) ]
      , makeVarBinding [ (v_x,b_l2), (v_a,u_rt), (v_b,u_xt), (v_y,u_xt) ]
      ]
    ]
testBackMod22 = testEq "testBackMod22" vbss02dy $
    rdfQueryBackModify vbm22 vbss02
-- Smaller variant of the same test with two groups.
vbss03a = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
          , makeVarBinding [ (v_b,b_l1) ]
          ]
vbss03b = [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1) ]
          , makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2) ]
          ]
vbss03 = [ vbss03a
         , vbss03b
         ]
-- only vbss03b has ?a/?b/?x in every binding, so only it contributes
vbss03by = sequence
    [ [ makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1), (v_y,u_p1) ]
      , makeVarBinding [ (v_x,u_s), (v_a,u_p1), (v_b,b_l1), (v_y,b_l1) ]
      ]
    , [ makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2), (v_y,u_p2a) ]
      , makeVarBinding [ (v_x,u_s), (v_a,u_p2a), (v_b,b_l2), (v_y,b_l2) ]
      ]
    ]
testBackMod30 = testEq "testBackMod30" vbss03by $
    rdfQueryBackModify vbm22 vbss03
-- test7: collects the backward-modify tests
test7 = TestList
    [ testBackMod01, testBackMod02, testBackMod03, testBackMod04
    , testBackMod05, testBackMod06, testBackMod07, testBackMod08
    , testBackMod09, testBackMod10
    , testBackMod20, testBackMod21, testBackMod22
    , testBackMod30
    ]
-- Namespaces used by the collection/value tests.
namespacetest =
    Namespace "test" "urn:test:"
namespacelist =
    Namespace "list" "urn:list:"
-- shorthand constructors for names in those namespaces
qntest loc = ScopedName namespacetest loc
qnlist loc = ScopedName namespacelist loc
-- common @prefix preamble for the test graph
-- NOTE(review): the space before ':' in these prefix declarations looks
-- unusual for N3/Turtle -- confirm the parser accepts it
prefixlist =
    "@prefix rdf : <" ++ nsURI namespaceRDF ++ "> . \n" ++
    "@prefix xsd : <" ++ nsURI namespaceXSD ++ "> . \n" ++
    "@prefix test : <" ++ nsURI namespacetest ++ "> . \n" ++
    "@prefix list : <" ++ nsURI namespacelist ++ "> . \n" ++
    " \n"
-- test graph: typed/untyped property values plus two collections
graphlist = graphFromString graphliststr
graphliststr = prefixlist ++
    "test:a rdf:type test:C1 ; " ++
    " test:p test:item1 ; " ++
    " test:p test:item2 . " ++
    "test:b rdf:type test:C1 ; " ++
    " test:p \"1\"^^xsd:integer ; " ++
    " test:p \"2\"^^xsd:integer ; " ++
    " test:p \"3\"^^xsd:integer . " ++
    "test:c rdf:type test:C1 ; " ++
    " test:q \"1\"^^xsd:integer ; " ++
    " test:q \"2\"^^xsd:boolean ; " ++
    " test:q \"3\" . " ++
    "list:three :- (list:_1 list:_2 list:_3) . \n" ++
    "list:empty :- () . \n"
-- expected nodes and values
testC1 = Res (qntest "C1")
testabc = [ Res (qntest "a"),Res (qntest "b"),Res (qntest "c") ]
testp = Res (qntest "p")
testq = Res (qntest "q")
testi12 = [ Res (qntest "item1"),Res (qntest "item2") ]
test123 = [ Lit "1" (Just xsd_integer)
          , Lit "2" (Just xsd_integer)
          , Lit "3" (Just xsd_integer)
          ]
test1fp = [ Lit "1" (Just xsd_integer)
          , Lit "2" (Just xsd_boolean)
          , Lit "3" Nothing
          ]
list01 = [Res (qnlist "_1"),Res (qnlist "_2"),Res (qnlist "_3")]
list02 = []
-- subjects having rdf:type test:C1
testVal01 = testEqv "testVal01" testabc $
    rdfFindValSubj res_rdf_type testC1 graphlist
-- property values for each subject
testVal02 = testEqv "testVal02" testi12 $
    rdfFindPredVal (testabc!!0) testp graphlist
testVal03 = testEqv "testVal03" test123 $
    rdfFindPredVal (testabc!!1) testp graphlist
testVal04 = testEqv "testVal04" test1fp $
    rdfFindPredVal (testabc!!2) testq graphlist
testVal05 = testEqv "testVal05" [] $
    rdfFindPredVal (testabc!!2) testp graphlist
-- integer-valued properties only
testVal06 = testEqv "testVal06" [] $
    rdfFindPredInt (testabc!!0) testp graphlist
testVal07 = testEqv "testVal07" [1,2,3] $
    rdfFindPredInt (testabc!!1) testp graphlist
testVal08 = testEqv "testVal08" [1] $
    rdfFindPredInt (testabc!!2) testq graphlist
-- collection (rdf list) traversal
testlist01 = testEq "testlist01" list01 $
    rdfFindList graphlist (Res $ qnlist "three")
testlist02 = testEq "testlist02" list02 $
    rdfFindList graphlist (Res $ qnlist "empty")
test8 = TestList
    [ testVal01, testVal02, testVal03, testVal04
    , testVal05, testVal06, testVal07, testVal08
    , testlist01, testlist02
    ]
{- Garbled duplicate of the queryList definitions below; kept for reference:
queryList :: RDFGraph -> RDFLabel -> [RDFLabel]
queryList gr hd
    | hd == res_rdf_nil = []
    | otherwise = (findhead g):(queryList gr (findrest g))
    where
        g = subgr gr hd
findhead g = headOrNil [ ob | Arc _ sb ob <- g, sb == res_rdf_first ]
findrest g = headOrNil [ ob | Arc _ sb ob <- g, sb == res_rdf_rest ]
subgr g h = filter ((==) h . arcSubj) $ getArcs g
headOrNil = foldr const res_rdf_nil
th1 = (Res $ qnlist "empty")
th3 = (Res $ qnlist "three")
th3a = subgr graphlist th3
th3b = findhead th3a
th3c = findrest th3a
-}
-- | Traverse an RDF collection (rdf:first/rdf:rest chain) starting at 'hd',
-- returning the member labels in order; terminates at rdf:nil.
-- NOTE(review): the source's indentation was flattened; the helpers are
-- restored at top level so that the probe definitions below remain in scope.
queryList :: RDFGraph -> RDFLabel -> [RDFLabel]
queryList gr hd
    | hd == res_rdf_nil = []
    | otherwise = (findhead g):(queryList gr (findrest g))
    where
        g = subgr gr hd
-- object of the rdf:first arc in g, or rdf:nil if absent
findhead g = headOrNil [ ob | Arc _ sb ob <- g, sb == res_rdf_first ]
-- object of the rdf:rest arc in g, or rdf:nil if absent
findrest g = headOrNil [ ob | Arc _ sb ob <- g, sb == res_rdf_rest ]
-- arcs of g whose subject is h
subgr g h = filter ((==) h . arcSubj) $ getArcs g
-- first element of a list, or rdf:nil for the empty list
headOrNil = foldr const res_rdf_nil
-- interactive probes of the list structures in graphlist
th1 = (Res $ qnlist "empty")
th3 = (Res $ qnlist "three")
th3a = subgr graphlist th3
th3b = findhead th3a
th3c = findrest th3a
tl3c = queryList graphlist th3c
th3d = subgr graphlist th3c
th3e = findhead th3d
th3f = findrest th3d
tl3 = queryList graphlist th3
-- Full test suite.
-- NOTE(review): there is no test5 in this list -- confirm that is intentional.
allTests = TestList
    [ test1
    , test2
    , test3
    , test4
    , test6
    , test7
    , test8
    ]
-- run the full suite on stdout
main = runTestTT allTests
-- run a test group, writing (non-verbose) results to file a.tmp
runTestFile t = do
    h <- openFile "a.tmp" WriteMode
    runTestText (putTextToHandle h False) t
    hClose h
-- interactive shorthands
tf = runTestFile
tt = runTestTT
-- debug aid: show the result of res32 as a test assertion
shres32 = TestCase $ assertString (show res32)
--------------------------------------------------------------------------------
--  Copyright (c) 2003, Graham Klyne.  All rights reserved.
--  This file is part of Swish.
--
--  Swish is free software; you can redistribute it and/or modify
--  it under the terms of the GNU General Public License as published by
--  the Free Software Foundation; either version 2 of the License, or
--  (at your option) any later version.
--
--  Swish is distributed in the hope that it will be useful,
--  but WITHOUT ANY WARRANTY; without even the implied warranty of
--  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
--
--  You should have received a copy of the GNU General Public License
--  along with Swish; if not, write to:
--    The Free Software Foundation, Inc.,
--    59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--------------------------------------------------------------------------------
-- $Source: /file/cvsdev/HaskellRDF/RDFQueryTest.hs,v $
-- $Author: graham $
-- $Revision: 1.23 $
--
-- Revision 1.23  2004/01/07 19:49:13
--   Reorganized RDFLabel details to eliminate separate language field,
--   Removed some duplicated functions from module Namespace.
-- Revision 1.22  2004/01/06 13:53:10
--   Created consolidated test harness (SwishTestAll.hs)
-- Revision 1.21  2003/12/20 12:53:40
--   Fix up code to compile and test with GHC 5.04.3
-- Revision 1.20  2003/12/08 23:55:36
-- Revision 1.19  2003/11/24 17:20:35  graham
--   Separate module Vocabulary from module Namespace.
-- Revision 1.18  2003/11/24 15:46:03
--   terms defined in Namespace.hs
-- Revision 1.17  2003/11/14 16:04:43
-- Revision 1.16  2003/11/14 16:01:30
--   Separate RDFVarBinding from module RDFQuery.
-- Revision 1.15  2003/11/13 01:13:48
-- Revision 1.14  2003/10/16 16:01:49  graham
--   Reworked RDFProof and RDFProofContext to use new query binding
-- Revision 1.13  2003/10/15 16:40:52
--   (Note: still uses BindingFilter rather than VarBindingModify.
--   The intent is to incorporate the VarBindingModify logic into RDFProof,
--   displacing the existing use of BindingFilter.)
-- Revision 1.12  2003/09/24 18:50:52  graham
--   Revised module format to be Haddock compatible.
--   Subgraph entailment and Graph closure instance entailment rules
--   now tested.  RDF forward chaining revised to combine output graphs.
-- Revision 1.10  2003/06/26 15:37:23
-- Revision 1.8   2003/06/18 14:59:27
--   RDFQuery tests OK.
-- Revision 1.7   2003/06/18 13:47:33  graham
-- Revision 1.6   2003/06/18 01:29:29
-- Revision 1.5   2003/06/17 17:53:08
-- Revision 1.4   2003/06/17 16:29:20  graham
--   Eliminate redundant Maybe in return type of rdfQueryPrim.
-- Revision 1.3   2003/06/17 15:59:09  graham
--   node-mapping function rather than just a Boolean to control conversion
--   Graph closure forward chaining works.
-- Revision 1.1   2003/06/12 00:49:06  graham
{-+
Except for one equation, this module provides an implementation of the
function L from section 9.3 of the revised Haskell 98 Report.
The missing equation is the one that requires interaction with the parser.
This means that things like

    let x=1
     in x+x

will be correctly translated to

    let {x=1}
     in x+x

but things like

    let x=1 in x+x

that should be parsed as

    let {x=1} in x+x

will *not* be treated correctly in this implementation.
-}
module L(l) where
import HsTokens(Token(..))
default(Int)
-- The equations for cases when <n> is the first token:
l ts0@((Indent n,(p,_)):ts) ms0@(m:ms) | m==n = semi p:l ts ms0
                                       | n<m  = vrbrace p:l ts0 ms
l ((Indent _,_):ts) ms = l ts ms
-- The equations for cases when {n} is the first token:
l ((Open n,(p,_)):ts) (m:ms) | n>m = vlbrace p:l ts (n:m:ms)
l ((Open n,(p,_)):ts) []     | n>0 = vlbrace p:l ts [n]
l ((Open n,(p,_)):ts) ms = vlbrace p:vrbrace p:l ((Indent n,(p,"")):ts) ms
-- Equations for explicit braces:
l (t1@(Special,(_,"}")):ts) (0:ms) = t1:l ts ms
l (t1@(Special,(p,"}")):ts) ms = layout_error p "unexpected }"++ts -- hmm
l (t1@(Special,(p,"{")):ts) ms = t1:l ts (0:ms)
-- The equation for ordinary tokens:
l (t:ts) ms = t:l ts ms
-- Equations for end of file:
l [] (m:ms) = if m/=0
                then vrbrace eof:l [] ms
                else layout_error eof "missing } at eof"
l [] [] = []   -- all blocks closed at end of input (makes the match exhaustive)
-- These are the tokens inserted by the layout processor:
vlbrace p = (Layout,(p,"{"))
vrbrace p = (Layout,(p,"}"))
semi p = (Special,(p,";"))
-- geof = (GotEOF,(eof,""))
eof = (-1,-1) -- hmm: pseudo-position for tokens inserted at end of input
layout_error p msg = [(ErrorToken,(p,"{-"++msg++"-}"))] -- hmm
| null | https://raw.githubusercontent.com/RefactoringTools/HaRe/ef5dee64c38fb104e6e5676095946279fbce381c/old/tools/hsutils/L.hs | haskell | Equations for explicit braces:
hmm
The equation for ordinary tokens:
Equations for end of file:
There are the tokens inserted by the layout processor:
hmm
hmm | +
Except for one equation , this module provides an implementation of the
function L from section 9.3 of the revised Haskell 98 Report .
The missing equation is the one that requires interaction with the parser .
This means that things like
let x=1
in x+x
will be correctly translated to
let { x=1 }
in x+x
but things like
let x=1 in x+x
that should be parsed as
let { x=1 } in x+x
will * not * be treated correctly in this implementation .
Except for one equation, this module provides an implementation of the
function L from section 9.3 of the revised Haskell 98 Report.
The missing equation is the one that requires interaction with the parser.
This means that things like
let x=1
in x+x
will be correctly translated to
let {x=1}
in x+x
but things like
let x=1 in x+x
that should be parsed as
let { x=1 } in x+x
will *not* be treated correctly in this implementation.
-}
module L(l) where
import HsTokens(Token(..))
default(Int)
The equations for cases when < n > is the first token :
l ts0@((Indent n,(p,_)):ts) ms0@(m:ms) | m==n = semi p:l ts ms0
| n<m = vrbrace p:l ts0 ms
l ((Indent _,_):ts) ms = l ts ms
The equations for cases when { n } is the first token :
l ((Open n,(p,_)):ts) (m:ms) | n>m = vlbrace p:l ts (n:m:ms)
l ((Open n,(p,_)):ts) [] | n>0 = vlbrace p:l ts [n]
l ((Open n,(p,_)):ts) ms = vlbrace p:vrbrace p:l ((Indent n,(p,"")):ts) ms
l (t1@(Special,(_,"}")):ts) (0:ms) = t1:l ts ms
l (t1@(Special,(p,"{")):ts) ms = t1:l ts (0:ms)
l (t:ts) ms = t:l ts ms
l [] (m:ms) = if m/=0
then vrbrace eof:l [] ms
else layout_error eof "missing } at eof"
vlbrace p = (Layout,(p,"{"))
vrbrace p = (Layout,(p,"}"))
semi p = (Special,(p,";"))
= ( GotEOF,(eof , " " ) )
|
169f26c598b3bf710c8fce87c73500c17c7d1731dba3b494f049b6d95221f826 | snapframework/snap-server | Config.hs | {-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
------------------------------------------------------------------------------
-- | This module exports the 'Config' datatype, which you can use to configure
-- the Snap HTTP server.
--
module Snap.Internal.Http.Server.Config
-- NOTE: also edit Snap.Http.Server.Config if you change these
( ConfigLog(..)
, Config(..)
, ProxyType(..)
, emptyConfig
, defaultConfig
, commandLineConfig
, extendedCommandLineConfig
, completeConfig
, optDescrs
, fmapOpt
, getAccessLog
, getBind
, getCompression
, getDefaultTimeout
, getErrorHandler
, getErrorLog
, getHostname
, getLocale
, getOther
, getPort
, getProxyType
, getSSLBind
, getSSLCert
, getSSLChainCert
, getSSLKey
, getSSLPort
, getVerbose
, getStartupHook
, getUnixSocket
, getUnixSocketAccessMode
, setAccessLog
, setBind
, setCompression
, setDefaultTimeout
, setErrorHandler
, setErrorLog
, setHostname
, setLocale
, setOther
, setPort
, setProxyType
, setSSLBind
, setSSLCert
, setSSLChainCert
, setSSLKey
, setSSLPort
, setVerbose
, setUnixSocket
, setUnixSocketAccessMode
, setStartupHook
, StartupInfo(..)
, getStartupSockets
, getStartupConfig
-- * Private
, emptyStartupInfo
, setStartupSockets
, setStartupConfig
) where
------------------------------------------------------------------------------
import Control.Exception (SomeException)
import Control.Monad (when)
import Data.Bits ((.&.))
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.CaseInsensitive as CI
import Data.Function (on)
import Data.List (foldl')
import qualified Data.Map as Map
import Data.Maybe (isJust, isNothing)
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (Monoid (..))
#endif
import Data.Monoid (Last (Last, getLast))
#if !MIN_VERSION_base(4,11,0)
import Data.Semigroup (Semigroup (..))
#endif
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
#if MIN_VERSION_base(4,7,0)
import Data.Typeable (Typeable)
#else
import Data.Typeable (TyCon, Typeable, Typeable1 (..), mkTyCon3, mkTyConApp)
#endif
import Network.Socket (Socket)
import Numeric (readOct, showOct)
#if !MIN_VERSION_base(4,6,0)
import Prelude hiding (catch)
#endif
import System.Console.GetOpt (ArgDescr (..), ArgOrder (Permute), OptDescr (..), getOpt, usageInfo)
import System.Environment hiding (getEnv)
#ifndef PORTABLE
import Data.Char (isAlpha)
import System.Posix.Env (getEnv)
#endif
import System.Exit (exitFailure)
import System.IO (hPutStrLn, stderr)
------------------------------------------------------------------------------
import Data.ByteString.Builder (Builder, byteString, stringUtf8, toLazyByteString)
import qualified System.IO.Streams as Streams
------------------------------------------------------------------------------
import Snap.Core (MonadSnap, Request (rqClientAddr, rqClientPort, rqParams, rqPostParams), emptyResponse, finishWith, getsRequest, logError, setContentLength, setContentType, setResponseBody, setResponseStatus)
import Snap.Internal.Debug (debug)
------------------------------------------------------------------------------
-- | FIXME
--
-- Note: this type changed in snap-server 1.0.0.0.
data ProxyType = NoProxy          -- ^ use the socket's peer address directly
               | HaProxy          -- ^ client address taken from the PROXY protocol preamble (haproxy) -- TODO confirm
               | X_Forwarded_For  -- ^ client address taken from the @X-Forwarded-For@ header
               deriving (Show, Eq, Typeable)
------------------------------------------------------------------------------
-- | Data type representing the configuration of a logging target.
-- The 'ConfigIoLog' constructor was lost in extraction (only its trailing
-- comment survived); it is restored here -- its Show equation below proves
-- it existed, taking a @ByteString -> IO ()@ handler.
data ConfigLog = ConfigNoLog                        -- ^ no logging
               | ConfigFileLog FilePath             -- ^ log to text file
               | ConfigIoLog (ByteString -> IO ())  -- ^ log custom IO handler
instance Show ConfigLog where
    show ConfigNoLog       = "no log"
    show (ConfigFileLog f) = "log to file " ++ show f
    show (ConfigIoLog _)   = "custom logging handler"
------------------------------------------------------------------------------
-- TODO: We should be using ServerConfig here. There needs to be a clearer
-- separation between:
--
-- * what the underlying code needs to configure itself
--
-- * what the command-line processing does.
--
-- The latter will provide "library" helper functions that operate on
ServerConfig / etc in order to allow users to configure their own environment .
--
--
-- Todo:
--
-- * need a function ::
--       CommandLineConfig -> IO [(ServerConfig hookState, AcceptFunc)]
--
-- this will prep for another function that will spawn all of the
-- accept loops with httpAcceptLoop.
--
--   * all backends provide "Some -> Foo -> Config -> IO AcceptFunc"
--
-- * add support for socket activation to command line, or delegate to
-- different library? It's linux-only anyways, need to ifdef. It would be
--   silly to depend on the socket-activation library for that one little
-- function.
--
-- * break config into multiple modules:
--
-- * everything that modifies the snap handler (compression, proxy
-- settings, error handler)
--
-- * everything that directly modifies server settings (hostname /
-- defaultTimeout / hooks / etc)
--
-- * everything that configures backends (port/bind/ssl*)
--
-- * everything that handles command line stuff
--
-- * utility stuff
--
-- Cruft that definitely must be removed:
--
--   * ConfigLog -- this becomes a binary option on the command-line side (no
-- logging or yes, to this file), but the ConfigIoLog gets zapped
-- altogether.
------------------------------------------------------------------------------
-- | A record type which represents partial configurations (for 'httpServe')
-- by wrapping all of its fields in a 'Maybe'. Values of this type are usually
-- constructed via its 'Monoid' instance by doing something like:
--
-- > setPort 1234 mempty
--
-- Any fields which are unspecified in the 'Config' passed to 'httpServe' (and
-- this is the norm) are filled in with default values from 'defaultConfig'.
-- (Every field is a 'Maybe' so that partial configurations can be merged;
-- see the 'Semigroup'/'Monoid' instances below.)
data Config m a = Config
    { hostname       :: Maybe ByteString  -- ^ local hostname, used when a request carries no @Host@ header
    , accessLog      :: Maybe ConfigLog   -- ^ access-log target
    , errorLog       :: Maybe ConfigLog   -- ^ error-log target
    , locale         :: Maybe String      -- ^ locale, e.g. @"en_US"@ (Unix only)
    , port           :: Maybe Int         -- ^ HTTP listen port
    , bind           :: Maybe ByteString  -- ^ HTTP bind address
    , sslport        :: Maybe Int         -- ^ HTTPS listen port
    , sslbind        :: Maybe ByteString  -- ^ HTTPS bind address
    , sslcert        :: Maybe FilePath    -- ^ path to the SSL certificate file
    , sslchaincert   :: Maybe Bool        -- ^ whether the cert file contains a full chain
    , sslkey         :: Maybe FilePath    -- ^ path to the SSL key file
    , unixsocket     :: Maybe FilePath    -- ^ path to a unix domain socket
    , unixaccessmode :: Maybe Int         -- ^ access mode for the unix socket (octal)
    , compression    :: Maybe Bool        -- ^ enable HTTP compression
    , verbose        :: Maybe Bool        -- ^ verbose startup output
    , errorHandler   :: Maybe (SomeException -> m ())  -- ^ handler for escaped exceptions
    , defaultTimeout :: Maybe Int         -- ^ connection timeout, in seconds
    , other          :: Maybe a           -- ^ user-defined extension slot
    , proxyType      :: Maybe ProxyType   -- ^ how to determine the client address
    , startupHook    :: Maybe (StartupInfo m a -> IO ())  -- ^ hook run at server startup
    }
#if MIN_VERSION_base(4,7,0)
    deriving Typeable
#else
------------------------------------------------------------------------------
-- | The 'Typeable1' instance is here so 'Config' values can be
-- dynamically loaded with Hint.
configTyCon :: TyCon
configTyCon = mkTyCon3 "snap-server" "Snap.Http.Server.Config" "Config"
{-# NOINLINE configTyCon #-}
instance (Typeable1 m) => Typeable1 (Config m) where
    typeOf1 _ = mkTyConApp configTyCon [typeOf1 (undefined :: m ())]
#endif
-- | Render every (showable) field, one per line; function-valued fields
-- (errorHandler, startupHook, other) are omitted.  unixaccessmode is
-- rendered in octal.
instance Show (Config m a) where
    show c = unlines [ "Config:"
                     , "hostname: " ++ _hostname
                     , "accessLog: " ++ _accessLog
                     , "errorLog: " ++ _errorLog
                     , "locale: " ++ _locale
                     , "port: " ++ _port
                     , "bind: " ++ _bind
                     , "sslport: " ++ _sslport
                     , "sslbind: " ++ _sslbind
                     , "sslcert: " ++ _sslcert
                     , "sslchaincert: " ++ _sslchaincert
                     , "sslkey: " ++ _sslkey
                     , "unixsocket: " ++ _unixsocket
                     , "unixaccessmode: " ++ _unixaccessmode
                     , "compression: " ++ _compression
                     , "verbose: " ++ _verbose
                     , "defaultTimeout: " ++ _defaultTimeout
                     , "proxyType: " ++ _proxyType
                     ]
      where
        _hostname       = show $ hostname c
        _accessLog      = show $ accessLog c
        _errorLog       = show $ errorLog c
        _locale         = show $ locale c
        _port           = show $ port c
        _bind           = show $ bind c
        _sslport        = show $ sslport c
        _sslbind        = show $ sslbind c
        _sslcert        = show $ sslcert c
        _sslchaincert   = show $ sslchaincert c
        _sslkey         = show $ sslkey c
        _compression    = show $ compression c
        _verbose        = show $ verbose c
        _defaultTimeout = show $ defaultTimeout c
        _proxyType      = show $ proxyType c
        _unixsocket     = show $ unixsocket c
        -- render the access mode in octal, the conventional notation
        _unixaccessmode = case unixaccessmode c of
                              Nothing -> "Nothing"
                              Just s  -> ("Just 0" ++) . showOct s $ []
------------------------------------------------------------------------------
-- | Returns a completely empty 'Config'. Equivalent to 'mempty' from
-- 'Config''s 'Monoid' instance.
emptyConfig :: Config m a
emptyConfig = mempty
------------------------------------------------------------------------------
-- | Field-wise, right-biased merge: for each field, a value set in the right
-- operand overrides the left one ('Data.Monoid.Last' semantics).
instance Semigroup (Config m a) where
    a <> b = Config
        { hostname       = ov hostname
        , accessLog      = ov accessLog
        , errorLog       = ov errorLog
        , locale         = ov locale
        , port           = ov port
        , bind           = ov bind
        , sslport        = ov sslport
        , sslbind        = ov sslbind
        , sslcert        = ov sslcert
        , sslchaincert   = ov sslchaincert
        , sslkey         = ov sslkey
        , unixsocket     = ov unixsocket
        , unixaccessmode = ov unixaccessmode
        , compression    = ov compression
        , verbose        = ov verbose
        , errorHandler   = ov errorHandler
        , defaultTimeout = ov defaultTimeout
        , other          = ov other
        , proxyType      = ov proxyType
        , startupHook    = ov startupHook
        }
      where
        -- read the field from both operands and keep the last 'Just'
        ov :: (Config m a -> Maybe b) -> Maybe b
        ov f = getLast $! (mappend `on` (Last . f)) a b
-- | 'mempty' is a 'Config' with every field unset; 'mappend' is the
-- right-biased merge from the 'Semigroup' instance.
instance Monoid (Config m a) where
    mempty = Config
        { hostname       = Nothing
        , accessLog      = Nothing
        , errorLog       = Nothing
        , locale         = Nothing
        , port           = Nothing
        , bind           = Nothing
        , sslport        = Nothing
        , sslbind        = Nothing
        , sslcert        = Nothing
        , sslchaincert   = Nothing
        , sslkey         = Nothing
        , unixsocket     = Nothing
        , unixaccessmode = Nothing
        , compression    = Nothing
        , verbose        = Nothing
        , errorHandler   = Nothing
        , defaultTimeout = Nothing
        , other          = Nothing
        , proxyType      = Nothing
        , startupHook    = Nothing
        }
#if !MIN_VERSION_base(4,11,0)
    mappend = (<>)
#endif
------------------------------------------------------------------------------
-- | These are the default values for the options
defaultConfig :: MonadSnap m => Config m a
defaultConfig = mempty
    { hostname       = Just "localhost"
    , accessLog      = Just $ ConfigFileLog "log/access.log"
    , errorLog       = Just $ ConfigFileLog "log/error.log"
    , locale         = Just "en_US"
    , compression    = Just True
    , verbose        = Just True
    , errorHandler   = Just defaultErrorHandler
    , bind           = Just "0.0.0.0"
    , sslbind        = Nothing
    , sslcert        = Nothing
    , sslkey         = Nothing
    , sslchaincert   = Nothing
    , defaultTimeout = Just 60   -- seconds
    }
------------------------------------------------------------------------------
-- | The hostname of the HTTP server. This field has the same format as an HTTP
-- @Host@ header; if a @Host@ header came in with the request, we use that,
-- otherwise we default to this value specified in the configuration.
getHostname :: Config m a -> Maybe ByteString
getHostname = hostname

-- | The access-log target (a 'ConfigLog', not merely a file path)
getAccessLog :: Config m a -> Maybe ConfigLog
getAccessLog = accessLog

-- | The error-log target (a 'ConfigLog', not merely a file path)
getErrorLog :: Config m a -> Maybe ConfigLog
getErrorLog = errorLog

-- | Gets the locale to use. Locales are used on Unix only, to set the
-- @LANG@\/@LC_ALL@\/etc. environment variable. For instance if you set the
-- locale to \"@en_US@\", we'll set the relevant environment variables to
-- \"@en_US.UTF-8@\".
getLocale :: Config m a -> Maybe String
getLocale = locale

-- | Returns the port to listen on (for http)
getPort :: Config m a -> Maybe Int
getPort = port

-- | Returns the address to bind to (for http)
getBind :: Config m a -> Maybe ByteString
getBind = bind

-- | Returns the port to listen on (for https)
getSSLPort :: Config m a -> Maybe Int
getSSLPort = sslport

-- | Returns the address to bind to (for https)
getSSLBind :: Config m a -> Maybe ByteString
getSSLBind = sslbind

-- | Path to the SSL certificate file
getSSLCert :: Config m a -> Maybe FilePath
getSSLCert = sslcert

-- | Whether the SSL certificate file is expected to contain the full
-- certificate chain (note: a 'Bool', not a path)
getSSLChainCert :: Config m a -> Maybe Bool
getSSLChainCert = sslchaincert

-- | Path to the SSL key file
getSSLKey :: Config m a -> Maybe FilePath
getSSLKey = sslkey

-- | File path to unix socket. Must be absolute path, but allows for symbolic
-- links.
getUnixSocket :: Config m a -> Maybe FilePath
getUnixSocket = unixsocket
-- | Access mode for unix socket, by default is system specific.
-- This should only be used to grant additional permissions to created
-- socket file, and not to remove permissions set by default.
-- The only portable way to limit access to socket is creating it in a
-- directory with proper permissions set.
--
Most BSD systems ignore access permissions on unix sockets .
--
-- Note: This uses umask. There is a race condition if process creates other
-- files at the same time as opening a unix socket with this option set.
getUnixSocketAccessMode :: Config m a -> Maybe Int
getUnixSocketAccessMode = unixaccessmode
-- | If set and set to True, compression is turned on when applicable
getCompression :: Config m a -> Maybe Bool
getCompression = compression
-- | Whether to write server status updates to stderr
getVerbose :: Config m a -> Maybe Bool
getVerbose = verbose
| A MonadSnap action to handle 500 errors
getErrorHandler :: Config m a -> Maybe (SomeException -> m ())
getErrorHandler = errorHandler
getDefaultTimeout :: Config m a -> Maybe Int
getDefaultTimeout = defaultTimeout
getOther :: Config m a -> Maybe a
getOther = other
getProxyType :: Config m a -> Maybe ProxyType
getProxyType = proxyType
-- | A startup hook is run after the server initializes but before user request
processing begins . The server passes , through a ' StartupInfo ' object , the
-- startup hook a list of the sockets it is listening on and the final 'Config'
-- object completed after command-line processing.
getStartupHook :: Config m a -> Maybe (StartupInfo m a -> IO ())
getStartupHook = startupHook
------------------------------------------------------------------------------
-- | Setters for the corresponding 'Config' fields.  Each wraps its argument
-- in 'Just', overriding any previously-set value for that field.
setHostname :: ByteString -> Config m a -> Config m a
setHostname x c = c { hostname = Just x }

setAccessLog :: ConfigLog -> Config m a -> Config m a
setAccessLog x c = c { accessLog = Just x }

setErrorLog :: ConfigLog -> Config m a -> Config m a
setErrorLog x c = c { errorLog = Just x }

setLocale :: String -> Config m a -> Config m a
setLocale x c = c { locale = Just x }

setPort :: Int -> Config m a -> Config m a
setPort x c = c { port = Just x }

setBind :: ByteString -> Config m a -> Config m a
setBind x c = c { bind = Just x }

setSSLPort :: Int -> Config m a -> Config m a
setSSLPort x c = c { sslport = Just x }

setSSLBind :: ByteString -> Config m a -> Config m a
setSSLBind x c = c { sslbind = Just x }

setSSLCert :: FilePath -> Config m a -> Config m a
setSSLCert x c = c { sslcert = Just x }

setSSLChainCert :: Bool -> Config m a -> Config m a
setSSLChainCert x c = c { sslchaincert = Just x }

setSSLKey :: FilePath -> Config m a -> Config m a
setSSLKey x c = c { sslkey = Just x }

setUnixSocket :: FilePath -> Config m a -> Config m a
setUnixSocket x c = c { unixsocket = Just x }

-- | Note: only the lower nine permission bits (@0o777@) of the given mode
-- are kept.
setUnixSocketAccessMode :: Int -> Config m a -> Config m a
setUnixSocketAccessMode p c = c { unixaccessmode = Just (p .&. 0o777) }

setCompression :: Bool -> Config m a -> Config m a
setCompression x c = c { compression = Just x }

setVerbose :: Bool -> Config m a -> Config m a
setVerbose x c = c { verbose = Just x }

setErrorHandler :: (SomeException -> m ()) -> Config m a -> Config m a
setErrorHandler x c = c { errorHandler = Just x }

setDefaultTimeout :: Int -> Config m a -> Config m a
setDefaultTimeout x c = c { defaultTimeout = Just x }

setOther :: a -> Config m a -> Config m a
setOther x c = c { other = Just x }

setProxyType :: ProxyType -> Config m a -> Config m a
setProxyType x c = c { proxyType = Just x }

setStartupHook :: (StartupInfo m a -> IO ()) -> Config m a -> Config m a
setStartupHook x c = c { startupHook = Just x }
------------------------------------------------------------------------------
-- | Arguments passed to 'setStartupHook'.
data StartupInfo m a = StartupInfo
    { startupHookConfig  :: Config m a  -- ^ the completed server 'Config'
    , startupHookSockets :: [Socket]    -- ^ the sockets the server listens on
    }

-- | A 'StartupInfo' carrying an empty 'Config' and no sockets.
emptyStartupInfo :: StartupInfo m a
emptyStartupInfo = StartupInfo emptyConfig []

-- | The 'Socket's opened by the server. There will be two 'Socket's for SSL
-- connections, and one otherwise.
getStartupSockets :: StartupInfo m a -> [Socket]
getStartupSockets = startupHookSockets

-- | The 'Config', after any command line parsing has been performed.
getStartupConfig :: StartupInfo m a -> Config m a
getStartupConfig = startupHookConfig

setStartupSockets :: [Socket] -> StartupInfo m a -> StartupInfo m a
setStartupSockets x c = c { startupHookSockets = x }

setStartupConfig :: Config m a -> StartupInfo m a -> StartupInfo m a
setStartupConfig x c = c { startupHookConfig = x }
------------------------------------------------------------------------------
-- | Completes a partial 'Config' by filling unspecified fields in from
-- 'defaultConfig'.  If no listening endpoint was configured at all --
-- no HTTP port, no full SSL setup (port + bind + key + cert), and no unix
-- socket -- warns on stderr and defaults the HTTP port to 8000.
completeConfig :: (MonadSnap m) => Config m a -> IO (Config m a)
completeConfig config = do
    when noPort $ hPutStrLn stderr
                      "no port specified, defaulting to port 8000"

    -- cfg' is merged last, so its Just-port (if any) wins.
    return $! cfg `mappend` cfg'

  where
    cfg = defaultConfig `mappend` config

    -- SSL only counts as configured when all four of port, bind address,
    -- key, and certificate are present.
    sslVals = map ($ cfg) [ isJust . getSSLPort
                          , isJust . getSSLBind
                          , isJust . getSSLKey
                          , isJust . getSSLCert ]

    sslValid  = and sslVals
    unixValid = isJust $ unixsocket cfg
    noPort    = isNothing (getPort cfg) && not sslValid && not unixValid
    cfg'      = emptyConfig { port = if noPort then Just 8000 else Nothing }
------------------------------------------------------------------------------
-- | Converts a 'String' to a UTF-8 encoded 'ByteString'.
bsFromString :: String -> ByteString
bsFromString s = T.encodeUtf8 (T.pack s)
------------------------------------------------------------------------------
-- | Decodes a UTF-8 'ByteString' back into a 'String'.
toString :: ByteString -> String
toString bs = T.unpack (T.decodeUtf8 bs)
------------------------------------------------------------------------------
-- | Returns a description of the snap command line options suitable for use
-- with "System.Console.GetOpt".
optDescrs :: forall m a . MonadSnap m =>
             Config m a          -- ^ the configuration defaults.
          -> [OptDescr (Maybe (Config m a))]
optDescrs defaults =
    [ Option "" ["hostname"]
             (ReqArg (Just . setConfig setHostname . bsFromString) "NAME")
             $ "local hostname" ++ defaultC getHostname
    , Option "b" ["address"]
             (ReqArg (\s -> Just $ mempty { bind = Just $ bsFromString s })
                     "ADDRESS")
             $ "address to bind to" ++ defaultO bind
    , Option "p" ["port"]
             (ReqArg (\s -> Just $ mempty { port = Just $ read s}) "PORT")
             $ "port to listen on" ++ defaultO port
    , Option "" ["ssl-address"]
             (ReqArg (\s -> Just $ mempty { sslbind = Just $ bsFromString s })
                     "ADDRESS")
             $ "ssl address to bind to" ++ defaultO sslbind
    , Option "" ["ssl-port"]
             (ReqArg (\s -> Just $ mempty { sslport = Just $ read s}) "PORT")
             $ "ssl port to listen on" ++ defaultO sslport
    , Option "" ["ssl-cert"]
             (ReqArg (\s -> Just $ mempty { sslcert = Just s}) "PATH")
             $ "path to ssl certificate in PEM format" ++ defaultO sslcert
    , Option [] ["ssl-chain-cert"]
             (NoArg $ Just $ setConfig setSSLChainCert True)
             $ "certificate file contains complete certificate chain" ++ defaultB sslchaincert "site certificate only" "complete certificate chain"
    , Option [] ["no-ssl-chain-cert"]
             (NoArg $ Just $ setConfig setSSLChainCert False)
             $ "certificate file contains only the site certificate" ++ defaultB sslchaincert "site certificate only" "complete certificate chain"
    , Option [] ["ssl-key"]
             (ReqArg (\s -> Just $ mempty { sslkey = Just s}) "PATH")
             $ "path to ssl private key in PEM format" ++ defaultO sslkey
    , Option "" ["access-log"]
             (ReqArg (Just . setConfig setAccessLog . ConfigFileLog) "PATH")
             $ "access log" ++ defaultC getAccessLog
    , Option "" ["error-log"]
             (ReqArg (Just . setConfig setErrorLog . ConfigFileLog) "PATH")
             $ "error log" ++ defaultC getErrorLog
    , Option "" ["no-access-log"]
             (NoArg $ Just $ setConfig setAccessLog ConfigNoLog)
             "don't have an access log"
    , Option "" ["no-error-log"]
             (NoArg $ Just $ setConfig setErrorLog ConfigNoLog)
             "don't have an error log"
    , Option "c" ["compression"]
             (NoArg $ Just $ setConfig setCompression True)
             $ "use gzip compression on responses" ++
               defaultB getCompression "compressed" "uncompressed"
    , Option "t" ["timeout"]
             (ReqArg (\t -> Just $ mempty {
                              defaultTimeout = Just $ read t
                            }) "SECS")
             $ "set default timeout in seconds" ++ defaultC defaultTimeout
    , Option "" ["no-compression"]
             (NoArg $ Just $ setConfig setCompression False)
             $ "serve responses uncompressed" ++
               defaultB compression "compressed" "uncompressed"
    , Option "v" ["verbose"]
             (NoArg $ Just $ setConfig setVerbose True)
             $ "print server status updates to stderr" ++
               defaultC getVerbose
    , Option "q" ["quiet"]
             (NoArg $ Just $ setConfig setVerbose False)
             $ "do not print anything to stderr" ++
               defaultB getVerbose "verbose" "quiet"
    , Option "" ["proxy"]
             (ReqArg (Just . setConfig setProxyType . parseProxy . CI.mk)
                     "X_Forwarded_For")
             $ concat [ "Set --proxy=X_Forwarded_For if your snap application \n"
                      , "is behind an HTTP reverse proxy to ensure that \n"
                      , "rqClientAddr is set properly.\n"
                      , "Set --proxy=haproxy to use the haproxy protocol\n("
                      , "-protocol.txt)"
                      , defaultC getProxyType ]
    , Option "" ["unix-socket"]
             (ReqArg (Just . setConfig setUnixSocket) "PATH")
             $ concat ["Absolute path to unix socket file. "
                      , "File will be removed if already exists"]
    , Option "" ["unix-socket-mode"]
             (ReqArg (Just . setConfig setUnixSocketAccessMode . parseOctal)
                     "MODE")
             $ concat ["Access mode for unix socket in octal, for example 0760.\n"
                      ," Default is system specific."]
    , Option "h" ["help"]
             (NoArg Nothing)
             "display this help and exit"
    ]
  where
    -- Parses the argument to --proxy; accepts exactly the three known proxy
    -- modes (case-insensitively wrapped by the caller) and dies with a
    -- usage error otherwise.
    parseProxy s | s == "NoProxy"         = NoProxy
                 | s == "X_Forwarded_For" = X_Forwarded_For
                 | s == "haproxy"         = HaProxy
                 | otherwise = error $ concat [
                     "Error (--proxy): expected one of 'NoProxy', "
                   , "'X_Forwarded_For', or 'haproxy'. Got '"
                   , CI.original s
                   , "'"
                   ]

    -- Parses an octal file-mode string, restricted to the 0..0o777 range.
    parseOctal s = case readOct s of
                     ((v, _):_) | v >= 0 && v <= 0o777 -> v
                     _ -> error $ "Error (--unix-socket-mode): expected octal access mode"

    -- Applies a setter to an otherwise-empty config.
    setConfig f c = f c mempty

    -- Defaults displayed in the help text come from 'defaultConfig'
    -- overridden by the caller-supplied defaults.
    conf = defaultConfig `mappend` defaults

    -- Help-text suffix for a boolean field: ", default <y|n>", or nothing
    -- if the field is unset.
    defaultB :: (Config m a -> Maybe Bool) -> String -> String -> String
    defaultB f y n = (maybe "" (\b -> ", default " ++ if b
                                                        then y
                                                        else n) $ f conf) :: String

    -- Help-text suffix showing the default value of a field, or nothing if
    -- the field is unset.
    defaultC :: (Show b) => (Config m a -> Maybe b) -> String
    defaultC f = maybe "" ((", default " ++) . show) $ f conf

    -- Like 'defaultC', but reports ", default off" for unset fields.
    defaultO :: (Show b) => (Config m a -> Maybe b) -> String
    defaultO f = maybe ", default off" ((", default " ++) . show) $ f conf
------------------------------------------------------------------------------
-- | The default handler for uncaught exceptions: logs the offending request
-- (with all parameter values blinded, so secrets don't end up in the logs)
-- to the error log, and finishes the request with a plain-text 500 response
-- describing the exception.
defaultErrorHandler :: MonadSnap m => SomeException -> m ()
defaultErrorHandler e = do
    debug "Snap.Http.Server.Config errorHandler:"
    req <- getsRequest blindParams
    let sm = smsg req
    debug $ toString sm
    logError sm
    finishWith $ setContentType "text/plain; charset=utf-8"
               . setContentLength (fromIntegral $ S.length msg)
               . setResponseStatus 500 "Internal Server Error"
               . setResponseBody errBody
               $ emptyResponse

  where
    -- Replaces every query/POST parameter value with "...".
    blindParams r = r { rqPostParams = rmValues $ rqPostParams r
                      , rqParams     = rmValues $ rqParams r }
    rmValues     = Map.map (const ["..."])

    -- Response body writer: emits the exception message to the stream.
    errBody os   = Streams.write (Just msgB) os >> return os

    toByteString = S.concat . L.toChunks . toLazyByteString

    -- Full error-log message: request details plus the exception.
    smsg req     = toByteString $ requestErrorMessage req e

    msg          = toByteString msgB
    msgB         = mconcat [
                     byteString "A web handler threw an exception. Details:\n"
                   , stringUtf8 $ show e
                   ]
------------------------------------------------------------------------------
-- | Returns a 'Config' obtained from parsing command-line options, using the
-- default Snap 'OptDescr' set.
--
-- On Unix systems, the locale is read from the @LANG@ environment variable.
commandLineConfig :: MonadSnap m
                  => Config m a
                     -- ^ default configuration. This is combined with
                     -- 'defaultConfig' to obtain default values to use if the
                     -- given parameter is specified on the command line.
                     -- Usually it is fine to use 'emptyConfig' here.
                  -> IO (Config m a)
commandLineConfig defaults = extendedCommandLineConfig (optDescrs defaults) f defaults
  where
    -- Here getOpt can never change the "other" field, because we only use
    -- the default Snap OptDescr list, so the combining function will never
    -- be invoked; it is therefore safe to leave it undefined.
    f = undefined
------------------------------------------------------------------------------
-- | Returns a 'Config' obtained from parsing command-line options, using the
-- default Snap 'OptDescr' set as well as a list of user OptDescrs. User
-- OptDescrs use the \"other\" field (accessible using 'getOther' and
-- 'setOther') to store additional command-line option state. These are
-- combined using a user-defined combining function.
--
-- On Unix systems, the locale is read from the @LANG@ environment variable.
extendedCommandLineConfig :: MonadSnap m
                          => [OptDescr (Maybe (Config m a))]
                             -- ^ Full list of command line options (combine
                             -- yours with 'optDescrs' to extend Snap's
                             -- default set of options)
                          -> (a -> a -> a)
                             -- ^ State for multiple invoked user command-line
                             -- options will be combined using this function.
                          -> Config m a
                             -- ^ default configuration. This is combined with
                             -- Snap's 'defaultConfig' to obtain default
                             -- values to use if the given parameter is
                             -- specified on the command line. Usually it is
                             -- fine to use 'emptyConfig' here.
                          -> IO (Config m a)
extendedCommandLineConfig opts combiningFunction defaults = do
    args <- getArgs
    prog <- getProgName

    -- Any getopt parse errors (or an option mapped to Nothing, e.g.
    -- --help) print usage and exit; otherwise the parsed fragments are
    -- folded together left-to-right.
    result <- either (usage prog)
                     return
                     (case getOpt Permute opts args of
                        (f, _, []  ) -> maybe (Left []) Right $
                                        fmap (foldl' combine mempty) $
                                        sequence f
                        (_, _, errs) -> Left errs)

#ifndef PORTABLE
    lang <- getEnv "LANG"
    completeConfig $ mconcat [defaults,
                              mempty {locale = fmap upToUtf8 lang},
                              result]
#else
    completeConfig $ mconcat [defaults, result]
#endif

  where
    -- Prints the getopt errors plus generated usage text, then exits.
    usage prog errs = do
        let hdr = "Usage:\n " ++ prog ++ " [OPTION...]\n\nOptions:"
        let msg = concat errs ++ usageInfo hdr opts
        hPutStrLn stderr msg
        exitFailure

#ifndef PORTABLE
    -- Strips the charset suffix from a LANG value, e.g.
    -- "en_US.UTF-8" -> "en_US".
    upToUtf8 = takeWhile $ \c -> isAlpha c || '_' == c
#endif

    combine !a !b = a `mappend` b `mappend` newOther
      where
        -- combined is only a Just if both a and b have other fields, and then
        -- we use the combining function. Config's mappend picks the last
        -- "Just" in the other list.
        combined = do
            x <- getOther a
            y <- getOther b
            return $! combiningFunction x y

        newOther = mempty { other = combined }
-- | Maps a function over the result carried by an 'ArgDescr'.
fmapArg :: (a -> b) -> ArgDescr a -> ArgDescr b
fmapArg f descr = case descr of
    NoArg a    -> NoArg (f a)
    ReqArg g s -> ReqArg (f . g) s
    OptArg g s -> OptArg (f . g) s
-- | Maps a function over the result carried by an 'OptDescr'.
fmapOpt :: (a -> b) -> OptDescr a -> OptDescr b
fmapOpt f (Option short long descr help) =
    Option short long (fmapArg f descr) help
------------------------------------------------------------------------------
-- | Renders a log message describing an exception thrown while processing
-- the given request: the client address and port, the full request, and the
-- exception details.
requestErrorMessage :: Request -> SomeException -> Builder
requestErrorMessage req e = mconcat
    [ byteString "During processing of request from "
    , byteString (rqClientAddr req)
    , byteString ":"
    , fromShow (rqClientPort req)
    , byteString "\nrequest:\n"
    , fromShow (show req)
    , byteString "\n"
    , byteString "A web handler threw an exception. Details:\n"
    , fromShow e
    ]
------------------------------------------------------------------------------
-- | Renders any 'Show'able value as a UTF-8 'Builder'.
fromShow :: Show a => a -> Builder
fromShow x = stringUtf8 (show x)
| null | https://raw.githubusercontent.com/snapframework/snap-server/f9c6e00630a8a78705aceafa0ac046ae70e1310e/src/Snap/Internal/Http/Server/Config.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE DeriveDataTypeable #
# LANGUAGE OverloadedStrings #
----------------------------------------------------------------------------
| This module exports the 'Config' datatype, which you can use to configure
* Private
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| FIXME
----------------------------------------------------------------------------
| Data type representing the configuration of a logging target
^ no logging
^ log to text file
----------------------------------------------------------------------------
separation between:
* what the underlying code needs to configure itself
* what the command-line processing does.
The latter will provide "library" helper functions that operate on
Todo:
* need a function ::
this will prep for another function that will spawn all of the
accept loops with httpAcceptLoop.
* add support for socket activation to command line, or delegate to
different library? It's linux-only anyways, need to ifdef. It would be
function.
* break config into multiple modules:
* everything that modifies the snap handler (compression, proxy
settings, error handler)
* everything that directly modifies server settings (hostname /
defaultTimeout / hooks / etc)
* everything that configures backends (port/bind/ssl*)
* everything that handles command line stuff
* utility stuff
this becomes a binary option on the command - line side ( no
logging or yes, to this file), but the ConfigIoLog gets zapped
altogether.
----------------------------------------------------------------------------
| A record type which represents partial configurations (for 'httpServe')
by wrapping all of its fields in a 'Maybe'. Values of this type are usually
constructed via its 'Monoid' instance by doing something like:
Any fields which are unspecified in the 'Config' passed to 'httpServe' (and
this is the norm) are filled in with default values from 'defaultConfig'.
----------------------------------------------------------------------------
| The 'Typeable1' instance is here so 'Config' values can be
dynamically loaded with Hint.
----------------------------------------------------------------------------
'Config''s 'Monoid' instance.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| These are the default values for the options
----------------------------------------------------------------------------
| The hostname of the HTTP server. This field has the same format as an HTTP
otherwise we default to this value specified in the configuration.
| Path to the access log
| Path to the error log
| Gets the locale to use. Locales are used on Unix only, to set the
@LANG@\/@LC_ALL@\/etc. environment variable. For instance if you set the
\"@en_US.UTF-8@\".
| Returns the port to listen on (for http)
| Returns the address to bind to (for http)
| Returns the port to listen on (for https)
| Returns the address to bind to (for https)
| Path to the SSL certificate file
| Path to the SSL certificate file
| Path to the SSL key file
| File path to unix socket. Must be absolute path, but allows for symbolic
links.
| Access mode for unix socket, by default is system specific.
This should only be used to grant additional permissions to created
socket file, and not to remove permissions set by default.
The only portable way to limit access to socket is creating it in a
directory with proper permissions set.
Note: This uses umask. There is a race condition if process creates other
files at the same time as opening a unix socket with this option set.
| If set and set to True, compression is turned on when applicable
| Whether to write server status updates to stderr
| A startup hook is run after the server initializes but before user request
startup hook a list of the sockets it is listening on and the final 'Config'
object completed after command-line processing.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Arguments passed to 'setStartupHook'.
The 'Config', after any command line parsing has been performed.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Returns a description of the snap command line options suitable for use
^ the configuration defaults.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Returns a 'Config' obtained from parsing command-line options, using the
On Unix systems, the locale is read from the @LANG@ environment variable.
^ default configuration. This is combined with
'defaultConfig' to obtain default values to use if the
given parameter is specified on the command line.
Usually it is fine to use 'emptyConfig' here.
Here getOpt can ever change the "other" field, because we only use the
----------------------------------------------------------------------------
| Returns a 'Config' obtained from parsing command-line options, using the
combined using a user-defined combining function.
On Unix systems, the locale is read from the @LANG@ environment variable.
^ Full list of command line options (combine
set of options)
^ State for multiple invoked user command-line
options will be combined using this function.
^ default configuration. This is combined with
to use if the given parameter is specified on
the command line. Usually it is fine to use
'emptyConfig' here.
combined is only a Just if both a and b have other fields, and then
we use the combining function. Config's mappend picks the last
"Just" in the other list.
----------------------------------------------------------------------------
---------------------------------------------------------------------------- | # LANGUAGE CPP #
# LANGUAGE ScopedTypeVariables #
the Snap HTTP server .
module Snap.Internal.Http.Server.Config
NOTE : also edit Snap . Http . Server . Config if you change these
( ConfigLog(..)
, Config(..)
, ProxyType(..)
, emptyConfig
, defaultConfig
, commandLineConfig
, extendedCommandLineConfig
, completeConfig
, optDescrs
, fmapOpt
, getAccessLog
, getBind
, getCompression
, getDefaultTimeout
, getErrorHandler
, getErrorLog
, getHostname
, getLocale
, getOther
, getPort
, getProxyType
, getSSLBind
, getSSLCert
, getSSLChainCert
, getSSLKey
, getSSLPort
, getVerbose
, getStartupHook
, getUnixSocket
, getUnixSocketAccessMode
, setAccessLog
, setBind
, setCompression
, setDefaultTimeout
, setErrorHandler
, setErrorLog
, setHostname
, setLocale
, setOther
, setPort
, setProxyType
, setSSLBind
, setSSLCert
, setSSLChainCert
, setSSLKey
, setSSLPort
, setVerbose
, setUnixSocket
, setUnixSocketAccessMode
, setStartupHook
, StartupInfo(..)
, getStartupSockets
, getStartupConfig
, emptyStartupInfo
, setStartupSockets
, setStartupConfig
) where
import Control.Exception (SomeException)
import Control.Monad (when)
import Data.Bits ((.&.))
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.CaseInsensitive as CI
import Data.Function (on)
import Data.List (foldl')
import qualified Data.Map as Map
import Data.Maybe (isJust, isNothing)
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (Monoid (..))
#endif
import Data.Monoid (Last (Last, getLast))
#if !MIN_VERSION_base(4,11,0)
import Data.Semigroup (Semigroup (..))
#endif
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
#if MIN_VERSION_base(4,7,0)
import Data.Typeable (Typeable)
#else
import Data.Typeable (TyCon, Typeable, Typeable1 (..), mkTyCon3, mkTyConApp)
#endif
import Network.Socket (Socket)
import Numeric (readOct, showOct)
#if !MIN_VERSION_base(4,6,0)
import Prelude hiding (catch)
#endif
import System.Console.GetOpt (ArgDescr (..), ArgOrder (Permute), OptDescr (..), getOpt, usageInfo)
import System.Environment hiding (getEnv)
#ifndef PORTABLE
import Data.Char (isAlpha)
import System.Posix.Env (getEnv)
#endif
import System.Exit (exitFailure)
import System.IO (hPutStrLn, stderr)
import Data.ByteString.Builder (Builder, byteString, stringUtf8, toLazyByteString)
import qualified System.IO.Streams as Streams
import Snap.Core (MonadSnap, Request (rqClientAddr, rqClientPort, rqParams, rqPostParams), emptyResponse, finishWith, getsRequest, logError, setContentLength, setContentType, setResponseBody, setResponseStatus)
import Snap.Internal.Debug (debug)
Note : this type changed in snap - server 1.0.0.0 .
data ProxyType = NoProxy
| HaProxy
| X_Forwarded_For
deriving (Show, Eq, Typeable)
^ log custom IO handler
instance Show ConfigLog where
show ConfigNoLog = "no log"
show (ConfigFileLog f) = "log to file " ++ show f
show (ConfigIoLog _) = "custom logging handler"
We should be using ServerConfig here . There needs to be a clearer
ServerConfig / etc in order to allow users to configure their own environment .
CommandLineConfig - > IO [ ( ServerConfig hookState , AcceptFunc ) ]
* all backends provide " Some - > Foo - > Config - > IO AcceptFunc "
silly to depend on the socket - activation library for that one little
Cruft that definitely must be removed :
> setPort 1234 mempty
data Config m a = Config
{ hostname :: Maybe ByteString
, accessLog :: Maybe ConfigLog
, errorLog :: Maybe ConfigLog
, locale :: Maybe String
, port :: Maybe Int
, bind :: Maybe ByteString
, sslport :: Maybe Int
, sslbind :: Maybe ByteString
, sslcert :: Maybe FilePath
, sslchaincert :: Maybe Bool
, sslkey :: Maybe FilePath
, unixsocket :: Maybe FilePath
, unixaccessmode :: Maybe Int
, compression :: Maybe Bool
, verbose :: Maybe Bool
, errorHandler :: Maybe (SomeException -> m ())
, defaultTimeout :: Maybe Int
, other :: Maybe a
, proxyType :: Maybe ProxyType
, startupHook :: Maybe (StartupInfo m a -> IO ())
}
#if MIN_VERSION_base(4,7,0)
deriving Typeable
#else
configTyCon :: TyCon
configTyCon = mkTyCon3 "snap-server" "Snap.Http.Server.Config" "Config"
# NOINLINE configTyCon #
instance (Typeable1 m) => Typeable1 (Config m) where
typeOf1 _ = mkTyConApp configTyCon [typeOf1 (undefined :: m ())]
#endif
instance Show (Config m a) where
show c = unlines [ "Config:"
, "hostname: " ++ _hostname
, "accessLog: " ++ _accessLog
, "errorLog: " ++ _errorLog
, "locale: " ++ _locale
, "port: " ++ _port
, "bind: " ++ _bind
, "sslport: " ++ _sslport
, "sslbind: " ++ _sslbind
, "sslcert: " ++ _sslcert
, "sslchaincert: " ++ _sslchaincert
, "sslkey: " ++ _sslkey
, "unixsocket: " ++ _unixsocket
, "unixaccessmode: " ++ _unixaccessmode
, "compression: " ++ _compression
, "verbose: " ++ _verbose
, "defaultTimeout: " ++ _defaultTimeout
, "proxyType: " ++ _proxyType
]
where
_hostname = show $ hostname c
_accessLog = show $ accessLog c
_errorLog = show $ errorLog c
_locale = show $ locale c
_port = show $ port c
_bind = show $ bind c
_sslport = show $ sslport c
_sslbind = show $ sslbind c
_sslcert = show $ sslcert c
_sslchaincert = show $ sslchaincert c
_sslkey = show $ sslkey c
_compression = show $ compression c
_verbose = show $ verbose c
_defaultTimeout = show $ defaultTimeout c
_proxyType = show $ proxyType c
_unixsocket = show $ unixsocket c
_unixaccessmode = case unixaccessmode c of
Nothing -> "Nothing"
Just s -> ("Just 0" ++) . showOct s $ []
| Returns a completely empty ' Config ' . Equivalent to ' ' from
emptyConfig :: Config m a
emptyConfig = mempty
instance Semigroup (Config m a) where
a <> b = Config
{ hostname = ov hostname
, accessLog = ov accessLog
, errorLog = ov errorLog
, locale = ov locale
, port = ov port
, bind = ov bind
, sslport = ov sslport
, sslbind = ov sslbind
, sslcert = ov sslcert
, sslchaincert = ov sslchaincert
, sslkey = ov sslkey
, unixsocket = ov unixsocket
, unixaccessmode = ov unixaccessmode
, compression = ov compression
, verbose = ov verbose
, errorHandler = ov errorHandler
, defaultTimeout = ov defaultTimeout
, other = ov other
, proxyType = ov proxyType
, startupHook = ov startupHook
}
where
ov :: (Config m a -> Maybe b) -> Maybe b
ov f = getLast $! (mappend `on` (Last . f)) a b
instance Monoid (Config m a) where
mempty = Config
{ hostname = Nothing
, accessLog = Nothing
, errorLog = Nothing
, locale = Nothing
, port = Nothing
, bind = Nothing
, sslport = Nothing
, sslbind = Nothing
, sslcert = Nothing
, sslchaincert = Nothing
, sslkey = Nothing
, unixsocket = Nothing
, unixaccessmode = Nothing
, compression = Nothing
, verbose = Nothing
, errorHandler = Nothing
, defaultTimeout = Nothing
, other = Nothing
, proxyType = Nothing
, startupHook = Nothing
}
#if !MIN_VERSION_base(4,11,0)
mappend = (<>)
#endif
defaultConfig :: MonadSnap m => Config m a
defaultConfig = mempty
{ hostname = Just "localhost"
, accessLog = Just $ ConfigFileLog "log/access.log"
, errorLog = Just $ ConfigFileLog "log/error.log"
, locale = Just "en_US"
, compression = Just True
, verbose = Just True
, errorHandler = Just defaultErrorHandler
, bind = Just "0.0.0.0"
, sslbind = Nothing
, sslcert = Nothing
, sslkey = Nothing
, sslchaincert = Nothing
, defaultTimeout = Just 60
}
-- @Host@ header; if a header came in with the request, we use that,
getHostname :: Config m a -> Maybe ByteString
getHostname = hostname
getAccessLog :: Config m a -> Maybe ConfigLog
getAccessLog = accessLog
getErrorLog :: Config m a -> Maybe ConfigLog
getErrorLog = errorLog
locale to " , we 'll set the relevant environment variables to
getLocale :: Config m a -> Maybe String
getLocale = locale
getPort :: Config m a -> Maybe Int
getPort = port
getBind :: Config m a -> Maybe ByteString
getBind = bind
getSSLPort :: Config m a -> Maybe Int
getSSLPort = sslport
getSSLBind :: Config m a -> Maybe ByteString
getSSLBind = sslbind
getSSLCert :: Config m a -> Maybe FilePath
getSSLCert = sslcert
getSSLChainCert :: Config m a -> Maybe Bool
getSSLChainCert = sslchaincert
getSSLKey :: Config m a -> Maybe FilePath
getSSLKey = sslkey
getUnixSocket :: Config m a -> Maybe FilePath
getUnixSocket = unixsocket
-- Most BSD systems ignore access permissions on unix sockets.
getUnixSocketAccessMode :: Config m a -> Maybe Int
getUnixSocketAccessMode = unixaccessmode
getCompression :: Config m a -> Maybe Bool
getCompression = compression
getVerbose :: Config m a -> Maybe Bool
getVerbose = verbose
-- | A MonadSnap action to handle 500 errors
getErrorHandler :: Config m a -> Maybe (SomeException -> m ())
getErrorHandler = errorHandler
getDefaultTimeout :: Config m a -> Maybe Int
getDefaultTimeout = defaultTimeout
getOther :: Config m a -> Maybe a
getOther = other
getProxyType :: Config m a -> Maybe ProxyType
getProxyType = proxyType
-- processing begins. The server passes, through a 'StartupInfo' object, the
getStartupHook :: Config m a -> Maybe (StartupInfo m a -> IO ())
getStartupHook = startupHook
setHostname :: ByteString -> Config m a -> Config m a
setHostname x c = c { hostname = Just x }
setAccessLog :: ConfigLog -> Config m a -> Config m a
setAccessLog x c = c { accessLog = Just x }
setErrorLog :: ConfigLog -> Config m a -> Config m a
setErrorLog x c = c { errorLog = Just x }
setLocale :: String -> Config m a -> Config m a
setLocale x c = c { locale = Just x }
setPort :: Int -> Config m a -> Config m a
setPort x c = c { port = Just x }
setBind :: ByteString -> Config m a -> Config m a
setBind x c = c { bind = Just x }
setSSLPort :: Int -> Config m a -> Config m a
setSSLPort x c = c { sslport = Just x }
setSSLBind :: ByteString -> Config m a -> Config m a
setSSLBind x c = c { sslbind = Just x }
setSSLCert :: FilePath -> Config m a -> Config m a
setSSLCert x c = c { sslcert = Just x }
setSSLChainCert :: Bool -> Config m a -> Config m a
setSSLChainCert x c = c { sslchaincert = Just x }
setSSLKey :: FilePath -> Config m a -> Config m a
setSSLKey x c = c { sslkey = Just x }
setUnixSocket :: FilePath -> Config m a -> Config m a
setUnixSocket x c = c { unixsocket = Just x }
-- | Set the access mode for the unix socket.  Only the permission
-- bits are kept: the mode is masked with @0o777@, so flag bits such
-- as setuid cannot be smuggled in.  (Most BSD systems ignore access
-- permissions on unix sockets entirely.)
setUnixSocketAccessMode :: Int -> Config m a -> Config m a
setUnixSocketAccessMode p c = c { unixaccessmode = Just ( p .&. 0o777) }
setCompression :: Bool -> Config m a -> Config m a
setCompression x c = c { compression = Just x }
setVerbose :: Bool -> Config m a -> Config m a
setVerbose x c = c { verbose = Just x }
setErrorHandler :: (SomeException -> m ()) -> Config m a -> Config m a
setErrorHandler x c = c { errorHandler = Just x }
setDefaultTimeout :: Int -> Config m a -> Config m a
setDefaultTimeout x c = c { defaultTimeout = Just x }
setOther :: a -> Config m a -> Config m a
setOther x c = c { other = Just x }
setProxyType :: ProxyType -> Config m a -> Config m a
setProxyType x c = c { proxyType = Just x }
setStartupHook :: (StartupInfo m a -> IO ()) -> Config m a -> Config m a
setStartupHook x c = c { startupHook = Just x }
data StartupInfo m a = StartupInfo
{ startupHookConfig :: Config m a
, startupHookSockets :: [Socket]
}
emptyStartupInfo :: StartupInfo m a
emptyStartupInfo = StartupInfo emptyConfig []
-- | The 'Socket's opened by the server. There will be two 'Socket's for SSL
-- connections, and one otherwise.
getStartupSockets :: StartupInfo m a -> [Socket]
getStartupSockets = startupHookSockets
getStartupConfig :: StartupInfo m a -> Config m a
getStartupConfig = startupHookConfig
setStartupSockets :: [Socket] -> StartupInfo m a -> StartupInfo m a
setStartupSockets x c = c { startupHookSockets = x }
setStartupConfig :: Config m a -> StartupInfo m a -> StartupInfo m a
setStartupConfig x c = c { startupHookConfig = x }
-- Fill in derived defaults: layer the user config over
-- 'defaultConfig', and if no HTTP port, no complete SSL setup and no
-- unix socket was configured, warn on stderr and fall back to port
-- 8000.
completeConfig :: (MonadSnap m) => Config m a -> IO (Config m a)
completeConfig config = do
    when noPort $ hPutStrLn stderr
        "no port specified, defaulting to port 8000"
    return $! cfg `mappend` cfg'
  where
    cfg = defaultConfig `mappend` config
    -- SSL counts as a listener only when port, bind address, key and
    -- certificate are all present.
    sslVals = map ($ cfg) [ isJust . getSSLPort
                          , isJust . getSSLBind
                          , isJust . getSSLKey
                          , isJust . getSSLCert ]
    sslValid = and sslVals
    unixValid = isJust $ unixsocket cfg
    noPort = isNothing (getPort cfg) && not sslValid && not unixValid
    cfg' = emptyConfig { port = if noPort then Just 8000 else Nothing }
bsFromString :: String -> ByteString
bsFromString = T.encodeUtf8 . T.pack
toString :: ByteString -> String
toString = T.unpack . T.decodeUtf8
with " System . Console . " .
optDescrs :: forall m a . MonadSnap m =>
-> [OptDescr (Maybe (Config m a))]
optDescrs defaults =
[ Option "" ["hostname"]
(ReqArg (Just . setConfig setHostname . bsFromString) "NAME")
$ "local hostname" ++ defaultC getHostname
, Option "b" ["address"]
(ReqArg (\s -> Just $ mempty { bind = Just $ bsFromString s })
"ADDRESS")
$ "address to bind to" ++ defaultO bind
, Option "p" ["port"]
(ReqArg (\s -> Just $ mempty { port = Just $ read s}) "PORT")
$ "port to listen on" ++ defaultO port
, Option "" ["ssl-address"]
(ReqArg (\s -> Just $ mempty { sslbind = Just $ bsFromString s })
"ADDRESS")
$ "ssl address to bind to" ++ defaultO sslbind
, Option "" ["ssl-port"]
(ReqArg (\s -> Just $ mempty { sslport = Just $ read s}) "PORT")
$ "ssl port to listen on" ++ defaultO sslport
, Option "" ["ssl-cert"]
(ReqArg (\s -> Just $ mempty { sslcert = Just s}) "PATH")
$ "path to ssl certificate in PEM format" ++ defaultO sslcert
, Option [] ["ssl-chain-cert"]
(NoArg $ Just $ setConfig setSSLChainCert True)
$ "certificate file contains complete certificate chain" ++ defaultB sslchaincert "site certificate only" "complete certificate chain"
, Option [] ["no-ssl-chain-cert"]
(NoArg $ Just $ setConfig setSSLChainCert False)
$ "certificate file contains only the site certificate" ++ defaultB sslchaincert "site certificate only" "complete certificate chain"
, Option [] ["ssl-key"]
(ReqArg (\s -> Just $ mempty { sslkey = Just s}) "PATH")
$ "path to ssl private key in PEM format" ++ defaultO sslkey
, Option "" ["access-log"]
(ReqArg (Just . setConfig setAccessLog . ConfigFileLog) "PATH")
$ "access log" ++ defaultC getAccessLog
, Option "" ["error-log"]
(ReqArg (Just . setConfig setErrorLog . ConfigFileLog) "PATH")
$ "error log" ++ defaultC getErrorLog
, Option "" ["no-access-log"]
(NoArg $ Just $ setConfig setAccessLog ConfigNoLog)
"don't have an access log"
, Option "" ["no-error-log"]
(NoArg $ Just $ setConfig setErrorLog ConfigNoLog)
"don't have an error log"
, Option "c" ["compression"]
(NoArg $ Just $ setConfig setCompression True)
$ "use gzip compression on responses" ++
defaultB getCompression "compressed" "uncompressed"
, Option "t" ["timeout"]
(ReqArg (\t -> Just $ mempty {
defaultTimeout = Just $ read t
}) "SECS")
$ "set default timeout in seconds" ++ defaultC defaultTimeout
, Option "" ["no-compression"]
(NoArg $ Just $ setConfig setCompression False)
$ "serve responses uncompressed" ++
defaultB compression "compressed" "uncompressed"
, Option "v" ["verbose"]
(NoArg $ Just $ setConfig setVerbose True)
$ "print server status updates to stderr" ++
defaultC getVerbose
, Option "q" ["quiet"]
(NoArg $ Just $ setConfig setVerbose False)
$ "do not print anything to stderr" ++
defaultB getVerbose "verbose" "quiet"
, Option "" ["proxy"]
(ReqArg (Just . setConfig setProxyType . parseProxy . CI.mk)
"X_Forwarded_For")
$ concat [ "Set --proxy=X_Forwarded_For if your snap application \n"
, "is behind an HTTP reverse proxy to ensure that \n"
, "rqClientAddr is set properly.\n"
, "Set --proxy=haproxy to use the haproxy protocol\n("
, "-protocol.txt)"
, defaultC getProxyType ]
, Option "" ["unix-socket"]
(ReqArg (Just . setConfig setUnixSocket) "PATH")
$ concat ["Absolute path to unix socket file. "
, "File will be removed if already exists"]
, Option "" ["unix-socket-mode"]
(ReqArg (Just . setConfig setUnixSocketAccessMode . parseOctal)
"MODE")
$ concat ["Access mode for unix socket in octal, for example 0760.\n"
," Default is system specific."]
, Option "h" ["help"]
(NoArg Nothing)
"display this help and exit"
]
where
parseProxy s | s == "NoProxy" = NoProxy
| s == "X_Forwarded_For" = X_Forwarded_For
| s == "haproxy" = HaProxy
| otherwise = error $ concat [
"Error (--proxy): expected one of 'NoProxy', "
, "'X_Forwarded_For', or 'haproxy'. Got '"
, CI.original s
, "'"
]
parseOctal s = case readOct s of
((v, _):_) | v >= 0 && v <= 0o777 -> v
_ -> error $ "Error (--unix-socket-mode): expected octal access mode"
setConfig f c = f c mempty
conf = defaultConfig `mappend` defaults
defaultB :: (Config m a -> Maybe Bool) -> String -> String -> String
defaultB f y n = (maybe "" (\b -> ", default " ++ if b
then y
else n) $ f conf) :: String
defaultC :: (Show b) => (Config m a -> Maybe b) -> String
defaultC f = maybe "" ((", default " ++) . show) $ f conf
defaultO :: (Show b) => (Config m a -> Maybe b) -> String
defaultO f = maybe ", default off" ((", default " ++) . show) $ f conf
defaultErrorHandler :: MonadSnap m => SomeException -> m ()
defaultErrorHandler e = do
debug "Snap.Http.Server.Config errorHandler:"
req <- getsRequest blindParams
let sm = smsg req
debug $ toString sm
logError sm
finishWith $ setContentType "text/plain; charset=utf-8"
. setContentLength (fromIntegral $ S.length msg)
. setResponseStatus 500 "Internal Server Error"
. setResponseBody errBody
$ emptyResponse
where
blindParams r = r { rqPostParams = rmValues $ rqPostParams r
, rqParams = rmValues $ rqParams r }
rmValues = Map.map (const ["..."])
errBody os = Streams.write (Just msgB) os >> return os
toByteString = S.concat . L.toChunks . toLazyByteString
smsg req = toByteString $ requestErrorMessage req e
msg = toByteString msgB
msgB = mconcat [
byteString "A web handler threw an exception. Details:\n"
, stringUtf8 $ show e
]
default Snap ' OptDescr ' set .
commandLineConfig :: MonadSnap m
=> Config m a
-> IO (Config m a)
commandLineConfig defaults = extendedCommandLineConfig (optDescrs defaults) f defaults
where
    -- Snap OptDescr list. The combining function will never be invoked.
f = undefined
-- default Snap 'OptDescr' set as well as a list of user OptDescrs. User
-- OptDescrs use the "other" field (accessible using 'getOther' and
-- 'setOther') to store additional command-line option state. These are
extendedCommandLineConfig :: MonadSnap m
=> [OptDescr (Maybe (Config m a))]
       -- yours with 'optDescrs' to extend Snap's default
-> (a -> a -> a)
-> Config m a
       -- Snap's 'defaultConfig' to obtain default values
-> IO (Config m a)
extendedCommandLineConfig opts combiningFunction defaults = do
args <- getArgs
prog <- getProgName
result <- either (usage prog)
return
(case getOpt Permute opts args of
(f, _, [] ) -> maybe (Left []) Right $
fmap (foldl' combine mempty) $
sequence f
(_, _, errs) -> Left errs)
#ifndef PORTABLE
lang <- getEnv "LANG"
completeConfig $ mconcat [defaults,
mempty {locale = fmap upToUtf8 lang},
result]
#else
completeConfig $ mconcat [defaults, result]
#endif
where
usage prog errs = do
let hdr = "Usage:\n " ++ prog ++ " [OPTION...]\n\nOptions:"
let msg = concat errs ++ usageInfo hdr opts
hPutStrLn stderr msg
exitFailure
#ifndef PORTABLE
upToUtf8 = takeWhile $ \c -> isAlpha c || '_' == c
#endif
combine !a !b = a `mappend` b `mappend` newOther
where
combined = do
x <- getOther a
y <- getOther b
return $! combiningFunction x y
newOther = mempty { other = combined }
fmapArg :: (a -> b) -> ArgDescr a -> ArgDescr b
fmapArg f (NoArg a) = NoArg (f a)
fmapArg f (ReqArg g s) = ReqArg (f . g) s
fmapArg f (OptArg g s) = OptArg (f . g) s
fmapOpt :: (a -> b) -> OptDescr a -> OptDescr b
fmapOpt f (Option s l d e) = Option s l (fmapArg f d) e
requestErrorMessage :: Request -> SomeException -> Builder
requestErrorMessage req e =
mconcat [ byteString "During processing of request from "
, byteString $ rqClientAddr req
, byteString ":"
, fromShow $ rqClientPort req
, byteString "\nrequest:\n"
, fromShow $ show req
, byteString "\n"
, msgB
]
where
msgB = mconcat [
byteString "A web handler threw an exception. Details:\n"
, fromShow e
]
fromShow :: Show a => a -> Builder
fromShow = stringUtf8 . show
|
6c4b82c4c5fc6c10e8b0300f3ee58e36f15293426ebd0be49d9bb3746b513dda | flipstone/orville | ErrorDetailLevel.hs | module Orville.PostgreSQL.Internal.ErrorDetailLevel
( ErrorDetailLevel (ErrorDetailLevel, includeErrorMessage, includeSchemaNames, includeRowIdentifierValues, includeNonIdentifierValues),
defaultErrorDetailLevel,
minimalErrorDetailLevel,
maximalErrorDetailLevel,
redactErrorMessage,
redactSchemaName,
redactIdentifierValue,
redactNonIdentifierValue,
)
where
{- |
  'ErrorDetailLevel' provides a means to configure what elements of information
  are included in error messages that originate from decoding rows queried
  from the database. This can be specified either by manually rendering the
  error message and providing the desired configuration, or by setting the
  desired detail level in the @OrvilleState@ as a default.
  Information will be redacted from error messages for any of the fields
  that are set to @False@.
-}
data ErrorDetailLevel = ErrorDetailLevel
  { includeErrorMessage :: Bool
    -- ^ include the situationally-specific decoding error message
  , includeSchemaNames :: Bool
    -- ^ include schema names (e.g. tables and columns)
  , includeRowIdentifierValues :: Bool
    -- ^ include values from row-identifier columns
  , includeNonIdentifierValues :: Bool
    -- ^ include values from non-identifier columns
  }
  deriving (Show)
{- |
  A minimal 'ErrorDetailLevel' where all information (including
  any situationally-specific error message!) is redacted from error messages.
-}
minimalErrorDetailLevel :: ErrorDetailLevel
minimalErrorDetailLevel =
ErrorDetailLevel
{ includeErrorMessage = False
, includeSchemaNames = False
, includeRowIdentifierValues = False
, includeNonIdentifierValues = False
}
{- |
  A default 'ErrorDetailLevel' that strikes a balance of including all "generic"
  information such as the error message, schema names and row identifiers, but
  avoids unintentionally leaking non-identifier values from the database by
  redacting them.
-}
defaultErrorDetailLevel :: ErrorDetailLevel
defaultErrorDetailLevel =
ErrorDetailLevel
{ includeErrorMessage = True
, includeSchemaNames = True
, includeRowIdentifierValues = True
, includeNonIdentifierValues = False
}
{- |
  A maximal 'ErrorDetailLevel' that redacts no information from the error
  messages. Error messages will include values from the database for any
  columns involved in a decoding failure, including some which you may
  not have intended to expose through error messages. Use with caution.
-}
maximalErrorDetailLevel :: ErrorDetailLevel
maximalErrorDetailLevel =
ErrorDetailLevel
{ includeErrorMessage = True
, includeSchemaNames = True
, includeRowIdentifierValues = True
, includeNonIdentifierValues = True
}
{- |
  Redacts the given error message string if the 'ErrorDetailLevel' indicates
  that error messages should be redacted.
-}
-- | Pass the message through when error messages are enabled in the
-- given 'ErrorDetailLevel'; otherwise substitute the redaction
-- placeholder.
redactErrorMessage :: ErrorDetailLevel -> String -> String
redactErrorMessage lvl msg
  | includeErrorMessage lvl = msg
  | otherwise = redactedValue
{- |
  Redacts the given schema name string if the 'ErrorDetailLevel' indicates
  that schema names should be redacted.
-}
redactSchemaName :: ErrorDetailLevel -> String -> String
redactSchemaName detailLevel schemaName =
if includeSchemaNames detailLevel
then schemaName
else redactedValue
{- |
  Redacts the given identifier value string if the 'ErrorDetailLevel' indicates
  that identifier values should be redacted.
-}
redactIdentifierValue :: ErrorDetailLevel -> String -> String
redactIdentifierValue detailLevel idValue =
if includeRowIdentifierValues detailLevel
then idValue
else redactedValue
{- |
  Redacts the given non-identifier value string if the 'ErrorDetailLevel' indicates
  that non-identifier values should be redacted.
-}
redactNonIdentifierValue :: ErrorDetailLevel -> String -> String
redactNonIdentifierValue detailLevel nonIdValue =
if includeNonIdentifierValues detailLevel
then nonIdValue
else redactedValue
redactedValue :: String
redactedValue =
"[REDACTED]"
| null | https://raw.githubusercontent.com/flipstone/orville/0c03174967a5ad70c2a5881d1c43b43cac587556/orville-postgresql-libpq/src/Orville/PostgreSQL/Internal/ErrorDetailLevel.hs | haskell | module Orville.PostgreSQL.Internal.ErrorDetailLevel
( ErrorDetailLevel (ErrorDetailLevel, includeErrorMessage, includeSchemaNames, includeRowIdentifierValues, includeNonIdentifierValues),
defaultErrorDetailLevel,
minimalErrorDetailLevel,
maximalErrorDetailLevel,
redactErrorMessage,
redactSchemaName,
redactIdentifierValue,
redactNonIdentifierValue,
)
where
|
' ErrorDetailLevel ' provides a means to configure what elements of information
are included in error messages that originate from decoding rows queried
from the database . This can be specified either my manually rendering the
error message and providing the desired configuration , or by setting the
desired detail level in the @OrvilleState@ as a default .
Information will be redacted from error messages for any of the fields
that are set to
'ErrorDetailLevel' provides a means to configure what elements of information
are included in error messages that originate from decoding rows queried
from the database. This can be specified either my manually rendering the
error message and providing the desired configuration, or by setting the
desired detail level in the @OrvilleState@ as a default.
Information will be redacted from error messages for any of the fields
that are set to @False@.
-}
data ErrorDetailLevel = ErrorDetailLevel
{ includeErrorMessage :: Bool
, includeSchemaNames :: Bool
, includeRowIdentifierValues :: Bool
, includeNonIdentifierValues :: Bool
}
deriving (Show)
|
A minimal ' ErrorDetailLevel ' where everything all information ( including
any situationally - specific error message ! ) is redacted from error messages .
A minimal 'ErrorDetailLevel' where everything all information (including
any situationally-specific error message!) is redacted from error messages.
-}
minimalErrorDetailLevel :: ErrorDetailLevel
minimalErrorDetailLevel =
ErrorDetailLevel
{ includeErrorMessage = False
, includeSchemaNames = False
, includeRowIdentifierValues = False
, includeNonIdentifierValues = False
}
|
A default ' ErrorDetailLevel ' that strikes balance of including all " Generic "
information such as the error message , schema names and row identifiers , but
avoids untentionally leaking non - identifier values from the database by
redacting them .
A default 'ErrorDetailLevel' that strikes balance of including all "Generic"
information such as the error message, schema names and row identifiers, but
avoids untentionally leaking non-identifier values from the database by
redacting them.
-}
defaultErrorDetailLevel :: ErrorDetailLevel
defaultErrorDetailLevel =
ErrorDetailLevel
{ includeErrorMessage = True
, includeSchemaNames = True
, includeRowIdentifierValues = True
, includeNonIdentifierValues = False
}
|
A maximal ' ErrorDetailLevel ' that redacts no information from the error
messages . Error messages will include values from the database for any
columns are involved in a decoding failure , including some which you may
not have intended to expose through error message . Use with caution .
A maximal 'ErrorDetailLevel' that redacts no information from the error
messages. Error messages will include values from the database for any
columns are involved in a decoding failure, including some which you may
not have intended to expose through error message. Use with caution.
-}
maximalErrorDetailLevel :: ErrorDetailLevel
maximalErrorDetailLevel =
ErrorDetailLevel
{ includeErrorMessage = True
, includeSchemaNames = True
, includeRowIdentifierValues = True
, includeNonIdentifierValues = True
}
|
Redacts given the error message string if the ' ErrorDetailLevel ' indicates
that error messages should be redacted .
Redacts given the error message string if the 'ErrorDetailLevel' indicates
that error messages should be redacted.
-}
redactErrorMessage :: ErrorDetailLevel -> String -> String
redactErrorMessage detailLevel message =
if includeErrorMessage detailLevel
then message
else redactedValue
|
Redacts given the schema name string if the ' ErrorDetailLevel ' indicates
that schema names should be redacted .
Redacts given the schema name string if the 'ErrorDetailLevel' indicates
that schema names should be redacted.
-}
redactSchemaName :: ErrorDetailLevel -> String -> String
redactSchemaName detailLevel schemaName =
if includeSchemaNames detailLevel
then schemaName
else redactedValue
|
Redacts given the identifier value string if the ' ErrorDetailLevel ' indicates
that identifier values should be redacted .
Redacts given the identifier value string if the 'ErrorDetailLevel' indicates
that identifier values should be redacted.
-}
redactIdentifierValue :: ErrorDetailLevel -> String -> String
redactIdentifierValue detailLevel idValue =
if includeRowIdentifierValues detailLevel
then idValue
else redactedValue
|
Redacts given the non - identifier value string if the ' ErrorDetailLevel ' indicates
that non - identifier values should be redacted .
Redacts given the non-identifier value string if the 'ErrorDetailLevel' indicates
that non-identifier values should be redacted.
-}
redactNonIdentifierValue :: ErrorDetailLevel -> String -> String
redactNonIdentifierValue detailLevel nonIdValue =
if includeNonIdentifierValues detailLevel
then nonIdValue
else redactedValue
redactedValue :: String
redactedValue =
"[REDACTED]"
| |
4af9e4ff8cdf43041ea2373f7f7a53e9949d87d818a7d31966b1b4c9c4440014 | GaloisInc/tower | Simple.hs | # LANGUAGE DataKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE QuasiQuotes #
# LANGUAGE RecordWildCards #
# LANGUAGE QuasiQuotes #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE OverloadedStrings #-}
# OPTIONS_GHC -fno - warn - orphans #
module Main where
import Ivory.Tower
import Ivory.Language
import Tower.AADL
import Ivory.Tower.Config
-- A small two-monitor tower used to exercise the AADL backend.
-- "periodicM" emits its local state byte once per millisecond on
-- channel 1; "withsharedM" stores each received byte, signals itself
-- over a second channel, and prints the stored value when the signal
-- arrives.
simpleTower :: Tower e ()
simpleTower = do
  towerModule towerDepModule
  towerDepends towerDepModule
  (c1in, c1out) <- channel
  (chtx, chrx) <- channel
  per <- period (Microseconds 1000)
  monitor "periodicM" $ do
    s <- state "local_st"
    handler per "tickh" $ do
      e <- emitter c1in 1
      callback $ \_ -> do
        emit e (constRef (s :: Ref 'Global ('Stored Uint8)))
  monitor "withsharedM" $ do
    s <- state "last_m2_chan1_message"
    handler c1out "fromActiveh" $ do
      e <- emitter chtx 1
      callback $ \m -> do
        -- Remember the most recent byte, then notify the second handler.
        refCopy s m
        emitV e true
    handler chrx "readStateh" $ do
      callback $ \_m -> do
        s' <- deref s
        call_ printf "rsh: %u\n" s'
--------------------------------------------------------------------------------
[ivory|
struct Foo { foo :: Stored Uint8 }
|]
fooMod :: Module
fooMod = package "foo" (defStruct (Proxy :: Proxy "Foo"))
simpleTower2 :: Tower e ()
simpleTower2 = do
towerModule fooMod
towerDepends fooMod
(c1in, c1out) <- channel
per <- period (Microseconds 1000)
monitor "m1" $ do
s <- state "local_st"
handler per "tick" $ do
e <- emitter c1in 1
callback $ \_ -> emit e (constRef (s :: Ref 'Global ('Struct "Foo")))
    -- callback $ \_ -> emit e (constRef (s :: Ref Global (Array 3 (Stored Uint8))))
monitor "m2" $ do
s <- state "last_m2_chan1_message"
handler c1out "chan1msg" $ do
callback $ \m ->
refCopy s m
--------------------------------------------------------------------------------
main :: IO ()
main = compileTowerAADL id p simpleTower
where
p topts = getConfig topts $ aadlConfigParser defaultAADLConfig
[ivory|
import (stdio.h, printf) void printf(string x, uint8_t y)
|]
towerDepModule :: Module
towerDepModule = package "towerDeps" $ do
incl printf
| null | https://raw.githubusercontent.com/GaloisInc/tower/a43f5e36c6443472ea2dc15bbd49faf8643a6f87/tower-aadl/test/Simple.hs | haskell | # LANGUAGE OverloadedStrings #
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | # LANGUAGE DataKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE QuasiQuotes #
# LANGUAGE RecordWildCards #
# LANGUAGE QuasiQuotes #
# LANGUAGE FlexibleInstances #
# OPTIONS_GHC -fno - warn - orphans #
module Main where
import Ivory.Tower
import Ivory.Language
import Tower.AADL
import Ivory.Tower.Config
simpleTower :: Tower e ()
simpleTower = do
towerModule towerDepModule
towerDepends towerDepModule
(c1in, c1out) <- channel
(chtx, chrx) <- channel
per <- period (Microseconds 1000)
monitor "periodicM" $ do
s <- state "local_st"
handler per "tickh" $ do
e <- emitter c1in 1
callback $ \_ -> do
emit e (constRef (s :: Ref 'Global ('Stored Uint8)))
monitor "withsharedM" $ do
s <- state "last_m2_chan1_message"
handler c1out "fromActiveh" $ do
e <- emitter chtx 1
callback $ \m -> do
refCopy s m
emitV e true
handler chrx "readStateh" $ do
callback $ \_m -> do
s' <- deref s
call_ printf "rsh: %u\n" s'
[ivory|
struct Foo { foo :: Stored Uint8 }
|]
fooMod :: Module
fooMod = package "foo" (defStruct (Proxy :: Proxy "Foo"))
simpleTower2 :: Tower e ()
simpleTower2 = do
towerModule fooMod
towerDepends fooMod
(c1in, c1out) <- channel
per <- period (Microseconds 1000)
monitor "m1" $ do
s <- state "local_st"
handler per "tick" $ do
e <- emitter c1in 1
callback $ \_ -> emit e (constRef (s :: Ref 'Global ('Struct "Foo")))
callback $ \ _ - > emit e ( constRef ( s : : Ref Global ( Array 3 ( Stored Uint8 ) ) ) )
monitor "m2" $ do
s <- state "last_m2_chan1_message"
handler c1out "chan1msg" $ do
callback $ \m ->
refCopy s m
main :: IO ()
main = compileTowerAADL id p simpleTower
where
p topts = getConfig topts $ aadlConfigParser defaultAADLConfig
[ivory|
import (stdio.h, printf) void printf(string x, uint8_t y)
|]
towerDepModule :: Module
towerDepModule = package "towerDeps" $ do
incl printf
|
bdcb9ce6ebf5f721ba3e871a419ad0a281ee5d6378557b8b01e294ef92ab6694 | rkallos/wrek | wrek_test_handler.erl | -module(wrek_test_handler).
-include("wrek_event.hrl").
-export([code_change/3,
handle_call/2,
handle_event/2,
handle_info/2,
init/1,
terminate/2]).
-behaviour(gen_event).
-record(state, {
caller = undefined :: pid(),
count = 0 :: integer(),
evts = [] :: [wrek_event()],
fail_mode = total :: total | partial
}).
%% callbacks
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
handle_call(get, State) ->
{ok, State#state.count, State};
handle_call(_, State) ->
{ok, ok, State}.
%% gen_event callback.  A {wrek, error} event ends the run immediately
%% when fail_mode is total; in partial mode errors are accumulated like
%% any other event.  A {wrek, done} event always ends the run.  Every
%% other event is counted and stashed (newest first).
handle_event(Evt = #wrek_event{type = {wrek, error}},
             State = #state{fail_mode = total}) ->
    finish(Evt, State);
handle_event(Evt = #wrek_event{type = {wrek, done}}, State) ->
    finish(Evt, State);
handle_event(Evt, State = #state{count = Count, evts = Evts}) ->
    {ok, State#state{count = Count + 1, evts = [Evt | Evts]}}.
handle_info(_, State) ->
{ok, State}.
init([FailMode, Caller]) ->
{ok, #state{caller = Caller, fail_mode = FailMode}}.
terminate(_, _State) ->
ok.
%% private
%% Deliver the final tally to the caller and detach this handler.
%% The terminating event is included in both the count and the list.
finish(FinalEvt, #state{caller = Caller, count = N, evts = Seen}) ->
    Summary = #{count => N + 1, evts => [FinalEvt | Seen]},
    Caller ! Summary,
    remove_handler.
| null | https://raw.githubusercontent.com/rkallos/wrek/3859e9efdf21227e6e8e0ea81095b229eceb6641/test/wrek_test_handler.erl | erlang | callbacks
private | -module(wrek_test_handler).
-include("wrek_event.hrl").
-export([code_change/3,
handle_call/2,
handle_event/2,
handle_info/2,
init/1,
terminate/2]).
-behaviour(gen_event).
-record(state, {
caller = undefined :: pid(),
count = 0 :: integer(),
evts = [] :: [wrek_event()],
fail_mode = total :: total | partial
}).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
handle_call(get, State) ->
{ok, State#state.count, State};
handle_call(_, State) ->
{ok, ok, State}.
handle_event(Evt = #wrek_event{type = {wrek, error}},
State = #state{fail_mode = total}) ->
finish(Evt, State);
handle_event(Evt = #wrek_event{type = {wrek, done}}, State) ->
finish(Evt, State);
handle_event(Evt, State = #state{count = Count, evts = Evts}) ->
{ok, State#state{count = Count + 1, evts = [Evt | Evts]}}.
handle_info(_, State) ->
{ok, State}.
init([FailMode, Caller]) ->
{ok, #state{caller = Caller, fail_mode = FailMode}}.
terminate(_, _State) ->
ok.
finish(Evt, State) ->
#state{
caller = Caller,
count = Count,
evts = Evts
} = State,
Caller ! #{count => Count + 1, evts => [Evt | Evts]},
remove_handler.
|
93076462861f725ce4d339021718cd498e9596f7ca3553b812d201f97742333c | juhp/pkgtreediff | PackageTreeDiff.hs | # LANGUAGE CPP #
-- | A library for pkgtreediff for comparing trees of rpm packages
module Distribution.RPM.PackageTreeDiff
(RPMPkgDiff(..),
Ignore(..),
diffPkgs,
-- * from rpm-nvr
NVRA(..),
readNVRA
) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
#endif
import Data.RPM.VerRel
import Data.RPM.NVRA
import Data.RPM.VerCmp (rpmVerCompare)
-- | Ignore describes how comparison is done
data Ignore = IgnoreNone -- ^ do not ignore version or release
| IgnoreRelease -- ^ ignore differences in release
| IgnoreVersion -- ^ ignore differences in version
deriving Eq
-- | RPMPkgDiff type encodes how a particular rpm package differs between trees
data RPMPkgDiff = PkgUpdate NVRA NVRA
| PkgDowngrade NVRA NVRA
| PkgAdd NVRA
| PkgDel NVRA
| PkgArch NVRA NVRA
deriving Eq
-- | Compare two lists of package NVRAs
-- Both lists must be sorted by package name: the function walks them
-- in lockstep (merge style), pairing packages with equal names and
-- emitting 'PkgDel' / 'PkgAdd' for names present on only one side.
diffPkgs :: Ignore -> [NVRA] -> [NVRA] -> [RPMPkgDiff]
diffPkgs _ [] [] = []
diffPkgs ignore (p:ps) [] = PkgDel p : diffPkgs ignore ps []
diffPkgs ignore [] (p:ps) = PkgAdd p : diffPkgs ignore [] ps
diffPkgs ignore (p1:ps1) (p2:ps2) =
  case compare (rpmName p1) (rpmName p2) of
    LT -> PkgDel p1 : diffPkgs ignore ps1 (p2:ps2)
    EQ -> case diffPkg of
            Just diff -> (diff :)
            Nothing -> id
          $ diffPkgs ignore ps1 ps2
    GT -> PkgAdd p2 : diffPkgs ignore (p1:ps1) ps2
  where
    -- Difference between two same-named packages, if any: an arch
    -- change trumps the version/release comparison.
    diffPkg :: Maybe RPMPkgDiff
    diffPkg =
      if rpmArch p1 == rpmArch p2
      then case cmpVR ignore (rpmVerRel p1) (rpmVerRel p2) of
             LT -> Just $ PkgUpdate p1 p2
             EQ -> Nothing
             GT -> Just $ PkgDowngrade p1 p2
      else Just $ PkgArch p1 p2
-- Compare two VerRels honouring the requested 'Ignore' level:
-- 'IgnoreNone' compares version and release, 'IgnoreRelease' compares
-- versions only (via rpmVerCompare), 'IgnoreVersion' treats everything
-- as equal.
cmpVR :: Ignore -> VerRel -> VerRel -> Ordering
cmpVR IgnoreNone vr vr' = compare vr vr'
cmpVR IgnoreRelease (VerRel v _) (VerRel v' _) = rpmVerCompare v v'
cmpVR IgnoreVersion _ _ = EQ
| null | https://raw.githubusercontent.com/juhp/pkgtreediff/87412ffee80f54a1372f96f045d952a6e901662e/src/Distribution/RPM/PackageTreeDiff.hs | haskell | * from rpm-nvr
| Ignore describes how comparison is done
^ do not ignore version or release
^ ignore differences in release
^ ignore differences in version
| RPMPkgDiff type encodes how a particular rpm package differs between trees
cmpVR True ignore release | # LANGUAGE CPP #
| A library for pkgtreediff for comparing trees of rpm packages
module Distribution.RPM.PackageTreeDiff
(RPMPkgDiff(..),
Ignore(..),
diffPkgs,
NVRA(..),
readNVRA
) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
#endif
import Data.RPM.VerRel
import Data.RPM.NVRA
import Data.RPM.VerCmp (rpmVerCompare)
deriving Eq
data RPMPkgDiff = PkgUpdate NVRA NVRA
| PkgDowngrade NVRA NVRA
| PkgAdd NVRA
| PkgDel NVRA
| PkgArch NVRA NVRA
deriving Eq
| Compare two lists of packages NVRAs
diffPkgs :: Ignore -> [NVRA] -> [NVRA] -> [RPMPkgDiff]
diffPkgs _ [] [] = []
diffPkgs ignore (p:ps) [] = PkgDel p : diffPkgs ignore ps []
diffPkgs ignore [] (p:ps) = PkgAdd p : diffPkgs ignore [] ps
diffPkgs ignore (p1:ps1) (p2:ps2) =
case compare (rpmName p1) (rpmName p2) of
LT -> PkgDel p1 : diffPkgs ignore ps1 (p2:ps2)
EQ -> case diffPkg of
Just diff -> (diff :)
Nothing -> id
$ diffPkgs ignore ps1 ps2
GT -> PkgAdd p2 : diffPkgs ignore (p1:ps1) ps2
where
diffPkg :: Maybe RPMPkgDiff
diffPkg =
if rpmArch p1 == rpmArch p2
then case cmpVR ignore (rpmVerRel p1) (rpmVerRel p2) of
LT -> Just $ PkgUpdate p1 p2
EQ -> Nothing
GT -> Just $ PkgDowngrade p1 p2
else Just $ PkgArch p1 p2
cmpVR :: Ignore -> VerRel -> VerRel -> Ordering
cmpVR IgnoreNone vr vr' = compare vr vr'
cmpVR IgnoreRelease (VerRel v _) (VerRel v' _) = rpmVerCompare v v'
cmpVR IgnoreVersion _ _ = EQ
|
472a6a08bef7d2b74b1630e46bf3adb849a08542434e92cd676b88fdb46932a9 | andreypopp/type-systems | infer.ml | open Base
open Expr
module Instance = struct
type t = { instance : qual_pred; witness : String.t }
end
module Typeclass = struct
type t = { typeclass : qual_pred; instances : Instance.t list }
end
module Env : sig
type t
(* Construction API. *)
val empty : t
val add : t -> String.t -> qual_ty -> t
val add_typeclass : t -> qual_pred -> t
val add_instance : t -> qual_pred -> String.t -> t
(* Query API. *)
val find : t -> String.t -> qual_ty option
val dependencies : t -> String.t -> qual_pred list
val instances : t -> String.t -> qual_pred list
end = struct
type t = {
env : (String.t, qual_ty, String.comparator_witness) Map.t;
typeclasses : (String.t, Typeclass.t, String.comparator_witness) Map.t;
}
let empty =
{ env = Map.empty (module String); typeclasses = Map.empty (module String) }
let add env name qty = { env with env = Map.set env.env ~key:name ~data:qty }
let add_typeclass env (qp : Expr.qual_pred) =
(* TODO: add checks *)
let _, (name, _) = qp in
{
env with
typeclasses =
Map.set env.typeclasses ~key:name
~data:{ typeclass = qp; instances = [] };
}
let add_instance env qp witness =
(* TODO: add checks *)
let _, (name, _) = qp in
let cls =
match Map.find env.typeclasses name with
| None -> failwith (Printf.sprintf "no such typeclass %s" name)
| Some cls -> cls
in
let cls =
{ cls with instances = { instance = qp; witness } :: cls.instances }
in
{ env with typeclasses = Map.set env.typeclasses ~key:name ~data:cls }
let dependencies env id =
let cls = Map.find_exn env.typeclasses id in
List.map (fst cls.typeclass) ~f:(fun (name, _) ->
let dep = Map.find_exn env.typeclasses name in
dep.typeclass)
let instances env id =
let cls = Map.find_exn env.typeclasses id in
List.map cls.instances ~f:(fun instance -> instance.instance)
let find env = Map.find env.env
end
type error =
| Error_unification of ty * ty
| Error_recursive_types
| Error_recursive_row_types
| Error_not_a_function of ty
| Error_unknown_name of string
| Error_arity_mismatch of ty * int * int
| Error_missing_typeclass_instance of pred
| Error_ambigious_predicate of pred
exception Type_error of error
let type_error err = raise (Type_error err)
let layout_error =
PPrint.(
function
| Error_recursive_types -> string "recursive types"
| Error_recursive_row_types -> string "recursive row types"
| Error_not_a_function ty ->
string "expected a function but got:" ^^ nest 2 (break 1 ^^ layout_ty ty)
| Error_unknown_name name -> string "unknown name: " ^^ string name
| Error_arity_mismatch (ty, expected, got) ->
string "arity mismatch: expected "
^^ string (Int.to_string expected)
^^ string " arguments but got "
^^ string (Int.to_string got)
^^ nest 2 (break 1 ^^ layout_ty ty)
| Error_unification (ty1, ty2) ->
string "unification error of"
^^ nest 2 (break 1 ^^ layout_ty ty1)
^^ (break 1 ^^ string "with")
^^ nest 2 (break 1 ^^ layout_ty ty2)
| Error_missing_typeclass_instance p ->
string "missing typeclass instance: " ^^ layout_pred p
| Error_ambigious_predicate p ->
string "ambigious predicate: " ^^ layout_pred p)
let pp_error = pp' layout_error
let show_error = show' layout_error
module Vars : sig
val newvar : lvl -> unit -> ty
val newrowvar : lvl -> unit -> ty_row
val reset_vars : unit -> unit
val newgenvar : unit -> ty
end = struct
let currentid = ref 0
let reset_vars () = currentid := 0
let newid () =
Int.incr currentid;
!currentid
let newvar lvl () =
Ty_var { contents = Ty_var_unbound { id = newid (); lvl } }
let newrowvar lvl () =
Ty_row_var { contents = Ty_var_unbound { id = newid (); lvl } }
let newgenvar () = Ty_var { contents = Ty_var_generic (newid ()) }
end
include Vars
(** Instantiation of type schemas into types.
This is done by replacing all generic type variables with fresh unbound type
variables.
*)
module Instantiate : sig
val instantiate_qual_ty : lvl -> qual_ty -> qual_ty
val instantiate_qual_pred : lvl -> qual_pred -> qual_pred
val instantiate_pred : lvl -> pred -> pred
end = struct
type ctx = {
lvl : Int.t;
vars : (Int.t, ty) Hashtbl.t;
rowvars : (Int.t, ty_row) Hashtbl.t;
}
let make_ctx lvl =
{
lvl;
vars = Hashtbl.create (module Int);
rowvars = Hashtbl.create (module Int);
}
let rec instantiate_ty' ctx (ty : ty) : ty =
match ty with
| Ty_const _ -> ty
| Ty_arr (ty_args, ty_ret) ->
Ty_arr
(List.map ty_args ~f:(instantiate_ty' ctx), instantiate_ty' ctx ty_ret)
| Ty_app (ty, ty_args) ->
Ty_app (instantiate_ty' ctx ty, List.map ty_args ~f:(instantiate_ty' ctx))
| Ty_var { contents = Ty_var_link ty } -> instantiate_ty' ctx ty
| Ty_var { contents = Ty_var_unbound _ } -> ty
| Ty_var { contents = Ty_var_generic id } ->
Hashtbl.find_or_add ctx.vars id ~default:(newvar ctx.lvl)
| Ty_record row -> Ty_record (instantiate_ty_row' ctx row)
and instantiate_ty_row' ctx (ty_row : ty_row) =
match ty_row with
| Ty_row_field (name, ty, ty_row) ->
Ty_row_field (name, instantiate_ty' ctx ty, instantiate_ty_row' ctx ty_row)
| Ty_row_empty -> ty_row
| Ty_row_var { contents = Ty_var_link ty_row } ->
instantiate_ty_row' ctx ty_row
| Ty_row_var { contents = Ty_var_unbound _ } -> ty_row
| Ty_row_var { contents = Ty_var_generic id } ->
Hashtbl.find_or_add ctx.rowvars id ~default:(newrowvar ctx.lvl)
| Ty_row_const _ -> assert false
and instantiate_pred' ctx (name, args) =
(name, List.map args ~f:(instantiate_ty' ctx))
and instantiate_qual_ty' ctx qty =
let preds, ty = qty in
(List.map preds ~f:(instantiate_pred' ctx), instantiate_ty' ctx ty)
let instantiate_pred lvl p =
let ctx = make_ctx lvl in
instantiate_pred' ctx p
let instantiate_qual_ty lvl qty =
let ctx = make_ctx lvl in
instantiate_qual_ty' ctx qty
let instantiate_qual_pred lvl (ps, p) =
let ctx = make_ctx lvl in
(List.map ps ~f:(instantiate_pred' ctx), instantiate_pred' ctx p)
end
include Instantiate
module Pred_solver : sig
val solve_preds :
lvl ->
Env.t ->
(Ty_var_unbound.t, Ty_var_unbound.comparator_witness) Set.t ->
pred list ->
pred list * pred list
(** Solve a set of predicates.
This raises a [Type_error] in case it cannot find a suitable instance for
a ground predicate or if a predicate is ambigious.
The function returns a pair of predicate sets [deferred, retained] where
[retained] should be generalized while [deferred] should be propagated
upwards.
*)
end = struct
let match_ty ty1 ty2 =
invariant : this destructs only
(* TODO: handle closed record types. *)
let rec aux ty1 ty2 : bool =
if phys_equal ty1 ty2 then true
else
match (ty1, ty2) with
| ty1, Ty_var { contents = Ty_var_link ty2 } -> aux ty1 ty2
| Ty_app (f1, args1), Ty_app (f2, args2) ->
aux f1 f2
&& List.length args1 = List.length args2
&& List.for_all2_exn args1 args2 ~f:(fun ty1 ty2 -> aux ty1 ty2)
| Ty_var { contents = Ty_var_link ty1 }, ty2 -> aux ty1 ty2
| Ty_var ({ contents = Ty_var_unbound _ } as var), ty2 ->
var := Ty_var_link ty2;
true
| Ty_var { contents = Ty_var_generic _ }, _ ->
failwith "uninstantiated type variable"
| Ty_const name1, Ty_const name2 -> String.(name1 = name2)
| _, _ -> false
in
aux ty1 ty2
let match_pred (name1, args1) (name2, args2) =
if not String.(name1 = name2) then false
else
let rec aux args1 args2 =
match (args1, args2) with
| [], [] -> true
| [], _ -> false
| _, [] -> false
| a1 :: args1, a2 :: args2 -> match_ty a1 a2 && aux args1 args2
in
aux args1 args2
let entailments_of_dependencies _lvl env pred =
(* TODO: need to return a list of all things here *)
let rec aux entailments pred =
let dependencies = Env.dependencies env (fst pred) in
List.fold dependencies ~init:(pred :: entailments)
~f:(fun entailments dep -> aux entailments (snd dep))
in
aux [] pred
Try each instance of the class and on first match return the list of
dependencies .
We are looking for the first match becuase we are supposed to have
non - overlapping instances in the environment ( that 's a TODO to enforce this
invatiant on environment construction ) .
dependencies.
We are looking for the first match becuase we are supposed to have
non-overlapping instances in the environment (that's a TODO to enforce this
invatiant on environment construction). *)
let entailments_of_instances lvl env pred =
let rec aux = function
| [] -> None
| q :: qs ->
let deps', pred' = instantiate_qual_pred lvl q in
if match_pred pred' pred then Some deps' else aux qs
in
aux (Env.instances env (fst pred))
Entailment relation between predicates .
[ entail lvl env ps p ] returns [ true ] in case predicates [ ps ] are enough to
establish [ p ] predicate .
[entail lvl env ps p] returns [true] in case predicates [ps] are enough to
establish [p] predicate. *)
let rec entail lvl env ps p =
let rec inspect_dependencies = function
| [] -> false
| q :: qs ->
let deps = entailments_of_dependencies lvl env q in
List.exists deps ~f:(fun dep ->
let dep = instantiate_pred lvl dep in
match_pred dep p)
|| inspect_dependencies qs
in
inspect_dependencies ps
||
match entailments_of_instances lvl env p with
| None -> false
| Some qs -> List.for_all qs ~f:(fun q -> entail lvl env ps q)
Check that a predicate in a head normal form ( HNF ) .
A predicate is in HNF if all its arguments are type variables ( this HNF
definition is specific for languages with first order polymorphism only ) .
A predicate is in HNF if all its arguments are type variables (this HNF
definition is specific for languages with first order polymorphism only). *)
let is_hnf (_name, args) =
let rec aux = function
| Ty_var { contents = Ty_var_link ty } -> aux ty
| Ty_var { contents = Ty_var_generic _ } -> assert false
| Ty_var { contents = Ty_var_unbound _ } -> true
| Ty_app _ -> false
| Ty_arr _ -> false
| Ty_const _ -> false
| Ty_record _ -> false
in
List.for_all args ~f:aux
Try to convert a predicate into a HNF .
Raises a type error if some instances are missing .
Raises a type error if some instances are missing. *)
let rec to_hnf lvl env p =
if is_hnf p then [ p ]
else
match entailments_of_instances lvl env p with
| None -> type_error (Error_missing_typeclass_instance p)
| Some ps -> to_hnfs lvl env ps
and to_hnfs lvl env ps = List.concat (List.map ps ~f:(to_hnf lvl env))
Simplify a list of predicates .
Simplification is performed by removing those predicates which can be
inferred from other predicates in the same list ( for which an entailment
relation holds ) .
Simplification is performed by removing those predicates which can be
inferred from other predicates in the same list (for which an entailment
relation holds). *)
let simplify lvl env ps =
let rec aux simplified = function
| [] -> simplified
| p :: ps ->
if entail lvl env (simplified @ ps) p then aux simplified ps
else aux (p :: simplified) ps
in
aux [] ps
(* Reduce a list of predicates. *)
let reduce lvl env ps =
let ps = to_hnfs lvl env ps in
simplify lvl env ps
let ty_vars ((_name, args) as p) =
let rec inspect = function
| Ty_var { contents = Ty_var_unbound tv } -> tv
| Ty_var { contents = Ty_var_link ty } -> inspect ty
| _ -> failwith (Printf.sprintf "predicate not in HNF: %s" (show_pred p))
in
List.map args ~f:inspect
let solve_preds lvl env vars ps =
let ps = reduce lvl env ps in
let should_defer p =
List.for_all (ty_vars p) ~f:(fun tv -> tv.lvl <= lvl)
in
let rec aux (deferred, retained) = function
| [] -> (deferred, retained)
| p :: ps ->
if should_defer p then aux (p :: deferred, retained) ps
else
let not_in_vars tv = not (Set.mem vars tv) in
if List.exists (ty_vars p) ~f:not_in_vars then
type_error (Error_ambigious_predicate p);
aux (deferred, p :: retained) ps
in
aux ([], []) ps
end
include Pred_solver
let generalize lvl env (qty : qual_ty) =
let generalize_ty ty =
(* Along with generalizing the type we also find all unbound type variables
which we later use to check predicates for ambiguity. *)
let seen = ref (Set.empty (module Ty_var_unbound)) in
let mark id = Ref.replace seen (fun seen -> Set.add seen id) in
let rec generalize_ty ty =
match ty with
| Ty_const _ -> ty
| Ty_arr (ty_args, ty_ret) ->
Ty_arr (List.map ty_args ~f:generalize_ty, generalize_ty ty_ret)
| Ty_app (ty, ty_args) ->
Ty_app (generalize_ty ty, List.map ty_args ~f:generalize_ty)
| Ty_var { contents = Ty_var_link ty } -> generalize_ty ty
| Ty_var { contents = Ty_var_generic _ } -> ty
| Ty_var { contents = Ty_var_unbound tv } ->
mark tv;
if tv.lvl > lvl then Ty_var { contents = Ty_var_generic tv.id } else ty
| Ty_record row -> Ty_record (generalize_ty_row row)
and generalize_ty_row (ty_row : ty_row) =
match ty_row with
| Ty_row_field (name, ty, row) ->
Ty_row_field (name, generalize_ty ty, generalize_ty_row row)
| Ty_row_empty -> ty_row
| Ty_row_var { contents = Ty_var_link ty_row } -> generalize_ty_row ty_row
| Ty_row_var { contents = Ty_var_generic _ } -> ty_row
| Ty_row_var { contents = Ty_var_unbound { id; lvl = var_lvl } } ->
if var_lvl > lvl then Ty_row_var { contents = Ty_var_generic id }
else ty_row
| Ty_row_const _ -> assert false
in
let ty = generalize_ty ty in
(ty, !seen)
in
let generalize_pred (name, args) =
let args = List.map args ~f:(fun ty -> fst (generalize_ty ty)) in
(name, args)
in
let ps, ty = qty in
let ty, vars = generalize_ty ty in
let deferred, retained = solve_preds lvl env vars ps in
(deferred @ List.map retained ~f:generalize_pred, ty)
let occurs_check lvl id ty =
let rec occurs_check_ty (ty : ty) : unit =
match ty with
| Ty_const _ -> ()
| Ty_arr (args, ret) ->
List.iter args ~f:occurs_check_ty;
occurs_check_ty ret
| Ty_app (f, args) ->
occurs_check_ty f;
List.iter args ~f:occurs_check_ty
| Ty_var { contents = Ty_var_link ty } -> occurs_check_ty ty
| Ty_var { contents = Ty_var_generic _ } -> ()
| Ty_var ({ contents = Ty_var_unbound v } as var) ->
if v.id = id then type_error Error_recursive_types
else if lvl < v.lvl then var := Ty_var_unbound { id = v.id; lvl }
else ()
| Ty_record ty_row -> occurs_check_ty_row ty_row
and occurs_check_ty_row (ty_row : ty_row) : unit =
match ty_row with
| Ty_row_field (_name, ty, ty_row) ->
occurs_check_ty ty;
occurs_check_ty_row ty_row
| Ty_row_empty -> ()
| Ty_row_var { contents = Ty_var_link ty_row } -> occurs_check_ty_row ty_row
| Ty_row_var { contents = Ty_var_generic _ } -> ()
| Ty_row_var ({ contents = Ty_var_unbound v } as var) ->
if v.id = id then type_error Error_recursive_types
else if lvl < v.lvl then var := Ty_var_unbound { id = v.id; lvl }
else ()
| Ty_row_const _ -> assert false
in
occurs_check_ty ty
let rec unify (ty1 : ty) (ty2 : ty) =
if phys_equal ty1 ty2 then ()
else
match (ty1, ty2) with
| Ty_const name1, Ty_const name2 ->
if not String.(name1 = name2) then
type_error (Error_unification (ty1, ty2))
| Ty_arr (args1, ret1), Ty_arr (args2, ret2) -> (
match List.iter2 args1 args2 ~f:unify with
| Unequal_lengths ->
type_error
(Error_arity_mismatch (ty1, List.length args2, List.length args1))
| Ok () -> unify ret1 ret2)
| Ty_app (f1, args1), Ty_app (f2, args2) ->
unify f1 f2;
List.iter2_exn args1 args2 ~f:unify
| Ty_record row1, Ty_record row2 -> unify_row row1 row2
| Ty_var { contents = Ty_var_link ty1 }, ty2
| ty1, Ty_var { contents = Ty_var_link ty2 } ->
unify ty1 ty2
| Ty_var ({ contents = Ty_var_unbound { id; lvl } } as var), ty
| ty, Ty_var ({ contents = Ty_var_unbound { id; lvl } } as var) ->
occurs_check lvl id ty;
var := Ty_var_link ty
| ty1, ty2 -> type_error (Error_unification (ty1, ty2))
and unify_row row1 row2 =
if phys_equal row1 row2 then ()
else
match (row1, row2) with
| Ty_row_empty, Ty_row_empty -> ()
| Ty_row_field (name, ty, row1), Ty_row_field _ ->
let exception Row_rewrite_error in
let rec rewrite = function
| Ty_row_empty -> raise Row_rewrite_error
| Ty_row_field (name', ty', row') ->
if String.(name = name') then (
unify ty ty';
row')
else Ty_row_field (name', ty', rewrite row')
| Ty_row_var { contents = Ty_var_link row' } -> rewrite row'
| Ty_row_var ({ contents = Ty_var_unbound { id = _; lvl } } as var) ->
let row' = newrowvar lvl () in
var := Ty_var_link (Ty_row_field (name, ty, row'));
row'
| Ty_row_var { contents = Ty_var_generic _ } ->
failwith "non instantiated row variable"
| Ty_row_const _ -> assert false
in
let row1_unbound =
match row1 with
| Ty_row_var ({ contents = Ty_var_unbound _ } as var) -> Some var
| _ -> None
in
let row2 =
try rewrite row2 with
| Row_rewrite_error ->
type_error (Error_unification (Ty_record row1, Ty_record row2))
in
(match row1_unbound with
| Some { contents = Ty_var_link _ } ->
type_error Error_recursive_row_types
| _ -> ());
unify_row row1 row2
| Ty_row_var { contents = Ty_var_link row1 }, row2
| row2, Ty_row_var { contents = Ty_var_link row1 } ->
unify_row row1 row2
| Ty_row_var ({ contents = Ty_var_unbound { id; lvl } } as var), row
| row, Ty_row_var ({ contents = Ty_var_unbound { id; lvl } } as var) ->
occurs_check lvl id (Ty_record row);
var := Ty_var_link row
| row1, row2 ->
type_error (Error_unification (Ty_record row1, Ty_record row2))
let rec unify_abs arity ty =
match ty with
| Ty_arr (ty_args, ty_ret) ->
if List.length ty_args <> arity then
type_error (Error_arity_mismatch (ty, List.length ty_args, arity));
(ty_args, ty_ret)
| Ty_var var -> (
match !var with
| Ty_var_link ty -> unify_abs arity ty
| Ty_var_unbound v ->
let ty_ret = newvar v.lvl () in
let ty_args = List.init arity ~f:(fun _ -> newvar v.lvl ()) in
var := Ty_var_link (Ty_arr (ty_args, ty_ret));
(ty_args, ty_ret)
| Ty_var_generic _ -> failwith "uninstantiated generic type")
| Ty_app _
| Ty_const _
| Ty_record _ ->
type_error (Error_not_a_function ty)
let rec infer' lvl (env : Env.t) (e : expr) =
match e with
| Expr_name name ->
let qty =
match Env.find env name with
| Some ty -> ty
| None -> type_error (Error_unknown_name name)
in
instantiate_qual_ty lvl qty
| Expr_abs (args, body) ->
let ty_args = List.map args ~f:(fun _ -> newvar lvl ()) in
let cs, ty_body =
let env =
List.fold_left (List.zip_exn args ty_args) ~init:env
~f:(fun env (arg, ty_arg) -> Env.add env arg ([], ty_arg))
in
infer' lvl env body
in
(cs, Ty_arr (ty_args, ty_body))
| Expr_app (func, args) ->
let cs, ty_func = infer' lvl env func in
let ty_args, ty_ret = unify_abs (List.length args) ty_func in
let cs =
List.fold2_exn args ty_args ~init:cs ~f:(fun cs arg ty_arg ->
let cs', ty = infer' lvl env arg in
unify ty ty_arg;
cs @ cs')
in
(cs, ty_ret)
| Expr_let (name, e, b) ->
let ty_e = infer' (lvl + 1) env e in
let ty_e = generalize lvl env ty_e in
let env = Env.add env name ty_e in
infer' lvl env b
| Expr_let_rec (name, e, b) ->
let ty_e =
(* fix : a . (a -> a) -> a *)
let ty_ret = newvar lvl () in
let ty_fun = Ty_arr ([ ty_ret ], ty_ret) in
let cs, ty_fun' = infer' (lvl + 1) env (Expr_abs ([ name ], e)) in
unify ty_fun' ty_fun;
(cs, ty_ret)
in
let ty_e = generalize lvl env ty_e in
let env = Env.add env name ty_e in
infer' lvl env b
| Expr_record fields ->
let cs, ty_row =
List.fold_left fields ~init:([], Ty_row_empty)
~f:(fun (cs, row) (label, e) ->
let cs', ty_e = infer' lvl env e in
(cs @ cs', Ty_row_field (label, ty_e, row)))
in
(cs, Ty_record ty_row)
| Expr_record_proj (e, label) ->
let cs, ty_e = infer' lvl env e in
let ty_proj = newvar lvl () in
unify ty_e (Ty_record (Ty_row_field (label, ty_proj, newrowvar lvl ())));
(cs, ty_proj)
| Expr_record_extend (e, fields) ->
let ty_row = newrowvar lvl () in
let cs, return_ty_row =
List.fold_left fields ~init:([], ty_row)
~f:(fun (cs, ty_row) (label, e) ->
let ty_e = newvar lvl () in
let cs', ty_e' = infer' lvl env e in
unify ty_e ty_e';
(cs @ cs', Ty_row_field (label, ty_e, ty_row)))
in
let cs', ty_e' = infer' lvl env e in
unify (Ty_record ty_row) ty_e';
(cs @ cs', Ty_record return_ty_row)
| Expr_record_update (e, fields) ->
let ty_row = newrowvar lvl () in
let return_ty_row, to_unify =
List.fold fields ~init:(ty_row, [])
~f:(fun (ty_row, to_unify) (label, e) ->
let ty_e = newvar lvl () in
(Ty_row_field (label, ty_e, ty_row), (e, ty_e) :: to_unify))
in
let cs, ty_e = infer' lvl env e in
unify (Ty_record return_ty_row) ty_e;
let cs =
List.fold (List.rev to_unify) ~init:cs ~f:(fun cs (e, ty_e) ->
let cs', ty_e' = infer' lvl env e in
unify ty_e ty_e';
cs @ cs')
in
(cs, Ty_record return_ty_row)
| Expr_lit (Lit_string _) -> ([], Ty_const "string")
| Expr_lit (Lit_int _) -> ([], Ty_const "int")
let infer env e =
let qty = infer' 0 env e in
generalize (-1) env qty
| null | https://raw.githubusercontent.com/andreypopp/type-systems/9be1fc78b441f5c2ccf0302993a2b9f08bc16fc2/algo_w/infer.ml | ocaml | Construction API.
Query API.
TODO: add checks
TODO: add checks
* Instantiation of type schemas into types.
This is done by replacing all generic type variables with fresh unbound type
variables.
* Solve a set of predicates.
This raises a [Type_error] in case it cannot find a suitable instance for
a ground predicate or if a predicate is ambigious.
The function returns a pair of predicate sets [deferred, retained] where
[retained] should be generalized while [deferred] should be propagated
upwards.
TODO: handle closed record types.
TODO: need to return a list of all things here
Reduce a list of predicates.
Along with generalizing the type we also find all unbound type variables
which we later use to check predicates for ambiguity.
fix : a . (a -> a) -> a | open Base
open Expr
module Instance = struct
type t = { instance : qual_pred; witness : String.t }
end
module Typeclass = struct
type t = { typeclass : qual_pred; instances : Instance.t list }
end
module Env : sig
type t
val empty : t
val add : t -> String.t -> qual_ty -> t
val add_typeclass : t -> qual_pred -> t
val add_instance : t -> qual_pred -> String.t -> t
val find : t -> String.t -> qual_ty option
val dependencies : t -> String.t -> qual_pred list
val instances : t -> String.t -> qual_pred list
end = struct
type t = {
env : (String.t, qual_ty, String.comparator_witness) Map.t;
typeclasses : (String.t, Typeclass.t, String.comparator_witness) Map.t;
}
let empty =
{ env = Map.empty (module String); typeclasses = Map.empty (module String) }
let add env name qty = { env with env = Map.set env.env ~key:name ~data:qty }
let add_typeclass env (qp : Expr.qual_pred) =
let _, (name, _) = qp in
{
env with
typeclasses =
Map.set env.typeclasses ~key:name
~data:{ typeclass = qp; instances = [] };
}
let add_instance env qp witness =
let _, (name, _) = qp in
let cls =
match Map.find env.typeclasses name with
| None -> failwith (Printf.sprintf "no such typeclass %s" name)
| Some cls -> cls
in
let cls =
{ cls with instances = { instance = qp; witness } :: cls.instances }
in
{ env with typeclasses = Map.set env.typeclasses ~key:name ~data:cls }
let dependencies env id =
let cls = Map.find_exn env.typeclasses id in
List.map (fst cls.typeclass) ~f:(fun (name, _) ->
let dep = Map.find_exn env.typeclasses name in
dep.typeclass)
let instances env id =
let cls = Map.find_exn env.typeclasses id in
List.map cls.instances ~f:(fun instance -> instance.instance)
let find env = Map.find env.env
end
type error =
| Error_unification of ty * ty
| Error_recursive_types
| Error_recursive_row_types
| Error_not_a_function of ty
| Error_unknown_name of string
| Error_arity_mismatch of ty * int * int
| Error_missing_typeclass_instance of pred
| Error_ambigious_predicate of pred
exception Type_error of error
let type_error err = raise (Type_error err)
let layout_error =
PPrint.(
function
| Error_recursive_types -> string "recursive types"
| Error_recursive_row_types -> string "recursive row types"
| Error_not_a_function ty ->
string "expected a function but got:" ^^ nest 2 (break 1 ^^ layout_ty ty)
| Error_unknown_name name -> string "unknown name: " ^^ string name
| Error_arity_mismatch (ty, expected, got) ->
string "arity mismatch: expected "
^^ string (Int.to_string expected)
^^ string " arguments but got "
^^ string (Int.to_string got)
^^ nest 2 (break 1 ^^ layout_ty ty)
| Error_unification (ty1, ty2) ->
string "unification error of"
^^ nest 2 (break 1 ^^ layout_ty ty1)
^^ (break 1 ^^ string "with")
^^ nest 2 (break 1 ^^ layout_ty ty2)
| Error_missing_typeclass_instance p ->
string "missing typeclass instance: " ^^ layout_pred p
| Error_ambigious_predicate p ->
string "ambigious predicate: " ^^ layout_pred p)
let pp_error = pp' layout_error
let show_error = show' layout_error
module Vars : sig
val newvar : lvl -> unit -> ty
val newrowvar : lvl -> unit -> ty_row
val reset_vars : unit -> unit
val newgenvar : unit -> ty
end = struct
let currentid = ref 0
let reset_vars () = currentid := 0
let newid () =
Int.incr currentid;
!currentid
let newvar lvl () =
Ty_var { contents = Ty_var_unbound { id = newid (); lvl } }
let newrowvar lvl () =
Ty_row_var { contents = Ty_var_unbound { id = newid (); lvl } }
let newgenvar () = Ty_var { contents = Ty_var_generic (newid ()) }
end
include Vars
module Instantiate : sig
val instantiate_qual_ty : lvl -> qual_ty -> qual_ty
val instantiate_qual_pred : lvl -> qual_pred -> qual_pred
val instantiate_pred : lvl -> pred -> pred
end = struct
type ctx = {
lvl : Int.t;
vars : (Int.t, ty) Hashtbl.t;
rowvars : (Int.t, ty_row) Hashtbl.t;
}
let make_ctx lvl =
{
lvl;
vars = Hashtbl.create (module Int);
rowvars = Hashtbl.create (module Int);
}
let rec instantiate_ty' ctx (ty : ty) : ty =
match ty with
| Ty_const _ -> ty
| Ty_arr (ty_args, ty_ret) ->
Ty_arr
(List.map ty_args ~f:(instantiate_ty' ctx), instantiate_ty' ctx ty_ret)
| Ty_app (ty, ty_args) ->
Ty_app (instantiate_ty' ctx ty, List.map ty_args ~f:(instantiate_ty' ctx))
| Ty_var { contents = Ty_var_link ty } -> instantiate_ty' ctx ty
| Ty_var { contents = Ty_var_unbound _ } -> ty
| Ty_var { contents = Ty_var_generic id } ->
Hashtbl.find_or_add ctx.vars id ~default:(newvar ctx.lvl)
| Ty_record row -> Ty_record (instantiate_ty_row' ctx row)
and instantiate_ty_row' ctx (ty_row : ty_row) =
match ty_row with
| Ty_row_field (name, ty, ty_row) ->
Ty_row_field (name, instantiate_ty' ctx ty, instantiate_ty_row' ctx ty_row)
| Ty_row_empty -> ty_row
| Ty_row_var { contents = Ty_var_link ty_row } ->
instantiate_ty_row' ctx ty_row
| Ty_row_var { contents = Ty_var_unbound _ } -> ty_row
| Ty_row_var { contents = Ty_var_generic id } ->
Hashtbl.find_or_add ctx.rowvars id ~default:(newrowvar ctx.lvl)
| Ty_row_const _ -> assert false
and instantiate_pred' ctx (name, args) =
(name, List.map args ~f:(instantiate_ty' ctx))
and instantiate_qual_ty' ctx qty =
let preds, ty = qty in
(List.map preds ~f:(instantiate_pred' ctx), instantiate_ty' ctx ty)
let instantiate_pred lvl p =
let ctx = make_ctx lvl in
instantiate_pred' ctx p
let instantiate_qual_ty lvl qty =
let ctx = make_ctx lvl in
instantiate_qual_ty' ctx qty
let instantiate_qual_pred lvl (ps, p) =
let ctx = make_ctx lvl in
(List.map ps ~f:(instantiate_pred' ctx), instantiate_pred' ctx p)
end
include Instantiate
module Pred_solver : sig
val solve_preds :
lvl ->
Env.t ->
(Ty_var_unbound.t, Ty_var_unbound.comparator_witness) Set.t ->
pred list ->
pred list * pred list
end = struct
let match_ty ty1 ty2 =
invariant : this destructs only
let rec aux ty1 ty2 : bool =
if phys_equal ty1 ty2 then true
else
match (ty1, ty2) with
| ty1, Ty_var { contents = Ty_var_link ty2 } -> aux ty1 ty2
| Ty_app (f1, args1), Ty_app (f2, args2) ->
aux f1 f2
&& List.length args1 = List.length args2
&& List.for_all2_exn args1 args2 ~f:(fun ty1 ty2 -> aux ty1 ty2)
| Ty_var { contents = Ty_var_link ty1 }, ty2 -> aux ty1 ty2
| Ty_var ({ contents = Ty_var_unbound _ } as var), ty2 ->
var := Ty_var_link ty2;
true
| Ty_var { contents = Ty_var_generic _ }, _ ->
failwith "uninstantiated type variable"
| Ty_const name1, Ty_const name2 -> String.(name1 = name2)
| _, _ -> false
in
aux ty1 ty2
let match_pred (name1, args1) (name2, args2) =
if not String.(name1 = name2) then false
else
let rec aux args1 args2 =
match (args1, args2) with
| [], [] -> true
| [], _ -> false
| _, [] -> false
| a1 :: args1, a2 :: args2 -> match_ty a1 a2 && aux args1 args2
in
aux args1 args2
let entailments_of_dependencies _lvl env pred =
let rec aux entailments pred =
let dependencies = Env.dependencies env (fst pred) in
List.fold dependencies ~init:(pred :: entailments)
~f:(fun entailments dep -> aux entailments (snd dep))
in
aux [] pred
Try each instance of the class and on first match return the list of
dependencies .
We are looking for the first match becuase we are supposed to have
non - overlapping instances in the environment ( that 's a TODO to enforce this
invatiant on environment construction ) .
dependencies.
We are looking for the first match becuase we are supposed to have
non-overlapping instances in the environment (that's a TODO to enforce this
invatiant on environment construction). *)
let entailments_of_instances lvl env pred =
let rec aux = function
| [] -> None
| q :: qs ->
let deps', pred' = instantiate_qual_pred lvl q in
if match_pred pred' pred then Some deps' else aux qs
in
aux (Env.instances env (fst pred))
Entailment relation between predicates .
[ entail lvl env ps p ] returns [ true ] in case predicates [ ps ] are enough to
establish [ p ] predicate .
[entail lvl env ps p] returns [true] in case predicates [ps] are enough to
establish [p] predicate. *)
let rec entail lvl env ps p =
let rec inspect_dependencies = function
| [] -> false
| q :: qs ->
let deps = entailments_of_dependencies lvl env q in
List.exists deps ~f:(fun dep ->
let dep = instantiate_pred lvl dep in
match_pred dep p)
|| inspect_dependencies qs
in
inspect_dependencies ps
||
match entailments_of_instances lvl env p with
| None -> false
| Some qs -> List.for_all qs ~f:(fun q -> entail lvl env ps q)
Check that a predicate in a head normal form ( HNF ) .
A predicate is in HNF if all its arguments are type variables ( this HNF
definition is specific for languages with first order polymorphism only ) .
A predicate is in HNF if all its arguments are type variables (this HNF
definition is specific for languages with first order polymorphism only). *)
let is_hnf (_name, args) =
let rec aux = function
| Ty_var { contents = Ty_var_link ty } -> aux ty
| Ty_var { contents = Ty_var_generic _ } -> assert false
| Ty_var { contents = Ty_var_unbound _ } -> true
| Ty_app _ -> false
| Ty_arr _ -> false
| Ty_const _ -> false
| Ty_record _ -> false
in
List.for_all args ~f:aux
Try to convert a predicate into a HNF .
Raises a type error if some instances are missing .
Raises a type error if some instances are missing. *)
let rec to_hnf lvl env p =
if is_hnf p then [ p ]
else
match entailments_of_instances lvl env p with
| None -> type_error (Error_missing_typeclass_instance p)
| Some ps -> to_hnfs lvl env ps
and to_hnfs lvl env ps = List.concat (List.map ps ~f:(to_hnf lvl env))
Simplify a list of predicates .
Simplification is performed by removing those predicates which can be
inferred from other predicates in the same list ( for which an entailment
relation holds ) .
Simplification is performed by removing those predicates which can be
inferred from other predicates in the same list (for which an entailment
relation holds). *)
let simplify lvl env ps =
  (* Keep a predicate only when it cannot be entailed by the rest: the
     ones already kept plus the ones still to inspect. *)
  let rec aux simplified = function
    | [] -> simplified
    | p :: ps ->
      if entail lvl env (simplified @ ps) p then aux simplified ps
      else aux (p :: simplified) ps
  in
  aux [] ps
(* Context reduction: bring every predicate into HNF, then drop the
   redundant ones. *)
let reduce lvl env ps = simplify lvl env (to_hnfs lvl env ps)
let ty_vars ((_name, args) as p) =
  (* Unbound type variables of a predicate's arguments.  Only valid for a
     predicate in HNF: a non-variable argument is a programming error. *)
  let rec inspect = function
    | Ty_var { contents = Ty_var_unbound tv } -> tv
    | Ty_var { contents = Ty_var_link ty } -> inspect ty
    | _ -> failwith (Printf.sprintf "predicate not in HNF: %s" (show_pred p))
  in
  List.map args ~f:inspect
let solve_preds lvl env vars ps =
  (* Context-reduce [ps], then split the result into:
     - deferred: predicates whose variables all live at or below [lvl],
       i.e. they concern the enclosing scope and are propagated outward;
     - retained: predicates that stay attached to the generalized type.
       A retained predicate mentioning a variable outside [vars] (the
       variables being generalized) is ambiguous. *)
  let ps = reduce lvl env ps in
  let should_defer p =
    List.for_all (ty_vars p) ~f:(fun tv -> tv.lvl <= lvl)
  in
  let rec aux (deferred, retained) = function
    | [] -> (deferred, retained)
    | p :: ps ->
      if should_defer p then aux (p :: deferred, retained) ps
      else
        let not_in_vars tv = not (Set.mem vars tv) in
        if List.exists (ty_vars p) ~f:not_in_vars then
          type_error (Error_ambigious_predicate p);
        aux (deferred, p :: retained) ps
  in
  aux ([], []) ps
end
include Pred_solver
let generalize lvl env (qty : qual_ty) =
  (* Generalize qualified type [qty] at level [lvl]: every unbound
     variable deeper than [lvl] becomes generic.  [generalize_ty] also
     collects the set of unbound variables it encountered so that
     [solve_preds] can split and validate the predicates. *)
  let generalize_ty ty =
    let seen = ref (Set.empty (module Ty_var_unbound)) in
    let mark id = Ref.replace seen (fun seen -> Set.add seen id) in
    let rec generalize_ty ty =
      match ty with
      | Ty_const _ -> ty
      | Ty_arr (ty_args, ty_ret) ->
        Ty_arr (List.map ty_args ~f:generalize_ty, generalize_ty ty_ret)
      | Ty_app (ty, ty_args) ->
        Ty_app (generalize_ty ty, List.map ty_args ~f:generalize_ty)
      | Ty_var { contents = Ty_var_link ty } -> generalize_ty ty
      | Ty_var { contents = Ty_var_generic _ } -> ty
      | Ty_var { contents = Ty_var_unbound tv } ->
        mark tv;
        (* deeper-than-current-level variables are the ones to quantify *)
        if tv.lvl > lvl then Ty_var { contents = Ty_var_generic tv.id } else ty
      | Ty_record row -> Ty_record (generalize_ty_row row)
    and generalize_ty_row (ty_row : ty_row) =
      match ty_row with
      | Ty_row_field (name, ty, row) ->
        Ty_row_field (name, generalize_ty ty, generalize_ty_row row)
      | Ty_row_empty -> ty_row
      | Ty_row_var { contents = Ty_var_link ty_row } -> generalize_ty_row ty_row
      | Ty_row_var { contents = Ty_var_generic _ } -> ty_row
      | Ty_row_var { contents = Ty_var_unbound { id; lvl = var_lvl } } ->
        if var_lvl > lvl then Ty_row_var { contents = Ty_var_generic id }
        else ty_row
      | Ty_row_const _ -> assert false
    in
    let ty = generalize_ty ty in
    (ty, !seen)
  in
  let generalize_pred (name, args) =
    (* Only the rewritten argument types matter here; the per-call [seen]
       set is discarded. *)
    let args = List.map args ~f:(fun ty -> fst (generalize_ty ty)) in
    (name, args)
  in
  let ps, ty = qty in
  let ty, vars = generalize_ty ty in
  (* Deferred predicates float outward untouched; retained ones stay on
     the generalized type. *)
  let deferred, retained = solve_preds lvl env vars ps in
  (deferred @ List.map retained ~f:generalize_pred, ty)
let occurs_check lvl id ty =
  (* Fail with [Error_recursive_types] when variable [id] occurs inside
     [ty].  As a side effect, every unbound variable met at a deeper
     level than [lvl] is pulled up to [lvl] (the usual level adjustment
     performed before linking a variable to [ty]). *)
  let rec occurs_check_ty (ty : ty) : unit =
    match ty with
    | Ty_const _ -> ()
    | Ty_arr (args, ret) ->
      List.iter args ~f:occurs_check_ty;
      occurs_check_ty ret
    | Ty_app (f, args) ->
      occurs_check_ty f;
      List.iter args ~f:occurs_check_ty
    | Ty_var { contents = Ty_var_link ty } -> occurs_check_ty ty
    | Ty_var { contents = Ty_var_generic _ } -> ()
    | Ty_var ({ contents = Ty_var_unbound v } as var) ->
      if v.id = id then type_error Error_recursive_types
      else if lvl < v.lvl then var := Ty_var_unbound { id = v.id; lvl }
      else ()
    | Ty_record ty_row -> occurs_check_ty_row ty_row
  and occurs_check_ty_row (ty_row : ty_row) : unit =
    match ty_row with
    | Ty_row_field (_name, ty, ty_row) ->
      occurs_check_ty ty;
      occurs_check_ty_row ty_row
    | Ty_row_empty -> ()
    | Ty_row_var { contents = Ty_var_link ty_row } -> occurs_check_ty_row ty_row
    | Ty_row_var { contents = Ty_var_generic _ } -> ()
    | Ty_row_var ({ contents = Ty_var_unbound v } as var) ->
      if v.id = id then type_error Error_recursive_types
      else if lvl < v.lvl then var := Ty_var_unbound { id = v.id; lvl }
      else ()
    | Ty_row_const _ -> assert false
  in
  occurs_check_ty ty
let rec unify (ty1 : ty) (ty2 : ty) =
  (* Destructively unify [ty1] and [ty2]; raises a type error on
     mismatch.  Unbound variables are linked in place after an occurs
     check (which also adjusts variable levels). *)
  if phys_equal ty1 ty2 then ()
  else
    match (ty1, ty2) with
    | Ty_const name1, Ty_const name2 ->
      (* base types unify only by name *)
      if not String.(name1 = name2) then
        type_error (Error_unification (ty1, ty2))
    | Ty_arr (args1, ret1), Ty_arr (args2, ret2) -> (
      match List.iter2 args1 args2 ~f:unify with
      | Unequal_lengths ->
        type_error
          (Error_arity_mismatch (ty1, List.length args2, List.length args1))
      | Ok () -> unify ret1 ret2)
    | Ty_app (f1, args1), Ty_app (f2, args2) ->
      unify f1 f2;
      (* NOTE(review): iter2_exn assumes both applications have the same
         number of arguments -- presumably guaranteed upstream; confirm. *)
      List.iter2_exn args1 args2 ~f:unify
    | Ty_record row1, Ty_record row2 -> unify_row row1 row2
    | Ty_var { contents = Ty_var_link ty1 }, ty2
    | ty1, Ty_var { contents = Ty_var_link ty2 } ->
      (* chase links *)
      unify ty1 ty2
    | Ty_var ({ contents = Ty_var_unbound { id; lvl } } as var), ty
    | ty, Ty_var ({ contents = Ty_var_unbound { id; lvl } } as var) ->
      occurs_check lvl id ty;
      var := Ty_var_link ty
    | ty1, ty2 -> type_error (Error_unification (ty1, ty2))
and unify_row row1 row2 =
  if phys_equal row1 row2 then ()
  else
    match (row1, row2) with
    | Ty_row_empty, Ty_row_empty -> ()
    | Ty_row_field (name, ty, row1), Ty_row_field _ ->
      (* Row unification: rewrite [row2] so that field [name] comes
         first (unifying the two field types on the way), then unify the
         tails.  A field missing from [row2] can be absorbed by an
         unbound row variable, which gets extended with the field. *)
      let exception Row_rewrite_error in
      let rec rewrite = function
        | Ty_row_empty -> raise Row_rewrite_error
        | Ty_row_field (name', ty', row') ->
          if String.(name = name') then (
            unify ty ty';
            row')
          else Ty_row_field (name', ty', rewrite row')
        | Ty_row_var { contents = Ty_var_link row' } -> rewrite row'
        | Ty_row_var ({ contents = Ty_var_unbound { id = _; lvl } } as var) ->
          (* open row: add the missing field, leaving a fresh tail *)
          let row' = newrowvar lvl () in
          var := Ty_var_link (Ty_row_field (name, ty, row'));
          row'
        | Ty_row_var { contents = Ty_var_generic _ } ->
          failwith "non instantiated row variable"
        | Ty_row_const _ -> assert false
      in
      let row1_unbound =
        (* remember row1's tail if it is an unbound row variable *)
        match row1 with
        | Ty_row_var ({ contents = Ty_var_unbound _ } as var) -> Some var
        | _ -> None
      in
      let row2 =
        try rewrite row2 with
        | Row_rewrite_error ->
          type_error (Error_unification (Ty_record row1, Ty_record row2))
      in
      (* if rewriting [row2] linked row1's own tail, the rows are recursive *)
      (match row1_unbound with
      | Some { contents = Ty_var_link _ } ->
        type_error Error_recursive_row_types
      | _ -> ());
      unify_row row1 row2
    | Ty_row_var { contents = Ty_var_link row1 }, row2
    | row2, Ty_row_var { contents = Ty_var_link row1 } ->
      unify_row row1 row2
    | Ty_row_var ({ contents = Ty_var_unbound { id; lvl } } as var), row
    | row, Ty_row_var ({ contents = Ty_var_unbound { id; lvl } } as var) ->
      occurs_check lvl id (Ty_record row);
      var := Ty_var_link row
    | row1, row2 ->
      type_error (Error_unification (Ty_record row1, Ty_record row2))
let rec unify_abs arity ty =
  (* View [ty] as a function type of [arity] parameters, returning
     [(ty_args, ty_ret)].  An unbound variable is refined in place into
     a fresh arrow of the requested arity; any other non-arrow type is
     an error. *)
  match ty with
  | Ty_arr (ty_args, ty_ret) ->
    if List.length ty_args <> arity then
      type_error (Error_arity_mismatch (ty, List.length ty_args, arity));
    (ty_args, ty_ret)
  | Ty_var var -> (
    match !var with
    | Ty_var_link ty -> unify_abs arity ty
    | Ty_var_unbound v ->
      let ty_ret = newvar v.lvl () in
      let ty_args = List.init arity ~f:(fun _ -> newvar v.lvl ()) in
      var := Ty_var_link (Ty_arr (ty_args, ty_ret));
      (ty_args, ty_ret)
    | Ty_var_generic _ -> failwith "uninstantiated generic type")
  | Ty_app _
  | Ty_const _
  | Ty_record _ ->
    type_error (Error_not_a_function ty)
let rec infer' lvl (env : Env.t) (e : expr) =
  (* Infer the type of [e] at level [lvl], returning [(cs, ty)] where
     [cs] are the typeclass predicates collected along the way. *)
  match e with
  | Expr_name name ->
    (* variables: instantiate the (possibly polymorphic) scheme *)
    let qty =
      match Env.find env name with
      | Some ty -> ty
      | None -> type_error (Error_unknown_name name)
    in
    instantiate_qual_ty lvl qty
  | Expr_abs (args, body) ->
    (* lambda: each parameter gets a fresh monomorphic variable *)
    let ty_args = List.map args ~f:(fun _ -> newvar lvl ()) in
    let cs, ty_body =
      let env =
        List.fold_left (List.zip_exn args ty_args) ~init:env
          ~f:(fun env (arg, ty_arg) -> Env.add env arg ([], ty_arg))
      in
      infer' lvl env body
    in
    (cs, Ty_arr (ty_args, ty_body))
  | Expr_app (func, args) ->
    (* application: coerce the callee into an arrow of matching arity,
       then unify every argument against its parameter type *)
    let cs, ty_func = infer' lvl env func in
    let ty_args, ty_ret = unify_abs (List.length args) ty_func in
    let cs =
      List.fold2_exn args ty_args ~init:cs ~f:(fun cs arg ty_arg ->
          let cs', ty = infer' lvl env arg in
          unify ty ty_arg;
          cs @ cs')
    in
    (cs, ty_ret)
  | Expr_let (name, e, b) ->
    (* let: infer the bound expression one level deeper, then generalize
       back at the current level (level-based let-polymorphism) *)
    let ty_e = infer' (lvl + 1) env e in
    let ty_e = generalize lvl env ty_e in
    let env = Env.add env name ty_e in
    infer' lvl env b
  | Expr_let_rec (name, e, b) ->
    (* let rec: type [fun name -> e] and unify it with [t -> t], making
       the bound name usable (monomorphically) inside its own body *)
    let ty_e =
      let ty_ret = newvar lvl () in
      let ty_fun = Ty_arr ([ ty_ret ], ty_ret) in
      let cs, ty_fun' = infer' (lvl + 1) env (Expr_abs ([ name ], e)) in
      unify ty_fun' ty_fun;
      (cs, ty_ret)
    in
    let ty_e = generalize lvl env ty_e in
    let env = Env.add env name ty_e in
    infer' lvl env b
  | Expr_record fields ->
    (* record literal: a closed row built from the field types *)
    let cs, ty_row =
      List.fold_left fields ~init:([], Ty_row_empty)
        ~f:(fun (cs, row) (label, e) ->
          let cs', ty_e = infer' lvl env e in
          (cs @ cs', Ty_row_field (label, ty_e, row)))
    in
    (cs, Ty_record ty_row)
  | Expr_record_proj (e, label) ->
    (* projection: [e] must be a record containing [label]; the row's
       tail is left open *)
    let cs, ty_e = infer' lvl env e in
    let ty_proj = newvar lvl () in
    unify ty_e (Ty_record (Ty_row_field (label, ty_proj, newrowvar lvl ())));
    (cs, ty_proj)
  | Expr_record_extend (e, fields) ->
    (* extension: [e] has the open row [ty_row]; the result prepends the
       new fields to it *)
    let ty_row = newrowvar lvl () in
    let cs, return_ty_row =
      List.fold_left fields ~init:([], ty_row)
        ~f:(fun (cs, ty_row) (label, e) ->
          let ty_e = newvar lvl () in
          let cs', ty_e' = infer' lvl env e in
          unify ty_e ty_e';
          (cs @ cs', Ty_row_field (label, ty_e, ty_row)))
    in
    let cs', ty_e' = infer' lvl env e in
    unify (Ty_record ty_row) ty_e';
    (cs @ cs', Ty_record return_ty_row)
  | Expr_record_update (e, fields) ->
    (* update: like extension, but [e]'s type is unified with the
       *extended* row, so it must already contain the updated fields *)
    let ty_row = newrowvar lvl () in
    let return_ty_row, to_unify =
      List.fold fields ~init:(ty_row, [])
        ~f:(fun (ty_row, to_unify) (label, e) ->
          let ty_e = newvar lvl () in
          (Ty_row_field (label, ty_e, ty_row), (e, ty_e) :: to_unify))
    in
    let cs, ty_e = infer' lvl env e in
    unify (Ty_record return_ty_row) ty_e;
    let cs =
      List.fold (List.rev to_unify) ~init:cs ~f:(fun cs (e, ty_e) ->
          let cs', ty_e' = infer' lvl env e in
          unify ty_e ty_e';
          cs @ cs')
    in
    (cs, Ty_record return_ty_row)
  | Expr_lit (Lit_string _) -> ([], Ty_const "string")
  | Expr_lit (Lit_int _) -> ([], Ty_const "int")
let infer env e =
  (* Top-level entry point: infer at level 0, then generalize at level
     -1 so every remaining unbound variable becomes generic. *)
  let qty = infer' 0 env e in
  generalize (-1) env qty
|
79791558a9519c42e4273e7b4434f523a09ef80dbf31c0652c9c0f47fda59240 | borkdude/edamame | syntax_quote.cljc | (ns edamame.impl.syntax-quote
"Taken and adapted from
"
{:no-doc true}
(:require [clojure.string :as str]))
(defn unquote?
  "True when `form` is an unquote form, i.e. a seq whose head is
  `clojure.core/unquote`."
  [form]
  (and (seq? form)
       (= 'clojure.core/unquote (first form))))
(defn- unquote-splicing?
  "True when `form` is an unquote-splicing form, i.e. a seq whose head
  is `clojure.core/unquote-splicing`."
  [form]
  (and (seq? form)
       (= 'clojure.core/unquote-splicing (first form))))
(declare syntax-quote)
(defn- expand-list
  "Expand a list by resolving its syntax quotes and unquotes"
  [ctx #?(:cljs ^not-native reader :default reader) s]
  ;; Each element is turned into a form the caller splices together with
  ;; clojure.core/concat:
  ;;   ~x   => (clojure.core/list x)
  ;;   ~@x  => x                                  (spliced as-is)
  ;;   else => (clojure.core/list (syntax-quote ...))
  (loop [s (seq s) r (transient [])]
    (if s
      (let [item (first s)
            ret (conj! r
                       (cond
                         (unquote? item) (list 'clojure.core/list (second item))
                         (unquote-splicing? item) (second item)
                         :else (list 'clojure.core/list (syntax-quote ctx reader item))))]
        (recur (next s) ret))
      (seq (persistent! r)))))
(defn- syntax-quote-coll [ctx #?(:cljs ^not-native reader :default reader) type coll]
  ;; Build the expansion for a collection: concat the per-element forms
  ;; produced by expand-list, then rebuild the target collection by
  ;; applying `type` (e.g. clojure.core/hash-set) when one is given.
  ;; We use sequence rather than seq here to fix CLJ-1444,
  ;; but because of CLJ-1586 we still need to call seq on the form.
  (let [res (list 'clojure.core/sequence
                  (list 'clojure.core/seq
                        (cons 'clojure.core/concat
                              (expand-list ctx reader coll))))]
    (if type
      (list 'clojure.core/apply type res)
      res)))
(defn map-func
  "Decide which map type to use, array-map if less than 16 elements"
  [coll]
  (if (< (count coll) 16)
    'clojure.core/array-map
    'clojure.core/hash-map))
(defn- flatten-map
  "Flatten a map into a seq of alternate keys and values (nil for an
  empty map)."
  [form]
  ;; Each map entry is a [k v] pair; concatenating them in seq order
  ;; yields the alternating key/value sequence.
  (seq (mapcat identity form)))
(defn- syntax-quote* [{:keys [:gensyms] :as ctx}
                      #?(:cljs ^not-native reader :default reader) form]
  ;; Core of syntax-quote expansion.  `ctx` carries `:gensyms`, an atom
  ;; mapping foo#-style symbols to their generated names (shared across
  ;; one syntax-quote so repeated foo# resolve identically), and an
  ;; optional [:syntax-quote :resolve-symbol] hook used to resolve plain
  ;; symbols.
  (cond
    (special-symbol? form) (list 'quote form)
    (symbol? form)
    (list 'quote
          (let [sym-name (name form)]
            ;; (the inner special-symbol? check is shadowed by the first
            ;; cond branch above)
            (cond (special-symbol? form) form
                  ;; auto-gensym: foo# -> foo__<n>__auto__, memoized in ctx
                  (str/ends-with? sym-name "#")
                  (if-let [generated (get @gensyms form)]
                    generated
                    (let [n (subs sym-name 0 (dec (count sym-name)))
                          generated (gensym (str n "__"))
                          generated (symbol (str (name generated) "__auto__"))]
                      (swap! gensyms assoc form generated)
                      generated))
                  :else
                  ;; plain symbols go through the resolver hook, if any
                  (let [f (-> ctx :syntax-quote :resolve-symbol)]
                    ((or f identity) form)))))
    (unquote? form) (second form)
    (unquote-splicing? form) (throw (new #?(:cljs js/Error :clj IllegalStateException)
                                         "unquote-splice not in list"))
    (coll? form)
    (cond
      ;; records evaluate to themselves
      (instance? #?(:clj clojure.lang.IRecord :cljs IRecord) form) form
      (map? form) (syntax-quote-coll ctx reader (map-func form) (flatten-map form))
      (vector? form) (list 'clojure.core/vec (syntax-quote-coll ctx reader nil form))
      (set? form) (syntax-quote-coll ctx reader 'clojure.core/hash-set form)
      (or (seq? form) (list? form))
      (let [seq (seq form)]
        (if seq
          (syntax-quote-coll ctx reader nil seq)
          '(clojure.core/list)))
      :else (throw (new #?(:clj UnsupportedOperationException
                           :cljs js/Error) "Unknown Collection type")))
    ;; self-evaluating literals pass through unchanged
    (or (keyword? form)
        (number? form)
        (char? form)
        (string? form)
        (nil? form)
        (boolean? form)
        #?(:clj (instance? java.util.regex.Pattern form)
           :cljs (regexp? form)))
    form
    :else (list 'quote form)))
(defn- add-meta [ctx reader form ret]
  ;; Wrap the expansion `ret` in clojure.core/with-meta when `form`
  ;; carries user metadata.  Positional keys (row/col/end-row/end-col,
  ;; taken from ctx) are parser bookkeeping and are excluded from the
  ;; emptiness check.
  ;; NOTE(review): the emitted with-meta still receives the full
  ;; (meta form), positional keys included -- confirm intended.
  (if (and #?(:clj (instance? clojure.lang.IObj form)
              :cljs (implements? IWithMeta form))
           (seq (dissoc (meta form) (:row-key ctx) (:col-key ctx) (:end-row-key ctx) (:end-col-key ctx))))
    (list #?(:clj 'clojure.core/with-meta
             :cljs 'cljs.core/with-meta) ret (syntax-quote* ctx reader (meta form)))
    ret))
(defn syntax-quote
  "Fully syntax-quote `form`: expand it via syntax-quote* and re-attach
  any user metadata."
  [ctx reader form]
  (add-meta ctx reader form (syntax-quote* ctx reader form)))
| null | https://raw.githubusercontent.com/borkdude/edamame/e609451d5dc38f347a5e0c68453edd76cccac3ff/src/edamame/impl/syntax_quote.cljc | clojure | But because of -1586 we still need to call seq on the form | (ns edamame.impl.syntax-quote
"Taken and adapted from
"
{:no-doc true}
(:require [clojure.string :as str]))
(defn unquote? [form]
(and (seq? form)
(= (first form) 'clojure.core/unquote)))
(defn- unquote-splicing? [form]
(and (seq? form)
(= (first form) 'clojure.core/unquote-splicing)))
(declare syntax-quote)
(defn- expand-list
"Expand a list by resolving its syntax quotes and unquotes"
[ctx #?(:cljs ^not-native reader :default reader) s]
(loop [s (seq s) r (transient [])]
(if s
(let [item (first s)
ret (conj! r
(cond
(unquote? item) (list 'clojure.core/list (second item))
(unquote-splicing? item) (second item)
:else (list 'clojure.core/list (syntax-quote ctx reader item))))]
(recur (next s) ret))
(seq (persistent! r)))))
(defn- syntax-quote-coll [ctx #?(:cljs ^not-native reader :default reader) type coll]
We use sequence rather than seq here to fix -1444
(let [res (list 'clojure.core/sequence
(list 'clojure.core/seq
(cons 'clojure.core/concat
(expand-list ctx reader coll))))]
(if type
(list 'clojure.core/apply type res)
res)))
(defn map-func
"Decide which map type to use, array-map if less than 16 elements"
[coll]
(if (>= (count coll) 16)
'clojure.core/hash-map
'clojure.core/array-map))
(defn- flatten-map
"Flatten a map into a seq of alternate keys and values"
[form]
(loop [s (seq form) key-vals (transient [])]
(if s
(let [e (first s)]
(recur (next s) (-> key-vals
(conj! (key e))
(conj! (val e)))))
(seq (persistent! key-vals)))))
(defn- syntax-quote* [{:keys [:gensyms] :as ctx}
#?(:cljs ^not-native reader :default reader) form]
(cond
(special-symbol? form) (list 'quote form)
(symbol? form)
(list 'quote
(let [sym-name (name form)]
(cond (special-symbol? form) form
(str/ends-with? sym-name "#")
(if-let [generated (get @gensyms form)]
generated
(let [n (subs sym-name 0 (dec (count sym-name)))
generated (gensym (str n "__"))
generated (symbol (str (name generated) "__auto__"))]
(swap! gensyms assoc form generated)
generated))
:else
(let [f (-> ctx :syntax-quote :resolve-symbol)]
((or f identity) form)))))
(unquote? form) (second form)
(unquote-splicing? form) (throw (new #?(:cljs js/Error :clj IllegalStateException)
"unquote-splice not in list"))
(coll? form)
(cond
(instance? #?(:clj clojure.lang.IRecord :cljs IRecord) form) form
(map? form) (syntax-quote-coll ctx reader (map-func form) (flatten-map form))
(vector? form) (list 'clojure.core/vec (syntax-quote-coll ctx reader nil form))
(set? form) (syntax-quote-coll ctx reader 'clojure.core/hash-set form)
(or (seq? form) (list? form))
(let [seq (seq form)]
(if seq
(syntax-quote-coll ctx reader nil seq)
'(clojure.core/list)))
:else (throw (new #?(:clj UnsupportedOperationException
:cljs js/Error) "Unknown Collection type")))
(or (keyword? form)
(number? form)
(char? form)
(string? form)
(nil? form)
(boolean? form)
#?(:clj (instance? java.util.regex.Pattern form)
:cljs (regexp? form)))
form
:else (list 'quote form)))
(defn- add-meta [ctx reader form ret]
(if (and #?(:clj (instance? clojure.lang.IObj form)
:cljs (implements? IWithMeta form))
(seq (dissoc (meta form) (:row-key ctx) (:col-key ctx) (:end-row-key ctx) (:end-col-key ctx))))
(list #?(:clj 'clojure.core/with-meta
:cljs 'cljs.core/with-meta) ret (syntax-quote* ctx reader (meta form)))
ret))
(defn syntax-quote [ctx reader form]
(let [ret (syntax-quote* ctx reader form)]
(add-meta ctx reader form ret)))
|
e9066ef6e10336141325b55895ba3abca0f630c6e965766c1f95899fd3bc34af | nikita-volkov/rerebase | QSemN.hs | module Control.Concurrent.QSemN
(
module Rebase.Control.Concurrent.QSemN
)
where
import Rebase.Control.Concurrent.QSemN
| null | https://raw.githubusercontent.com/nikita-volkov/rerebase/25895e6d8b0c515c912c509ad8dd8868780a74b6/library/Control/Concurrent/QSemN.hs | haskell | module Control.Concurrent.QSemN
(
module Rebase.Control.Concurrent.QSemN
)
where
import Rebase.Control.Concurrent.QSemN
| |
60a5e51b73bef63beea4eeb73eb48e0d344c719ef14fc82d726482da325b2bc8 | rafalio/rafalio | Util.hs | {-# LANGUAGE OverloadedStrings #-}
module Site.Util where
import Site.PandocProcessors
import Hakyll
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Maybe
import Data.Time.Format
import Data.Time.Clock
import Data.List
import System.FilePath.Posix
import Control.Applicative
import System.FilePath (takeBaseName, takeDirectory, takeFileName)
import Text.Pandoc.Options
-- | Map a post's source path to its route under \"posts/\": the file
-- name is kept as-is unless it starts with a @YYYY-MM-DD@ date stamp,
-- in which case the stamp and the following separator are stripped.
preparePostString :: String -> String
preparePostString path = "posts/" ++ name
  where
    fn = takeFileName path
    stamp = parseTimeM True defaultTimeLocale "%Y-%m-%d" (take 10 fn) :: Maybe UTCTime
    name = maybe fn (const (drop 11 fn)) stamp
-- | Build a Pandoc compiler whose options depend on the item's metadata:
--
--   * a @toc@ field enables a table of contents; its value (an integer,
--     defaulting to 4 when absent or unparsable) sets the TOC depth,
--   * a @notocsections@ field disables section numbering,
--   * @.lhs@ sources additionally enable the literate-Haskell reader
--     extension.
selectCustomPandocCompiler :: Item String -> Compiler (Item String)
selectCustomPandocCompiler item = do
  metadata <- getMetadata $ itemIdentifier item
  let hasToc = isJust $ lookupString "toc" metadata
  -- TOC depth: first parseable integer from the "toc" field, or 4.
  let tocVal = (lookupString "toc" metadata >>= fmap fst . listToMaybe . reads) <|> (Just 4)
  let wOptions = if hasToc then (pandocWriterOptionsTOC {writerTOCDepth = fromJust tocVal}) else pandocWriterOptions
  let sections = isJust $ lookupString "notocsections" metadata
  let finalOptions = wOptions {writerNumberSections = not sections}
  let ident = takeExtension . toFilePath . itemIdentifier $ item
  let curExts = readerExtensions defaultHakyllReaderOptions
  -- Literate Haskell sources need the corresponding reader extension.
  let rOptions = if (ident == ".lhs") then defaultHakyllReaderOptions {readerExtensions = S.insert Ext_literate_haskell curExts} else defaultHakyllReaderOptions
  pandocCompilerWithTransform rOptions finalOptions (processCodeBlocks ident)
-- | Concatenate the bodies of the given items, separated by horizontal
-- rules.
getPostBodies :: [Item String] -> Compiler String
getPostBodies = return . intercalate "<hr />" . map itemBody
| null | https://raw.githubusercontent.com/rafalio/rafalio/bebef4904f0538a7c40d1b4a88153e8e56aae11e/src/Site/Util.hs | haskell | # LANGUAGE OverloadedStrings #
This gets rid of the date string in my .md post, and adds the "posts/" prefix
This is unsafe
parse failed, no date available, keep filename
get rid of the timestamp |
module Site.Util where
import Site.PandocProcessors
import Hakyll
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Maybe
import Data.Time.Format
import Data.Time.Clock
import Data.List
import System.FilePath.Posix
import Control.Applicative
import System.FilePath (takeBaseName, takeDirectory, takeFileName)
import Text.Pandoc.Options
preparePostString :: String -> String
preparePostString path =
let fn = takeFileName path
parsedTime = parseTimeM True defaultTimeLocale "%Y-%m-%d" (take 10 fn) :: Maybe UTCTime
in
((++) "posts/") $ case parsedTime of
selectCustomPandocCompiler :: Item String -> Compiler (Item String)
selectCustomPandocCompiler item = do
metadata <- getMetadata $ itemIdentifier item
let hasToc = isJust $ lookupString "toc" metadata
let tocVal = (lookupString "toc" metadata >>= fmap fst . listToMaybe . reads) <|> (Just 4)
let wOptions = if hasToc then (pandocWriterOptionsTOC {writerTOCDepth = fromJust tocVal}) else pandocWriterOptions
let sections = isJust $ lookupString "notocsections" metadata
let curWExts = writerExtensions defaultHakyllWriterOptions
let finalOptions = wOptions {writerNumberSections = not sections}
let ident = takeExtension . toFilePath . itemIdentifier $ item
let curExts = readerExtensions defaultHakyllReaderOptions
let rOptions = if (ident == ".lhs") then defaultHakyllReaderOptions {readerExtensions = S.insert Ext_literate_haskell curExts} else defaultHakyllReaderOptions
pandocCompilerWithTransform rOptions finalOptions (processCodeBlocks ident)
getPostBodies :: [Item String] -> Compiler String
getPostBodies = return . concat . intersperse "<hr />" . map itemBody
|
b81fc8bd64d6b5354373fd42b485cd4c77a6b54161cb83e498d56ca90f8317b2 | illiichi/orenolisp | main_ui.clj | (ns orenolisp.view.ui.main-ui
(:require [orenolisp.view.ui.fx-util :as fx]
[orenolisp.view.ui.component.typed-history :as typed-history]
[orenolisp.view.ui.component.context-display :as context-display]
[orenolisp.view.ui.component.viewport :as viewport]
[orenolisp.view.ui.component.logscreen :as logscreen]
[orenolisp.view.ui.component.window-indicator :as w-indicator]
[clojure.core.async :as async])
(:import
(javafx.application Application)
(javafx.stage Stage StageStyle)
(javafx.geometry Insets Pos)
(javafx.scene Scene Group)
(javafx.scene.transform Scale)
(javafx.scene.layout Pane StackPane BorderPane GridPane ColumnConstraints Priority)))
;; Top-level JavaFX handles ({:root <StackPane> :stage <Stage>}), set by
;; render-base.
(def ui-state (atom nil))
(declare %layer-parent)
;; Reference resolution the layout is designed for; the live scene is
;; scaled relative to it (see calcurate-scale).
(def base-width 1280)
(def base-height 1024)
;; NOTE(review): not referenced in this namespace (render-base binds a
;; local `scale` that shadows it) -- confirm it is used elsewhere.
(def scale 0.5)
(defn calcurate-scale
  "Scale factor mapping the design resolution (base-width x base-height)
  onto the actual scene size, using the larger of the two axis ratios.
  NOTE(review): `min` would letterbox instead of overflow -- confirm
  `max` is intended.  (Name typo kept: the var is public.)"
  [screen-width screen-height]
  (max (/ screen-width base-width)
       (/ screen-height base-height)))
(defn render-base
  "Create and show the application window: a black root StackPane inside
  a Group, with key-typed events forwarded onto `input-ch` and a resize
  listener that rescales the root so the design-space size stays
  constant.  Stores {:root ... :stage ...} in ui-state (also returned)."
  [input-ch]
  (reset! ui-state
          (let [root (doto (StackPane.)
                       (.setStyle "-fx-background-color: black")
                       (.setPadding (Insets. 25 0 0 0)))
                container (doto (Group.)
                            (fx/add-child root))
                ;; push every key-typed event onto the input channel
                scene (doto (Scene. container 500 300)
                        (.setOnKeyTyped
                         (fx/event-handler*
                          (fn [e] (async/go (async/>! input-ch e))))))
                stage (doto (Stage. StageStyle/DECORATED)
                        (.setScene scene)
                        (.sizeToScene))
                ;; on resize: scale the root and grow its preferred size
                ;; inversely, so content keeps its design-space coordinates
                scale-listener (fx/changed-handler*
                                (fn [_ _ _]
                                  (let [scale (calcurate-scale (-> scene .getWidth)
                                                               (-> scene .getHeight))]
                                    (-> root (.setPrefWidth (/ (-> scene .getWidth) scale)))
                                    (-> root (.setPrefHeight (/ (-> scene .getHeight) scale)))
                                    (doto (-> root .getTransforms)
                                      .clear
                                      (.add (doto (Scale. scale scale)
                                              (.setPivotX 0.0)
                                              (.setPivotY 0.0)))))))]
            (-> scene .widthProperty (.addListener scale-listener))
            (-> scene .heightProperty (.addListener scale-listener))
            (-> scene .getStylesheets (.add "style.css"))
            (.show stage)
            {:root root :stage stage})))
(defn layout-content
  "Replace the root pane's children with `content`, making it the single
  visible layer."
  [content]
  (doto (-> @ui-state :root .getChildren)
    (.clear)
    (.add content)))
(defn- create-bottom
  "Bottom bar: a two-column GridPane holding `left` and `right`.
  NOTE(review): the two ColumnConstraints (0% and 70%) apply to grid
  columns 0 and 1, while the children are placed at columns 1 and 2 --
  confirm the off-by-one is intended."
  [left right]
  (let [panel (GridPane.)]
    (doto (.getColumnConstraints panel)
      (.add (doto (ColumnConstraints.)
              (.setPercentWidth 0)
              (.setFillWidth true)
              (.setHgrow Priority/ALWAYS)))
      (.add (doto (ColumnConstraints.)
              (.setPercentWidth 70)
              (.setFillWidth true)
              (.setHgrow Priority/ALWAYS))))
    (.add panel left 1 1)
    (.add panel right 2 1)
    panel))
(defn render
  "Build the main content pane: log screen, viewport and window
  indicator stacked in the center, with the typed-history and
  context-display bar along the bottom."
  []
  (doto (BorderPane.)
    (.setCenter (doto (StackPane.)
                  (fx/add-child (logscreen/render))
                  (fx/add-child (viewport/render))
                  (fx/add-child (w-indicator/render))))
    (.setBottom (create-bottom (typed-history/create-control)
                               (context-display/create)))))
| null | https://raw.githubusercontent.com/illiichi/orenolisp/7b085fb687dbe16b5cbe8c739238bbaf79156814/src/orenolisp/view/ui/main_ui.clj | clojure | (ns orenolisp.view.ui.main-ui
(:require [orenolisp.view.ui.fx-util :as fx]
[orenolisp.view.ui.component.typed-history :as typed-history]
[orenolisp.view.ui.component.context-display :as context-display]
[orenolisp.view.ui.component.viewport :as viewport]
[orenolisp.view.ui.component.logscreen :as logscreen]
[orenolisp.view.ui.component.window-indicator :as w-indicator]
[clojure.core.async :as async])
(:import
(javafx.application Application)
(javafx.stage Stage StageStyle)
(javafx.geometry Insets Pos)
(javafx.scene Scene Group)
(javafx.scene.transform Scale)
(javafx.scene.layout Pane StackPane BorderPane GridPane ColumnConstraints Priority)))
(def ui-state (atom nil))
(declare %layer-parent)
(def base-width 1280)
(def base-height 1024)
(def scale 0.5)
(defn calcurate-scale [screen-width screen-height]
(max (/ screen-width base-width)
(/ screen-height base-height)))
(defn render-base [input-ch]
(reset! ui-state
(let [root (doto (StackPane.)
(.setStyle "-fx-background-color: black")
(.setPadding (Insets. 25 0 0 0)))
container (doto (Group.)
(fx/add-child root))
scene (doto (Scene. container 500 300)
(.setOnKeyTyped
(fx/event-handler*
(fn [e] (async/go (async/>! input-ch e))))))
stage (doto (Stage. StageStyle/DECORATED)
(.setScene scene)
(.sizeToScene))
scale-listener (fx/changed-handler*
(fn [_ _ _]
(let [scale (calcurate-scale (-> scene .getWidth)
(-> scene .getHeight))]
(-> root (.setPrefWidth (/ (-> scene .getWidth) scale)))
(-> root (.setPrefHeight (/ (-> scene .getHeight) scale)))
(doto (-> root .getTransforms)
.clear
(.add (doto (Scale. scale scale)
(.setPivotX 0.0)
(.setPivotY 0.0)))))))]
(-> scene .widthProperty (.addListener scale-listener))
(-> scene .heightProperty (.addListener scale-listener))
(-> scene .getStylesheets (.add "style.css"))
(.show stage)
{:root root :stage stage})))
(defn layout-content [content]
(doto (-> @ui-state :root .getChildren)
(.clear)
(.add content)))
(defn- create-bottom [left right]
(let [panel (GridPane.)]
(doto (.getColumnConstraints panel)
(.add (doto (ColumnConstraints.)
(.setPercentWidth 0)
(.setFillWidth true)
(.setHgrow Priority/ALWAYS)))
(.add (doto (ColumnConstraints.)
(.setPercentWidth 70)
(.setFillWidth true)
(.setHgrow Priority/ALWAYS))))
(.add panel left 1 1)
(.add panel right 2 1)
panel))
(defn render []
(doto (BorderPane.)
(.setCenter (doto (StackPane.)
(fx/add-child (logscreen/render))
(fx/add-child (viewport/render))
(fx/add-child (w-indicator/render))))
(.setBottom (create-bottom (typed-history/create-control)
(context-display/create)))))
| |
bd4ccaca70bb959e659ff329f00aafcafb9bc96cde2d511fa0dcf9de17bbc7a7 | FranklinChen/learn-you-some-erlang | ppool_supersup.erl | -module(ppool_supersup).
-behaviour(supervisor).
-export([start_link/0, start_pool/3, stop_pool/1]).
-export([init/1]).
start_link() ->
supervisor:start_link({local, ppool}, ?MODULE, []).
start_pool(Name, Limit, MFA) ->
ChildSpec = {Name,
{ppool_sup, start_link, [Name, Limit, MFA]},
permanent, 10500, supervisor, [ppool_sup]},
supervisor:start_child(ppool, ChildSpec).
stop_pool(Name) ->
supervisor:terminate_child(ppool, Name),
supervisor:delete_child(ppool, Name).
init([]) ->
MaxRestart = 6,
MaxTime = 3000,
{ok, {{one_for_one, MaxRestart, MaxTime}, []}}.
| null | https://raw.githubusercontent.com/FranklinChen/learn-you-some-erlang/878c8bc2011a12862fe72dd7fdc6c921348c79d6/release/ppool-1.0/src/ppool_supersup.erl | erlang | -module(ppool_supersup).
-behaviour(supervisor).
-export([start_link/0, start_pool/3, stop_pool/1]).
-export([init/1]).
start_link() ->
supervisor:start_link({local, ppool}, ?MODULE, []).
start_pool(Name, Limit, MFA) ->
ChildSpec = {Name,
{ppool_sup, start_link, [Name, Limit, MFA]},
permanent, 10500, supervisor, [ppool_sup]},
supervisor:start_child(ppool, ChildSpec).
stop_pool(Name) ->
supervisor:terminate_child(ppool, Name),
supervisor:delete_child(ppool, Name).
init([]) ->
MaxRestart = 6,
MaxTime = 3000,
{ok, {{one_for_one, MaxRestart, MaxTime}, []}}.
| |
3ee0e6521852638b229c1b6840914e5d59a5ae0c6e053cc58075f82564e10cce | fgalassi/cs61a-sp11 | 2.76.scm | ; conventional:
; to add a type, go into each generic operation and add a cond to deal with the new type
; to add an operation, create a new function for the operation dealing with every available type
; BEST IF WE FREQUENTLY ADD NEW OPERATIONS
; message-passing:
; to add a type, create a new function for the type dealing with every available operation
; to add an operation, go into each function representing a type and add a cond to deal with the new operation
; BEST IF WE FREQUENTLY ADD NEW TYPES
; data-directed:
; to add a type, put a line for each operation into the dispatch table
; to add an operation, put a line for each type into the dispatch table
; BEST IF WE HAVE BOTH CASES
| null | https://raw.githubusercontent.com/fgalassi/cs61a-sp11/66df3b54b03ee27f368c716ae314fd7ed85c4dba/homework/2.76.scm | scheme | conventional:
to add a type, go into each generic operation and add a cond to deal with the new type
to add an operation, create a new function for the operation dealing with every available type
BEST IF WE FREQUENTLY ADD NEW OPERATIONS
message-passing:
to add a type, create a new function for the type dealing with every available operation
to add an operation, go into each function representing a type and add a cond to deal with the new operation
BEST IF WE FREQUENTLY ADD NEW TYPES
data-directed:
to add a type, put a line for each operation into the dispatch table
to add an operation, put a line for each type into the dispatch table
BEST IF WE HAVE BOTH CASES | |
2a6c65f4ce0eba9436edf83f5483f2643dbc6838a91a8d41a38bc177d4439003 | Mesabloo/nihil | Lambda.hs | # LANGUAGE BlockArguments #
{-# LANGUAGE OverloadedStrings #-}
module Nihil.Syntax.Concrete.Parser.Expression.Lambda where
import Nihil.Syntax.Common (Parser)
import Nihil.Syntax.Concrete.Core
import Nihil.Syntax.Concrete.Parser.Identifier
import qualified Nihil.Syntax.Concrete.Parser.Pattern.Atom as Pattern
import {-# SOURCE #-} Nihil.Syntax.Concrete.Parser.Expression
import Nihil.Syntax.Concrete.Debug
import Control.Applicative ((<|>))
import qualified Text.Megaparsec as MP
-- | Parse a lambda abstraction: @\\@ (or @λ@), one or more pattern
-- atoms, @->@ (or @→@), then the body expression.  @s@ is the spacing
-- parser threaded through the grammar.
pLambda :: Parser () -> Parser Atom
pLambda s = debug "pLambda" $ do
    pSymbol' "\\" <|> MP.hidden (pSymbol' "λ")   -- leading lambda symbol
    s
    params <- Pattern.pAtom `MP.sepEndBy1` s     -- at least one parameter
    pSymbol' "->" <|> MP.hidden (pSymbol' "→")
    ALambda params <$> (s *> pExpression s)
| null | https://raw.githubusercontent.com/Mesabloo/nihil/3821052ca5691a6492b23bd8a46bfe70567d374f/src/parser/Nihil/Syntax/Concrete/Parser/Expression/Lambda.hs | haskell | # LANGUAGE OverloadedStrings #
# SOURCE # | # LANGUAGE BlockArguments #
module Nihil.Syntax.Concrete.Parser.Expression.Lambda where
import Nihil.Syntax.Common (Parser)
import Nihil.Syntax.Concrete.Core
import Nihil.Syntax.Concrete.Parser.Identifier
import qualified Nihil.Syntax.Concrete.Parser.Pattern.Atom as Pattern
import Nihil.Syntax.Concrete.Debug
import Control.Applicative ((<|>))
import qualified Text.Megaparsec as MP
pLambda :: Parser () -> Parser Atom
pLambda s = debug "pLambda" $ do
pSymbol' "\\" <|> MP.hidden (pSymbol' "λ")
s
params <- Pattern.pAtom `MP.sepEndBy1` s
pSymbol' "->" <|> MP.hidden (pSymbol' "→")
ALambda params <$> (s *> pExpression s)
|
b1229c61531c08dbef909acc6fbd829d4ff29d5e11838bb047f4761c9efa8b4d | marcoheisig/Petalisp | lazy-stack.lisp | © 2016 - 2023 - license : GNU AGPLv3 -*- coding : utf-8 -*-
(in-package #:petalisp.api)
(defun lazy-stack (axis array &rest more-arrays)
(let* ((arrays (list* array more-arrays))
(lazy-array (lazy-array array))
(more-lazy-arrays (mapcar #'lazy-array more-arrays))
(lazy-arrays (list* lazy-array more-lazy-arrays))
(rank (lazy-array-rank lazy-array))
(step 1))
;; Check that the axis is valid.
(unless (and (integerp axis) (< -1 axis rank))
(error "~@<Invalid stack axis ~S for the array ~S.~:@>"
axis array))
;; Check that all supplied arrays have the same rank, and the same
shape in all but the specified AXIS .
(loop for other-lazy-array in more-lazy-arrays for index from 0 do
(unless (= (lazy-array-rank other-lazy-array) rank)
(error "~@<Cannot stack arrays with varying ranks, got ~S and ~S.~:@>"
array (nth index arrays)))
(loop for range1 in (lazy-array-ranges lazy-array)
for range2 in (lazy-array-ranges other-lazy-array)
for pos below rank
when (/= pos axis)
do (unless (range= range1 range2)
(error "~@<Arrays being stacked must only differ in the specified axis, ~
but the arrays ~S and ~S also differ in axis ~D.~:@>"
array (nth index arrays) pos))))
;; Determine the step size.
(loop for lazy-array in lazy-arrays
for index from 0
For error reporting , we track the position of the first array
with more than one element along AXIS .
with pos = nil
do (let ((range (shape-range (lazy-array-shape lazy-array) axis)))
(when (> (range-size range) 1)
(unless (null pos)
(unless (= step (range-step range))
(error "~@<Cannot stack arrays with varying step sizes, got ~S and ~S.~:@>"
(nth pos (list* array more-arrays))
(nth index (list* array more-arrays)))))
(setf step (range-step range))
(setf pos index))))
;; Now stack the arrays.
(let* ((inputs (remove 0 lazy-arrays :key #'lazy-array-size))
(transformations
(loop for input in inputs
for range = (lazy-array-range input axis)
for start = (range-start range)
for position = start then (+ position increment)
for increment = (* (range-size range) step)
for offsets = (make-array rank :initial-element 0)
do (setf (aref offsets axis) (- position start))
collect (make-transformation :offsets offsets))))
(if (null inputs)
lazy-array
(apply
#'lazy-fuse
(mapcar #'lazy-reshape inputs transformations))))))
| null | https://raw.githubusercontent.com/marcoheisig/Petalisp/834e972cc9c10d08b24be033de6772d36fe87960/code/api/lazy-stack.lisp | lisp | Check that the axis is valid.
Check that all supplied arrays have the same rank, and the same
Determine the step size.
Now stack the arrays. | © 2016 - 2023 - license : GNU AGPLv3 -*- coding : utf-8 -*-
(in-package #:petalisp.api)
(defun lazy-stack (axis array &rest more-arrays)
(let* ((arrays (list* array more-arrays))
(lazy-array (lazy-array array))
(more-lazy-arrays (mapcar #'lazy-array more-arrays))
(lazy-arrays (list* lazy-array more-lazy-arrays))
(rank (lazy-array-rank lazy-array))
(step 1))
(unless (and (integerp axis) (< -1 axis rank))
(error "~@<Invalid stack axis ~S for the array ~S.~:@>"
axis array))
shape in all but the specified AXIS .
(loop for other-lazy-array in more-lazy-arrays for index from 0 do
(unless (= (lazy-array-rank other-lazy-array) rank)
(error "~@<Cannot stack arrays with varying ranks, got ~S and ~S.~:@>"
array (nth index arrays)))
(loop for range1 in (lazy-array-ranges lazy-array)
for range2 in (lazy-array-ranges other-lazy-array)
for pos below rank
when (/= pos axis)
do (unless (range= range1 range2)
(error "~@<Arrays being stacked must only differ in the specified axis, ~
but the arrays ~S and ~S also differ in axis ~D.~:@>"
array (nth index arrays) pos))))
(loop for lazy-array in lazy-arrays
for index from 0
For error reporting , we track the position of the first array
with more than one element along AXIS .
with pos = nil
do (let ((range (shape-range (lazy-array-shape lazy-array) axis)))
(when (> (range-size range) 1)
(unless (null pos)
(unless (= step (range-step range))
(error "~@<Cannot stack arrays with varying step sizes, got ~S and ~S.~:@>"
(nth pos (list* array more-arrays))
(nth index (list* array more-arrays)))))
(setf step (range-step range))
(setf pos index))))
(let* ((inputs (remove 0 lazy-arrays :key #'lazy-array-size))
(transformations
(loop for input in inputs
for range = (lazy-array-range input axis)
for start = (range-start range)
for position = start then (+ position increment)
for increment = (* (range-size range) step)
for offsets = (make-array rank :initial-element 0)
do (setf (aref offsets axis) (- position start))
collect (make-transformation :offsets offsets))))
(if (null inputs)
lazy-array
(apply
#'lazy-fuse
(mapcar #'lazy-reshape inputs transformations))))))
|
5ed2e367f94bf17b50ab90cc407f3ac115990ae01ca67b112bc93aab056b004b | AdaCore/why3 | eliminate_unused.ml | open Decl
open Task
open Term
let term_lsymbols =
(* compute lsymbols of a term *)
let rec aux acc (t: term) =
match t.t_node with
| Tapp (ls, _) -> Term.t_fold aux (Term.Sls.add ls acc) t
| _ -> Term.t_fold aux acc t
in
aux Sls.empty
let defn_symbols defn =
let _, def = open_ls_defn defn in
term_lsymbols def
let compute_needed_td (tdl, needed_symbols) td =
match td.Theory.td_node with
| Theory.Decl d ->
begin
match d.d_node with
| Dtype _ | Ddata _ | Dind _ -> td :: tdl, needed_symbols
| Dparam ls ->
if Sls.mem ls needed_symbols then
td :: tdl, Sls.remove ls needed_symbols
else
tdl, needed_symbols
| Dlogic lls ->
if List.exists (fun (ls, _) -> Sls.mem ls needed_symbols) lls then
let needed_symbols =
List.fold_left (fun acc (_, defn) ->
Sls.union acc (defn_symbols defn)) needed_symbols lls in
let needed_symbols =
List.fold_left (fun acc (ls, _) ->
Sls.remove ls acc) needed_symbols lls in
td :: tdl, needed_symbols
else tdl, needed_symbols
| Dprop (_, _, t) ->
let needed_symbols = Sls.union needed_symbols (term_lsymbols t) in
td :: tdl, needed_symbols
end
| _ -> td :: tdl, needed_symbols
let eliminate_unused = Trans.store (fun task ->
let tdl, _ = task_fold compute_needed_td ([], Sls.empty) task in
List.fold_left add_tdecl None tdl)
let () = Trans.register_transform "eliminate_unused" eliminate_unused
~desc:"eliminate unused symbols"
| null | https://raw.githubusercontent.com/AdaCore/why3/a07f5a6a593a545047d0fe4073bf458b935aa10c/src/transform/eliminate_unused.ml | ocaml | compute lsymbols of a term | open Decl
open Task
open Term
let term_lsymbols =
let rec aux acc (t: term) =
match t.t_node with
| Tapp (ls, _) -> Term.t_fold aux (Term.Sls.add ls acc) t
| _ -> Term.t_fold aux acc t
in
aux Sls.empty
let defn_symbols defn =
let _, def = open_ls_defn defn in
term_lsymbols def
let compute_needed_td (tdl, needed_symbols) td =
match td.Theory.td_node with
| Theory.Decl d ->
begin
match d.d_node with
| Dtype _ | Ddata _ | Dind _ -> td :: tdl, needed_symbols
| Dparam ls ->
if Sls.mem ls needed_symbols then
td :: tdl, Sls.remove ls needed_symbols
else
tdl, needed_symbols
| Dlogic lls ->
if List.exists (fun (ls, _) -> Sls.mem ls needed_symbols) lls then
let needed_symbols =
List.fold_left (fun acc (_, defn) ->
Sls.union acc (defn_symbols defn)) needed_symbols lls in
let needed_symbols =
List.fold_left (fun acc (ls, _) ->
Sls.remove ls acc) needed_symbols lls in
td :: tdl, needed_symbols
else tdl, needed_symbols
| Dprop (_, _, t) ->
let needed_symbols = Sls.union needed_symbols (term_lsymbols t) in
td :: tdl, needed_symbols
end
| _ -> td :: tdl, needed_symbols
let eliminate_unused = Trans.store (fun task ->
let tdl, _ = task_fold compute_needed_td ([], Sls.empty) task in
List.fold_left add_tdecl None tdl)
let () = Trans.register_transform "eliminate_unused" eliminate_unused
~desc:"eliminate unused symbols"
|
b5f0fa1fb53a88a179e930fa7b99443a4c47087154bb305dbdeb23637f699ca1 | input-output-hk/io-sim | Trace.hs | # LANGUAGE DeriveFunctor #
module Data.List.Trace
( Trace (..)
, ppTrace
, toList
, fromList
, head
, tail
, filter
, length
) where
import Prelude hiding (filter, head, length, tail)
import Control.Applicative (Alternative (..))
import Control.Monad (MonadPlus (..))
import Control.Monad.Fix (MonadFix (..), fix)
import Data.Bifoldable
import Data.Bifunctor
import Data.Bitraversable
import Data.Functor.Classes
-- | A 'cons' list with polymorphic 'nil', thus an octopus.
--
-- * @'Trace' Void a@ is an infinite stream
-- * @'Trace' () a@ is isomorphic to @[a]@
--
-- Usually used with @a@ being a non empty sum type.
--
data Trace a b
= Cons b (Trace a b)
| Nil a
deriving (Show, Eq, Ord, Functor)
head :: Trace a b -> b
head (Cons b _) = b
head _ = error "Trace.head: empty"
tail :: Trace a b -> Trace a b
tail (Cons _ o) = o
tail Nil {} = error "Trace.tail: empty"
filter :: (b -> Bool) -> Trace a b -> Trace a b
filter _fn o@Nil {} = o
filter fn (Cons b o) =
case fn b of
True -> Cons b (filter fn o)
False -> filter fn o
length :: Trace a b -> Int
length (Cons _ o) = (+) 1 $! length o
length Nil {} = 0
toList :: Trace a b -> [b]
toList = bifoldr (\_ bs -> bs) (:) []
fromList :: a -> [b] -> Trace a b
fromList a = foldr Cons (Nil a)
-- | Pretty print an 'Trace'.
--
ppTrace :: (a -> String) -> (b -> String) -> Trace a b -> String
ppTrace sa sb (Cons b bs) = sb b ++ "\n" ++ ppTrace sa sb bs
ppTrace sa _sb (Nil a) = sa a
instance Bifunctor Trace where
bimap f g (Cons b bs) = Cons (g b) (bimap f g bs)
bimap f _ (Nil a) = Nil (f a)
instance Bifoldable Trace where
bifoldMap f g (Cons b bs) = g b <> bifoldMap f g bs
bifoldMap f _ (Nil a) = f a
bifoldr f g c = go
where
go (Cons b bs) = b `g` go bs
go (Nil a) = a `f` c
# INLINE[0 ] bifoldr #
bifoldl f g = go
where
go c (Cons b bs) = go (c `g` b) bs
go c (Nil a) = c `f` a
{-# INLINE[0] bifoldl #-}
instance Bitraversable Trace where
bitraverse f g (Cons b bs) = Cons <$> g b <*> bitraverse f g bs
bitraverse f _ (Nil a) = Nil <$> f a
instance Semigroup a => Semigroup (Trace a b) where
Cons b o <> o' = Cons b (o <> o')
o@Nil {} <> (Cons b o') = Cons b (o <> o')
Nil a <> Nil a' = Nil (a <> a')
instance Monoid a => Monoid (Trace a b) where
mempty = Nil mempty
instance Monoid a => Applicative (Trace a) where
pure b = Cons b (Nil mempty)
Cons f fs <*> o = fmap f o <> (fs <*> o)
Nil a <*> _ = Nil a
instance Monoid a => Monad (Trace a) where
return = pure
@bifoldMap is the @join@ of a@
o >>= f = bifoldMap Nil id $ fmap f o
instance Monoid a => MonadFail (Trace a) where
fail _ = mzero
instance Monoid a => Alternative (Trace a) where
empty = mempty
(<|>) = (<>)
instance Monoid a => MonadPlus (Trace a) where
mzero = mempty
mplus = (<>)
instance Monoid a => MonadFix (Trace a) where
mfix f = case fix (f . head) of
o@Nil {} -> o
Cons b _ -> Cons b (mfix (tail . f))
instance Eq a => Eq1 (Trace a) where
liftEq f (Cons b o) (Cons b' o') = f b b' && liftEq f o o'
liftEq _ Nil {} Cons {} = False
liftEq _ Cons {} Nil {} = False
liftEq _ (Nil a) (Nil a') = a == a'
instance Ord a => Ord1 (Trace a) where
liftCompare f (Cons b o) (Cons b' o') = f b b' `compare` liftCompare f o o'
liftCompare _ Nil {} Cons {} = LT
liftCompare _ Cons {} Nil {} = GT
liftCompare _ (Nil a) (Nil a') = a `compare` a'
instance Show a => Show1 (Trace a) where
liftShowsPrec showsPrec_ showsList_ prec (Cons b o)
= showString "Cons "
. showsPrec_ prec b
. showChar ' '
. showParen True (liftShowsPrec showsPrec_ showsList_ prec o)
liftShowsPrec _showsPrec _showsList _prec (Nil a)
= showString "Nil "
. shows a
| null | https://raw.githubusercontent.com/input-output-hk/io-sim/15a46359d92c2f6a63fcf4671bf5ef4be0c75c35/io-sim/src/Data/List/Trace.hs | haskell | | A 'cons' list with polymorphic 'nil', thus an octopus.
* @'Trace' Void a@ is an infinite stream
* @'Trace' () a@ is isomorphic to @[a]@
Usually used with @a@ being a non empty sum type.
| Pretty print an 'Trace'.
# INLINE[0] bifoldl # | # LANGUAGE DeriveFunctor #
module Data.List.Trace
( Trace (..)
, ppTrace
, toList
, fromList
, head
, tail
, filter
, length
) where
import Prelude hiding (filter, head, length, tail)
import Control.Applicative (Alternative (..))
import Control.Monad (MonadPlus (..))
import Control.Monad.Fix (MonadFix (..), fix)
import Data.Bifoldable
import Data.Bifunctor
import Data.Bitraversable
import Data.Functor.Classes
data Trace a b
= Cons b (Trace a b)
| Nil a
deriving (Show, Eq, Ord, Functor)
head :: Trace a b -> b
head (Cons b _) = b
head _ = error "Trace.head: empty"
tail :: Trace a b -> Trace a b
tail (Cons _ o) = o
tail Nil {} = error "Trace.tail: empty"
filter :: (b -> Bool) -> Trace a b -> Trace a b
filter _fn o@Nil {} = o
filter fn (Cons b o) =
case fn b of
True -> Cons b (filter fn o)
False -> filter fn o
length :: Trace a b -> Int
length (Cons _ o) = (+) 1 $! length o
length Nil {} = 0
toList :: Trace a b -> [b]
toList = bifoldr (\_ bs -> bs) (:) []
fromList :: a -> [b] -> Trace a b
fromList a = foldr Cons (Nil a)
ppTrace :: (a -> String) -> (b -> String) -> Trace a b -> String
ppTrace sa sb (Cons b bs) = sb b ++ "\n" ++ ppTrace sa sb bs
ppTrace sa _sb (Nil a) = sa a
instance Bifunctor Trace where
bimap f g (Cons b bs) = Cons (g b) (bimap f g bs)
bimap f _ (Nil a) = Nil (f a)
instance Bifoldable Trace where
bifoldMap f g (Cons b bs) = g b <> bifoldMap f g bs
bifoldMap f _ (Nil a) = f a
bifoldr f g c = go
where
go (Cons b bs) = b `g` go bs
go (Nil a) = a `f` c
# INLINE[0 ] bifoldr #
bifoldl f g = go
where
go c (Cons b bs) = go (c `g` b) bs
go c (Nil a) = c `f` a
instance Bitraversable Trace where
bitraverse f g (Cons b bs) = Cons <$> g b <*> bitraverse f g bs
bitraverse f _ (Nil a) = Nil <$> f a
instance Semigroup a => Semigroup (Trace a b) where
Cons b o <> o' = Cons b (o <> o')
o@Nil {} <> (Cons b o') = Cons b (o <> o')
Nil a <> Nil a' = Nil (a <> a')
instance Monoid a => Monoid (Trace a b) where
mempty = Nil mempty
instance Monoid a => Applicative (Trace a) where
pure b = Cons b (Nil mempty)
Cons f fs <*> o = fmap f o <> (fs <*> o)
Nil a <*> _ = Nil a
instance Monoid a => Monad (Trace a) where
return = pure
@bifoldMap is the @join@ of a@
o >>= f = bifoldMap Nil id $ fmap f o
instance Monoid a => MonadFail (Trace a) where
fail _ = mzero
instance Monoid a => Alternative (Trace a) where
empty = mempty
(<|>) = (<>)
instance Monoid a => MonadPlus (Trace a) where
mzero = mempty
mplus = (<>)
instance Monoid a => MonadFix (Trace a) where
mfix f = case fix (f . head) of
o@Nil {} -> o
Cons b _ -> Cons b (mfix (tail . f))
instance Eq a => Eq1 (Trace a) where
liftEq f (Cons b o) (Cons b' o') = f b b' && liftEq f o o'
liftEq _ Nil {} Cons {} = False
liftEq _ Cons {} Nil {} = False
liftEq _ (Nil a) (Nil a') = a == a'
instance Ord a => Ord1 (Trace a) where
liftCompare f (Cons b o) (Cons b' o') = f b b' `compare` liftCompare f o o'
liftCompare _ Nil {} Cons {} = LT
liftCompare _ Cons {} Nil {} = GT
liftCompare _ (Nil a) (Nil a') = a `compare` a'
instance Show a => Show1 (Trace a) where
liftShowsPrec showsPrec_ showsList_ prec (Cons b o)
= showString "Cons "
. showsPrec_ prec b
. showChar ' '
. showParen True (liftShowsPrec showsPrec_ showsList_ prec o)
liftShowsPrec _showsPrec _showsList _prec (Nil a)
= showString "Nil "
. shows a
|
6e485c46ddab31c64ed908ecde56579acc32611b77f5f47d5a179a656b4e1761 | rbkmoney/fistful-server | ff_transfer_SUITE.erl | -module(ff_transfer_SUITE).
-include_lib("fistful_proto/include/ff_proto_fistful_admin_thrift.hrl").
-include_lib("fistful_proto/include/ff_proto_withdrawal_thrift.hrl").
-include_lib("damsel/include/dmsl_domain_thrift.hrl").
-export([all/0]).
-export([groups/0]).
-export([init_per_suite/1]).
-export([end_per_suite/1]).
-export([init_per_group/2]).
-export([end_per_group/2]).
-export([init_per_testcase/2]).
-export([end_per_testcase/2]).
-export([get_missing_fails/1]).
-export([deposit_via_admin_ok/1]).
-export([deposit_via_admin_fails/1]).
-export([deposit_via_admin_amount_fails/1]).
-export([deposit_via_admin_currency_fails/1]).
-export([deposit_withdrawal_ok/1]).
-export([deposit_quote_withdrawal_ok/1]).
-export([deposit_withdrawal_to_crypto_wallet/1]).
-export([deposit_withdrawal_to_digital_wallet/1]).
-type config() :: ct_helper:config().
-type test_case_name() :: ct_helper:test_case_name().
-type group_name() :: ct_helper:group_name().
-type test_return() :: _ | no_return().
-spec all() -> [test_case_name() | {group, group_name()}].
all() ->
[{group, default}].
-spec groups() -> [{group_name(), list(), [test_case_name()]}].
groups() ->
[
{default, [parallel], [
get_missing_fails,
deposit_via_admin_ok,
deposit_via_admin_fails,
deposit_via_admin_amount_fails,
deposit_via_admin_currency_fails,
deposit_withdrawal_ok,
deposit_quote_withdrawal_ok,
deposit_withdrawal_to_crypto_wallet,
deposit_withdrawal_to_digital_wallet
]}
].
-spec init_per_suite(config()) -> config().
init_per_suite(C) ->
ct_helper:makeup_cfg(
[
ct_helper:test_case_name(init),
ct_payment_system:setup()
],
C
).
-spec end_per_suite(config()) -> _.
end_per_suite(C) ->
ok = ct_payment_system:shutdown(C).
%%
-spec init_per_group(group_name(), config()) -> config().
init_per_group(_, C) ->
C.
-spec end_per_group(group_name(), config()) -> _.
end_per_group(_, _) ->
ok.
%%
-spec init_per_testcase(test_case_name(), config()) -> config().
init_per_testcase(Name, C) ->
C1 = ct_helper:makeup_cfg([ct_helper:test_case_name(Name), ct_helper:woody_ctx()], C),
ok = ct_helper:set_context(C1),
C1.
-spec end_per_testcase(test_case_name(), config()) -> _.
end_per_testcase(_Name, _C) ->
ok = ct_helper:unset_context().
%%
-spec get_missing_fails(config()) -> test_return().
-spec deposit_via_admin_ok(config()) -> test_return().
-spec deposit_via_admin_fails(config()) -> test_return().
-spec deposit_via_admin_amount_fails(config()) -> test_return().
-spec deposit_via_admin_currency_fails(config()) -> test_return().
-spec deposit_withdrawal_ok(config()) -> test_return().
-spec deposit_withdrawal_to_crypto_wallet(config()) -> test_return().
-spec deposit_withdrawal_to_digital_wallet(config()) -> test_return().
-spec deposit_quote_withdrawal_ok(config()) -> test_return().
get_missing_fails(_C) ->
ID = genlib:unique(),
{error, {unknown_withdrawal, ID}} = ff_withdrawal_machine:get(ID).
deposit_via_admin_ok(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = genlib:unique(),
DepID = genlib:unique(),
% Create source
{ok, Src1} = call_admin(
'CreateSource',
{
#ff_admin_SourceParams{
id = SrcID,
name = <<"HAHA NO">>,
identity_id = IID,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>},
resource = {internal, #src_Internal{details = <<"Infinite source of cash">>}}
}
}
),
SrcID = Src1#src_Source.id,
{authorized, #src_Authorized{}} = ct_helper:await(
{authorized, #src_Authorized{}},
fun() ->
{ok, Src} = call_admin('GetSource', {SrcID}),
Src#src_Source.status
end
),
% Process deposit
{ok, Dep1} = call_admin(
'CreateDeposit',
{
#ff_admin_DepositParams{
id = DepID,
source = SrcID,
destination = WalID,
body = #'Cash'{
amount = 20000,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>}
}
}
}
),
DepID = Dep1#deposit_Deposit.id,
{pending, _} = Dep1#deposit_Deposit.status,
succeeded = ct_helper:await(
succeeded,
fun() ->
{ok, Dep} = call_admin('GetDeposit', {DepID}),
{Status, _} = Dep#deposit_Deposit.status,
Status
end,
genlib_retry:linear(15, 1000)
),
ok = await_wallet_balance({20000, <<"RUB">>}, WalID).
deposit_via_admin_fails(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = genlib:unique(),
DepID = genlib:unique(),
% Create source
{ok, Src1} = call_admin(
'CreateSource',
{
#ff_admin_SourceParams{
id = SrcID,
name = <<"HAHA NO">>,
identity_id = IID,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>},
resource = {internal, #src_Internal{details = <<"Infinite source of cash">>}}
}
}
),
SrcID = Src1#src_Source.id,
{authorized, #src_Authorized{}} = ct_helper:await(
{authorized, #src_Authorized{}},
fun() ->
{ok, Src} = call_admin('GetSource', {SrcID}),
Src#src_Source.status
end
),
{ok, Dep1} = call_admin(
'CreateDeposit',
{
#ff_admin_DepositParams{
id = DepID,
source = SrcID,
destination = WalID,
body = #'Cash'{
amount = 10000002,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>}
}
}
}
),
DepID = Dep1#deposit_Deposit.id,
{pending, _} = Dep1#deposit_Deposit.status,
failed = ct_helper:await(
failed,
fun() ->
{ok, Dep} = call_admin('GetDeposit', {DepID}),
{Status, _} = Dep#deposit_Deposit.status,
Status
end,
genlib_retry:linear(15, 1000)
),
ok = await_wallet_balance({0, <<"RUB">>}, WalID).
deposit_via_admin_amount_fails(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = genlib:unique(),
DepID = genlib:unique(),
% Create source
{ok, _Src1} = call_admin(
'CreateSource',
{
#ff_admin_SourceParams{
id = SrcID,
name = <<"HAHA NO">>,
identity_id = IID,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>},
resource = {internal, #src_Internal{details = <<"Infinite source of cash">>}}
}
}
),
{authorized, #src_Authorized{}} = ct_helper:await(
{authorized, #src_Authorized{}},
fun() ->
{ok, Src} = call_admin('GetSource', {SrcID}),
Src#src_Source.status
end
),
{exception, #ff_admin_DepositAmountInvalid{}} = call_admin(
'CreateDeposit',
{
#ff_admin_DepositParams{
id = DepID,
source = SrcID,
destination = WalID,
body = #'Cash'{
amount = -1,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>}
}
}
}
),
ok = await_wallet_balance({0, <<"RUB">>}, WalID).
deposit_via_admin_currency_fails(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = genlib:unique(),
DepID = genlib:unique(),
% Create source
{ok, Src1} = call_admin(
'CreateSource',
{
#ff_admin_SourceParams{
id = SrcID,
name = <<"HAHA NO">>,
identity_id = IID,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>},
resource = {internal, #src_Internal{details = <<"Infinite source of cash">>}}
}
}
),
SrcID = Src1#src_Source.id,
{authorized, #src_Authorized{}} = ct_helper:await(
{authorized, #src_Authorized{}},
fun() ->
{ok, Src} = call_admin('GetSource', {SrcID}),
Src#src_Source.status
end
),
BadCurrency = <<"CAT">>,
{exception, #ff_admin_DepositCurrencyInvalid{}} = call_admin(
'CreateDeposit',
{
#ff_admin_DepositParams{
id = DepID,
source = SrcID,
destination = WalID,
body = #'Cash'{
amount = 1000,
currency = #'CurrencyRef'{symbolic_code = BadCurrency}
}
}
}
),
ok = await_wallet_balance({0, <<"RUB">>}, WalID).
deposit_withdrawal_ok(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = create_source(IID, C),
ok = process_deposit(SrcID, WalID),
DestID = create_destination(IID, C),
WdrID = process_withdrawal(WalID, DestID),
Events = get_withdrawal_events(WdrID),
[1] = route_changes(Events).
deposit_withdrawal_to_crypto_wallet(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"WalletName">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = create_source(IID, C),
ok = process_deposit(SrcID, WalID),
DestID = create_crypto_destination(IID, C),
WdrID = process_withdrawal(WalID, DestID),
Events = get_withdrawal_events(WdrID),
[2] = route_changes(Events).
deposit_withdrawal_to_digital_wallet(C) ->
Party = create_party(C),
IID = create_identity(Party, <<"good-two">>, C),
WalID = create_wallet(IID, <<"WalletName">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = create_source(IID, C),
ok = process_deposit(SrcID, WalID),
DestID = create_digital_destination(IID, C),
WdrID = process_withdrawal(WalID, DestID),
Events = get_withdrawal_events(WdrID),
[3] = route_changes(Events).
deposit_quote_withdrawal_ok(C) ->
Party = create_party(C),
IID = create_identity(Party, <<"good-two">>, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = create_source(IID, C),
ok = process_deposit(SrcID, WalID),
DestID = create_destination(IID, C),
DomainRevision = ff_domain_config:head(),
{ok, PartyRevision} = ff_party:get_revision(Party),
WdrID = process_withdrawal(WalID, DestID, #{
wallet_id => WalID,
destination_id => DestID,
body => {4240, <<"RUB">>},
quote => #{
cash_from => {4240, <<"RUB">>},
cash_to => {2120, <<"USD">>},
created_at => <<"2016-03-22T06:12:27Z">>,
expires_on => <<"2016-03-22T06:12:27Z">>,
quote_data => #{<<"test">> => <<"test">>},
route => ff_withdrawal_routing:make_route(3, 1),
domain_revision => DomainRevision,
party_revision => PartyRevision
}
}),
Events = get_withdrawal_events(WdrID),
[3] = route_changes(Events).
create_party(_C) ->
ID = genlib:bsuuid(),
_ = ff_party:create(ID),
ID.
create_identity(Party, C) ->
create_identity(Party, <<"good-one">>, C).
create_identity(Party, ProviderID, C) ->
create_identity(Party, <<"Identity Name">>, ProviderID, C).
create_identity(Party, Name, ProviderID, _C) ->
ID = genlib:unique(),
ok = ff_identity_machine:create(
#{id => ID, name => Name, party => Party, provider => ProviderID},
#{<<"com.rbkmoney.wapi">> => #{<<"name">> => Name}}
),
ID.
create_wallet(IdentityID, Name, Currency, _C) ->
ID = genlib:unique(),
ok = ff_wallet_machine:create(
#{id => ID, identity => IdentityID, name => Name, currency => Currency},
ff_entity_context:new()
),
ID.
await_wallet_balance({Amount, Currency}, ID) ->
Balance = {Amount, {{inclusive, Amount}, {inclusive, Amount}}, Currency},
Balance = ct_helper:await(
Balance,
fun() -> get_wallet_balance(ID) end,
genlib_retry:linear(3, 500)
),
ok.
await_destination_balance({Amount, Currency}, ID) ->
Balance = {Amount, {{inclusive, Amount}, {inclusive, Amount}}, Currency},
Balance = ct_helper:await(
Balance,
fun() -> get_destination_balance(ID) end,
genlib_retry:linear(3, 500)
),
ok.
get_wallet_balance(ID) ->
{ok, Machine} = ff_wallet_machine:get(ID),
get_account_balance(ff_wallet:account(ff_wallet_machine:wallet(Machine))).
get_destination_balance(ID) ->
{ok, Machine} = ff_destination_machine:get(ID),
Destination = ff_destination_machine:destination(Machine),
get_account_balance(ff_destination:account(Destination)).
get_account_balance(Account) ->
{ok, {Amounts, Currency}} = ff_transaction:balance(
Account,
ff_clock:latest_clock()
),
{ff_indef:current(Amounts), ff_indef:to_range(Amounts), Currency}.
create_source(IdentityID, Name, Currency, Resource) ->
ID = genlib:unique(),
ok = ff_source_machine:create(
#{id => ID, identity => IdentityID, name => Name, currency => Currency, resource => Resource},
ff_entity_context:new()
),
ID.
create_destination(IdentityID, Name, Currency, Resource) ->
ID = genlib:unique(),
ok = ff_destination_machine:create(
#{id => ID, identity => IdentityID, name => Name, currency => Currency, resource => Resource},
ff_entity_context:new()
),
ID.
generate_id() ->
genlib:to_binary(genlib_time:ticks()).
call_admin(Fun, Args) ->
Service = {ff_proto_fistful_admin_thrift, 'FistfulAdmin'},
Request = {Service, Fun, Args},
Client = ff_woody_client:new(#{
url => <<":8022/v1/admin">>,
event_handler => scoper_woody_event_handler
}),
ff_woody_client:call(Client, Request).
create_source(IID, _C) ->
SrcResource = #{type => internal, details => <<"Infinite source of cash">>},
SrcID = create_source(IID, <<"XSource">>, <<"RUB">>, SrcResource),
authorized = ct_helper:await(
authorized,
fun() ->
{ok, SrcM} = ff_source_machine:get(SrcID),
Source = ff_source_machine:source(SrcM),
ff_source:status(Source)
end
),
SrcID.
process_deposit(SrcID, WalID) ->
DepID = generate_id(),
ok = ff_deposit_machine:create(
#{id => DepID, source_id => SrcID, wallet_id => WalID, body => {10000, <<"RUB">>}},
ff_entity_context:new()
),
succeeded = ct_helper:await(
succeeded,
fun() ->
{ok, DepM} = ff_deposit_machine:get(DepID),
ff_deposit:status(ff_deposit_machine:deposit(DepM))
end,
genlib_retry:linear(15, 1000)
),
await_wallet_balance({10000, <<"RUB">>}, WalID).
create_destination(IID, C) ->
DestResource = {bank_card, #{bank_card => ct_cardstore:bank_card(<<"4150399999000900">>, {12, 2025}, C)}},
DestID = create_destination(IID, <<"XDesination">>, <<"RUB">>, DestResource),
authorized = ct_helper:await(
authorized,
fun() ->
{ok, DestM} = ff_destination_machine:get(DestID),
Destination = ff_destination_machine:destination(DestM),
ff_destination:status(Destination)
end
),
DestID.
create_crypto_destination(IID, _C) ->
Resource =
{crypto_wallet, #{
crypto_wallet => #{
id => <<"a30e277c07400c9940628828949efd48">>,
currency => {litecoin, #{}}
}
}},
DestID = create_destination(IID, <<"CryptoDestination">>, <<"RUB">>, Resource),
authorized = ct_helper:await(
authorized,
fun() ->
{ok, DestM} = ff_destination_machine:get(DestID),
Destination = ff_destination_machine:destination(DestM),
ff_destination:status(Destination)
end
),
DestID.
create_digital_destination(IID, _C) ->
Resource =
{digital_wallet, #{
digital_wallet => #{
id => <<"a30e277c07400c9940628828949efd48">>,
data => {webmoney, #{}}
}
}},
DestID = create_destination(IID, <<"DigitalDestination">>, <<"RUB">>, Resource),
authorized = ct_helper:await(
authorized,
fun() ->
{ok, DestM} = ff_destination_machine:get(DestID),
Destination = ff_destination_machine:destination(DestM),
ff_destination:status(Destination)
end
),
DestID.
process_withdrawal(WalID, DestID) ->
process_withdrawal(WalID, DestID, #{wallet_id => WalID, destination_id => DestID, body => {4240, <<"RUB">>}}).
process_withdrawal(WalID, DestID, Params) ->
WdrID = generate_id(),
ok = ff_withdrawal_machine:create(
Params#{id => WdrID},
ff_entity_context:new()
),
succeeded = ct_helper:await(
succeeded,
fun() ->
{ok, WdrM} = ff_withdrawal_machine:get(WdrID),
ff_withdrawal:status(ff_withdrawal_machine:withdrawal(WdrM))
end,
genlib_retry:linear(15, 1000)
),
ok = await_wallet_balance({10000 - 4240, <<"RUB">>}, WalID),
ok = await_destination_balance({4240 - 848, <<"RUB">>}, DestID),
WdrID.
%%%
get_withdrawal_events(WdrID) ->
Service = {{ff_proto_withdrawal_thrift, 'Management'}, <<"/v1/withdrawal">>},
{ok, Events} = call('GetEvents', Service, {WdrID, #'EventRange'{'after' = 0, limit = 1000}}),
Events.
call(Function, Service, Args) ->
call(Function, Service, Args, <<"8022">>).
call(Function, {Service, Path}, Args, Port) ->
Request = {Service, Function, Args},
Client = ff_woody_client:new(#{
url => <<":", Port/binary, Path/binary>>,
event_handler => scoper_woody_event_handler
}),
ff_woody_client:call(Client, Request).
route_changes(Events) ->
lists:filtermap(
fun
(#wthd_Event{change = {route, RouteChange}}) ->
#wthd_RouteChange{route = #wthd_Route{provider_id = ProviderID}} = RouteChange,
{true, ProviderID};
(_Other) ->
false
end,
Events
).
| null | https://raw.githubusercontent.com/rbkmoney/fistful-server/f6155acb0475987e47a4fbc911758c595e129c80/apps/ff_transfer/test/ff_transfer_SUITE.erl | erlang |
Create source
Process deposit
Create source
Create source
Create source
| -module(ff_transfer_SUITE).
-include_lib("fistful_proto/include/ff_proto_fistful_admin_thrift.hrl").
-include_lib("fistful_proto/include/ff_proto_withdrawal_thrift.hrl").
-include_lib("damsel/include/dmsl_domain_thrift.hrl").
-export([all/0]).
-export([groups/0]).
-export([init_per_suite/1]).
-export([end_per_suite/1]).
-export([init_per_group/2]).
-export([end_per_group/2]).
-export([init_per_testcase/2]).
-export([end_per_testcase/2]).
-export([get_missing_fails/1]).
-export([deposit_via_admin_ok/1]).
-export([deposit_via_admin_fails/1]).
-export([deposit_via_admin_amount_fails/1]).
-export([deposit_via_admin_currency_fails/1]).
-export([deposit_withdrawal_ok/1]).
-export([deposit_quote_withdrawal_ok/1]).
-export([deposit_withdrawal_to_crypto_wallet/1]).
-export([deposit_withdrawal_to_digital_wallet/1]).
-type config() :: ct_helper:config().
-type test_case_name() :: ct_helper:test_case_name().
-type group_name() :: ct_helper:group_name().
-type test_return() :: _ | no_return().
-spec all() -> [test_case_name() | {group, group_name()}].
all() ->
[{group, default}].
-spec groups() -> [{group_name(), list(), [test_case_name()]}].
groups() ->
[
{default, [parallel], [
get_missing_fails,
deposit_via_admin_ok,
deposit_via_admin_fails,
deposit_via_admin_amount_fails,
deposit_via_admin_currency_fails,
deposit_withdrawal_ok,
deposit_quote_withdrawal_ok,
deposit_withdrawal_to_crypto_wallet,
deposit_withdrawal_to_digital_wallet
]}
].
-spec init_per_suite(config()) -> config().
init_per_suite(C) ->
ct_helper:makeup_cfg(
[
ct_helper:test_case_name(init),
ct_payment_system:setup()
],
C
).
-spec end_per_suite(config()) -> _.
end_per_suite(C) ->
ok = ct_payment_system:shutdown(C).
-spec init_per_group(group_name(), config()) -> config().
init_per_group(_, C) ->
C.
-spec end_per_group(group_name(), config()) -> _.
end_per_group(_, _) ->
ok.
-spec init_per_testcase(test_case_name(), config()) -> config().
init_per_testcase(Name, C) ->
C1 = ct_helper:makeup_cfg([ct_helper:test_case_name(Name), ct_helper:woody_ctx()], C),
ok = ct_helper:set_context(C1),
C1.
-spec end_per_testcase(test_case_name(), config()) -> _.
end_per_testcase(_Name, _C) ->
ok = ct_helper:unset_context().
-spec get_missing_fails(config()) -> test_return().
-spec deposit_via_admin_ok(config()) -> test_return().
-spec deposit_via_admin_fails(config()) -> test_return().
-spec deposit_via_admin_amount_fails(config()) -> test_return().
-spec deposit_via_admin_currency_fails(config()) -> test_return().
-spec deposit_withdrawal_ok(config()) -> test_return().
-spec deposit_withdrawal_to_crypto_wallet(config()) -> test_return().
-spec deposit_withdrawal_to_digital_wallet(config()) -> test_return().
-spec deposit_quote_withdrawal_ok(config()) -> test_return().
get_missing_fails(_C) ->
ID = genlib:unique(),
{error, {unknown_withdrawal, ID}} = ff_withdrawal_machine:get(ID).
deposit_via_admin_ok(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = genlib:unique(),
DepID = genlib:unique(),
{ok, Src1} = call_admin(
'CreateSource',
{
#ff_admin_SourceParams{
id = SrcID,
name = <<"HAHA NO">>,
identity_id = IID,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>},
resource = {internal, #src_Internal{details = <<"Infinite source of cash">>}}
}
}
),
SrcID = Src1#src_Source.id,
{authorized, #src_Authorized{}} = ct_helper:await(
{authorized, #src_Authorized{}},
fun() ->
{ok, Src} = call_admin('GetSource', {SrcID}),
Src#src_Source.status
end
),
{ok, Dep1} = call_admin(
'CreateDeposit',
{
#ff_admin_DepositParams{
id = DepID,
source = SrcID,
destination = WalID,
body = #'Cash'{
amount = 20000,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>}
}
}
}
),
DepID = Dep1#deposit_Deposit.id,
{pending, _} = Dep1#deposit_Deposit.status,
succeeded = ct_helper:await(
succeeded,
fun() ->
{ok, Dep} = call_admin('GetDeposit', {DepID}),
{Status, _} = Dep#deposit_Deposit.status,
Status
end,
genlib_retry:linear(15, 1000)
),
ok = await_wallet_balance({20000, <<"RUB">>}, WalID).
deposit_via_admin_fails(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = genlib:unique(),
DepID = genlib:unique(),
{ok, Src1} = call_admin(
'CreateSource',
{
#ff_admin_SourceParams{
id = SrcID,
name = <<"HAHA NO">>,
identity_id = IID,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>},
resource = {internal, #src_Internal{details = <<"Infinite source of cash">>}}
}
}
),
SrcID = Src1#src_Source.id,
{authorized, #src_Authorized{}} = ct_helper:await(
{authorized, #src_Authorized{}},
fun() ->
{ok, Src} = call_admin('GetSource', {SrcID}),
Src#src_Source.status
end
),
{ok, Dep1} = call_admin(
'CreateDeposit',
{
#ff_admin_DepositParams{
id = DepID,
source = SrcID,
destination = WalID,
body = #'Cash'{
amount = 10000002,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>}
}
}
}
),
DepID = Dep1#deposit_Deposit.id,
{pending, _} = Dep1#deposit_Deposit.status,
failed = ct_helper:await(
failed,
fun() ->
{ok, Dep} = call_admin('GetDeposit', {DepID}),
{Status, _} = Dep#deposit_Deposit.status,
Status
end,
genlib_retry:linear(15, 1000)
),
ok = await_wallet_balance({0, <<"RUB">>}, WalID).
deposit_via_admin_amount_fails(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = genlib:unique(),
DepID = genlib:unique(),
{ok, _Src1} = call_admin(
'CreateSource',
{
#ff_admin_SourceParams{
id = SrcID,
name = <<"HAHA NO">>,
identity_id = IID,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>},
resource = {internal, #src_Internal{details = <<"Infinite source of cash">>}}
}
}
),
{authorized, #src_Authorized{}} = ct_helper:await(
{authorized, #src_Authorized{}},
fun() ->
{ok, Src} = call_admin('GetSource', {SrcID}),
Src#src_Source.status
end
),
{exception, #ff_admin_DepositAmountInvalid{}} = call_admin(
'CreateDeposit',
{
#ff_admin_DepositParams{
id = DepID,
source = SrcID,
destination = WalID,
body = #'Cash'{
amount = -1,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>}
}
}
}
),
ok = await_wallet_balance({0, <<"RUB">>}, WalID).
deposit_via_admin_currency_fails(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = genlib:unique(),
DepID = genlib:unique(),
{ok, Src1} = call_admin(
'CreateSource',
{
#ff_admin_SourceParams{
id = SrcID,
name = <<"HAHA NO">>,
identity_id = IID,
currency = #'CurrencyRef'{symbolic_code = <<"RUB">>},
resource = {internal, #src_Internal{details = <<"Infinite source of cash">>}}
}
}
),
SrcID = Src1#src_Source.id,
{authorized, #src_Authorized{}} = ct_helper:await(
{authorized, #src_Authorized{}},
fun() ->
{ok, Src} = call_admin('GetSource', {SrcID}),
Src#src_Source.status
end
),
BadCurrency = <<"CAT">>,
{exception, #ff_admin_DepositCurrencyInvalid{}} = call_admin(
'CreateDeposit',
{
#ff_admin_DepositParams{
id = DepID,
source = SrcID,
destination = WalID,
body = #'Cash'{
amount = 1000,
currency = #'CurrencyRef'{symbolic_code = BadCurrency}
}
}
}
),
ok = await_wallet_balance({0, <<"RUB">>}, WalID).
deposit_withdrawal_ok(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = create_source(IID, C),
ok = process_deposit(SrcID, WalID),
DestID = create_destination(IID, C),
WdrID = process_withdrawal(WalID, DestID),
Events = get_withdrawal_events(WdrID),
[1] = route_changes(Events).
deposit_withdrawal_to_crypto_wallet(C) ->
Party = create_party(C),
IID = create_identity(Party, C),
WalID = create_wallet(IID, <<"WalletName">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = create_source(IID, C),
ok = process_deposit(SrcID, WalID),
DestID = create_crypto_destination(IID, C),
WdrID = process_withdrawal(WalID, DestID),
Events = get_withdrawal_events(WdrID),
[2] = route_changes(Events).
deposit_withdrawal_to_digital_wallet(C) ->
Party = create_party(C),
IID = create_identity(Party, <<"good-two">>, C),
WalID = create_wallet(IID, <<"WalletName">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = create_source(IID, C),
ok = process_deposit(SrcID, WalID),
DestID = create_digital_destination(IID, C),
WdrID = process_withdrawal(WalID, DestID),
Events = get_withdrawal_events(WdrID),
[3] = route_changes(Events).
deposit_quote_withdrawal_ok(C) ->
Party = create_party(C),
IID = create_identity(Party, <<"good-two">>, C),
WalID = create_wallet(IID, <<"HAHA NO">>, <<"RUB">>, C),
ok = await_wallet_balance({0, <<"RUB">>}, WalID),
SrcID = create_source(IID, C),
ok = process_deposit(SrcID, WalID),
DestID = create_destination(IID, C),
DomainRevision = ff_domain_config:head(),
{ok, PartyRevision} = ff_party:get_revision(Party),
WdrID = process_withdrawal(WalID, DestID, #{
wallet_id => WalID,
destination_id => DestID,
body => {4240, <<"RUB">>},
quote => #{
cash_from => {4240, <<"RUB">>},
cash_to => {2120, <<"USD">>},
created_at => <<"2016-03-22T06:12:27Z">>,
expires_on => <<"2016-03-22T06:12:27Z">>,
quote_data => #{<<"test">> => <<"test">>},
route => ff_withdrawal_routing:make_route(3, 1),
domain_revision => DomainRevision,
party_revision => PartyRevision
}
}),
Events = get_withdrawal_events(WdrID),
[3] = route_changes(Events).
create_party(_C) ->
ID = genlib:bsuuid(),
_ = ff_party:create(ID),
ID.
create_identity(Party, C) ->
create_identity(Party, <<"good-one">>, C).
create_identity(Party, ProviderID, C) ->
create_identity(Party, <<"Identity Name">>, ProviderID, C).
create_identity(Party, Name, ProviderID, _C) ->
ID = genlib:unique(),
ok = ff_identity_machine:create(
#{id => ID, name => Name, party => Party, provider => ProviderID},
#{<<"com.rbkmoney.wapi">> => #{<<"name">> => Name}}
),
ID.
create_wallet(IdentityID, Name, Currency, _C) ->
ID = genlib:unique(),
ok = ff_wallet_machine:create(
#{id => ID, identity => IdentityID, name => Name, currency => Currency},
ff_entity_context:new()
),
ID.
await_wallet_balance({Amount, Currency}, ID) ->
Balance = {Amount, {{inclusive, Amount}, {inclusive, Amount}}, Currency},
Balance = ct_helper:await(
Balance,
fun() -> get_wallet_balance(ID) end,
genlib_retry:linear(3, 500)
),
ok.
await_destination_balance({Amount, Currency}, ID) ->
Balance = {Amount, {{inclusive, Amount}, {inclusive, Amount}}, Currency},
Balance = ct_helper:await(
Balance,
fun() -> get_destination_balance(ID) end,
genlib_retry:linear(3, 500)
),
ok.
get_wallet_balance(ID) ->
{ok, Machine} = ff_wallet_machine:get(ID),
get_account_balance(ff_wallet:account(ff_wallet_machine:wallet(Machine))).
get_destination_balance(ID) ->
{ok, Machine} = ff_destination_machine:get(ID),
Destination = ff_destination_machine:destination(Machine),
get_account_balance(ff_destination:account(Destination)).
get_account_balance(Account) ->
{ok, {Amounts, Currency}} = ff_transaction:balance(
Account,
ff_clock:latest_clock()
),
{ff_indef:current(Amounts), ff_indef:to_range(Amounts), Currency}.
create_source(IdentityID, Name, Currency, Resource) ->
ID = genlib:unique(),
ok = ff_source_machine:create(
#{id => ID, identity => IdentityID, name => Name, currency => Currency, resource => Resource},
ff_entity_context:new()
),
ID.
create_destination(IdentityID, Name, Currency, Resource) ->
ID = genlib:unique(),
ok = ff_destination_machine:create(
#{id => ID, identity => IdentityID, name => Name, currency => Currency, resource => Resource},
ff_entity_context:new()
),
ID.
generate_id() ->
genlib:to_binary(genlib_time:ticks()).
call_admin(Fun, Args) ->
Service = {ff_proto_fistful_admin_thrift, 'FistfulAdmin'},
Request = {Service, Fun, Args},
Client = ff_woody_client:new(#{
url => <<":8022/v1/admin">>,
event_handler => scoper_woody_event_handler
}),
ff_woody_client:call(Client, Request).
create_source(IID, _C) ->
SrcResource = #{type => internal, details => <<"Infinite source of cash">>},
SrcID = create_source(IID, <<"XSource">>, <<"RUB">>, SrcResource),
authorized = ct_helper:await(
authorized,
fun() ->
{ok, SrcM} = ff_source_machine:get(SrcID),
Source = ff_source_machine:source(SrcM),
ff_source:status(Source)
end
),
SrcID.
process_deposit(SrcID, WalID) ->
DepID = generate_id(),
ok = ff_deposit_machine:create(
#{id => DepID, source_id => SrcID, wallet_id => WalID, body => {10000, <<"RUB">>}},
ff_entity_context:new()
),
succeeded = ct_helper:await(
succeeded,
fun() ->
{ok, DepM} = ff_deposit_machine:get(DepID),
ff_deposit:status(ff_deposit_machine:deposit(DepM))
end,
genlib_retry:linear(15, 1000)
),
await_wallet_balance({10000, <<"RUB">>}, WalID).
create_destination(IID, C) ->
DestResource = {bank_card, #{bank_card => ct_cardstore:bank_card(<<"4150399999000900">>, {12, 2025}, C)}},
DestID = create_destination(IID, <<"XDesination">>, <<"RUB">>, DestResource),
authorized = ct_helper:await(
authorized,
fun() ->
{ok, DestM} = ff_destination_machine:get(DestID),
Destination = ff_destination_machine:destination(DestM),
ff_destination:status(Destination)
end
),
DestID.
create_crypto_destination(IID, _C) ->
Resource =
{crypto_wallet, #{
crypto_wallet => #{
id => <<"a30e277c07400c9940628828949efd48">>,
currency => {litecoin, #{}}
}
}},
DestID = create_destination(IID, <<"CryptoDestination">>, <<"RUB">>, Resource),
authorized = ct_helper:await(
authorized,
fun() ->
{ok, DestM} = ff_destination_machine:get(DestID),
Destination = ff_destination_machine:destination(DestM),
ff_destination:status(Destination)
end
),
DestID.
create_digital_destination(IID, _C) ->
Resource =
{digital_wallet, #{
digital_wallet => #{
id => <<"a30e277c07400c9940628828949efd48">>,
data => {webmoney, #{}}
}
}},
DestID = create_destination(IID, <<"DigitalDestination">>, <<"RUB">>, Resource),
authorized = ct_helper:await(
authorized,
fun() ->
{ok, DestM} = ff_destination_machine:get(DestID),
Destination = ff_destination_machine:destination(DestM),
ff_destination:status(Destination)
end
),
DestID.
process_withdrawal(WalID, DestID) ->
process_withdrawal(WalID, DestID, #{wallet_id => WalID, destination_id => DestID, body => {4240, <<"RUB">>}}).
process_withdrawal(WalID, DestID, Params) ->
WdrID = generate_id(),
ok = ff_withdrawal_machine:create(
Params#{id => WdrID},
ff_entity_context:new()
),
succeeded = ct_helper:await(
succeeded,
fun() ->
{ok, WdrM} = ff_withdrawal_machine:get(WdrID),
ff_withdrawal:status(ff_withdrawal_machine:withdrawal(WdrM))
end,
genlib_retry:linear(15, 1000)
),
ok = await_wallet_balance({10000 - 4240, <<"RUB">>}, WalID),
ok = await_destination_balance({4240 - 848, <<"RUB">>}, DestID),
WdrID.
get_withdrawal_events(WdrID) ->
Service = {{ff_proto_withdrawal_thrift, 'Management'}, <<"/v1/withdrawal">>},
{ok, Events} = call('GetEvents', Service, {WdrID, #'EventRange'{'after' = 0, limit = 1000}}),
Events.
call(Function, Service, Args) ->
call(Function, Service, Args, <<"8022">>).
call(Function, {Service, Path}, Args, Port) ->
Request = {Service, Function, Args},
Client = ff_woody_client:new(#{
url => <<":", Port/binary, Path/binary>>,
event_handler => scoper_woody_event_handler
}),
ff_woody_client:call(Client, Request).
route_changes(Events) ->
lists:filtermap(
fun
(#wthd_Event{change = {route, RouteChange}}) ->
#wthd_RouteChange{route = #wthd_Route{provider_id = ProviderID}} = RouteChange,
{true, ProviderID};
(_Other) ->
false
end,
Events
).
|
841144bec5556191b79121ec7ea150aee5ff9e01d20eefb4d515ecef050a64eb | rescript-lang/rescript-compiler | typedecl.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
and , projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(**** Typing of type definitions ****)
open Misc
open Asttypes
open Parsetree
open Primitive
open Types
open Typetexp
type native_repr_kind = Unboxed | Untagged
type error =
Repeated_parameter
| Duplicate_constructor of string
| Duplicate_label of string
| Recursive_abbrev of string
| Cycle_in_def of string * type_expr
| Definition_mismatch of type_expr * Includecore.type_mismatch list
| Constraint_failed of type_expr * type_expr
| Inconsistent_constraint of Env.t * (type_expr * type_expr) list
| Type_clash of Env.t * (type_expr * type_expr) list
| Parameters_differ of Path.t * type_expr * type_expr
| Null_arity_external
| Unbound_type_var of type_expr * type_declaration
| Cannot_extend_private_type of Path.t
| Not_extensible_type of Path.t
| Extension_mismatch of Path.t * Includecore.type_mismatch list
| Rebind_wrong_type of Longident.t * Env.t * (type_expr * type_expr) list
| Rebind_mismatch of Longident.t * Path.t * Path.t
| Rebind_private of Longident.t
| Bad_variance of int * (bool * bool * bool) * (bool * bool * bool)
| Unavailable_type_constructor of Path.t
| Bad_fixed_type of string
| Unbound_type_var_ext of type_expr * extension_constructor
| Varying_anonymous
| Val_in_structure
| Bad_immediate_attribute
| Bad_unboxed_attribute of string
| Boxed_and_unboxed
| Nonrec_gadt
open Typedtree
exception Error of Location.t * error
(* Note: do not factor the branches in the following pattern-matching:
the records must be constants for the compiler to do sharing on them.
*)
let get_unboxed_from_attributes sdecl =
let unboxed = Builtin_attributes.has_unboxed sdecl.ptype_attributes in
let boxed = Builtin_attributes.has_boxed sdecl.ptype_attributes in
match boxed, unboxed, !Clflags.unboxed_types with
| true, true, _ -> raise (Error(sdecl.ptype_loc, Boxed_and_unboxed))
| true, false, _ -> unboxed_false_default_false
| false, true, _ -> unboxed_true_default_false
| false, false, false -> unboxed_false_default_true
| false, false, true -> unboxed_true_default_true
(* Enter all declared types in the environment as abstract types *)
let enter_type rec_flag env sdecl id =
let needed =
match rec_flag with
| Asttypes.Nonrecursive ->
begin match sdecl.ptype_kind with
| Ptype_variant scds ->
List.iter (fun cd ->
if cd.pcd_res <> None then raise (Error(cd.pcd_loc, Nonrec_gadt)))
scds
| _ -> ()
end;
Btype.is_row_name (Ident.name id)
| Asttypes.Recursive -> true
in
if not needed then env else
let decl =
{ type_params =
List.map (fun _ -> Btype.newgenvar ()) sdecl.ptype_params;
type_arity = List.length sdecl.ptype_params;
type_kind = Type_abstract;
type_private = sdecl.ptype_private;
type_manifest =
begin match sdecl.ptype_manifest with None -> None
| Some _ -> Some(Ctype.newvar ()) end;
type_variance = List.map (fun _ -> Variance.full) sdecl.ptype_params;
type_newtype_level = None;
type_loc = sdecl.ptype_loc;
type_attributes = sdecl.ptype_attributes;
type_immediate = false;
type_unboxed = unboxed_false_default_false;
}
in
Env.add_type ~check:true id decl env
let update_type temp_env env id loc =
let path = Path.Pident id in
let decl = Env.find_type path temp_env in
match decl.type_manifest with None -> ()
| Some ty ->
let params = List.map (fun _ -> Ctype.newvar ()) decl.type_params in
try Ctype.unify env (Ctype.newconstr path params) ty
with Ctype.Unify trace ->
raise (Error(loc, Type_clash (env, trace)))
We use the Ctype.expand_head_opt version of expand_head to get access
to the manifest type of private abbreviations .
to the manifest type of private abbreviations. *)
let rec get_unboxed_type_representation env ty fuel =
if fuel < 0 then None else
let ty = Ctype.repr (Ctype.expand_head_opt env ty) in
match ty.desc with
| Tconstr (p, args, _) ->
begin match Env.find_type p env with
| exception Not_found -> Some ty
| {type_unboxed = {unboxed = false}} -> Some ty
| {type_params; type_kind =
Type_record ([{ld_type = ty2; _}], _)
| Type_variant [{cd_args = Cstr_tuple [ty2]; _}]
| Type_variant [{cd_args = Cstr_record [{ld_type = ty2; _}]; _}]}
-> get_unboxed_type_representation env
(Ctype.apply env type_params ty2 args) (fuel - 1)
| {type_kind=Type_abstract} -> None
(* This case can occur when checking a recursive unboxed type
declaration. *)
| _ -> assert false (* only the above can be unboxed *)
end
| _ -> Some ty
let get_unboxed_type_representation env ty =
(* Do not give too much fuel: PR#7424 *)
get_unboxed_type_representation env ty 100
;;
Determine if a type definition defines a fixed type . ( PW )
let is_fixed_type sd =
let rec has_row_var sty =
match sty.ptyp_desc with
Ptyp_alias (sty, _) -> has_row_var sty
| Ptyp_class _
| Ptyp_object (_, Open)
| Ptyp_variant (_, Open, _)
| Ptyp_variant (_, Closed, Some _) -> true
| _ -> false
in
match sd.ptype_manifest with
None -> false
| Some sty ->
sd.ptype_kind = Ptype_abstract &&
sd.ptype_private = Private &&
has_row_var sty
(* Set the row variable in a fixed type *)
let set_fixed_row env loc p decl =
let tm =
match decl.type_manifest with
None -> assert false
| Some t -> Ctype.expand_head env t
in
let rv =
match tm.desc with
Tvariant row ->
let row = Btype.row_repr row in
tm.desc <- Tvariant {row with row_fixed = true};
if Btype.static_row row then Btype.newgenty Tnil
else row.row_more
| Tobject (ty, _) ->
snd (Ctype.flatten_fields ty)
| _ ->
raise (Error (loc, Bad_fixed_type "is not an object or variant"))
in
if not (Btype.is_Tvar rv) then
raise (Error (loc, Bad_fixed_type "has no row variable"));
rv.desc <- Tconstr (p, decl.type_params, ref Mnil)
Translate one type declaration
module StringSet =
Set.Make(struct
type t = string
let compare (x:t) y = compare x y
end)
let make_params env params =
let make_param (sty, v) =
try
(transl_type_param env sty, v)
with Already_bound ->
raise(Error(sty.ptyp_loc, Repeated_parameter))
in
List.map make_param params
let transl_labels env closed lbls =
if !Config.bs_only then
match !Builtin_attributes.check_duplicated_labels lbls with
| None -> ()
| Some {loc;txt=name} -> raise (Error(loc,Duplicate_label name))
else (
let all_labels = ref StringSet.empty in
List.iter
(fun {pld_name = {txt=name; loc}} ->
if StringSet.mem name !all_labels then
raise(Error(loc, Duplicate_label name));
all_labels := StringSet.add name !all_labels)
lbls);
let mk {pld_name=name;pld_mutable=mut;pld_type=arg;pld_loc=loc;
pld_attributes=attrs} =
Builtin_attributes.warning_scope attrs
(fun () ->
let arg = Ast_helper.Typ.force_poly arg in
let cty = transl_simple_type env closed arg in
{ld_id = Ident.create name.txt; ld_name = name; ld_mutable = mut;
ld_type = cty; ld_loc = loc; ld_attributes = attrs}
)
in
let lbls = List.map mk lbls in
let lbls' =
List.map
(fun ld ->
let ty = ld.ld_type.ctyp_type in
let ty = match ty.desc with Tpoly(t,[]) -> t | _ -> ty in
{Types.ld_id = ld.ld_id;
ld_mutable = ld.ld_mutable;
ld_type = ty;
ld_loc = ld.ld_loc;
ld_attributes = ld.ld_attributes
}
)
lbls in
lbls, lbls'
let transl_constructor_arguments env closed = function
| Pcstr_tuple l ->
let l = List.map (transl_simple_type env closed) l in
Types.Cstr_tuple (List.map (fun t -> t.ctyp_type) l),
Cstr_tuple l
| Pcstr_record l ->
let lbls, lbls' = transl_labels env closed l in
Types.Cstr_record lbls',
Cstr_record lbls
let make_constructor env type_path type_params sargs sret_type =
match sret_type with
| None ->
let args, targs =
transl_constructor_arguments env true sargs
in
targs, None, args, None, type_params
| Some sret_type ->
if it 's a generalized constructor we must first narrow and
then widen so as to not introduce any new constraints
then widen so as to not introduce any new constraints *)
let z = narrow () in
reset_type_variables ();
let args, targs =
transl_constructor_arguments env false sargs
in
let tret_type = transl_simple_type env false sret_type in
let ret_type = tret_type.ctyp_type in
let params =
match (Ctype.repr ret_type).desc with
| Tconstr (p', params, _) when Path.same type_path p' ->
params
| _ ->
raise (Error (sret_type.ptyp_loc, Constraint_failed
(ret_type, Ctype.newconstr type_path type_params)))
in
widen z;
targs, Some tret_type, args, Some ret_type, params
(* Check that all the variables found in [ty] are in [univ].
Because [ty] is the argument to an abstract type, the representation
of that abstract type could be any subexpression of [ty], in particular
any type variable present in [ty].
*)
let transl_declaration env sdecl id =
(* Bind type parameters *)
reset_type_variables();
Ctype.begin_def ();
let tparams = make_params env sdecl.ptype_params in
let params = List.map (fun (cty, _) -> cty.ctyp_type) tparams in
let cstrs = List.map
(fun (sty, sty', loc) ->
transl_simple_type env false sty,
transl_simple_type env false sty', loc)
sdecl.ptype_cstrs
in
let raw_status = get_unboxed_from_attributes sdecl in
if raw_status.unboxed && not raw_status.default then begin
match sdecl.ptype_kind with
| Ptype_abstract ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"it is abstract"))
| Ptype_variant [{pcd_args = Pcstr_tuple []; _}] ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"its constructor has no argument"))
| Ptype_variant [{pcd_args = Pcstr_tuple [_]; _}] -> ()
| Ptype_variant [{pcd_args = Pcstr_tuple _; _}] ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"its constructor has more than one argument"))
| Ptype_variant [{pcd_args = Pcstr_record
[{pld_mutable=Immutable; _}]; _}] -> ()
| Ptype_variant [{pcd_args = Pcstr_record [{pld_mutable=Mutable; _}]; _}] ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute "it is mutable"))
| Ptype_variant [{pcd_args = Pcstr_record _; _}] ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"its constructor has more than one argument"))
| Ptype_variant _ ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"it has more than one constructor"))
| Ptype_record [{pld_mutable=Immutable; _}] -> ()
| Ptype_record [{pld_mutable=Mutable; _}] ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"it is mutable"))
| Ptype_record _ ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"it has more than one field"))
| Ptype_open ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"extensible variant types cannot be unboxed"))
end;
let unboxed_status =
match sdecl.ptype_kind with
| Ptype_variant [{pcd_args = Pcstr_tuple [_]; _}]
| Ptype_variant [{pcd_args = Pcstr_record
[{pld_mutable = Immutable; _}]; _}]
| Ptype_record [{pld_mutable = Immutable; _}] ->
raw_status
| _ -> (* The type is not unboxable, mark it as boxed *)
unboxed_false_default_false
in
let unbox = unboxed_status.unboxed in
let (tkind, kind) =
match sdecl.ptype_kind with
| Ptype_abstract -> Ttype_abstract, Type_abstract
| Ptype_variant scstrs ->
assert (scstrs <> []);
if List.exists (fun cstr -> cstr.pcd_res <> None) scstrs then begin
match cstrs with
[] -> ()
| (_,_,loc)::_ ->
Location.prerr_warning loc Warnings.Constraint_on_gadt
end;
let has_optional attrs = Ext_list.exists attrs (fun ({txt },_) -> txt = "res.optional") in
let scstrs =
Ext_list.map scstrs (fun ({pcd_args} as cstr) ->
match pcd_args with
| Pcstr_tuple _ -> cstr
| Pcstr_record lds ->
{cstr with pcd_args = Pcstr_record (Ext_list.map lds (fun ld ->
if has_optional ld.pld_attributes then
let typ = ld.pld_type in
let typ = {typ with ptyp_desc = Ptyp_constr ({txt = Lident "option"; loc=typ.ptyp_loc}, [typ])} in
{ld with pld_type = typ}
else ld
))}
) in
let all_constrs = ref StringSet.empty in
List.iter
(fun {pcd_name = {txt = name}} ->
if StringSet.mem name !all_constrs then
raise(Error(sdecl.ptype_loc, Duplicate_constructor name));
all_constrs := StringSet.add name !all_constrs)
scstrs;
let make_cstr scstr =
let name = Ident.create scstr.pcd_name.txt in
let targs, tret_type, args, ret_type, _cstr_params =
make_constructor env (Path.Pident id) params
scstr.pcd_args scstr.pcd_res
in
let tcstr =
{ cd_id = name;
cd_name = scstr.pcd_name;
cd_args = targs;
cd_res = tret_type;
cd_loc = scstr.pcd_loc;
cd_attributes = scstr.pcd_attributes }
in
let cstr =
{ Types.cd_id = name;
cd_args = args;
cd_res = ret_type;
cd_loc = scstr.pcd_loc;
cd_attributes = scstr.pcd_attributes }
in
tcstr, cstr
in
let make_cstr scstr =
Builtin_attributes.warning_scope scstr.pcd_attributes
(fun () -> make_cstr scstr)
in
let tcstrs, cstrs = List.split (List.map make_cstr scstrs) in
Ttype_variant tcstrs, Type_variant cstrs
| Ptype_record lbls ->
let has_optional attrs = Ext_list.exists attrs (fun ({txt },_) -> txt = "res.optional") in
let optionalLabels =
Ext_list.filter_map lbls
(fun lbl -> if has_optional lbl.pld_attributes then Some lbl.pld_name.txt else None) in
let lbls =
if optionalLabels = [] then lbls
else Ext_list.map lbls (fun lbl ->
let typ = lbl.pld_type in
let typ =
if has_optional lbl.pld_attributes then
{typ with ptyp_desc = Ptyp_constr ({txt = Lident "option"; loc=typ.ptyp_loc}, [typ])}
else typ in
{lbl with pld_type = typ }) in
let lbls, lbls' = transl_labels env true lbls in
let rep =
if unbox then Record_unboxed false
else
if optionalLabels <> []
then Record_optional_labels optionalLabels
else Record_regular
in
Ttype_record lbls, Type_record(lbls', rep)
| Ptype_open -> Ttype_open, Type_open
in
let (tman, man) = match sdecl.ptype_manifest with
None -> None, None
| Some sty ->
let no_row = not (is_fixed_type sdecl) in
let cty = transl_simple_type env no_row sty in
Some cty, Some cty.ctyp_type
in
let decl =
{ type_params = params;
type_arity = List.length params;
type_kind = kind;
type_private = sdecl.ptype_private;
type_manifest = man;
type_variance = List.map (fun _ -> Variance.full) params;
type_newtype_level = None;
type_loc = sdecl.ptype_loc;
type_attributes = sdecl.ptype_attributes;
type_immediate = false;
type_unboxed = unboxed_status;
} in
(* Check constraints *)
List.iter
(fun (cty, cty', loc) ->
let ty = cty.ctyp_type in
let ty' = cty'.ctyp_type in
try Ctype.unify env ty ty' with Ctype.Unify tr ->
raise(Error(loc, Inconsistent_constraint (env, tr))))
cstrs;
Ctype.end_def ();
(* Add abstract row *)
if is_fixed_type sdecl then begin
let p =
try Env.lookup_type (Longident.Lident(Ident.name id ^ "#row")) env
with Not_found -> assert false in
set_fixed_row env sdecl.ptype_loc p decl
end;
(* Check for cyclic abbreviations *)
begin match decl.type_manifest with None -> ()
| Some ty ->
if Ctype.cyclic_abbrev env id ty then
raise(Error(sdecl.ptype_loc, Recursive_abbrev sdecl.ptype_name.txt));
end;
{
typ_id = id;
typ_name = sdecl.ptype_name;
typ_params = tparams;
typ_type = decl;
typ_cstrs = cstrs;
typ_loc = sdecl.ptype_loc;
typ_manifest = tman;
typ_kind = tkind;
typ_private = sdecl.ptype_private;
typ_attributes = sdecl.ptype_attributes;
}
a type declaration
let generalize_decl decl =
List.iter Ctype.generalize decl.type_params;
Btype.iter_type_expr_kind Ctype.generalize decl.type_kind;
begin match decl.type_manifest with
| None -> ()
| Some ty -> Ctype.generalize ty
end
(* Check that all constraints are enforced *)
module TypeSet = Btype.TypeSet
module TypeMap = Btype.TypeMap
(* Check that every type-constructor application reachable from [ty]
   satisfies the constraints of the constructor's declaration.
   [visited] memoizes the nodes already checked (handles sharing and
   cycles); errors are reported at [loc]. *)
let rec check_constraints_rec env loc visited ty =
  let ty = Ctype.repr ty in
  if TypeSet.mem ty !visited then () else begin
    visited := TypeSet.add ty !visited;
    match ty.desc with
    | Tconstr (path, args, _) ->
        (* Instantiate [path] at fresh variables and let unification
           propagate the declared constraints onto them. *)
        let args' = List.map (fun _ -> Ctype.newvar ()) args in
        let ty' = Ctype.newconstr path args' in
        begin try Ctype.enforce_constraints env ty'
        with Ctype.Unify _ -> assert false
        | Not_found -> raise (Error(loc, Unavailable_type_constructor path))
        end;
        (* The actual application must be an instance of the
           constrained fresh one. *)
        if not (Ctype.matches env ty ty') then
          raise (Error(loc, Constraint_failed (ty, ty')));
        List.iter (check_constraints_rec env loc visited) args
    | Tpoly (ty, tl) ->
        let _, ty = Ctype.instance_poly false tl ty in
        check_constraints_rec env loc visited ty
    | _ ->
        Btype.iter_type_expr (check_constraints_rec env loc visited) ty
  end
module SMap = Map.Make(String)
(* Check the constraints of every label type in [l], taking the error
   location for each label from the corresponding syntactic field in
   [pl] (looked up by name; the name is assumed to be present). *)
let check_constraints_labels env visited l pl =
  let location_of field_name =
    match List.find (fun pld -> pld.pld_name.txt = field_name) pl with
    | pld -> pld.pld_type.ptyp_loc
    | exception Not_found -> assert false
  in
  List.iter
    (fun {Types.ld_id = id; ld_type = ty} ->
       check_constraints_rec env (location_of (Ident.name id)) visited ty)
    l
(* Check the declared constraints over the translated kind (variant
   constructors / record labels) and manifest of one declaration,
   reporting errors at the corresponding source locations of [sdecl]. *)
let check_constraints env sdecl (_, decl) =
  let visited = ref TypeSet.empty in
  begin match decl.type_kind with
  | Type_abstract -> ()
  | Type_variant l ->
      let find_pl = function
          Ptype_variant pl -> pl
        | Ptype_record _ | Ptype_abstract | Ptype_open -> assert false
      in
      let pl = find_pl sdecl.ptype_kind in
      (* Index the syntactic constructors by name, to recover the
         location of each translated constructor. *)
      let pl_index =
        let foldf acc x =
          SMap.add x.pcd_name.txt x acc
        in
        List.fold_left foldf SMap.empty pl
      in
      List.iter
        (fun {Types.cd_id=name; cd_args; cd_res} ->
          let {pcd_args; pcd_res; _} =
            try SMap.find (Ident.name name) pl_index
            with Not_found -> assert false in
          begin match cd_args, pcd_args with
          | Cstr_tuple tyl, Pcstr_tuple styl ->
              List.iter2
                (fun sty ty ->
                   check_constraints_rec env sty.ptyp_loc visited ty)
                styl tyl
          | Cstr_record tyl, Pcstr_record styl ->
              check_constraints_labels env visited tyl styl
          | _ -> assert false
          end;
          (* GADT return type, when present. *)
          match pcd_res, cd_res with
          | Some sr, Some r ->
              check_constraints_rec env sr.ptyp_loc visited r
          | _ ->
              () )
        l
  | Type_record (l, _) ->
      let find_pl = function
          Ptype_record pl -> pl
        | Ptype_variant _ | Ptype_abstract | Ptype_open -> assert false
      in
      let pl = find_pl sdecl.ptype_kind in
      check_constraints_labels env visited l pl
  | Type_open -> ()
  end;
  begin match decl.type_manifest with
  | None -> ()
  | Some ty ->
      let sty =
        match sdecl.ptype_manifest with Some sty -> sty | _ -> assert false
      in
      check_constraints_rec env sty.ptyp_loc visited ty
  end
(*
If both a variant/record definition and a type equation are given,
need to check that the equation refers to a type of the same kind
with the same constructors and labels.
*)
(* See the comment above: a concrete kind together with a manifest
   requires the manifest to denote a declaration of the same arity,
   with equal parameters and (per [Includecore]) matching
   constructors/labels. *)
let check_coherence env loc id decl =
  match decl with
    { type_kind = (Type_variant _ | Type_record _| Type_open);
      type_manifest = Some ty } ->
      begin match (Ctype.repr ty).desc with
        Tconstr(path, args, _) ->
          begin try
            let decl' = Env.find_type path env in
            let err =
              if List.length args <> List.length decl.type_params
              then [Includecore.Arity]
              else if not (Ctype.equal env false args decl.type_params)
              then [Includecore.Constraint]
              else
                (* Full inclusion check, substituting [id] by [path]
                   in the checked declaration. *)
                Includecore.type_declarations ~loc ~equality:true env
                  (Path.last path)
                  decl'
                  id
                  (Subst.type_declaration
                     (Subst.add_type id path Subst.identity) decl)
            in
            if err <> [] then
              raise(Error(loc, Definition_mismatch (ty, err)))
          with Not_found ->
            raise(Error(loc, Unavailable_type_constructor path))
          end
      | _ -> raise(Error(loc, Definition_mismatch (ty, [])))
      end
  | _ -> ()
(* Re-exportation check for one declaration, reported at the
   declaration's own location. *)
let check_abbrev env sdecl id_decl =
  let id, decl = id_decl in
  check_coherence env sdecl.ptype_loc id decl
(* Check that recursion is well-founded *)
(* Check that recursion is well-founded: expanding abbreviations from
   [ty] must never reach a node already on the current path, unless
   the cycle crosses a constructor that makes it productive (objects,
   polymorphic variants, or -- under -rectypes -- a contractive
   constructor).  [to_check p] selects the paths belonging to the
   current recursive group. *)
let check_well_founded env loc path to_check ty =
  let visited = ref TypeMap.empty in
  let rec check ty0 parents ty =
    let ty = Btype.repr ty in
    if TypeSet.mem ty parents then begin
      (*Format.eprintf "@[%a@]@." Printtyp.raw_type_expr ty;*)
      if match ty0.desc with
      | Tconstr (p, _, _) -> Path.same p path
      | _ -> false
      then raise (Error (loc, Recursive_abbrev (Path.name path)))
      else raise (Error (loc, Cycle_in_def (Path.name path, ty0)))
    end;
    (* Stop early when [ty] was already visited with a superset of
       the current path; otherwise merge the paths. *)
    let (fini, parents) =
      try
        let prev = TypeMap.find ty !visited in
        if TypeSet.subset parents prev then (true, parents) else
        (false, TypeSet.union parents prev)
      with Not_found ->
        (false, parents)
    in
    if fini then () else
    let rec_ok =
      match ty.desc with
        Tconstr(p,_,_) ->
          !Clflags.recursive_types && Ctype.is_contractive env p
      | Tobject _ | Tvariant _ -> true
      | _ -> false (* !Clflags.recursive_types*)
    in
    let visited' = TypeMap.add ty parents !visited in
    (* Check the arguments first; remember a failure instead of
       raising, because a successful expansion below may excuse it. *)
    let arg_exn =
      try
        visited := visited';
        let parents =
          if rec_ok then TypeSet.empty else TypeSet.add ty parents in
        Btype.iter_type_expr (check ty0 parents) ty;
        None
      with e ->
        visited := visited'; Some e
    in
    match ty.desc with
    | Tconstr(p, _, _) when arg_exn <> None || to_check p ->
        if to_check p then may raise arg_exn
        else Btype.iter_type_expr (check ty0 TypeSet.empty) ty;
        begin try
          let ty' = Ctype.try_expand_once_opt env ty in
          let ty0 = if TypeSet.is_empty parents then ty else ty0 in
          check ty0 (TypeSet.add ty parents) ty'
        with
          Ctype.Cannot_expand -> may raise arg_exn
        end
    | _ -> may raise arg_exn
  in
  let snap = Btype.snapshot () in
  try Ctype.wrap_trace_gadt_instances env (check ty TypeSet.empty) ty
  with Ctype.Unify _ ->
    (* Will be detected by check_recursion *)
    Btype.backtrack snap
(* When [decl] has a manifest, check that [path] applied to fresh
   argument variables is well-founded with respect to [path] itself. *)
let check_well_founded_manifest env loc path decl =
  match decl.type_manifest with
  | None -> ()
  | Some _ ->
      let fresh_args =
        List.map (fun _ -> Ctype.newvar ()) decl.type_params in
      check_well_founded env loc path (Path.same path)
        (Ctype.newconstr path fresh_args)
(* Run [check_well_founded] over every type expression occurring in
   [decl], using the generic declaration iterator on a fresh instance
   so the original declaration is not mutated. *)
let check_well_founded_decl env loc path decl to_check =
  let open Btype in
  let it =
    {type_iterators with
     it_type_expr = (fun _ -> check_well_founded env loc path to_check)} in
  it.it_type_declaration it (Ctype.instance_declaration decl)
(* Check for ill-defined abbrevs *)

let check_recursion env loc path decl to_check =
  (* to_check is true for potentially mutually recursive paths.
     (path, decl) is the type declaration to be checked. *)
  if decl.type_params = [] then () else
  let visited = ref [] in
  let rec check_regular cpath args prev_exp ty =
    let ty = Ctype.repr ty in
    if not (List.memq ty !visited) then begin
      visited := ty :: !visited;
      match ty.desc with
      | Tconstr(path', args', _) ->
          (* Every recursive occurrence of [path] must use exactly the
             original parameters (regularity). *)
          if Path.same path path' then begin
            if not (Ctype.equal env false args args') then
              raise (Error(loc,
                     Parameters_differ(cpath, ty, Ctype.newconstr path args)))
          end
          (* Attempt to expand a type abbreviation if:
             1- [to_check path'] holds
                (otherwise the expansion cannot involve [path]);
             2- we haven't expanded this type constructor before
                (otherwise we could loop if [path'] is itself
                 a non-regular abbreviation). *)
          else if to_check path' && not (List.mem path' prev_exp) then begin
            try
              (* Attempt expansion *)
              let (params0, body0, _) = Env.find_type_expansion path' env in
              let (params, body) =
                Ctype.instance_parameterized_type params0 body0 in
              begin
                try List.iter2 (Ctype.unify env) params args'
                with Ctype.Unify _ ->
                  raise (Error(loc, Constraint_failed
                                 (ty, Ctype.newconstr path' params0)));
              end;
              check_regular path' args (path' :: prev_exp) body
            with Not_found -> ()
          end;
          List.iter (check_regular cpath args prev_exp) args'
      | Tpoly (ty, tl) ->
          let (_, ty) = Ctype.instance_poly ~keep_names:true false tl ty in
          check_regular cpath args prev_exp ty
      | _ ->
          Btype.iter_type_expr (check_regular cpath args prev_exp) ty
    end in
  Misc.may
    (fun body ->
       let (args, body) =
         Ctype.instance_parameterized_type
           ~keep_names:true decl.type_params body in
       check_regular path args [] body)
    decl.type_manifest
(* Run the regularity check on one translated declaration, using the
   error location recorded for its identifier. *)
let check_abbrev_recursion env id_loc_list to_check tdecl =
  let id = tdecl.typ_id in
  check_recursion env (List.assoc id id_loc_list) (Path.Pident id)
    tdecl.typ_type to_check
(* Compute variance *)
(* Variance recorded so far for [ty] in the accumulator; [null] if
   the node has not been seen yet. *)
let get_variance ty visited =
  match TypeMap.find ty !visited with
  | v -> v
  | exception Not_found -> Variance.null
(* Accumulate in [visited] the variance with which each subterm of
   [ty] occurs, starting from occurrence variance [vari].  Stops as
   soon as a node's recorded variance already subsumes the current
   one. *)
let compute_variance env visited vari ty =
  let rec compute_variance_rec vari ty =
    (* Format.eprintf "%a: %x@." Printtyp.type_expr ty (Obj.magic vari); *)
    let ty = Ctype.repr ty in
    let vari' = get_variance ty visited in
    if Variance.subset vari vari' then () else
    let vari = Variance.union vari vari' in
    visited := TypeMap.add ty vari !visited;
    let compute_same = compute_variance_rec vari in
    match ty.desc with
      Tarrow (_, ty1, ty2, _) ->
        (* Argument position is contravariant, and may be weak. *)
        let open Variance in
        let v = conjugate vari in
        let v1 =
          if mem May_pos v || mem May_neg v
          then set May_weak true v else v
        in
        compute_variance_rec v1 ty1;
        compute_same ty2
    | Ttuple tl ->
        List.iter compute_same tl
    | Tconstr (path, tl, _) ->
        (* Compose the occurrence variance with the declared variance
           of each parameter of [path]. *)
        let open Variance in
        if tl = [] then () else begin
          try
            let decl = Env.find_type path env in
            let cvari f = mem f vari in
            List.iter2
              (fun ty v ->
                let cv f = mem f v in
                let strict =
                  cvari Inv && cv Inj || (cvari Pos || cvari Neg) && cv Inv
                in
                if strict then compute_variance_rec full ty else
                let p1 = inter v vari
                and n1 = inter v (conjugate vari) in
                let v1 =
                  union (inter covariant (union p1 (conjugate p1)))
                    (inter (conjugate covariant) (union n1 (conjugate n1)))
                and weak =
                  cvari May_weak && (cv May_pos || cv May_neg) ||
                  (cvari May_pos || cvari May_neg) && cv May_weak
                in
                let v2 = set May_weak weak v1 in
                compute_variance_rec v2 ty)
              tl decl.type_variance
          with Not_found ->
            (* Unknown constructor: assume the worst for every arg. *)
            List.iter (compute_variance_rec may_inv) tl
        end
    | Tobject (ty, _) ->
        compute_same ty
    | Tfield (_, _, ty1, ty2) ->
        compute_same ty1;
        compute_same ty2
    | Tsubst ty ->
        compute_same ty
    | Tvariant row ->
        let row = Btype.row_repr row in
        List.iter
          (fun (_,f) ->
            match Btype.row_field_repr f with
              Rpresent (Some ty) ->
                compute_same ty
            | Reither (_, tyl, _, _) ->
                let open Variance in
                let upper =
                  List.fold_left (fun s f -> set f true s)
                    null [May_pos; May_neg; May_weak]
                in
                let v = inter vari upper in
                (* cf PR#7269:
                   if List.length tyl > 1 then upper else inter vari upper *)
                List.iter (compute_variance_rec v) tyl
            | _ -> ())
          row.row_fields;
        compute_same row.row_more
    | Tpoly (ty, _) ->
        compute_same ty
    | Tvar _ | Tnil | Tlink _ | Tunivar _ -> ()
    | Tpackage (_, _, tyl) ->
        let v =
          Variance.(if mem Pos vari || mem Neg vari then full else may_inv)
        in
        List.iter (compute_variance_rec v) tyl
  in
  compute_variance_rec vari ty
(* Build a variance with May_pos = [p], May_neg = [n], Inj = [i];
   note that May_weak is also taken from [n]. *)
let make p n i =
  let open Variance in
  set May_pos p (set May_neg n (set May_weak n (set Inj i null)))
(* Compute the variances of [decl]'s parameters from their
   occurrences in [tyl] (pairs of (contravariant-allowed?, type)).
   When [check] holds, verify them against the [required] annotations
   and the propagation through constrained parameters; otherwise just
   return the inferred variances. *)
let compute_variance_type env check (required, loc) decl tyl =
  (* Requirements *)
  let required =
    List.map (fun (c,n,i) -> if c || n then (c,n,i) else (true,true,i))
      required
  in
  (* Prepare *)
  let params = List.map Btype.repr decl.type_params in
  let tvl = ref TypeMap.empty in
  (* Compute occurrences in the body *)
  let open Variance in
  List.iter
    (fun (cn,ty) ->
      compute_variance env tvl (if cn then full else covariant) ty)
    tyl;
  if check then begin
    (* Check variance of parameters *)
    let pos = ref 0 in
    List.iter2
      (fun ty (c, n, i) ->
        incr pos;
        let var = get_variance ty tvl in
        let (co,cn) = get_upper var and ij = mem Inj var in
        if Btype.is_Tvar ty && (co && not c || cn && not n || not ij && i)
        then raise (Error(loc, Bad_variance (!pos, (co,cn,ij), (c,n,i)))))
      params required;
    (* Check propagation from constrained parameters *)
    let args = Btype.newgenty (Ttuple params) in
    let fvl = Ctype.free_variables args in
    let fvl = Ext_list.filter fvl (fun v -> not (List.memq v params)) in
    (* If there are no extra variables there is nothing to do *)
    if fvl = [] then () else
    let tvl2 = ref TypeMap.empty in
    List.iter2
      (fun ty (p,n,_) ->
        if Btype.is_Tvar ty then () else
        let v =
          if p then if n then full else covariant else conjugate covariant in
        compute_variance env tvl2 v ty)
      params required;
    let visited = ref TypeSet.empty in
    let rec check ty =
      let ty = Ctype.repr ty in
      if TypeSet.mem ty !visited then () else
      let visited' = TypeSet.add ty !visited in
      visited := visited';
      let v1 = get_variance ty tvl in
      let snap = Btype.snapshot () in
      (* Collect the variances recorded for any node equal to [ty]
         in the constrained-parameter pass. *)
      let v2 =
        TypeMap.fold
          (fun t vt v ->
            if Ctype.equal env false [ty] [t] then union vt v else v)
          !tvl2 null in
      Btype.backtrack snap;
      let (c1,n1) = get_upper v1 and (c2,n2,_,i2) = get_lower v2 in
      if c1 && not c2 || n1 && not n2 then
        if List.memq ty fvl then
          (* Negative codes select the error message variant. *)
          let code = if not i2 then -2 else if c2 || n2 then -1 else -3 in
          raise (Error (loc, Bad_variance (code, (c1,n1,false), (c2,n2,false))))
        else
          Btype.iter_type_expr check ty
    in
    List.iter (fun (_,ty) -> check ty) tyl;
  end;
  List.map2
    (fun ty (p, n, i) ->
      let v = get_variance ty tvl in
      let tr = decl.type_private in
      (* Use required variance where relevant *)
      let concr = decl.type_kind <> Type_abstract (*|| tr = Type_new*) in
      let (p, n) =
        if tr = Private || not (Btype.is_Tvar ty) then (p, n) (* set *)
        else (false, false) (* only check *)
      and i = concr  || i && tr = Private in
      let v = union v (make p n i) in
      let v =
        if not concr then v else
        if mem Pos v && mem Neg v then full else
        if Btype.is_Tvar ty then v else
        union v
          (if p then if n then full else covariant else conjugate covariant)
      in
      if decl.type_kind = Type_abstract && tr = Public then v else
      set May_weak (mem May_neg v) v)
    params required
(* Pair each type with [false]: treated as a purely covariant
   occurrence by [compute_variance_type]. *)
let add_false = List.map (fun ty -> false, ty)
(* A parameter is constrained if it is either instantiated,
or it is a variable appearing in another parameter *)
(* A parameter is constrained when it is instantiated (not a
   variable) or when its variable also occurs inside another
   parameter (see the comment above). *)
let constrained vars ty =
  match ty.desc with
  | Tvar _ -> List.exists (List.memq ty) vars
  | _ -> true
(* Occurrence list for a constructor's arguments: tuple arguments are
   plain (covariant) occurrences; inline-record fields allow both
   directions when mutable. *)
let for_constr = function
  | Types.Cstr_tuple l -> add_false l
  | Types.Cstr_record l ->
      List.map
        (fun ld -> (ld.Types.ld_mutable = Mutable, ld.Types.ld_type))
        l
(* Variance for one (possibly GADT) constructor.  Without a return
   type the arguments are checked against the declaration's own
   parameters (as private, so variances are only checked, not set).
   With a return type, the parameters are those of the return
   instance, and anonymous constrained parameters must not be
   required to vary. *)
let compute_variance_gadt env check (required, loc as rloc) decl
    (tl, ret_type_opt) =
  match ret_type_opt with
  | None ->
      compute_variance_type env check rloc {decl with type_private = Private}
        (for_constr tl)
  | Some ret_type ->
      match Ctype.repr ret_type with
      | {desc=Tconstr (_, tyl, _)} ->
          let tyl = List.map Ctype.repr tyl in
          let fvl = List.map (Ctype.free_variables ?env:None) tyl in
          let _ =
            List.fold_left2
              (fun (fv1,fv2) ty (c,n,_) ->
                match fv2 with [] -> assert false
                | fv :: fv2 ->
                    (* fv1 @ fv2 = free_variables of other parameters *)
                    if (c||n) && constrained (fv1 @ fv2) ty then
                      raise (Error(loc, Varying_anonymous));
                    (fv :: fv1, fv2))
              ([], fvl) tyl required
          in
          compute_variance_type env check rloc
            {decl with type_params = tyl; type_private = Private}
            (for_constr tl)
      | _ -> assert false
(* Variance check for one extension constructor, seen as a GADT
   constructor of [decl] with the extension's own parameters. *)
let compute_variance_extension env check decl ext rloc =
  let decl = {decl with type_params = ext.ext_type_params} in
  compute_variance_gadt env check rloc decl (ext.ext_args, ext.ext_ret_type)
(* Variance of one declaration.  Fully abstract (or open) types
   without a manifest take the required variance directly; otherwise
   it is computed from the kind and manifest, with GADT-style variant
   constructors handled per-constructor and joined. *)
let compute_variance_decl env check decl (required, _ as rloc) =
  if (decl.type_kind = Type_abstract || decl.type_kind = Type_open)
  && decl.type_manifest = None then
    List.map
      (fun (c, n, i) ->
        make (not n) (not c) (decl.type_kind <> Type_abstract || i))
      required
  else
  let mn =
    match decl.type_manifest with
      None -> []
    | Some ty -> [false, ty]
  in
  match decl.type_kind with
    Type_abstract | Type_open ->
      compute_variance_type env check rloc decl mn
  | Type_variant tll ->
      (* Ordinary variants: all constructor arguments at once. *)
      if List.for_all (fun c -> c.Types.cd_res = None) tll then
        compute_variance_type env check rloc decl
          (mn @ List.flatten (List.map (fun c -> for_constr c.Types.cd_args)
                                tll))
      else begin
        (* GADT: per-constructor variances, joined pointwise. *)
        let mn =
          List.map (fun (_,ty) -> (Types.Cstr_tuple [ty],None)) mn in
        let tll =
          mn @ List.map (fun c -> c.Types.cd_args, c.Types.cd_res) tll in
        match List.map (compute_variance_gadt env check rloc decl) tll with
        | vari :: rem ->
            let varl = List.fold_left (List.map2 Variance.union) vari rem in
            List.map
              Variance.(fun v -> if mem Pos v && mem Neg v then full else v)
              varl
        | _ -> assert false
      end
  | Type_record (ftl, _) ->
      compute_variance_type env check rloc decl
        (mn @ List.map (fun {Types.ld_mutable; ld_type} ->
             (ld_mutable = Mutable, ld_type)) ftl)
(* Whether [id] is an internal identifier starting with '#'
   (e.g. the "t#row" helpers created for fixed types). *)
let is_hash id =
  let name = Ident.name id in
  name <> "" && name.[0] = '#'
(* Whether the declaration carries an immediate attribute. *)
let marked_as_immediate decl =
  Builtin_attributes.immediate decl.type_attributes
(* Decide whether values of this type are always immediate
   (non-pointer at runtime): unboxed single-field wrappers of an
   immediate type, variants with only constant constructors, abstract
   types whose manifest cannot be a pointer, or abstract types
   explicitly marked immediate. *)
let compute_immediacy env tdecl =
  match (tdecl.type_kind, tdecl.type_manifest) with
  | (Type_variant [{cd_args = Cstr_tuple [arg]; _}], _)
  | (Type_variant [{cd_args = Cstr_record [{ld_type = arg; _}]; _}], _)
  | (Type_record ([{ld_type = arg; _}], _), _)
    when tdecl.type_unboxed.unboxed ->
    (* Unboxed wrapper: immediacy is inherited from the payload. *)
    begin match get_unboxed_type_representation env arg with
      | Some argrepr -> not (Ctype.maybe_pointer_type env argrepr)
      | None -> false
    end
  | (Type_variant (_ :: _ as cstrs), _) ->
    (* Immediate iff every constructor is constant. *)
    not (List.exists (fun c -> c.Types.cd_args <> Types.Cstr_tuple []) cstrs)
  | (Type_abstract, Some(typ)) ->
    not (Ctype.maybe_pointer_type env typ)
  | (Type_abstract, None) -> marked_as_immediate tdecl
  | _ -> false
(* Computes the fixpoint for the variance and immediacy of type declarations *)
(* Iterate [compute_variance_decl] and [compute_immediacy] over the
   group until both variances and immediacies stabilize, then run the
   checked variance pass once and validate immediate attributes.
   Returns the updated declarations and the environment containing
   them. *)
let rec compute_properties_fixpoint env decls required variances immediacies =
  let new_decls =
    List.map2
      (fun (id, decl) (variance, immediacy) ->
         id, {decl with type_variance = variance; type_immediate = immediacy})
      decls (List.combine variances immediacies)
  in
  let new_env =
    List.fold_right
      (fun (id, decl) env -> Env.add_type ~check:true id decl env)
      new_decls env
  in
  let new_variances =
    List.map2
      (fun (_id, decl) -> compute_variance_decl new_env false decl)
      new_decls required
  in
  (* Variances only grow, so the fixpoint terminates. *)
  let new_variances =
    List.map2 (List.map2 Variance.union) new_variances variances in
  let new_immediacies =
    List.map
      (fun (_id, decl) -> compute_immediacy new_env decl)
      new_decls
  in
  if new_variances <> variances || new_immediacies <> immediacies then
    compute_properties_fixpoint env decls required new_variances new_immediacies
  else begin
    (* List.iter (fun (id, decl) ->
         Printf.eprintf "%s:" (Ident.name id);
         List.iter (fun (v : Variance.t) ->
             Printf.eprintf " %x" (Obj.magic v : int))
           decl.type_variance;
         prerr_endline "")
       new_decls; *)
    (* An explicit immediate attribute must agree with inference. *)
    List.iter (fun (_, decl) ->
      if (marked_as_immediate decl) && (not decl.type_immediate) then
        raise (Error (decl.type_loc, Bad_immediate_attribute))
      else ())
      new_decls;
    (* Final, checked variance pass (skipping "#..." helper types). *)
    List.iter2
      (fun (id, decl) req -> if not (is_hash id) then
          ignore (compute_variance_decl new_env true decl req))
      new_decls required;
    new_decls, new_env
  end
(* Seed the fixpoint with one [Variance.null] per type parameter. *)
let init_variance (_id, decl) =
  decl.type_params |> List.map (fun _ -> Variance.null)
(* Turn surface variance annotations into (covariant, contravariant,
   injective) requirement triples; injectivity is never required by
   an annotation. *)
let add_injectivity variances =
  List.map
    (fun v ->
       match v with
       | Covariant -> (true, false, false)
       | Contravariant -> (false, true, false)
       | Invariant -> (false, false, false))
    variances
(* for typeclass.ml *)
(* Compute variances for the object type declarations produced by a
   group of class declarations, then copy them onto the related
   abbreviation, class type and class-type declaration entries. *)
let compute_variance_decls env cldecls =
  let decls, required =
    List.fold_right
      (fun (obj_id, obj_abbr, _cl_abbr, _clty, _cltydef, ci) (decls, req) ->
        let variance = List.map snd ci.ci_params in
        (obj_id, obj_abbr) :: decls,
        (add_injectivity variance, ci.ci_loc) :: req)
      cldecls ([],[])
  in
  let (decls, _) =
    compute_properties_fixpoint env decls required
      (List.map init_variance decls)
      (List.map (fun _ -> false) decls)
  in
  List.map2
    (fun (_,decl) (_, _, cl_abbr, clty, cltydef, _) ->
      let variance = decl.type_variance in
      (decl, {cl_abbr with type_variance = variance},
       {clty with cty_variance = variance},
       {cltydef with clty_variance = variance}))
    decls cldecls
(* Check multiple declarations of labels/constructors *)
(* Warn when two declarations of the same group declare a constructor
   or a label with the same name.  The tables map each name to the
   type that first declared it, so the warning can cite both. *)
let check_duplicates sdecl_list =
  let labels = Hashtbl.create 7 and constrs = Hashtbl.create 7 in
  List.iter
    (fun sdecl -> match sdecl.ptype_kind with
      Ptype_variant cl ->
        List.iter
          (fun pcd ->
            try
              let name' = Hashtbl.find constrs pcd.pcd_name.txt in
              Location.prerr_warning pcd.pcd_loc
                (Warnings.Duplicate_definitions
                   ("constructor", pcd.pcd_name.txt, name',
                    sdecl.ptype_name.txt))
            with Not_found ->
              Hashtbl.add constrs pcd.pcd_name.txt sdecl.ptype_name.txt)
          cl
    | Ptype_record fl ->
        List.iter
          (fun {pld_name=cname;pld_loc=loc} ->
            try
              let name' = Hashtbl.find labels cname.txt in
              Location.prerr_warning loc
                (Warnings.Duplicate_definitions
                   ("label", cname.txt, name', sdecl.ptype_name.txt))
            with Not_found -> Hashtbl.add labels cname.txt sdecl.ptype_name.txt)
          fl
    | Ptype_abstract -> ()
    | Ptype_open -> ())
    sdecl_list
(* Force recursion to go through id for private types *)
let name_recursion sdecl id decl =
  match decl with
  | { type_kind = Type_abstract;
      type_manifest = Some ty;
      type_private = Private; } when is_fixed_type sdecl ->
    let ty = Ctype.repr ty in
    let ty' = Btype.newty2 ty.level ty.desc in
    if Ctype.deep_occur ty ty' then
      (* Physically redirect the recursive node to [Pident id] and
         keep the copy [ty'] as the new manifest. *)
      let td = Tconstr(Path.Pident id, decl.type_params, ref Mnil) in
      Btype.link_type ty (Btype.newty2 ty.level td);
      {decl with type_manifest = Some ty'}
    else decl
  | _ -> decl
(* Translate a set of type declarations, mutually recursive or not *)
(* Translate a whole (possibly recursive) group of type declarations:
   enter stub types, translate each declaration, then run all
   well-formedness checks and the variance/immediacy fixpoint. *)
let transl_type_decl env rec_flag sdecl_list =
  (* Add dummy types for fixed rows *)
  let fixed_types = Ext_list.filter sdecl_list is_fixed_type in
  let sdecl_list =
    List.map
      (fun sdecl ->
        let ptype_name =
          mkloc (sdecl.ptype_name.txt ^"#row") sdecl.ptype_name.loc in
        {sdecl with
         ptype_name; ptype_kind = Ptype_abstract; ptype_manifest = None})
      fixed_types
    @ sdecl_list
  in
  (* Create identifiers. *)
  let id_list =
    List.map (fun sdecl -> Ident.create sdecl.ptype_name.txt) sdecl_list
  in
  (*
     Since we've introduced fresh idents, make sure the definition
     level is at least the binding time of these events. Otherwise,
     passing one of the recursively-defined type constrs as argument
     to an abbreviation may fail.
  *)
  Ctype.init_def(Ident.current_time());
  Ctype.begin_def();
  (* Enter types. *)
  let temp_env =
    List.fold_left2 (enter_type rec_flag) env sdecl_list id_list in
  (* Translate each declaration. *)
  let current_slot = ref None in
  let warn_unused = Warnings.is_active (Warnings.Unused_type_declaration "") in
  let id_slots id =
    match rec_flag with
    | Asttypes.Recursive when warn_unused ->
        (* See typecore.ml for a description of the algorithm used
           to detect unused declarations in a set of recursive definitions. *)
        let slot = ref [] in
        let td = Env.find_type (Path.Pident id) temp_env in
        let name = Ident.name id in
        Env.set_type_used_callback
          name td
          (fun old_callback ->
             match !current_slot with
             | Some slot -> slot := (name, td) :: !slot
             | None ->
                 List.iter (fun (name, d) -> Env.mark_type_used env name d)
                   (get_ref slot);
                 old_callback ()
          );
        id, Some slot
    | Asttypes.Recursive | Asttypes.Nonrecursive ->
        id, None
  in
  let transl_declaration name_sdecl (id, slot) =
    current_slot := slot;
    Builtin_attributes.warning_scope
      name_sdecl.ptype_attributes
      (fun () -> transl_declaration temp_env name_sdecl id)
  in
  let tdecls =
    List.map2 transl_declaration sdecl_list (List.map id_slots id_list) in
  let decls =
    List.map (fun tdecl -> (tdecl.typ_id, tdecl.typ_type)) tdecls in
  current_slot := None;
  (* Check for duplicates *)
  check_duplicates sdecl_list;
  (* Build the final env. *)
  let newenv =
    List.fold_right
      (fun (id, decl) env -> Env.add_type ~check:true id decl env)
      decls env
  in
  (* Update stubs *)
  begin match rec_flag with
    | Asttypes.Nonrecursive -> ()
    | Asttypes.Recursive ->
      List.iter2
        (fun id sdecl -> update_type temp_env newenv id sdecl.ptype_loc)
        id_list sdecl_list
  end;
  (* Generalize type declarations. *)
  Ctype.end_def();
  List.iter (fun (_, decl) -> generalize_decl decl) decls;
  (* Check for ill-formed abbrevs *)
  let id_loc_list =
    List.map2 (fun id sdecl -> (id, sdecl.ptype_loc))
      id_list sdecl_list
  in
  List.iter (fun (id, decl) ->
    check_well_founded_manifest newenv (List.assoc id id_loc_list)
      (Path.Pident id) decl)
    decls;
  let to_check =
    function Path.Pident id -> List.mem_assoc id id_loc_list | _ -> false in
  List.iter (fun (id, decl) ->
    check_well_founded_decl newenv (List.assoc id id_loc_list) (Path.Pident id)
      decl to_check)
    decls;
  List.iter (check_abbrev_recursion newenv id_loc_list to_check) tdecls;
  (* Check that all type variables are closed *)
  List.iter2
    (fun sdecl tdecl ->
      let decl = tdecl.typ_type in
      match Ctype.closed_type_decl decl with
        Some ty -> raise(Error(sdecl.ptype_loc, Unbound_type_var(ty,decl)))
      | None -> ())
    sdecl_list tdecls;
  (* Check that constraints are enforced *)
  List.iter2 (check_constraints newenv) sdecl_list decls;
  (* Name recursion *)
  let decls =
    List.map2 (fun sdecl (id, decl) -> id, name_recursion sdecl id decl)
      sdecl_list decls
  in
  (* Add variances to the environment *)
  let required =
    List.map
      (fun sdecl ->
         add_injectivity (List.map snd sdecl.ptype_params),
         sdecl.ptype_loc
      )
      sdecl_list
  in
  let final_decls, final_env =
    compute_properties_fixpoint env decls required
      (List.map init_variance decls)
      (List.map (fun _ -> false) decls)
  in
  (* Check re-exportation *)
  List.iter2 (check_abbrev final_env) sdecl_list final_decls;
  (* Keep original declaration *)
  let final_decls =
    List.map2
      (fun tdecl (_id2, decl) ->
         { tdecl with typ_type = decl }
      ) tdecls final_decls
  in
  (* Done *)
  (final_decls, final_env)
(* Translating type extensions *)
(* Translate one extension constructor: either a fresh declaration
   ([Pext_decl]) or a rebinding of an existing constructor
   ([Pext_rebind]).  For a rebinding, the existing constructor's type
   must match the type being extended and a private constructor may
   not be re-exported as public. *)
let transl_extension_constructor env type_path type_params
                                 typext_params priv sext =
  let id = Ident.create sext.pext_name.txt in
  let args, ret_type, kind =
    match sext.pext_kind with
      Pext_decl(sargs, sret_type) ->
        let targs, tret_type, args, ret_type, _ =
          make_constructor env type_path typext_params
            sargs sret_type
        in
          args, ret_type, Text_decl(targs, tret_type)
    | Pext_rebind lid ->
        let cdescr = Typetexp.find_constructor env lid.loc lid.txt in
        let usage =
          if cdescr.cstr_private = Private || priv = Public
          then Env.Positive else Env.Privatize
        in
        Env.mark_constructor usage env (Longident.last lid.txt) cdescr;
        let (args, cstr_res) = Ctype.instance_constructor cdescr in
        let res, ret_type =
          if cdescr.cstr_generalized then
            (* GADT constructor: rebind at a fresh instance and keep
               an explicit return type. *)
            let params = Ctype.instance_list env type_params in
            let res = Ctype.newconstr type_path params in
            let ret_type = Some (Ctype.newconstr type_path params) in
              res, ret_type
          else (Ctype.newconstr type_path typext_params), None
        in
        begin
          try
            Ctype.unify env cstr_res res
          with Ctype.Unify trace ->
            raise (Error(lid.loc,
                     Rebind_wrong_type(lid.txt, env, trace)))
        end;
        (* Remove "_" names from parameters used in the constructor *)
        if not cdescr.cstr_generalized then begin
          let vars =
            Ctype.free_variables (Btype.newgenty (Ttuple args))
          in
            List.iter
              (function {desc = Tvar (Some "_")} as ty ->
                          if List.memq ty vars then ty.desc <- Tvar None
                        | _ -> ())
              typext_params
        end;
        (* Ensure that constructor's type matches the type being extended *)
        let cstr_type_path, cstr_type_params =
          match cdescr.cstr_res.desc with
            Tconstr (p, _, _) ->
              let decl = Env.find_type p env in
                p, decl.type_params
          | _ -> assert false
        in
        let cstr_types =
          (Btype.newgenty
             (Tconstr(cstr_type_path, cstr_type_params, ref Mnil)))
          :: cstr_type_params
        in
        let ext_types =
          (Btype.newgenty
             (Tconstr(type_path, type_params, ref Mnil)))
          :: type_params
        in
          if not (Ctype.equal env true cstr_types ext_types) then
            raise (Error(lid.loc,
                         Rebind_mismatch(lid.txt, cstr_type_path, type_path)));
          (* Disallow rebinding private constructors to non-private *)
          begin
            match cdescr.cstr_private, priv with
              Private, Public ->
                raise (Error(lid.loc, Rebind_private lid.txt))
            | _ -> ()
          end;
          let path =
            match cdescr.cstr_tag with
              Cstr_extension(path, _) -> path
            | _ -> assert false
          in
          (* Reuse the original inline-record declaration, if any. *)
          let args =
            match cdescr.cstr_inlined with
            | None ->
                Types.Cstr_tuple args
            | Some decl ->
                let tl =
                  match args with
                  | [ {desc=Tconstr(_, tl, _)} ] -> tl
                  | _ -> assert false
                in
                let decl = Ctype.instance_declaration decl in
                assert (List.length decl.type_params = List.length tl);
                List.iter2 (Ctype.unify env) decl.type_params tl;
                let lbls =
                  match decl.type_kind with
                  | Type_record (lbls, Record_extension) -> lbls
                  | _ -> assert false
                in
                Types.Cstr_record lbls
          in
          args, ret_type, Text_rebind(path, lid)
  in
  let ext =
    { ext_type_path = type_path;
      ext_type_params = typext_params;
      ext_args = args;
      ext_ret_type = ret_type;
      ext_private = priv;
      Types.ext_loc = sext.pext_loc;
      Types.ext_attributes = sext.pext_attributes; }
  in
    { ext_id = id;
      ext_name = sext.pext_name;
      ext_type = ext;
      ext_kind = kind;
      Typedtree.ext_loc = sext.pext_loc;
      Typedtree.ext_attributes = sext.pext_attributes; }
(* Wrapper: install the constructor's warning attributes for the
   duration of the translation. *)
let transl_extension_constructor env type_path type_params
    typext_params priv sext =
  Builtin_attributes.warning_scope sext.pext_attributes
    (fun () -> transl_extension_constructor env type_path type_params
        typext_params priv sext)
(* Translate a [type t += ...] extension: check that the extended
   type is open (and, when [extend], not private for fresh
   constructors), that arity and variance match, translate each
   constructor and enter it into the environment. *)
let transl_type_extension extend env loc styext =
  reset_type_variables();
  Ctype.begin_def();
  let (type_path, type_decl) =
    let lid = styext.ptyext_path in
    Typetexp.find_type env lid.loc lid.txt
  in
  begin
    match type_decl.type_kind with
    | Type_open -> begin
        match type_decl.type_private with
        | Private when extend -> begin
            (* A private open type may only be extended by rebindings. *)
            match
              List.find
                (function {pext_kind = Pext_decl _} -> true
                        | {pext_kind = Pext_rebind _} -> false)
                styext.ptyext_constructors
            with
            | {pext_loc} ->
                raise (Error(pext_loc, Cannot_extend_private_type type_path))
            | exception Not_found -> ()
          end
        | _ -> ()
      end
    | _ ->
        raise (Error(loc, Not_extensible_type type_path))
  end;
  let type_variance =
    List.map (fun v ->
                let (co, cn) = Variance.get_upper v in
                  (not cn, not co, false))
             type_decl.type_variance
  in
  (* Arity and variance of the extension must be compatible with the
     original declaration. *)
  let err =
    if type_decl.type_arity <> List.length styext.ptyext_params then
      [Includecore.Arity]
    else
      if List.for_all2
           (fun (c1, n1, _) (c2, n2, _) -> (not c2 || c1) && (not n2 || n1))
           type_variance
           (add_injectivity (List.map snd styext.ptyext_params))
      then [] else [Includecore.Variance]
  in
  if err <> [] then
    raise (Error(loc, Extension_mismatch (type_path, err)));
  let ttype_params = make_params env styext.ptyext_params in
  let type_params = List.map (fun (cty, _) -> cty.ctyp_type) ttype_params in
  List.iter2 (Ctype.unify_var env)
    (Ctype.instance_list env type_decl.type_params)
    type_params;
  let constructors =
    List.map (transl_extension_constructor env type_path
               type_decl.type_params type_params styext.ptyext_private)
      styext.ptyext_constructors
  in
  Ctype.end_def();
  (* Generalize types *)
  List.iter Ctype.generalize type_params;
  List.iter
    (fun ext ->
       Btype.iter_type_expr_cstr_args Ctype.generalize ext.ext_type.ext_args;
       may Ctype.generalize ext.ext_type.ext_ret_type)
    constructors;
  (* Check that all type variables are closed *)
  List.iter
    (fun ext ->
       match Ctype.closed_extension_constructor ext.ext_type with
         Some ty ->
           raise(Error(ext.ext_loc, Unbound_type_var_ext(ty, ext.ext_type)))
       | None -> ())
    constructors;
  (* Check variances are correct *)
  List.iter
    (fun ext->
       ignore (compute_variance_extension env true type_decl
                 ext.ext_type (type_variance, loc)))
    constructors;
  (* Add extension constructors to the environment *)
  let newenv =
    List.fold_left
      (fun env ext ->
         Env.add_extension ~check:true ext.ext_id ext.ext_type env)
      env constructors
  in
  let tyext =
    { tyext_path = type_path;
      tyext_txt = styext.ptyext_path;
      tyext_params = ttype_params;
      tyext_constructors = constructors;
      tyext_private = styext.ptyext_private;
      tyext_attributes = styext.ptyext_attributes; }
  in
    (tyext, newenv)
(* Wrapper: install the extension's warning attributes for the
   duration of the translation. *)
let transl_type_extension extend env loc styext =
  Builtin_attributes.warning_scope styext.ptyext_attributes
    (fun () -> transl_type_extension extend env loc styext)
(* Translate an exception declaration: an extension constructor of
   the predefined [exn] type, generalized and checked for closedness,
   then entered into the environment. *)
let transl_exception env sext =
  reset_type_variables();
  Ctype.begin_def();
  let ext =
    transl_extension_constructor env
      Predef.path_exn [] [] Asttypes.Public sext
  in
  Ctype.end_def();
  (* Generalize types *)
  Btype.iter_type_expr_cstr_args Ctype.generalize ext.ext_type.ext_args;
  may Ctype.generalize ext.ext_type.ext_ret_type;
  (* Check that all type variables are closed *)
  begin match Ctype.closed_extension_constructor ext.ext_type with
    Some ty ->
      raise (Error(ext.ext_loc, Unbound_type_var_ext(ty, ext.ext_type)))
  | None -> ()
  end;
  let newenv = Env.add_extension ~check:true ext.ext_id ext.ext_type env in
  ext, newenv
(* Walk an arrow type in parallel with its syntax and produce one
   native representation per argument plus one for the result.  This
   simplified version always answers [Same_as_ocaml_repr] for every
   position. *)
let rec parse_native_repr_attributes env core_type ty =
  match core_type.ptyp_desc, (Ctype.repr ty).desc
  with
  | Ptyp_arrow (_, _, ct2), Tarrow (_, _, t2, _) ->
    let repr_arg = Same_as_ocaml_repr in
    let repr_args, repr_res =
      parse_native_repr_attributes env ct2 t2
    in
    (repr_arg :: repr_args, repr_res)
  (* Syntax and inferred type must agree on being an arrow. *)
  | Ptyp_arrow _, _ | _, Tarrow _ -> assert false
  | _ -> ([], Same_as_ocaml_repr)
(* Shadowing wrapper: look through the "function$" wrapper (uncurried
   functions) before delegating to the plain arrow walker above. *)
let parse_native_repr_attributes env core_type ty =
  match core_type.ptyp_desc, (Ctype.repr ty).desc
  with
  | Ptyp_constr ({txt = Lident "function$"}, [{ptyp_desc = Ptyp_arrow (_, _, ct2)}; _]),
    Tconstr (Pident {name = "function$"},[{desc = Tarrow (_, _, t2, _)}; _],_) ->
    let repr_args, repr_res = parse_native_repr_attributes env ct2 t2 in
    let native_repr_args = Same_as_ocaml_repr :: repr_args in
    (native_repr_args, repr_res)
  | _ -> parse_native_repr_attributes env core_type ty
(* Translate a value declaration *)
(* Translate a value declaration ([val] or [external]).  A bare
   [val] is only legal in signatures; an [external] gets a primitive
   description built from its type (or from an "internal.arity"
   attribute when present). *)
let transl_value_decl env loc valdecl =
  let cty = Typetexp.transl_type_scheme env valdecl.pval_type in
  let ty = cty.ctyp_type in
  let v =
    match valdecl.pval_prim with
      [] when Env.is_in_signature env ->
        { val_type = ty; val_kind = Val_reg; Types.val_loc = loc;
          val_attributes = valdecl.pval_attributes }
    | [] ->
        raise (Error(valdecl.pval_loc, Val_in_structure))
    | _ ->
        let native_repr_args, native_repr_res =
          (* When an "internal.arity" attribute is present, it gives
             the number of arguments directly; otherwise the arity is
             derived from the arrow type. *)
          let rec scann (attrs : Parsetree.attributes)  =
            match attrs with
            | ({txt = "internal.arity";_},
               PStr [ {pstr_desc = Pstr_eval
                           (
                             ({pexp_desc = Pexp_constant (Pconst_integer (i,_))} :
                                Parsetree.expression) ,_)}]) :: _ ->
               Some (int_of_string i)
            | _ :: rest  -> scann rest
            | [] -> None
          and make n =
            if n = 0 then []
            else Primitive.Same_as_ocaml_repr :: make (n - 1)
          in
          match scann valdecl.pval_attributes with
          | None ->  parse_native_repr_attributes env valdecl.pval_type ty
          | Some x -> make x , Primitive.Same_as_ocaml_repr
        in
        let prim =
          Primitive.parse_declaration valdecl
            ~native_repr_args
            ~native_repr_res
        in
        let prim_native_name = prim.prim_native_name in
        (* Reject zero-arity externals unless the name is a compiler
           primitive ('%'/'#') or starts with the two magic bytes
           below (presumably an encoded FFI spec -- see
           Primitive.parse_declaration's callers; TODO confirm). *)
        if prim.prim_arity = 0 &&
           not ( String.length prim_native_name >= 20 &&
                 String.unsafe_get prim_native_name 0 = '\132' &&
                 String.unsafe_get prim_native_name 1 = '\149'
               ) &&
           (prim.prim_name = "" || (prim.prim_name.[0] <> '%' && prim.prim_name.[0] <> '#')) then
          raise(Error(valdecl.pval_type.ptyp_loc, Null_arity_external));
        { val_type = ty; val_kind = Val_prim prim; Types.val_loc = loc;
          val_attributes = valdecl.pval_attributes }
  in
  let (id, newenv) =
    Env.enter_value valdecl.pval_name.txt v env
      ~check:(fun s -> Warnings.Unused_value_declaration s)
  in
  let desc =
    {
      val_id = id;
      val_name = valdecl.pval_name;
      val_desc = cty; val_val = v;
      val_prim = valdecl.pval_prim;
      val_loc = valdecl.pval_loc;
      val_attributes = valdecl.pval_attributes;
    }
  in
  desc, newenv
(* Exported entry point: run [transl_value_decl] inside the warning
   scope of the declaration's own attributes. *)
let transl_value_decl env loc valdecl =
  Builtin_attributes.warning_scope valdecl.pval_attributes
    (fun () -> transl_value_decl env loc valdecl)
(* Translate a "with" constraint -- much simplified version of
   transl_type_decl. *)
(* Type-check a [with type t = ...] constraint against the original
   declaration [orig_decl].  Unifies the parameters when arities match,
   re-checks user constraints, recomputes variance and immediacy, and
   produces the constrained declaration. *)
let transl_with_constraint env id row_path orig_decl sdecl =
  Env.mark_type_used env (Ident.name id) orig_decl;
  reset_type_variables();
  Ctype.begin_def();
  let tparams = make_params env sdecl.ptype_params in
  let params = List.map (fun (cty, _) -> cty.ctyp_type) tparams in
  let orig_decl = Ctype.instance_declaration orig_decl in
  let arity_ok = List.length params = orig_decl.type_arity in
  if arity_ok then
    List.iter2 (Ctype.unify_var env) params orig_decl.type_params;
  (* Re-translate and enforce each "constraint ty = ty'" clause. *)
  let constraints = List.map
    (function (ty, ty', loc) ->
       try
         let cty = transl_simple_type env false ty in
         let cty' = transl_simple_type env false ty' in
         let ty = cty.ctyp_type in
         let ty' = cty'.ctyp_type in
         Ctype.unify env ty ty';
         (cty, cty', loc)
       with Ctype.Unify tr ->
         raise(Error(loc, Inconsistent_constraint (env, tr))))
    sdecl.ptype_cstrs
  in
  let no_row = not (is_fixed_type sdecl) in
  let (tman, man) = match sdecl.ptype_manifest with
      None -> None, None
    | Some sty ->
      let cty = transl_simple_type env no_row sty in
      Some cty, Some cty.ctyp_type
  in
  (* Privacy is inherited from the original concrete declaration when
     arities agree; a redundant [private] triggers a deprecation. *)
  let priv =
    if sdecl.ptype_private = Private then Private else
    if arity_ok && orig_decl.type_kind <> Type_abstract
    then orig_decl.type_private else sdecl.ptype_private
  in
  if arity_ok && orig_decl.type_kind <> Type_abstract
  && sdecl.ptype_private = Private then
    Location.deprecated sdecl.ptype_loc "spurious use of private";
  let type_kind, type_unboxed =
    if arity_ok && man <> None then
      orig_decl.type_kind, orig_decl.type_unboxed
    else
      Type_abstract, unboxed_false_default_false
  in
  let decl =
    { type_params = params;
      type_arity = List.length params;
      type_kind;
      type_private = priv;
      type_manifest = man;
      type_variance = [];
      type_newtype_level = None;
      type_loc = sdecl.ptype_loc;
      type_attributes = sdecl.ptype_attributes;
      type_immediate = false;
      type_unboxed;
    }
  in
  begin match row_path with None -> ()
  | Some p -> set_fixed_row env sdecl.ptype_loc p decl
  end;
  (* No type variable may remain unbound in the result. *)
  begin match Ctype.closed_type_decl decl with None -> ()
  | Some ty -> raise(Error(sdecl.ptype_loc, Unbound_type_var(ty,decl)))
  end;
  let decl = name_recursion sdecl id decl in
  let type_variance =
    compute_variance_decl env true decl
      (add_injectivity (List.map snd sdecl.ptype_params), sdecl.ptype_loc)
  in
  let type_immediate = compute_immediacy env decl in
  let decl = {decl with type_variance; type_immediate} in
  Ctype.end_def();
  generalize_decl decl;
  {
    typ_id = id;
    typ_name = sdecl.ptype_name;
    typ_params = tparams;
    typ_type = decl;
    typ_cstrs = constraints;
    typ_loc = sdecl.ptype_loc;
    typ_manifest = tman;
    typ_kind = Ttype_abstract;
    typ_private = sdecl.ptype_private;
    typ_attributes = sdecl.ptype_attributes;
  }
(* Approximate a type declaration: just make all types abstract *)
(* Build a fully-abstract declaration of the given arity: fresh,
   unconstrained parameters, no manifest, full variance. *)
let abstract_type_decl arity =
  let rec fresh_vars n =
    if n <= 0 then []
    else Ctype.newvar () :: fresh_vars (n - 1)
  in
  Ctype.begin_def ();
  let decl =
    { type_params = fresh_vars arity;
      type_arity = arity;
      type_kind = Type_abstract;
      type_private = Public;
      type_manifest = None;
      type_variance = replicate_list Variance.full arity;
      type_newtype_level = None;
      type_loc = Location.none;
      type_attributes = [];
      type_immediate = false;
      type_unboxed = unboxed_false_default_false;
    }
  in
  Ctype.end_def ();
  generalize_decl decl;
  decl
(* Approximate each declaration by a fresh ident paired with an
   abstract declaration of the same arity. *)
let approx_type_decl sdecl_list =
  let approximate sdecl =
    let id = Ident.create sdecl.ptype_name.txt in
    let arity = List.length sdecl.ptype_params in
    (id, abstract_type_decl arity)
  in
  List.map approximate sdecl_list
(* Variant of check_abbrev_recursion to check the well-formedness
conditions on type abbreviations defined within recursive modules. *)
let check_recmod_typedecl env loc recmod_ids path decl =
  (* recmod_ids is the list of recursively-defined module idents.
     (path, decl) is the type declaration to be checked. *)
  let to_check path =
    List.exists (fun id -> Path.isfree id path) recmod_ids in
  check_well_founded_decl env loc path decl to_check;
  check_recursion env loc path decl to_check
(**** Error report ****)
open Format
(* If the unbound variable [tv] occurs in one of the items [tl], print
   which item mentions it.  [typ] projects an item to its type, [kwd]
   names the item category ("case", "field", ...), [pr] prints the item.
   Prints nothing when no item contains [tv]. *)
let explain_unbound_gen ppf tv tl typ kwd pr =
  try
    let ti = List.find (fun ti -> Ctype.deep_occur tv (typ ti)) tl in
    let ty0 = (* Hack to force aliasing when needed *)
      Btype.newgenty (Tobject(tv, ref None)) in
    Printtyp.reset_and_mark_loops_list [typ ti; ty0];
    fprintf ppf
      ".@.@[<hov2>In %s@ %a@;<1 -2>the variable %a is unbound@]"
      kwd pr ti Printtyp.type_expr tv
  with Not_found -> ()
(* Specialization of [explain_unbound_gen]: print each item as its
   [lab]-prefix followed by its type. *)
let explain_unbound ppf tv tl typ kwd lab =
  explain_unbound_gen ppf tv tl typ kwd
    (fun ppf ti -> fprintf ppf "%s%a" (lab ti) Printtyp.type_expr (typ ti))
(* Explain an unbound variable [tv] inside a single type [ty],
   descending into object fields or polymorphic-variant rows so the
   message can point at the offending method or case. *)
let explain_unbound_single ppf tv ty =
  let trivial ty =
    explain_unbound ppf tv [ty] (fun t -> t) "type" (fun _ -> "") in
  match (Ctype.repr ty).desc with
    Tobject(fi,_) ->
      let (tl, rv) = Ctype.flatten_fields fi in
      (* [tv] being the row variable itself has no finer location. *)
      if rv == tv then trivial ty else
      explain_unbound ppf tv tl (fun (_,_,t) -> t)
        "method" (fun (lab,_,_) -> lab ^ ": ")
  | Tvariant row ->
      let row = Btype.row_repr row in
      if row.row_more == tv then trivial ty else
      explain_unbound ppf tv row.row_fields
        (fun (_l,f) -> match Btype.row_field_repr f with
           Rpresent (Some t) -> t
         | Reither (_,[t],_,_) -> t
         | Reither (_,tl,_,_) -> Btype.newgenty (Ttuple tl)
         | _ -> Btype.newgenty (Ttuple[]))
        "case" (fun (lab,_) -> "`" ^ lab ^ " of ")
  | _ -> trivial ty
(* Collect the argument types carried by a constructor, whether the
   payload is a tuple or an inline record. *)
let tys_of_constr_args args =
  match args with
  | Types.Cstr_tuple tys -> tys
  | Types.Cstr_record fields ->
      List.map (fun field -> field.Types.ld_type) fields
(* Pretty-print one [error] value on [ppf].  Messages are split into
   separate %s arguments so the box-based line breaking ("@ ", "@;")
   can reflow them. *)
let report_error ppf = function
  | Repeated_parameter ->
      fprintf ppf "A type parameter occurs several times"
  | Duplicate_constructor s ->
      fprintf ppf "Two constructors are named %s" s
  | Duplicate_label s ->
      fprintf ppf "Two labels are named %s" s
  | Recursive_abbrev s ->
      fprintf ppf "The type abbreviation %s is cyclic" s
  | Cycle_in_def (s, ty) ->
      Printtyp.reset_and_mark_loops ty;
      fprintf ppf "@[<v>The definition of %s contains a cycle:@ %a@]"
        s Printtyp.type_expr ty
  | Definition_mismatch (ty, errs) ->
      Printtyp.reset_and_mark_loops ty;
      fprintf ppf "@[<v>@[<hov>%s@ %s@;<1 2>%a@]%a@]"
        "This variant or record definition" "does not match that of type"
        Printtyp.type_expr ty
        (Includecore.report_type_mismatch "the original" "this" "definition")
        errs
  | Constraint_failed (ty, ty') ->
      Printtyp.reset_and_mark_loops ty;
      Printtyp.mark_loops ty';
      fprintf ppf "@[%s@ @[<hv>Type@ %a@ should be an instance of@ %a@]@]"
        "Constraints are not satisfied in this type."
        Printtyp.type_expr ty Printtyp.type_expr ty'
  | Parameters_differ (path, ty, ty') ->
      Printtyp.reset_and_mark_loops ty;
      Printtyp.mark_loops ty';
      fprintf ppf
        "@[<hv>In the definition of %s, type@ %a@ should be@ %a@]"
        (Path.name path) Printtyp.type_expr ty Printtyp.type_expr ty'
  | Inconsistent_constraint (env, trace) ->
      fprintf ppf "The type constraints are not consistent.@.";
      Printtyp.report_unification_error ppf env trace
        (fun ppf -> fprintf ppf "Type")
        (fun ppf -> fprintf ppf "is not compatible with type")
  | Type_clash (env, trace) ->
      Printtyp.report_unification_error ppf env trace
        (function ppf ->
           fprintf ppf "This type constructor expands to type")
        (function ppf ->
           fprintf ppf "but is used here with type")
  | Null_arity_external ->
      fprintf ppf "External identifiers must be functions"
  | Unbound_type_var (ty, decl) ->
      fprintf ppf "A type variable is unbound in this type declaration";
      let ty = Ctype.repr ty in
      (* Point at the constructor, field or manifest mentioning [ty]. *)
      begin match decl.type_kind, decl.type_manifest with
      | Type_variant tl, _ ->
          explain_unbound_gen ppf ty tl (fun c ->
              let tl = tys_of_constr_args c.Types.cd_args in
              Btype.newgenty (Ttuple tl)
            )
            "case" (fun ppf c ->
                fprintf ppf
                  "%s of %a" (Ident.name c.Types.cd_id)
                  Printtyp.constructor_arguments c.Types.cd_args)
      | Type_record (tl, _), _ ->
          explain_unbound ppf ty tl (fun l -> l.Types.ld_type)
            "field" (fun l -> Ident.name l.Types.ld_id ^ ": ")
      | Type_abstract, Some ty' ->
          explain_unbound_single ppf ty ty'
      | _ -> ()
      end
  | Unbound_type_var_ext (ty, ext) ->
      fprintf ppf "A type variable is unbound in this extension constructor";
      let args = tys_of_constr_args ext.ext_args in
      explain_unbound ppf ty args (fun c -> c) "type" (fun _ -> "")
  | Cannot_extend_private_type path ->
      fprintf ppf "@[%s@ %a@]"
        "Cannot extend private type definition"
        Printtyp.path path
  | Not_extensible_type path ->
      fprintf ppf "@[%s@ %a@ %s@]"
        "Type definition"
        Printtyp.path path
        "is not extensible"
  | Extension_mismatch (path, errs) ->
      fprintf ppf "@[<v>@[<hov>%s@ %s@;<1 2>%s@]%a@]"
        "This extension" "does not match the definition of type"
        (Path.name path)
        (Includecore.report_type_mismatch
           "the type" "this extension" "definition")
        errs
  | Rebind_wrong_type (lid, env, trace) ->
      Printtyp.report_unification_error ppf env trace
        (function ppf ->
           fprintf ppf "The constructor %a@ has type"
             Printtyp.longident lid)
        (function ppf ->
           fprintf ppf "but was expected to be of type")
  | Rebind_mismatch (lid, p, p') ->
      fprintf ppf
        "@[%s@ %a@ %s@ %s@ %s@ %s@ %s@]"
        "The constructor" Printtyp.longident lid
        "extends type" (Path.name p)
        "whose declaration does not match"
        "the declaration of type" (Path.name p')
  | Rebind_private lid ->
      fprintf ppf "@[%s@ %a@ %s@]"
        "The constructor"
        Printtyp.longident lid
        "is private"
  | Bad_variance (n, v1, v2) ->
      let variance (p,n,i) =
        let inj = if i then "injective " else "" in
        match p, n with
          true, true -> inj ^ "invariant"
        | true, false -> inj ^ "covariant"
        | false, true -> inj ^ "contravariant"
        | false, false -> if inj = "" then "unrestricted" else inj
      in
      (* Ordinal suffix: 1st, 2nd, 3rd, 11th-13th, ...th *)
      let suffix n =
        let teen = (n mod 100)/10 = 1 in
        match n mod 10 with
        | 1 when not teen -> "st"
        | 2 when not teen -> "nd"
        | 3 when not teen -> "rd"
        | _ -> "th"
      in
      (* n < 0 encodes special cases; otherwise n is the 1-based index
         of the offending type parameter. *)
      if n = -1 then
        fprintf ppf "@[%s@ %s@ It"
          "In this definition, a type variable has a variance that"
          "is not reflected by its occurrence in type parameters."
      else if n = -2 then
        fprintf ppf "@[%s@ %s@]"
          "In this definition, a type variable cannot be deduced"
          "from the type parameters."
      else if n = -3 then
        fprintf ppf "@[%s@ %s@ It"
          "In this definition, a type variable has a variance that"
          "cannot be deduced from the type parameters."
      else
        fprintf ppf "@[%s@ %s@ The %d%s type parameter"
          "In this definition, expected parameter"
          "variances are not satisfied."
          n (suffix n);
      if n <> -2 then
        fprintf ppf " was expected to be %s,@ but it is %s.@]"
          (variance v2) (variance v1)
  | Unavailable_type_constructor p ->
      fprintf ppf "The definition of type %a@ is unavailable" Printtyp.path p
  | Bad_fixed_type r ->
      fprintf ppf "This fixed type %s" r
  | Varying_anonymous ->
      fprintf ppf "@[%s@ %s@ %s@]"
        "In this GADT definition," "the variance of some parameter"
        "cannot be checked"
  | Val_in_structure ->
      fprintf ppf "Value declarations are only allowed in signatures"
  | Bad_immediate_attribute ->
      fprintf ppf "@[%s@ %s@]"
        "Types marked with the immediate attribute must be"
        "non-pointer types like int or bool"
  | Bad_unboxed_attribute msg ->
      fprintf ppf "@[This type cannot be unboxed because@ %s.@]" msg
  | Boxed_and_unboxed ->
      fprintf ppf "@[A type cannot be boxed and unboxed at the same time.@]"
  | Nonrec_gadt ->
      fprintf ppf
        "@[GADT case syntax cannot be used in a 'nonrec' block.@]"
(* Hook this module's [Error] exception into the compiler-wide error
   reporting machinery. *)
let () =
  Location.register_error_of_exn
    (function
      | Error (loc, err) ->
        Some (Location.error_of_printer loc report_error err)
      | _ ->
        None
    )
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/eb07cb50b6e6ba2bf26ce667d4e3c638a24b35c4/jscomp/ml/typedecl.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
*** Typing of type definitions ***
Note: do not factor the branches in the following pattern-matching:
the records must be constants for the compiler to do sharing on them.
Enter all declared types in the environment as abstract types
This case can occur when checking a recursive unboxed type
declaration.
only the above can be unboxed
Do not give too much fuel: PR#7424
Set the row variable in a fixed type
Check that all the variables found in [ty] are in [univ].
Because [ty] is the argument to an abstract type, the representation
of that abstract type could be any subexpression of [ty], in particular
any type variable present in [ty].
Bind type parameters
The type is not unboxable, mark it as boxed
Check constraints
Add abstract row
Check for cyclic abbreviations
Check that all constraints are enforced
If both a variant/record definition and a type equation are given,
need to check that the equation refers to a type of the same kind
with the same constructors and labels.
Check that recursion is well-founded
Format.eprintf "@[%a@]@." Printtyp.raw_type_expr ty;
!Clflags.recursive_types
Will be detected by check_recursion
Attempt to expand a type abbreviation if:
1- [to_check path'] holds
(otherwise the expansion cannot involve [path]);
2- we haven't expanded this type constructor before
(otherwise we could loop if [path'] is itself
a non-regular abbreviation).
Attempt expansion
Compute variance
Requirements
Prepare
Compute occurrences in the body
Check variance of parameters
Check propagation from constrained parameters
If there are no extra variables there is nothing to do
Use required variance where relevant
|| tr = Type_new
set
only check
A parameter is constrained if it is either instantiated,
or it is a variable appearing in another parameter
fv1 @ fv2 = free_variables of other parameters
Computes the fixpoint for the variance and immediacy of type declarations
for typeclass.ml
Check multiple declarations of labels/constructors
Translate a set of type declarations, mutually recursive or not
Add dummy types for fixed rows
Create identifiers.
Enter types.
Translate each declaration.
See typecore.ml for a description of the algorithm used
to detect unused declarations in a set of recursive definitions.
Check for duplicates
Build the final env.
Update stubs
Check that all type variables are closed
Check that constraints are enforced
Name recursion
Add variances to the environment
Check re-exportation
Keep original declaration
Done
Translating type extensions
Remove "_" names from parameters used in the constructor
Ensure that constructor's type matches the type being extended
Disallow rebinding private constructors to non-private
Check that all type variables are closed
Check variances are correct
Add extension constructors to the environment
Check that all type variables are closed
Translate a value declaration
Approximate a type declaration: just make all types abstract
Variant of check_abbrev_recursion to check the well-formedness
conditions on type abbreviations defined within recursive modules.
*** Error report ***
Hack to force aliasing when needed | and , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Misc
open Asttypes
open Parsetree
open Primitive
open Types
open Typetexp
(* Native-representation attributes on externals ([@unboxed]/[@untagged]). *)
type native_repr_kind = Unboxed | Untagged

(* All errors this module can report; rendered by [report_error]. *)
type error =
    Repeated_parameter
  | Duplicate_constructor of string
  | Duplicate_label of string
  | Recursive_abbrev of string
  | Cycle_in_def of string * type_expr
  | Definition_mismatch of type_expr * Includecore.type_mismatch list
  | Constraint_failed of type_expr * type_expr
  | Inconsistent_constraint of Env.t * (type_expr * type_expr) list
  | Type_clash of Env.t * (type_expr * type_expr) list
  | Parameters_differ of Path.t * type_expr * type_expr
  | Null_arity_external
  | Unbound_type_var of type_expr * type_declaration
  | Cannot_extend_private_type of Path.t
  | Not_extensible_type of Path.t
  | Extension_mismatch of Path.t * Includecore.type_mismatch list
  | Rebind_wrong_type of Longident.t * Env.t * (type_expr * type_expr) list
  | Rebind_mismatch of Longident.t * Path.t * Path.t
  | Rebind_private of Longident.t
  | Bad_variance of int * (bool * bool * bool) * (bool * bool * bool)
  | Unavailable_type_constructor of Path.t
  | Bad_fixed_type of string
  | Unbound_type_var_ext of type_expr * extension_constructor
  | Varying_anonymous
  | Val_in_structure
  | Bad_immediate_attribute
  | Bad_unboxed_attribute of string
  | Boxed_and_unboxed
  | Nonrec_gadt
open Typedtree
exception Error of Location.t * error
(* Decide the unboxed status of a declaration from its [@boxed] /
   [@unboxed] attributes, falling back to the global default
   ([!Clflags.unboxed_types]) when neither is given.  Both attributes
   together is an error. *)
let get_unboxed_from_attributes sdecl =
  let unboxed = Builtin_attributes.has_unboxed sdecl.ptype_attributes in
  let boxed = Builtin_attributes.has_boxed sdecl.ptype_attributes in
  if boxed && unboxed then
    raise (Error (sdecl.ptype_loc, Boxed_and_unboxed))
  else if boxed then unboxed_false_default_false
  else if unboxed then unboxed_true_default_false
  else if !Clflags.unboxed_types then unboxed_true_default_true
  else unboxed_false_default_true
(* Enter the declared type in [env] as an abstract placeholder so that
   the bodies of a recursive group can refer to it.  For [nonrec] this
   is only needed for "#row" helper types; GADT syntax is rejected in
   [nonrec] blocks here. *)
let enter_type rec_flag env sdecl id =
  let needed =
    match rec_flag with
    | Asttypes.Nonrecursive ->
      begin match sdecl.ptype_kind with
      | Ptype_variant scds ->
        List.iter (fun cd ->
            if cd.pcd_res <> None then raise (Error(cd.pcd_loc, Nonrec_gadt)))
          scds
      | _ -> ()
      end;
      Btype.is_row_name (Ident.name id)
    | Asttypes.Recursive -> true
  in
  if not needed then env else
  let decl =
    { type_params =
        List.map (fun _ -> Btype.newgenvar ()) sdecl.ptype_params;
      type_arity = List.length sdecl.ptype_params;
      type_kind = Type_abstract;
      type_private = sdecl.ptype_private;
      (* A manifest is represented by a fresh variable, unified with
         the real body later by [update_type]. *)
      type_manifest =
        begin match sdecl.ptype_manifest with None -> None
        | Some _ -> Some(Ctype.newvar ()) end;
      type_variance = List.map (fun _ -> Variance.full) sdecl.ptype_params;
      type_newtype_level = None;
      type_loc = sdecl.ptype_loc;
      type_attributes = sdecl.ptype_attributes;
      type_immediate = false;
      type_unboxed = unboxed_false_default_false;
    }
  in
  Env.add_type ~check:true id decl env
(* Unify the placeholder manifest recorded by [enter_type] in
   [temp_env] with the actual type constructor, reporting a clash at
   [loc] on failure. *)
let update_type temp_env env id loc =
  let path = Path.Pident id in
  let decl = Env.find_type path temp_env in
  match decl.type_manifest with None -> ()
  | Some ty ->
      let params = List.map (fun _ -> Ctype.newvar ()) decl.type_params in
      try Ctype.unify env (Ctype.newconstr path params) ty
      with Ctype.Unify trace ->
        raise (Error(loc, Type_clash (env, trace)))
(* We use the Ctype.expand_head_opt version of expand_head to get access
   to the manifest type of private abbreviations. *)
(* Follow [@unboxed] single-constructor/single-field wrappers down to
   the underlying representation type.  [fuel] bounds the descent so a
   recursive unboxed declaration cannot loop; [None] means the
   representation could not be determined. *)
let rec get_unboxed_type_representation env ty fuel =
  if fuel < 0 then None else
  let ty = Ctype.repr (Ctype.expand_head_opt env ty) in
  match ty.desc with
  | Tconstr (p, args, _) ->
    begin match Env.find_type p env with
    | exception Not_found -> Some ty
    | {type_unboxed = {unboxed = false}} -> Some ty
    (* The three unboxable shapes: one-field record, one-constructor
       variant with a single tuple or inline-record argument. *)
    | {type_params; type_kind =
         Type_record ([{ld_type = ty2; _}], _)
       | Type_variant [{cd_args = Cstr_tuple [ty2]; _}]
       | Type_variant [{cd_args = Cstr_record [{ld_type = ty2; _}]; _}]}
      -> get_unboxed_type_representation env
           (Ctype.apply env type_params ty2 args) (fuel - 1)
    (* This case can occur when checking a recursive unboxed type
       declaration. *)
    | {type_kind=Type_abstract} -> None
    end
  | _ -> Some ty

(* Public entry point; 100 is a generous but finite fuel budget. *)
let get_unboxed_type_representation env ty =
  get_unboxed_type_representation env ty 100
;;
(* Determine if a type definition defines a fixed type. (PW) *)
(* A declaration is a "fixed type" when it is a private abbreviation of
   an open object/variant type, i.e. its manifest contains a row
   variable (looking through aliases). *)
let is_fixed_type sd =
  let rec has_row_var sty =
    match sty.ptyp_desc with
      Ptyp_alias (sty, _) -> has_row_var sty
    | Ptyp_class _
    | Ptyp_object (_, Open)
    | Ptyp_variant (_, Open, _)
    | Ptyp_variant (_, Closed, Some _) -> true
    | _ -> false
  in
  match sd.ptype_manifest with
    None -> false
  | Some sty ->
      sd.ptype_kind = Ptype_abstract &&
      sd.ptype_private = Private &&
      has_row_var sty
(* Fix the row of a fixed-type declaration: locate the row variable of
   the manifest (object or variant) and mutate it into a reference to
   the "#row" type at path [p].  Errors if the manifest has no row. *)
let set_fixed_row env loc p decl =
  let tm =
    match decl.type_manifest with
      None -> assert false
    | Some t -> Ctype.expand_head env t
  in
  let rv =
    match tm.desc with
      Tvariant row ->
        let row = Btype.row_repr row in
        (* Mark the row as fixed in place. *)
        tm.desc <- Tvariant {row with row_fixed = true};
        if Btype.static_row row then Btype.newgenty Tnil
        else row.row_more
    | Tobject (ty, _) ->
        snd (Ctype.flatten_fields ty)
    | _ ->
        raise (Error (loc, Bad_fixed_type "is not an object or variant"))
  in
  if not (Btype.is_Tvar rv) then
    raise (Error (loc, Bad_fixed_type "has no row variable"));
  rv.desc <- Tconstr (p, decl.type_params, ref Mnil)
(* Translate one type declaration *)
(* Sets of strings, ordered lexicographically (used for duplicate
   label/constructor detection). *)
module StringSet = Set.Make (String)
(* Translate the declared type parameters, turning a rebinding of an
   already-used parameter name into a [Repeated_parameter] error. *)
let make_params env params =
  let make_param (sty, v) =
    try
      (transl_type_param env sty, v)
    with Already_bound ->
      raise(Error(sty.ptyp_loc, Repeated_parameter))
  in
  List.map make_param params
(* Translate record labels: reject duplicates, then produce both the
   Typedtree labels and their Types counterparts (with top-level
   [Tpoly] with no variables unwrapped). *)
let transl_labels env closed lbls =
  (* Duplicate detection: delegated to a hook in bs mode, otherwise a
     local string-set scan. *)
  if !Config.bs_only then
    match !Builtin_attributes.check_duplicated_labels lbls with
    | None -> ()
    | Some {loc;txt=name} -> raise (Error(loc,Duplicate_label name))
  else (
    let all_labels = ref StringSet.empty in
    List.iter
      (fun {pld_name = {txt=name; loc}} ->
         if StringSet.mem name !all_labels then
           raise(Error(loc, Duplicate_label name));
         all_labels := StringSet.add name !all_labels)
      lbls);
  let mk {pld_name=name;pld_mutable=mut;pld_type=arg;pld_loc=loc;
          pld_attributes=attrs} =
    Builtin_attributes.warning_scope attrs
      (fun () ->
         let arg = Ast_helper.Typ.force_poly arg in
         let cty = transl_simple_type env closed arg in
         {ld_id = Ident.create name.txt; ld_name = name; ld_mutable = mut;
          ld_type = cty; ld_loc = loc; ld_attributes = attrs}
      )
  in
  let lbls = List.map mk lbls in
  let lbls' =
    List.map
      (fun ld ->
         let ty = ld.ld_type.ctyp_type in
         let ty = match ty.desc with Tpoly(t,[]) -> t | _ -> ty in
         {Types.ld_id = ld.ld_id;
          ld_mutable = ld.ld_mutable;
          ld_type = ty;
          ld_loc = ld.ld_loc;
          ld_attributes = ld.ld_attributes
         }
      )
      lbls in
  lbls, lbls'
(* Translate a constructor's payload (tuple or inline record),
   returning the Types representation paired with the Typedtree one. *)
let transl_constructor_arguments env closed = function
  | Pcstr_tuple l ->
      let l = List.map (transl_simple_type env closed) l in
      Types.Cstr_tuple (List.map (fun t -> t.ctyp_type) l),
      Cstr_tuple l
  | Pcstr_record l ->
      let lbls, lbls' = transl_labels env closed l in
      Types.Cstr_record lbls',
      Cstr_record lbls
(* Translate one constructor of a variant declaration for the type at
   [type_path] with parameters [type_params].  A return-type annotation
   ([sret_type]) marks a GADT constructor; its parameters replace the
   declared ones.  Returns (typed args, typed ret, Types args,
   Types ret, effective params). *)
let make_constructor env type_path type_params sargs sret_type =
  match sret_type with
  | None ->
      let args, targs =
        transl_constructor_arguments env true sargs
      in
        targs, None, args, None, type_params
  | Some sret_type ->
      (* if it's a generalized constructor we must first narrow and
         then widen so as to not introduce any new constraints *)
      let z = narrow () in
      reset_type_variables ();
      let args, targs =
        transl_constructor_arguments env false sargs
      in
      let tret_type = transl_simple_type env false sret_type in
      let ret_type = tret_type.ctyp_type in
      let params =
        match (Ctype.repr ret_type).desc with
        | Tconstr (p', params, _) when Path.same type_path p' ->
            params
        | _ ->
            raise (Error (sret_type.ptyp_loc, Constraint_failed
                            (ret_type, Ctype.newconstr type_path type_params)))
      in
      widen z;
      targs, Some tret_type, args, Some ret_type, params
let transl_declaration env sdecl id =
reset_type_variables();
Ctype.begin_def ();
let tparams = make_params env sdecl.ptype_params in
let params = List.map (fun (cty, _) -> cty.ctyp_type) tparams in
let cstrs = List.map
(fun (sty, sty', loc) ->
transl_simple_type env false sty,
transl_simple_type env false sty', loc)
sdecl.ptype_cstrs
in
let raw_status = get_unboxed_from_attributes sdecl in
if raw_status.unboxed && not raw_status.default then begin
match sdecl.ptype_kind with
| Ptype_abstract ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"it is abstract"))
| Ptype_variant [{pcd_args = Pcstr_tuple []; _}] ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"its constructor has no argument"))
| Ptype_variant [{pcd_args = Pcstr_tuple [_]; _}] -> ()
| Ptype_variant [{pcd_args = Pcstr_tuple _; _}] ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"its constructor has more than one argument"))
| Ptype_variant [{pcd_args = Pcstr_record
[{pld_mutable=Immutable; _}]; _}] -> ()
| Ptype_variant [{pcd_args = Pcstr_record [{pld_mutable=Mutable; _}]; _}] ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute "it is mutable"))
| Ptype_variant [{pcd_args = Pcstr_record _; _}] ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"its constructor has more than one argument"))
| Ptype_variant _ ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"it has more than one constructor"))
| Ptype_record [{pld_mutable=Immutable; _}] -> ()
| Ptype_record [{pld_mutable=Mutable; _}] ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"it is mutable"))
| Ptype_record _ ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"it has more than one field"))
| Ptype_open ->
raise(Error(sdecl.ptype_loc, Bad_unboxed_attribute
"extensible variant types cannot be unboxed"))
end;
let unboxed_status =
match sdecl.ptype_kind with
| Ptype_variant [{pcd_args = Pcstr_tuple [_]; _}]
| Ptype_variant [{pcd_args = Pcstr_record
[{pld_mutable = Immutable; _}]; _}]
| Ptype_record [{pld_mutable = Immutable; _}] ->
raw_status
    | _ -> (* only the above can be unboxed *)
      unboxed_false_default_false
in
let unbox = unboxed_status.unboxed in
let (tkind, kind) =
match sdecl.ptype_kind with
| Ptype_abstract -> Ttype_abstract, Type_abstract
| Ptype_variant scstrs ->
assert (scstrs <> []);
if List.exists (fun cstr -> cstr.pcd_res <> None) scstrs then begin
match cstrs with
[] -> ()
| (_,_,loc)::_ ->
Location.prerr_warning loc Warnings.Constraint_on_gadt
end;
let has_optional attrs = Ext_list.exists attrs (fun ({txt },_) -> txt = "res.optional") in
let scstrs =
Ext_list.map scstrs (fun ({pcd_args} as cstr) ->
match pcd_args with
| Pcstr_tuple _ -> cstr
| Pcstr_record lds ->
{cstr with pcd_args = Pcstr_record (Ext_list.map lds (fun ld ->
if has_optional ld.pld_attributes then
let typ = ld.pld_type in
let typ = {typ with ptyp_desc = Ptyp_constr ({txt = Lident "option"; loc=typ.ptyp_loc}, [typ])} in
{ld with pld_type = typ}
else ld
))}
) in
let all_constrs = ref StringSet.empty in
List.iter
(fun {pcd_name = {txt = name}} ->
if StringSet.mem name !all_constrs then
raise(Error(sdecl.ptype_loc, Duplicate_constructor name));
all_constrs := StringSet.add name !all_constrs)
scstrs;
let make_cstr scstr =
let name = Ident.create scstr.pcd_name.txt in
let targs, tret_type, args, ret_type, _cstr_params =
make_constructor env (Path.Pident id) params
scstr.pcd_args scstr.pcd_res
in
let tcstr =
{ cd_id = name;
cd_name = scstr.pcd_name;
cd_args = targs;
cd_res = tret_type;
cd_loc = scstr.pcd_loc;
cd_attributes = scstr.pcd_attributes }
in
let cstr =
{ Types.cd_id = name;
cd_args = args;
cd_res = ret_type;
cd_loc = scstr.pcd_loc;
cd_attributes = scstr.pcd_attributes }
in
tcstr, cstr
in
let make_cstr scstr =
Builtin_attributes.warning_scope scstr.pcd_attributes
(fun () -> make_cstr scstr)
in
let tcstrs, cstrs = List.split (List.map make_cstr scstrs) in
Ttype_variant tcstrs, Type_variant cstrs
| Ptype_record lbls ->
let has_optional attrs = Ext_list.exists attrs (fun ({txt },_) -> txt = "res.optional") in
let optionalLabels =
Ext_list.filter_map lbls
(fun lbl -> if has_optional lbl.pld_attributes then Some lbl.pld_name.txt else None) in
let lbls =
if optionalLabels = [] then lbls
else Ext_list.map lbls (fun lbl ->
let typ = lbl.pld_type in
let typ =
if has_optional lbl.pld_attributes then
{typ with ptyp_desc = Ptyp_constr ({txt = Lident "option"; loc=typ.ptyp_loc}, [typ])}
else typ in
{lbl with pld_type = typ }) in
let lbls, lbls' = transl_labels env true lbls in
let rep =
if unbox then Record_unboxed false
else
if optionalLabels <> []
then Record_optional_labels optionalLabels
else Record_regular
in
Ttype_record lbls, Type_record(lbls', rep)
| Ptype_open -> Ttype_open, Type_open
in
let (tman, man) = match sdecl.ptype_manifest with
None -> None, None
| Some sty ->
let no_row = not (is_fixed_type sdecl) in
let cty = transl_simple_type env no_row sty in
Some cty, Some cty.ctyp_type
in
let decl =
{ type_params = params;
type_arity = List.length params;
type_kind = kind;
type_private = sdecl.ptype_private;
type_manifest = man;
type_variance = List.map (fun _ -> Variance.full) params;
type_newtype_level = None;
type_loc = sdecl.ptype_loc;
type_attributes = sdecl.ptype_attributes;
type_immediate = false;
type_unboxed = unboxed_status;
} in
List.iter
(fun (cty, cty', loc) ->
let ty = cty.ctyp_type in
let ty' = cty'.ctyp_type in
try Ctype.unify env ty ty' with Ctype.Unify tr ->
raise(Error(loc, Inconsistent_constraint (env, tr))))
cstrs;
Ctype.end_def ();
if is_fixed_type sdecl then begin
let p =
try Env.lookup_type (Longident.Lident(Ident.name id ^ "#row")) env
with Not_found -> assert false in
set_fixed_row env sdecl.ptype_loc p decl
end;
begin match decl.type_manifest with None -> ()
| Some ty ->
if Ctype.cyclic_abbrev env id ty then
raise(Error(sdecl.ptype_loc, Recursive_abbrev sdecl.ptype_name.txt));
end;
{
typ_id = id;
typ_name = sdecl.ptype_name;
typ_params = tparams;
typ_type = decl;
typ_cstrs = cstrs;
typ_loc = sdecl.ptype_loc;
typ_manifest = tman;
typ_kind = tkind;
typ_private = sdecl.ptype_private;
typ_attributes = sdecl.ptype_attributes;
}
(* Generalize a type declaration *)
(* Generalize every type expression reachable from a declaration:
   parameters, kind (constructor/field types) and manifest. *)
let generalize_decl decl =
  List.iter Ctype.generalize decl.type_params;
  Btype.iter_type_expr_kind Ctype.generalize decl.type_kind;
  begin match decl.type_manifest with
  | None -> ()
  | Some ty -> Ctype.generalize ty
  end
module TypeSet = Btype.TypeSet
module TypeMap = Btype.TypeMap
(* Check that every constructor application inside [ty] satisfies the
   constraints of its declaration.  [visited] memoizes nodes so shared
   or cyclic types are traversed once. *)
let rec check_constraints_rec env loc visited ty =
  let ty = Ctype.repr ty in
  if TypeSet.mem ty !visited then () else begin
    visited := TypeSet.add ty !visited;
    match ty.desc with
    | Tconstr (path, args, _) ->
      (* Rebuild the constructor with fresh variables, enforce its
         declared constraints, then make sure [ty] matches. *)
      let args' = List.map (fun _ -> Ctype.newvar ()) args in
      let ty' = Ctype.newconstr path args' in
      begin try Ctype.enforce_constraints env ty'
        with Ctype.Unify _ -> assert false
           | Not_found -> raise (Error(loc, Unavailable_type_constructor path))
      end;
      if not (Ctype.matches env ty ty') then
        raise (Error(loc, Constraint_failed (ty, ty')));
      List.iter (check_constraints_rec env loc visited) args
    | Tpoly (ty, tl) ->
      let _, ty = Ctype.instance_poly false tl ty in
      check_constraints_rec env loc visited ty
    | _ ->
      Btype.iter_type_expr (check_constraints_rec env loc visited) ty
  end
module SMap = Map.Make(String)
(* Check constraints on each label type in [l], locating error
   positions via the corresponding surface-syntax labels [pl]. *)
let check_constraints_labels env visited l pl =
  let rec get_loc name = function
      [] -> assert false
    | pld :: tl ->
        if name = pld.pld_name.txt then pld.pld_type.ptyp_loc
        else get_loc name tl
  in
  List.iter
    (fun {Types.ld_id=name; ld_type=ty} ->
       check_constraints_rec env (get_loc (Ident.name name) pl) visited ty)
    l
(* Check the constraints of a whole translated declaration against its
   surface syntax: every constructor argument/result, record field and
   manifest must satisfy the constraints of the constructors it uses. *)
let check_constraints env sdecl (_, decl) =
  let visited = ref TypeSet.empty in
  begin match decl.type_kind with
  | Type_abstract -> ()
  | Type_variant l ->
      let find_pl = function
          Ptype_variant pl -> pl
        | Ptype_record _ | Ptype_abstract | Ptype_open -> assert false
      in
      let pl = find_pl sdecl.ptype_kind in
      (* Index surface constructors by name for location lookup. *)
      let pl_index =
        let foldf acc x =
          SMap.add x.pcd_name.txt x acc
        in
        List.fold_left foldf SMap.empty pl
      in
      List.iter
        (fun {Types.cd_id=name; cd_args; cd_res} ->
           let {pcd_args; pcd_res; _} =
             try SMap.find (Ident.name name) pl_index
             with Not_found -> assert false in
           begin match cd_args, pcd_args with
           | Cstr_tuple tyl, Pcstr_tuple styl ->
               List.iter2
                 (fun sty ty ->
                    check_constraints_rec env sty.ptyp_loc visited ty)
                 styl tyl
           | Cstr_record tyl, Pcstr_record styl ->
               check_constraints_labels env visited tyl styl
           | _ -> assert false
           end;
           match pcd_res, cd_res with
           | Some sr, Some r ->
               check_constraints_rec env sr.ptyp_loc visited r
           | _ ->
               () )
        l
  | Type_record (l, _) ->
      let find_pl = function
          Ptype_record pl -> pl
        | Ptype_variant _ | Ptype_abstract | Ptype_open -> assert false
      in
      let pl = find_pl sdecl.ptype_kind in
      check_constraints_labels env visited l pl
  | Type_open -> ()
  end;
  begin match decl.type_manifest with
  | None -> ()
  | Some ty ->
      let sty =
        match sdecl.ptype_manifest with Some sty -> sty | _ -> assert false
      in
      check_constraints_rec env sty.ptyp_loc visited ty
  end
(* When a declaration has both an explicit kind (variant/record/open)
   and a manifest [= ty], the manifest must be a type constructor whose
   declaration is compatible (same arity, equal parameters, matching
   declarations as checked by [Includecore]); otherwise report
   [Definition_mismatch]. *)
let check_coherence env loc id decl =
  match decl with
    { type_kind = (Type_variant _ | Type_record _| Type_open);
      type_manifest = Some ty } ->
      begin match (Ctype.repr ty).desc with
        Tconstr(path, args, _) ->
          begin try
            let decl' = Env.find_type path env in
            let err =
              if List.length args <> List.length decl.type_params
              then [Includecore.Arity]
              else if not (Ctype.equal env false args decl.type_params)
              then [Includecore.Constraint]
              else
                (* Substitute [id -> path] so the comparison is between
                   the declaration and itself seen through the manifest. *)
                Includecore.type_declarations ~loc ~equality:true env
                  (Path.last path)
                  decl'
                  id
                  (Subst.type_declaration
                     (Subst.add_type id path Subst.identity) decl)
            in
            if err <> [] then
              raise(Error(loc, Definition_mismatch (ty, err)))
          with Not_found ->
            raise(Error(loc, Unavailable_type_constructor path))
          end
      | _ -> raise(Error(loc, Definition_mismatch (ty, [])))
      end
  | _ -> ()
(* Check coherence of one declaration's manifest with its kind. *)
let check_abbrev env sdecl (id, decl) =
  check_coherence env sdecl.ptype_loc id decl
(* Check that the definition of [path] is well-founded: expanding it can
   never loop back on itself without going through a contractive type
   constructor (object or variant).  [to_check p] selects the paths that
   belong to the current recursive definition.  Raises
   [Recursive_abbrev] or [Cycle_in_def] at [loc] on failure.
   NOTE(review): the [rec_ok] match below was garbled in this copy
   (stray `_p`/`& &` comment remnants, missing catch-all case); restored
   assuming -rectypes is never enabled in this compiler — the upstream
   condition is kept in the comment. *)
let check_well_founded env loc path to_check ty =
  let visited = ref TypeMap.empty in
  let rec check ty0 parents ty =
    let ty = Btype.repr ty in
    if TypeSet.mem ty parents then begin
      (* A cycle: blame the defined constructor when it is the culprit,
         otherwise report a cycle in the definition. *)
      if match ty0.desc with
      | Tconstr (p, _, _) -> Path.same p path
      | _ -> false
      then raise (Error (loc, Recursive_abbrev (Path.name path)))
      else raise (Error (loc, Cycle_in_def (Path.name path, ty0)))
    end;
    let (fini, parents) =
      try
        let prev = TypeMap.find ty !visited in
        if TypeSet.subset parents prev then (true, parents) else
        (false, TypeSet.union parents prev)
      with Not_found ->
        (false, parents)
    in
    if fini then () else
    let rec_ok =
      (* Recursion through this node is harmless when it is contractive.
         Upstream: Tconstr(p,_,_) ->
                     !Clflags.recursive_types && Ctype.is_contractive env p
                 | _ -> !Clflags.recursive_types *)
      match ty.desc with
        Tconstr(_p,_,_) -> false
      | Tobject _ | Tvariant _ -> true
      | _ -> false
    in
    let visited' = TypeMap.add ty parents !visited in
    let arg_exn =
      (* Check the arguments first, remembering any failure so that we
         can still try expanding the abbreviation below. *)
      try
        visited := visited';
        let parents =
          if rec_ok then TypeSet.empty else TypeSet.add ty parents in
        Btype.iter_type_expr (check ty0 parents) ty;
        None
      with e ->
        visited := visited'; Some e
    in
    match ty.desc with
    | Tconstr(p, _, _) when arg_exn <> None || to_check p ->
        if to_check p then may raise arg_exn
        else Btype.iter_type_expr (check ty0 TypeSet.empty) ty;
        begin try
          let ty' = Ctype.try_expand_once_opt env ty in
          let ty0 = if TypeSet.is_empty parents then ty else ty0 in
          check ty0 (TypeSet.add ty parents) ty'
        with
          Ctype.Cannot_expand -> may raise arg_exn
        end
    | _ -> may raise arg_exn
  in
  let snap = Btype.snapshot () in
  (* GADT instance tracing can unify types as a side effect; undo it. *)
  try Ctype.wrap_trace_gadt_instances env (check ty TypeSet.empty) ty
  with Ctype.Unify _ ->
    Btype.backtrack snap
(* If the declaration has a manifest, check that [path args] (with fresh
   argument variables) is a well-founded abbreviation. *)
let check_well_founded_manifest env loc path decl =
  if decl.type_manifest = None then () else
  let args = List.map (fun _ -> Ctype.newvar()) decl.type_params in
  check_well_founded env loc path (Path.same path) (Ctype.newconstr path args)
(* Check well-foundedness of every type expression occurring anywhere
   in the declaration (kind, constraints, manifest). *)
let check_well_founded_decl env loc path decl to_check =
  let open Btype in
  (* Reuse the generic declaration iterator, redirecting every type
     expression through [check_well_founded]. *)
  let it =
    {type_iterators with
     it_type_expr = (fun _ -> check_well_founded env loc path to_check)} in
  it.it_type_declaration it (Ctype.instance_declaration decl)
(* Check for ill-defined abbrevs *)
let check_recursion env loc path decl to_check =
  (* to_check is true for potentially mutually recursive paths.
     (path, decl) is the type declaration to be checked.
     Verifies that recursive occurrences of [path] are "regular", i.e.
     always applied to exactly the declared parameters. *)
  if decl.type_params = [] then () else
  let visited = ref [] in
  let rec check_regular cpath args prev_exp ty =
    let ty = Ctype.repr ty in
    if not (List.memq ty !visited) then begin
      visited := ty :: !visited;
      match ty.desc with
      | Tconstr(path', args', _) ->
          if Path.same path path' then begin
            (* A recursive occurrence: its arguments must equal the
               formal parameters of the definition. *)
            if not (Ctype.equal env false args args') then
              raise (Error(loc,
                     Parameters_differ(cpath, ty, Ctype.newconstr path args)))
          end
          (* Attempt to expand a type abbreviation and verify it is
             regular *)
          else if to_check path' && not (List.mem path' prev_exp) then begin
            try
              let (params0, body0, _) = Env.find_type_expansion path' env in
              let (params, body) =
                Ctype.instance_parameterized_type params0 body0 in
              begin
                try List.iter2 (Ctype.unify env) params args'
                with Ctype.Unify _ ->
                  raise (Error(loc, Constraint_failed
                                 (ty, Ctype.newconstr path' params0)));
              end;
              check_regular path' args (path' :: prev_exp) body
            with Not_found -> ()
          end;
          List.iter (check_regular cpath args prev_exp) args'
      | Tpoly (ty, tl) ->
          let (_, ty) = Ctype.instance_poly ~keep_names:true false tl ty in
          check_regular cpath args prev_exp ty
      | _ ->
          Btype.iter_type_expr (check_regular cpath args prev_exp) ty
    end in
  Misc.may
    (fun body ->
      let (args, body) =
        Ctype.instance_parameterized_type
          ~keep_names:true decl.type_params body in
      check_regular path args [] body)
    decl.type_manifest
(* Check one translated declaration for ill-formed (non-regular)
   recursive abbreviations. *)
let check_abbrev_recursion env id_loc_list to_check tdecl =
  let decl = tdecl.typ_type in
  let id = tdecl.typ_id in
  check_recursion env (List.assoc id id_loc_list) (Path.Pident id) decl to_check
(* Variance accumulated so far for [ty], or [Variance.null] if unseen. *)
let get_variance ty visited =
  try TypeMap.find ty !visited with Not_found -> Variance.null
let compute_variance env visited vari ty =
let rec compute_variance_rec vari ty =
    (* Format.eprintf "%a: %x@." Printtyp.type_expr ty (Obj.magic vari); *)
let ty = Ctype.repr ty in
let vari' = get_variance ty visited in
if Variance.subset vari vari' then () else
let vari = Variance.union vari vari' in
visited := TypeMap.add ty vari !visited;
let compute_same = compute_variance_rec vari in
match ty.desc with
Tarrow (_, ty1, ty2, _) ->
let open Variance in
let v = conjugate vari in
let v1 =
if mem May_pos v || mem May_neg v
then set May_weak true v else v
in
compute_variance_rec v1 ty1;
compute_same ty2
| Ttuple tl ->
List.iter compute_same tl
| Tconstr (path, tl, _) ->
let open Variance in
if tl = [] then () else begin
try
let decl = Env.find_type path env in
let cvari f = mem f vari in
List.iter2
(fun ty v ->
let cv f = mem f v in
let strict =
cvari Inv && cv Inj || (cvari Pos || cvari Neg) && cv Inv
in
if strict then compute_variance_rec full ty else
let p1 = inter v vari
and n1 = inter v (conjugate vari) in
let v1 =
union (inter covariant (union p1 (conjugate p1)))
(inter (conjugate covariant) (union n1 (conjugate n1)))
and weak =
cvari May_weak && (cv May_pos || cv May_neg) ||
(cvari May_pos || cvari May_neg) && cv May_weak
in
let v2 = set May_weak weak v1 in
compute_variance_rec v2 ty)
tl decl.type_variance
with Not_found ->
List.iter (compute_variance_rec may_inv) tl
end
| Tobject (ty, _) ->
compute_same ty
| Tfield (_, _, ty1, ty2) ->
compute_same ty1;
compute_same ty2
| Tsubst ty ->
compute_same ty
| Tvariant row ->
let row = Btype.row_repr row in
List.iter
(fun (_,f) ->
match Btype.row_field_repr f with
Rpresent (Some ty) ->
compute_same ty
| Reither (_, tyl, _, _) ->
let open Variance in
let upper =
List.fold_left (fun s f -> set f true s)
null [May_pos; May_neg; May_weak]
in
let v = inter vari upper in
              (* cf PR#7269:
                 if List.length tyl > 1 then upper else inter vari upper *)
List.iter (compute_variance_rec v) tyl
| _ -> ())
row.row_fields;
compute_same row.row_more
| Tpoly (ty, _) ->
compute_same ty
| Tvar _ | Tnil | Tlink _ | Tunivar _ -> ()
| Tpackage (_, _, tyl) ->
let v =
Variance.(if mem Pos vari || mem Neg vari then full else may_inv)
in
List.iter (compute_variance_rec v) tyl
in
compute_variance_rec vari ty
(* Build a variance from may-pos/may-neg/injectivity flags; [May_weak]
   is set together with [May_neg]. *)
let make p n i =
  let open Variance in
  set May_pos p (set May_neg n (set May_weak n (set Inj i null)))
(* Compute the variance of the parameters of [decl] from the occurrences
   collected over the (mutable?, type) pairs [tyl].  When [check] holds,
   also validate against the [required] annotations, reporting
   [Bad_variance] at [loc].
   NOTE(review): the [concr] binding and the [(p, n)] adjustment in the
   final [List.map2] had been dropped by comment stripping in this copy
   (a dangling [let (p, n) =] fell straight into [and i = ...]); they are
   restored from the upstream compiler source. *)
let compute_variance_type env check (required, loc) decl tyl =
  (* An invariant annotation allows both polarities during checking. *)
  let required =
    List.map (fun (c,n,i) -> if c || n then (c,n,i) else (true,true,i))
      required
  in
  (* Collect occurrences of the parameters in the body. *)
  let params = List.map Btype.repr decl.type_params in
  let tvl = ref TypeMap.empty in
  let open Variance in
  List.iter
    (fun (cn,ty) ->
      compute_variance env tvl (if cn then full else covariant) ty)
    tyl;
  if check then begin
    (* Check the variance of the declared parameters themselves. *)
    let pos = ref 0 in
    List.iter2
      (fun ty (c, n, i) ->
        incr pos;
        let var = get_variance ty tvl in
        let (co,cn) = get_upper var and ij = mem Inj var in
        if Btype.is_Tvar ty && (co && not c || cn && not n || not ij && i)
        then raise (Error(loc, Bad_variance (!pos, (co,cn,ij), (c,n,i)))))
      params required;
    (* Check propagation through constrained (non-variable) parameters:
       free variables not among the parameters must not gain variance. *)
    let args = Btype.newgenty (Ttuple params) in
    let fvl = Ctype.free_variables args in
    let fvl = Ext_list.filter fvl (fun v -> not (List.memq v params)) in
    (* If there are no extra variables there is nothing to do. *)
    if fvl = [] then () else
    let tvl2 = ref TypeMap.empty in
    List.iter2
      (fun ty (p,n,_) ->
        if Btype.is_Tvar ty then () else
        let v =
          if p then if n then full else covariant else conjugate covariant in
        compute_variance env tvl2 v ty)
      params required;
    let visited = ref TypeSet.empty in
    let rec check ty =
      let ty = Ctype.repr ty in
      if TypeSet.mem ty !visited then () else
      let visited' = TypeSet.add ty !visited in
      visited := visited';
      let v1 = get_variance ty tvl in
      let snap = Btype.snapshot () in
      let v2 =
        TypeMap.fold
          (fun t vt v ->
            if Ctype.equal env false [ty] [t] then union vt v else v)
          !tvl2 null in
      Btype.backtrack snap;
      let (c1,n1) = get_upper v1 and (c2,n2,_,i2) = get_lower v2 in
      if c1 && not c2 || n1 && not n2 then
        if List.memq ty fvl then
          let code = if not i2 then -2 else if c2 || n2 then -1 else -3 in
          raise (Error (loc, Bad_variance (code, (c1,n1,false), (c2,n2,false))))
        else
          Btype.iter_type_expr check ty
    in
    List.iter (fun (_,ty) -> check ty) tyl;
  end;
  (* Produce the final variance of each parameter. *)
  List.map2
    (fun ty (p, n, i) ->
      let v = get_variance ty tvl in
      let tr = decl.type_private in
      (* Use required variance where relevant. *)
      let concr = decl.type_kind <> Type_abstract in
      let (p, n) =
        if tr = Private || not (Btype.is_Tvar ty) then (p, n) (* set *)
        else (false, false) (* only check *)
      and i = concr || i && tr = Private in
      let v = union v (make p n i) in
      let v =
        if not concr then v else
        if mem Pos v && mem Neg v then full else
        if Btype.is_Tvar ty then v else
        union v
          (if p then if n then full else covariant else conjugate covariant)
      in
      if decl.type_kind = Type_abstract && tr = Public then v else
      set May_weak (mem May_neg v) v)
    params required
let add_false = List.map (fun ty -> false, ty)
(* Is [ty] constrained with respect to the variable lists [vars]?  A
   non-variable is always constrained; a variable is constrained iff it
   occurs (physically) in one of the lists. *)
let constrained vars ty =
  match ty.desc with
  | Tvar _ -> List.exists (fun tl -> List.memq ty tl) vars
  | _ -> true
(* View constructor arguments as (mutable?, type) pairs for variance
   computation; tuple arguments are never mutable. *)
let for_constr = function
  | Types.Cstr_tuple l -> add_false l
  | Types.Cstr_record l ->
      List.map
        (fun {Types.ld_mutable; ld_type} -> (ld_mutable = Mutable, ld_type))
        l
(* Variance computation for one (GADT) constructor.  Without a return
   type this is ordinary variance on the arguments; with one, the
   declaration's parameters are replaced by the return type's arguments,
   and anonymous ([Varying_anonymous]) parameters are rejected when they
   are constrained by the others.
   NOTE(review): the [expand_head] line below appeared de-commented in
   this copy (comment-remnant spacing); upstream keeps it commented out,
   restored here. *)
let compute_variance_gadt env check (required, loc as rloc) decl
    (tl, ret_type_opt) =
  match ret_type_opt with
  | None ->
      compute_variance_type env check rloc {decl with type_private = Private}
        (for_constr tl)
  | Some ret_type ->
      match Ctype.repr ret_type with
      | {desc=Tconstr (_, tyl, _)} ->
          (* let tyl = List.map (Ctype.expand_head env) tyl in *)
          let tyl = List.map Ctype.repr tyl in
          let fvl = List.map (Ctype.free_variables ?env:None) tyl in
          let _ =
            (* fv1 accumulates the free variables of the parameters seen
               so far; fv2 holds those of the remaining ones. *)
            List.fold_left2
              (fun (fv1,fv2) ty (c,n,_) ->
                match fv2 with [] -> assert false
                | fv :: fv2 ->
                    if (c||n) && constrained (fv1 @ fv2) ty then
                      raise (Error(loc, Varying_anonymous));
                    (fv :: fv1, fv2))
              ([], fvl) tyl required
          in
          compute_variance_type env check rloc
            {decl with type_params = tyl; type_private = Private}
            (for_constr tl)
      | _ -> assert false
(* Variance check for one extension constructor, treated as a GADT
   constructor of the extended type. *)
let compute_variance_extension env check decl ext rloc =
  compute_variance_gadt env check rloc
    {decl with type_params = ext.ext_type_params}
    (ext.ext_args, ext.ext_ret_type)
(* Compute the variance of all parameters of [decl] from its kind and
   manifest; [check] requests validation against the required variances
   in [rloc]. *)
let compute_variance_decl env check decl (required, _ as rloc) =
  (* A fully abstract type takes its variance from the annotations. *)
  if (decl.type_kind = Type_abstract || decl.type_kind = Type_open)
  && decl.type_manifest = None then
    List.map
      (fun (c, n, i) ->
        make (not n) (not c) (decl.type_kind <> Type_abstract || i))
      required
  else
  let mn =
    match decl.type_manifest with
      None -> []
    | Some ty -> [false, ty]
  in
  match decl.type_kind with
    Type_abstract | Type_open ->
      compute_variance_type env check rloc decl mn
  | Type_variant tll ->
      if List.for_all (fun c -> c.Types.cd_res = None) tll then
        compute_variance_type env check rloc decl
          (mn @ List.flatten (List.map (fun c -> for_constr c.Types.cd_args)
                                tll))
      else begin
        (* A GADT: handle each constructor separately (the manifest is
           wrapped as a pseudo-constructor) and join the results. *)
        let mn =
          List.map (fun (_,ty) -> (Types.Cstr_tuple [ty],None)) mn in
        let tll =
          mn @ List.map (fun c -> c.Types.cd_args, c.Types.cd_res) tll in
        match List.map (compute_variance_gadt env check rloc decl) tll with
        | vari :: rem ->
            let varl = List.fold_left (List.map2 Variance.union) vari rem in
            List.map
              Variance.(fun v -> if mem Pos v && mem Neg v then full else v)
              varl
        | _ -> assert false
      end
  | Type_record (ftl, _) ->
      compute_variance_type env check rloc decl
        (mn @ List.map (fun {Types.ld_mutable; ld_type} ->
             (ld_mutable = Mutable, ld_type)) ftl)
(* Idents of the phantom "#row" types generated for fixed declarations
   start with '#'; such declarations are skipped by the final variance
   check. *)
let is_hash id =
  match Ident.name id with
  | "" -> false
  | s -> s.[0] = '#'
(* Whether the declaration carries the [@@immediate] attribute. *)
let marked_as_immediate decl =
  Builtin_attributes.immediate decl.type_attributes
(* Determine whether values of the declared type are always immediate
   (never pointers), per [Ctype.maybe_pointer_type]. *)
let compute_immediacy env tdecl =
  match (tdecl.type_kind, tdecl.type_manifest) with
  | (Type_variant [{cd_args = Cstr_tuple [arg]; _}], _)
  | (Type_variant [{cd_args = Cstr_record [{ld_type = arg; _}]; _}], _)
  | (Type_record ([{ld_type = arg; _}], _), _)
    when tdecl.type_unboxed.unboxed ->
    (* An [@@unboxed] single-field type is immediate iff its payload is. *)
    begin match get_unboxed_type_representation env arg with
    | Some argrepr -> not (Ctype.maybe_pointer_type env argrepr)
    | None -> false
    end
  | (Type_variant (_ :: _ as cstrs), _) ->
    (* A variant is immediate when all constructors are constant. *)
    not (List.exists (fun c -> c.Types.cd_args <> Types.Cstr_tuple []) cstrs)
  | (Type_abstract, Some(typ)) ->
    not (Ctype.maybe_pointer_type env typ)
  | (Type_abstract, None) -> marked_as_immediate tdecl
  | _ -> false
(* Iterate variance and immediacy computation over the recursive group
   [decls] until both reach a fixpoint, then run the final checks
   ([@@immediate] validity, required-variance validation) and return the
   updated declarations together with the extended environment.
   NOTE(review): the commented-out debug dump below was mangled and
   duplicated by comment stripping in this copy; restored as a single
   comment. *)
let rec compute_properties_fixpoint env decls required variances immediacies =
  let new_decls =
    List.map2
      (fun (id, decl) (variance, immediacy) ->
        id, {decl with type_variance = variance; type_immediate = immediacy})
      decls (List.combine variances immediacies)
  in
  let new_env =
    List.fold_right
      (fun (id, decl) env -> Env.add_type ~check:true id decl env)
      new_decls env
  in
  let new_variances =
    List.map2
      (fun (_id, decl) -> compute_variance_decl new_env false decl)
      new_decls required
  in
  let new_variances =
    List.map2 (List.map2 Variance.union) new_variances variances in
  let new_immediacies =
    List.map
      (fun (_id, decl) -> compute_immediacy new_env decl)
      new_decls
  in
  if new_variances <> variances || new_immediacies <> immediacies then
    compute_properties_fixpoint env decls required new_variances new_immediacies
  else begin
    (* List.iter (fun (id, decl) ->
         Printf.eprintf "%s:" (Ident.name id);
         List.iter (fun (v : Variance.t) ->
             Printf.eprintf " %x" (Obj.magic v : int))
           decl.type_variance;
         prerr_endline "")
       new_decls; *)
    List.iter (fun (_, decl) ->
        if (marked_as_immediate decl) && (not decl.type_immediate) then
          raise (Error (decl.type_loc, Bad_immediate_attribute))
        else ())
      new_decls;
    List.iter2
      (fun (id, decl) req -> if not (is_hash id) then
          ignore (compute_variance_decl new_env true decl req))
      new_decls required;
    new_decls, new_env
  end
(* Starting point of the fixpoint: null variance for each parameter. *)
let init_variance (_id, decl) =
  List.map (fun _ -> Variance.null) decl.type_params
(* Map surface variance annotations to (covariant?, contravariant?,
   injective?) triples; injectivity cannot be declared in the source. *)
let add_injectivity =
  List.map
    (function
      | Covariant -> (true, false, false)
      | Contravariant -> (false, true, false)
      | Invariant -> (false, false, false)
    )
(* Variance computation for the type declarations attached to a set of
   class declarations; the result is propagated to the class abbreviation
   and class-type declarations. *)
let compute_variance_decls env cldecls =
  let decls, required =
    List.fold_right
      (fun (obj_id, obj_abbr, _cl_abbr, _clty, _cltydef, ci) (decls, req) ->
        let variance = List.map snd ci.ci_params in
        (obj_id, obj_abbr) :: decls,
        (add_injectivity variance, ci.ci_loc) :: req)
      cldecls ([],[])
  in
  let (decls, _) =
    compute_properties_fixpoint env decls required
      (List.map init_variance decls)
      (List.map (fun _ -> false) decls)
  in
  List.map2
    (fun (_,decl) (_, _, cl_abbr, clty, cltydef, _) ->
      let variance = decl.type_variance in
      (decl, {cl_abbr with type_variance = variance},
       {clty with cty_variance = variance},
       {cltydef with clty_variance = variance}))
    decls cldecls
(* Emit [Duplicate_definitions] warnings for constructor or label names
   defined more than once across the declarations of one group. *)
let check_duplicates sdecl_list =
  let labels = Hashtbl.create 7 and constrs = Hashtbl.create 7 in
  List.iter
    (fun sdecl -> match sdecl.ptype_kind with
      Ptype_variant cl ->
        List.iter
          (fun pcd ->
            try
              (* Already seen: warn with the type that first defined it. *)
              let name' = Hashtbl.find constrs pcd.pcd_name.txt in
              Location.prerr_warning pcd.pcd_loc
                (Warnings.Duplicate_definitions
                   ("constructor", pcd.pcd_name.txt, name',
                    sdecl.ptype_name.txt))
            with Not_found ->
              Hashtbl.add constrs pcd.pcd_name.txt sdecl.ptype_name.txt)
          cl
    | Ptype_record fl ->
        List.iter
          (fun {pld_name=cname;pld_loc=loc} ->
            try
              let name' = Hashtbl.find labels cname.txt in
              Location.prerr_warning loc
                (Warnings.Duplicate_definitions
                   ("label", cname.txt, name', sdecl.ptype_name.txt))
            with Not_found -> Hashtbl.add labels cname.txt sdecl.ptype_name.txt)
          fl
    | Ptype_abstract -> ()
    | Ptype_open -> ())
    sdecl_list
(* Force recursion to go through id for private types *)
(* For a private abbreviation of a fixed type, rebind the manifest's
   root node to a [Tconstr] on [id] (keeping a copy [ty'] as the new
   manifest) so that recursion goes through the declared name. *)
let name_recursion sdecl id decl =
  match decl with
  | { type_kind = Type_abstract;
      type_manifest = Some ty;
      type_private = Private; } when is_fixed_type sdecl ->
    let ty = Ctype.repr ty in
    let ty' = Btype.newty2 ty.level ty.desc in
    if Ctype.deep_occur ty ty' then
      let td = Tconstr(Path.Pident id, decl.type_params, ref Mnil) in
      Btype.link_type ty (Btype.newty2 ty.level td);
      {decl with type_manifest = Some ty'}
    else decl
  | _ -> decl
let transl_type_decl env rec_flag sdecl_list =
let fixed_types = Ext_list.filter sdecl_list is_fixed_type in
let sdecl_list =
List.map
(fun sdecl ->
let ptype_name =
mkloc (sdecl.ptype_name.txt ^"#row") sdecl.ptype_name.loc in
{sdecl with
ptype_name; ptype_kind = Ptype_abstract; ptype_manifest = None})
fixed_types
@ sdecl_list
in
let id_list =
List.map (fun sdecl -> Ident.create sdecl.ptype_name.txt) sdecl_list
in
  (* Since we've introduced fresh idents, make sure the definition
     level is at least the binding time of these events. Otherwise,
     passing one of the recursively-defined type constrs as argument
     to an abbreviation may fail. *)
Ctype.init_def(Ident.current_time());
Ctype.begin_def();
let temp_env =
List.fold_left2 (enter_type rec_flag) env sdecl_list id_list in
let current_slot = ref None in
let warn_unused = Warnings.is_active (Warnings.Unused_type_declaration "") in
let id_slots id =
match rec_flag with
| Asttypes.Recursive when warn_unused ->
let slot = ref [] in
let td = Env.find_type (Path.Pident id) temp_env in
let name = Ident.name id in
Env.set_type_used_callback
name td
(fun old_callback ->
match !current_slot with
| Some slot -> slot := (name, td) :: !slot
| None ->
List.iter (fun (name, d) -> Env.mark_type_used env name d)
(get_ref slot);
old_callback ()
);
id, Some slot
| Asttypes.Recursive | Asttypes.Nonrecursive ->
id, None
in
let transl_declaration name_sdecl (id, slot) =
current_slot := slot;
Builtin_attributes.warning_scope
name_sdecl.ptype_attributes
(fun () -> transl_declaration temp_env name_sdecl id)
in
let tdecls =
List.map2 transl_declaration sdecl_list (List.map id_slots id_list) in
let decls =
List.map (fun tdecl -> (tdecl.typ_id, tdecl.typ_type)) tdecls in
current_slot := None;
check_duplicates sdecl_list;
let newenv =
List.fold_right
(fun (id, decl) env -> Env.add_type ~check:true id decl env)
decls env
in
begin match rec_flag with
| Asttypes.Nonrecursive -> ()
| Asttypes.Recursive ->
List.iter2
(fun id sdecl -> update_type temp_env newenv id sdecl.ptype_loc)
id_list sdecl_list
end;
  (* Generalize type declarations. *)
Ctype.end_def();
List.iter (fun (_, decl) -> generalize_decl decl) decls;
  (* Check for ill-formed abbrevs *)
let id_loc_list =
List.map2 (fun id sdecl -> (id, sdecl.ptype_loc))
id_list sdecl_list
in
List.iter (fun (id, decl) ->
check_well_founded_manifest newenv (List.assoc id id_loc_list)
(Path.Pident id) decl)
decls;
let to_check =
function Path.Pident id -> List.mem_assoc id id_loc_list | _ -> false in
List.iter (fun (id, decl) ->
check_well_founded_decl newenv (List.assoc id id_loc_list) (Path.Pident id)
decl to_check)
decls;
List.iter (check_abbrev_recursion newenv id_loc_list to_check) tdecls;
List.iter2
(fun sdecl tdecl ->
let decl = tdecl.typ_type in
match Ctype.closed_type_decl decl with
Some ty -> raise(Error(sdecl.ptype_loc, Unbound_type_var(ty,decl)))
| None -> ())
sdecl_list tdecls;
List.iter2 (check_constraints newenv) sdecl_list decls;
let decls =
List.map2 (fun sdecl (id, decl) -> id, name_recursion sdecl id decl)
sdecl_list decls
in
let required =
List.map
(fun sdecl ->
add_injectivity (List.map snd sdecl.ptype_params),
sdecl.ptype_loc
)
sdecl_list
in
let final_decls, final_env =
compute_properties_fixpoint env decls required
(List.map init_variance decls)
(List.map (fun _ -> false) decls)
in
List.iter2 (check_abbrev final_env) sdecl_list final_decls;
let final_decls =
List.map2
(fun tdecl (_id2, decl) ->
{ tdecl with typ_type = decl }
) tdecls final_decls
in
(final_decls, final_env)
let transl_extension_constructor env type_path type_params
typext_params priv sext =
let id = Ident.create sext.pext_name.txt in
let args, ret_type, kind =
match sext.pext_kind with
Pext_decl(sargs, sret_type) ->
let targs, tret_type, args, ret_type, _ =
make_constructor env type_path typext_params
sargs sret_type
in
args, ret_type, Text_decl(targs, tret_type)
| Pext_rebind lid ->
let cdescr = Typetexp.find_constructor env lid.loc lid.txt in
let usage =
if cdescr.cstr_private = Private || priv = Public
then Env.Positive else Env.Privatize
in
Env.mark_constructor usage env (Longident.last lid.txt) cdescr;
let (args, cstr_res) = Ctype.instance_constructor cdescr in
let res, ret_type =
if cdescr.cstr_generalized then
let params = Ctype.instance_list env type_params in
let res = Ctype.newconstr type_path params in
let ret_type = Some (Ctype.newconstr type_path params) in
res, ret_type
else (Ctype.newconstr type_path typext_params), None
in
begin
try
Ctype.unify env cstr_res res
with Ctype.Unify trace ->
raise (Error(lid.loc,
Rebind_wrong_type(lid.txt, env, trace)))
end;
if not cdescr.cstr_generalized then begin
let vars =
Ctype.free_variables (Btype.newgenty (Ttuple args))
in
List.iter
(function {desc = Tvar (Some "_")} as ty ->
if List.memq ty vars then ty.desc <- Tvar None
| _ -> ())
typext_params
end;
let cstr_type_path, cstr_type_params =
match cdescr.cstr_res.desc with
Tconstr (p, _, _) ->
let decl = Env.find_type p env in
p, decl.type_params
| _ -> assert false
in
let cstr_types =
(Btype.newgenty
(Tconstr(cstr_type_path, cstr_type_params, ref Mnil)))
:: cstr_type_params
in
let ext_types =
(Btype.newgenty
(Tconstr(type_path, type_params, ref Mnil)))
:: type_params
in
if not (Ctype.equal env true cstr_types ext_types) then
raise (Error(lid.loc,
Rebind_mismatch(lid.txt, cstr_type_path, type_path)));
begin
match cdescr.cstr_private, priv with
Private, Public ->
raise (Error(lid.loc, Rebind_private lid.txt))
| _ -> ()
end;
let path =
match cdescr.cstr_tag with
Cstr_extension(path, _) -> path
| _ -> assert false
in
let args =
match cdescr.cstr_inlined with
| None ->
Types.Cstr_tuple args
| Some decl ->
let tl =
match args with
| [ {desc=Tconstr(_, tl, _)} ] -> tl
| _ -> assert false
in
let decl = Ctype.instance_declaration decl in
assert (List.length decl.type_params = List.length tl);
List.iter2 (Ctype.unify env) decl.type_params tl;
let lbls =
match decl.type_kind with
| Type_record (lbls, Record_extension) -> lbls
| _ -> assert false
in
Types.Cstr_record lbls
in
args, ret_type, Text_rebind(path, lid)
in
let ext =
{ ext_type_path = type_path;
ext_type_params = typext_params;
ext_args = args;
ext_ret_type = ret_type;
ext_private = priv;
Types.ext_loc = sext.pext_loc;
Types.ext_attributes = sext.pext_attributes; }
in
{ ext_id = id;
ext_name = sext.pext_name;
ext_type = ext;
ext_kind = kind;
Typedtree.ext_loc = sext.pext_loc;
Typedtree.ext_attributes = sext.pext_attributes; }
let transl_extension_constructor env type_path type_params
typext_params priv sext =
Builtin_attributes.warning_scope sext.pext_attributes
(fun () -> transl_extension_constructor env type_path type_params
typext_params priv sext)
let transl_type_extension extend env loc styext =
reset_type_variables();
Ctype.begin_def();
let (type_path, type_decl) =
let lid = styext.ptyext_path in
Typetexp.find_type env lid.loc lid.txt
in
begin
match type_decl.type_kind with
| Type_open -> begin
match type_decl.type_private with
| Private when extend -> begin
match
List.find
(function {pext_kind = Pext_decl _} -> true
| {pext_kind = Pext_rebind _} -> false)
styext.ptyext_constructors
with
| {pext_loc} ->
raise (Error(pext_loc, Cannot_extend_private_type type_path))
| exception Not_found -> ()
end
| _ -> ()
end
| _ ->
raise (Error(loc, Not_extensible_type type_path))
end;
let type_variance =
List.map (fun v ->
let (co, cn) = Variance.get_upper v in
(not cn, not co, false))
type_decl.type_variance
in
let err =
if type_decl.type_arity <> List.length styext.ptyext_params then
[Includecore.Arity]
else
if List.for_all2
(fun (c1, n1, _) (c2, n2, _) -> (not c2 || c1) && (not n2 || n1))
type_variance
(add_injectivity (List.map snd styext.ptyext_params))
then [] else [Includecore.Variance]
in
if err <> [] then
raise (Error(loc, Extension_mismatch (type_path, err)));
let ttype_params = make_params env styext.ptyext_params in
let type_params = List.map (fun (cty, _) -> cty.ctyp_type) ttype_params in
List.iter2 (Ctype.unify_var env)
(Ctype.instance_list env type_decl.type_params)
type_params;
let constructors =
List.map (transl_extension_constructor env type_path
type_decl.type_params type_params styext.ptyext_private)
styext.ptyext_constructors
in
Ctype.end_def();
  (* Generalize types *)
List.iter Ctype.generalize type_params;
List.iter
(fun ext ->
Btype.iter_type_expr_cstr_args Ctype.generalize ext.ext_type.ext_args;
may Ctype.generalize ext.ext_type.ext_ret_type)
constructors;
List.iter
(fun ext ->
match Ctype.closed_extension_constructor ext.ext_type with
Some ty ->
raise(Error(ext.ext_loc, Unbound_type_var_ext(ty, ext.ext_type)))
| None -> ())
constructors;
List.iter
(fun ext->
ignore (compute_variance_extension env true type_decl
ext.ext_type (type_variance, loc)))
constructors;
let newenv =
List.fold_left
(fun env ext ->
Env.add_extension ~check:true ext.ext_id ext.ext_type env)
env constructors
in
let tyext =
{ tyext_path = type_path;
tyext_txt = styext.ptyext_path;
tyext_params = ttype_params;
tyext_constructors = constructors;
tyext_private = styext.ptyext_private;
tyext_attributes = styext.ptyext_attributes; }
in
(tyext, newenv)
let transl_type_extension extend env loc styext =
Builtin_attributes.warning_scope styext.ptyext_attributes
(fun () -> transl_type_extension extend env loc styext)
(* Translate an exception declaration: an extension constructor of the
   predefined type [exn], added to the environment.
   (Restores the "(* Generalize types *)" comment whose delimiters were
   stripped, leaving a bare `types` token.) *)
let transl_exception env sext =
  reset_type_variables();
  Ctype.begin_def();
  let ext =
    transl_extension_constructor env
      Predef.path_exn [] [] Asttypes.Public sext
  in
  Ctype.end_def();
  (* Generalize types *)
  Btype.iter_type_expr_cstr_args Ctype.generalize ext.ext_type.ext_args;
  may Ctype.generalize ext.ext_type.ext_ret_type;
  begin match Ctype.closed_extension_constructor ext.ext_type with
    Some ty ->
      raise (Error(ext.ext_loc, Unbound_type_var_ext(ty, ext.ext_type)))
  | None -> ()
  end;
  let newenv = Env.add_extension ~check:true ext.ext_id ext.ext_type env in
  ext, newenv
(* Build the native representations of an external's arguments and
   result by walking the arrows of the source type and the inferred type
   in parallel; every value here uses the ordinary OCaml representation. *)
let rec parse_native_repr_attributes env core_type ty =
  match core_type.ptyp_desc, (Ctype.repr ty).desc
  with
  | Ptyp_arrow (_, _, ct2), Tarrow (_, _, t2, _) ->
    let repr_arg = Same_as_ocaml_repr in
    let repr_args, repr_res =
      parse_native_repr_attributes env ct2 t2
    in
    (repr_arg :: repr_args, repr_res)
  (* The two views must agree on where the arrows are. *)
  | Ptyp_arrow _, _ | _, Tarrow _ -> assert false
  | _ -> ([], Same_as_ocaml_repr)
(* Wrapper that first looks through the ["function$"] wrapper used for
   uncurried functions, then falls back to the plain arrow traversal. *)
let parse_native_repr_attributes env core_type ty =
  match core_type.ptyp_desc, (Ctype.repr ty).desc
  with
  | Ptyp_constr ({txt = Lident "function$"}, [{ptyp_desc = Ptyp_arrow (_, _, ct2)}; _]),
    Tconstr (Pident {name = "function$"},[{desc = Tarrow (_, _, t2, _)}; _],_) ->
    let repr_args, repr_res = parse_native_repr_attributes env ct2 t2 in
    let native_repr_args = Same_as_ocaml_repr :: repr_args in
    (native_repr_args, repr_res)
  | _ -> parse_native_repr_attributes env core_type ty
let transl_value_decl env loc valdecl =
let cty = Typetexp.transl_type_scheme env valdecl.pval_type in
let ty = cty.ctyp_type in
let v =
match valdecl.pval_prim with
[] when Env.is_in_signature env ->
{ val_type = ty; val_kind = Val_reg; Types.val_loc = loc;
val_attributes = valdecl.pval_attributes }
| [] ->
raise (Error(valdecl.pval_loc, Val_in_structure))
| _ ->
let native_repr_args, native_repr_res =
let rec scann (attrs : Parsetree.attributes) =
match attrs with
| ({txt = "internal.arity";_},
PStr [ {pstr_desc = Pstr_eval
(
({pexp_desc = Pexp_constant (Pconst_integer (i,_))} :
Parsetree.expression) ,_)}]) :: _ ->
Some (int_of_string i)
| _ :: rest -> scann rest
| [] -> None
and make n =
if n = 0 then []
else Primitive.Same_as_ocaml_repr :: make (n - 1)
in
match scann valdecl.pval_attributes with
| None -> parse_native_repr_attributes env valdecl.pval_type ty
| Some x -> make x , Primitive.Same_as_ocaml_repr
in
let prim =
Primitive.parse_declaration valdecl
~native_repr_args
~native_repr_res
in
let prim_native_name = prim.prim_native_name in
if prim.prim_arity = 0 &&
not ( String.length prim_native_name >= 20 &&
String.unsafe_get prim_native_name 0 = '\132' &&
String.unsafe_get prim_native_name 1 = '\149'
) &&
(prim.prim_name = "" || (prim.prim_name.[0] <> '%' && prim.prim_name.[0] <> '#')) then
raise(Error(valdecl.pval_type.ptyp_loc, Null_arity_external));
{ val_type = ty; val_kind = Val_prim prim; Types.val_loc = loc;
val_attributes = valdecl.pval_attributes }
in
let (id, newenv) =
Env.enter_value valdecl.pval_name.txt v env
~check:(fun s -> Warnings.Unused_value_declaration s)
in
let desc =
{
val_id = id;
val_name = valdecl.pval_name;
val_desc = cty; val_val = v;
val_prim = valdecl.pval_prim;
val_loc = valdecl.pval_loc;
val_attributes = valdecl.pval_attributes;
}
in
desc, newenv
let transl_value_decl env loc valdecl =
Builtin_attributes.warning_scope valdecl.pval_attributes
(fun () -> transl_value_decl env loc valdecl)
(* Translate a "with" constraint -- much simplified version of
   transl_type_decl. *)
(* Translate a type declaration [sdecl] constrained by an original
   declaration [orig_decl] (as used for "with type" constraints).
   [row_path] is used to fix the row of private row types.
   NOTE(review): documentation-only pass; code is unchanged. *)
let transl_with_constraint env id row_path orig_decl sdecl =
  Env.mark_type_used env (Ident.name id) orig_decl;
  reset_type_variables();
  Ctype.begin_def();
  let tparams = make_params env sdecl.ptype_params in
  let params = List.map (fun (cty, _) -> cty.ctyp_type) tparams in
  let orig_decl = Ctype.instance_declaration orig_decl in
  let arity_ok = List.length params = orig_decl.type_arity in
  (* Only unify the parameters when the arities agree; otherwise the
     declaration degrades to an abstract one below. *)
  if arity_ok then
    List.iter2 (Ctype.unify_var env) params orig_decl.type_params;
  let constraints = List.map
    (function (ty, ty', loc) ->
       try
         let cty = transl_simple_type env false ty in
         let cty' = transl_simple_type env false ty' in
         let ty = cty.ctyp_type in
         let ty' = cty'.ctyp_type in
         Ctype.unify env ty ty';
         (cty, cty', loc)
       with Ctype.Unify tr ->
         raise(Error(loc, Inconsistent_constraint (env, tr))))
    sdecl.ptype_cstrs
  in
  let no_row = not (is_fixed_type sdecl) in
  let (tman, man) = match sdecl.ptype_manifest with
      None -> None, None
    | Some sty ->
        let cty = transl_simple_type env no_row sty in
        Some cty, Some cty.ctyp_type
  in
  (* The result is private if either the new declaration asks for it or the
     original (non-abstract) declaration already was. *)
  let priv =
    if sdecl.ptype_private = Private then Private else
    if arity_ok && orig_decl.type_kind <> Type_abstract
    then orig_decl.type_private else sdecl.ptype_private
  in
  if arity_ok && orig_decl.type_kind <> Type_abstract
  && sdecl.ptype_private = Private then
    Location.deprecated sdecl.ptype_loc "spurious use of private";
  (* Keep the original kind only when the arity matches and a manifest is
     present; otherwise fall back to an abstract, boxed declaration. *)
  let type_kind, type_unboxed =
    if arity_ok && man <> None then
      orig_decl.type_kind, orig_decl.type_unboxed
    else
      Type_abstract, unboxed_false_default_false
  in
  let decl =
    { type_params = params;
      type_arity = List.length params;
      type_kind;
      type_private = priv;
      type_manifest = man;
      type_variance = [];
      type_newtype_level = None;
      type_loc = sdecl.ptype_loc;
      type_attributes = sdecl.ptype_attributes;
      type_immediate = false;
      type_unboxed;
    }
  in
  begin match row_path with None -> ()
  | Some p -> set_fixed_row env sdecl.ptype_loc p decl
  end;
  (* Reject declarations that still contain unbound type variables. *)
  begin match Ctype.closed_type_decl decl with None -> ()
  | Some ty -> raise(Error(sdecl.ptype_loc, Unbound_type_var(ty,decl)))
  end;
  let decl = name_recursion sdecl id decl in
  let type_variance =
    compute_variance_decl env true decl
      (add_injectivity (List.map snd sdecl.ptype_params), sdecl.ptype_loc)
  in
  let type_immediate = compute_immediacy env decl in
  let decl = {decl with type_variance; type_immediate} in
  Ctype.end_def();
  generalize_decl decl;
  {
    typ_id = id;
    typ_name = sdecl.ptype_name;
    typ_params = tparams;
    typ_type = decl;
    typ_cstrs = constraints;
    typ_loc = sdecl.ptype_loc;
    typ_manifest = tman;
    typ_kind = Ttype_abstract;
    typ_private = sdecl.ptype_private;
    typ_attributes = sdecl.ptype_attributes;
  }
(* Build a fresh, fully abstract type declaration with [arity] parameters.
   Used to approximate declarations before they are fully checked. *)
let abstract_type_decl arity =
  let rec fresh_vars n acc =
    if n <= 0 then acc else fresh_vars (n - 1) (Ctype.newvar () :: acc)
  in
  Ctype.begin_def();
  let decl =
    { type_params = fresh_vars arity [];
      type_arity = arity;
      type_kind = Type_abstract;
      type_private = Public;
      type_manifest = None;
      type_variance = replicate_list Variance.full arity;
      type_newtype_level = None;
      type_loc = Location.none;
      type_attributes = [];
      type_immediate = false;
      type_unboxed = unboxed_false_default_false;
    } in
  Ctype.end_def();
  generalize_decl decl;
  decl
(* Approximate each declaration in [sdecl_list] by a fresh abstract type of
   the same arity, paired with a newly created identifier. *)
let approx_type_decl sdecl_list =
  let approx sdecl =
    (Ident.create sdecl.ptype_name.txt,
     abstract_type_decl (List.length sdecl.ptype_params))
  in
  List.map approx sdecl_list
let check_recmod_typedecl env loc recmod_ids path decl =
  (* [recmod_ids] is the list of recursively-defined module idents.
     [(path, decl)] is the type declaration to be checked.
     NOTE(review): the comment delimiters were stripped during text
     extraction, leaving bare prose lines; restored as a comment. *)
  let to_check path =
    List.exists (fun id -> Path.isfree id path) recmod_ids in
  check_well_founded_decl env loc path decl to_check;
  check_recursion env loc path decl to_check
open Format
(* Find the first item of [tl] in which the unbound variable [tv] occurs,
   and report it on [ppf]; print nothing when no item mentions [tv]. *)
let explain_unbound_gen ppf tv tl typ kwd pr =
  try
    let ti = List.find (fun ti -> Ctype.deep_occur tv (typ ti)) tl in
    (* NOTE(review): the binding of [ty0] was lost during text extraction
       (only its last expression survived).  Restored here as an object type
       mentioning [tv] so that loop marking sees the variable; confirm
       against the upstream source. *)
    let ty0 = Btype.newgenty (Tobject(tv, ref None)) in
    Printtyp.reset_and_mark_loops_list [typ ti; ty0];
    fprintf ppf
      ".@.@[<hov2>In %s@ %a@;<1 -2>the variable %a is unbound@]"
      kwd pr ti Printtyp.type_expr tv
  with Not_found -> ()
(* Specialization of [explain_unbound_gen] where each item is printed as a
   label followed by its type. *)
let explain_unbound ppf tv tl typ kwd lab =
  explain_unbound_gen ppf tv tl typ kwd
    (fun ppf ti -> fprintf ppf "%s%a" (lab ti) Printtyp.type_expr (typ ti))

(* Explain an unbound variable [tv] occurring in the single type [ty],
   descending into object fields and variant rows to point at the offending
   component when possible. *)
let explain_unbound_single ppf tv ty =
  let trivial ty =
    explain_unbound ppf tv [ty] (fun t -> t) "type" (fun _ -> "") in
  match (Ctype.repr ty).desc with
    Tobject(fi,_) ->
      let (tl, rv) = Ctype.flatten_fields fi in
      (* When the row variable itself is the unbound one, just print the
         whole type. *)
      if rv == tv then trivial ty else
        explain_unbound ppf tv tl (fun (_,_,t) -> t)
          "method" (fun (lab,_,_) -> lab ^ ": ")
  | Tvariant row ->
      let row = Btype.row_repr row in
      if row.row_more == tv then trivial ty else
        explain_unbound ppf tv row.row_fields
          (fun (_l,f) -> match Btype.row_field_repr f with
             Rpresent (Some t) -> t
           | Reither (_,[t],_,_) -> t
           | Reither (_,tl,_,_) -> Btype.newgenty (Ttuple tl)
           | _ -> Btype.newgenty (Ttuple[]))
          "case" (fun (lab,_) -> "`" ^ lab ^ " of ")
  | _ -> trivial ty
(* Flatten constructor arguments to a plain list of their types. *)
let tys_of_constr_args args =
  match args with
  | Types.Cstr_tuple tl -> tl
  | Types.Cstr_record lbls -> List.map (fun lbl -> lbl.Types.ld_type) lbls
(* Pretty-print a type-declaration error on formatter [ppf].
   NOTE(review): documentation-only pass; every format string is unchanged. *)
let report_error ppf = function
  | Repeated_parameter ->
      fprintf ppf "A type parameter occurs several times"
  | Duplicate_constructor s ->
      fprintf ppf "Two constructors are named %s" s
  | Duplicate_label s ->
      fprintf ppf "Two labels are named %s" s
  | Recursive_abbrev s ->
      fprintf ppf "The type abbreviation %s is cyclic" s
  | Cycle_in_def (s, ty) ->
      Printtyp.reset_and_mark_loops ty;
      fprintf ppf "@[<v>The definition of %s contains a cycle:@ %a@]"
        s Printtyp.type_expr ty
  | Definition_mismatch (ty, errs) ->
      Printtyp.reset_and_mark_loops ty;
      fprintf ppf "@[<v>@[<hov>%s@ %s@;<1 2>%a@]%a@]"
        "This variant or record definition" "does not match that of type"
        Printtyp.type_expr ty
        (Includecore.report_type_mismatch "the original" "this" "definition")
        errs
  | Constraint_failed (ty, ty') ->
      Printtyp.reset_and_mark_loops ty;
      Printtyp.mark_loops ty';
      fprintf ppf "@[%s@ @[<hv>Type@ %a@ should be an instance of@ %a@]@]"
        "Constraints are not satisfied in this type."
        Printtyp.type_expr ty Printtyp.type_expr ty'
  | Parameters_differ (path, ty, ty') ->
      Printtyp.reset_and_mark_loops ty;
      Printtyp.mark_loops ty';
      fprintf ppf
        "@[<hv>In the definition of %s, type@ %a@ should be@ %a@]"
        (Path.name path) Printtyp.type_expr ty Printtyp.type_expr ty'
  | Inconsistent_constraint (env, trace) ->
      fprintf ppf "The type constraints are not consistent.@.";
      Printtyp.report_unification_error ppf env trace
        (fun ppf -> fprintf ppf "Type")
        (fun ppf -> fprintf ppf "is not compatible with type")
  | Type_clash (env, trace) ->
      Printtyp.report_unification_error ppf env trace
        (function ppf ->
           fprintf ppf "This type constructor expands to type")
        (function ppf ->
           fprintf ppf "but is used here with type")
  | Null_arity_external ->
      fprintf ppf "External identifiers must be functions"
  | Unbound_type_var (ty, decl) ->
      fprintf ppf "A type variable is unbound in this type declaration";
      let ty = Ctype.repr ty in
      (* Point at the specific constructor/field/manifest where the variable
         occurs, depending on the kind of declaration. *)
      begin match decl.type_kind, decl.type_manifest with
      | Type_variant tl, _ ->
          explain_unbound_gen ppf ty tl (fun c ->
            let tl = tys_of_constr_args c.Types.cd_args in
            Btype.newgenty (Ttuple tl)
          )
            "case" (fun ppf c ->
              fprintf ppf
                "%s of %a" (Ident.name c.Types.cd_id)
                Printtyp.constructor_arguments c.Types.cd_args)
      | Type_record (tl, _), _ ->
          explain_unbound ppf ty tl (fun l -> l.Types.ld_type)
            "field" (fun l -> Ident.name l.Types.ld_id ^ ": ")
      | Type_abstract, Some ty' ->
          explain_unbound_single ppf ty ty'
      | _ -> ()
      end
  | Unbound_type_var_ext (ty, ext) ->
      fprintf ppf "A type variable is unbound in this extension constructor";
      let args = tys_of_constr_args ext.ext_args in
      explain_unbound ppf ty args (fun c -> c) "type" (fun _ -> "")
  | Cannot_extend_private_type path ->
      fprintf ppf "@[%s@ %a@]"
        "Cannot extend private type definition"
        Printtyp.path path
  | Not_extensible_type path ->
      fprintf ppf "@[%s@ %a@ %s@]"
        "Type definition"
        Printtyp.path path
        "is not extensible"
  | Extension_mismatch (path, errs) ->
      fprintf ppf "@[<v>@[<hov>%s@ %s@;<1 2>%s@]%a@]"
        "This extension" "does not match the definition of type"
        (Path.name path)
        (Includecore.report_type_mismatch
           "the type" "this extension" "definition")
        errs
  | Rebind_wrong_type (lid, env, trace) ->
      Printtyp.report_unification_error ppf env trace
        (function ppf ->
           fprintf ppf "The constructor %a@ has type"
             Printtyp.longident lid)
        (function ppf ->
           fprintf ppf "but was expected to be of type")
  | Rebind_mismatch (lid, p, p') ->
      fprintf ppf
        "@[%s@ %a@ %s@ %s@ %s@ %s@ %s@]"
        "The constructor" Printtyp.longident lid
        "extends type" (Path.name p)
        "whose declaration does not match"
        "the declaration of type" (Path.name p')
  | Rebind_private lid ->
      fprintf ppf "@[%s@ %a@ %s@]"
        "The constructor"
        Printtyp.longident lid
        "is private"
  | Bad_variance (n, v1, v2) ->
      (* Render a variance triple (positive, negative, injective) as text. *)
      let variance (p,n,i) =
        let inj = if i then "injective " else "" in
        match p, n with
          true, true -> inj ^ "invariant"
        | true, false -> inj ^ "covariant"
        | false, true -> inj ^ "contravariant"
        | false, false -> if inj = "" then "unrestricted" else inj
      in
      (* English ordinal suffix: 1st, 2nd, 3rd, 4th, ... (11th-13th special). *)
      let suffix n =
        let teen = (n mod 100)/10 = 1 in
        match n mod 10 with
        | 1 when not teen -> "st"
        | 2 when not teen -> "nd"
        | 3 when not teen -> "rd"
        | _ -> "th"
      in
      (* Negative [n] encodes special cases; non-negative [n] is the index of
         the offending type parameter. *)
      if n = -1 then
        fprintf ppf "@[%s@ %s@ It"
          "In this definition, a type variable has a variance that"
          "is not reflected by its occurrence in type parameters."
      else if n = -2 then
        fprintf ppf "@[%s@ %s@]"
          "In this definition, a type variable cannot be deduced"
          "from the type parameters."
      else if n = -3 then
        fprintf ppf "@[%s@ %s@ It"
          "In this definition, a type variable has a variance that"
          "cannot be deduced from the type parameters."
      else
        fprintf ppf "@[%s@ %s@ The %d%s type parameter"
          "In this definition, expected parameter"
          "variances are not satisfied."
          n (suffix n);
      if n <> -2 then
        fprintf ppf " was expected to be %s,@ but it is %s.@]"
          (variance v2) (variance v1)
  | Unavailable_type_constructor p ->
      fprintf ppf "The definition of type %a@ is unavailable" Printtyp.path p
  | Bad_fixed_type r ->
      fprintf ppf "This fixed type %s" r
  | Varying_anonymous ->
      fprintf ppf "@[%s@ %s@ %s@]"
        "In this GADT definition," "the variance of some parameter"
        "cannot be checked"
  | Val_in_structure ->
      fprintf ppf "Value declarations are only allowed in signatures"
  | Bad_immediate_attribute ->
      fprintf ppf "@[%s@ %s@]"
        "Types marked with the immediate attribute must be"
        "non-pointer types like int or bool"
  | Bad_unboxed_attribute msg ->
      fprintf ppf "@[This type cannot be unboxed because@ %s.@]" msg
  | Boxed_and_unboxed ->
      fprintf ppf "@[A type cannot be boxed and unboxed at the same time.@]"
  | Nonrec_gadt ->
      fprintf ppf
        "@[GADT case syntax cannot be used in a 'nonrec' block.@]"
(* Hook [report_error] into the compiler's generic error-reporting
   machinery so [Error] exceptions are rendered with locations. *)
let () =
  Location.register_error_of_exn
    (function
      | Error (loc, err) ->
        Some (Location.error_of_printer loc report_error err)
      | _ ->
        None
    )
|
5acfdf67611c9af37974c602f8ad1068c888529c9af0ff00dd84237d4fccb1a7 | brendanhay/terrafomo | Types.hs | -- This module was auto-generated. If it is modified, it will not be overwritten.
-- |
-- Module      : Terrafomo.NewRelic.Types
-- Copyright   : (c) 2017-2018 Brendan Hay
-- License     : Mozilla Public License, v. 2.0.
-- Maintainer  : Brendan Hay <brendan.g.hay+terrafomo@gmail.com>
-- Stability   : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Terrafomo.NewRelic.Types where
import Data . Text ( Text )
import Terrafomo
-- import Formatting (Format, (%))
import Terrafomo . NewRelic . Lens
import qualified Terrafomo . Attribute as TF
import qualified Terrafomo . HCL as TF
import qualified Terrafomo . Name as TF
import qualified Terrafomo . Provider as TF
import qualified Terrafomo . Schema as TF
| null | https://raw.githubusercontent.com/brendanhay/terrafomo/387a0e9341fb9cd5543ef8332dea126f50f1070e/provider/terrafomo-newrelic/src/Terrafomo/NewRelic/Types.hs | haskell | This module was auto-generated. If it is modified, it will not be overwritten.
|
Stability : auto-generated
import Formatting (Format, (%)) |
Module : . NewRelic . Types
Copyright : ( c ) 2017 - 2018
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Terrafomo.NewRelic.Types where
import Data . Text ( Text )
import Terrafomo
import Terrafomo . NewRelic . Lens
import qualified Terrafomo . Attribute as TF
import qualified Terrafomo . HCL as TF
import qualified Terrafomo . Name as TF
import qualified Terrafomo . Provider as TF
import qualified Terrafomo . Schema as TF
|
5329ca050c5453b2b9513933fbb76b2c4155555d0344ea56838c1dfd2994114d | NoRedInk/jetpack | Files.hs | {-| Helpers for working with files/paths/dirs)
-}
module Utils.Files
( pathToFileName
)
where
import qualified Data.List as L
import qualified Data.Text as T
import System.FilePath ((<.>), splitDirectories)
{-| Converts a path into a flat filename.

>>> import System.FilePath ((</>), (<.>))
>>> pathToFileName ("." </> "foo" </> "bar" <.> "elm") "js"
"foo___bar.elm.js"
>>> pathToFileName ("." </> "bar" <.> "elm") "js"
"bar.elm.js"
-}
pathToFileName :: FilePath -> String -> FilePath
pathToFileName filePath extension = safeFileName filePath <.> extension

-- Flatten a path into a single safe file name: drop "." components, join
-- the remaining components with "___", and replace '-' with '_'.
safeFileName :: FilePath -> FilePath
safeFileName =
  T.unpack .
  T.replace "-" "_" .
  T.concat .
  L.intersperse "___" .
  filter ((/=) ".") .
  fmap T.pack .
  splitDirectories
| null | https://raw.githubusercontent.com/NoRedInk/jetpack/721d12226b593c117cba26ceb7c463c7c3334b8b/src/Utils/Files.hs | haskell | | Helpers for working with files/paths/dirs)
| module Utils.Files
( pathToFileName
)
where
import qualified Data.List as L
import qualified Data.Text as T
import System.FilePath ((<.>), splitDirectories)
| Converts a path into a flat filename .
> > > import System . FilePath ( ( < / > ) , ( < . > ) )
> > > pathToFileName ( " . " < / > " foo " < / > " bar " < . > " elm " ) " js "
" foo___bar.elm.js "
> > > pathToFileName ( " . " < / > " bar " < . > " elm " ) " js "
" bar.elm.js "
>>> import System.FilePath ((</>), (<.>))
>>> pathToFileName ("." </> "foo" </> "bar" <.> "elm") "js"
"foo___bar.elm.js"
>>> pathToFileName ("." </> "bar" <.> "elm") "js"
"bar.elm.js"
-}
pathToFileName :: FilePath -> String -> FilePath
pathToFileName filePath extension = safeFileName filePath <.> extension
safeFileName :: FilePath -> FilePath
safeFileName =
T.unpack .
T.replace "-" "_" .
T.concat .
L.intersperse "___" .
filter ((/=) ".") .
fmap T.pack .
splitDirectories
|
2d6ae00f59bb0b5f09bb981516903812b15d7eab9d6869ffa27ccfcf11b05281 | dfinity-side-projects/dhc | Boost.hs | # LANGUAGE CPP #
module Boost
( QuasiWasm
, QuasiWasmHelper(..)
, tag_const, Tag(..)
, Boost(..)
#ifdef __HASTE__
, (<>)
#endif
) where
#ifndef __HASTE__
import Data.Semigroup ()
#endif
import Ast
import WasmOp
-- | Data on the heap is 64-bit aligned. The first 8 bits hold a tag.
--
-- The following tables describe the field at a given offset of an object
-- on the heap. All fields are 32 bits wide except the value field of a 64-bit
-- integer type.
--
-- Int64s:
--    0 TagInt
--    8 64-bit value
--
-- Ports:
--    0 TagRef
--    4 32-bit value
--
-- Algebraic data (sum) types:
--    0 TagSum | (arity << 8)
--    4
--    8, 12 .. Heap addresses of components.
--
-- Application `f x`:
--    0 TagAp
--    4 Unused
--    8 f
--   12 x
--
-- Global function:
--    0 TagGlobal | (arity << 8)
--    4 Function index
--
-- Indirection:
--    0 TagInd
--    4 Heap address of target
--
-- String:
--    0 TagString
--    4 address
--    8 offset
--   12 length
--
-- For example, `Just 42` is represented by:
--
--   [TagSum, 1, p], where p points to [TagInt, 0, 42]
--
-- where each list item is a 32-bit integer.
-- | Heap-object tags.  The 'Enum' instance fixes the numeric encoding used
-- at runtime, so the constructor order must not be changed.
data Tag = TagAp | TagInd | TagGlobal | TagInt | TagRef | TagSum | TagString deriving Enum

-- Encode a 'Tag' as the 32-bit constant written into a heap object's tag field.
tag_const :: Tag -> CustomWasmOp a
tag_const = I32_const . fromIntegral . fromEnum
-- | A few helpers for inline assembly.
type QuasiWasm = CustomWasmOp QuasiWasmHelper

data QuasiWasmHelper =
    CallSym String   -- ^ Find function index and call it.
  | ReduceArgs Int   -- ^ Copy arguments from heap and reduce them to WHNF.
  -- NOTE(review): the second constructor was lost during text extraction
  -- (only its comment survived); reconstructed -- confirm against upstream.
  deriving Show

type WasmImport = ((String, String), ([WasmType], [WasmType]))
-- | A Boost is a custom collection of extra declarations and functions that
-- are added to a binary.
data Boost = Boost
  -- Wasm import declarations.
  { boostImports :: [WasmImport]
  -- Haskell prelude definitions.
  , boostPrelude :: String
  -- Primitive Haskell functions.
  , boostPrims :: [(String, (Type, [QuasiWasm]))]
  -- Internal wasm functions, indexed by strings for 'CallSym'.
  , boostWasm :: [(String, (([WasmType], [WasmType]), [QuasiWasm]))]
  }
#ifdef __HASTE__
-- Haste build: no Semigroup/Monoid machinery, so expose a plain field-wise
-- append under the same operator name.
(<>) :: Boost -> Boost -> Boost
Boost a b c d <> Boost x y z w = Boost (a ++ x) (b ++ y) (c ++ z) (d ++ w)
#else
-- Boosts combine field-wise; 'mempty' is the empty boost.
instance Semigroup Boost where
  Boost a b c d <> Boost x y z w = Boost (a <> x) (b <> y) (c <> z) (d <> w)
instance Monoid Boost where
  mempty = Boost [] [] [] []
  mappend = (<>)
#endif
| null | https://raw.githubusercontent.com/dfinity-side-projects/dhc/60ac6c85ca02b53c0fdd1f5852c1eaf35f97d579/src/Boost.hs | haskell |
The following tables describe the field at a given offset of an object
integer type.
Int64s:
Ports:
0 TagSum | (arity << 8)
Application `f x`:
12 x
Global function:
Indirection:
0 TagInd
String:
For example, `Just 42` is represented by:
| A few helpers for inline assembly.
Find function index and call it.
| A Boost is a custom collection of extra declarations and functions that
are added to a binary.
Wasm import declarations.
Primitive Haskell functions. | # LANGUAGE CPP #
module Boost
( QuasiWasm
, QuasiWasmHelper(..)
, tag_const, Tag(..)
, Boost(..)
#ifdef __HASTE__
, (<>)
#endif
) where
#ifndef __HASTE__
import Data.Semigroup ()
#endif
import Ast
import WasmOp
| Data on the heap is 64 - bit aligned . The first 8 bits hold a tag .
on the heap . All fields are 32 bits wide except the value field of a 64 - bit
0 TagInt
8 64 - bit value
0 TagRef
4 32 - bit value
( sum ) types :
4
8 , 12 .. Heap addresses of components .
0 TagAp
4 Unused
8 f
0 | ( arity < < 8)
4 Function index
4 Heap address of target
0 TagString
4 address
8 offset
12 length
[ TagSum , 1 , p ] , where p points to [ TagInt , 0 , 42 ]
where each list item is a 32 - bit integer .
data Tag = TagAp | TagInd | TagGlobal | TagInt | TagRef | TagSum | TagString deriving Enum
tag_const :: Tag -> CustomWasmOp a
tag_const = I32_const . fromIntegral . fromEnum
type QuasiWasm = CustomWasmOp QuasiWasmHelper
data QuasiWasmHelper =
Copy arguments from heap and reduce them to WHNF .
deriving Show
type WasmImport = ((String, String), ([WasmType], [WasmType]))
data Boost = Boost
{ boostImports :: [WasmImport]
definitions .
, boostPrelude :: String
, boostPrims :: [(String, (Type, [QuasiWasm]))]
Internal wasm functions , indexed by strings for CallSym .
, boostWasm :: [(String, (([WasmType], [WasmType]), [QuasiWasm]))]
}
#ifdef __HASTE__
(<>) :: Boost -> Boost -> Boost
Boost a b c d <> Boost x y z w = Boost (a ++ x) (b ++ y) (c ++ z) (d ++ w)
#else
instance Semigroup Boost where
Boost a b c d <> Boost x y z w = Boost (a <> x) (b <> y) (c <> z) (d <> w)
instance Monoid Boost where
mempty = Boost [] [] [] []
mappend = (<>)
#endif
|
f8194e8af7220f6e0e750f026f067abedf8e2ce017fdd53f3411c877707424e2 | CardanoSolutions/ogmios | Util.hs | This Source Code Form is subject to the terms of the Mozilla Public
-- License, v. 2.0. If a copy of the MPL was not distributed with this
-- file, You can obtain one at http://mozilla.org/MPL/2.0/.
module Test.Instances.Util
( eqShowJson
) where
import Ogmios.Prelude
import Data.Aeson
( ToJSON (..)
)
import Test.Hspec
( Expectation
, SpecWith
, shouldBe
, specify
)
-- | Non-interesting test meant to tick coverage for Eq, Show and JSON
-- instances, as well as record-fields.
eqShowJson :: (Eq a, Show a, ToJSON a) => String -> a -> (a -> Expectation) -> SpecWith ()
eqShowJson lbl a predicate =
  specify (lbl <> " / " <> show a <> " / " <> show (toEncoding a)) $
    predicate a >> shouldBe a a
| null | https://raw.githubusercontent.com/CardanoSolutions/ogmios/317c826d9d0388cb7efaf61a34085fc7c1b12b06/server/test/unit/Test/Instances/Util.hs | haskell | instances, as well as record-fields. | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
module Test.Instances.Util
( eqShowJson
) where
import Ogmios.Prelude
import Data.Aeson
( ToJSON (..)
)
import Test.Hspec
( Expectation
, SpecWith
, shouldBe
, specify
)
| Non - interesting test meant to tick coverage for Eq , Show and JSON
eqShowJson :: (Eq a, Show a, ToJSON a) => String -> a -> (a -> Expectation) -> SpecWith ()
eqShowJson lbl a predicate =
specify (lbl <> " / " <> show a <> " / " <> show (toEncoding a)) $
predicate a >> shouldBe a a
|
64306739174509bc440164fd0f31d8973fb7ff932a0819f2f260993ecf631ac1 | cram-code/cram_core | designator-pose.lisp | ;;;
Copyright ( c ) 2010 , < >
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions are met:
;;;
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
* Neither the name of the Intelligent Autonomous Systems Group/
;;; Technische Universitaet Muenchen nor the names of its contributors
;;; may be used to endorse or promote products derived from this software
;;; without specific prior written permission.
;;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
;;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
;;; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
;;; SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
;;; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
;;; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
;;; POSSIBILITY OF SUCH DAMAGE.
;;;
(in-package :desig)
(defgeneric designator-pose (desig)
  (:documentation "Returns the pose of the object referenced by `desig'")
  ;; Only effective (i.e. already resolved) designators have a pose; the
  ;; :around method short-circuits to NIL for all others without invoking
  ;; the primary method.
  (:method :around ((desig designator))
    (when (effective desig)
      (call-next-method))))
(defgeneric designator-distance (desig-1 desig-2)
  (:documentation "Returns the (euclidean) distance between the
  entities referenced by the two designators")
  ;; A distance is only defined when both designators are effective; the
  ;; :around method returns NIL otherwise.
  (:method :around ((desig-1 designator) (desig-2 designator))
    (when (and (effective desig-1) (effective desig-2))
      (call-next-method))))
| null | https://raw.githubusercontent.com/cram-code/cram_core/984046abe2ec9e25b63e52007ed3b857c3d9a13c/cram_designators/src/cram-designators/designator-pose.lisp | lisp |
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Technische Universitaet Muenchen nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
| Copyright ( c ) 2010 , < >
* Neither the name of the Intelligent Autonomous Systems Group/
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
(in-package :desig)
(defgeneric designator-pose (desig)
(:documentation "Returns the pose of the object referenced by `desig'")
(:method :around ((desig designator))
(when (effective desig)
(call-next-method))))
(defgeneric designator-distance (desig-1 desig-2)
(:documentation "Returns the (euclidean) distance between the
entities referenced by the two designators")
(:method :around ((desig-1 designator) (desig-2 designator))
(when (and (effective desig-1) (effective desig-2))
(call-next-method))))
|
65cce28fe09d54f34b8d5d35ed92066a1688a65ea5592daf199bbc2b5031d5fe | puppetlabs/trapperkeeper | project.clj | (defproject puppetlabs/trapperkeeper "3.2.2-SNAPSHOT"
:description "A framework for configuring, composing, and running Clojure services."
:license {:name "Apache License, Version 2.0"
:url "-2.0.html"}
:min-lein-version "2.9.0"
:parent-project {:coords [puppetlabs/clj-parent "4.6.17"]
:inherit [:managed-dependencies]}
;; Abort when version ranges or version conflicts are detected in
;; dependencies. Also supports :warn to simply emit warnings.
requires 2.2.0 + .
:pedantic? :abort
:dependencies [[org.clojure/clojure]
[org.clojure/tools.logging]
[org.clojure/tools.macro]
[org.clojure/core.async]
[org.slf4j/log4j-over-slf4j]
[ch.qos.logback/logback-classic]
even though we do n't strictly have a dependency on the following two
;; logback artifacts, specifying the dependency version here ensures
;; that downstream projects don't pick up different versions that would
;; conflict with our version of logback-classic
[ch.qos.logback/logback-core]
[ch.qos.logback/logback-access]
can be used for some advanced logback configurations
[org.codehaus.janino/janino]
[clj-time]
[clj-commons/fs]
[clj-commons/clj-yaml]
[prismatic/plumbing]
[prismatic/schema]
[beckon]
[puppetlabs/typesafe-config]
[puppetlabs/kitchensink]
[puppetlabs/i18n]
[nrepl/nrepl]
]
:deploy-repositories [["releases" {:url ""
:username :env/clojars_jenkins_username
:password :env/clojars_jenkins_password
:sign-releases false}]]
;; Convenience for manually testing application shutdown support - run `lein test-external-shutdown`
:aliases {"test-external-shutdown" ["trampoline" "run" "-m" "examples.shutdown-app.test-external-shutdown"]}
;; By declaring a classifier here and a corresponding profile below we'll get an additional jar
during ` jar ` that has all the code in the test/ directory . Downstream projects can then
;; depend on this test jar using a :classifier in their :dependencies to reuse the test utility
;; code that we have.
:classifiers [["test" :testutils]]
:profiles {:dev {:source-paths ["examples/shutdown_app/src"
"examples/java_service/src/clj"]
:java-source-paths ["examples/java_service/src/java"]
:dependencies [[puppetlabs/kitchensink :classifier "test"]]}
:testutils {:source-paths ^:replace ["test"]}
:uberjar {:aot [puppetlabs.trapperkeeper.main]
:classifiers ^:replace []}}
:plugins [[lein-parent "0.3.7"]
[puppetlabs/i18n "0.8.0"]]
:main puppetlabs.trapperkeeper.main
)
| null | https://raw.githubusercontent.com/puppetlabs/trapperkeeper/5ac89448d149838f19649856b17178c7d943d1da/project.clj | clojure | Abort when version ranges or version conflicts are detected in
dependencies. Also supports :warn to simply emit warnings.
logback artifacts, specifying the dependency version here ensures
that downstream projects don't pick up different versions that would
conflict with our version of logback-classic
Convenience for manually testing application shutdown support - run `lein test-external-shutdown`
By declaring a classifier here and a corresponding profile below we'll get an additional jar
depend on this test jar using a :classifier in their :dependencies to reuse the test utility
code that we have. | (defproject puppetlabs/trapperkeeper "3.2.2-SNAPSHOT"
:description "A framework for configuring, composing, and running Clojure services."
:license {:name "Apache License, Version 2.0"
:url "-2.0.html"}
:min-lein-version "2.9.0"
:parent-project {:coords [puppetlabs/clj-parent "4.6.17"]
:inherit [:managed-dependencies]}
requires 2.2.0 + .
:pedantic? :abort
:dependencies [[org.clojure/clojure]
[org.clojure/tools.logging]
[org.clojure/tools.macro]
[org.clojure/core.async]
[org.slf4j/log4j-over-slf4j]
[ch.qos.logback/logback-classic]
even though we do n't strictly have a dependency on the following two
[ch.qos.logback/logback-core]
[ch.qos.logback/logback-access]
can be used for some advanced logback configurations
[org.codehaus.janino/janino]
[clj-time]
[clj-commons/fs]
[clj-commons/clj-yaml]
[prismatic/plumbing]
[prismatic/schema]
[beckon]
[puppetlabs/typesafe-config]
[puppetlabs/kitchensink]
[puppetlabs/i18n]
[nrepl/nrepl]
]
:deploy-repositories [["releases" {:url ""
:username :env/clojars_jenkins_username
:password :env/clojars_jenkins_password
:sign-releases false}]]
:aliases {"test-external-shutdown" ["trampoline" "run" "-m" "examples.shutdown-app.test-external-shutdown"]}
during ` jar ` that has all the code in the test/ directory . Downstream projects can then
:classifiers [["test" :testutils]]
:profiles {:dev {:source-paths ["examples/shutdown_app/src"
"examples/java_service/src/clj"]
:java-source-paths ["examples/java_service/src/java"]
:dependencies [[puppetlabs/kitchensink :classifier "test"]]}
:testutils {:source-paths ^:replace ["test"]}
:uberjar {:aot [puppetlabs.trapperkeeper.main]
:classifiers ^:replace []}}
:plugins [[lein-parent "0.3.7"]
[puppetlabs/i18n "0.8.0"]]
:main puppetlabs.trapperkeeper.main
)
|
157f9d758e695781c5df87316b351ab35cc4e64759b3ffb20516d52e257bab96 | reflex-frp/reflex | Class.hs | # LANGUAGE CPP #
{-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE FunctionalDependencies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
-- |
-- Module:
--   Reflex.Query.Class
-- Description:
-- A class that ties together queries to some data source and their results,
-- providing methods for requesting data from the source and accumulating
-- streamed results.
module Reflex.Query.Class
( Query (..)
, QueryMorphism (..)
, SelectedCount (..)
, combineSelectedCounts
, MonadQuery (..)
, tellQueryDyn
, queryDyn
, subQuery
, mapQuery
, mapQueryResult
) where
import Control.Applicative
import Control.Category (Category)
import qualified Control.Category as Cat
import Control.Monad.Reader
import Data.Bits
import Data.Data
import Data.Ix
import Data.Kind (Type)
import Data.Map.Monoidal (MonoidalMap)
import qualified Data.Map.Monoidal as MonoidalMap
import Data.Semigroup (Semigroup(..))
import Data.Semigroup.Commutative
import Data.Void
import Data.Monoid hiding ((<>))
import Foreign.Storable
import Reflex.Class
-- | A 'Query' can be thought of as a declaration of interest in some set of data.
-- A 'QueryResult' is the set of data associated with that interest set.
-- The @crop@ function provides a way to determine what part of a given 'QueryResult'
-- is relevant to a given 'Query'.
class (Monoid (QueryResult a), Semigroup (QueryResult a)) => Query a where
  type QueryResult a :: Type
  crop :: a -> QueryResult a -> QueryResult a

instance (Ord k, Query v) => Query (MonoidalMap k v) where
  type QueryResult (MonoidalMap k v) = MonoidalMap k (QueryResult v)
  crop q r = MonoidalMap.intersectionWith (flip crop) r q

-- | The result of two queries is both results.
instance (Query a, Query b) => Query (a, b) where
  type QueryResult (a, b) = (QueryResult a, QueryResult b)
  crop (x, x') (y, y') = (crop x y, crop x' y')

-- | Trivial queries have trivial results.
instance Query () where
  type QueryResult () = ()
  crop _ _ = ()

-- | The result of an absurd query is trivial; If you can ask the question, the
-- answer cannot tell you anything you didn't already know.
--
-- 'QueryResult Void = @Void@' seems like it would also work, but that has
-- problems of robustness. In some applications, an unasked question can still
-- be answered, so it is important that the result is inhabited even when the
-- question isn't. Applications that wish to prevent this can mandate that the
-- query result be paired with the query: then the whole response will be
-- uninhabited as desired.
instance Query Void where
  type QueryResult Void = ()
  crop = absurd
#if MIN_VERSION_base(4,12,0)
-- | We can lift queries into monoidal containers.
-- But beware of Applicatives whose monoid is different from (pure mempty, liftA2 mappend)
instance (Query q, Applicative f) => Query (Ap f q) where
  type QueryResult (Ap f q) = Ap f (QueryResult q)
  crop = liftA2 crop
#endif
-- | QueryMorphism's must be group homomorphisms when acting on the query type
-- and compatible with the query relationship when acting on the query result.
data QueryMorphism q q' = QueryMorphism
  { _queryMorphism_mapQuery :: q -> q'
    -- ^ Send a query forward along the morphism.
  , _queryMorphism_mapQueryResult :: QueryResult q' -> QueryResult q
    -- ^ Pull a result back along the morphism.
  }

-- Morphisms compose like functions: queries map forward, results map back.
instance Category QueryMorphism where
  id = QueryMorphism id id
  qm . qm' = QueryMorphism
    { _queryMorphism_mapQuery = mapQuery qm . mapQuery qm'
    , _queryMorphism_mapQueryResult = mapQueryResult qm' . mapQueryResult qm
    }

-- | Apply a 'QueryMorphism' to a 'Query'
mapQuery :: QueryMorphism q q' -> q -> q'
mapQuery = _queryMorphism_mapQuery

-- | Map a 'QueryMorphism' to a 'QueryResult'
mapQueryResult :: QueryMorphism q q' -> QueryResult q' -> QueryResult q
mapQueryResult = _queryMorphism_mapQueryResult
-- | This type can be used to track of the frequency of interest in a given 'Query'. See note on
-- 'combineSelectedCounts'
newtype SelectedCount = SelectedCount { unSelectedCount :: Int }
  deriving (Eq, Ord, Show, Read, Integral, Num, Bounded, Enum, Real, Ix, Bits, FiniteBits, Storable, Data)

-- Counts add under (<>); the group inverse is numeric negation.
instance Semigroup SelectedCount where
  SelectedCount a <> SelectedCount b = SelectedCount (a + b)

instance Monoid SelectedCount where
  mempty = SelectedCount 0
  mappend = (<>)

instance Group SelectedCount where
  negateG (SelectedCount a) = SelectedCount (negate a)

instance Commutative SelectedCount
-- | The Semigroup\/Monoid\/Group instances for a Query containing 'SelectedCount's should use
-- this function which returns Nothing if the result is 0. This allows the pruning of leaves
-- of the 'Query' that are no longer wanted.
combineSelectedCounts :: SelectedCount -> SelectedCount -> Maybe SelectedCount
combineSelectedCounts (SelectedCount i) (SelectedCount j) = if i == negate j then Nothing else Just $ SelectedCount (i + j)
-- | A class that allows sending of 'Query's and retrieval of 'QueryResult's. See 'queryDyn' for a commonly
-- used interface.
class (Group q, Commutative q, Query q, Monad m) => MonadQuery t q m | m -> q t where
  tellQueryIncremental :: Incremental t (AdditivePatch q) -> m ()
  askQueryResult :: m (Dynamic t (QueryResult q))
  queryIncremental :: Incremental t (AdditivePatch q) -> m (Dynamic t (QueryResult q))

instance MonadQuery t q m => MonadQuery t q (ReaderT r m) where
  tellQueryIncremental = lift . tellQueryIncremental
  askQueryResult = lift askQueryResult
  queryIncremental = lift . queryIncremental
-- | Produce and send an 'Incremental' 'Query' from a 'Dynamic' 'Query'.
tellQueryDyn :: (Reflex t, MonadQuery t q m) => Dynamic t q -> m ()
tellQueryDyn d = tellQueryIncremental $ unsafeBuildIncremental (sample (current d)) $ attachWith (\old new -> AdditivePatch $ new ~~ old) (current d) (updated d)
-- | Retrieve 'Dynamic'ally updating 'QueryResult's for a 'Dynamic'ally updating 'Query'.
queryDyn :: (Reflex t, MonadQuery t q m) => Dynamic t q -> m (Dynamic t (QueryResult q))
queryDyn q = do
  tellQueryDyn q -- register interest so results for this query keep flowing
  zipDynWith crop q <$> askQueryResult -- narrow the shared result to this query's slice
-- | Use a query morphism to operate on a smaller version of a query.
subQuery :: (Reflex t, MonadQuery t q2 m) => QueryMorphism q1 q2 -> Dynamic t q1 -> m (Dynamic t (QueryResult q1))
subQuery (QueryMorphism f g) x = fmap g <$> queryDyn (fmap f x)
| null | https://raw.githubusercontent.com/reflex-frp/reflex/d9a40cd97072869c91479303ee52577b793c11d2/src/Reflex/Query/Class.hs | haskell | # LANGUAGE DeriveDataTypeable #
|
Module:
Description:
A class that ties together queries to some data source and their results,
providing methods for requesting data from the source and accumulating
streamed results.
| A 'Query' can be thought of as a declaration of interest in some set of data.
A 'QueryResult' is the set of data associated with that interest set.
The @crop@ function provides a way to determine what part of a given 'QueryResult'
is relevant to a given 'Query'.
| Trivial queries have trivial results.
| The result of an absurd query is trivial; If you can ask the question, the
answer cannot tell you anything you didn't already know.
'QueryResult Void = @Void@' seems like it would also work, but that has
problems of robustness. In some applications, an unasked question can still
be answered, so it is important that the result is inhabited even when the
question isn't. Applications that wish to prevent this can mandate that the
query result be paired with the query: then the whole response will be
uninhabited as desired.
| We can lift queries into monoidal containers.
| QueryMorphism's must be group homomorphisms when acting on the query type
and compatible with the query relationship when acting on the query result.
| Apply a 'QueryMorphism' to a 'Query'
| Map a 'QueryMorphism' to a 'QueryResult'
| This type can be used to track of the frequency of interest in a given 'Query'. See note on
'combineSelectedCounts'
this function which returns Nothing if the result is 0. This allows the pruning of leaves
of the 'Query' that are no longer wanted.
used interface.
| Produce and send an 'Incremental' 'Query' from a 'Dynamic' 'Query'.
| Use a query morphism to operate on a smaller version of a query. | # LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE FunctionalDependencies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
Reflex . Query . Class
module Reflex.Query.Class
( Query (..)
, QueryMorphism (..)
, SelectedCount (..)
, combineSelectedCounts
, MonadQuery (..)
, tellQueryDyn
, queryDyn
, subQuery
, mapQuery
, mapQueryResult
) where
import Control.Applicative
import Control.Category (Category)
import qualified Control.Category as Cat
import Control.Monad.Reader
import Data.Bits
import Data.Data
import Data.Ix
import Data.Kind (Type)
import Data.Map.Monoidal (MonoidalMap)
import qualified Data.Map.Monoidal as MonoidalMap
import Data.Semigroup (Semigroup(..))
import Data.Semigroup.Commutative
import Data.Void
import Data.Monoid hiding ((<>))
import Foreign.Storable
import Reflex.Class
class (Monoid (QueryResult a), Semigroup (QueryResult a)) => Query a where
type QueryResult a :: Type
crop :: a -> QueryResult a -> QueryResult a
instance (Ord k, Query v) => Query (MonoidalMap k v) where
type QueryResult (MonoidalMap k v) = MonoidalMap k (QueryResult v)
crop q r = MonoidalMap.intersectionWith (flip crop) r q
-- | the result of two queries is both results.
instance (Query a, Query b) => Query (a, b) where
type QueryResult (a, b) = (QueryResult a, QueryResult b)
crop (x, x') (y, y') = (crop x y, crop x' y')
instance Query () where
type QueryResult () = ()
crop _ _ = ()
instance Query Void where
type QueryResult Void = ()
crop = absurd
#if MIN_VERSION_base(4,12,0)
-- But beware of Applicatives whose monoid is different from (pure mempty, liftA2 mappend)
instance (Query q, Applicative f) => Query (Ap f q) where
type QueryResult (Ap f q) = Ap f (QueryResult q)
crop = liftA2 crop
#endif
data QueryMorphism q q' = QueryMorphism
{ _queryMorphism_mapQuery :: q -> q'
, _queryMorphism_mapQueryResult :: QueryResult q' -> QueryResult q
}
instance Category QueryMorphism where
id = QueryMorphism id id
qm . qm' = QueryMorphism
{ _queryMorphism_mapQuery = mapQuery qm . mapQuery qm'
, _queryMorphism_mapQueryResult = mapQueryResult qm' . mapQueryResult qm
}
mapQuery :: QueryMorphism q q' -> q -> q'
mapQuery = _queryMorphism_mapQuery
mapQueryResult :: QueryMorphism q q' -> QueryResult q' -> QueryResult q
mapQueryResult = _queryMorphism_mapQueryResult
newtype SelectedCount = SelectedCount { unSelectedCount :: Int }
deriving (Eq, Ord, Show, Read, Integral, Num, Bounded, Enum, Real, Ix, Bits, FiniteBits, Storable, Data)
instance Semigroup SelectedCount where
SelectedCount a <> SelectedCount b = SelectedCount (a + b)
instance Monoid SelectedCount where
mempty = SelectedCount 0
mappend = (<>)
instance Group SelectedCount where
negateG (SelectedCount a) = SelectedCount (negate a)
instance Commutative SelectedCount
-- | The Semigroup\/Monoid\/Group instances for a Query containing 'SelectedCount's should use
combineSelectedCounts :: SelectedCount -> SelectedCount -> Maybe SelectedCount
combineSelectedCounts (SelectedCount i) (SelectedCount j) = if i == negate j then Nothing else Just $ SelectedCount (i + j)
-- | A class that allows sending of 'Query's and retrieval of 'QueryResult's. See 'queryDyn' for a commonly
class (Group q, Commutative q, Query q, Monad m) => MonadQuery t q m | m -> q t where
tellQueryIncremental :: Incremental t (AdditivePatch q) -> m ()
askQueryResult :: m (Dynamic t (QueryResult q))
queryIncremental :: Incremental t (AdditivePatch q) -> m (Dynamic t (QueryResult q))
instance MonadQuery t q m => MonadQuery t q (ReaderT r m) where
tellQueryIncremental = lift . tellQueryIncremental
askQueryResult = lift askQueryResult
queryIncremental = lift . queryIncremental
tellQueryDyn :: (Reflex t, MonadQuery t q m) => Dynamic t q -> m ()
tellQueryDyn d = tellQueryIncremental $ unsafeBuildIncremental (sample (current d)) $ attachWith (\old new -> AdditivePatch $ new ~~ old) (current d) (updated d)
-- | Retrieve 'Dynamic'ally updating 'QueryResult's for a 'Dynamic'ally updating 'Query'.
queryDyn :: (Reflex t, MonadQuery t q m) => Dynamic t q -> m (Dynamic t (QueryResult q))
queryDyn q = do
tellQueryDyn q
zipDynWith crop q <$> askQueryResult
subQuery :: (Reflex t, MonadQuery t q2 m) => QueryMorphism q1 q2 -> Dynamic t q1 -> m (Dynamic t (QueryResult q1))
subQuery (QueryMorphism f g) x = fmap g <$> queryDyn (fmap f x)
|
f5c3c73da3b5edcd981a97ce083b93cf9cf7aa59089629f2cee05f5ce88ad3e1 | mbutterick/txexpr | container.rkt | #lang racket/base
(provide txexpr?/recur
txexpr->values/attrs?)
(require racket/list)
;; A [TXcontainer T A E] is one of:
;;  - (List* T A (Listof E))
;;  - (List* T (Listof E))
;; Where A and E are disjoint.
;; txexpr?/recur :
;; Any
;; [Any -> Bool : T]
;; [Any -> Bool : #:+ (and A (! E)) #:- (! A)]
;; [Any -> Bool : #:+ (and E (! A)) #:- (! E)]
;; ->
;; : [TXcontainer T A E]
;; the attrs? predicate and the element? predicate should be disjoint
;; Recognizes the [TXcontainer T A E] shape without committing to any
;; particular tag/attrs/element representation: a non-empty list whose head
;; satisfies tag?, optionally followed by an attrs value, with every
;; remaining item satisfying element?.
(define (txexpr?/recur v tag? attrs? element?)
  (and (list? v)
       (pair? v)
       (tag? (car v))
       (let ([tail (cdr v)])
         (if (and (pair? tail) (attrs? (car tail)))
             (andmap element? (cdr tail))
             (andmap element? tail)))))
;; txexpr->values/attrs? :
;; [TXcontainer T A E]
;; [Any -> Bool : #:+ (and A (! E)) #:- (! A)]
;; ->
;; (values T A (Listof E))
;; Splits a tx container into (values tag attrs elements), supplying '()
;; for attrs when the second item does not satisfy attrs?.
(define (txexpr->values/attrs? tx attrs?)
  (define tag (first tx))
  (define tail (rest tx))
  (if (and (pair? tail) (attrs? (first tail)))
      (values tag (first tail) (rest tail))
      (values tag '() tail)))
| null | https://raw.githubusercontent.com/mbutterick/txexpr/435c6e6f36fd39065ae9d8a00285fda0e4e41fa1/txexpr/private/container.rkt | racket | Where A and E are disjoint.
txexpr?/recur :
Any
[Any -> Bool : T]
[Any -> Bool : #:+ (and A (! E)) #:- (! A)]
[Any -> Bool : #:+ (and E (! A)) #:- (! E)]
->
: [TXcontainer T A E]
the attrs? predicate and the element? predicate should be disjoint
txexpr->values/attrs? :
[TXcontainer T A E]
[Any -> Bool : #:+ (and A (! E)) #:- (! A)]
-> | #lang racket/base
(provide txexpr?/recur
txexpr->values/attrs?)
(require racket/list)
A [ TXcontainer T A E ] is one of :
- ( List * T A ( E ) )
- ( Lxst * T ( E ) )
(define (txexpr?/recur v tag? attrs? element?)
(and (list? v)
(not (empty? v))
(tag? (first v))
(cond [(and (not (empty? (rest v)))
(attrs? (second v)))
(andmap element? (rest (rest v)))]
[else
(andmap element? (rest v))])))
;; (values T A (Listof E))
(define (txexpr->values/attrs? tx attrs?)
(cond [(and (not (empty? (rest tx)))
(attrs? (second tx)))
(values (first tx) (second tx) (rest (rest tx)))]
[else
(values (first tx) '() (rest tx))]))
|
9cf6bb5c2acb391dbed759f5ed6b54f70ffe3b8c09b485f0e6610bc4d46c78a4 | district0x/district0x-network-token | main_panel.cljs | (ns contribution.components.main-panel
(:require
[cljs-react-material-ui.core :refer [get-mui-theme]]
[cljs-react-material-ui.reagent :as ui]
[cljs-time.core :as t]
[clojure.set :as set]
[contribution.constants :as constants]
[contribution.styles :as styles]
[district0x.components.active-address-select-field :refer [active-address-select-field]]
[district0x.components.misc :as misc :refer [row row-with-cols col center-layout etherscan-link]]
[district0x.components.utils :refer [create-with-default-props parse-props-children]]
[district0x.utils :as u]
[medley.core :as medley]
[re-frame.core :refer [subscribe dispatch]]
[reagent.core :as r]
[cljs-web3.core :as web3]))
;; misc/paper pre-configured with the app's shared paper styling.
(def paper (create-with-default-props misc/paper {:style styles/paper}))
;; Full-page notice rendered when the contribution smart contracts can't be
;; located on the connected network (typically MetaMask on the wrong chain).
(defn contracts-not-found-page []
  [center-layout
   [paper
    [row
     {:center "xs"
      :middle "xs"
      :style {:min-height "400px"}}
     [:h3 "We couldn't find district0x Contribution smart contracts." [:br]
      "Your MetaMask Chrome extension is most likely not pointed to Ethereum Mainnet, please check."]]]])
(defn app-bar-right-elements
  "Right side of the top app bar: the active address's DNT and ETH balances
  plus the active-address selector. Renders nothing while the blockchain
  connection is in an error state."
  []
  ;; NOTE: a previously-present :district0x/contracts-not-found? subscription
  ;; was never dereferenced here and has been removed.
  (let [active-address-balance-eth (subscribe [:district0x/active-address-balance :eth])
        active-address-balance-dnt (subscribe [:district0x/active-address-balance :dnt])
        connection-error? (subscribe [:district0x/blockchain-connection-error?])
        my-addresses (subscribe [:district0x/my-addresses])]
    (fn []
      (when-not @connection-error?
        [row
         {:middle "xs"
          :end "xs"}
         ;; DNT balance: only when addresses are known and the balance has loaded.
         (when (and (seq @my-addresses)
                    @active-address-balance-dnt)
           [:h2.bolder {:style (merge styles/app-bar-balance
                                      {:margin-right 10})}
            (u/format-dnt-with-symbol @active-address-balance-dnt)])
         ;; ETH balance: same conditions as above.
         (when (and (seq @my-addresses)
                    @active-address-balance-eth)
           [:h2.bolder {:style (merge styles/app-bar-balance
                                      {:margin-right 20})}
            (u/format-eth-with-symbol @active-address-balance-eth)])
         [active-address-select-field
          {:select-field-props {:style styles/active-address-select-field
                                :label-style styles/active-address-select-field-label
                                :underline-style {:border-color styles/theme-blue}}
           :single-address-props {:style styles/active-address-single}}]]))))
(defn info-line
  "Renders a label/value line: the first child in bold cyan, the remaining
  children after it. Accepts an optional props map before the children."
  [props & children]
  (let [[div-attrs [label & more]] (parse-props-children props children)]
    [:div
     div-attrs
     [:b {:style {:color styles/theme-cyan}} label]
     " "
     (into [:span] more)]))
;; Admin/debug view listing contract addresses and the live parameters of the
;; current contribution period. Rendered only when :db/can-see-admin-panel? is true.
;; NOTE(review): contrib-period-status is subscribed but never dereferenced here.
(defn admin-panel []
  (let [can-see-admin-panel? (subscribe [:db/can-see-admin-panel?])
        contrib-period (subscribe [:contribution/contrib-period])
        enable-contrib-form (subscribe [:form.contribution/enable-contrib-period])
        contrib-config (subscribe [:contribution/configuration])
        contrib-period-status (subscribe [:contribution/current-contrib-period-status])
        contrib-contract-dnt-balance (subscribe [:contribution/dnt-balance])]
    (fn []
      (let [{:keys [:contrib-period/start-time :contrib-period/end-time
                    :contrib-period/soft-cap-amount :contrib-period/after-soft-cap-duration
                    :contrib-period/hard-cap-amount :contrib-period/enabled?
                    :contrib-period/stake :contrib-period/soft-cap-reached? :contrib-period/total-contributed
                    :contrib-period/hard-cap-reached? :contrib-period/contributors-count]} @contrib-period
            {:keys [:loading?]} @enable-contrib-form
            {:keys [:contribution/stopped? :contribution/founder1 :contribution/founder2
                    :contribution/early-sponsor :contribution/wallet :contribution/advisers
                    :contribution-address dnt-token-address :dnt-token/transfers-enabled?]} @contrib-config]
        (when @can-see-admin-panel?
          [paper
           [row-with-cols
            [col
             {:xs 12}
             [:h1
              {:style (merge styles/margin-bottom-gutter styles/text-center)}
              "Admin Panel"]]
            ;; Static configuration: contract and participant addresses.
            [col
             {:xs 12
              :style styles/margin-bottom-gutter}
             [info-line "Contribution Contract:" [etherscan-link {:address contribution-address}]]
             [info-line "DNT Token Contract:" [etherscan-link {:address dnt-token-address}]]
             [info-line "Founder 1:" [etherscan-link {:address founder1}]]
             [info-line "Founder 2:" [etherscan-link {:address founder2}]]
             [info-line "Early Sponsor:" [etherscan-link {:address early-sponsor}]]
             [info-line "Wallet:" [etherscan-link {:address wallet}]]
             ;; The last adviser in the list is labelled "Community Adviser".
             (for [[i adviser] (medley/indexed advisers)]
               [info-line
                {:key i}
                (str (when (= adviser (last advisers)) "Community ")
                     "Adviser " (inc i) ":") [etherscan-link {:address adviser}]])]
            ;; Live contribution-period figures; only shown once data has loaded.
            (when total-contributed
              [col
               {:xs 12}
               [info-line "Contribution Round:" (inc constants/current-contrib-period)]
               [info-line "Start Time:" (u/format-local-datetime start-time)]
               [info-line "End Time:" (u/format-local-datetime end-time)]
               [info-line "Soft Cap:" (u/format-eth-with-symbol soft-cap-amount)]
               [info-line "After Soft Cap Duration:" (t/in-hours (t/seconds after-soft-cap-duration)) " hours"]
               [info-line "Hard Cap:" (u/format-eth-with-symbol hard-cap-amount)]
               [info-line "Enabled?" (u/bool->yes|no enabled?)]
               [info-line "Token Distribution:" (u/format-dnt-with-symbol stake)]
               [info-line "Soft Cap Reached?" (u/bool->yes|no soft-cap-reached?)]
               [info-line "Hard Cap Reached?" (u/bool->yes|no hard-cap-reached?)]
               [info-line "Total Contributed:" (u/format-eth-with-symbol total-contributed)]
               [info-line "Contributors Count:" contributors-count]
               [info-line "Emergency stop?" (u/bool->yes|no stopped?)]
               [info-line "Contribution Contract DNT Balance:" (u/format-dnt-with-symbol @contrib-contract-dnt-balance)]
               [info-line "DNT Transfers Enabled?" (u/bool->yes|no transfers-enabled?)]])]])))))
;; Form-2 component: one of the three stat tiles. `title` is the heading,
;; `index` (0-2) controls which divider borders are drawn (the last tile gets
;; none); children fill the tile body. Borders switch from right-hand to
;; bottom when the viewport is xs/sm width.
(defn contribution-tile []
  (let [xs-sm-width? (subscribe [:district0x/window-xs-sm-width?])]
    (fn [{:keys [:title :index]} & children]
      [col
       {:xs 12 :md 4
        :key index
        :style {:padding-left 0
                :padding-right 0}}
       [:div
        {:style (merge styles/stats-tile-title
                       (if @xs-sm-width?
                         (merge
                          styles/margin-top-gutter-less
                          styles/stats-tile-border-bottom
                          {:margin-right 0 :margin-left 0})
                         styles/stats-tile-border-bottom))}
        title]
       (into [row
              {:middle "xs"
               :center "xs"
               :style (merge styles/stats-tile
                             (when-not (= index 2)
                               (if @xs-sm-width?
                                 styles/stats-tile-border-bottom
                                 styles/stats-tile-border-right)))}]
             children)])))
;; Maps the contribution-period status keyword to the countdown tile's heading.
(def contrib-period-status->countdown-title
  {:contrib-period-status/not-started "Starts in"
   :contrib-period-status/running "Ends in"
   :contrib-period-status/ended "Ended"})
;; Singular unit names used to build pluralized countdown labels.
(def unit->name
  {:days "day"
   :hours "hour"
   :minutes "minute"
   :seconds "second"})
(defn countdown-time-item
  "Renders one countdown figure (zero-padded below 10) followed by its
  pluralized unit label."
  [{:keys [:unit :value]}]
  (let [padded (if (< value 10) (str 0 value) value)]
    [:span
     [:h1 {:style (merge styles/contrib-countdown-value styles/text-left)}
      padded]
     [:span {:style styles/contrib-countdown-unit}
      " " (u/pluralize (unit->name unit) value)]]))
;; Form-2 component: renders a days/hours row and a minutes/seconds row
;; counting down from `from-time` to `to-time`, with the absolute target
;; datetime underneath. Renders nothing until both times are available.
(defn- countdown []
  (fn [{:keys [:from-time :to-time]}]
    (when (and from-time to-time)
      (let [{:keys [:days :hours :minutes :seconds]} (u/time-remaining from-time to-time)]
        [:div
         {:style {:margin-top "-10px"}}
         [:div
          {:style styles/no-wrap}
          [countdown-time-item
           {:value days
            :unit :days}]
          " "
          [countdown-time-item
           {:value hours
            :unit :hours}]]
         [:div
          {:style styles/no-wrap}
          [countdown-time-item
           {:value minutes
            :unit :minutes}]
          " "
          [countdown-time-item
           {:value seconds
            :unit :seconds}]]
         [:div
          {:style {:margin-top 5}}
          (u/format-local-datetime to-time)]]))))
;; The three headline tiles: countdown (status-dependent), amount raised with
;; participant count, and the DNT token distribution. Each tile shows a
;; spinner until its data loads.
;; NOTE(review): xs-width? is subscribed and loading? destructured but neither
;; is used in this body.
(defn contribution-stats-tiles []
  (let [xs-width? (subscribe [:district0x/window-xs-width?])
        contrib-period (subscribe [:contribution/contrib-period])
        contrib-period-status (subscribe [:contribution/current-contrib-period-status])
        now (subscribe [:db/now])]
    (fn []
      (let [{:keys [:contrib-period/loading? :contrib-period/start-time :contrib-period/end-time
                    :contrib-period/total-contributed :contrib-period/stake
                    :contrib-period/contributors-count]} @contrib-period]
        [row-with-cols
         {:center "xs"}
         [col
          {:xs 12}
          [:h1 {:style (merge styles/margin-bottom-gutter-more
                              styles/text-center)}
           "Contribution Period " (constants/contrib-period->name constants/current-contrib-period)]]
         ;; Tile 0: countdown to start or end, or the end date once finished.
         [contribution-tile
          {:title (contrib-period-status->countdown-title @contrib-period-status)
           :index 0}
          (if (and start-time end-time)
            (condp = @contrib-period-status
              :contrib-period-status/not-started
              [countdown
               {:from-time @now
                :to-time start-time}]
              :contrib-period-status/running
              [countdown
               {:from-time @now
                :to-time end-time}]
              :contrib-period-status/ended
              [:h1 (u/format-local-date end-time)])
            [ui/circular-progress])]
         ;; Tile 1: total ETH raised plus participant count.
         [contribution-tile
          {:title "Raised"
           :index 1}
          (if total-contributed
            [row
             {:middle "xs"
              :center "xs"}
             [:div
              [:h1
               {:style styles/stats-tile-amount}
               (u/format-metric total-contributed)]
              [:span {:style {:font-size "1.2em"}} " ETH"]]
             [:h3
              {:style styles/stats-tile-amount-subtitle}
              contributors-count (u/pluralize " participant" contributors-count)]]
            [ui/circular-progress])]
         ;; Tile 2: DNT stake for this round, shown in millions.
         [contribution-tile
          {:title "Token Distribution"
           :index 2}
          (if stake
            [:div
             [:h1
              {:style styles/stats-tile-amount}
              (/ stake 1000000) " Mil"]
             [:h3
              {:style styles/stats-tile-amount-subtitle}
              "DNT tokens"]]
            [ui/circular-progress])]]))))
;; Progress bar under the stat tiles: tracks progress toward the soft cap
;; until it is reached, then switches to tracking the hard cap. Renders
;; nothing until the period figures have loaded.
(defn contribution-soft-cap-progress []
  (let [current-contrib-period (subscribe [:contribution/contrib-period])]
    (fn []
      (let [{:keys [:contrib-period/total-contributed :contrib-period/soft-cap-amount
                    :contrib-period/after-soft-cap-duration :contrib-period/hard-cap-amount]} @current-contrib-period]
        (when (and total-contributed soft-cap-amount after-soft-cap-duration)
          [row
           {:center "xs"
            :style styles/margin-top-gutter-more}
           (if (< total-contributed soft-cap-amount)
             ;; Below soft cap: show progress toward the soft cap.
             [:div
              {:style styles/full-width}
              [ui/linear-progress
               {:mode "determinate"
                :style styles/cap-progress
                :color styles/theme-orange
                :max soft-cap-amount
                :value total-contributed}]
              [:h3
               {:style styles/full-width}
               "Soft Cap " (js/parseInt total-contributed) "/" (or soft-cap-amount 0) " ETH"]
              [:div
               {:style (merge styles/full-width
                              styles/fade-white-text)}
               "After soft cap is reached, the contribution period will be closed in " (t/in-hours (t/seconds after-soft-cap-duration))
               " hours" [:br]
               "In case of reaching " (or hard-cap-amount 0) " ETH hard cap, contribution period closes immediately"]]
             ;; Soft cap met: show progress toward the hard cap instead.
             [:div
              {:style styles/full-width}
              [ui/linear-progress
               {:mode "determinate"
                :style styles/cap-progress
                :color styles/theme-orange
                :max hard-cap-amount
                :value total-contributed}]
              [:h3
               {:style styles/full-width}
               "Hard Cap " (js/parseInt total-contributed) "/" (or hard-cap-amount 0) " ETH"]
              [:div
               {:style (merge styles/full-width
                              styles/fade-white-text)}
               "After hard cap is reached, no more contributions will be accepted"]])])))))
(defn- external-link
  "Green anchor that opens `href` in a new tab, rendering `body` as its text."
  [body href]
  (let [attrs {:href href
               :target :_blank
               :style {:color styles/theme-green}}]
    [:a attrs body]))
;; The contribute form. Three mutually exclusive states:
;;  1. visitor is from a disallowed country -> refusal notice only;
;;  2. confirmations not yet submitted -> the four confirmation checkboxes;
;;  3. confirmations submitted -> contract address + amount form.
;; Followed by the DNT distribution note, terms link, and footer.
(defn contribution-contribute-section []
  (let [contribution-address (subscribe [:contribution-contract-address])
        contribute-form (subscribe [:form.contribution/contribute])
        can-use-form? (subscribe [:district0x/can-submit-into-blockchain?])
        contrib-period-status (subscribe [:contribution/current-contrib-period-status])
        current-contrib-period (subscribe [:contribution/contrib-period])
        contrib-config (subscribe [:contribution/configuration])
        confirmed-not-us-citizen? (subscribe [:confirmed-not-us-citizen?])
        confirmed-terms? (subscribe [:confirmed-terms?])
        confirmed-gas-price? (subscribe [:confirmed-gas-price?])
        confirmed-compensation? (subscribe [:confirmed-compensation?])
        confirmations-submitted? (subscribe [:confirmations-submitted?])
        disallowed-country? (subscribe [:disallowed-country?])
        ]
    (fn []
      ;; error-text is nil when the amount is a valid value above the minimum;
      ;; any non-nil value disables the Send button and shows under the field.
      (let [{:keys [:contrib-period/stake :contrib-period/enabled?]} @current-contrib-period
            {:keys [:data :loading?]} @contribute-form
            {:keys [:contribution/amount]} data
            {:keys [:contribution/stopped? :contribution/max-gas-price]} @contrib-config
            error-text (cond
                         (not (u/non-neg-ether-value? amount)) "This is not valid Ether value"
                         (< (u/parse-float amount) constants/min-contrib-amount) (str "Minimum contribution amount is "
                                                                                      constants/min-contrib-amount
                                                                                      " ETH"))]
        [row
         {:center "xs"
          :style styles/margin-top-gutter-more}
         (if @disallowed-country?
           [:div "We have detected that you are visiting this page from the United States or another unpermitted country. Please note: US citizens and residents are not permitted to participate in the district0x Contribution Period."]
           (if @confirmations-submitted?
             ;; State 3: instructions, contract address, and the amount form.
             [:div
              [:h2
               {:style (merge styles/full-width
                              styles/margin-bottom-gutter-less)}
               "How to Contribute"]
              [:div
               {:style (merge
                        {:color styles/theme-orange
                         :font-size "1em"}
                        styles/margin-bottom-gutter-less)}
               [:b
                "Important: Maximum allowed gas price is " (web3/from-wei max-gas-price :gwei) " Gwei "
                "(" max-gas-price " wei)." [:br]
                "Recommended gas limit is 200000" [:br]
                (if stopped?
                  "Contribution was temporarily paused due to emergency"
                  ({:contrib-period-status/not-started "Contribution period has not started yet"
                    :contrib-period-status/ended "Contribution period has been finished"}
                   @contrib-period-status))]]
              [:div
               {:style styles/full-width}
               "You can send Ether directly to contribution smart contract at"]
              [:h1
               {:style (merge styles/full-width
                              styles/margin-top-gutter-less
                              {:color styles/theme-green
                               :font-family "filson-soft, sans-serif"})}
               "district.eth" [:br]
               [:span {:style {:font-size "0.7em"}} @contribution-address]]
              [:div
               {:style (merge styles/full-width
                              styles/margin-top-gutter-less)}
               "or you can use following form by using " [external-link "MetaMask" "/"] ", "
               [external-link "Mist" ""] ", or "
               [external-link "Parity" "/"]]
              ;; Amount input + Send button (spinner while the tx is in flight).
              [row
               {:style styles/full-width
                :middle "xs"
                :center "xs"}
               [ui/text-field
                {:floating-label-fixed true
                 :floating-label-text "Amount in Ether"
                 :default-value amount
                 :style (merge {:margin-left styles/desktop-gutter-mini
                                :margin-right styles/desktop-gutter-mini}
                               styles/margin-bottom-gutter-less)
                 :error-text error-text
                 :error-style styles/text-left
                 :disabled (or (= :contrib-period-status/ended @contrib-period-status)
                               (not enabled?)
                               stopped?)
                 :on-change #(dispatch [:district0x.form/set-value :form.contribution/contribute :default :contribution/amount %2])}]
               (if-not loading?
                 [ui/raised-button
                  {:primary true
                   :label "Send"
                   :disabled (or (not @can-use-form?)
                                 (boolean error-text)
                                 (= :contrib-period-status/ended @contrib-period-status)
                                 (not enabled?)
                                 stopped?)
                   :style {:margin-left styles/desktop-gutter-mini
                           :margin-right styles/desktop-gutter-mini
                           :margin-top 20}
                   :on-touch-tap #(dispatch [:contribution/contribute data])}]
                 [:div
                  {:style {:margin-top 20
                           :margin-left styles/desktop-gutter-mini
                           :margin-right styles/desktop-gutter-mini
                           :width 88}}
                  [ui/circular-progress
                   {:size 30
                    :thickness 2}]])]
              [:div
               {:style (merge styles/full-width)}
               "For detailed instructions watch our "
               [external-link "tutorials on Youtube" ""]]]
             ;; State 2: the four confirmations gating access to the form.
             [:div
              {:style styles/full-width}
              [:h2
               {:style (merge styles/full-width
                              styles/margin-bottom-gutter)}
               "Before You Contribute"]
              [row
               {:center "xs"
                :style styles/full-width}
               [:div
                {:style styles/text-left}
                [ui/checkbox
                 {:label "I confirm that I have read and agree to the Contribution Terms."
                  :checked @confirmed-terms?
                  :on-check #(dispatch [:set-confirmation :confirmed-terms? %2])}]
                [ui/checkbox
                 {:label "I confirm that I am not a citizen or resident of the United States or other unpermitted country."
                  :checked @confirmed-not-us-citizen?
                  :on-check #(dispatch [:set-confirmation :confirmed-not-us-citizen? %2])}]
                [ui/checkbox
                 {:label (r/as-element [:span "I understand "
                                        [:b "the maximum gas price when contributing is 50 Gwei"]
                                        " and any transaction sent with a higher gas price will be rejected."])
                  :checked @confirmed-gas-price?
                  :on-check #(dispatch [:set-confirmation :confirmed-gas-price? %2])}]
                [ui/checkbox
                 {:label "I understand that it may take up to 7 days from the time the contribution period ends to receive DNT."
                  :checked @confirmed-compensation?
                  :on-check #(dispatch [:set-confirmation :confirmed-compensation? %2])}]]
               [:div
                {:style styles/full-width}
                [ui/raised-button
                 {:primary true
                  :label "Continue"
                  :style styles/margin-top-gutter-less
                  :disabled (or (not @confirmed-terms?)
                                (not @confirmed-not-us-citizen?)
                                (not @confirmed-gas-price?)
                                (not @confirmed-compensation?))
                  :on-touch-tap #(dispatch [:set-confirmation :confirmations-submitted? true])}]]]]))
         (when stake
           [:div
            [:div
             {:style styles/distribution-note}
             "Please note: " (u/format-eth stake) " DNT tokens will be divided and distributed amongst all
        participants " [:b "after the contribution period ends"] ". Each participant will receive an allocation
        proportional to the amount they contributed, relative to the total collected."]
            [:div
             {:style (merge styles/full-width
                            styles/margin-top-gutter)}
             [:a {:href "-terms.pdf"
                  :target :_blank
                  :style styles/contrib-terms-link}
              "Contribution Terms"]]])
         [:small
          {:style (merge styles/full-width
                         styles/fade-white-text
                         styles/margin-top-gutter
                         styles/margin-bottom-gutter-mini)}
          "Copyright © 2017 district0x"]
         [:div
          {:style styles/full-width}
          [:img {:src "./images/district0x-logo-title-white.svg"
                 :style {:height 15}}]]]))))
;; Main contribution page: stat tiles, cap progress bar, and the contribute form.
(defn contribution-panel []
  [paper
   [contribution-stats-tiles]
   [contribution-soft-cap-progress]
   [contribution-contribute-section]])
;; App-bar logo image wrapped in a link.
;; NOTE(review): the href is empty here — confirm the intended destination.
(defn logo []
  [:a
   {:href ""}
   [:img
    {:style styles/logo
     :src "./images/district0x-logo.svg"}]])
;; Root component: themed shell with decorative blob images, the app bar, and
;; either the main contribution content or the contracts-not-found notice.
;; NOTE(review): the snackbar subscription is never dereferenced in this body.
(defn main-panel []
  (let [connection-error? (subscribe [:district0x/blockchain-connection-error?])
        snackbar (subscribe [:district0x/snackbar])
        contracts-not-found? (subscribe [:district0x/contracts-not-found?])
        xs-width? (subscribe [:district0x/window-xs-width?])]
    (fn []
      [misc/main-panel
       {:mui-theme styles/mui-theme}
       [:div
        {:style {:padding-bottom 20
                 :overflow :hidden
                 :position :relative
                 :min-height "100%"}}
        ;; Decorative background blobs, absolutely positioned via styles.
        [:img {:src "./images/green-blob2.svg"
               :style styles/blob4}]
        [:img {:src "./images/cyan-blob.svg"
               :style styles/blob1}]
        [:img {:src "./images/green-blob1.svg"
               :style styles/blob2}]
        [:img {:src "./images/green-blobs.svg"
               :style styles/blob3}]
        [ui/app-bar
         {:show-menu-icon-button false
          :style styles/app-bar
          :title (r/as-element [logo])
          :icon-element-right (r/as-element [app-bar-right-elements])}]
        ;; Tighter gutters on extra-small viewports.
        [:div {:style (merge styles/content-wrap
                             (when @xs-width?
                               (styles/padding-all styles/desktop-gutter-mini)))}
         (if @contracts-not-found?
           [contracts-not-found-page]
           [center-layout
            [contribution-panel]
            [admin-panel]])]]])))
(:require
[cljs-react-material-ui.core :refer [get-mui-theme]]
[cljs-react-material-ui.reagent :as ui]
[cljs-time.core :as t]
[clojure.set :as set]
[contribution.constants :as constants]
[contribution.styles :as styles]
[district0x.components.active-address-select-field :refer [active-address-select-field]]
[district0x.components.misc :as misc :refer [row row-with-cols col center-layout etherscan-link]]
[district0x.components.utils :refer [create-with-default-props parse-props-children]]
[district0x.utils :as u]
[medley.core :as medley]
[re-frame.core :refer [subscribe dispatch]]
[reagent.core :as r]
[cljs-web3.core :as web3]))
(def paper (create-with-default-props misc/paper {:style styles/paper}))
(defn contracts-not-found-page []
[center-layout
[paper
[row
{:center "xs"
:middle "xs"
:style {:min-height "400px"}}
[:h3 "We couldn't find district0x Contribution smart contracts." [:br]
"Your MetaMask Chrome extension is most likely not pointed to Ethereum Mainnet, please check."]]]])
(defn app-bar-right-elements
  "Right side of the app bar: the active address' DNT and ETH balances plus
   the active-address selector. Renders nothing while the blockchain
   connection is in an error state.
   NOTE(review): the contracts-not-found? subscription is created here but
   never dereferenced in the render fn -- confirm whether it is still needed."
  []
  (let [eth-balance (subscribe [:district0x/active-address-balance :eth])
        dnt-balance (subscribe [:district0x/active-address-balance :dnt])
        connection-error? (subscribe [:district0x/blockchain-connection-error?])
        my-addresses (subscribe [:district0x/my-addresses])
        contracts-not-found? (subscribe [:district0x/contracts-not-found?])]
    (fn []
      (when-not @connection-error?
        [row
         {:middle "xs"
          :end "xs"}
         ;; DNT balance badge
         (when (and (seq @my-addresses)
                    @dnt-balance)
           [:h2.bolder {:style (merge styles/app-bar-balance
                                      {:margin-right 10})}
            (u/format-dnt-with-symbol @dnt-balance)])
         ;; ETH balance badge
         (when (and (seq @my-addresses)
                    @eth-balance)
           [:h2.bolder {:style (merge styles/app-bar-balance
                                      {:margin-right 20})}
            (u/format-eth-with-symbol @eth-balance)])
         [active-address-select-field
          {:select-field-props {:style styles/active-address-select-field
                                :label-style styles/active-address-select-field-label
                                :underline-style {:border-color styles/theme-blue}}
           :single-address-props {:style styles/active-address-single}}]]))))
(defn info-line
  "Single labelled line of information: the first child is rendered in the
   theme's cyan as the label, the remaining children follow it. An optional
   attrs map may be passed as the first argument."
  [props & children]
  (let [[attrs body] (parse-props-children props children)]
    [:div
     attrs
     [:b
      {:style {:color styles/theme-cyan}}
      (first body)] " "
     (reduce conj [:span] (rest body))]))
(defn admin-panel
  "Diagnostics panel for administrators. Renders nothing unless the
   :db/can-see-admin-panel? subscription is truthy. Shows the deployed
   contract/founder/adviser addresses and, once period data is loaded,
   live statistics of the current contribution period."
  []
  (let [can-see-admin-panel? (subscribe [:db/can-see-admin-panel?])
        contrib-period (subscribe [:contribution/contrib-period])
        enable-contrib-form (subscribe [:form.contribution/enable-contrib-period])
        contrib-config (subscribe [:contribution/configuration])
        contrib-period-status (subscribe [:contribution/current-contrib-period-status])
        contrib-contract-dnt-balance (subscribe [:contribution/dnt-balance])]
    (fn []
      (let [{:keys [:contrib-period/start-time :contrib-period/end-time
                    :contrib-period/soft-cap-amount :contrib-period/after-soft-cap-duration
                    :contrib-period/hard-cap-amount :contrib-period/enabled?
                    :contrib-period/stake :contrib-period/soft-cap-reached? :contrib-period/total-contributed
                    :contrib-period/hard-cap-reached? :contrib-period/contributors-count]} @contrib-period
            {:keys [:loading?]} @enable-contrib-form
            ;; NOTE(review): :contribution-address and dnt-token-address are
            ;; unnamespaced, unlike the other configuration keys -- presumably
            ;; the configuration map really carries :contribution-address and
            ;; :dnt-token-address; confirm against the subscription source.
            {:keys [:contribution/stopped? :contribution/founder1 :contribution/founder2
                    :contribution/early-sponsor :contribution/wallet :contribution/advisers
                    :contribution-address dnt-token-address :dnt-token/transfers-enabled?]} @contrib-config]
        (when @can-see-admin-panel?
          [paper
           [row-with-cols
            [col
             {:xs 12}
             [:h1
              {:style (merge styles/margin-bottom-gutter styles/text-center)}
              "Admin Panel"]]
            [col
             {:xs 12
              :style styles/margin-bottom-gutter}
             [info-line "Contribution Contract:" [etherscan-link {:address contribution-address}]]
             [info-line "DNT Token Contract:" [etherscan-link {:address dnt-token-address}]]
             [info-line "Founder 1:" [etherscan-link {:address founder1}]]
             [info-line "Founder 2:" [etherscan-link {:address founder2}]]
             [info-line "Early Sponsor:" [etherscan-link {:address early-sponsor}]]
             [info-line "Wallet:" [etherscan-link {:address wallet}]]
             ;; The last adviser in the list is labelled "Community Adviser".
             (for [[i adviser] (medley/indexed advisers)]
               [info-line
                {:key i}
                (str (when (= adviser (last advisers)) "Community ")
                     "Adviser " (inc i) ":") [etherscan-link {:address adviser}]])]
            ;; Period statistics only render once contract data has arrived.
            (when total-contributed
              [col
               {:xs 12}
               [info-line "Contribution Round:" (inc constants/current-contrib-period)]
               [info-line "Start Time:" (u/format-local-datetime start-time)]
               [info-line "End Time:" (u/format-local-datetime end-time)]
               [info-line "Soft Cap:" (u/format-eth-with-symbol soft-cap-amount)]
               [info-line "After Soft Cap Duration:" (t/in-hours (t/seconds after-soft-cap-duration)) " hours"]
               [info-line "Hard Cap:" (u/format-eth-with-symbol hard-cap-amount)]
               [info-line "Enabled?" (u/bool->yes|no enabled?)]
               [info-line "Token Distribution:" (u/format-dnt-with-symbol stake)]
               [info-line "Soft Cap Reached?" (u/bool->yes|no soft-cap-reached?)]
               [info-line "Hard Cap Reached?" (u/bool->yes|no hard-cap-reached?)]
               [info-line "Total Contributed:" (u/format-eth-with-symbol total-contributed)]
               [info-line "Contributors Count:" contributors-count]
               [info-line "Emergency stop?" (u/bool->yes|no stopped?)]
               [info-line "Contribution Contract DNT Balance:" (u/format-dnt-with-symbol @contrib-contract-dnt-balance)]
               [info-line "DNT Transfers Enabled?" (u/bool->yes|no transfers-enabled?)]])]])))))
(defn contribution-tile
  "One of the three statistics tiles (countdown / raised / distribution).
   Takes {:title :index} plus child hiccup. Border placement depends on the
   tile's index and on whether the viewport is narrow: on xs/sm widths the
   tiles stack vertically and use bottom borders, otherwise tiles 0 and 1
   get a right border as visual separators."
  []
  (let [xs-sm-width? (subscribe [:district0x/window-xs-sm-width?])]
    (fn [{:keys [:title :index]} & children]
      [col
       {:xs 12 :md 4
        :key index
        :style {:padding-left 0
                :padding-right 0}}
       [:div
        {:style (merge styles/stats-tile-title
                       (if @xs-sm-width?
                         (merge
                           styles/margin-top-gutter-less
                           styles/stats-tile-border-bottom
                           {:margin-right 0 :margin-left 0})
                         styles/stats-tile-border-bottom))}
        title]
       ;; Last tile (index 2) carries no separator border.
       (into [row
              {:middle "xs"
               :center "xs"
               :style (merge styles/stats-tile
                             (when-not (= index 2)
                               (if @xs-sm-width?
                                 styles/stats-tile-border-bottom
                                 styles/stats-tile-border-right)))}]
             children)])))
;; Heading shown above the countdown tile, keyed by the contribution-period
;; status keyword.
(def contrib-period-status->countdown-title
  {:contrib-period-status/ended "Ended"
   :contrib-period-status/running "Ends in"
   :contrib-period-status/not-started "Starts in"})
;; Singular display name for each countdown time unit (pluralized at the
;; call site via u/pluralize).
(def unit->name
  {:seconds "second"
   :minutes "minute"
   :hours "hour"
   :days "day"})
(defn countdown-time-item
  "One segment of the countdown display: the numeric value (zero-padded to
   two digits) followed by its pluralized unit name."
  [{amount :value time-unit :unit}]
  [:span
   [:h1
    {:style (merge styles/contrib-countdown-value
                   styles/text-left)}
    (if (< amount 10)
      (str 0 amount)
      amount)]
   [:span
    {:style styles/contrib-countdown-unit}
    " " (u/pluralize (unit->name time-unit) amount)]])
(defn- countdown
  "Renders the time remaining between :from-time and :to-time as two rows
   (days/hours and minutes/seconds) followed by the formatted target
   datetime. Renders nothing until both times are available."
  []
  (fn [{:keys [:from-time :to-time]}]
    (when (and from-time to-time)
      (let [{:keys [:days :hours :minutes :seconds]} (u/time-remaining from-time to-time)]
        [:div
         {:style {:margin-top "-10px"}}
         [:div
          {:style styles/no-wrap}
          [countdown-time-item
           {:value days
            :unit :days}]
          " "
          [countdown-time-item
           {:value hours
            :unit :hours}]]
         [:div
          {:style styles/no-wrap}
          [countdown-time-item
           {:value minutes
            :unit :minutes}]
          " "
          [countdown-time-item
           {:value seconds
            :unit :seconds}]]
         [:div
          {:style {:margin-top 5}}
          (u/format-local-datetime to-time)]]))))
(defn contribution-stats-tiles
  "Header row of the contribution page: the period title plus three tiles --
   a status-dependent countdown, the total raised, and the token
   distribution. Each tile shows a spinner until its data is loaded."
  []
  (let [xs-width? (subscribe [:district0x/window-xs-width?])
        contrib-period (subscribe [:contribution/contrib-period])
        contrib-period-status (subscribe [:contribution/current-contrib-period-status])
        now (subscribe [:db/now])]
    (fn []
      (let [{:keys [:contrib-period/loading? :contrib-period/start-time :contrib-period/end-time
                    :contrib-period/total-contributed :contrib-period/stake
                    :contrib-period/contributors-count]} @contrib-period]
        [row-with-cols
         {:center "xs"}
         [col
          {:xs 12}
          [:h1 {:style (merge styles/margin-bottom-gutter-more
                              styles/text-center)}
           "Contribution Period " (constants/contrib-period->name constants/current-contrib-period)]]
         ;; Tile 0: countdown to start / end, or the end date once over.
         [contribution-tile
          {:title (contrib-period-status->countdown-title @contrib-period-status)
           :index 0}
          (if (and start-time end-time)
            (condp = @contrib-period-status
              :contrib-period-status/not-started
              [countdown
               {:from-time @now
                :to-time start-time}]
              :contrib-period-status/running
              [countdown
               {:from-time @now
                :to-time end-time}]
              :contrib-period-status/ended
              [:h1 (u/format-local-date end-time)])
            [ui/circular-progress])]
         ;; Tile 1: amount raised and participant count.
         [contribution-tile
          {:title "Raised"
           :index 1}
          (if total-contributed
            [row
             {:middle "xs"
              :center "xs"}
             [:div
              [:h1
               {:style styles/stats-tile-amount}
               (u/format-metric total-contributed)]
              [:span {:style {:font-size "1.2em"}} " ETH"]]
             [:h3
              {:style styles/stats-tile-amount-subtitle}
              contributors-count (u/pluralize " participant" contributors-count)]]
            [ui/circular-progress])]
         ;; Tile 2: DNT stake for this period, shown in millions.
         [contribution-tile
          {:title "Token Distribution"
           :index 2}
          (if stake
            [:div
             [:h1
              {:style styles/stats-tile-amount}
              (/ stake 1000000) " Mil"]
             [:h3
              {:style styles/stats-tile-amount-subtitle}
              "DNT tokens"]]
            [ui/circular-progress])]]))))
(defn contribution-soft-cap-progress
  "Progress bar toward the current cap. Before the soft cap is reached it
   tracks progress against the soft cap (with an explanatory note about the
   closing window); afterwards it tracks progress against the hard cap.
   Renders nothing until the relevant period figures are loaded."
  []
  (let [current-contrib-period (subscribe [:contribution/contrib-period])]
    (fn []
      (let [{:keys [:contrib-period/total-contributed :contrib-period/soft-cap-amount
                    :contrib-period/after-soft-cap-duration :contrib-period/hard-cap-amount]} @current-contrib-period]
        (when (and total-contributed soft-cap-amount after-soft-cap-duration)
          [row
           {:center "xs"
            :style styles/margin-top-gutter-more}
           (if (< total-contributed soft-cap-amount)
             ;; Soft cap not yet reached: bar runs 0..soft-cap.
             [:div
              {:style styles/full-width}
              [ui/linear-progress
               {:mode "determinate"
                :style styles/cap-progress
                :color styles/theme-orange
                :max soft-cap-amount
                :value total-contributed}]
              [:h3
               {:style styles/full-width}
               "Soft Cap " (js/parseInt total-contributed) "/" (or soft-cap-amount 0) " ETH"]
              [:div
               {:style (merge styles/full-width
                              styles/fade-white-text)}
               "After soft cap is reached, the contribution period will be closed in " (t/in-hours (t/seconds after-soft-cap-duration))
               " hours" [:br]
               "In case of reaching " (or hard-cap-amount 0) " ETH hard cap, contribution period closes immediately"]]
             ;; Soft cap reached: bar runs 0..hard-cap.
             [:div
              {:style styles/full-width}
              [ui/linear-progress
               {:mode "determinate"
                :style styles/cap-progress
                :color styles/theme-orange
                :max hard-cap-amount
                :value total-contributed}]
              [:h3
               {:style styles/full-width}
               "Hard Cap " (js/parseInt total-contributed) "/" (or hard-cap-amount 0) " ETH"]
              [:div
               {:style (merge styles/full-width
                              styles/fade-white-text)}
               "After hard cap is reached, no more contributions will be accepted"]])])))))
(defn- external-link
  "Anchor opening URL in a new tab, styled in the theme's green.
   Note the argument order: link content first, target URL second."
  [content url]
  [:a
   {:href url
    :target :_blank
    :style {:color styles/theme-green}}
   content])
(defn contribution-contribute-section
  "Bottom section of the contribution page. Three mutually exclusive states:
   1. visitor is from a disallowed country  -> static refusal notice;
   2. all confirmations submitted           -> the contribution form with
      gas-price warnings and direct-send instructions;
   3. otherwise                             -> the pre-contribution
      confirmation checkboxes that gate the form.
   Always followed by the distribution note, terms link and footer."
  []
  (let [contribution-address (subscribe [:contribution-contract-address])
        contribute-form (subscribe [:form.contribution/contribute])
        can-use-form? (subscribe [:district0x/can-submit-into-blockchain?])
        contrib-period-status (subscribe [:contribution/current-contrib-period-status])
        current-contrib-period (subscribe [:contribution/contrib-period])
        contrib-config (subscribe [:contribution/configuration])
        confirmed-not-us-citizen? (subscribe [:confirmed-not-us-citizen?])
        confirmed-terms? (subscribe [:confirmed-terms?])
        confirmed-gas-price? (subscribe [:confirmed-gas-price?])
        confirmed-compensation? (subscribe [:confirmed-compensation?])
        confirmations-submitted? (subscribe [:confirmations-submitted?])
        disallowed-country? (subscribe [:disallowed-country?])
        ]
    (fn []
      (let [{:keys [:contrib-period/stake :contrib-period/enabled?]} @current-contrib-period
            {:keys [:data :loading?]} @contribute-form
            {:keys [:contribution/amount]} data
            {:keys [:contribution/stopped? :contribution/max-gas-price]} @contrib-config
            ;; Client-side validation of the amount field; nil when valid.
            error-text (cond
                         (not (u/non-neg-ether-value? amount)) "This is not valid Ether value"
                         (< (u/parse-float amount) constants/min-contrib-amount) (str "Minimum contribution amount is "
                                                                                     constants/min-contrib-amount
                                                                                     " ETH"))]
        [row
         {:center "xs"
          :style styles/margin-top-gutter-more}
         (if @disallowed-country?
           [:div "We have detected that you are visiting this page from the United States or another unpermitted country. Please note: US citizens and residents are not permitted to participate in the district0x Contribution Period."]
           (if @confirmations-submitted?
             ;; State 2: confirmations done -- show the contribution form.
             [:div
              [:h2
               {:style (merge styles/full-width
                              styles/margin-bottom-gutter-less)}
               "How to Contribute"]
              [:div
               {:style (merge
                         {:color styles/theme-orange
                          :font-size "1em"}
                         styles/margin-bottom-gutter-less)}
               [:b
                "Important: Maximum allowed gas price is " (web3/from-wei max-gas-price :gwei) " Gwei "
                "(" max-gas-price " wei)." [:br]
                "Recommended gas limit is 200000" [:br]
                ;; Extra status line when the period can't accept funds.
                (if stopped?
                  "Contribution was temporarily paused due to emergency"
                  ({:contrib-period-status/not-started "Contribution period has not started yet"
                    :contrib-period-status/ended "Contribution period has been finished"}
                    @contrib-period-status))]]
              [:div
               {:style styles/full-width}
               "You can send Ether directly to contribution smart contract at"]
              [:h1
               {:style (merge styles/full-width
                              styles/margin-top-gutter-less
                              {:color styles/theme-green
                               :font-family "filson-soft, sans-serif"})}
               "district.eth" [:br]
               [:span {:style {:font-size "0.7em"}} @contribution-address]]
              [:div
               {:style (merge styles/full-width
                              styles/margin-top-gutter-less)}
               "or you can use following form by using " [external-link "MetaMask" "/"] ", "
               [external-link "Mist" ""] ", or "
               [external-link "Parity" "/"]]
              [row
               {:style styles/full-width
                :middle "xs"
                :center "xs"}
               ;; Form is disabled once the period ended, is not enabled,
               ;; or the emergency stop is active.
               [ui/text-field
                {:floating-label-fixed true
                 :floating-label-text "Amount in Ether"
                 :default-value amount
                 :style (merge {:margin-left styles/desktop-gutter-mini
                                :margin-right styles/desktop-gutter-mini}
                               styles/margin-bottom-gutter-less)
                 :error-text error-text
                 :error-style styles/text-left
                 :disabled (or (= :contrib-period-status/ended @contrib-period-status)
                               (not enabled?)
                               stopped?)
                 :on-change #(dispatch [:district0x.form/set-value :form.contribution/contribute :default :contribution/amount %2])}]
               (if-not loading?
                 [ui/raised-button
                  {:primary true
                   :label "Send"
                   :disabled (or (not @can-use-form?)
                                 (boolean error-text)
                                 (= :contrib-period-status/ended @contrib-period-status)
                                 (not enabled?)
                                 stopped?)
                   :style {:margin-left styles/desktop-gutter-mini
                           :margin-right styles/desktop-gutter-mini
                           :margin-top 20}
                   :on-touch-tap #(dispatch [:contribution/contribute data])}]
                 ;; Spinner replaces the button while the tx is in flight.
                 [:div
                  {:style {:margin-top 20
                           :margin-left styles/desktop-gutter-mini
                           :margin-right styles/desktop-gutter-mini
                           :width 88}}
                  [ui/circular-progress
                   {:size 30
                    :thickness 2}]])]
              [:div
               {:style (merge styles/full-width)}
               "For detailed instructions watch our "
               [external-link "tutorials on Youtube" ""]]]
             ;; State 3: gate the form behind the confirmation checkboxes.
             [:div
              {:style styles/full-width}
              [:h2
               {:style (merge styles/full-width
                              styles/margin-bottom-gutter)}
               "Before You Contribute"]
              [row
               {:center "xs"
                :style styles/full-width}
               [:div
                {:style styles/text-left}
                [ui/checkbox
                 {:label "I confirm that I have read and agree to the Contribution Terms."
                  :checked @confirmed-terms?
                  :on-check #(dispatch [:set-confirmation :confirmed-terms? %2])}]
                [ui/checkbox
                 {:label "I confirm that I am not a citizen or resident of the United States or other unpermitted country."
                  :checked @confirmed-not-us-citizen?
                  :on-check #(dispatch [:set-confirmation :confirmed-not-us-citizen? %2])}]
                [ui/checkbox
                 {:label (r/as-element [:span "I understand "
                                        [:b "the maximum gas price when contributing is 50 Gwei"]
                                        " and any transaction sent with a higher gas price will be rejected."])
                  :checked @confirmed-gas-price?
                  :on-check #(dispatch [:set-confirmation :confirmed-gas-price? %2])}]
                [ui/checkbox
                 {:label "I understand that it may take up to 7 days from the time the contribution period ends to receive DNT."
                  :checked @confirmed-compensation?
                  :on-check #(dispatch [:set-confirmation :confirmed-compensation? %2])}]]
               [:div
                {:style styles/full-width}
                ;; All four confirmations must be checked before continuing.
                [ui/raised-button
                 {:primary true
                  :label "Continue"
                  :style styles/margin-top-gutter-less
                  :disabled (or (not @confirmed-terms?)
                                (not @confirmed-not-us-citizen?)
                                (not @confirmed-gas-price?)
                                (not @confirmed-compensation?))
                  :on-touch-tap #(dispatch [:set-confirmation :confirmations-submitted? true])}]]]]))
         (when stake
           [:div
            [:div
             {:style styles/distribution-note}
             "Please note: " (u/format-eth stake) " DNT tokens will be divided and distributed amongst all
             participants " [:b "after the contribution period ends"] ". Each participant will receive an allocation
             proportional to the amount they contributed, relative to the total collected."]
            [:div
             {:style (merge styles/full-width
                            styles/margin-top-gutter)}
             [:a {:href "-terms.pdf"
                  :target :_blank
                  :style styles/contrib-terms-link}
              "Contribution Terms"]]])
         [:small
          {:style (merge styles/full-width
                         styles/fade-white-text
                         styles/margin-top-gutter
                         styles/margin-bottom-gutter-mini)}
          "Copyright © 2017 district0x"]
         [:div
          {:style styles/full-width}
          [:img {:src "./images/district0x-logo-title-white.svg"
                 :style {:height 15}}]]]))))
(defn contribution-panel
  "Main contribution page: stats tiles, cap-progress bar and the
   contribute/confirmations section, stacked inside a paper card."
  []
  [paper
   [contribution-stats-tiles]
   [contribution-soft-cap-progress]
   [contribution-contribute-section]])
(defn logo
  "App-bar logo linking back to the site root."
  []
  [:a
   {:href ""}
   [:img
    {:style styles/logo
     :src "./images/district0x-logo.svg"}]])
(defn main-panel
  "Root component: app bar, decorative background blobs and either the
   contribution page (with admin panel) or the contracts-not-found notice.
   Note: the previously-bound :district0x/blockchain-connection-error? and
   :district0x/snackbar subscriptions were never dereferenced here and have
   been removed; misc/main-panel handles those concerns itself."
  []
  (let [contracts-not-found? (subscribe [:district0x/contracts-not-found?])
        xs-width? (subscribe [:district0x/window-xs-width?])]
    (fn []
      [misc/main-panel
       {:mui-theme styles/mui-theme}
       [:div
        {:style {:padding-bottom 20
                 :overflow :hidden
                 :position :relative
                 :min-height "100%"}}
        ;; Decorative background blobs, absolutely positioned.
        [:img {:src "./images/green-blob2.svg"
               :style styles/blob4}]
        [:img {:src "./images/cyan-blob.svg"
               :style styles/blob1}]
        [:img {:src "./images/green-blob1.svg"
               :style styles/blob2}]
        [:img {:src "./images/green-blobs.svg"
               :style styles/blob3}]
        [ui/app-bar
         {:show-menu-icon-button false
          :style styles/app-bar
          :title (r/as-element [logo])
          :icon-element-right (r/as-element [app-bar-right-elements])}]
        ;; Narrow viewports get a smaller content padding.
        [:div {:style (merge styles/content-wrap
                             (when @xs-width?
                               (styles/padding-all styles/desktop-gutter-mini)))}
         (if @contracts-not-found?
           [contracts-not-found-page]
           [center-layout
            [contribution-panel]
            [admin-panel]])]]])))
39c0b4ea55245464692227d23474389056d6122a874dccde9f4b66ea272d5254 | cardmagic/lucash | package.scm | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
; Structures 'n' packages.
; --------------------
; Structures
;
; A structure is a map from names to binding records, determined by an
; interface (a set of names) and a package (a map from names to binding
; records).
;
; The interface is specified as a thunk. This removes dependencies on the
; order in which structures are defined. Also, if the interface is redefined,
; re-evaluating the thunk produces the new, correct interface (see
; env/pedit.scm).
;
; Clients are packages that import the structure's bindings.
; Record type for structures: a named view onto a package, restricted by an
; interface. INTERFACE is computed lazily from INTERFACE-THUNK; CLIENTS is a
; population of the packages that open this structure.
(define-record-type structure :structure
  (really-make-structure package interface-thunk interface clients name)
  structure?
  (interface-thunk structure-interface-thunk)
  (interface structure-interface-really set-structure-interface!)
  (package structure-package)
  (clients structure-clients)
  (name structure-name set-structure-name!))
; Print structures as (structure <package-uid> <name>).
(define-record-discloser :structure
  (lambda (struct)
    (list 'structure
          (package-uid (structure-package struct))
          (structure-name struct))))
; Return STRUCTURE's interface, forcing the interface thunk on first use.
(define (structure-interface structure)
  (let ((int (structure-interface-really structure)))
    (if int
        int
        (begin (initialize-structure! structure)
               (structure-interface-really structure)))))
; Force STRUCTURE's interface thunk, record the result, and register the
; structure as a client of the interface; error out on a non-interface value.
(define (initialize-structure! structure)
  (let ((int ((structure-interface-thunk structure))))
    (cond ((interface? int)
           (set-structure-interface! structure int)
           (note-reference-to-interface! int structure))
          (else
           (call-error "invalid interface" initialize-structure! structure)))))
; Make a structure over PACKAGE and the interface returned by INT-THUNK.
; INT-THUNK may also be given as an interface value directly, in which case
; it is wrapped in a thunk. An optional name may follow.
(define (make-structure package int-thunk . name-option)
  (if (not (package? package))
      (call-error "invalid package" make-structure package int-thunk))
  (let* ((thunk (if (procedure? int-thunk)
                    int-thunk
                    (lambda () int-thunk)))
         (new-structure (really-make-structure package
                                               thunk
                                               #f
                                               (make-population)
                                               #f)))
    (if (pair? name-option)
        (note-structure-name! new-structure (car name-option)))
    (add-to-population! new-structure (package-clients package))
    new-structure))
; Make a structure by using COMMANDS to modify the STRUCTURE's interface.
; Build a new structure over the same package whose interface is STRUCTURE's
; interface modified by COMMANDS; unstable sources propagate their clients.
(define (make-modified-structure structure commands)
  (let* ((thunk (lambda ()
                  (make-modified-interface (structure-interface structure)
                                           commands)))
         (new-struct (make-structure (structure-package structure) thunk)))
    (if (structure-unstable? structure)
        (add-to-population! new-struct (structure-clients structure)))
    new-struct))
; STRUCT has name NAME. NAME can then also be used to refer to STRUCT's
; package.
(define (note-structure-name! struct name)
  (cond ((and name (not (structure-name struct)))
         (set-structure-name! struct name)
         (note-package-name! (structure-package struct) name))))
; A structure is unstable if its package is. An unstable package is one
; where new code may be added, possibly modifying the exported bindings.
; (Delegates to the underlying package's stability flag.)
(define (structure-unstable? struct)
  (package-unstable? (structure-package struct)))
; Map PROC over the [name type binding] triples provided by STRUCT.
(define (for-each-export proc struct)
  (let ((int (structure-interface struct)))
    (for-each-declaration
     (lambda (name base-name want-type)
       (let ((binding (real-structure-lookup struct base-name want-type #t)))
	 (proc name
	       ;; When the interface leaves the type undeclared, fall back to
	       ;; the binding's own type (unwrapping variable types).
	       (if (and (binding? binding)
			(eq? want-type undeclared-type))
		   (let ((type (binding-type binding)))
		     (if (variable-type? type)
			 (variable-value-type type)
			 type))
		   want-type)
	       binding)))
     int)))
; --------------------
; Packages
; Record type for packages: a mutable map from names to binding records.
; NOTE: the argument order of REALLY-MAKE-PACKAGE fixes the field order that
; MAKE-PACKAGE below must supply.
; (Extraction had garbled the "For EVAL and LOAD" comment into bare tokens
; inside the form; the comment marker is restored here.)
(define-record-type package :package
  (really-make-package uid
		       opens-thunk opens accesses-thunk
		       definitions
		       undefineds
		       undefined-but-assigneds
		       get-location
		       cached
		       clients
		       unstable?
		       integrate?
		       file-name clauses loaded?)
  package?
  (uid package-uid)
  (opens package-opens-really set-package-opens!)
  (definitions package-definitions)
  (unstable? package-unstable?)
  (integrate? package-integrate? set-package-integrate?!)
  ;; For EVAL and LOAD (which can only be done in unstable packages)
  (get-location package-get-location set-package-get-location!)
  (file-name package-file-name)
  (clauses package-clauses)
  (loaded? package-loaded? set-package-loaded?!)
  (env package->environment set-package->environment!)
  ;; For package mutation
  (opens-thunk package-opens-thunk set-package-opens-thunk!)
  (accesses-thunk package-accesses-thunk)
  (undefineds package-real-undefineds set-package-undefineds!)
  (undefined-but-assigneds
     package-real-undefined-but-assigneds
     set-package-undefined-but-assigneds!)
  (clients package-clients)
  (cached package-cached))
; Print packages as (package <uid>) or (package <uid> <name>).
(define-record-discloser :package
  (lambda (pkg)
    (let ((name (package-name pkg)))
      (if name
          (list 'package (package-uid pkg) name)
          (list 'package (package-uid pkg))))))
; Create a new package. UID, if supplied, reuses an identifier (bumping the
; uid counter past it); otherwise a fresh uid is drawn. The argument order
; to REALLY-MAKE-PACKAGE must match the field order of :PACKAGE above.
; (Extraction corruption repaired: the `undefineds` argument is the literal
; #F, and the stability argument is the UNSTABLE? parameter -- the garbled
; text had leaked comment words into the call.)
(define (make-package opens-thunk accesses-thunk unstable? tower file clauses
		      uid name)
  (let ((new (really-make-package
	      (if uid
		  (begin (if (>= uid *package-uid*)
			     (set! *package-uid* (+ uid 1)))
			 uid)
		  (new-package-uid))
	      opens-thunk
	      #f			;opens
	      accesses-thunk		;thunk returning alist
	      (make-name-table)		;definitions
	      #f			;undefineds
	      #f			;undefined-but-assigneds
	      (fluid $get-location)	;procedure for making new locations
	      (make-name-table)		;bindings cached in templates
	      (make-population)		;clients (structures)
	      unstable?			;unstable (suitable for EVAL)?
	      #t			;integrate?
	      file			;file containing DEFINE-STRUCTURE form
	      clauses			;misc. DEFINE-STRUCTURE clauses
	      #f)))			;loaded?
    (note-package-name! new name)
    (set-package->environment! new (really-package->environment new tower))
    new))
; TOWER is a promise that is expected to deliver, when forced, a
; pair (eval . env).
; Build the compiler environment for PACKAGE: lookups go through
; PACKAGE-LOOKUP, and definitions are installed with PACKAGE-DEFINE!.
(define (really-package->environment package tower)
  (make-compiler-env (lambda (name)
		       (package-lookup package name))
		     (lambda (name type . maybe-static)
		       (package-define! package
					name
					type
					#f
					(if (null? maybe-static)
					    #f
					    (car maybe-static))))
		     tower
		     package))	; interim hack
; Two tables that we add lazily.
; Build an accessor that returns the package's table from SLOT-REF, creating
; and installing a fresh name table on first access.
(define (lazy-table-accessor slot-ref slot-set!)
  (lambda (package)
    (let ((existing (slot-ref package)))
      (if existing
          existing
          (let ((fresh (make-name-table)))
            (slot-set! package fresh)
            fresh)))))
; Lazily-created tables of names referenced (resp. assigned) before being
; defined in the package.
(define package-undefineds
  (lazy-table-accessor package-real-undefineds
		       set-package-undefineds!))

(define package-undefined-but-assigneds
  (lazy-table-accessor package-real-undefined-but-assigneds
		       set-package-undefined-but-assigneds!))
; Unique id's
; Allocate the next package uid, advancing the global counter.
(define (new-package-uid)
  (let ((id *package-uid*))
    (set! *package-uid* (+ 1 *package-uid*))
    id))
; Counter backing NEW-PACKAGE-UID (also bumped by MAKE-PACKAGE when an
; explicit uid is supplied).
(define *package-uid* 0)
; Package names
; Side table mapping package uid -> name.
(define package-name-table (make-table))
; Look up PACKAGE's recorded name; #f if none was ever noted.
(define (package-name package)
  (table-ref package-name-table (package-uid package)))
; Record NAME for PACKAGE unless a name has already been noted; #f names are
; ignored.
(define (note-package-name! package name)
  (let ((id (package-uid package)))
    (if (and name
             (not (table-ref package-name-table id)))
        (table-set! package-name-table id name))))
; The list of structures PACKAGE opens, forcing initialization if needed.
(define (package-opens package)
  (initialize-package-if-necessary! package)
  (package-opens-really package))
; A package with a #f opens slot has not been initialized yet.
(define (initialize-package-if-necessary! package)
  (if (not (package-opens-really package))
      (initialize-package! package)))
(define (package-accesses package)		;=> alist of (name . structure)
  ((package-accesses-thunk package)))
; --------------------
; A simple package has no ACCESSes or other far-out clauses.
; A simple package has no ACCESSes or other far-out clauses.
; (Extraction corruption repaired: the uid argument to MAKE-PACKAGE is the
; literal #F -- a fresh uid is always allocated -- where the garbled text
; had a bare `uid` identifier.)
(define (make-simple-package opens unstable? tower . name-option)
  (if (not (list? opens))
      (error "invalid package opens list" opens))
  (let ((package (make-package (lambda () opens)
			       (lambda () '()) ;accesses-thunk
			       unstable?
			       tower
			       ""	;file containing DEFINE-STRUCTURE form
			       '()	;clauses
			       #f	;uid - always allocate a fresh one
			       (if (null? name-option)
				   #f
				   (car name-option)))))
    (set-package-loaded?! package #t)
    package))
; --------------------
; The definitions table
; Each entry in the package-definitions table is a binding.
; Fetch NAME's binding record from PACKAGE's own definitions, or #f.
(define (package-definition package name)
  (initialize-package-if-necessary! package)
  (let ((binding (table-ref (package-definitions package) name)))
    (cond (binding (maybe-fix-place! binding))
          (else #f))))
; Define NAME in PACKAGE with TYPE, PLACE (a location, allocated if #f) and
; STATIC info. An existing binding is clobbered in place so that cached
; references stay valid. Returns the location.
(define (package-define! package name type place static)
  (let ((probe (table-ref (package-definitions package) name)))
    (if probe
	(begin
	  (clobber-binding! probe type place static)
	  (binding-place (maybe-fix-place! probe)))
	(let ((place (or place (get-new-location package name))))
	  (table-set! (package-definitions package)
		      name
		      (make-binding type place static))
	  place))))
; Attach STATIC information to NAME's existing binding, keeping its type and
; place; it is an internal error for NAME to be unbound.
(define (package-add-static! package name static)
  (let ((binding (table-ref (package-definitions package) name)))
    (if (not binding)
        (error "internal error: name not bound" package name)
        (clobber-binding! binding
                          (binding-type binding)
                          (binding-place binding)
                          static))))
; Replace the type of NAME's existing binding with TYPE, keeping its place
; and static info; it is an internal error for NAME to be unbound.
(define (package-refine-type! package name type)
  (let ((binding (table-ref (package-definitions package) name)))
    (if (not binding)
        (error "internal error: name not bound" package name)
        (clobber-binding! binding
                          type
                          (binding-place binding)
                          (binding-static binding)))))
; --------------------
; Lookup
; Look up a name in a package. Returns a binding if bound or #F if not.
; Entry point: look NAME up in PACKAGE, honoring its integrate? setting.
(define (package-lookup package name)
  (really-package-lookup package name (package-integrate? package)))
; The real lookup: own definitions first (stripped of integration info when
; INTEGRATE? is false), then generated names via their generation
; environment, finally the opened structures.
(define (really-package-lookup package name integrate?)
  (let ((probe (package-definition package name)))
    (cond (probe
	   (if integrate?
	       probe
	       (forget-integration probe)))
	  ((generated? name)
	   ;; Access path is (generated-parent-name name)
	   (generic-lookup (generated-env name)
			   (generated-name name)))
	  (else
	   (search-opens (package-opens-really package) name integrate?)))))
; Look for NAME in structures OPENS.
; Look for NAME in structures OPENS, returning the first hit or #f.
(define (search-opens opens name integrate?)
  (cond ((null? opens) #f)
        ((structure-lookup (car opens) name integrate?))
        (else (search-opens (cdr opens) name integrate?))))
; Look NAME up through STRUCT's interface; #f when the interface does not
; export it. INTERFACE-REF returns the underlying name and declared type.
(define (structure-lookup struct name integrate?)
  (call-with-values
   (lambda ()
     (interface-ref (structure-interface struct) name))
   (lambda (base-name type)
     (if type
	 (real-structure-lookup struct base-name type integrate?)
	 #f))))
; Fetch NAME's binding from STRUCT's package and impose the interface's
; declared TYPE on it.
(define (real-structure-lookup struct name type integrate?)
  (impose-type type
	       (really-package-lookup (structure-package struct)
				      name
				      integrate?)
	       integrate?))
; Look NAME up in ENV, which may be a package, a structure (where a miss is
; an error -- the name must be exported) or a raw lookup procedure.
(define (generic-lookup env name)
  (cond ((package? env)
	 (package-lookup env name))
	((structure? env)
	 (or (structure-lookup env
			       name
			       (package-integrate? (structure-package env)))
	     (call-error "not exported" generic-lookup env name)))
	((procedure? env)
	 (lookup env name))
	(else
	 (error "invalid environment" env name))))
; --------------------
; Package initialization
; Force the opens thunk, register PACKAGE as a client of every unstable
; opened structure, then install a STRUCTURE-typed definition for each
; ACCESS clause.
(define (initialize-package! package)
  (let ((opens ((package-opens-thunk package))))
    (set-package-opens! package opens)
    (for-each (lambda (struct)
		(if (structure-unstable? struct)
		    (add-to-population! package (structure-clients struct))))
	      opens))
  (for-each (lambda (name+struct)
	      ;; Cf. CLASSIFY method for STRUCTURE-REF
	      (package-define! package
			       (car name+struct)
			       structure-type
			       #f
			       (cdr name+struct)))
	    (package-accesses package)))
; (define (package->environment? env)
; (eq? env (package->environment
; (extract-package-from-environment env))))
; --------------------
; For implementation of INTEGRATE-ALL-PRIMITIVES! in scanner, etc.
; Apply PROC to every (name binding) pair defined directly in PACKAGE.
(define (for-each-definition proc package)
  (table-walk (lambda (name binding)
		(proc name (maybe-fix-place! binding)))
	      (package-definitions package)))
; --------------------
; Locations
; Allocate a location for NAME using PACKAGE's location-maker.
(define (get-new-location package name)
  ((package-get-location package) package name))
; Default new-location method for new packages
; Default new-location method: draw the next location uid, record the
; (symbol . package-uid) pair for debugging, and return an undefined
; location carrying that uid.
(define (make-new-location package name)
  (let ((uid *location-uid*))
    (set! *location-uid* (+ *location-uid* 1))
    (table-set! location-info-table uid
		(make-immutable!
		 (cons (name->symbol name) (package-uid package))))
    (make-undefined-location uid)))
; Fluid holding the location-maker captured by each new package.
(define $get-location (make-fluid make-new-location))
; 1510 in initial system as of 1/22/94
; Maps location uid -> (symbol . package-uid), for debugging/disclosure.
(define location-info-table (make-table))
; Drop the debugging name table to reclaim space (e.g. before image dumps).
(define (flush-location-names)
  (set! location-info-table (make-table))
  ;; (set! package-name-table (make-table))	;hmm, not much of a space saver
  )
; (put 'package-define! 'scheme-indent-hook 2)
| null | https://raw.githubusercontent.com/cardmagic/lucash/0452d410430d12140c14948f7f583624f819cdad/reference/scsh-0.6.6/scheme/bcomp/package.scm | scheme | Structures 'n' packages.
--------------------
Structures
A structure is a map from names to binding records, determined by an
interface (a set of names) and a package (a map from names to binding
records).
The interface is specified as a thunk. This removes dependencies on the
order in which structures are defined. Also, if the interface is redefined,
re-evaluating the thunk produces the new, correct interface (see
env/pedit.scm).
Clients are packages that import the structure's bindings.
Get the actual interface, calling the thunk if necessary.
Make a structure over PACKAGE and the interface returned by INT-THUNK.
STRUCT has name NAME. NAME can then also be used to refer to STRUCT's
package.
where new code may be added, possibly modifying the exported bindings.
Map PROC down the the [name type binding] triples provided by STRUCT.
--------------------
Packages
For package mutation
opens
thunk returning alist
definitions
undefined-but-assigned
procedure for making new locations
bindings cached in templates
structures
integrate?
file containing DEFINE-STRUCTURE form
misc. DEFINE-STRUCTURE clauses
loaded?
pair (eval . env).
interim hack
Unique id's
unique identifier
Package names
=> alist
--------------------
A simple package has no ACCESSes or other far-out clauses.
accesses-thunk
file containing DEFINE-STRUCTURE form
clauses
--------------------
The definitions table
Each entry in the package-definitions table is a binding.
--------------------
Lookup
Look up a name in a package. Returns a binding if bound or #F if not.
Access path is (generated-parent-name name)
Look for NAME in structures OPENS.
--------------------
Package initialization
Cf. CLASSIFY method for STRUCTURE-REF
(define (package->environment? env)
(eq? env (package->environment
(extract-package-from-environment env))))
--------------------
For implementation of INTEGRATE-ALL-PRIMITIVES! in scanner, etc.
--------------------
Locations
Default new-location method for new packages
(set! package-name-table (make-table)) ;hmm, not much of a space saver | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
(define-record-type structure :structure
(really-make-structure package interface-thunk interface clients name)
structure?
(interface-thunk structure-interface-thunk)
(interface structure-interface-really set-structure-interface!)
(package structure-package)
(clients structure-clients)
(name structure-name set-structure-name!))
(define-record-discloser :structure
(lambda (structure)
(list 'structure
(package-uid (structure-package structure))
(structure-name structure))))
(define (structure-interface structure)
(or (structure-interface-really structure)
(begin (initialize-structure! structure)
(structure-interface-really structure))))
(define (initialize-structure! structure)
(let ((int ((structure-interface-thunk structure))))
(if (interface? int)
(begin (set-structure-interface! structure int)
(note-reference-to-interface! int structure))
(call-error "invalid interface" initialize-structure! structure))))
;; Construct a new structure viewing PACKAGE.  INT-THUNK may be either a
;; thunk or an interface value (a non-procedure is wrapped in a thunk);
;; the interface itself is only computed on demand.  An optional trailing
;; argument names the structure.  The new structure is added to PACKAGE's
;; client population.
(define (make-structure package int-thunk . name-option)
  (if (not (package? package))
      (call-error "invalid package" make-structure package int-thunk))
  (let ((struct (really-make-structure package
                                       (if (procedure? int-thunk)
                                           int-thunk
                                           (lambda () int-thunk))
                                       #f               ; interface: lazily computed
                                       (make-population) ; clients
                                       #f)))            ; name: set just below
    (if (not (null? name-option))
        (note-structure-name! struct (car name-option)))
    (add-to-population! struct (package-clients package))
    struct))
;; Make a structure by using COMMANDS to modify the STRUCTURE's interface.
;; If STRUCTURE is unstable (its package may still change), the new
;; structure is also registered as one of STRUCTURE's clients.
(define (make-modified-structure structure commands)
  (let ((new-struct (make-structure (structure-package structure)
                                    (lambda ()
                                      (make-modified-interface
                                       (structure-interface structure)
                                       commands)))))
    (if (structure-unstable? structure)
        (add-to-population! new-struct (structure-clients structure)))
    new-struct))
;; Give STRUCT the name NAME if it has none yet, and propagate the name
;; to the underlying package.  A #f NAME or an already-named structure
;; is left untouched.
(define (note-structure-name! struct name)
  (cond ((and name (not (structure-name struct)))
         (set-structure-name! struct name)
         (note-package-name! (structure-package struct) name))))
;; A structure is unstable if its package is.  (The original comment's
;; definition of "unstable package" was truncated during extraction; it
;; appears to mean a package suitable for interactive EVAL/LOAD -- TODO
;; confirm against upstream Scheme 48 sources.)
(define (structure-unstable? struct)
  (package-unstable? (structure-package struct)))
;; Call (PROC name type binding) for each name exported by STRUCT.
;; The binding is resolved in STRUCT's package.  When the interface
;; leaves the export's type undeclared, the type recorded in the binding
;; is reported instead, unwrapping variable types to their value type.
(define (for-each-export proc struct)
  (let ((int (structure-interface struct)))
    (for-each-declaration
     (lambda (name base-name want-type)
       (let ((binding (real-structure-lookup struct base-name want-type #t)))
         (proc name
               (if (and (binding? binding)
                        (eq? want-type undeclared-type))
                   ;; no declared type: fall back to the binding's type
                   (let ((type (binding-type binding)))
                     (if (variable-type? type)
                         (variable-value-type type)
                         type))
                   want-type)
               binding)))
     int)))
;; Record type for packages: a mutable environment of definitions plus
;; the structures it opens and accesses.  Note that the ENV field has no
;; constructor argument; it is filled in later via
;; SET-PACKAGE->ENVIRONMENT! (see MAKE-PACKAGE).
(define-record-type package :package
  (really-make-package uid
                       opens-thunk opens accesses-thunk
                       definitions
                       undefineds
                       undefined-but-assigneds
                       get-location
                       cached
                       clients
                       unstable?
                       integrate?
                       file-name clauses loaded?)
  package?
  (uid package-uid)
  (opens package-opens-really set-package-opens!)
  (definitions package-definitions)
  (unstable? package-unstable?)
  (integrate? package-integrate? set-package-integrate?!)
  ;; For EVAL and LOAD (which can only be done in unstable packages)
  (get-location package-get-location set-package-get-location!)
  (file-name package-file-name)
  (clauses package-clauses)
  (loaded? package-loaded? set-package-loaded?!)
  (env package->environment set-package->environment!)
  (opens-thunk package-opens-thunk set-package-opens-thunk!)
  (accesses-thunk package-accesses-thunk)
  (undefineds package-real-undefineds set-package-undefineds!)
  (undefined-but-assigneds
   package-real-undefined-but-assigneds
   set-package-undefined-but-assigneds!)
  (clients package-clients)
  (cached package-cached))

;; Disclose as (package <uid> [<name>]) for printing/debugging.
(define-record-discloser :package
  (lambda (package)
    (let ((name (package-name package)))
      (if name
          (list 'package (package-uid package) name)
          (list 'package (package-uid package))))))
(define (make-package opens-thunk accesses-thunk unstable? tower file clauses
uid name)
(let ((new (really-make-package
(if uid
(begin (if (>= uid *package-uid*)
(set! *package-uid* (+ uid 1)))
uid)
(new-package-uid))
opens-thunk
undefineds
unstable ( suitable for EVAL ) ?
(note-package-name! new name)
(set-package->environment! new (really-package->environment new tower))
new))
TOWER is a promise that is expected to deliver , when forced , a
(define (really-package->environment package tower)
(make-compiler-env (lambda (name)
(package-lookup package name))
(lambda (name type . maybe-static)
(package-define! package
name
type
#f
(if (null? maybe-static)
#f
(car maybe-static))))
tower
Two tables that we add lazily .
;; Build an accessor for a lazily-created table slot: return the slot's
;; table, allocating and installing a fresh name table on first access.
(define (lazy-table-accessor slot-ref slot-set!)
  (lambda (package)
    (let ((existing (slot-ref package)))
      (if existing
          existing
          (let ((fresh (make-name-table)))
            (slot-set! package fresh)
            fresh)))))
;; Lazily-created tables recording a package's undefined names and its
;; undefined-but-assigned names (allocated only when first accessed).
(define package-undefineds
  (lazy-table-accessor package-real-undefineds
                       set-package-undefineds!))
(define package-undefined-but-assigneds
  (lazy-table-accessor package-real-undefined-but-assigneds
                       set-package-undefined-but-assigneds!))
;; Allocate a fresh package uid: return the current value of the
;; *PACKAGE-UID* counter and post-increment it.
;; NOTE: the extracted source had lost the line binding UID, leaving it
;; unbound and the parentheses unbalanced; restored here.
(define (new-package-uid)
  (let ((uid *package-uid*))
    (set! *package-uid* (+ *package-uid* 1))
    uid))
(define *package-uid* 0)
(define package-name-table (make-table))
(define (package-name package)
(table-ref package-name-table (package-uid package)))
(define (note-package-name! package name)
(if name
(let ((uid (package-uid package)))
(if (not (table-ref package-name-table uid))
(table-set! package-name-table uid name)))))
(define (package-opens package)
(initialize-package-if-necessary! package)
(package-opens-really package))
(define (initialize-package-if-necessary! package)
(if (not (package-opens-really package))
(initialize-package! package)))
((package-accesses-thunk package)))
(define (make-simple-package opens unstable? tower . name-option)
(if (not (list? opens))
(error "invalid package opens list" opens))
(let ((package (make-package (lambda () opens)
unstable?
tower
uid
(if (null? name-option)
#f
(car name-option)))))
(set-package-loaded?! package #t)
package))
;; Look up NAME among PACKAGE's own definitions (not its opens),
;; returning the (place-fixed) binding or #f if NAME is unbound there.
(define (package-definition package name)
  (initialize-package-if-necessary! package)
  (let ((probe (table-ref (package-definitions package) name)))
    (and probe (maybe-fix-place! probe))))
;; Bind NAME in PACKAGE and return the binding's location.  An existing
;; binding is updated in place via CLOBBER-BINDING!; otherwise a fresh
;; binding is entered, allocating a location with the package's
;; get-location method when PLACE is #f.
(define (package-define! package name type place static)
  (let ((probe (table-ref (package-definitions package) name)))
    (if probe
        (begin
          (clobber-binding! probe type place static)
          (binding-place (maybe-fix-place! probe)))
        (let ((place (or place (get-new-location package name))))
          (table-set! (package-definitions package)
                      name
                      (make-binding type place static))
          place))))
(define (package-add-static! package name static)
(let ((probe (table-ref (package-definitions package) name)))
(if probe
(clobber-binding! probe
(binding-type probe)
(binding-place probe)
static)
(error "internal error: name not bound" package name))))
(define (package-refine-type! package name type)
(let ((probe (table-ref (package-definitions package) name)))
(if probe
(clobber-binding! probe
type
(binding-place probe)
(binding-static probe))
(error "internal error: name not bound" package name))))
(define (package-lookup package name)
(really-package-lookup package name (package-integrate? package)))
(define (really-package-lookup package name integrate?)
(let ((probe (package-definition package name)))
(cond (probe
(if integrate?
probe
(forget-integration probe)))
((generated? name)
(generic-lookup (generated-env name)
(generated-name name)))
(else
(search-opens (package-opens-really package) name integrate?)))))
;; Look NAME up in each opened structure in order; return the first
;; binding found, or #f when no structure exports NAME.
(define (search-opens opens name integrate?)
  (cond ((null? opens)
         #f)
        ((structure-lookup (car opens) name integrate?))
        (else
         (search-opens (cdr opens) name integrate?))))
(define (structure-lookup struct name integrate?)
(call-with-values
(lambda ()
(interface-ref (structure-interface struct) name))
(lambda (base-name type)
(if type
(real-structure-lookup struct base-name type integrate?)
#f))))
(define (real-structure-lookup struct name type integrate?)
(impose-type type
(really-package-lookup (structure-package struct)
name
integrate?)
integrate?))
(define (generic-lookup env name)
(cond ((package? env)
(package-lookup env name))
((structure? env)
(or (structure-lookup env
name
(package-integrate? (structure-package env)))
(call-error "not exported" generic-lookup env name)))
((procedure? env)
(lookup env name))
(else
(error "invalid environment" env name))))
(define (initialize-package! package)
(let ((opens ((package-opens-thunk package))))
(set-package-opens! package opens)
(for-each (lambda (struct)
(if (structure-unstable? struct)
(add-to-population! package (structure-clients struct))))
opens))
(for-each (lambda (name+struct)
(package-define! package
(car name+struct)
structure-type
#f
(cdr name+struct)))
(package-accesses package)))
(define (for-each-definition proc package)
(table-walk (lambda (name binding)
(proc name (maybe-fix-place! binding)))
(package-definitions package)))
(define (get-new-location package name)
((package-get-location package) package name))
(define (make-new-location package name)
(let ((uid *location-uid*))
(set! *location-uid* (+ *location-uid* 1))
(table-set! location-info-table uid
(make-immutable!
(cons (name->symbol name) (package-uid package))))
(make-undefined-location uid)))
(define $get-location (make-fluid make-new-location))
1510 in initial system as of 1/22/94
(define location-info-table (make-table))
(define (flush-location-names)
(set! location-info-table (make-table))
)
( put ' package - define ! ' scheme - indent - hook 2 )
|
a6cee196bf79aa08242d2a60ae8e6093d0b2841ff4bf6bd45c8a3c66fb48f7bb | RedPenguin101/aoc2021 | day01.clj | (ns aoc2021.day01
(:require [clojure.string :as str]))
;; One depth measurement per line; parse each line to a long.
(def input (map #(Long/parseLong %) (str/split-lines (slurp "resources/day01input.txt"))))

;; Part 1: count strictly increasing consecutive pairs.
;; NOTE: the extracted source had lost the ";;" markers on the result
;; comments below, leaving bare `= > 1715` junk forms; restored here.
(count (filter #(apply < %) (partition 2 1 input)))
;; => 1715

;; Part 2: sum sliding windows of three, then count increases between
;; consecutive window sums.
(->> input
     (partition 3 1)
     (map #(apply + %))
     (partition 2 1)
     (filter #(apply < %))
     count)
;; => 1739

(comment
  ;; Beautiful alternative (credit: another AoC 2021 d01 solution; the
  ;; original URL was garbled during extraction): comparing the sequence
  ;; against a shifted copy of itself.
  (count (filter pos? (map - (rest input) input)))
  ;; => 1715
  (count (filter pos? (map - (drop 3 input) input)))
  ;; => 1739
  )
(:require [clojure.string :as str]))
(def input (map #(Long/parseLong %) (str/split-lines (slurp "resources/day01input.txt"))))
(count (filter #(apply < %) (partition 2 1 input)))
= > 1715
(->> input
(partition 3 1)
(map #(apply + %))
(partition 2 1)
(filter #(apply < %))
count)
= > 1739
(comment
"beautiful solution from
-of-code/blob/main/src/aoc/2021/d01.clj"
(count (filter pos? (map - (rest input) input)))
= > 1715
(count (filter pos? (map - (drop 3 input) input)))
= > 1739
) | |
2d4350fad305eb67eeb9798c9342bc2db100904bbc619bd6b8a63143ffe6d87f | jaseemabid/mit-scheme | div.scm | (declare (usual-integrations))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; File: div.sch
Description : DIV benchmarks
Author :
; Created: 8-Apr-85
Modified : 19 - Jul-85 18:28:01 ( )
23 - Jul-87 ( )
; Language: Scheme
; Status: Public Domain
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
DIV2 -- Benchmark which divides by 2 using lists of n ( ) 's .
;;; This file contains a recursive as well as an iterative test.
;; Build a list of N empty lists, e.g. (create-n 2) => (() ()).
(define (create-n n)
  (let loop ((remaining n) (acc '()))
    (if (= remaining 0)
        acc
        (loop (- remaining 1) (cons '() acc)))))
(define *ll* (create-n 200))
;; Keep every other element of L (which must have even length),
;; accumulating iteratively -- so the result comes out REVERSED relative
;; to L, exactly as the original do-loop produced it.
(define (iterative-div2 l)
  (let loop ((rest l) (acc '()))
    (if (null? rest)
        acc
        (loop (cddr rest) (cons (car rest) acc)))))
;; Keep every other element of L (which must have even length),
;; recursing on the tail; preserves the original order.
(define (recursive-div2 l)
  (if (null? l)
      '()
      (cons (car l) (recursive-div2 (cddr l)))))
;; Benchmark drivers: each performs 300 iterations of 4 calls (1200
;; halvings total) over L, exercising the iterative and the recursive
;; variant respectively.  Results are discarded; only timing matters.
(define (test-1 l)
  (do ((i 300 (- i 1)))
      ((= i 0))
    (iterative-div2 l)
    (iterative-div2 l)
    (iterative-div2 l)
    (iterative-div2 l)))

(define (test-2 l)
  (do ((i 300 (- i 1)))
      ((= i 0))
    (recursive-div2 l)
    (recursive-div2 l)
    (recursive-div2 l)
    (recursive-div2 l)))
;;; for the iterative test call: (test-1 *ll*)
;;; for the recursive test call: (test-2 *ll*)
(lambda () (begin (test-1 *ll*) (test-2 *ll*)))
(lambda ()
(do ((i 10 (- i 1)))
((= i 0))
(test-1 *ll*)
(test-2 *ll*)))
| null | https://raw.githubusercontent.com/jaseemabid/mit-scheme/d30da6b2c103e34b6e0805bd5cbefeb9501382c1/v8/src/bench/div.scm | scheme |
File: div.sch
Created: 8-Apr-85
Language: Scheme
Status: Public Domain
This file contains a recursive as well as an iterative test.
for the iterative test call: (test-1 *ll*)
for the recursive test call: (test-2 *ll*) | (declare (usual-integrations))
Description : DIV benchmarks
Author :
Modified : 19 - Jul-85 18:28:01 ( )
23 - Jul-87 ( )
DIV2 -- Benchmark which divides by 2 using lists of n ( ) 's .
(define (create-n n)
(do ((n n (- n 1))
(a '() (cons '() a)))
((= n 0) a)))
(define *ll* (create-n 200))
(define (iterative-div2 l)
(do ((l l (cddr l))
(a '() (cons (car l) a)))
((null? l) a)))
(define (recursive-div2 l)
(cond ((null? l) '())
(else (cons (car l) (recursive-div2 (cddr l))))))
(define (test-1 l)
(do ((i 300 (- i 1)))
((= i 0))
(iterative-div2 l)
(iterative-div2 l)
(iterative-div2 l)
(iterative-div2 l)))
(define (test-2 l)
(do ((i 300 (- i 1)))
((= i 0))
(recursive-div2 l)
(recursive-div2 l)
(recursive-div2 l)
(recursive-div2 l)))
(lambda () (begin (test-1 *ll*) (test-2 *ll*)))
(lambda ()
(do ((i 10 (- i 1)))
((= i 0))
(test-1 *ll*)
(test-2 *ll*)))
|
6915cc65532997e49de06af6baaefb90c47414a6b54e86b43ea2782931d30ced | TrustInSoft/tis-kernel | emitter.ml | (**************************************************************************)
(* *)
This file is part of .
(* *)
is a fork of Frama - C. All the differences are :
Copyright ( C ) 2016 - 2017
(* *)
is released under GPLv2
(* *)
(**************************************************************************)
(**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
Modules [ ] and [ Kernel ] are not usable here . Thus use above modules
instead .
instead. *)
(**************************************************************************)
* { 2 Datatype }
(**************************************************************************)
type kind = Property_status | Alarm | Code_annot | Funspec | Global_annot
type emitter =
{ name: string;
kinds: kind list;
tuning_parameters: Typed_parameter.t list;
correctness_parameters: Typed_parameter.t list }
module D =
Datatype.Make_with_collections
(struct
type t = emitter
let name = "Emitter.t"
let rehash = Datatype.identity
let structural_descr = Structural_descr.t_unknown
let reprs =
[ { name = "";
kinds = [];
tuning_parameters = [];
correctness_parameters = [] } ]
does not use (= =) in order to prevent unmarshalling issue + in order
to be able to compare emitters coming from Usable_emitter.get
to be able to compare emitters coming from Usable_emitter.get *)
let equal x y = Datatype.String.equal x.name y.name
let compare x y = Datatype.String.compare x.name y.name
let hash x = Datatype.String.hash x.name
let copy x = x (* strings are immutable here *)
let pretty fmt x = Format.pp_print_string fmt x.name
let internal_pretty_code = Datatype.undefined
let varname _ = assert false (* unused while [internal_pretty_code]
unimplemented *)
let mem_project = Datatype.never_any_project
end)
type usable_emitter =
{ u_id: int;
u_name: string;
u_kinds: kind list;
mutable used: bool;
mutable version: int;
(* maps below associate the parameter to its value (as a string) at the
time of using. *)
tuning_values: string Datatype.String.Map.t;
correctness_values: string Datatype.String.Map.t }
let has_several_versions_ref = Extlib.mk_fun "Emitter.has_several_versions"
module Usable_emitter = struct
include Datatype.Make_with_collections
(struct
type t = usable_emitter
let name = "Emitter.Usable_emitter.t"
let rehash = Datatype.identity
let structural_descr = Structural_descr.t_abstract
let reprs =
let p = Datatype.String.Map.empty in
[ { u_id = -1;
u_name = "";
u_kinds = [ Property_status ];
used = false;
version = -1;
tuning_values = p;
correctness_values = p } ]
let equal = ( == )
let compare x y = if x == y then 0 else Datatype.Int.compare x.u_id y.u_id
let hash x = Datatype.Int.hash x.u_id
let copy x = x (* strings are immutable here *)
let pretty fmt x =
let name = x.u_name in
if !has_several_versions_ref name then
Format.fprintf fmt "%s (v%d)" name x.version
else
Format.pp_print_string fmt name
let internal_pretty_code = Datatype.undefined
let varname _ = assert false (* unused while [internal_pretty_code]
unimplemented *)
let mem_project = Datatype.never_any_project
end)
let get e =
let get_params map =
Datatype.String.Map.fold
(fun s _ acc -> Typed_parameter.get s :: acc)
map
[]
in
{ name = e.u_name;
kinds = e.u_kinds;
correctness_parameters = get_params e.correctness_values;
tuning_parameters = get_params e.tuning_values }
let get_name e = e.u_name
let get_unique_name e = Pretty_utils.sfprintf "%a" pretty e
let correctness_parameters e =
Datatype.String.Map.fold (fun p _ acc -> p :: acc) e.correctness_values []
let tuning_parameters e =
Datatype.String.Map.fold (fun p _ acc -> p :: acc) e.tuning_values []
let pretty_parameter fmt ~tuning e s =
let map = if tuning then e.tuning_values else e.correctness_values in
let v = Datatype.String.Map.find s map in
Format.fprintf fmt "%s %s" s v
end
(**************************************************************************)
* { 2 Implementation for Plug - in Developers }
(**************************************************************************)
let names: unit Datatype.String.Hashtbl.t = Datatype.String.Hashtbl.create 7
let create name kinds ~correctness ~tuning =
if Datatype.String.Hashtbl.mem names name then
Kernel.fatal "emitter %s already exists with the same parameters" name;
let e =
{ name = name;
kinds = kinds;
correctness_parameters = correctness;
tuning_parameters = tuning }
in
Datatype.String.Hashtbl.add names name ();
e
let dummy = create "dummy" [] ~correctness:[] ~tuning:[]
let get_name e = e.name
(* Names of the parameters registered as affecting correctness of [e]. *)
let correctness_parameters e =
  let name_of p = p.Typed_parameter.name in
  List.map name_of e.correctness_parameters

(* Names of the parameters registered as tuning-only for [e]. *)
let tuning_parameters e =
  let name_of p = p.Typed_parameter.name in
  List.map name_of e.tuning_parameters
let end_user =
create
"End-User"
[ Property_status; Code_annot; Funspec; Global_annot ]
~correctness:[]
~tuning:[]
let kernel =
create
"TrustInSoft Kernel"
[ Property_status; Funspec ]
~correctness:[]
~tuning:[]
(**************************************************************************)
* { 2 State of all known emitters }
(**************************************************************************)
module Usable_id =
State_builder.SharedCounter(struct let name = "Emitter.Usable_id" end)
(* For each emitter, the info required to be able to get the right usable
emitter. *)
module Usable_emitters_of_emitter =
State_builder.Hashtbl
(Datatype.String.Hashtbl)
(Datatype.Pair
(Datatype.Ref(Usable_emitter)) (* current usable emitter with the
current parameter values *)
(Datatype.Ref(Usable_emitter.Set))) (* existing usables emitters with
the old parameter values *)
(struct
let name = "Emitter.Usable_emitters_of_emitter"
let size = 7
let dependencies = [ Usable_id.self ]
end)
let self = Usable_emitters_of_emitter.self
let has_several_versions name =
try
let _, set = Usable_emitters_of_emitter.find name in
Usable_emitter.Set.cardinal !set > 1
with Not_found ->
Kernel.fatal "Unknown emitter %s" name
let () = has_several_versions_ref := has_several_versions
let distinct_parameters get_them tuning e =
let name = e.u_name in
let values = get_them e in
let get e s =
Pretty_utils.sfprintf
"%t"
(fun fmt -> Usable_emitter.pretty_parameter fmt ~tuning e s)
in
try
let _, set = Usable_emitters_of_emitter.find name in
Usable_emitter.Set.fold
(fun e' acc ->
List.fold_left2
(fun acc s1 s2 ->
if get e s1 = get e' s2 then acc
else Datatype.String.Set.add s1 acc)
acc
values
(get_them e))
!set
Datatype.String.Set.empty
with Not_found ->
Kernel.fatal "Unknown emitter %s" name
let distinct_tuning_parameters =
distinct_parameters Usable_emitter.tuning_parameters true
let distinct_correctness_parameters =
distinct_parameters Usable_emitter.correctness_parameters false
(**************************************************************************)
* { 2 Kernel Internal Implementation }
(**************************************************************************)
(* set the value of a parameter of an emitter *)
let update_usable_emitter tuning ~used usable_e param_name value =
let id = Usable_id.next () in
let name = usable_e.u_name in
let kinds = usable_e.u_kinds in
let add = Datatype.String.Map.add param_name value in
if tuning then
{ u_id = id;
u_name = name;
u_kinds = kinds;
used = used;
version = -1; (* delayed *)
tuning_values = add usable_e.tuning_values;
correctness_values = usable_e.correctness_values }
else
{ u_id = id;
u_name = name;
u_kinds = kinds;
used = used;
version = -1; (* delayed *)
tuning_values = usable_e.tuning_values;
correctness_values = add usable_e.correctness_values }
exception Found of Usable_emitter.t
let update_parameter tuning usable_e p =
let param_name = p.Typed_parameter.name in
let value = Typed_parameter.get_value p in
try
let _, set = Usable_emitters_of_emitter.find usable_e.u_name in
try
Usable_emitter.Set.iter
(fun e ->
let map = if tuning then e.tuning_values else e.correctness_values in
let exists =
try
Datatype.String.equal
value
(Datatype.String.Map.find param_name map)
with Not_found ->
false
in
if exists then raise (Found e))
!set;
(* we are setting the value of a parameter, but we are not sure yet that
the corresponding usable emitter will be used *)
let e =
update_usable_emitter tuning ~used:false usable_e param_name value
in
set := Usable_emitter.Set.add e !set;
e
with Found e ->
(* we already create an usable emitter with this value for this
parameter *)
e
with Not_found ->
we are creating the first usable emitter of the given name :
it is going to be used
it is going to be used *)
update_usable_emitter tuning ~used:true usable_e param_name value
let kinds: (kind, State.t list) Hashtbl.t = Hashtbl.create 7
(* Apply [f] to the state list registered in [kinds] for each kind of
   [l]; kinds with no registered states are skipped.  As in the original
   control flow, a [Not_found] escaping from [f] itself is also
   silently swallowed. *)
let iter_on_kinds f l =
  let apply k =
    try f (Hashtbl.find kinds k) with Not_found -> ()
  in
  List.iter apply l
let correctness_states: unit State.Hashtbl.t = State.Hashtbl.create 7
let register_correctness_parameter name emitter_name kinds =
try
let state = State.get name in
State.Hashtbl.replace correctness_states state ();
iter_on_kinds (State_dependency_graph.add_dependencies ~from:state) kinds
with State.Unknown ->
(* in multi-sessions mode (e.g. save/load), the state for this parameter may
not exist if the plug-in which defines it is not here anymore (fix bug
#2181) *)
Kernel.warning
~once:true
"emitter %s: correctness parameter %s does not exist anymore. Ignored."
emitter_name
name
let parameter_hooks
: (unit -> unit) Datatype.String.Hashtbl.t Typed_parameter.Hashtbl.t
= Typed_parameter.Hashtbl.create 97
let register_tuning_parameter name p =
let update () =
try
let current, set = Usable_emitters_of_emitter.find name in
let c = !current in
let v = c.version in
let new_e = update_parameter true c p in
if c.used then new_e.version <- v + 1
else begin
set := Usable_emitter.Set.remove c !set;
new_e.version <- v
end;
current := new_e
with Not_found ->
in multi - sessions mode ( e.g. save / load ) , the emitters could exist in
the previous session but not in the current one . In this case , there
is nothing to do .
, even if it still exists , it could be not yet restored
since the project library does not ensure that it restores the table
of emitters before the states of parameters . In such a case , it is
also possible to do nothing since the right table in the right state
is going to be restored .
the previous session but not in the current one. In this case, there
is nothing to do.
Additionnally, even if it still exists, it could be not yet restored
since the project library does not ensure that it restores the table
of emitters before the states of parameters. In such a case, it is
also possible to do nothing since the right table in the right state
is going to be restored. *)
()
in
try
let tbl = Typed_parameter.Hashtbl.find parameter_hooks p in
Datatype.String.Hashtbl.replace tbl name update
with Not_found ->
Kernel.fatal
"[Emitter] no hook table for parameter %s"
p.Typed_parameter.name
let () =
Cmdline.run_after_extended_stage
(fun () ->
State_selection.Static.iter
(fun s ->
let tbl = Datatype.String.Hashtbl.create 7 in
let p = Typed_parameter.get (State.get_name s) in
Typed_parameter.Hashtbl.add parameter_hooks p tbl;
let update () = Datatype.String.Hashtbl.iter (fun _ f -> f ()) tbl in
match p.Typed_parameter.accessor with
| Typed_parameter.Bool(a, _) ->
a.Typed_parameter.add_set_hook (fun _ _ -> update ())
| Typed_parameter.Int(a, _) ->
a.Typed_parameter.add_set_hook (fun _ _ -> update ())
| Typed_parameter.String(a, _) ->
a.Typed_parameter.add_set_hook (fun _ _ -> update ()))
(* [JS 2012/02/07] should be limited to
[Option_functor.get_selection_context], but it is not possible while
each plug-in (including Wp) is not projectified *)
(*(Option_functor.get_selection_context ~is_set:false ()))*)
(Parameter_state.get_selection ~is_set:false ()))
let update_table tbl =
(* remove old stuff *)
Usable_emitters_of_emitter.iter
(fun _ (_, all_usable_e) ->
Usable_emitter.Set.iter
(fun e ->
(* remove dependencies corresponding to old correctness parameters *)
Datatype.String.Map.iter
(fun p _ ->
try
iter_on_kinds
(State_dependency_graph.remove_dependencies
~from:(State.get p))
e.u_kinds
with State.Unknown ->
In multi - sessions mode ( e.g. save / load ) , the state for this
parameter may not exist if the plug - in which defines it is
not here anymore . Nothing special to do since the
dependencies have already been removed by the load mechanism
when states are missing ( fix bug # 2181 ) .
parameter may not exist if the plug-in which defines it is
not here anymore. Nothing special to do since the
dependencies have already been removed by the load mechanism
when states are missing (fix bug #2181). *)
())
e.correctness_values;
(* remove hooks corresponding to old tuning parameters *)
Typed_parameter.Hashtbl.iter
(fun _ tbl -> Datatype.String.Hashtbl.clear tbl)
parameter_hooks)
!all_usable_e);
(* register new stuff *)
Datatype.String.Hashtbl.iter
(fun e_name (_, all_usable_e) ->
Usable_emitter.Set.iter
(fun e ->
Datatype.String.Map.iter
(fun p _ -> register_correctness_parameter p e.u_name e.u_kinds)
e.correctness_values;
Datatype.String.Map.iter
(fun p _ ->
register_tuning_parameter e_name (Typed_parameter.get p))
e.tuning_values)
!all_usable_e)
tbl
let () = Usable_emitters_of_emitter.add_hook_on_update update_table
let register_parameter tuning usable_e p =
let usable_e = update_parameter tuning usable_e p in
if tuning then register_tuning_parameter usable_e.u_name p
else
register_correctness_parameter
p.Typed_parameter.name
usable_e.u_name
usable_e.u_kinds;
usable_e
let create_usable_emitter e =
let id = Usable_id.next () in
let usable_e =
{ u_id = id;
u_name = e.name;
u_kinds = e.kinds;
used = true;
version = -1; (* delayed *)
tuning_values = Datatype.String.Map.empty;
correctness_values = Datatype.String.Map.empty }
in
let usable_e =
List.fold_left (register_parameter true) usable_e e.tuning_parameters
in
let usable_e =
List.fold_left (register_parameter false) usable_e e.correctness_parameters
in
usable_e.version <- 1;
usable_e
(* Return the current usable emitter for [e], creating and registering
   it on first use.  The returned emitter is marked [used], so that a
   subsequent tuning-parameter change bumps its version rather than
   silently replacing it (see [register_tuning_parameter]). *)
let get e =
  let name = e.name in
  try
    let current, _ = Usable_emitters_of_emitter.find name in
    let c = !current in
    c.used <- true;
    c
  with Not_found ->
    let usable_e = create_usable_emitter e in
    Usable_emitters_of_emitter.add
      name
      (ref usable_e, ref (Usable_emitter.Set.singleton usable_e));
    usable_e
module ED = D (* for debugging *)
module Make_table
(H: Datatype.Hashtbl)
(E: sig
include Datatype.S_with_collections
val local_clear: H.key -> 'a Hashtbl.t -> unit
val usable_get: t -> Usable_emitter.t
val get: t -> emitter
end)
(D: Datatype.S)
(Info: sig include State_builder.Info_with_size val kinds: kind list end) =
struct
module Remove_hooks = Hook.Build(struct type t = E.t * H.key * D.t end)
let add_hook_on_remove f = Remove_hooks.extend (fun (e, k, d) -> f e k d)
let apply_hooks_on_remove e k d = Remove_hooks.apply (e, k, d)
(* this list is computed after defining [self] *)
let static_dependencies = ref []
let must_clear_all sel =
List.exists (State_selection.mem sel) !static_dependencies
(* [KNOWN LIMITATION] only works iff the selection contains the parameter'
state. In particular, that does not work if one writes something like
let selection =
State_selection.only_dependencies Kernel.MainFunction.self
in
Project.clear ~selection () *)
let must_local_clear sel =
try
State.Hashtbl.iter
(fun s () -> if State_selection.mem sel s then raise Exit)
correctness_states;
true
with Exit ->
false
let create () = H.create Info.size
let state = ref (create ())
module Tbl = E.Hashtbl.Make(D)
type internal_tbl = Tbl.t
module H_datatype = H.Make(Tbl)
let dkey = Kernel.register_category "emitter"
(* standard projectified hashtbl, but an ad-hoc function 'clear' *)
include State_builder.Register
(H_datatype)
(struct
type t = Tbl.t H.t
let create = create
let clear tbl =
let sel = Project.get_current_selection () in
(* Kernel.feedback "SELECT: %a" State_selection.pretty sel;*)
if must_clear_all sel then begin
(* someone explicitly requires to fully reset the table *)
Kernel.debug ~dkey ~level:3 "FULL CLEAR of %s in %a"
Info.name Project.pretty (Project.current ());
H.clear tbl
end else
AST is unchanged
if must_local_clear sel then begin
(* one have to clear the table, but we have to keep the keys *)
Kernel.debug ~dkey ~level:3 "LOCAL CLEAR of %s in %a"
Info.name Project.pretty (Project.current ());
H.iter
(fun k h ->
if not (Remove_hooks.is_empty ()) then
E.Hashtbl.iter (fun e x -> apply_hooks_on_remove e k x) h;
E.local_clear k h)
tbl;
end else begin
(* we have to clear only the bindings corresponding to the selected
correctness parameters *)
let to_be_removed = ref [] in
H.iter
(fun k h ->
E.Hashtbl.iter
(fun e x ->
let is_param_selected =
List.exists
(fun p -> State_selection.mem sel (State.get p))
(Usable_emitter.correctness_parameters (E.usable_get e))
in
if is_param_selected then
to_be_removed := (k, e, x) :: !to_be_removed)
h)
tbl;
List.iter
(fun (k, e, x) ->
try
let h = H.find tbl k in
Kernel.debug ~dkey ~level:3 "CLEARING binding %a of %s in %a"
ED.pretty (E.get e)
Info.name
Project.pretty (Project.current ());
E.Hashtbl.remove h e;
apply_hooks_on_remove e k x
with Not_found ->
assert false)
!to_be_removed
end
let get () = !state
let set x = state := x
let clear_some_projects _f _h = false
end)
(struct
include Info
let unique_name = name
let dependencies = self :: dependencies
end)
let add_kind k =
try
let l = Hashtbl.find kinds k in
Hashtbl.replace kinds k (self :: l)
with Not_found ->
Hashtbl.add kinds k [ self ]
(* compute which states always impact this one (i.e. [self]) *)
let () =
List.iter add_kind Info.kinds;
let get_dependencies () =
State_dependency_graph.G.fold_pred
(fun s acc -> s :: acc)
State_dependency_graph.graph
self
[]
in
Cmdline.run_after_early_stage
(fun () -> static_dependencies := get_dependencies ())
let add key v = H.add !state key v
let find key = H.find !state key
let mem key = H.mem !state key
let iter f = H.iter f !state
let fold f acc = H.fold f !state acc
let iter_sorted ~cmp f = H.iter_sorted ~cmp f !state
let fold_sorted ~cmp f acc = H.fold_sorted ~cmp f !state acc
let remove key =
if not (Remove_hooks.is_empty ()) then begin
try
let tbl = find key in
E.Hashtbl.iter (fun e v -> apply_hooks_on_remove e key v) tbl;
with Not_found ->
()
end;
H.remove !state key;
end
include D
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/TrustInSoft/tis-kernel/748d28baba90c03c0f5f4654d2e7bb47dfbe4e7d/src/kernel_services/plugin_entry_points/emitter.ml | ocaml | ************************************************************************
************************************************************************
************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
************************************************************************
************************************************************************
strings are immutable here
unused while [internal_pretty_code]
unimplemented
maps below associate the parameter to its value (as a string) at the
time of using.
strings are immutable here
unused while [internal_pretty_code]
unimplemented
************************************************************************
************************************************************************
************************************************************************
************************************************************************
For each emitter, the info required to be able to get the right usable
emitter.
current usable emitter with the
current parameter values
existing usables emitters with
the old parameter values
************************************************************************
************************************************************************
set the value of a parameter of an emitter
delayed
delayed
we are setting the value of a parameter, but we are not sure yet that
the corresponding usable emitter will be used
we already create an usable emitter with this value for this
parameter
in multi-sessions mode (e.g. save/load), the state for this parameter may
not exist if the plug-in which defines it is not here anymore (fix bug
#2181)
[JS 2012/02/07] should be limited to
[Option_functor.get_selection_context], but it is not possible while
each plug-in (including Wp) is not projectified
(Option_functor.get_selection_context ~is_set:false ()))
remove old stuff
remove dependencies corresponding to old correctness parameters
remove hooks corresponding to old tuning parameters
register new stuff
delayed
for debugging
this list is computed after defining [self]
[KNOWN LIMITATION] only works iff the selection contains the parameter'
state. In particular, that does not work if one writes something like
let selection =
State_selection.only_dependencies Kernel.MainFunction.self
in
Project.clear ~selection ()
standard projectified hashtbl, but an ad-hoc function 'clear'
Kernel.feedback "SELECT: %a" State_selection.pretty sel;
someone explicitly requires to fully reset the table
one have to clear the table, but we have to keep the keys
we have to clear only the bindings corresponding to the selected
correctness parameters
compute which states always impact this one (i.e. [self])
Local Variables:
compile-command: "make -C ../../.."
End:
| This file is part of .
is a fork of Frama - C. All the differences are :
Copyright ( C ) 2016 - 2017
is released under GPLv2
This file is part of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
Modules [ ] and [ Kernel ] are not usable here . Thus use above modules
instead .
instead. *)
* { 2 Datatype }
type kind = Property_status | Alarm | Code_annot | Funspec | Global_annot
type emitter =
{ name: string;
kinds: kind list;
tuning_parameters: Typed_parameter.t list;
correctness_parameters: Typed_parameter.t list }
module D =
Datatype.Make_with_collections
(struct
type t = emitter
let name = "Emitter.t"
let rehash = Datatype.identity
let structural_descr = Structural_descr.t_unknown
let reprs =
[ { name = "";
kinds = [];
tuning_parameters = [];
correctness_parameters = [] } ]
does not use (= =) in order to prevent unmarshalling issue + in order
to be able to compare emitters coming from Usable_emitter.get
to be able to compare emitters coming from Usable_emitter.get *)
let equal x y = Datatype.String.equal x.name y.name
let compare x y = Datatype.String.compare x.name y.name
let hash x = Datatype.String.hash x.name
let pretty fmt x = Format.pp_print_string fmt x.name
let internal_pretty_code = Datatype.undefined
let mem_project = Datatype.never_any_project
end)
type usable_emitter =
{ u_id: int;
u_name: string;
u_kinds: kind list;
mutable used: bool;
mutable version: int;
tuning_values: string Datatype.String.Map.t;
correctness_values: string Datatype.String.Map.t }
let has_several_versions_ref = Extlib.mk_fun "Emitter.has_several_versions"
module Usable_emitter = struct
include Datatype.Make_with_collections
(struct
type t = usable_emitter
let name = "Emitter.Usable_emitter.t"
let rehash = Datatype.identity
let structural_descr = Structural_descr.t_abstract
let reprs =
let p = Datatype.String.Map.empty in
[ { u_id = -1;
u_name = "";
u_kinds = [ Property_status ];
used = false;
version = -1;
tuning_values = p;
correctness_values = p } ]
let equal = ( == )
let compare x y = if x == y then 0 else Datatype.Int.compare x.u_id y.u_id
let hash x = Datatype.Int.hash x.u_id
let pretty fmt x =
let name = x.u_name in
if !has_several_versions_ref name then
Format.fprintf fmt "%s (v%d)" name x.version
else
Format.pp_print_string fmt name
let internal_pretty_code = Datatype.undefined
let mem_project = Datatype.never_any_project
end)
let get e =
let get_params map =
Datatype.String.Map.fold
(fun s _ acc -> Typed_parameter.get s :: acc)
map
[]
in
{ name = e.u_name;
kinds = e.u_kinds;
correctness_parameters = get_params e.correctness_values;
tuning_parameters = get_params e.tuning_values }
let get_name e = e.u_name
let get_unique_name e = Pretty_utils.sfprintf "%a" pretty e
let correctness_parameters e =
Datatype.String.Map.fold (fun p _ acc -> p :: acc) e.correctness_values []
let tuning_parameters e =
Datatype.String.Map.fold (fun p _ acc -> p :: acc) e.tuning_values []
let pretty_parameter fmt ~tuning e s =
let map = if tuning then e.tuning_values else e.correctness_values in
let v = Datatype.String.Map.find s map in
Format.fprintf fmt "%s %s" s v
end
* { 2 Implementation for Plug - in Developers }
let names: unit Datatype.String.Hashtbl.t = Datatype.String.Hashtbl.create 7
let create name kinds ~correctness ~tuning =
if Datatype.String.Hashtbl.mem names name then
Kernel.fatal "emitter %s already exists with the same parameters" name;
let e =
{ name = name;
kinds = kinds;
correctness_parameters = correctness;
tuning_parameters = tuning }
in
Datatype.String.Hashtbl.add names name ();
e
let dummy = create "dummy" [] ~correctness:[] ~tuning:[]
let get_name e = e.name
let correctness_parameters e =
List.map (fun p -> p.Typed_parameter.name) e.correctness_parameters
let tuning_parameters e =
List.map (fun p -> p.Typed_parameter.name) e.tuning_parameters
let end_user =
create
"End-User"
[ Property_status; Code_annot; Funspec; Global_annot ]
~correctness:[]
~tuning:[]
let kernel =
create
"TrustInSoft Kernel"
[ Property_status; Funspec ]
~correctness:[]
~tuning:[]
* { 2 State of all known emitters }
module Usable_id =
State_builder.SharedCounter(struct let name = "Emitter.Usable_id" end)
module Usable_emitters_of_emitter =
State_builder.Hashtbl
(Datatype.String.Hashtbl)
(Datatype.Pair
(struct
let name = "Emitter.Usable_emitters_of_emitter"
let size = 7
let dependencies = [ Usable_id.self ]
end)
let self = Usable_emitters_of_emitter.self
let has_several_versions name =
try
let _, set = Usable_emitters_of_emitter.find name in
Usable_emitter.Set.cardinal !set > 1
with Not_found ->
Kernel.fatal "Unknown emitter %s" name
let () = has_several_versions_ref := has_several_versions
let distinct_parameters get_them tuning e =
let name = e.u_name in
let values = get_them e in
let get e s =
Pretty_utils.sfprintf
"%t"
(fun fmt -> Usable_emitter.pretty_parameter fmt ~tuning e s)
in
try
let _, set = Usable_emitters_of_emitter.find name in
Usable_emitter.Set.fold
(fun e' acc ->
List.fold_left2
(fun acc s1 s2 ->
if get e s1 = get e' s2 then acc
else Datatype.String.Set.add s1 acc)
acc
values
(get_them e))
!set
Datatype.String.Set.empty
with Not_found ->
Kernel.fatal "Unknown emitter %s" name
let distinct_tuning_parameters =
distinct_parameters Usable_emitter.tuning_parameters true
let distinct_correctness_parameters =
distinct_parameters Usable_emitter.correctness_parameters false
* { 2 Kernel Internal Implementation }
let update_usable_emitter tuning ~used usable_e param_name value =
let id = Usable_id.next () in
let name = usable_e.u_name in
let kinds = usable_e.u_kinds in
let add = Datatype.String.Map.add param_name value in
if tuning then
{ u_id = id;
u_name = name;
u_kinds = kinds;
used = used;
tuning_values = add usable_e.tuning_values;
correctness_values = usable_e.correctness_values }
else
{ u_id = id;
u_name = name;
u_kinds = kinds;
used = used;
tuning_values = usable_e.tuning_values;
correctness_values = add usable_e.correctness_values }
exception Found of Usable_emitter.t
let update_parameter tuning usable_e p =
let param_name = p.Typed_parameter.name in
let value = Typed_parameter.get_value p in
try
let _, set = Usable_emitters_of_emitter.find usable_e.u_name in
try
Usable_emitter.Set.iter
(fun e ->
let map = if tuning then e.tuning_values else e.correctness_values in
let exists =
try
Datatype.String.equal
value
(Datatype.String.Map.find param_name map)
with Not_found ->
false
in
if exists then raise (Found e))
!set;
let e =
update_usable_emitter tuning ~used:false usable_e param_name value
in
set := Usable_emitter.Set.add e !set;
e
with Found e ->
e
with Not_found ->
we are creating the first usable emitter of the given name :
it is going to be used
it is going to be used *)
update_usable_emitter tuning ~used:true usable_e param_name value
let kinds: (kind, State.t list) Hashtbl.t = Hashtbl.create 7
let iter_on_kinds f l =
List.iter
(fun k ->
try
let states = Hashtbl.find kinds k in
f states
with Not_found ->
())
l
let correctness_states: unit State.Hashtbl.t = State.Hashtbl.create 7
let register_correctness_parameter name emitter_name kinds =
try
let state = State.get name in
State.Hashtbl.replace correctness_states state ();
iter_on_kinds (State_dependency_graph.add_dependencies ~from:state) kinds
with State.Unknown ->
Kernel.warning
~once:true
"emitter %s: correctness parameter %s does not exist anymore. Ignored."
emitter_name
name
let parameter_hooks
: (unit -> unit) Datatype.String.Hashtbl.t Typed_parameter.Hashtbl.t
= Typed_parameter.Hashtbl.create 97
let register_tuning_parameter name p =
let update () =
try
let current, set = Usable_emitters_of_emitter.find name in
let c = !current in
let v = c.version in
let new_e = update_parameter true c p in
if c.used then new_e.version <- v + 1
else begin
set := Usable_emitter.Set.remove c !set;
new_e.version <- v
end;
current := new_e
with Not_found ->
in multi - sessions mode ( e.g. save / load ) , the emitters could exist in
the previous session but not in the current one . In this case , there
is nothing to do .
, even if it still exists , it could be not yet restored
since the project library does not ensure that it restores the table
of emitters before the states of parameters . In such a case , it is
also possible to do nothing since the right table in the right state
is going to be restored .
the previous session but not in the current one. In this case, there
is nothing to do.
Additionnally, even if it still exists, it could be not yet restored
since the project library does not ensure that it restores the table
of emitters before the states of parameters. In such a case, it is
also possible to do nothing since the right table in the right state
is going to be restored. *)
()
in
try
let tbl = Typed_parameter.Hashtbl.find parameter_hooks p in
Datatype.String.Hashtbl.replace tbl name update
with Not_found ->
Kernel.fatal
"[Emitter] no hook table for parameter %s"
p.Typed_parameter.name
let () =
Cmdline.run_after_extended_stage
(fun () ->
State_selection.Static.iter
(fun s ->
let tbl = Datatype.String.Hashtbl.create 7 in
let p = Typed_parameter.get (State.get_name s) in
Typed_parameter.Hashtbl.add parameter_hooks p tbl;
let update () = Datatype.String.Hashtbl.iter (fun _ f -> f ()) tbl in
match p.Typed_parameter.accessor with
| Typed_parameter.Bool(a, _) ->
a.Typed_parameter.add_set_hook (fun _ _ -> update ())
| Typed_parameter.Int(a, _) ->
a.Typed_parameter.add_set_hook (fun _ _ -> update ())
| Typed_parameter.String(a, _) ->
a.Typed_parameter.add_set_hook (fun _ _ -> update ()))
(Parameter_state.get_selection ~is_set:false ()))
let update_table tbl =
Usable_emitters_of_emitter.iter
(fun _ (_, all_usable_e) ->
Usable_emitter.Set.iter
(fun e ->
Datatype.String.Map.iter
(fun p _ ->
try
iter_on_kinds
(State_dependency_graph.remove_dependencies
~from:(State.get p))
e.u_kinds
with State.Unknown ->
In multi - sessions mode ( e.g. save / load ) , the state for this
parameter may not exist if the plug - in which defines it is
not here anymore . Nothing special to do since the
dependencies have already been removed by the load mechanism
when states are missing ( fix bug # 2181 ) .
parameter may not exist if the plug-in which defines it is
not here anymore. Nothing special to do since the
dependencies have already been removed by the load mechanism
when states are missing (fix bug #2181). *)
())
e.correctness_values;
Typed_parameter.Hashtbl.iter
(fun _ tbl -> Datatype.String.Hashtbl.clear tbl)
parameter_hooks)
!all_usable_e);
Datatype.String.Hashtbl.iter
(fun e_name (_, all_usable_e) ->
Usable_emitter.Set.iter
(fun e ->
Datatype.String.Map.iter
(fun p _ -> register_correctness_parameter p e.u_name e.u_kinds)
e.correctness_values;
Datatype.String.Map.iter
(fun p _ ->
register_tuning_parameter e_name (Typed_parameter.get p))
e.tuning_values)
!all_usable_e)
tbl
let () = Usable_emitters_of_emitter.add_hook_on_update update_table
let register_parameter tuning usable_e p =
let usable_e = update_parameter tuning usable_e p in
if tuning then register_tuning_parameter usable_e.u_name p
else
register_correctness_parameter
p.Typed_parameter.name
usable_e.u_name
usable_e.u_kinds;
usable_e
let create_usable_emitter e =
let id = Usable_id.next () in
let usable_e =
{ u_id = id;
u_name = e.name;
u_kinds = e.kinds;
used = true;
tuning_values = Datatype.String.Map.empty;
correctness_values = Datatype.String.Map.empty }
in
let usable_e =
List.fold_left (register_parameter true) usable_e e.tuning_parameters
in
let usable_e =
List.fold_left (register_parameter false) usable_e e.correctness_parameters
in
usable_e.version <- 1;
usable_e
let get e =
let name = e.name in
try
let current, _ = Usable_emitters_of_emitter.find name in
let c = !current in
c.used <- true;
c
with Not_found ->
let usable_e = create_usable_emitter e in
Usable_emitters_of_emitter.add
name
(ref usable_e, ref (Usable_emitter.Set.singleton usable_e));
usable_e
module Make_table
(H: Datatype.Hashtbl)
(E: sig
include Datatype.S_with_collections
val local_clear: H.key -> 'a Hashtbl.t -> unit
val usable_get: t -> Usable_emitter.t
val get: t -> emitter
end)
(D: Datatype.S)
(Info: sig include State_builder.Info_with_size val kinds: kind list end) =
struct
module Remove_hooks = Hook.Build(struct type t = E.t * H.key * D.t end)
let add_hook_on_remove f = Remove_hooks.extend (fun (e, k, d) -> f e k d)
let apply_hooks_on_remove e k d = Remove_hooks.apply (e, k, d)
let static_dependencies = ref []
let must_clear_all sel =
List.exists (State_selection.mem sel) !static_dependencies
let must_local_clear sel =
try
State.Hashtbl.iter
(fun s () -> if State_selection.mem sel s then raise Exit)
correctness_states;
true
with Exit ->
false
let create () = H.create Info.size
let state = ref (create ())
module Tbl = E.Hashtbl.Make(D)
type internal_tbl = Tbl.t
module H_datatype = H.Make(Tbl)
let dkey = Kernel.register_category "emitter"
include State_builder.Register
(H_datatype)
(struct
type t = Tbl.t H.t
let create = create
let clear tbl =
let sel = Project.get_current_selection () in
if must_clear_all sel then begin
Kernel.debug ~dkey ~level:3 "FULL CLEAR of %s in %a"
Info.name Project.pretty (Project.current ());
H.clear tbl
end else
AST is unchanged
if must_local_clear sel then begin
Kernel.debug ~dkey ~level:3 "LOCAL CLEAR of %s in %a"
Info.name Project.pretty (Project.current ());
H.iter
(fun k h ->
if not (Remove_hooks.is_empty ()) then
E.Hashtbl.iter (fun e x -> apply_hooks_on_remove e k x) h;
E.local_clear k h)
tbl;
end else begin
let to_be_removed = ref [] in
H.iter
(fun k h ->
E.Hashtbl.iter
(fun e x ->
let is_param_selected =
List.exists
(fun p -> State_selection.mem sel (State.get p))
(Usable_emitter.correctness_parameters (E.usable_get e))
in
if is_param_selected then
to_be_removed := (k, e, x) :: !to_be_removed)
h)
tbl;
List.iter
(fun (k, e, x) ->
try
let h = H.find tbl k in
Kernel.debug ~dkey ~level:3 "CLEARING binding %a of %s in %a"
ED.pretty (E.get e)
Info.name
Project.pretty (Project.current ());
E.Hashtbl.remove h e;
apply_hooks_on_remove e k x
with Not_found ->
assert false)
!to_be_removed
end
let get () = !state
let set x = state := x
let clear_some_projects _f _h = false
end)
(struct
include Info
let unique_name = name
let dependencies = self :: dependencies
end)
let add_kind k =
try
let l = Hashtbl.find kinds k in
Hashtbl.replace kinds k (self :: l)
with Not_found ->
Hashtbl.add kinds k [ self ]
let () =
List.iter add_kind Info.kinds;
let get_dependencies () =
State_dependency_graph.G.fold_pred
(fun s acc -> s :: acc)
State_dependency_graph.graph
self
[]
in
Cmdline.run_after_early_stage
(fun () -> static_dependencies := get_dependencies ())
let add key v = H.add !state key v
let find key = H.find !state key
let mem key = H.mem !state key
let iter f = H.iter f !state
let fold f acc = H.fold f !state acc
let iter_sorted ~cmp f = H.iter_sorted ~cmp f !state
let fold_sorted ~cmp f acc = H.fold_sorted ~cmp f !state acc
let remove key =
if not (Remove_hooks.is_empty ()) then begin
try
let tbl = find key in
E.Hashtbl.iter (fun e v -> apply_hooks_on_remove e key v) tbl;
with Not_found ->
()
end;
H.remove !state key;
end
include D
|
a5c8d3f21a92ad3305c4255b334a6887d09cb3b9e1b933cd03cd0379d32dfa17 | jackfirth/rebellion | endpoint-map-range-set.rkt | #lang racket/base
(require racket/contract/base)
(provide
for/range-set
for*/range-set
(contract-out
[range-set (->* () (#:comparator comparator?) #:rest (listof nonempty-range?) immutable-range-set?)]
[make-mutable-range-set
(->* (#:comparator comparator?) ((sequence/c nonempty-range?)) mutable-range-set?)]
[sequence->range-set
(-> (sequence/c nonempty-range?) #:comparator comparator? immutable-range-set?)]
[into-range-set (-> comparator? (reducer/c nonempty-range? immutable-range-set?))]))
(require (for-syntax racket/base
rebellion/private/for-body)
(only-in racket/list empty? first)
racket/match
racket/sequence
racket/stream
(only-in racket/vector vector-sort)
rebellion/base/comparator
rebellion/base/option
rebellion/base/range
(submod rebellion/base/range private-for-rebellion-only)
rebellion/collection/entry
rebellion/collection/private/range-set-interface
(submod rebellion/collection/private/range-set-interface private-for-rebellion-only)
rebellion/collection/sorted-map
rebellion/collection/vector
rebellion/collection/vector/builder
rebellion/streaming/reducer
(submod rebellion/streaming/reducer private-for-rebellion-only)
rebellion/streaming/transducer
rebellion/private/cut
rebellion/private/guarded-block
rebellion/private/precondition
rebellion/private/static-name
rebellion/private/todo
rebellion/private/vector-merge-adjacent
syntax/parse/define)
;@----------------------------------------------------------------------------------------------------
(struct immutable-endpoint-map-range-set abstract-immutable-range-set (endpoints comparator)
#:omit-define-syntaxes
#:methods gen:range-set
[(define (this-endpoints this)
(immutable-endpoint-map-range-set-endpoints this))
(define (this-comparator this)
(immutable-endpoint-map-range-set-comparator this))
(define (in-range-set this #:descending? [descending? #false])
(define cmp (this-comparator this))
(for/stream ([endpoint-entry (in-sorted-map (this-endpoints this) #:descending? descending?)])
(match-define (entry lower upper) endpoint-entry)
(range-from-cuts lower upper #:comparator cmp)))
(define (range-set-comparator this)
(this-comparator this))
(define (range-set-size this)
(sorted-map-size (this-endpoints this)))
(define (range-set-contains? this value)
(endpoint-map-contains? (this-endpoints this) (this-comparator this) value))
(define (range-set-encloses? this range)
(endpoint-map-encloses? (this-endpoints this) (this-comparator this) range))
(define (range-set-intersects? this range)
(endpoint-map-intersects? (this-endpoints this) (this-comparator this) range))
(define (range-set-range-containing-or-absent this value)
(endpoint-map-range-containing-or-absent (this-endpoints this) (this-comparator this) value))
(define (range-set-span-or-absent this)
(endpoint-map-span-or-absent (this-endpoints this)))
(define (range-subset this subset-range)
(define cut-comparator (cut<=> (range-comparator subset-range)))
(define lower-subset-cut (range-lower-cut subset-range))
(define upper-subset-cut (range-upper-cut subset-range))
(define subset-endpoint-range
(closed-range lower-subset-cut upper-subset-cut #:comparator cut-comparator))
(define endpoints-submap (sorted-submap (this-endpoints this) subset-endpoint-range))
(define endpoints-submap-with-left-end-corrected
(guarded-block
(guard-match (present (entry leftmost-range-lower-cut leftmost-range-upper-cut))
(sorted-map-entry-at-most (this-endpoints this) lower-subset-cut)
else
endpoints-submap)
(guard (compare-infix cut-comparator leftmost-range-upper-cut > lower-subset-cut) else
endpoints-submap)
(define corrected-lower-range
(range-from-cuts lower-subset-cut leftmost-range-upper-cut #:comparator cut-comparator))
(guard (empty-range? corrected-lower-range) then
endpoints-submap)
(sorted-map-put endpoints-submap lower-subset-cut leftmost-range-upper-cut)))
(define endpoints-submap-with-right-end-corrected
(guarded-block
(guard-match (present (entry rightmost-range-lower-cut rightmost-range-upper-cut))
(sorted-map-greatest-entry endpoints-submap-with-left-end-corrected)
else
endpoints-submap-with-left-end-corrected)
(define corrected-upper-cut
(comparator-min cut-comparator rightmost-range-upper-cut upper-subset-cut))
(define corrected-rightmost-range
(range-from-cuts rightmost-range-lower-cut corrected-upper-cut #:comparator cut-comparator))
(guard (empty-range? corrected-rightmost-range) then
(sorted-map-remove endpoints-submap-with-left-end-corrected rightmost-range-lower-cut))
(sorted-map-put
endpoints-submap-with-left-end-corrected rightmost-range-lower-cut corrected-upper-cut)))
(immutable-endpoint-map-range-set
endpoints-submap-with-right-end-corrected (this-comparator this)))]
#:methods gen:immutable-range-set
[(define (this-endpoints this)
(immutable-endpoint-map-range-set-endpoints this))
(define (this-comparator this)
(immutable-endpoint-map-range-set-comparator this))
(define/guard (range-set-add this range)
(define cmp (this-comparator this))
(check-precondition
(equal? cmp (range-comparator range))
(name range-set-add)
"added range does not use the same comparator as the range set"
"range" range
"range comparator" (range-comparator range)
"range set comparator" cmp)
(guard (empty-range? range) then
this)
(define endpoints (this-endpoints this))
(define cut-cmp (sorted-map-key-comparator endpoints))
(define lower-endpoint-cut (range-lower-cut range))
(define upper-endpoint-cut (range-upper-cut range))
(define left-overlapping-range (sorted-map-entry-at-most endpoints lower-endpoint-cut))
(define right-overlapping-range
(sorted-map-entry-at-most
(sorted-submap endpoints (at-least-range lower-endpoint-cut #:comparator cut-cmp))
upper-endpoint-cut))
(define new-lower-endpoint
(match left-overlapping-range
[(present (entry left-overlapping-lower-endpoint left-overlapping-upper-endpoint))
(if (compare-infix cut-cmp left-overlapping-upper-endpoint >= lower-endpoint-cut)
(comparator-min cut-cmp lower-endpoint-cut left-overlapping-lower-endpoint)
lower-endpoint-cut)]
[(== absent) lower-endpoint-cut]))
(define new-upper-endpoint
(match right-overlapping-range
[(present (entry _ right-overlapping-upper-endpoint))
(comparator-max cut-cmp upper-endpoint-cut right-overlapping-upper-endpoint)]
[(== absent) upper-endpoint-cut]))
(define range-to-remove
(closed-range new-lower-endpoint new-upper-endpoint #:comparator cut-cmp))
(define endpoints-to-remove (sorted-map-keys (sorted-submap endpoints range-to-remove)))
(define new-endpoints
(sorted-map-put (sorted-map-remove-all endpoints endpoints-to-remove)
new-lower-endpoint new-upper-endpoint))
(immutable-endpoint-map-range-set new-endpoints cmp))
(define/guard (range-set-remove this range)
(define cmp (this-comparator this))
(check-precondition
(equal? cmp (range-comparator range))
(name range-set-remove)
"removed range does not use the same comparator as the range set"
"range" range
"range comparator" (range-comparator range)
"range set comparator" cmp)
(guard (empty-range? range) then
this)
(define endpoints (this-endpoints this))
(define cut-cmp (sorted-map-key-comparator endpoints))
(define lower-endpoint (range-lower-cut range))
(define upper-endpoint (range-upper-cut range))
(define left-overlapping-range (sorted-map-entry-at-most endpoints lower-endpoint))
(define right-overlapping-range (sorted-map-entry-at-most endpoints upper-endpoint))
(define lowest-endpoint
(match left-overlapping-range
[(present (entry left-overlapping-lower-endpoint left-overlapping-upper-endpoint))
(if (compare-infix cut-cmp left-overlapping-upper-endpoint >= lower-endpoint)
(comparator-min cut-cmp lower-endpoint left-overlapping-lower-endpoint)
lower-endpoint)]
[(== absent) lower-endpoint]))
(define new-left-overlapping-endpoints
(match left-overlapping-range
[(present (entry left-overlapping-lower-endpoint left-overlapping-upper-endpoint))
(present
(entry
left-overlapping-lower-endpoint
(comparator-min cut-cmp left-overlapping-upper-endpoint lower-endpoint)))]
[(== absent) absent]))
(define highest-endpoint
(match right-overlapping-range
[(present (entry _ right-overlapping-upper-endpoint))
(comparator-max cut-cmp upper-endpoint right-overlapping-upper-endpoint)]
[(== absent) upper-endpoint]))
(define new-right-overlapping-endpoints
(match right-overlapping-range
[(present (entry right-overlapping-lower-endpoint right-overlapping-upper-endpoint))
(if (compare-infix cut-cmp upper-endpoint < right-overlapping-upper-endpoint)
(present
(entry
(comparator-max cut-cmp right-overlapping-lower-endpoint upper-endpoint)
right-overlapping-upper-endpoint))
absent)]
[(== absent) absent]))
(define range-to-remove
(closed-range lowest-endpoint highest-endpoint #:comparator cut-cmp))
(define endpoints-to-remove (sorted-map-keys (sorted-submap endpoints range-to-remove)))
(let* ([new-endpoints (sorted-map-remove-all endpoints endpoints-to-remove)]
[new-endpoints
(match new-left-overlapping-endpoints
[(present (entry new-left-lower new-left-upper))
#:when (not (equal? new-left-lower new-left-upper))
(sorted-map-put new-endpoints new-left-lower new-left-upper)]
[_ new-endpoints])]
[new-endpoints
(match new-right-overlapping-endpoints
[(present (entry new-right-lower new-right-upper))
#:when (not (equal? new-right-lower new-right-upper))
(sorted-map-put new-endpoints new-right-lower new-right-upper)]
[_ new-endpoints])])
(immutable-endpoint-map-range-set new-endpoints cmp)))])
(struct mutable-endpoint-map-range-set abstract-mutable-range-set (endpoints comparator)
#:omit-define-syntaxes
#:methods gen:range-set
[(define (this-endpoints this)
(mutable-endpoint-map-range-set-endpoints this))
(define (this-comparator this)
(mutable-endpoint-map-range-set-comparator this))
(define (in-range-set this #:descending? [descending? #false])
(define cmp (this-comparator this))
(for/stream ([endpoint-entry (in-sorted-map (this-endpoints this) #:descending? descending?)])
(match-define (entry lower upper) endpoint-entry)
(range-from-cuts lower upper #:comparator cmp)))
(define (range-set-comparator this)
(this-comparator this))
(define (range-set-size this)
(sorted-map-size (this-endpoints this)))
(define (range-set-contains? this value)
(endpoint-map-contains? (this-endpoints this) (this-comparator this) value))
(define (range-set-encloses? this range)
(endpoint-map-encloses? (this-endpoints this) (this-comparator this) range))
(define (range-set-intersects? this range)
(endpoint-map-intersects? (this-endpoints this) (this-comparator this) range))
(define (range-set-range-containing-or-absent this value)
(endpoint-map-range-containing-or-absent (this-endpoints this) (this-comparator this) value))
(define (range-set-span-or-absent this)
(endpoint-map-span-or-absent (this-endpoints this)))
(define (range-subset this subset-range)
TODO)]
#:methods gen:mutable-range-set
[(define (this-endpoints this)
(immutable-endpoint-map-range-set-endpoints this))
(define (this-comparator this)
(immutable-endpoint-map-range-set-comparator this))
(define/guard (range-set-add! this range)
(define cmp (this-comparator this))
(check-precondition
(equal? cmp (range-comparator range))
(name range-set-add)
"added range does not use the same comparator as the range set"
"range" range
"range comparator" (range-comparator range)
"range set comparator" cmp)
(guard (empty-range? range) then
(void))
TODO)
(define/guard (range-set-remove! this range)
(define cmp (this-comparator this))
(check-precondition
(equal? cmp (range-comparator range))
(name range-set-remove)
"removed range does not use the same comparator as the range set"
"range" range
"range comparator" (range-comparator range)
"range set comparator" cmp)
(guard (empty-range? range) then
(void))
TODO)
(define (range-set-clear! this)
(sorted-map-clear! (this-endpoints this)))])
(define (make-mutable-range-set [initial-ranges '()] #:comparator comparator)
(define ranges (sequence->vector initial-ranges))
(check-ranges-use-comparator #:who (name make-mutable-range-set) ranges comparator)
(define sorted-ranges (vector-sort ranges range<?))
(check-ranges-disjoint #:who (name make-mutable-range-set) sorted-ranges)
(define coalesced-ranges (vector-merge-adjacent sorted-ranges range-connected? range-span))
(define endpoints (make-mutable-sorted-map #:key-comparator (cut<=> comparator)))
(for ([range (in-vector coalesced-ranges)])
(sorted-map-put! endpoints (range-lower-cut range) (range-upper-cut range)))
(mutable-endpoint-map-range-set endpoints comparator))
(define (endpoint-map-contains? endpoints comparator value)
(match (endpoint-map-get-nearest-range endpoints comparator (middle-cut value))
[(== absent) #false]
[(present nearest-range) (range-contains? nearest-range value)]))
(define (endpoint-map-encloses? endpoints comparator range)
(match (endpoint-map-get-nearest-range endpoints comparator (range-lower-cut range))
[(== absent) #false]
[(present nearest-range) (range-encloses? nearest-range range)]))
(define/guard (endpoint-map-intersects? endpoints comparator range)
(define lower-cut (range-lower-cut range))
(define upper-cut (range-upper-cut range))
(guard-match (present (entry _ upper)) (sorted-map-entry-at-most endpoints upper-cut) else
#false)
(compare-infix (cut<=> (range-comparator range)) lower-cut < upper))
(define (endpoint-map-range-containing-or-absent endpoints comparator value)
(option-filter
(endpoint-map-get-nearest-range endpoints comparator (middle-cut value))
(λ (nearest-range) (range-contains? nearest-range value))))
(define (endpoint-map-span-or-absent endpoints)
TODO)
(define/guard (endpoint-map-get-nearest-range endpoints comparator cut)
(guard-match (present (entry lower upper)) (sorted-map-entry-at-most endpoints cut) else
absent)
(present (range-from-cuts lower upper #:comparator comparator)))
;@----------------------------------------------------------------------------------------------------
;; Construction APIs
(define (range-set #:comparator [comparator #false] . ranges)
(check-precondition
(or comparator (not (empty? ranges)))
(name range-set)
"cannot construct an empty range set without a comparator")
(let ([comparator (or comparator (range-comparator (first ranges)))])
(sequence->range-set ranges #:comparator comparator)))
(define (sequence->range-set ranges #:comparator comparator)
(transduce ranges #:into (into-range-set comparator)))
(struct range-set-builder ([range-vector-builder #:mutable] comparator))
(define (make-range-set-builder comparator)
(range-set-builder (make-vector-builder) comparator))
(define (range-set-builder-add builder range)
(define vector-builder (vector-builder-add (range-set-builder-range-vector-builder builder) range))
(set-range-set-builder-range-vector-builder! builder vector-builder)
builder)
(define (build-range-set builder)
(define ranges (build-vector (range-set-builder-range-vector-builder builder)))
(define comparator (range-set-builder-comparator builder))
(check-ranges-use-comparator #:who (name build-range-set) ranges comparator)
(define sorted-ranges (vector-sort ranges range<?))
(check-ranges-disjoint #:who (name build-range-set) sorted-ranges)
(define coalesced-ranges (vector-merge-adjacent sorted-ranges range-connected? range-span))
(define endpoints
(for/sorted-map #:key-comparator (cut<=> comparator) ([range (in-vector coalesced-ranges)])
(entry (range-lower-cut range) (range-upper-cut range))))
(immutable-endpoint-map-range-set endpoints comparator))
(define (check-ranges-use-comparator #:who who ranges comparator)
(for ([range (in-vector ranges)])
(check-precondition
(equal? (range-comparator range) comparator)
who
"not all ranges use the same comparator"
"range" range
"range comparator" (range-comparator range)
"expected comparator" comparator)))
(define (range<? range other-range)
(equal? (compare range<=> range other-range) lesser))
(define (check-ranges-disjoint #:who who ranges)
(unless (zero? (vector-length ranges))
(for ([range (in-vector ranges)]
[next-range (in-vector ranges 1)])
(when (range-overlaps? range next-range)
(raise-arguments-error
who
"overlapping ranges not allowed"
"range" range
"next range" next-range)))))
(define (into-range-set comparator)
(define (start)
(make-range-set-builder comparator))
(make-effectful-fold-reducer
range-set-builder-add start build-range-set #:name (name into-range-set)))
(define-syntax-parse-rule (for/range-set #:comparator comparator clauses body)
#:declare comparator (expr/c #'comparator?)
#:declare body (for-body this-syntax)
#:with context this-syntax
(for/reducer/derived context (into-range-set comparator.c) clauses (~@ . body)))
(define-syntax-parse-rule (for*/range-set #:comparator comparator clauses body)
#:declare comparator (expr/c #'comparator?)
#:declare body (for-body this-syntax)
#:with context this-syntax
(for*/reducer/derived context (into-range-set comparator.c) clauses (~@ . body)))
| null | https://raw.githubusercontent.com/jackfirth/rebellion/69dce215e231e62889389bc40be11f5b4387b304/collection/private/endpoint-map-range-set.rkt | racket | @----------------------------------------------------------------------------------------------------
@----------------------------------------------------------------------------------------------------
Construction APIs | #lang racket/base
(require racket/contract/base)
(provide
for/range-set
for*/range-set
(contract-out
[range-set (->* () (#:comparator comparator?) #:rest (listof nonempty-range?) immutable-range-set?)]
[make-mutable-range-set
(->* (#:comparator comparator?) ((sequence/c nonempty-range?)) mutable-range-set?)]
[sequence->range-set
(-> (sequence/c nonempty-range?) #:comparator comparator? immutable-range-set?)]
[into-range-set (-> comparator? (reducer/c nonempty-range? immutable-range-set?))]))
(require (for-syntax racket/base
rebellion/private/for-body)
(only-in racket/list empty? first)
racket/match
racket/sequence
racket/stream
(only-in racket/vector vector-sort)
rebellion/base/comparator
rebellion/base/option
rebellion/base/range
(submod rebellion/base/range private-for-rebellion-only)
rebellion/collection/entry
rebellion/collection/private/range-set-interface
(submod rebellion/collection/private/range-set-interface private-for-rebellion-only)
rebellion/collection/sorted-map
rebellion/collection/vector
rebellion/collection/vector/builder
rebellion/streaming/reducer
(submod rebellion/streaming/reducer private-for-rebellion-only)
rebellion/streaming/transducer
rebellion/private/cut
rebellion/private/guarded-block
rebellion/private/precondition
rebellion/private/static-name
rebellion/private/todo
rebellion/private/vector-merge-adjacent
syntax/parse/define)
(struct immutable-endpoint-map-range-set abstract-immutable-range-set (endpoints comparator)
#:omit-define-syntaxes
#:methods gen:range-set
[(define (this-endpoints this)
(immutable-endpoint-map-range-set-endpoints this))
(define (this-comparator this)
(immutable-endpoint-map-range-set-comparator this))
(define (in-range-set this #:descending? [descending? #false])
(define cmp (this-comparator this))
(for/stream ([endpoint-entry (in-sorted-map (this-endpoints this) #:descending? descending?)])
(match-define (entry lower upper) endpoint-entry)
(range-from-cuts lower upper #:comparator cmp)))
(define (range-set-comparator this)
(this-comparator this))
(define (range-set-size this)
(sorted-map-size (this-endpoints this)))
(define (range-set-contains? this value)
(endpoint-map-contains? (this-endpoints this) (this-comparator this) value))
(define (range-set-encloses? this range)
(endpoint-map-encloses? (this-endpoints this) (this-comparator this) range))
(define (range-set-intersects? this range)
(endpoint-map-intersects? (this-endpoints this) (this-comparator this) range))
(define (range-set-range-containing-or-absent this value)
(endpoint-map-range-containing-or-absent (this-endpoints this) (this-comparator this) value))
(define (range-set-span-or-absent this)
(endpoint-map-span-or-absent (this-endpoints this)))
(define (range-subset this subset-range)
(define cut-comparator (cut<=> (range-comparator subset-range)))
(define lower-subset-cut (range-lower-cut subset-range))
(define upper-subset-cut (range-upper-cut subset-range))
(define subset-endpoint-range
(closed-range lower-subset-cut upper-subset-cut #:comparator cut-comparator))
(define endpoints-submap (sorted-submap (this-endpoints this) subset-endpoint-range))
(define endpoints-submap-with-left-end-corrected
(guarded-block
(guard-match (present (entry leftmost-range-lower-cut leftmost-range-upper-cut))
(sorted-map-entry-at-most (this-endpoints this) lower-subset-cut)
else
endpoints-submap)
(guard (compare-infix cut-comparator leftmost-range-upper-cut > lower-subset-cut) else
endpoints-submap)
(define corrected-lower-range
(range-from-cuts lower-subset-cut leftmost-range-upper-cut #:comparator cut-comparator))
(guard (empty-range? corrected-lower-range) then
endpoints-submap)
(sorted-map-put endpoints-submap lower-subset-cut leftmost-range-upper-cut)))
(define endpoints-submap-with-right-end-corrected
(guarded-block
(guard-match (present (entry rightmost-range-lower-cut rightmost-range-upper-cut))
(sorted-map-greatest-entry endpoints-submap-with-left-end-corrected)
else
endpoints-submap-with-left-end-corrected)
(define corrected-upper-cut
(comparator-min cut-comparator rightmost-range-upper-cut upper-subset-cut))
(define corrected-rightmost-range
(range-from-cuts rightmost-range-lower-cut corrected-upper-cut #:comparator cut-comparator))
(guard (empty-range? corrected-rightmost-range) then
(sorted-map-remove endpoints-submap-with-left-end-corrected rightmost-range-lower-cut))
(sorted-map-put
endpoints-submap-with-left-end-corrected rightmost-range-lower-cut corrected-upper-cut)))
(immutable-endpoint-map-range-set
endpoints-submap-with-right-end-corrected (this-comparator this)))]
#:methods gen:immutable-range-set
[(define (this-endpoints this)
(immutable-endpoint-map-range-set-endpoints this))
(define (this-comparator this)
(immutable-endpoint-map-range-set-comparator this))
(define/guard (range-set-add this range)
(define cmp (this-comparator this))
(check-precondition
(equal? cmp (range-comparator range))
(name range-set-add)
"added range does not use the same comparator as the range set"
"range" range
"range comparator" (range-comparator range)
"range set comparator" cmp)
(guard (empty-range? range) then
this)
(define endpoints (this-endpoints this))
(define cut-cmp (sorted-map-key-comparator endpoints))
(define lower-endpoint-cut (range-lower-cut range))
(define upper-endpoint-cut (range-upper-cut range))
(define left-overlapping-range (sorted-map-entry-at-most endpoints lower-endpoint-cut))
(define right-overlapping-range
(sorted-map-entry-at-most
(sorted-submap endpoints (at-least-range lower-endpoint-cut #:comparator cut-cmp))
upper-endpoint-cut))
(define new-lower-endpoint
(match left-overlapping-range
[(present (entry left-overlapping-lower-endpoint left-overlapping-upper-endpoint))
(if (compare-infix cut-cmp left-overlapping-upper-endpoint >= lower-endpoint-cut)
(comparator-min cut-cmp lower-endpoint-cut left-overlapping-lower-endpoint)
lower-endpoint-cut)]
[(== absent) lower-endpoint-cut]))
(define new-upper-endpoint
(match right-overlapping-range
[(present (entry _ right-overlapping-upper-endpoint))
(comparator-max cut-cmp upper-endpoint-cut right-overlapping-upper-endpoint)]
[(== absent) upper-endpoint-cut]))
(define range-to-remove
(closed-range new-lower-endpoint new-upper-endpoint #:comparator cut-cmp))
(define endpoints-to-remove (sorted-map-keys (sorted-submap endpoints range-to-remove)))
(define new-endpoints
(sorted-map-put (sorted-map-remove-all endpoints endpoints-to-remove)
new-lower-endpoint new-upper-endpoint))
(immutable-endpoint-map-range-set new-endpoints cmp))
(define/guard (range-set-remove this range)
(define cmp (this-comparator this))
(check-precondition
(equal? cmp (range-comparator range))
(name range-set-remove)
"removed range does not use the same comparator as the range set"
"range" range
"range comparator" (range-comparator range)
"range set comparator" cmp)
(guard (empty-range? range) then
this)
(define endpoints (this-endpoints this))
(define cut-cmp (sorted-map-key-comparator endpoints))
(define lower-endpoint (range-lower-cut range))
(define upper-endpoint (range-upper-cut range))
(define left-overlapping-range (sorted-map-entry-at-most endpoints lower-endpoint))
(define right-overlapping-range (sorted-map-entry-at-most endpoints upper-endpoint))
(define lowest-endpoint
(match left-overlapping-range
[(present (entry left-overlapping-lower-endpoint left-overlapping-upper-endpoint))
(if (compare-infix cut-cmp left-overlapping-upper-endpoint >= lower-endpoint)
(comparator-min cut-cmp lower-endpoint left-overlapping-lower-endpoint)
lower-endpoint)]
[(== absent) lower-endpoint]))
(define new-left-overlapping-endpoints
(match left-overlapping-range
[(present (entry left-overlapping-lower-endpoint left-overlapping-upper-endpoint))
(present
(entry
left-overlapping-lower-endpoint
(comparator-min cut-cmp left-overlapping-upper-endpoint lower-endpoint)))]
[(== absent) absent]))
(define highest-endpoint
(match right-overlapping-range
[(present (entry _ right-overlapping-upper-endpoint))
(comparator-max cut-cmp upper-endpoint right-overlapping-upper-endpoint)]
[(== absent) upper-endpoint]))
(define new-right-overlapping-endpoints
(match right-overlapping-range
[(present (entry right-overlapping-lower-endpoint right-overlapping-upper-endpoint))
(if (compare-infix cut-cmp upper-endpoint < right-overlapping-upper-endpoint)
(present
(entry
(comparator-max cut-cmp right-overlapping-lower-endpoint upper-endpoint)
right-overlapping-upper-endpoint))
absent)]
[(== absent) absent]))
(define range-to-remove
(closed-range lowest-endpoint highest-endpoint #:comparator cut-cmp))
(define endpoints-to-remove (sorted-map-keys (sorted-submap endpoints range-to-remove)))
(let* ([new-endpoints (sorted-map-remove-all endpoints endpoints-to-remove)]
[new-endpoints
(match new-left-overlapping-endpoints
[(present (entry new-left-lower new-left-upper))
#:when (not (equal? new-left-lower new-left-upper))
(sorted-map-put new-endpoints new-left-lower new-left-upper)]
[_ new-endpoints])]
[new-endpoints
(match new-right-overlapping-endpoints
[(present (entry new-right-lower new-right-upper))
#:when (not (equal? new-right-lower new-right-upper))
(sorted-map-put new-endpoints new-right-lower new-right-upper)]
[_ new-endpoints])])
(immutable-endpoint-map-range-set new-endpoints cmp)))])
(struct mutable-endpoint-map-range-set abstract-mutable-range-set (endpoints comparator)
#:omit-define-syntaxes
#:methods gen:range-set
[(define (this-endpoints this)
(mutable-endpoint-map-range-set-endpoints this))
(define (this-comparator this)
(mutable-endpoint-map-range-set-comparator this))
(define (in-range-set this #:descending? [descending? #false])
(define cmp (this-comparator this))
(for/stream ([endpoint-entry (in-sorted-map (this-endpoints this) #:descending? descending?)])
(match-define (entry lower upper) endpoint-entry)
(range-from-cuts lower upper #:comparator cmp)))
(define (range-set-comparator this)
(this-comparator this))
(define (range-set-size this)
(sorted-map-size (this-endpoints this)))
(define (range-set-contains? this value)
(endpoint-map-contains? (this-endpoints this) (this-comparator this) value))
(define (range-set-encloses? this range)
(endpoint-map-encloses? (this-endpoints this) (this-comparator this) range))
(define (range-set-intersects? this range)
(endpoint-map-intersects? (this-endpoints this) (this-comparator this) range))
(define (range-set-range-containing-or-absent this value)
(endpoint-map-range-containing-or-absent (this-endpoints this) (this-comparator this) value))
(define (range-set-span-or-absent this)
(endpoint-map-span-or-absent (this-endpoints this)))
(define (range-subset this subset-range)
TODO)]
#:methods gen:mutable-range-set
[(define (this-endpoints this)
(immutable-endpoint-map-range-set-endpoints this))
(define (this-comparator this)
(immutable-endpoint-map-range-set-comparator this))
(define/guard (range-set-add! this range)
(define cmp (this-comparator this))
(check-precondition
(equal? cmp (range-comparator range))
(name range-set-add)
"added range does not use the same comparator as the range set"
"range" range
"range comparator" (range-comparator range)
"range set comparator" cmp)
(guard (empty-range? range) then
(void))
TODO)
(define/guard (range-set-remove! this range)
(define cmp (this-comparator this))
(check-precondition
(equal? cmp (range-comparator range))
(name range-set-remove)
"removed range does not use the same comparator as the range set"
"range" range
"range comparator" (range-comparator range)
"range set comparator" cmp)
(guard (empty-range? range) then
(void))
TODO)
(define (range-set-clear! this)
(sorted-map-clear! (this-endpoints this)))])
(define (make-mutable-range-set [initial-ranges '()] #:comparator comparator)
(define ranges (sequence->vector initial-ranges))
(check-ranges-use-comparator #:who (name make-mutable-range-set) ranges comparator)
(define sorted-ranges (vector-sort ranges range<?))
(check-ranges-disjoint #:who (name make-mutable-range-set) sorted-ranges)
(define coalesced-ranges (vector-merge-adjacent sorted-ranges range-connected? range-span))
(define endpoints (make-mutable-sorted-map #:key-comparator (cut<=> comparator)))
(for ([range (in-vector coalesced-ranges)])
(sorted-map-put! endpoints (range-lower-cut range) (range-upper-cut range)))
(mutable-endpoint-map-range-set endpoints comparator))
(define (endpoint-map-contains? endpoints comparator value)
(match (endpoint-map-get-nearest-range endpoints comparator (middle-cut value))
[(== absent) #false]
[(present nearest-range) (range-contains? nearest-range value)]))
(define (endpoint-map-encloses? endpoints comparator range)
(match (endpoint-map-get-nearest-range endpoints comparator (range-lower-cut range))
[(== absent) #false]
[(present nearest-range) (range-encloses? nearest-range range)]))
(define/guard (endpoint-map-intersects? endpoints comparator range)
(define lower-cut (range-lower-cut range))
(define upper-cut (range-upper-cut range))
(guard-match (present (entry _ upper)) (sorted-map-entry-at-most endpoints upper-cut) else
#false)
(compare-infix (cut<=> (range-comparator range)) lower-cut < upper))
(define (endpoint-map-range-containing-or-absent endpoints comparator value)
(option-filter
(endpoint-map-get-nearest-range endpoints comparator (middle-cut value))
(λ (nearest-range) (range-contains? nearest-range value))))
(define (endpoint-map-span-or-absent endpoints)
TODO)
(define/guard (endpoint-map-get-nearest-range endpoints comparator cut)
(guard-match (present (entry lower upper)) (sorted-map-entry-at-most endpoints cut) else
absent)
(present (range-from-cuts lower upper #:comparator comparator)))
(define (range-set #:comparator [comparator #false] . ranges)
(check-precondition
(or comparator (not (empty? ranges)))
(name range-set)
"cannot construct an empty range set without a comparator")
(let ([comparator (or comparator (range-comparator (first ranges)))])
(sequence->range-set ranges #:comparator comparator)))
(define (sequence->range-set ranges #:comparator comparator)
(transduce ranges #:into (into-range-set comparator)))
(struct range-set-builder ([range-vector-builder #:mutable] comparator))
(define (make-range-set-builder comparator)
(range-set-builder (make-vector-builder) comparator))
(define (range-set-builder-add builder range)
(define vector-builder (vector-builder-add (range-set-builder-range-vector-builder builder) range))
(set-range-set-builder-range-vector-builder! builder vector-builder)
builder)
(define (build-range-set builder)
(define ranges (build-vector (range-set-builder-range-vector-builder builder)))
(define comparator (range-set-builder-comparator builder))
(check-ranges-use-comparator #:who (name build-range-set) ranges comparator)
(define sorted-ranges (vector-sort ranges range<?))
(check-ranges-disjoint #:who (name build-range-set) sorted-ranges)
(define coalesced-ranges (vector-merge-adjacent sorted-ranges range-connected? range-span))
(define endpoints
(for/sorted-map #:key-comparator (cut<=> comparator) ([range (in-vector coalesced-ranges)])
(entry (range-lower-cut range) (range-upper-cut range))))
(immutable-endpoint-map-range-set endpoints comparator))
(define (check-ranges-use-comparator #:who who ranges comparator)
(for ([range (in-vector ranges)])
(check-precondition
(equal? (range-comparator range) comparator)
who
"not all ranges use the same comparator"
"range" range
"range comparator" (range-comparator range)
"expected comparator" comparator)))
(define (range<? range other-range)
(equal? (compare range<=> range other-range) lesser))
(define (check-ranges-disjoint #:who who ranges)
(unless (zero? (vector-length ranges))
(for ([range (in-vector ranges)]
[next-range (in-vector ranges 1)])
(when (range-overlaps? range next-range)
(raise-arguments-error
who
"overlapping ranges not allowed"
"range" range
"next range" next-range)))))
(define (into-range-set comparator)
(define (start)
(make-range-set-builder comparator))
(make-effectful-fold-reducer
range-set-builder-add start build-range-set #:name (name into-range-set)))
(define-syntax-parse-rule (for/range-set #:comparator comparator clauses body)
#:declare comparator (expr/c #'comparator?)
#:declare body (for-body this-syntax)
#:with context this-syntax
(for/reducer/derived context (into-range-set comparator.c) clauses (~@ . body)))
(define-syntax-parse-rule (for*/range-set #:comparator comparator clauses body)
#:declare comparator (expr/c #'comparator?)
#:declare body (for-body this-syntax)
#:with context this-syntax
(for*/reducer/derived context (into-range-set comparator.c) clauses (~@ . body)))
|
d2fd774c6ac56e79872079e5d992a5d058add47374a6b7b0440a7d7e4636afee | ktakashi/sagittarius-scheme | kdfs.scm | -*- mode : scheme ; coding : utf-8 ; -*-
;;;
;;; sagittarius/crypto/kdfs.scm - KDFs
;;;
Copyright ( c ) 2022 < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
#!nounbound
(library (sagittarius crypto kdfs)
(export pbkdf-1 pbkdf-2
mac->prf-provider
hkdf
pkcs12-kdf
;; hmmmm should these be here?
pkcs12-derive-iv
pkcs12-derive-mac)
(import (rnrs)
(sagittarius crypto digests)
(rename (sagittarius crypto digests descriptors)
(tc-digest-descriptor? builtin-digest-descriptor?))
(sagittarius crypto mac)
(prefix (sagittarius crypto tomcrypt) tc:)
(util bytevector))
(define (pbkdf-1 P S c dk-len :key (digest *digest:sha-1*))
(define digest-len (digest-descriptor-digest-size digest))
(define md (make-message-digest digest))
(when (> dk-len digest-len)
(assertion-violation 'pbkdf-1 "Derived key too long"))
(let* ((buf (make-bytevector digest-len))
(dk (make-bytevector dk-len)))
(message-digest-init! md)
(message-digest-process! md P)
(message-digest-process! md S)
(message-digest-done! md buf)
(do ((i 0 (+ i 1)) (c (- c 1)))
((= i c)
(bytevector-copy! buf 0 dk 0 dk-len)
dk)
(digest-message! md buf buf))))
MAC = PRF :)
ref : Equivalence between MAC and PRF for based Constructions
and
;;
(define (mac->prf-provider scheme . opts)
(lambda (S) (apply make-mac scheme S opts)))
(define *hmac-sha1-prf* (mac->prf-provider *mac:hmac* :digest *digest:sha-1*))
(define (pbkdf-2 P S c dk-len :key (prf *hmac-sha1-prf*))
(define (compute mac generate-mac! left block-no stored buf0 buf1 out)
(bytevector-fill! buf0 0)
(bytevector-fill! buf1 0)
(bytevector-u32-set! buf1 0 block-no (endianness big))
;; block-no++
(mac-init! mac)
(mac-process! mac S)
(mac-process! mac buf1 0 4)
(let ((x (mac-done! mac buf0)))
(bytevector-copy! buf0 0 buf1 0 x)
(do ((i 1 (+ i 1)))
((= i c)
(let ((l (min x left)))
(bytevector-copy! buf1 0 out stored l)
l))
(generate-mac! buf0 buf0)
(bytevector-xor! buf1 buf1 buf0))))
(let* ((mac (prf P))
(hlen (mac-mac-size mac)))
(unless (mac? mac)
(assertion-violation 'pbkdf-2 "Invalid PRF" mac))
(when (> dk-len (* #xffffffff hlen))
(assertion-violation 'pbkdf-2 "Derived key too long"))
(let ((buf0 (make-bytevector hlen))
(buf1 (make-bytevector hlen))
(out (make-bytevector dk-len))
(generate-mac! (make-mac-generator mac)))
(let loop ((left dk-len) (block-no 1) (stored 0))
(if (zero? left)
out
(let ((l (compute mac generate-mac!
left block-no stored buf0 buf1 out)))
(loop (- left l) (+ block-no 1) (+ stored l))))))))
HKDF : RFC 5869
(define (hkdf (digest builtin-digest-descriptor?) ikm salt info dk-len)
(tc:hkdf (tc-digest-descriptor-digest digest) ikm salt info dk-len))
(define (pkcs12-kdf (digest builtin-digest-descriptor?)
pw salt iteration dk-len)
(call-tc:pkcs12-kdf digest pw salt iteration dk-len tc:*pkcs12:key-material*))
(define (pkcs12-derive-iv (digest builtin-digest-descriptor?)
pw salt iteration iv-len)
(call-tc:pkcs12-kdf digest pw salt iteration iv-len tc:*pkcs12:iv-material*))
(define (pkcs12-derive-mac (digest builtin-digest-descriptor?)
pw salt iteration len)
(call-tc:pkcs12-kdf digest pw salt iteration len tc:*pkcs12:mac-material*))
(define (call-tc:pkcs12-kdf digest pw salt iteration len purpose)
(tc:pkcs12-kdf (tc-digest-descriptor-digest digest)
(string->utf16 (string-append pw "\x0;") (endianness big))
salt iteration purpose len))
)
| null | https://raw.githubusercontent.com/ktakashi/sagittarius-scheme/791dea707ecd116670691d764b7726cc49f648be/ext/crypto/sagittarius/crypto/kdfs.scm | scheme | coding : utf-8 ; -*-
sagittarius/crypto/kdfs.scm - KDFs
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
hmmmm should these be here?
block-no++ | Copyright ( c ) 2022 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
#!nounbound
(library (sagittarius crypto kdfs)
(export pbkdf-1 pbkdf-2
mac->prf-provider
hkdf
pkcs12-kdf
pkcs12-derive-iv
pkcs12-derive-mac)
(import (rnrs)
(sagittarius crypto digests)
(rename (sagittarius crypto digests descriptors)
(tc-digest-descriptor? builtin-digest-descriptor?))
(sagittarius crypto mac)
(prefix (sagittarius crypto tomcrypt) tc:)
(util bytevector))
(define (pbkdf-1 P S c dk-len :key (digest *digest:sha-1*))
(define digest-len (digest-descriptor-digest-size digest))
(define md (make-message-digest digest))
(when (> dk-len digest-len)
(assertion-violation 'pbkdf-1 "Derived key too long"))
(let* ((buf (make-bytevector digest-len))
(dk (make-bytevector dk-len)))
(message-digest-init! md)
(message-digest-process! md P)
(message-digest-process! md S)
(message-digest-done! md buf)
(do ((i 0 (+ i 1)) (c (- c 1)))
((= i c)
(bytevector-copy! buf 0 dk 0 dk-len)
dk)
(digest-message! md buf buf))))
MAC = PRF :)
ref : Equivalence between MAC and PRF for based Constructions
and
(define (mac->prf-provider scheme . opts)
(lambda (S) (apply make-mac scheme S opts)))
(define *hmac-sha1-prf* (mac->prf-provider *mac:hmac* :digest *digest:sha-1*))
(define (pbkdf-2 P S c dk-len :key (prf *hmac-sha1-prf*))
(define (compute mac generate-mac! left block-no stored buf0 buf1 out)
(bytevector-fill! buf0 0)
(bytevector-fill! buf1 0)
(bytevector-u32-set! buf1 0 block-no (endianness big))
(mac-init! mac)
(mac-process! mac S)
(mac-process! mac buf1 0 4)
(let ((x (mac-done! mac buf0)))
(bytevector-copy! buf0 0 buf1 0 x)
(do ((i 1 (+ i 1)))
((= i c)
(let ((l (min x left)))
(bytevector-copy! buf1 0 out stored l)
l))
(generate-mac! buf0 buf0)
(bytevector-xor! buf1 buf1 buf0))))
(let* ((mac (prf P))
(hlen (mac-mac-size mac)))
(unless (mac? mac)
(assertion-violation 'pbkdf-2 "Invalid PRF" mac))
(when (> dk-len (* #xffffffff hlen))
(assertion-violation 'pbkdf-2 "Derived key too long"))
(let ((buf0 (make-bytevector hlen))
(buf1 (make-bytevector hlen))
(out (make-bytevector dk-len))
(generate-mac! (make-mac-generator mac)))
(let loop ((left dk-len) (block-no 1) (stored 0))
(if (zero? left)
out
(let ((l (compute mac generate-mac!
left block-no stored buf0 buf1 out)))
(loop (- left l) (+ block-no 1) (+ stored l))))))))
HKDF : RFC 5869
(define (hkdf (digest builtin-digest-descriptor?) ikm salt info dk-len)
(tc:hkdf (tc-digest-descriptor-digest digest) ikm salt info dk-len))
(define (pkcs12-kdf (digest builtin-digest-descriptor?)
pw salt iteration dk-len)
(call-tc:pkcs12-kdf digest pw salt iteration dk-len tc:*pkcs12:key-material*))
(define (pkcs12-derive-iv (digest builtin-digest-descriptor?)
pw salt iteration iv-len)
(call-tc:pkcs12-kdf digest pw salt iteration iv-len tc:*pkcs12:iv-material*))
(define (pkcs12-derive-mac (digest builtin-digest-descriptor?)
pw salt iteration len)
(call-tc:pkcs12-kdf digest pw salt iteration len tc:*pkcs12:mac-material*))
(define (call-tc:pkcs12-kdf digest pw salt iteration len purpose)
(tc:pkcs12-kdf (tc-digest-descriptor-digest digest)
(string->utf16 (string-append pw "\x0;") (endianness big))
salt iteration purpose len))
)
|
9f65deabd119927eaf28d2dc25d592b412fc3703baae434d9a707d10dbaf3665 | cyverse-archive/DiscoveryEnvironmentBackend | folder.clj | (ns iplant_groups.routes.domain.folder
(:use [common-swagger-api.schema :only [describe ->optional-param]])
(:require [iplant_groups.routes.domain.params :as params]
[schema.core :as s]))
(s/defschema BaseFolder
{:name
(describe String "The internal folder name.")
(s/optional-key :description)
(describe String "A brief description of the folder.")
(s/optional-key :display_extension)
(describe String "The displayable folder name extension.")})
(s/defschema Folder
(assoc BaseFolder
(s/optional-key :display_name)
(describe String "The displayable folder name.")
(s/optional-key :extension)
(describe String "The internal folder name extension.")
:id_index
(describe String "The sequential ID index number.")
:id
(describe String "The folder ID.")))
(s/defschema FolderUpdate
(-> BaseFolder
(->optional-param :name)))
(s/defschema FolderStub
(-> Folder
(->optional-param :name)
(->optional-param :id)
(->optional-param :id_index)))
(s/defschema FolderList
{:folders (describe [Folder] "The list of folders in the result set.")})
| null | https://raw.githubusercontent.com/cyverse-archive/DiscoveryEnvironmentBackend/7f6177078c1a1cb6d11e62f12cfe2e22d669635b/services/iplant-groups/src/iplant_groups/routes/domain/folder.clj | clojure | (ns iplant_groups.routes.domain.folder
(:use [common-swagger-api.schema :only [describe ->optional-param]])
(:require [iplant_groups.routes.domain.params :as params]
[schema.core :as s]))
(s/defschema BaseFolder
{:name
(describe String "The internal folder name.")
(s/optional-key :description)
(describe String "A brief description of the folder.")
(s/optional-key :display_extension)
(describe String "The displayable folder name extension.")})
(s/defschema Folder
(assoc BaseFolder
(s/optional-key :display_name)
(describe String "The displayable folder name.")
(s/optional-key :extension)
(describe String "The internal folder name extension.")
:id_index
(describe String "The sequential ID index number.")
:id
(describe String "The folder ID.")))
(s/defschema FolderUpdate
(-> BaseFolder
(->optional-param :name)))
(s/defschema FolderStub
(-> Folder
(->optional-param :name)
(->optional-param :id)
(->optional-param :id_index)))
(s/defschema FolderList
{:folders (describe [Folder] "The list of folders in the result set.")})
| |
df0d9493704d6a686561401cab757b5270dddd56166d18320c4f5b2fe002144b | BitGameEN/bitgamex | ranch_server.erl | Copyright ( c ) 2012 - 2018 , < >
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-module(ranch_server).
-behaviour(gen_server).
%% API.
-export([start_link/0]).
-export([set_new_listener_opts/5]).
-export([cleanup_listener_opts/1]).
-export([set_connections_sup/2]).
-export([get_connections_sup/1]).
-export([get_connections_sups/0]).
-export([set_listener_sup/2]).
-export([get_listener_sup/1]).
-export([get_listener_sups/0]).
-export([set_addr/2]).
-export([get_addr/1]).
-export([set_max_connections/2]).
-export([get_max_connections/1]).
-export([set_transport_options/2]).
-export([get_transport_options/1]).
-export([set_protocol_options/2]).
-export([get_protocol_options/1]).
-export([get_listener_start_args/1]).
-export([count_connections/1]).
%% gen_server.
-export([init/1]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([terminate/2]).
-export([code_change/3]).
-define(TAB, ?MODULE).
-type monitors() :: [{{reference(), pid()}, any()}].
-record(state, {
monitors = [] :: monitors()
}).
%% API.
-spec start_link() -> {ok, pid()}.
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-spec set_new_listener_opts(ranch:ref(), ranch:max_conns(), any(), any(), [any()]) -> ok.
set_new_listener_opts(Ref, MaxConns, TransOpts, ProtoOpts, StartArgs) ->
gen_server:call(?MODULE, {set_new_listener_opts, Ref, MaxConns, TransOpts, ProtoOpts, StartArgs}).
-spec cleanup_listener_opts(ranch:ref()) -> ok.
cleanup_listener_opts(Ref) ->
_ = ets:delete(?TAB, {addr, Ref}),
_ = ets:delete(?TAB, {max_conns, Ref}),
_ = ets:delete(?TAB, {trans_opts, Ref}),
_ = ets:delete(?TAB, {proto_opts, Ref}),
_ = ets:delete(?TAB, {listener_start_args, Ref}),
%% We also remove the pid of the connections supervisor.
%% Depending on the timing, it might already have been deleted
%% when we handled the monitor DOWN message. However, in some
cases when calling followed by get_connections_sup ,
%% we could end up with the pid still being returned, when we
%% expected a crash (because the listener was stopped).
%% Deleting it explictly here removes any possible confusion.
_ = ets:delete(?TAB, {conns_sup, Ref}),
%% Ditto for the listener supervisor.
_ = ets:delete(?TAB, {listener_sup, Ref}),
ok.
-spec set_connections_sup(ranch:ref(), pid()) -> ok.
set_connections_sup(Ref, Pid) ->
true = gen_server:call(?MODULE, {set_connections_sup, Ref, Pid}),
ok.
-spec get_connections_sup(ranch:ref()) -> pid().
get_connections_sup(Ref) ->
ets:lookup_element(?TAB, {conns_sup, Ref}, 2).
-spec get_connections_sups() -> [{ranch:ref(), pid()}].
get_connections_sups() ->
[{Ref, Pid} || [Ref, Pid] <- ets:match(?TAB, {{conns_sup, '$1'}, '$2'})].
-spec set_listener_sup(ranch:ref(), pid()) -> ok.
set_listener_sup(Ref, Pid) ->
true = gen_server:call(?MODULE, {set_listener_sup, Ref, Pid}),
ok.
-spec get_listener_sup(ranch:ref()) -> pid().
get_listener_sup(Ref) ->
ets:lookup_element(?TAB, {listener_sup, Ref}, 2).
-spec get_listener_sups() -> [{ranch:ref(), pid()}].
get_listener_sups() ->
[{Ref, Pid} || [Ref, Pid] <- ets:match(?TAB, {{listener_sup, '$1'}, '$2'})].
-spec set_addr(ranch:ref(), {inet:ip_address(), inet:port_number()} | {undefined, undefined}) -> ok.
set_addr(Ref, Addr) ->
gen_server:call(?MODULE, {set_addr, Ref, Addr}).
-spec get_addr(ranch:ref()) -> {inet:ip_address(), inet:port_number()} | {undefined, undefined}.
get_addr(Ref) ->
ets:lookup_element(?TAB, {addr, Ref}, 2).
-spec set_max_connections(ranch:ref(), ranch:max_conns()) -> ok.
set_max_connections(Ref, MaxConnections) ->
gen_server:call(?MODULE, {set_max_conns, Ref, MaxConnections}).
-spec get_max_connections(ranch:ref()) -> ranch:max_conns().
get_max_connections(Ref) ->
ets:lookup_element(?TAB, {max_conns, Ref}, 2).
-spec set_transport_options(ranch:ref(), any()) -> ok.
set_transport_options(Ref, TransOpts) ->
gen_server:call(?MODULE, {set_trans_opts, Ref, TransOpts}).
-spec get_transport_options(ranch:ref()) -> any().
get_transport_options(Ref) ->
ets:lookup_element(?TAB, {trans_opts, Ref}, 2).
-spec set_protocol_options(ranch:ref(), any()) -> ok.
set_protocol_options(Ref, ProtoOpts) ->
gen_server:call(?MODULE, {set_proto_opts, Ref, ProtoOpts}).
-spec get_protocol_options(ranch:ref()) -> any().
get_protocol_options(Ref) ->
ets:lookup_element(?TAB, {proto_opts, Ref}, 2).
-spec get_listener_start_args(ranch:ref()) -> [any()].
get_listener_start_args(Ref) ->
ets:lookup_element(?TAB, {listener_start_args, Ref}, 2).
-spec count_connections(ranch:ref()) -> non_neg_integer().
count_connections(Ref) ->
ranch_conns_sup:active_connections(get_connections_sup(Ref)).
%% gen_server.
init([]) ->
ConnMonitors = [{{erlang:monitor(process, Pid), Pid}, {conns_sup, Ref}} ||
[Ref, Pid] <- ets:match(?TAB, {{conns_sup, '$1'}, '$2'})],
ListenerMonitors = [{{erlang:monitor(process, Pid), Pid}, {listener_sup, Ref}} ||
[Ref, Pid] <- ets:match(?TAB, {{listener_sup, '$1'}, '$2'})],
{ok, #state{monitors=ConnMonitors++ListenerMonitors}}.
handle_call({set_new_listener_opts, Ref, MaxConns, TransOpts, ProtoOpts, StartArgs}, _, State) ->
ets:insert_new(?TAB, {{max_conns, Ref}, MaxConns}),
ets:insert_new(?TAB, {{trans_opts, Ref}, TransOpts}),
ets:insert_new(?TAB, {{proto_opts, Ref}, ProtoOpts}),
ets:insert_new(?TAB, {{listener_start_args, Ref}, StartArgs}),
{reply, ok, State};
handle_call({set_connections_sup, Ref, Pid}, _,
State=#state{monitors=Monitors}) ->
case ets:insert_new(?TAB, {{conns_sup, Ref}, Pid}) of
true ->
MonitorRef = erlang:monitor(process, Pid),
{reply, true,
State#state{monitors=[{{MonitorRef, Pid}, {conns_sup, Ref}}|Monitors]}};
false ->
{reply, false, State}
end;
handle_call({set_listener_sup, Ref, Pid}, _,
State=#state{monitors=Monitors}) ->
case ets:insert_new(?TAB, {{listener_sup, Ref}, Pid}) of
true ->
MonitorRef = erlang:monitor(process, Pid),
{reply, true,
State#state{monitors=[{{MonitorRef, Pid}, {listener_sup, Ref}}|Monitors]}};
false ->
{reply, false, State}
end;
handle_call({set_addr, Ref, Addr}, _, State) ->
true = ets:insert(?TAB, {{addr, Ref}, Addr}),
{reply, ok, State};
handle_call({set_max_conns, Ref, MaxConns}, _, State) ->
ets:insert(?TAB, {{max_conns, Ref}, MaxConns}),
ConnsSup = get_connections_sup(Ref),
ConnsSup ! {set_max_conns, MaxConns},
{reply, ok, State};
handle_call({set_trans_opts, Ref, Opts}, _, State) ->
ets:insert(?TAB, {{trans_opts, Ref}, Opts}),
{reply, ok, State};
handle_call({set_proto_opts, Ref, Opts}, _, State) ->
ets:insert(?TAB, {{proto_opts, Ref}, Opts}),
ConnsSup = get_connections_sup(Ref),
ConnsSup ! {set_opts, Opts},
{reply, ok, State};
handle_call(_Request, _From, State) ->
{reply, ignore, State}.
handle_cast(_Request, State) ->
{noreply, State}.
handle_info({'DOWN', MonitorRef, process, Pid, _},
State=#state{monitors=Monitors}) ->
{_, TypeRef} = lists:keyfind({MonitorRef, Pid}, 1, Monitors),
_ = ets:delete(?TAB, TypeRef),
Monitors2 = lists:keydelete({MonitorRef, Pid}, 1, Monitors),
{noreply, State#state{monitors=Monitors2}};
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
| null | https://raw.githubusercontent.com/BitGameEN/bitgamex/151ba70a481615379f9648581a5d459b503abe19/src/deps/ranch/src/ranch_server.erl | erlang |
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
API.
gen_server.
API.
We also remove the pid of the connections supervisor.
Depending on the timing, it might already have been deleted
when we handled the monitor DOWN message. However, in some
we could end up with the pid still being returned, when we
expected a crash (because the listener was stopped).
Deleting it explictly here removes any possible confusion.
Ditto for the listener supervisor.
gen_server. | Copyright ( c ) 2012 - 2018 , < >
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
-module(ranch_server).
-behaviour(gen_server).
-export([start_link/0]).
-export([set_new_listener_opts/5]).
-export([cleanup_listener_opts/1]).
-export([set_connections_sup/2]).
-export([get_connections_sup/1]).
-export([get_connections_sups/0]).
-export([set_listener_sup/2]).
-export([get_listener_sup/1]).
-export([get_listener_sups/0]).
-export([set_addr/2]).
-export([get_addr/1]).
-export([set_max_connections/2]).
-export([get_max_connections/1]).
-export([set_transport_options/2]).
-export([get_transport_options/1]).
-export([set_protocol_options/2]).
-export([get_protocol_options/1]).
-export([get_listener_start_args/1]).
-export([count_connections/1]).
-export([init/1]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([terminate/2]).
-export([code_change/3]).
-define(TAB, ?MODULE).
-type monitors() :: [{{reference(), pid()}, any()}].
-record(state, {
monitors = [] :: monitors()
}).
-spec start_link() -> {ok, pid()}.
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-spec set_new_listener_opts(ranch:ref(), ranch:max_conns(), any(), any(), [any()]) -> ok.
set_new_listener_opts(Ref, MaxConns, TransOpts, ProtoOpts, StartArgs) ->
gen_server:call(?MODULE, {set_new_listener_opts, Ref, MaxConns, TransOpts, ProtoOpts, StartArgs}).
-spec cleanup_listener_opts(ranch:ref()) -> ok.
cleanup_listener_opts(Ref) ->
_ = ets:delete(?TAB, {addr, Ref}),
_ = ets:delete(?TAB, {max_conns, Ref}),
_ = ets:delete(?TAB, {trans_opts, Ref}),
_ = ets:delete(?TAB, {proto_opts, Ref}),
_ = ets:delete(?TAB, {listener_start_args, Ref}),
cases when calling followed by get_connections_sup ,
_ = ets:delete(?TAB, {conns_sup, Ref}),
_ = ets:delete(?TAB, {listener_sup, Ref}),
ok.
-spec set_connections_sup(ranch:ref(), pid()) -> ok.
set_connections_sup(Ref, Pid) ->
true = gen_server:call(?MODULE, {set_connections_sup, Ref, Pid}),
ok.
-spec get_connections_sup(ranch:ref()) -> pid().
get_connections_sup(Ref) ->
ets:lookup_element(?TAB, {conns_sup, Ref}, 2).
-spec get_connections_sups() -> [{ranch:ref(), pid()}].
get_connections_sups() ->
[{Ref, Pid} || [Ref, Pid] <- ets:match(?TAB, {{conns_sup, '$1'}, '$2'})].
-spec set_listener_sup(ranch:ref(), pid()) -> ok.
set_listener_sup(Ref, Pid) ->
true = gen_server:call(?MODULE, {set_listener_sup, Ref, Pid}),
ok.
-spec get_listener_sup(ranch:ref()) -> pid().
get_listener_sup(Ref) ->
ets:lookup_element(?TAB, {listener_sup, Ref}, 2).
-spec get_listener_sups() -> [{ranch:ref(), pid()}].
get_listener_sups() ->
[{Ref, Pid} || [Ref, Pid] <- ets:match(?TAB, {{listener_sup, '$1'}, '$2'})].
-spec set_addr(ranch:ref(), {inet:ip_address(), inet:port_number()} | {undefined, undefined}) -> ok.
set_addr(Ref, Addr) ->
gen_server:call(?MODULE, {set_addr, Ref, Addr}).
-spec get_addr(ranch:ref()) -> {inet:ip_address(), inet:port_number()} | {undefined, undefined}.
get_addr(Ref) ->
ets:lookup_element(?TAB, {addr, Ref}, 2).
-spec set_max_connections(ranch:ref(), ranch:max_conns()) -> ok.
set_max_connections(Ref, MaxConnections) ->
gen_server:call(?MODULE, {set_max_conns, Ref, MaxConnections}).
-spec get_max_connections(ranch:ref()) -> ranch:max_conns().
get_max_connections(Ref) ->
ets:lookup_element(?TAB, {max_conns, Ref}, 2).
-spec set_transport_options(ranch:ref(), any()) -> ok.
set_transport_options(Ref, TransOpts) ->
gen_server:call(?MODULE, {set_trans_opts, Ref, TransOpts}).
-spec get_transport_options(ranch:ref()) -> any().
get_transport_options(Ref) ->
ets:lookup_element(?TAB, {trans_opts, Ref}, 2).
-spec set_protocol_options(ranch:ref(), any()) -> ok.
set_protocol_options(Ref, ProtoOpts) ->
gen_server:call(?MODULE, {set_proto_opts, Ref, ProtoOpts}).
-spec get_protocol_options(ranch:ref()) -> any().
get_protocol_options(Ref) ->
ets:lookup_element(?TAB, {proto_opts, Ref}, 2).
-spec get_listener_start_args(ranch:ref()) -> [any()].
get_listener_start_args(Ref) ->
ets:lookup_element(?TAB, {listener_start_args, Ref}, 2).
-spec count_connections(ranch:ref()) -> non_neg_integer().
count_connections(Ref) ->
ranch_conns_sup:active_connections(get_connections_sup(Ref)).
init([]) ->
ConnMonitors = [{{erlang:monitor(process, Pid), Pid}, {conns_sup, Ref}} ||
[Ref, Pid] <- ets:match(?TAB, {{conns_sup, '$1'}, '$2'})],
ListenerMonitors = [{{erlang:monitor(process, Pid), Pid}, {listener_sup, Ref}} ||
[Ref, Pid] <- ets:match(?TAB, {{listener_sup, '$1'}, '$2'})],
{ok, #state{monitors=ConnMonitors++ListenerMonitors}}.
handle_call({set_new_listener_opts, Ref, MaxConns, TransOpts, ProtoOpts, StartArgs}, _, State) ->
ets:insert_new(?TAB, {{max_conns, Ref}, MaxConns}),
ets:insert_new(?TAB, {{trans_opts, Ref}, TransOpts}),
ets:insert_new(?TAB, {{proto_opts, Ref}, ProtoOpts}),
ets:insert_new(?TAB, {{listener_start_args, Ref}, StartArgs}),
{reply, ok, State};
handle_call({set_connections_sup, Ref, Pid}, _,
State=#state{monitors=Monitors}) ->
case ets:insert_new(?TAB, {{conns_sup, Ref}, Pid}) of
true ->
MonitorRef = erlang:monitor(process, Pid),
{reply, true,
State#state{monitors=[{{MonitorRef, Pid}, {conns_sup, Ref}}|Monitors]}};
false ->
{reply, false, State}
end;
handle_call({set_listener_sup, Ref, Pid}, _,
State=#state{monitors=Monitors}) ->
case ets:insert_new(?TAB, {{listener_sup, Ref}, Pid}) of
true ->
MonitorRef = erlang:monitor(process, Pid),
{reply, true,
State#state{monitors=[{{MonitorRef, Pid}, {listener_sup, Ref}}|Monitors]}};
false ->
{reply, false, State}
end;
handle_call({set_addr, Ref, Addr}, _, State) ->
true = ets:insert(?TAB, {{addr, Ref}, Addr}),
{reply, ok, State};
handle_call({set_max_conns, Ref, MaxConns}, _, State) ->
ets:insert(?TAB, {{max_conns, Ref}, MaxConns}),
ConnsSup = get_connections_sup(Ref),
ConnsSup ! {set_max_conns, MaxConns},
{reply, ok, State};
handle_call({set_trans_opts, Ref, Opts}, _, State) ->
ets:insert(?TAB, {{trans_opts, Ref}, Opts}),
{reply, ok, State};
handle_call({set_proto_opts, Ref, Opts}, _, State) ->
ets:insert(?TAB, {{proto_opts, Ref}, Opts}),
ConnsSup = get_connections_sup(Ref),
ConnsSup ! {set_opts, Opts},
{reply, ok, State};
handle_call(_Request, _From, State) ->
{reply, ignore, State}.
handle_cast(_Request, State) ->
{noreply, State}.
handle_info({'DOWN', MonitorRef, process, Pid, _},
State=#state{monitors=Monitors}) ->
{_, TypeRef} = lists:keyfind({MonitorRef, Pid}, 1, Monitors),
_ = ets:delete(?TAB, TypeRef),
Monitors2 = lists:keydelete({MonitorRef, Pid}, 1, Monitors),
{noreply, State#state{monitors=Monitors2}};
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
|
fc93ad4b90635bf5ef17a1e2db0edfdcaadf948b6717a63c1172ff754482b512 | kennknowles/aspcc | tools.ml |
(** ASPCC module with the Tools object *)
open VbValues
open VbClass
open VbTypes
open Runtime
open Printf
class tools =
object(self)
inherit opaque_object
method strname = "MSWC.Tools"
method m_gets name params =
match name with
| "fileexists" ->
wrap_bool (Sys.file_exists (get_string !(arg1 "Tools.FileExists" params)))
| _ -> self # not_found name params
end
;;
(** {4 Loader Function} *)
let load runtime =
Runtime.add_class runtime
(Symbol.of_string "MSWC.Tools")
(fun () -> (new tools :> object_t));
Runtime.add_class runtime
(Symbol.of_string "Tools")
(fun () -> (new tools :> object_t));
;;
let _ =
register_module "tools" load
| null | https://raw.githubusercontent.com/kennknowles/aspcc/951a91cc21e291b1d3c750bbbca7fa79209edd08/runtime/modules/tools.ml | ocaml | * ASPCC module with the Tools object
* {4 Loader Function} |
open VbValues
open VbClass
open VbTypes
open Runtime
open Printf
class tools =
object(self)
inherit opaque_object
method strname = "MSWC.Tools"
method m_gets name params =
match name with
| "fileexists" ->
wrap_bool (Sys.file_exists (get_string !(arg1 "Tools.FileExists" params)))
| _ -> self # not_found name params
end
;;
let load runtime =
Runtime.add_class runtime
(Symbol.of_string "MSWC.Tools")
(fun () -> (new tools :> object_t));
Runtime.add_class runtime
(Symbol.of_string "Tools")
(fun () -> (new tools :> object_t));
;;
let _ =
register_module "tools" load
|
ac0a9bd26ee3554f93cdbd498be995eeeea0d9a3feca7144ca9d68ee696e650b | cram2/cram | atomic-action-designators.lisp | ;;;
Copyright ( c ) 2016 , < >
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions are met:
;;;
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
* Neither the name of the Institute for Artificial Intelligence/
;;; Universitaet Bremen nor the names of its contributors may be used to
;;; endorse or promote products derived from this software without
;;; specific prior written permission.
;;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
;;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
;;; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
;;; SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
;;; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
;;; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
;;; POSSIBILITY OF SUCH DAMAGE.
(in-package :boxy-plans)
(def-fact-group boxy-atomic-actions (desig:action-grounding)
(<- (desig:action-grounding ?action-designator (wiggle ?left-poses ?right-poses))
(property ?action-designator (:type :pushing))
(once (or (property ?action-designator (:left-poses ?left-poses))
(equal ?left-poses nil)))
(once (or (property ?action-designator (:right-poses ?right-poses))
(equal ?right-poses nil))))
(<- (desig:action-grounding ?action-designator (cram-inspect ?augmented-designator))
(property ?action-designator (:type :inspecting))
(property ?action-designator (:object ?object-designator))
(property ?action-designator (:for ?for-value))
(-> (lisp-type ?for-value desig:object-designator)
(equal ?description-to-add (:for (:object)))
(equal ?description-to-add (:for (?for-value))))
(desig:desig-description ?object-designator ?properties)
(equal ?augmented-description (?description-to-add . ?properties))
(desig:designator :object ?augmented-description ?augmented-designator)
(-> (lisp-type ?for-value desig:object-designator)
(and (desig:current-designator ?for-value ?for-object-designator)
;; (property ?for-object-designator (:type ?for-object-type))
(prolog:slot-value ?for-object-designator desig:quantifier ?for-quantifier)
(prolog:slot-value ?augmented-designator desig:quantifier ?for-quantifier))
(true)))
;; (<- (desig:action-grounding ?action-designator (perceive :inspecting ?object-designator))
;; (property ?action-designator (:type :inspecting))
;; (property ?action-designator (:object ?object-designator)))
)
| null | https://raw.githubusercontent.com/cram2/cram/dcb73031ee944d04215bbff9e98b9e8c210ef6c5/cram_boxy/cram_boxy_plans/src/atomic-action-designators.lisp | lisp |
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Universitaet Bremen nor the names of its contributors may be used to
endorse or promote products derived from this software without
specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
(property ?for-object-designator (:type ?for-object-type))
(<- (desig:action-grounding ?action-designator (perceive :inspecting ?object-designator))
(property ?action-designator (:type :inspecting))
(property ?action-designator (:object ?object-designator))) | Copyright ( c ) 2016 , < >
* Neither the name of the Institute for Artificial Intelligence/
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
(in-package :boxy-plans)
(def-fact-group boxy-atomic-actions (desig:action-grounding)
(<- (desig:action-grounding ?action-designator (wiggle ?left-poses ?right-poses))
(property ?action-designator (:type :pushing))
(once (or (property ?action-designator (:left-poses ?left-poses))
(equal ?left-poses nil)))
(once (or (property ?action-designator (:right-poses ?right-poses))
(equal ?right-poses nil))))
(<- (desig:action-grounding ?action-designator (cram-inspect ?augmented-designator))
(property ?action-designator (:type :inspecting))
(property ?action-designator (:object ?object-designator))
(property ?action-designator (:for ?for-value))
(-> (lisp-type ?for-value desig:object-designator)
(equal ?description-to-add (:for (:object)))
(equal ?description-to-add (:for (?for-value))))
(desig:desig-description ?object-designator ?properties)
(equal ?augmented-description (?description-to-add . ?properties))
(desig:designator :object ?augmented-description ?augmented-designator)
(-> (lisp-type ?for-value desig:object-designator)
(and (desig:current-designator ?for-value ?for-object-designator)
(prolog:slot-value ?for-object-designator desig:quantifier ?for-quantifier)
(prolog:slot-value ?augmented-designator desig:quantifier ?for-quantifier))
(true)))
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.