_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
827314dddeebad43cbbde6d6db8163e68e7dcb2a6f1e57657b57868f64910123 | zclj/test.check.insights | coverage.cljc | (ns test.check.insights.coverage)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Inverse normal cumulative distribution function (same as QuickCheck)
;; #L606
;; Algorithm taken from
;; http://home.online.no/~pjacklam/notes/invnorm/
;; Accurate to about one part in 10^9.
;; The 'erf' package uses the same algorithm, but with an extra step
;; to get a fully accurate result, which we skip because it requires
;; the 'erfc' function.
;; Rational-approximation coefficients for Acklam's inverse normal CDF.
;; p-low/p-high split (0,1) into lower tail, central region and upper
;; tail; each region gets its own rational approximation below.
(def p-low 0.02425)
(def p-high (- 1 p-low))
;; Central-region numerator coefficients.
(def a1 -3.969683028665376e+01)
(def a2 2.209460984245205e+02)
(def a3 -2.759285104469687e+02)
(def a4 1.383577518672690e+02)
(def a5 -3.066479806614716e+01)
(def a6 2.506628277459239e+00)
;; Central-region denominator coefficients.
(def b1 -5.447609879822406e+01)
(def b2 1.615858368580409e+02)
(def b3 -1.556989798598866e+02)
(def b4 6.680131188771972e+01)
(def b5 -1.328068155288572e+01)
;; Tail numerator coefficients (shared by both tails).
(def c1 -7.784894002430293e-03)
(def c2 -3.223964580411365e-01)
(def c3 -2.400758277161838e+00)
(def c4 -2.549732539343734e+00)
(def c5 4.374664141464968e+00)
(def c6 2.938163982698783e+00)
;; Tail denominator coefficients (shared by both tails).
(def d1 7.784695709041462e-03)
(def d2 3.224671290700398e-01)
(def d3 2.445134137142996e+00)
(def d4 3.754408661907416e+00)
(defn invnormcdf
  "Inverse of the standard normal CDF (probit), computed with Acklam's
  rational approximation; accurate to roughly one part in 1e9.
  Assumes p lies strictly in (0, 1) — TODO confirm callers never pass
  0 or 1 (Math/log would yield -Infinity)."
  [p]
  (cond
    ;; Lower tail.
    (< p p-low)
    (let [q (Math/sqrt (* -2 (Math/log p)))]
      (/ (+ (* (+ (* (+ (* (+ (* (+ (* c1 q) c2) q) c3) q) c4) q) c5) q) c6)
         (+ (* (+ (* (+ (* (+ (* d1 q) d2) q) d3) q) d4) q) 1)))
    ;; Central region.
    (<= p p-high)
    (let [q (- p 0.5)
          r (* q q)]
      (/ (* (+ (* (+ (* (+ (* (+ (* (+ (* a1 r) a2) r) a3) r) a4) r) a5) r) a6) q)
         (+ (* (+ (* (+ (* (+ (* (+ (* b1 r) b2) r) b3) r) b4) r) b5) r) 1)))
    ;; Upper tail: mirror of the lower tail, negated.
    :else
    (let [q (Math/sqrt (* -2 (Math/log (- 1 p))))]
      (-
       (/ (+ (* (+ (* (+ (* (+ (* (+ (* c1 q) c2) q) c3) q) c4) q) c5) q) c6)
          (+ (* (+ (* (+ (* (+ (* d1 q) d2) q) d3) q) d4) q) 1))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Wilson scoring
(defn wilson
  "Wilson score interval bound for k successes in n trials with normal
  quantile z:
    (p + z²/2n + z·sqrt(p(1-p)/n + z²/4n²)) / (1 + z²/n)
  where p = k/n. A negative z gives the lower bound, a positive z the
  upper bound. Assumes n > 0 (callers guard on (zero? n))."
  [k n z]
  ;; nf mirrors QuickCheck's `fromIntegral n`; in Clojure it is just n.
  (let [nf n
        p (/ k n)]
    (/
     (+ p
        (/ (* z z) (* 2 nf))
        (* z
           (Math/sqrt
            (+ (/ (* p (- 1 p)) nf)
               (/ (* z z) (* 4 nf nf))))))
     (+ 1 (/ (* z z) nf)))))
(defn wilson-low
  "Lower Wilson confidence bound for k successes out of n trials at
  two-sided significance level a (the a/2 normal quantile is negative)."
  [k n a]
  (let [z (invnormcdf (/ a 2))]
    (wilson k n z)))
(defn wilson-high
  "Upper Wilson confidence bound for k successes out of n trials at
  two-sided significance level a (the 1 - a/2 normal quantile is positive)."
  [k n a]
  (let [z (invnormcdf (- 1 (/ a 2)))]
    (wilson k n z)))
(defn sufficiently-covered?
  "True when the lower Wilson bound on the observed coverage k/n is at
  least tolerance * p, i.e. the target ratio p is statistically met at
  the given certainty. Zero trials never count as covered."
  [{:keys [certainty tolerance]} n k p]
  (if (zero? n)
    false
    (>= (wilson-low k n (/ 1 certainty))
        (* tolerance p))))
(defn insufficiently-covered?
  "True when coverage k/n statistically misses the target ratio p.
  With a certainty, compares the upper Wilson bound against p; without
  one, falls back to the raw count check k < p*n. Zero trials => false."
  [certainty n k p]
  (cond
    (zero? n) false
    certainty (< (wilson-high k n (/ 1 certainty)) p)
    :else     (< k (* p n))))
;; Used when check-coverage is called without an explicit confidence:
;; demand ~1e9:1 certainty and accept 90% of the target ratio.
(def default-confidence
  {:certainty 1.0E9
   :tolerance 0.9})
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Coverage checks
(defn check-coverage
  "Statistical verdict for a coverage class hit class-n times over
  tests-n trials against target ratio p. Returns a map with
  ::sufficiently-covered? and ::insufficiently-covered?; defaults to
  default-confidence when no confidence map is supplied."
  ([tests-n class-n p]
   (check-coverage tests-n class-n p default-confidence))
  ([tests-n class-n p confidence]
   (let [sufficient?   (sufficiently-covered? confidence tests-n class-n p)
         insufficient? (insufficiently-covered?
                        (:certainty confidence) tests-n class-n p)]
     {::sufficiently-covered?   sufficient?
      ::insufficiently-covered? insufficient?})))
(defn apply-coverage
  "For every coverage class in coverage-m, count how many of the
  generated argument vectors in args satisfy that class's
  :test.check.insights/classify fn (applied to the unrolled args).
  Returns {class-key {::count n}} for every class."
  [coverage-m args]
  (reduce-kv
   (fn [result class-key {:keys [test.check.insights/classify]}]
     (let [hits (count (filter (fn [arg] (apply classify arg)) args))]
       (assoc result class-key {::count hits})))
   {}
   coverage-m))
(defn evaluate-coverage
  "Combine the observed counts in coverage (as produced by
  apply-coverage) with each class's :test.check.insights/cover target
  percentage and attach the statistical verdict from check-coverage.
  Returns {class-key {::count .. ::target-% .. ::sufficiently-covered? ..
  ::insufficiently-covered? ..}}."
  [coverage-m coverage number-of-tests]
  (reduce-kv
   (fn [result class-key {:keys [test.check.insights/cover]}]
     (let [observed (get coverage class-key)
           verdict  (check-coverage number-of-tests
                                    (::count observed)
                                    (/ cover 100))]
       (assoc result class-key
              (merge observed {::target-% cover} verdict))))
   {}
   coverage-m))
(defn filter-k
  "Keep only the [key result-map] entries of eval-result whose result
  map holds a truthy value under k. Returns a vector of entries."
  [k eval-result]
  (filterv (comp #(get % k) val) eval-result))
;; Convenience filters over an evaluated coverage report: keep only the
;; classes that did / did not statistically reach their target.
(def filter-sufficient (partial filter-k ::sufficiently-covered?))
(def filter-insufficient (partial filter-k ::insufficiently-covered?))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Reporting
(defn report-coverage
  "Run every coverage category in coverage-categories against the
  generated args and return a vector of evaluated reports, one per
  category (observed counts merged with statistical verdicts)."
  [coverage-categories args]
  (mapv
   (fn [category]
     (let [observed  (apply-coverage category args)
           evaluated (evaluate-coverage category observed (count args))]
       (merge observed evaluated)))
   coverage-categories))
(defn ->%
  "Percentage of nom in denom, as a double."
  [nom denom]
  (let [ratio (/ nom denom)]
    (* 100 (double ratio))))
(defn humanize-coverage
  "Translate one evaluated coverage entry into human-readable,
  unqualified percentages: observed coverage vs. target coverage."
  [coverage total-count]
  (let [observed (::count coverage)
        target   (::target-% coverage)]
    {:test.check.insights/coverage        (->% observed total-count)
     :test.check.insights/target-coverage target}))
(defn summarize-report
  "Collapse one evaluated coverage report into human-readable form.
  Percentages are computed against the total number of classified
  values; classes whose coverage is not statistically sufficient are
  collected under :test.check.insights/statistically-failed (omitted
  when every class passed)."
  [report]
  (let [total-count  (reduce + (map ::count (vals report)))
        failed       (into #{}
                           (for [[k v] report
                                 :when (not (::sufficiently-covered? v))]
                             k))
        human-report (into {}
                           (for [[k coverage] report]
                             [k (humanize-coverage coverage total-count)]))]
    (if (seq failed)
      (assoc human-report :test.check.insights/statistically-failed failed)
      human-report)))
(defn humanize-report
  "Humanize either a single evaluated report (a map) or a sequence of
  them, preserving the input's shape (map in, map out; seq in, vector out)."
  [coverage-reports]
  (if-not (map? coverage-reports)
    (mapv summarize-report coverage-reports)
    (summarize-report coverage-reports)))
;; REPL scratchpad (dev-time examples; never evaluated at load time).
;; NOTE(review): these example maps use ::coverage-count and
;; ::target-coverage-%, but the code above reads ::count and ::target-%
;; — the examples look stale and would not round-trip through
;; humanize-coverage as written; verify before relying on them.
(comment
  (def coverage
    {::sufficiently-covered? false
     ::insufficiently-covered? false
     ::coverage-count 2
     ::target-coverage-% 50})
  (humanize-coverage coverage 10)
  (def coverage-reports
    [{:one coverage
      :two coverage}
     {:three coverage}])
  (humanize-report coverage-reports)
  )
| null | https://raw.githubusercontent.com/zclj/test.check.insights/8a91b1c4094f74768ff79ee938b98f13f07f1c9a/src/test/check/insights/coverage.cljc | clojure |
#L606
Algorithm taken from
/~pjacklam/notes/invnorm/
The 'erf' package uses the same algorithm, but with an extra step
to get a fully accurate result, which we skip because it requires
the 'erfc' function.
Coverage checks
Reporting | (ns test.check.insights.coverage)
Inverse normal cumulative distribution function ( same as QuickCheck )
Accurate to about one part in 10 ^ 9 .
(def p-low 0.02425)
(def p-high (- 1 p-low))
(def a1 -3.969683028665376e+01)
(def a2 2.209460984245205e+02)
(def a3 -2.759285104469687e+02)
(def a4 1.383577518672690e+02)
(def a5 -3.066479806614716e+01)
(def a6 2.506628277459239e+00)
(def b1 -5.447609879822406e+01)
(def b2 1.615858368580409e+02)
(def b3 -1.556989798598866e+02)
(def b4 6.680131188771972e+01)
(def b5 -1.328068155288572e+01)
(def c1 -7.784894002430293e-03)
(def c2 -3.223964580411365e-01)
(def c3 -2.400758277161838e+00)
(def c4 -2.549732539343734e+00)
(def c5 4.374664141464968e+00)
(def c6 2.938163982698783e+00)
(def d1 7.784695709041462e-03)
(def d2 3.224671290700398e-01)
(def d3 2.445134137142996e+00)
(def d4 3.754408661907416e+00)
(defn invnormcdf
[p]
(cond
(< p p-low)
(let [q (Math/sqrt (* -2 (Math/log p)))]
(/ (+ (* (+ (* (+ (* (+ (* (+ (* c1 q) c2) q) c3) q) c4) q) c5) q) c6)
(+ (* (+ (* (+ (* (+ (* d1 q) d2) q) d3) q) d4) q) 1)))
(<= p p-high)
(let [q (- p 0.5)
r (* q q)]
(/ (* (+ (* (+ (* (+ (* (+ (* (+ (* a1 r) a2) r) a3) r) a4) r) a5) r) a6) q)
(+ (* (+ (* (+ (* (+ (* (+ (* b1 r) b2) r) b3) r) b4) r) b5) r) 1)))
:else
(let [q (Math/sqrt (* -2 (Math/log (- 1 p))))]
(-
(/ (+ (* (+ (* (+ (* (+ (* (+ (* c1 q) c2) q) c3) q) c4) q) c5) q) c6)
(+ (* (+ (* (+ (* (+ (* d1 q) d2) q) d3) q) d4) q) 1))))))
Wilson scoring
(defn wilson
[k n z]
(let [nf n
p (/ k n)]
(/
(+ p
(/ (* z z) (* 2 nf))
(* z
(Math/sqrt
(+ (/ (* p (- 1 p)) nf)
(/ (* z z) (* 4 nf nf))))))
(+ 1 (/ (* z z) nf)))))
(defn wilson-low
[k n a]
(wilson k n (invnormcdf (/ a 2))))
(defn wilson-high
[k n a]
(wilson k n (invnormcdf (- 1 (/ a 2)))))
(defn sufficiently-covered?
[{:keys [certainty tolerance]} n k p]
(if-not (zero? n)
(>= (wilson-low k n (/ 1 certainty))
(* tolerance p))
false))
(defn insufficiently-covered?
[certainty n k p]
(if-not (zero? n)
(if certainty
(< (wilson-high k n (/ 1 certainty)) p)
(< k (* p n)))
false))
(def default-confidence
{:certainty 1.0E9
:tolerance 0.9})
(defn check-coverage
([tests-n class-n p]
(check-coverage tests-n class-n p default-confidence))
([tests-n class-n p confidence]
{::sufficiently-covered?
(sufficiently-covered? confidence tests-n class-n p)
::insufficiently-covered?
(insufficiently-covered? (:certainty confidence) tests-n class-n p)}))
(defn apply-coverage
[coverage-m args]
(reduce-kv
(fn [acc k {:keys [test.check.insights/classify]}]
(let [classification
(mapv
(fn [arg]
(apply classify arg))
args)]
(assoc acc k {::count (count (filter identity classification))})))
{}
coverage-m))
(defn evaluate-coverage
[coverage-m coverage number-of-tests]
(reduce-kv
(fn [acc k {:keys [test.check.insights/cover]}]
(let [coverage-result
(check-coverage
number-of-tests (get-in coverage [k ::count]) (/ cover 100))]
(merge
acc
{k (merge (get coverage k) {::target-% cover} coverage-result)})))
{}
coverage-m))
(defn filter-k
[k eval-result]
(filterv
(fn [result]
(get (val result) k))
eval-result))
(def filter-sufficient (partial filter-k ::sufficiently-covered?))
(def filter-insufficient (partial filter-k ::insufficiently-covered?))
(defn report-coverage
[coverage-categories args]
(reduce
(fn [acc coverage-category]
(let [coverage-result (apply-coverage coverage-category args)
evaluated-result
(evaluate-coverage coverage-category coverage-result (count args))]
(conj acc (merge coverage-result evaluated-result))))
[]
coverage-categories))
(defn ->%
[nom denom]
(* 100 (double (/ nom denom))))
(defn humanize-coverage
[coverage total-count]
{:test.check.insights/coverage (->% (::count coverage) total-count)
:test.check.insights/target-coverage (::target-% coverage)})
(defn summarize-report
[report]
(let [total-count (reduce + (map ::count (vals report)))
failed (reduce-kv
(fn [acc k v]
(if (not (::sufficiently-covered? v))
(conj acc k)
acc))
#{}
report)
human-report (reduce-kv
(fn [acc k coverage]
(assoc acc k (humanize-coverage coverage total-count)))
{}
report)]
(if (seq failed)
(assoc human-report :test.check.insights/statistically-failed failed)
human-report)))
(defn humanize-report
[coverage-reports]
(if (map? coverage-reports)
(summarize-report coverage-reports)
(mapv summarize-report coverage-reports)))
(comment
(def coverage
{::sufficiently-covered? false
::insufficiently-covered? false
::coverage-count 2
::target-coverage-% 50})
(humanize-coverage coverage 10)
(def coverage-reports
[{:one coverage
:two coverage}
{:three coverage}])
(humanize-report coverage-reports)
)
|
f45ce83a565a48c82a7720197a6ef18dd1c21bf1953864fab2cd9e9d69bcdb77 | Incubaid/arakoon | log_extra.ml |
Copyright ( 2010 - 2014 ) INCUBAID BVBA
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright (2010-2014) INCUBAID BVBA
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
let option2s f = function
| None -> "None"
| Some v -> "Some (\"" ^ String.escaped (f v ) ^ "\")"
let string_option2s = option2s (fun s -> s)
let int_option2s = option2s string_of_int
let p_option = string_option2s
let list2s e_to_s list =
let inner =
List.fold_left (fun acc a -> acc ^ (e_to_s a) ^ ";") "" list
in "[" ^ inner ^ "]"
let log_o o x =
let k s =
let os = o # to_string () in
Client_log.debug (os ^": " ^ s)
in
Printf.ksprintf k x
| null | https://raw.githubusercontent.com/Incubaid/arakoon/43a8d0b26e4876ef91d9657149f105c7e57e0cb0/src/tools/log_extra.ml | ocaml |
Copyright ( 2010 - 2014 ) INCUBAID BVBA
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright (2010-2014) INCUBAID BVBA
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
let option2s f = function
| None -> "None"
| Some v -> "Some (\"" ^ String.escaped (f v ) ^ "\")"
let string_option2s = option2s (fun s -> s)
let int_option2s = option2s string_of_int
let p_option = string_option2s
let list2s e_to_s list =
let inner =
List.fold_left (fun acc a -> acc ^ (e_to_s a) ^ ";") "" list
in "[" ^ inner ^ "]"
let log_o o x =
let k s =
let os = o # to_string () in
Client_log.debug (os ^": " ^ s)
in
Printf.ksprintf k x
| |
32b30ffdc3d73153adaf672f9dab5cb5e37ceaccce0016eaa69b7b94ea04f366 | tweag/ormolu | list-notation-3-out.hs | foo =
reportSDoc "tc.cc" 30 $ sep $ do
(prettyTCM q <+> " before compilation") : do
map (prettyTCM . map unArg . clPats) cls
foo =
reportSDoc "tc.cc" 30 $ sep $ do
(prettyTCM q <+> " before compilation")
: do
map (prettyTCM . map unArg . clPats) cls
| null | https://raw.githubusercontent.com/tweag/ormolu/1f63136d047205f95b7d3c0f6aa34c34bb29ac7f/data/examples/declaration/value/function/list-notation-3-out.hs | haskell | foo =
reportSDoc "tc.cc" 30 $ sep $ do
(prettyTCM q <+> " before compilation") : do
map (prettyTCM . map unArg . clPats) cls
foo =
reportSDoc "tc.cc" 30 $ sep $ do
(prettyTCM q <+> " before compilation")
: do
map (prettyTCM . map unArg . clPats) cls
| |
6fb28c0997160ea0923a868bfff9c15505f92215b7939c1ce32d9f971244208f | jfeser/castor | simple_tactics.ml | open Castor
open Ast
open Collections
module A = Abslayout
module Config = struct
module type S = sig
include Ops.Config.S
end
end
module Make (Config : Config.S) = struct
open Config
open Ops.Make (Config)
let row_store r =
(* Relation has no free variables that are bound at runtime. *)
if Is_serializable.is_static ~params r then
let scope = Fresh.name Global.fresh "s%d" in
let scalars =
Schema.schema r |> Schema.scoped scope |> List.map ~f:A.scalar_name
in
Some (A.list (strip_meta r) scope (A.tuple scalars Cross))
else None
let row_store =
of_func_pre row_store ~pre:Is_serializable.annotate_stage
~name:"to-row-store"
end
| null | https://raw.githubusercontent.com/jfeser/castor/e9f394e9c0984300f71dc77b5a457ae4e4faa226/eopt/simple_tactics.ml | ocaml | Relation has no free variables that are bound at runtime. | open Castor
open Ast
open Collections
module A = Abslayout
module Config = struct
module type S = sig
include Ops.Config.S
end
end
module Make (Config : Config.S) = struct
open Config
open Ops.Make (Config)
let row_store r =
if Is_serializable.is_static ~params r then
let scope = Fresh.name Global.fresh "s%d" in
let scalars =
Schema.schema r |> Schema.scoped scope |> List.map ~f:A.scalar_name
in
Some (A.list (strip_meta r) scope (A.tuple scalars Cross))
else None
let row_store =
of_func_pre row_store ~pre:Is_serializable.annotate_stage
~name:"to-row-store"
end
|
c46056f874e579cd7a3849c0144c8bf2d6ca54061f90146d7e21ee82ebf62aa4 | sionescu/iolib | gray-streams.lisp | ;;;; -*- Mode: Lisp; indent-tabs-mode: nil -*-
;;;
;;; --- GRAY stream mixin.
;;;
(in-package :iolib/common-lisp)
(defclass trivial-gray-stream-mixin ()
((%open :initform t)))
(defmethod close ((s trivial-gray-stream-mixin) &key abort)
(declare (ignore abort))
(prog1 (slot-value s '%open)
(setf (slot-value s '%open) nil)))
(defmethod open-stream-p ((s trivial-gray-stream-mixin))
(slot-value s '%open))
(defgeneric stream-read-sequence
(stream sequence start end &key &allow-other-keys))
(defgeneric stream-write-sequence
(stream sequence start end &key &allow-other-keys))
(defgeneric stream-file-position (stream))
(defgeneric (setf stream-file-position) (newval stream))
(defmethod stream-write-string
((stream trivial-gray-stream-mixin) seq &optional start end)
(stream-write-sequence stream seq (or start 0) (or end (length seq))))
;; Implementations should provide this default method, I believe, but
;; at least sbcl and allegro don't.
(defmethod stream-terpri ((stream trivial-gray-stream-mixin))
(write-char #\newline stream))
(defmethod stream-file-position ((stream trivial-gray-stream-mixin))
nil)
(defmethod (setf stream-file-position)
(newval (stream trivial-gray-stream-mixin))
(declare (ignore newval))
nil)
#+allegro
(progn
(defmethod excl:stream-read-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-read-sequence s seq (or start 0) (or end (length seq))))
(defmethod excl:stream-write-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-write-sequence s seq (or start 0) (or end (length seq)))))
#+cmu
(progn
(defmethod ext:stream-read-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-read-sequence s seq (or start 0) (or end (length seq))))
(defmethod ext:stream-write-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-write-sequence s seq (or start 0) (or end (length seq)))))
#+lispworks
(progn
(defmethod stream:stream-read-sequence
((s trivial-gray-stream-mixin) seq start end)
(stream-read-sequence s seq start end))
(defmethod stream:stream-write-sequence
((s trivial-gray-stream-mixin) seq start end)
(stream-write-sequence s seq start end))
(defmethod stream:stream-file-position ((stream trivial-gray-stream-mixin))
(stream-file-position stream))
(defmethod (setf stream:stream-file-position)
(newval (stream trivial-gray-stream-mixin))
(setf (stream-file-position stream) newval)))
#+openmcl
(progn
(defmethod ccl:stream-read-vector
((s trivial-gray-stream-mixin) seq start end)
(stream-read-sequence s seq start end))
(defmethod ccl:stream-write-vector
((s trivial-gray-stream-mixin) seq start end)
(stream-write-sequence s seq start end)))
#+clisp
(eval-when (:compile-toplevel :load-toplevel :execute)
(let* ((pkg (find-package :gray))
(sym (and pkg (find-symbol (string '#:stream-read-sequence) pkg))))
(unless (and sym (fboundp sym))
(error "Your CLISP does not have ~A and is therefore unsupported"
"gray:stream-read-sequence"))))
#+clisp
(progn
(defmethod gray:stream-read-sequence
((s trivial-gray-stream-mixin) seq &key start end)
(stream-read-sequence s seq (or start 0) (or end (length seq))))
(defmethod gray:stream-write-sequence
((s trivial-gray-stream-mixin) seq &key start end)
(stream-write-sequence s seq (or start 0) (or end (length seq))))
(defmethod gray:stream-position ((stream trivial-gray-stream-mixin) position)
(if position
(setf (stream-file-position stream) position)
(stream-file-position stream))))
#+sbcl
(progn
(defmethod sb-gray:stream-read-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-read-sequence s seq (or start 0) (or end (length seq))))
(defmethod sb-gray:stream-write-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-write-sequence s seq (or start 0) (or end (length seq))))
SBCL extension :
(defmethod sb-gray:stream-line-length ((stream trivial-gray-stream-mixin))
80))
#+ecl
(progn
(defmethod gray:stream-read-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-read-sequence s seq (or start 0) (or end (length seq))))
(defmethod gray:stream-write-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-write-sequence s seq (or start 0) (or end (length seq)))))
| null | https://raw.githubusercontent.com/sionescu/iolib/dac715c81db55704db623d8b2cfc399ebcf6175f/src/new-cl/gray-streams.lisp | lisp | -*- Mode: Lisp; indent-tabs-mode: nil -*-
--- GRAY stream mixin.
Implementations should provide this default method, I believe, but
at least sbcl and allegro don't. |
(in-package :iolib/common-lisp)
(defclass trivial-gray-stream-mixin ()
((%open :initform t)))
(defmethod close ((s trivial-gray-stream-mixin) &key abort)
(declare (ignore abort))
(prog1 (slot-value s '%open)
(setf (slot-value s '%open) nil)))
(defmethod open-stream-p ((s trivial-gray-stream-mixin))
(slot-value s '%open))
(defgeneric stream-read-sequence
(stream sequence start end &key &allow-other-keys))
(defgeneric stream-write-sequence
(stream sequence start end &key &allow-other-keys))
(defgeneric stream-file-position (stream))
(defgeneric (setf stream-file-position) (newval stream))
(defmethod stream-write-string
((stream trivial-gray-stream-mixin) seq &optional start end)
(stream-write-sequence stream seq (or start 0) (or end (length seq))))
(defmethod stream-terpri ((stream trivial-gray-stream-mixin))
(write-char #\newline stream))
(defmethod stream-file-position ((stream trivial-gray-stream-mixin))
nil)
(defmethod (setf stream-file-position)
(newval (stream trivial-gray-stream-mixin))
(declare (ignore newval))
nil)
#+allegro
(progn
(defmethod excl:stream-read-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-read-sequence s seq (or start 0) (or end (length seq))))
(defmethod excl:stream-write-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-write-sequence s seq (or start 0) (or end (length seq)))))
#+cmu
(progn
(defmethod ext:stream-read-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-read-sequence s seq (or start 0) (or end (length seq))))
(defmethod ext:stream-write-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-write-sequence s seq (or start 0) (or end (length seq)))))
#+lispworks
(progn
(defmethod stream:stream-read-sequence
((s trivial-gray-stream-mixin) seq start end)
(stream-read-sequence s seq start end))
(defmethod stream:stream-write-sequence
((s trivial-gray-stream-mixin) seq start end)
(stream-write-sequence s seq start end))
(defmethod stream:stream-file-position ((stream trivial-gray-stream-mixin))
(stream-file-position stream))
(defmethod (setf stream:stream-file-position)
(newval (stream trivial-gray-stream-mixin))
(setf (stream-file-position stream) newval)))
#+openmcl
(progn
(defmethod ccl:stream-read-vector
((s trivial-gray-stream-mixin) seq start end)
(stream-read-sequence s seq start end))
(defmethod ccl:stream-write-vector
((s trivial-gray-stream-mixin) seq start end)
(stream-write-sequence s seq start end)))
#+clisp
(eval-when (:compile-toplevel :load-toplevel :execute)
(let* ((pkg (find-package :gray))
(sym (and pkg (find-symbol (string '#:stream-read-sequence) pkg))))
(unless (and sym (fboundp sym))
(error "Your CLISP does not have ~A and is therefore unsupported"
"gray:stream-read-sequence"))))
#+clisp
(progn
(defmethod gray:stream-read-sequence
((s trivial-gray-stream-mixin) seq &key start end)
(stream-read-sequence s seq (or start 0) (or end (length seq))))
(defmethod gray:stream-write-sequence
((s trivial-gray-stream-mixin) seq &key start end)
(stream-write-sequence s seq (or start 0) (or end (length seq))))
(defmethod gray:stream-position ((stream trivial-gray-stream-mixin) position)
(if position
(setf (stream-file-position stream) position)
(stream-file-position stream))))
#+sbcl
(progn
(defmethod sb-gray:stream-read-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-read-sequence s seq (or start 0) (or end (length seq))))
(defmethod sb-gray:stream-write-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-write-sequence s seq (or start 0) (or end (length seq))))
SBCL extension :
(defmethod sb-gray:stream-line-length ((stream trivial-gray-stream-mixin))
80))
#+ecl
(progn
(defmethod gray:stream-read-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-read-sequence s seq (or start 0) (or end (length seq))))
(defmethod gray:stream-write-sequence
((s trivial-gray-stream-mixin) seq &optional start end)
(stream-write-sequence s seq (or start 0) (or end (length seq)))))
|
48cc6082723432e2098770993822904eec0b03cebbe8bfa1c76f777d60cacff5 | tmfg/mmtis-national-access-point | email_notification_settings.cljs | (ns ote.app.controller.email-notification-settings
"Controller for email notification settings"
(:require [tuck.core :as tuck :refer-macros [define-event]]
[ote.communication :as comm]
[ote.app.routes :as routes]
[ote.db.transit :as transit]
[ote.ui.form :as form]
[ote.localization :refer [tr]]
[ote.app.controller.common :refer [->ServerError]]))
(declare ->LoadData ->RegionsResponse ->UserNotificationsResponse ->UpdateSettings)
;; On Navigate to :email-settings -> load data
(defmethod routes/on-navigate-event :email-settings [_ app]
(->LoadData))
(defn fill-selected-regions
"Fill selected regions if no settings saved"
[response]
(if (get-in response [:email-settings :user-notifications :ote.db.user-notifications/created-by])
;; User have saved settings - so no need to do anything
response
;; No settings in db, so act like all regions are selected
(assoc-in response [:email-settings :user-notifications :ote.db.user-notifications/finnish-regions]
(map #(:id %) (get-in response [:email-settings :regions])))))
(defn load-email-notifications-from-server! []
(comm/get! "settings/email-notifications"
{:on-success (tuck/send-async! ->UserNotificationsResponse)
:on-failure (tuck/send-async! ->ServerError)}))
(tuck/define-event LoadData []
{:path [:email-settings]}
(load-email-notifications-from-server!)
(-> app
(assoc :regions-loading true)
(assoc :user-notifications-loading true)))
;; Region id's are strings in db. Change them to keywords
(defn str->keyword [x]
(map #(keyword %) x))
;; Create new route
(defrecord UserNotificationsResponse [response])
(defrecord SaveEmailNotificationSettings [])
(defrecord UpdateSettings [form-data])
(defrecord SaveEmailSettingsResponse [response])
(defrecord SaveEmailSettingsResponseFailure [response])
(extend-protocol tuck/Event
UserNotificationsResponse
(process-event [{response :response} app]
(let [r (fill-selected-regions response)]
(-> app
(assoc-in [:email-settings :regions] (get-in r [:email-settings :regions]))
(assoc-in [:email-settings :user-notifications] (get-in r [:email-settings :user-notifications]))
(assoc-in [:email-settings :regions-loading] false))))
SaveEmailNotificationSettings
(process-event [_ app]
(let [settings (as-> (get-in app [:email-settings :user-notifications]) n
(form/without-form-metadata n)
(assoc n :ote.db.user-notifications/finnish-regions
(into [] (map name (:ote.db.user-notifications/finnish-regions n)))))] ;; Change keywords to strings
(comm/post! "settings/email-notifications" settings
{:on-success (tuck/send-async! ->SaveEmailSettingsResponse)
:on-failure (tuck/send-async! ->SaveEmailSettingsResponseFailure)})
app))
SaveEmailSettingsResponse
(process-event [{response :response} app]
(routes/navigate! :email-settings)
(-> app
(dissoc :before-unload-message)
(assoc :flash-message
(tr [:email-notification-settings-page :save-success]))))
SaveEmailSettingsResponseFailure
(process-event [{response :response} app]
(.error js/console "Save settings failed:" (pr-str response))
(assoc app
:flash-message-error
(tr [:email-notification-settings-page :save-failure])))
UpdateSettings
(process-event [{form-data :form-data} app]
(-> app
(update-in [:email-settings :user-notifications] merge form-data))))
| null | https://raw.githubusercontent.com/tmfg/mmtis-national-access-point/a86cc890ffa1fe4f773083be5d2556e87a93d975/ote/src/cljs/ote/app/controller/email_notification_settings.cljs | clojure | On Navigate to :email-settings -> load data
User have saved settings - so no need to do anything
No settings in db, so act like all regions are selected
Region id's are strings in db. Change them to keywords
Create new route
Change keywords to strings | (ns ote.app.controller.email-notification-settings
"Controller for email notification settings"
(:require [tuck.core :as tuck :refer-macros [define-event]]
[ote.communication :as comm]
[ote.app.routes :as routes]
[ote.db.transit :as transit]
[ote.ui.form :as form]
[ote.localization :refer [tr]]
[ote.app.controller.common :refer [->ServerError]]))
(declare ->LoadData ->RegionsResponse ->UserNotificationsResponse ->UpdateSettings)
(defmethod routes/on-navigate-event :email-settings [_ app]
(->LoadData))
(defn fill-selected-regions
"Fill selected regions if no settings saved"
[response]
(if (get-in response [:email-settings :user-notifications :ote.db.user-notifications/created-by])
response
(assoc-in response [:email-settings :user-notifications :ote.db.user-notifications/finnish-regions]
(map #(:id %) (get-in response [:email-settings :regions])))))
(defn load-email-notifications-from-server! []
(comm/get! "settings/email-notifications"
{:on-success (tuck/send-async! ->UserNotificationsResponse)
:on-failure (tuck/send-async! ->ServerError)}))
(tuck/define-event LoadData []
{:path [:email-settings]}
(load-email-notifications-from-server!)
(-> app
(assoc :regions-loading true)
(assoc :user-notifications-loading true)))
(defn str->keyword [x]
(map #(keyword %) x))
(defrecord UserNotificationsResponse [response])
(defrecord SaveEmailNotificationSettings [])
(defrecord UpdateSettings [form-data])
(defrecord SaveEmailSettingsResponse [response])
(defrecord SaveEmailSettingsResponseFailure [response])
(extend-protocol tuck/Event
UserNotificationsResponse
(process-event [{response :response} app]
(let [r (fill-selected-regions response)]
(-> app
(assoc-in [:email-settings :regions] (get-in r [:email-settings :regions]))
(assoc-in [:email-settings :user-notifications] (get-in r [:email-settings :user-notifications]))
(assoc-in [:email-settings :regions-loading] false))))
SaveEmailNotificationSettings
(process-event [_ app]
(let [settings (as-> (get-in app [:email-settings :user-notifications]) n
(form/without-form-metadata n)
(assoc n :ote.db.user-notifications/finnish-regions
(comm/post! "settings/email-notifications" settings
{:on-success (tuck/send-async! ->SaveEmailSettingsResponse)
:on-failure (tuck/send-async! ->SaveEmailSettingsResponseFailure)})
app))
SaveEmailSettingsResponse
(process-event [{response :response} app]
(routes/navigate! :email-settings)
(-> app
(dissoc :before-unload-message)
(assoc :flash-message
(tr [:email-notification-settings-page :save-success]))))
SaveEmailSettingsResponseFailure
(process-event [{response :response} app]
(.error js/console "Save settings failed:" (pr-str response))
(assoc app
:flash-message-error
(tr [:email-notification-settings-page :save-failure])))
UpdateSettings
(process-event [{form-data :form-data} app]
(-> app
(update-in [:email-settings :user-notifications] merge form-data))))
|
479d239b36fc5c8cf0e387e8cbab67d87d5d1257f8f7c2761dfb367bccbf04ef | ruricolist/cloture | errors.lisp | (in-package :cloture)
(in-readtable clojure-shortcut)
(defcondition clojure-condition () ())
(defgeneric #_.getMessage (condition)
(:method ((c condition))
(princ-to-string c)))
(defcondition clojure-error (error clojure-condition)
((message :initarg :message)
(cause :initarg :cause :reader #_.getCause))
(:documentation "Sub-root of all Clojure conditions.")
(:default-initargs :cause #_nil)
(:report (lambda (c s)
(with-slots (message) c
(format s "~a" message)))))
(defmacro define-simple-error-constructor (name)
(let* ((ctor-name (string+ name "."))
(ctor (find-external-symbol ctor-name (symbol-package name) :error t)))
`(defsubst ,ctor (msg)
(make-condition ',name :message msg))))
(defmacro defcondition* (name supers &body body)
`(progn
(defcondition ,name ,supers
,@(if body body (list nil)))
(define-symbol-macro ,name (find-class ',name))))
(defcondition* #_Throwable (clojure-error))
(define-simple-error-constructor #_Throwable)
(defcondition* #_Exception (#_Throwable) ())
(define-simple-error-constructor #_Exception)
(defcondition* #_RuntimeException (#_Exception) ())
(define-simple-error-constructor #_RuntimeException)
(defcondition* #_IllegalArgumentException (#_RuntimeException) ())
(define-simple-error-constructor #_IllegalArgumentException)
(defcondition* #_IllegalStateException (#_RuntimeException) ())
(define-simple-error-constructor #_IllegalStateException)
(defcondition* #_ArityException (#_IllegalArgumentException)
((actual :initarg :actual)
(name :initarg :name))
(:report (lambda (c s)
(with-slots (name actual) c
(format s "~a got ~a arg~:p, which is the wrong arity."
(or name "Anonymous function") actual)))))
(defun #_ArityException. (actual name)
(make-condition '#_ArityException
:actual actual
:name name))
(defcondition* #_Error (#_Exception) ())
(define-simple-error-constructor #_Error)
(defcondition* #_AssertionError (#_Exception) ())
(define-simple-error-constructor #_AssertionError)
(defcondition* #_IllegalAccessError (#_Error) ()) ;Skipping some parents.
(define-simple-error-constructor #_IllegalAccessError)
(defcondition already-persistent (#_IllegalAccessError)
((transient :initarg :transient))
(:report (lambda (c s)
(with-slots (transient) c
(format s "Transient ~a has already been persisted."
transient)))))
(defcondition not-yet-implemented (#_Throwable)
((what :initarg :what))
(:report (lambda (c s)
(with-slots (what) c
(format s "Not yet implemented: ~a" what)))))
(defcondition simple-clojure-error (clojure-condition simple-error) ())
(defcondition clojure-program-error (program-error clojure-error) ())
(defcondition simple-clojure-program-error (clojure-program-error simple-condition) ())
(defcondition clojure-reader-error (clojure-error reader-error) ())
(defcondition simple-clojure-reader-error (simple-clojure-error reader-error) ())
(defcondition clojure-package-error (clojure-error package-error) ())
(defcondition clojure-syntax-error (clojure-error) ())
(defcondition simple-clojure-syntax-error (simple-error clojure-syntax-error) ())
(defcondition wrong-number-arguments (clojure-program-error)
((arguments :initarg :arguments)))
(defcondition too-many-arguments (wrong-number-arguments)
((max :initarg :max :type (integer 0 *)))
(:report (lambda (c s)
(with-slots (arguments max) c
(format s "Too many arguments (max ~a):~%~s" max arguments)))))
(defcondition too-few-arguments (wrong-number-arguments)
((min :initarg :max :type (integer 0 *)))
(:report (lambda (c s)
(with-slots (arguments max) c
(format s "Too many arguments (max ~a):~%~s" max arguments)))))
(defun clojure-error (control &rest args)
(make-condition 'simple-clojure-error
:format-control control
:format-arguments args))
(defun clojure-syntax-error (control &rest args)
(make-condition 'simple-clojure-syntax-error
:format-control control
:format-arguments args))
(defun clojure-program-error (control &rest args)
(make-condition 'simple-clojure-program-error
:format-control control
:format-arguments args))
(defun clojure-reader-error (control &rest args)
(make-condition 'simple-clojure-reader-error
:format-control control
:format-arguments args))
(defun too-many-arguments (max-arity args)
(error 'too-many-arguments
:max max-arity
:arguments args))
(defun too-few-arguments (max-arity args)
(error 'too-few-arguments
:max max-arity
:arguments args))
(defcondition does-not-extend (clojure-error)
((protocol :initarg :protocol)
(object :initarg :object))
(:report (lambda (c s)
(with-slots (protocol object) c
(format s "Class of ~a does not extend protocol ~a"
object protocol)))))
(defcondition no-such-method (clojure-error)
((multi :initarg :multi)
(value :initarg :value))
(:report (lambda (c s)
(with-slots (multi value) c
(format s "No method for ~a in multimethod ~a" value multi)))))
| null | https://raw.githubusercontent.com/ruricolist/cloture/623c15c8d2e5e91eb87f46e3ecb3975880109948/errors.lisp | lisp | Skipping some parents. | (in-package :cloture)
(in-readtable clojure-shortcut)
(defcondition clojure-condition () ())
(defgeneric #_.getMessage (condition)
(:method ((c condition))
(princ-to-string c)))
(defcondition clojure-error (error clojure-condition)
((message :initarg :message)
(cause :initarg :cause :reader #_.getCause))
(:documentation "Sub-root of all Clojure conditions.")
(:default-initargs :cause #_nil)
(:report (lambda (c s)
(with-slots (message) c
(format s "~a" message)))))
(defmacro define-simple-error-constructor (name)
(let* ((ctor-name (string+ name "."))
(ctor (find-external-symbol ctor-name (symbol-package name) :error t)))
`(defsubst ,ctor (msg)
(make-condition ',name :message msg))))
(defmacro defcondition* (name supers &body body)
`(progn
(defcondition ,name ,supers
,@(if body body (list nil)))
(define-symbol-macro ,name (find-class ',name))))
(defcondition* #_Throwable (clojure-error))
(define-simple-error-constructor #_Throwable)
(defcondition* #_Exception (#_Throwable) ())
(define-simple-error-constructor #_Exception)
(defcondition* #_RuntimeException (#_Exception) ())
(define-simple-error-constructor #_RuntimeException)
(defcondition* #_IllegalArgumentException (#_RuntimeException) ())
(define-simple-error-constructor #_IllegalArgumentException)
(defcondition* #_IllegalStateException (#_RuntimeException) ())
(define-simple-error-constructor #_IllegalStateException)
(defcondition* #_ArityException (#_IllegalArgumentException)
((actual :initarg :actual)
(name :initarg :name))
(:report (lambda (c s)
(with-slots (name actual) c
(format s "~a got ~a arg~:p, which is the wrong arity."
(or name "Anonymous function") actual)))))
(defun #_ArityException. (actual name)
(make-condition '#_ArityException
:actual actual
:name name))
(defcondition* #_Error (#_Exception) ())
(define-simple-error-constructor #_Error)
(defcondition* #_AssertionError (#_Exception) ())
(define-simple-error-constructor #_AssertionError)
(define-simple-error-constructor #_IllegalAccessError)
(defcondition already-persistent (#_IllegalAccessError)
((transient :initarg :transient))
(:report (lambda (c s)
(with-slots (transient) c
(format s "Transient ~a has already been persisted."
transient)))))
(defcondition not-yet-implemented (#_Throwable)
((what :initarg :what))
(:report (lambda (c s)
(with-slots (what) c
(format s "Not yet implemented: ~a" what)))))
(defcondition simple-clojure-error (clojure-condition simple-error) ())
(defcondition clojure-program-error (program-error clojure-error) ())
(defcondition simple-clojure-program-error (clojure-program-error simple-condition) ())
(defcondition clojure-reader-error (clojure-error reader-error) ())
(defcondition simple-clojure-reader-error (simple-clojure-error reader-error) ())
(defcondition clojure-package-error (clojure-error package-error) ())
(defcondition clojure-syntax-error (clojure-error) ())
(defcondition simple-clojure-syntax-error (simple-error clojure-syntax-error) ())
(defcondition wrong-number-arguments (clojure-program-error)
((arguments :initarg :arguments)))
(defcondition too-many-arguments (wrong-number-arguments)
((max :initarg :max :type (integer 0 *)))
(:report (lambda (c s)
(with-slots (arguments max) c
(format s "Too many arguments (max ~a):~%~s" max arguments)))))
(defcondition too-few-arguments (wrong-number-arguments)
((min :initarg :max :type (integer 0 *)))
(:report (lambda (c s)
(with-slots (arguments max) c
(format s "Too many arguments (max ~a):~%~s" max arguments)))))
(defun clojure-error (control &rest args)
(make-condition 'simple-clojure-error
:format-control control
:format-arguments args))
(defun clojure-syntax-error (control &rest args)
(make-condition 'simple-clojure-syntax-error
:format-control control
:format-arguments args))
(defun clojure-program-error (control &rest args)
(make-condition 'simple-clojure-program-error
:format-control control
:format-arguments args))
(defun clojure-reader-error (control &rest args)
(make-condition 'simple-clojure-reader-error
:format-control control
:format-arguments args))
(defun too-many-arguments (max-arity args)
(error 'too-many-arguments
:max max-arity
:arguments args))
(defun too-few-arguments (max-arity args)
(error 'too-few-arguments
:max max-arity
:arguments args))
(defcondition does-not-extend (clojure-error)
((protocol :initarg :protocol)
(object :initarg :object))
(:report (lambda (c s)
(with-slots (protocol object) c
(format s "Class of ~a does not extend protocol ~a"
object protocol)))))
(defcondition no-such-method (clojure-error)
((multi :initarg :multi)
(value :initarg :value))
(:report (lambda (c s)
(with-slots (multi value) c
(format s "No method for ~a in multimethod ~a" value multi)))))
|
549b9b76e2ce4f7c7225d1a406ab0e1da65b3d495466028aade561071d9a3ba1 | tfausak/patrol | DebugMetaSpec.hs | # LANGUAGE QuasiQuotes #
module Patrol.Type.DebugMetaSpec where
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.QQ.Simple as Aeson
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Patrol.Type.DebugImage as DebugImage
import qualified Patrol.Type.DebugMeta as DebugMeta
import qualified Patrol.Type.SystemSdkInfo as SystemSdkInfo
import qualified Test.Hspec as Hspec
spec :: Hspec.Spec
spec = Hspec.describe "Patrol.Type.DebugMeta" $ do
Hspec.describe "ToJSON" $ do
Hspec.it "works" $ do
let debugMeta = DebugMeta.empty
json = [Aeson.aesonQQ| {} |]
Aeson.toJSON debugMeta `Hspec.shouldBe` json
Hspec.it "works with an image" $ do
let image = DebugImage.Other . Map.singleton (Text.pack "example-image") $ Aeson.Bool True
debugMeta = DebugMeta.empty {DebugMeta.images = [image]}
json = [Aeson.aesonQQ| { "images": [ { "example-image": true } ] } |]
Aeson.toJSON debugMeta `Hspec.shouldBe` json
Hspec.it "works with some SDK info" $ do
let systemSdkInfo = SystemSdkInfo.empty {SystemSdkInfo.versionMajor = Just 0}
debugMeta = DebugMeta.empty {DebugMeta.sdkInfo = Just systemSdkInfo}
json = [Aeson.aesonQQ| { "sdk_info": { "version_major": 0 } } |]
Aeson.toJSON debugMeta `Hspec.shouldBe` json
| null | https://raw.githubusercontent.com/tfausak/patrol/1cae55b3840b328cda7de85ea424333fcab434cb/source/test-suite/Patrol/Type/DebugMetaSpec.hs | haskell | # LANGUAGE QuasiQuotes #
module Patrol.Type.DebugMetaSpec where
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.QQ.Simple as Aeson
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Patrol.Type.DebugImage as DebugImage
import qualified Patrol.Type.DebugMeta as DebugMeta
import qualified Patrol.Type.SystemSdkInfo as SystemSdkInfo
import qualified Test.Hspec as Hspec
spec :: Hspec.Spec
spec = Hspec.describe "Patrol.Type.DebugMeta" $ do
Hspec.describe "ToJSON" $ do
Hspec.it "works" $ do
let debugMeta = DebugMeta.empty
json = [Aeson.aesonQQ| {} |]
Aeson.toJSON debugMeta `Hspec.shouldBe` json
Hspec.it "works with an image" $ do
let image = DebugImage.Other . Map.singleton (Text.pack "example-image") $ Aeson.Bool True
debugMeta = DebugMeta.empty {DebugMeta.images = [image]}
json = [Aeson.aesonQQ| { "images": [ { "example-image": true } ] } |]
Aeson.toJSON debugMeta `Hspec.shouldBe` json
Hspec.it "works with some SDK info" $ do
let systemSdkInfo = SystemSdkInfo.empty {SystemSdkInfo.versionMajor = Just 0}
debugMeta = DebugMeta.empty {DebugMeta.sdkInfo = Just systemSdkInfo}
json = [Aeson.aesonQQ| { "sdk_info": { "version_major": 0 } } |]
Aeson.toJSON debugMeta `Hspec.shouldBe` json
| |
c19027770b2b93c539cad817d1b525781fe104fade2d275788e27e721cbbb92e | int-index/slay | Prim.hs | module Slay.Vty.Prim
( Prim(..),
image,
space,
string,
empty
) where
import qualified Graphics.Vty as Vty
import Data.String
import Inj
import Slay.Core
data Prim =
Prim
{ primExtents :: Extents,
primImage :: Vty.Image
}
deriving (Eq, Show)
instance p ~ Prim => Inj p Prim
image :: Inj Prim a => Vty.Image -> a
image img = inj (Prim e img)
where
e = vtyImageExtents img
space :: Inj Prim a => Extents -> a
space e = inj (Prim e img)
where
Extents w h = e
img = Vty.backgroundFill (fromIntegral w) (fromIntegral h)
string :: Inj Prim a => Vty.Attr -> String -> a
string attr str = image (Vty.string attr str)
empty :: Inj Prim a => a
empty = inj (Prim e img)
where
e = Extents 0 0
img = Vty.emptyImage
instance IsString Prim where
fromString = string Vty.defAttr
vtyImageExtents :: Vty.Image -> Extents
vtyImageExtents img =
Extents
{ extentsW = fromIntegral (Vty.imageWidth img),
extentsH = fromIntegral (Vty.imageHeight img) }
| null | https://raw.githubusercontent.com/int-index/slay/1c9d39b8cb4f32f0b4778677c21ebb85cc1cddf7/vty/src/Slay/Vty/Prim.hs | haskell | module Slay.Vty.Prim
( Prim(..),
image,
space,
string,
empty
) where
import qualified Graphics.Vty as Vty
import Data.String
import Inj
import Slay.Core
data Prim =
Prim
{ primExtents :: Extents,
primImage :: Vty.Image
}
deriving (Eq, Show)
instance p ~ Prim => Inj p Prim
image :: Inj Prim a => Vty.Image -> a
image img = inj (Prim e img)
where
e = vtyImageExtents img
space :: Inj Prim a => Extents -> a
space e = inj (Prim e img)
where
Extents w h = e
img = Vty.backgroundFill (fromIntegral w) (fromIntegral h)
string :: Inj Prim a => Vty.Attr -> String -> a
string attr str = image (Vty.string attr str)
empty :: Inj Prim a => a
empty = inj (Prim e img)
where
e = Extents 0 0
img = Vty.emptyImage
instance IsString Prim where
fromString = string Vty.defAttr
vtyImageExtents :: Vty.Image -> Extents
vtyImageExtents img =
Extents
{ extentsW = fromIntegral (Vty.imageWidth img),
extentsH = fromIntegral (Vty.imageHeight img) }
| |
c29617f915c225ff640d9309a3196ddd7e5adb908d15531348412651a757de90 | eutro/racket-raylib | enums.rkt | #lang scribble/text
@(require racket/match
raylib/codegen/objects)
@(provide generate-enums)
@(define (generate-enums
enums-parsed
#:module _this-mod)
@list{
#lang racket/base
(require ffi/unsafe)
(provide (all-defined-out))
@splice{
@(for/list ([parsed-enum (in-list enums-parsed)])
(match-define (api-enum name description enum-values) parsed-enum)
@list{@(void)
@(when description @list{
;; @|description|
})
(define _@|name|
(_enum '(@block{
@(add-newlines
(for/list ([enum-value (in-list enum-values)])
(match-define (api-enum-value name _enum-desc enum-int-value) enum-value)
@list{@|name| = @|enum-int-value|}))}
@; (intentional)
)))
@(add-newlines
(for/list ([enum-value (in-list enum-values)])
(match-define (api-enum-value name enum-desc enum-int-value) enum-value)
(add-newlines
#:sep " "
(list
@list{(define @|name| @|enum-int-value|)}
(when enum-desc @list{
@|enum - desc|
})))))
@(void)})
}})
| null | https://raw.githubusercontent.com/eutro/racket-raylib/8c477bec5708018b8ef067a7a6c36c1bbfb58132/configs/templates/enums.rkt | racket | @|description|
(intentional) | #lang scribble/text
@(require racket/match
raylib/codegen/objects)
@(provide generate-enums)
@(define (generate-enums
enums-parsed
#:module _this-mod)
@list{
#lang racket/base
(require ffi/unsafe)
(provide (all-defined-out))
@splice{
@(for/list ([parsed-enum (in-list enums-parsed)])
(match-define (api-enum name description enum-values) parsed-enum)
@list{@(void)
@(when description @list{
})
(define _@|name|
(_enum '(@block{
@(add-newlines
(for/list ([enum-value (in-list enum-values)])
(match-define (api-enum-value name _enum-desc enum-int-value) enum-value)
@list{@|name| = @|enum-int-value|}))}
)))
@(add-newlines
(for/list ([enum-value (in-list enum-values)])
(match-define (api-enum-value name enum-desc enum-int-value) enum-value)
(add-newlines
#:sep " "
(list
@list{(define @|name| @|enum-int-value|)}
(when enum-desc @list{
@|enum - desc|
})))))
@(void)})
}})
|
c9acba65ebe0ee8ef8b6ea395409763bd16f76a12d3c53412981cf01d93fedbf | mejgun/haskell-tdlib | PaymentProvider.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Data.PaymentProvider where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified Utils as U
-- | Contains information about a payment provider
data PaymentProvider
| Smart Glocal payment provider @public_token Public payment token
PaymentProviderSmartGlocal
{ -- |
public_token :: Maybe String
}
| -- | Stripe payment provider
PaymentProviderStripe
{ -- | True, if the cardholder name must be provided
need_cardholder_name :: Maybe Bool,
-- | True, if the user ZIP/postal code must be provided
need_postal_code :: Maybe Bool,
-- | True, if the user country must be provided
need_country :: Maybe Bool,
-- | Stripe API publishable key
publishable_key :: Maybe String
}
| -- | Some other payment provider, for which a web payment form must be shown @url Payment form URL
PaymentProviderOther
{ -- |
url :: Maybe String
}
deriving (Eq)
instance Show PaymentProvider where
show
PaymentProviderSmartGlocal
{ public_token = public_token_
} =
"PaymentProviderSmartGlocal"
++ U.cc
[ U.p "public_token" public_token_
]
show
PaymentProviderStripe
{ need_cardholder_name = need_cardholder_name_,
need_postal_code = need_postal_code_,
need_country = need_country_,
publishable_key = publishable_key_
} =
"PaymentProviderStripe"
++ U.cc
[ U.p "need_cardholder_name" need_cardholder_name_,
U.p "need_postal_code" need_postal_code_,
U.p "need_country" need_country_,
U.p "publishable_key" publishable_key_
]
show
PaymentProviderOther
{ url = url_
} =
"PaymentProviderOther"
++ U.cc
[ U.p "url" url_
]
instance T.FromJSON PaymentProvider where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"paymentProviderSmartGlocal" -> parsePaymentProviderSmartGlocal v
"paymentProviderStripe" -> parsePaymentProviderStripe v
"paymentProviderOther" -> parsePaymentProviderOther v
_ -> mempty
where
parsePaymentProviderSmartGlocal :: A.Value -> T.Parser PaymentProvider
parsePaymentProviderSmartGlocal = A.withObject "PaymentProviderSmartGlocal" $ \o -> do
public_token_ <- o A..:? "public_token"
return $ PaymentProviderSmartGlocal {public_token = public_token_}
parsePaymentProviderStripe :: A.Value -> T.Parser PaymentProvider
parsePaymentProviderStripe = A.withObject "PaymentProviderStripe" $ \o -> do
need_cardholder_name_ <- o A..:? "need_cardholder_name"
need_postal_code_ <- o A..:? "need_postal_code"
need_country_ <- o A..:? "need_country"
publishable_key_ <- o A..:? "publishable_key"
return $ PaymentProviderStripe {need_cardholder_name = need_cardholder_name_, need_postal_code = need_postal_code_, need_country = need_country_, publishable_key = publishable_key_}
parsePaymentProviderOther :: A.Value -> T.Parser PaymentProvider
parsePaymentProviderOther = A.withObject "PaymentProviderOther" $ \o -> do
url_ <- o A..:? "url"
return $ PaymentProviderOther {url = url_}
parseJSON _ = mempty
instance T.ToJSON PaymentProvider where
toJSON
PaymentProviderSmartGlocal
{ public_token = public_token_
} =
A.object
[ "@type" A..= T.String "paymentProviderSmartGlocal",
"public_token" A..= public_token_
]
toJSON
PaymentProviderStripe
{ need_cardholder_name = need_cardholder_name_,
need_postal_code = need_postal_code_,
need_country = need_country_,
publishable_key = publishable_key_
} =
A.object
[ "@type" A..= T.String "paymentProviderStripe",
"need_cardholder_name" A..= need_cardholder_name_,
"need_postal_code" A..= need_postal_code_,
"need_country" A..= need_country_,
"publishable_key" A..= publishable_key_
]
toJSON
PaymentProviderOther
{ url = url_
} =
A.object
[ "@type" A..= T.String "paymentProviderOther",
"url" A..= url_
]
| null | https://raw.githubusercontent.com/mejgun/haskell-tdlib/dc380d18d49eaadc386a81dc98af2ce00f8797c2/src/TD/Data/PaymentProvider.hs | haskell | # LANGUAGE OverloadedStrings #
|
| Contains information about a payment provider
|
| Stripe payment provider
| True, if the cardholder name must be provided
| True, if the user ZIP/postal code must be provided
| True, if the user country must be provided
| Stripe API publishable key
| Some other payment provider, for which a web payment form must be shown @url Payment form URL
| |
module TD.Data.PaymentProvider where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified Utils as U
data PaymentProvider
| Smart Glocal payment provider @public_token Public payment token
PaymentProviderSmartGlocal
public_token :: Maybe String
}
PaymentProviderStripe
need_cardholder_name :: Maybe Bool,
need_postal_code :: Maybe Bool,
need_country :: Maybe Bool,
publishable_key :: Maybe String
}
PaymentProviderOther
url :: Maybe String
}
deriving (Eq)
instance Show PaymentProvider where
show
PaymentProviderSmartGlocal
{ public_token = public_token_
} =
"PaymentProviderSmartGlocal"
++ U.cc
[ U.p "public_token" public_token_
]
show
PaymentProviderStripe
{ need_cardholder_name = need_cardholder_name_,
need_postal_code = need_postal_code_,
need_country = need_country_,
publishable_key = publishable_key_
} =
"PaymentProviderStripe"
++ U.cc
[ U.p "need_cardholder_name" need_cardholder_name_,
U.p "need_postal_code" need_postal_code_,
U.p "need_country" need_country_,
U.p "publishable_key" publishable_key_
]
show
PaymentProviderOther
{ url = url_
} =
"PaymentProviderOther"
++ U.cc
[ U.p "url" url_
]
instance T.FromJSON PaymentProvider where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"paymentProviderSmartGlocal" -> parsePaymentProviderSmartGlocal v
"paymentProviderStripe" -> parsePaymentProviderStripe v
"paymentProviderOther" -> parsePaymentProviderOther v
_ -> mempty
where
parsePaymentProviderSmartGlocal :: A.Value -> T.Parser PaymentProvider
parsePaymentProviderSmartGlocal = A.withObject "PaymentProviderSmartGlocal" $ \o -> do
public_token_ <- o A..:? "public_token"
return $ PaymentProviderSmartGlocal {public_token = public_token_}
parsePaymentProviderStripe :: A.Value -> T.Parser PaymentProvider
parsePaymentProviderStripe = A.withObject "PaymentProviderStripe" $ \o -> do
need_cardholder_name_ <- o A..:? "need_cardholder_name"
need_postal_code_ <- o A..:? "need_postal_code"
need_country_ <- o A..:? "need_country"
publishable_key_ <- o A..:? "publishable_key"
return $ PaymentProviderStripe {need_cardholder_name = need_cardholder_name_, need_postal_code = need_postal_code_, need_country = need_country_, publishable_key = publishable_key_}
parsePaymentProviderOther :: A.Value -> T.Parser PaymentProvider
parsePaymentProviderOther = A.withObject "PaymentProviderOther" $ \o -> do
url_ <- o A..:? "url"
return $ PaymentProviderOther {url = url_}
parseJSON _ = mempty
instance T.ToJSON PaymentProvider where
toJSON
PaymentProviderSmartGlocal
{ public_token = public_token_
} =
A.object
[ "@type" A..= T.String "paymentProviderSmartGlocal",
"public_token" A..= public_token_
]
toJSON
PaymentProviderStripe
{ need_cardholder_name = need_cardholder_name_,
need_postal_code = need_postal_code_,
need_country = need_country_,
publishable_key = publishable_key_
} =
A.object
[ "@type" A..= T.String "paymentProviderStripe",
"need_cardholder_name" A..= need_cardholder_name_,
"need_postal_code" A..= need_postal_code_,
"need_country" A..= need_country_,
"publishable_key" A..= publishable_key_
]
toJSON
PaymentProviderOther
{ url = url_
} =
A.object
[ "@type" A..= T.String "paymentProviderOther",
"url" A..= url_
]
|
a4d463a853bde01c5da1329ded7cc7e16281ae537c156046c51a2873bdb2e822 | armedbear/abcl | defmacro.lisp | ;;; defmacro.lisp
;;;
Copyright ( C ) 2003 - 2006
$ I d : defmacro.lisp 13696 2011 - 11 - 15 22:34:19Z astalla $
;;;
;;; This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version 2
of the License , or ( at your option ) any later version .
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with this program; if not, write to the Free Software
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
;;;
;;; As a special exception, the copyright holders of this library give you
;;; permission to link this library with independent modules to produce an
;;; executable, regardless of the license terms of these independent
;;; modules, and to copy and distribute the resulting executable under
;;; terms of your choice, provided that you also meet, for each linked
;;; independent module, the terms and conditions of the license of that
;;; module. An independent module is a module which is not derived from
;;; or based on this library. If you modify this library, you may extend
;;; this exception to your version of the library, but you are not
;;; obligated to do so. If you do not wish to do so, delete this
;;; exception statement from your version.
Adapted from CMUCL / SBCL .
(in-package #:system)
Redefine DEFMACRO to use PARSE - DEFMACRO .
(defmacro defmacro (name lambda-list &rest body)
(let* ((whole (gensym "WHOLE-"))
(env (gensym "ENVIRONMENT-")))
(multiple-value-bind (body decls documentation)
(parse-defmacro lambda-list whole body name 'defmacro :environment env)
(let ((expander `(lambda (,whole ,env) ,@decls ,body)))
`(progn
(sys::record-source-information-for-type ',name :macro)
(let ((macro (make-macro ',name
(or (precompile nil ,expander) ,expander))))
,@(if (special-operator-p name)
`((put ',name 'macroexpand-macro macro))
`((fset ',name macro)))
(%set-arglist macro ',lambda-list)
,@(when documentation
`((%set-documentation ',name 'cl:function ,documentation)))
',name))))))
| null | https://raw.githubusercontent.com/armedbear/abcl/0631ea551523bb93c06263e772fbe849008e2f68/src/org/armedbear/lisp/defmacro.lisp | lisp | defmacro.lisp
This program is free software; you can redistribute it and/or
either version 2
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program; if not, write to the Free Software
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. | Copyright ( C ) 2003 - 2006
$ I d : defmacro.lisp 13696 2011 - 11 - 15 22:34:19Z astalla $
modify it under the terms of the GNU General Public License
of the License , or ( at your option ) any later version .
You should have received a copy of the GNU General Public License
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
Adapted from CMUCL / SBCL .
(in-package #:system)
Redefine DEFMACRO to use PARSE - DEFMACRO .
(defmacro defmacro (name lambda-list &rest body)
(let* ((whole (gensym "WHOLE-"))
(env (gensym "ENVIRONMENT-")))
(multiple-value-bind (body decls documentation)
(parse-defmacro lambda-list whole body name 'defmacro :environment env)
(let ((expander `(lambda (,whole ,env) ,@decls ,body)))
`(progn
(sys::record-source-information-for-type ',name :macro)
(let ((macro (make-macro ',name
(or (precompile nil ,expander) ,expander))))
,@(if (special-operator-p name)
`((put ',name 'macroexpand-macro macro))
`((fset ',name macro)))
(%set-arglist macro ',lambda-list)
,@(when documentation
`((%set-documentation ',name 'cl:function ,documentation)))
',name))))))
|
edefa0ca8e96f16cd01058e0712bf2a6fd0dca24bddb7d279658a0cf73287781 | achirkin/vulkan | AccessFlags.hs | # OPTIONS_HADDOCK ignore - exports #
{-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE KindSignatures #
{-# LANGUAGE PatternSynonyms #-}
# LANGUAGE StandaloneDeriving #
{-# LANGUAGE Strict #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Graphics.Vulkan.Types.Enum.AccessFlags
(VkAccessBitmask(VkAccessBitmask, VkAccessFlags, VkAccessFlagBits,
VK_ACCESS_INDIRECT_COMMAND_READ_BIT, VK_ACCESS_INDEX_READ_BIT,
VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, VK_ACCESS_UNIFORM_READ_BIT,
VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, VK_ACCESS_SHADER_READ_BIT,
VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_WRITE_BIT,
VK_ACCESS_HOST_READ_BIT, VK_ACCESS_HOST_WRITE_BIT,
VK_ACCESS_MEMORY_READ_BIT, VK_ACCESS_MEMORY_WRITE_BIT),
VkAccessFlags, VkAccessFlagBits)
where
import Data.Bits (Bits, FiniteBits)
import Foreign.Storable (Storable)
import GHC.Read (choose, expectP)
import Graphics.Vulkan.Marshal (FlagBit, FlagMask, FlagType)
import Graphics.Vulkan.Types.BaseTypes (VkFlags (..))
import Text.ParserCombinators.ReadPrec (prec, step, (+++))
import Text.Read (Read (..), parens)
import Text.Read.Lex (Lexeme (..))
newtype VkAccessBitmask (a :: FlagType) = VkAccessBitmask VkFlags
deriving (Eq, Ord, Storable)
type VkAccessFlags = VkAccessBitmask FlagMask
type VkAccessFlagBits = VkAccessBitmask FlagBit
pattern VkAccessFlagBits :: VkFlags -> VkAccessBitmask FlagBit
pattern VkAccessFlagBits n = VkAccessBitmask n
pattern VkAccessFlags :: VkFlags -> VkAccessBitmask FlagMask
pattern VkAccessFlags n = VkAccessBitmask n
deriving instance Bits (VkAccessBitmask FlagMask)
deriving instance FiniteBits (VkAccessBitmask FlagMask)
instance Show (VkAccessBitmask a) where
showsPrec _ VK_ACCESS_INDIRECT_COMMAND_READ_BIT
= showString "VK_ACCESS_INDIRECT_COMMAND_READ_BIT"
showsPrec _ VK_ACCESS_INDEX_READ_BIT
= showString "VK_ACCESS_INDEX_READ_BIT"
showsPrec _ VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
= showString "VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT"
showsPrec _ VK_ACCESS_UNIFORM_READ_BIT
= showString "VK_ACCESS_UNIFORM_READ_BIT"
showsPrec _ VK_ACCESS_INPUT_ATTACHMENT_READ_BIT
= showString "VK_ACCESS_INPUT_ATTACHMENT_READ_BIT"
showsPrec _ VK_ACCESS_SHADER_READ_BIT
= showString "VK_ACCESS_SHADER_READ_BIT"
showsPrec _ VK_ACCESS_SHADER_WRITE_BIT
= showString "VK_ACCESS_SHADER_WRITE_BIT"
showsPrec _ VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
= showString "VK_ACCESS_COLOR_ATTACHMENT_READ_BIT"
showsPrec _ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
= showString "VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT"
showsPrec _ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
= showString "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT"
showsPrec _ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
= showString "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT"
showsPrec _ VK_ACCESS_TRANSFER_READ_BIT
= showString "VK_ACCESS_TRANSFER_READ_BIT"
showsPrec _ VK_ACCESS_TRANSFER_WRITE_BIT
= showString "VK_ACCESS_TRANSFER_WRITE_BIT"
showsPrec _ VK_ACCESS_HOST_READ_BIT
= showString "VK_ACCESS_HOST_READ_BIT"
showsPrec _ VK_ACCESS_HOST_WRITE_BIT
= showString "VK_ACCESS_HOST_WRITE_BIT"
showsPrec _ VK_ACCESS_MEMORY_READ_BIT
= showString "VK_ACCESS_MEMORY_READ_BIT"
showsPrec _ VK_ACCESS_MEMORY_WRITE_BIT
= showString "VK_ACCESS_MEMORY_WRITE_BIT"
showsPrec p (VkAccessBitmask x)
= showParen (p >= 11)
(showString "VkAccessBitmask " . showsPrec 11 x)
instance Read (VkAccessBitmask a) where
readPrec
= parens
(choose
[("VK_ACCESS_INDIRECT_COMMAND_READ_BIT",
pure VK_ACCESS_INDIRECT_COMMAND_READ_BIT),
("VK_ACCESS_INDEX_READ_BIT", pure VK_ACCESS_INDEX_READ_BIT),
("VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT",
pure VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT),
("VK_ACCESS_UNIFORM_READ_BIT", pure VK_ACCESS_UNIFORM_READ_BIT),
("VK_ACCESS_INPUT_ATTACHMENT_READ_BIT",
pure VK_ACCESS_INPUT_ATTACHMENT_READ_BIT),
("VK_ACCESS_SHADER_READ_BIT", pure VK_ACCESS_SHADER_READ_BIT),
("VK_ACCESS_SHADER_WRITE_BIT", pure VK_ACCESS_SHADER_WRITE_BIT),
("VK_ACCESS_COLOR_ATTACHMENT_READ_BIT",
pure VK_ACCESS_COLOR_ATTACHMENT_READ_BIT),
("VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT",
pure VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT),
("VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT",
pure VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT),
("VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT",
pure VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT),
("VK_ACCESS_TRANSFER_READ_BIT", pure VK_ACCESS_TRANSFER_READ_BIT),
("VK_ACCESS_TRANSFER_WRITE_BIT",
pure VK_ACCESS_TRANSFER_WRITE_BIT),
("VK_ACCESS_HOST_READ_BIT", pure VK_ACCESS_HOST_READ_BIT),
("VK_ACCESS_HOST_WRITE_BIT", pure VK_ACCESS_HOST_WRITE_BIT),
("VK_ACCESS_MEMORY_READ_BIT", pure VK_ACCESS_MEMORY_READ_BIT),
("VK_ACCESS_MEMORY_WRITE_BIT", pure VK_ACCESS_MEMORY_WRITE_BIT)]
+++
prec 10
(expectP (Ident "VkAccessBitmask") >>
(VkAccessBitmask <$> step readPrec)))
-- | Controls coherency of indirect command reads
--
= @0@
pattern VK_ACCESS_INDIRECT_COMMAND_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_INDIRECT_COMMAND_READ_BIT = VkAccessBitmask 1
-- | Controls coherency of index reads
--
= @1@
pattern VK_ACCESS_INDEX_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_INDEX_READ_BIT = VkAccessBitmask 2
-- | Controls coherency of vertex attribute reads
--
=
pattern VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = VkAccessBitmask 4
-- | Controls coherency of uniform buffer reads
--
= @3@
pattern VK_ACCESS_UNIFORM_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_UNIFORM_READ_BIT = VkAccessBitmask 8
-- | Controls coherency of input attachment reads
--
= @4@
pattern VK_ACCESS_INPUT_ATTACHMENT_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = VkAccessBitmask 16
-- | Controls coherency of shader reads
--
-- bitpos = @5@
pattern VK_ACCESS_SHADER_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_SHADER_READ_BIT = VkAccessBitmask 32
-- | Controls coherency of shader writes
--
=
pattern VK_ACCESS_SHADER_WRITE_BIT :: VkAccessBitmask a
pattern VK_ACCESS_SHADER_WRITE_BIT = VkAccessBitmask 64
-- | Controls coherency of color attachment reads
--
= @7@
pattern VK_ACCESS_COLOR_ATTACHMENT_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = VkAccessBitmask 128
-- | Controls coherency of color attachment writes
--
= @8@
pattern VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT :: VkAccessBitmask a
pattern VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = VkAccessBitmask 256
-- | Controls coherency of depth/stencil attachment reads
--
=
pattern VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT ::
VkAccessBitmask a
pattern VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT =
VkAccessBitmask 512
-- | Controls coherency of depth/stencil attachment writes
--
= @10@
pattern VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT ::
VkAccessBitmask a
pattern VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT =
VkAccessBitmask 1024
-- | Controls coherency of transfer reads
--
= @11@
pattern VK_ACCESS_TRANSFER_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_TRANSFER_READ_BIT = VkAccessBitmask 2048
-- | Controls coherency of transfer writes
--
= @12@
pattern VK_ACCESS_TRANSFER_WRITE_BIT :: VkAccessBitmask a
pattern VK_ACCESS_TRANSFER_WRITE_BIT = VkAccessBitmask 4096
-- | Controls coherency of host reads
--
= @13@
pattern VK_ACCESS_HOST_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_HOST_READ_BIT = VkAccessBitmask 8192
-- | Controls coherency of host writes
--
= @14@
pattern VK_ACCESS_HOST_WRITE_BIT :: VkAccessBitmask a
pattern VK_ACCESS_HOST_WRITE_BIT = VkAccessBitmask 16384
-- | Controls coherency of memory reads
--
= @15@
pattern VK_ACCESS_MEMORY_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_MEMORY_READ_BIT = VkAccessBitmask 32768
-- | Controls coherency of memory writes
--
= @16@
pattern VK_ACCESS_MEMORY_WRITE_BIT :: VkAccessBitmask a
pattern VK_ACCESS_MEMORY_WRITE_BIT = VkAccessBitmask 65536
| null | https://raw.githubusercontent.com/achirkin/vulkan/b2e0568c71b5135010f4bba939cd8dcf7a05c361/vulkan-api/src-gen/Graphics/Vulkan/Types/Enum/AccessFlags.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE PatternSynonyms #
# LANGUAGE Strict #
# LANGUAGE TypeSynonymInstances #
| Controls coherency of indirect command reads
| Controls coherency of index reads
| Controls coherency of vertex attribute reads
| Controls coherency of uniform buffer reads
| Controls coherency of input attachment reads
| Controls coherency of shader reads
bitpos = @5@
| Controls coherency of shader writes
| Controls coherency of color attachment reads
| Controls coherency of color attachment writes
| Controls coherency of depth/stencil attachment reads
| Controls coherency of depth/stencil attachment writes
| Controls coherency of transfer reads
| Controls coherency of transfer writes
| Controls coherency of host reads
| Controls coherency of host writes
| Controls coherency of memory reads
| Controls coherency of memory writes
| # OPTIONS_HADDOCK ignore - exports #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE KindSignatures #
# LANGUAGE StandaloneDeriving #
module Graphics.Vulkan.Types.Enum.AccessFlags
(VkAccessBitmask(VkAccessBitmask, VkAccessFlags, VkAccessFlagBits,
VK_ACCESS_INDIRECT_COMMAND_READ_BIT, VK_ACCESS_INDEX_READ_BIT,
VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, VK_ACCESS_UNIFORM_READ_BIT,
VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, VK_ACCESS_SHADER_READ_BIT,
VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_WRITE_BIT,
VK_ACCESS_HOST_READ_BIT, VK_ACCESS_HOST_WRITE_BIT,
VK_ACCESS_MEMORY_READ_BIT, VK_ACCESS_MEMORY_WRITE_BIT),
VkAccessFlags, VkAccessFlagBits)
where
import Data.Bits (Bits, FiniteBits)
import Foreign.Storable (Storable)
import GHC.Read (choose, expectP)
import Graphics.Vulkan.Marshal (FlagBit, FlagMask, FlagType)
import Graphics.Vulkan.Types.BaseTypes (VkFlags (..))
import Text.ParserCombinators.ReadPrec (prec, step, (+++))
import Text.Read (Read (..), parens)
import Text.Read.Lex (Lexeme (..))
newtype VkAccessBitmask (a :: FlagType) = VkAccessBitmask VkFlags
deriving (Eq, Ord, Storable)
type VkAccessFlags = VkAccessBitmask FlagMask
type VkAccessFlagBits = VkAccessBitmask FlagBit
pattern VkAccessFlagBits :: VkFlags -> VkAccessBitmask FlagBit
pattern VkAccessFlagBits n = VkAccessBitmask n
pattern VkAccessFlags :: VkFlags -> VkAccessBitmask FlagMask
pattern VkAccessFlags n = VkAccessBitmask n
deriving instance Bits (VkAccessBitmask FlagMask)
deriving instance FiniteBits (VkAccessBitmask FlagMask)
instance Show (VkAccessBitmask a) where
showsPrec _ VK_ACCESS_INDIRECT_COMMAND_READ_BIT
= showString "VK_ACCESS_INDIRECT_COMMAND_READ_BIT"
showsPrec _ VK_ACCESS_INDEX_READ_BIT
= showString "VK_ACCESS_INDEX_READ_BIT"
showsPrec _ VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
= showString "VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT"
showsPrec _ VK_ACCESS_UNIFORM_READ_BIT
= showString "VK_ACCESS_UNIFORM_READ_BIT"
showsPrec _ VK_ACCESS_INPUT_ATTACHMENT_READ_BIT
= showString "VK_ACCESS_INPUT_ATTACHMENT_READ_BIT"
showsPrec _ VK_ACCESS_SHADER_READ_BIT
= showString "VK_ACCESS_SHADER_READ_BIT"
showsPrec _ VK_ACCESS_SHADER_WRITE_BIT
= showString "VK_ACCESS_SHADER_WRITE_BIT"
showsPrec _ VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
= showString "VK_ACCESS_COLOR_ATTACHMENT_READ_BIT"
showsPrec _ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
= showString "VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT"
showsPrec _ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
= showString "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT"
showsPrec _ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
= showString "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT"
showsPrec _ VK_ACCESS_TRANSFER_READ_BIT
= showString "VK_ACCESS_TRANSFER_READ_BIT"
showsPrec _ VK_ACCESS_TRANSFER_WRITE_BIT
= showString "VK_ACCESS_TRANSFER_WRITE_BIT"
showsPrec _ VK_ACCESS_HOST_READ_BIT
= showString "VK_ACCESS_HOST_READ_BIT"
showsPrec _ VK_ACCESS_HOST_WRITE_BIT
= showString "VK_ACCESS_HOST_WRITE_BIT"
showsPrec _ VK_ACCESS_MEMORY_READ_BIT
= showString "VK_ACCESS_MEMORY_READ_BIT"
showsPrec _ VK_ACCESS_MEMORY_WRITE_BIT
= showString "VK_ACCESS_MEMORY_WRITE_BIT"
showsPrec p (VkAccessBitmask x)
= showParen (p >= 11)
(showString "VkAccessBitmask " . showsPrec 11 x)
instance Read (VkAccessBitmask a) where
readPrec
= parens
(choose
[("VK_ACCESS_INDIRECT_COMMAND_READ_BIT",
pure VK_ACCESS_INDIRECT_COMMAND_READ_BIT),
("VK_ACCESS_INDEX_READ_BIT", pure VK_ACCESS_INDEX_READ_BIT),
("VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT",
pure VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT),
("VK_ACCESS_UNIFORM_READ_BIT", pure VK_ACCESS_UNIFORM_READ_BIT),
("VK_ACCESS_INPUT_ATTACHMENT_READ_BIT",
pure VK_ACCESS_INPUT_ATTACHMENT_READ_BIT),
("VK_ACCESS_SHADER_READ_BIT", pure VK_ACCESS_SHADER_READ_BIT),
("VK_ACCESS_SHADER_WRITE_BIT", pure VK_ACCESS_SHADER_WRITE_BIT),
("VK_ACCESS_COLOR_ATTACHMENT_READ_BIT",
pure VK_ACCESS_COLOR_ATTACHMENT_READ_BIT),
("VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT",
pure VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT),
("VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT",
pure VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT),
("VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT",
pure VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT),
("VK_ACCESS_TRANSFER_READ_BIT", pure VK_ACCESS_TRANSFER_READ_BIT),
("VK_ACCESS_TRANSFER_WRITE_BIT",
pure VK_ACCESS_TRANSFER_WRITE_BIT),
("VK_ACCESS_HOST_READ_BIT", pure VK_ACCESS_HOST_READ_BIT),
("VK_ACCESS_HOST_WRITE_BIT", pure VK_ACCESS_HOST_WRITE_BIT),
("VK_ACCESS_MEMORY_READ_BIT", pure VK_ACCESS_MEMORY_READ_BIT),
("VK_ACCESS_MEMORY_WRITE_BIT", pure VK_ACCESS_MEMORY_WRITE_BIT)]
+++
prec 10
(expectP (Ident "VkAccessBitmask") >>
(VkAccessBitmask <$> step readPrec)))
= @0@
pattern VK_ACCESS_INDIRECT_COMMAND_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_INDIRECT_COMMAND_READ_BIT = VkAccessBitmask 1
= @1@
pattern VK_ACCESS_INDEX_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_INDEX_READ_BIT = VkAccessBitmask 2
=
pattern VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = VkAccessBitmask 4
= @3@
pattern VK_ACCESS_UNIFORM_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_UNIFORM_READ_BIT = VkAccessBitmask 8
= @4@
pattern VK_ACCESS_INPUT_ATTACHMENT_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = VkAccessBitmask 16
pattern VK_ACCESS_SHADER_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_SHADER_READ_BIT = VkAccessBitmask 32
=
pattern VK_ACCESS_SHADER_WRITE_BIT :: VkAccessBitmask a
pattern VK_ACCESS_SHADER_WRITE_BIT = VkAccessBitmask 64
= @7@
pattern VK_ACCESS_COLOR_ATTACHMENT_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = VkAccessBitmask 128
= @8@
pattern VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT :: VkAccessBitmask a
pattern VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = VkAccessBitmask 256
=
pattern VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT ::
VkAccessBitmask a
pattern VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT =
VkAccessBitmask 512
= @10@
pattern VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT ::
VkAccessBitmask a
pattern VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT =
VkAccessBitmask 1024
= @11@
pattern VK_ACCESS_TRANSFER_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_TRANSFER_READ_BIT = VkAccessBitmask 2048
= @12@
pattern VK_ACCESS_TRANSFER_WRITE_BIT :: VkAccessBitmask a
pattern VK_ACCESS_TRANSFER_WRITE_BIT = VkAccessBitmask 4096
= @13@
pattern VK_ACCESS_HOST_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_HOST_READ_BIT = VkAccessBitmask 8192
= @14@
pattern VK_ACCESS_HOST_WRITE_BIT :: VkAccessBitmask a
pattern VK_ACCESS_HOST_WRITE_BIT = VkAccessBitmask 16384
= @15@
pattern VK_ACCESS_MEMORY_READ_BIT :: VkAccessBitmask a
pattern VK_ACCESS_MEMORY_READ_BIT = VkAccessBitmask 32768
= @16@
pattern VK_ACCESS_MEMORY_WRITE_BIT :: VkAccessBitmask a
pattern VK_ACCESS_MEMORY_WRITE_BIT = VkAccessBitmask 65536
|
536263c9a87a9465253463afc64f4a4413eebeee3811737bc66ab5e87b5c8b67 | debasishg/erlang-string-lambda | lib_lambda_utils.erl | %%
Ported from the String Lambdas in Functional Javascript
/
%%
This work is licensed under the MIT License :
%%
( c ) 2007
Portions Copyright ( c ) 2006
%%
%% Permission is hereby granted, free of charge, to any person obtaining
%% a copy of this software and associated documentation files (the
" Software " ) , to deal in the Software without restriction , including
%% without limitation the rights to use, copy, modify, merge, publish,
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
%% the following conditions:
%%
%% The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION
%% OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
%% WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-module(lib_lambda_utils).
%%
%% Exported Functions
%%
-export([map/2, compose/1, reduce/3, foldl/3, foldr/3, select/2, filter/2, all/2, all_and/1]).
map(Fn, List) ->
lists:map(lib_lambda:lambda(Fn), List).
compose(Fns) ->
lib_misc:compose(lists:map(fun lib_lambda:lambda/1, Fns)).
reduce(Fn, Init, List) ->
lists:foldl(lib_lambda:lambda(Fn), Init, List).
foldl(Fn, Init, List) -> reduce(Fn, Init, List).
foldr(Fn, Init, List) ->
lists:foldr(lib_lambda:lambda(Fn), Init, List).
select(Pred, List) ->
[Elem || Elem <- List, (lib_lambda:lambda(Pred))(Elem) == true].
filter(Pred, List) ->
lists:filter(lib_lambda:lambda(Pred), List).
all(Pred, List) ->
lists:all(lib_lambda:lambda(Pred), List).
all_and(Preds) ->
lib_misc:all_and(lists:map(fun lib_lambda:lambda/1, Preds)).
| null | https://raw.githubusercontent.com/debasishg/erlang-string-lambda/392206ee0908c4edde86dc599494996ea317ee11/lib_lambda_utils.erl | erlang |
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
without limitation the rights to use, copy, modify, merge, publish,
the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Exported Functions
| Ported from the String Lambdas in Functional Javascript
/
This work is licensed under the MIT License :
( c ) 2007
Portions Copyright ( c ) 2006
" Software " ) , to deal in the Software without restriction , including
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION
-module(lib_lambda_utils).
-export([map/2, compose/1, reduce/3, foldl/3, foldr/3, select/2, filter/2, all/2, all_and/1]).
map(Fn, List) ->
lists:map(lib_lambda:lambda(Fn), List).
compose(Fns) ->
lib_misc:compose(lists:map(fun lib_lambda:lambda/1, Fns)).
reduce(Fn, Init, List) ->
lists:foldl(lib_lambda:lambda(Fn), Init, List).
foldl(Fn, Init, List) -> reduce(Fn, Init, List).
foldr(Fn, Init, List) ->
lists:foldr(lib_lambda:lambda(Fn), Init, List).
select(Pred, List) ->
[Elem || Elem <- List, (lib_lambda:lambda(Pred))(Elem) == true].
filter(Pred, List) ->
lists:filter(lib_lambda:lambda(Pred), List).
all(Pred, List) ->
lists:all(lib_lambda:lambda(Pred), List).
all_and(Preds) ->
lib_misc:all_and(lists:map(fun lib_lambda:lambda/1, Preds)).
|
969eecb0c9566dfaced9985dd7f8c563dc3e22135b4698711eb2d717ed200014 | protz/pippo | pippo.ml | (*****************************************************************************)
pippo , a pretty interesting pre - processor using
Copyright ( C ) 2013
(* *)
(* This program is free software: you can redistribute it and/or modify *)
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
(* (at your option) any later version. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
You should have received a copy of the GNU General Public License
(* along with this program. If not, see </>. *)
(* *)
(*****************************************************************************)
(** Initialize the top-level loop. *)
let init () =
(* Toploop.set_paths (); *)
Toploop.initialize_toplevel_env ();
Toploop.input_name := "//toplevel//";
Topdirs.dir_directory (Sys.getenv "OCAML_TOPLEVEL_PATH");
;;
(** Send a phrase to the top-level, and print any relevant type error. *)
let send_phrase (phrase: string): unit =
(* Report an error message in a readable format. *)
let error f =
f ();
Format.pp_print_newline Format.err_formatter ();
Format.pp_print_string Format.err_formatter "The offending phrase is:\n";
Format.pp_print_string Format.err_formatter phrase;
Format.pp_print_newline Format.err_formatter ();
exit 1
in
try
Parse the phrase . May raise Syntaxerr.error .
let p = !Toploop.parse_toplevel_phrase (Lexing.from_string phrase) in
Send it to the top - level . May raise Typecore.error .
ignore (Toploop.execute_phrase false Format.err_formatter p);
with
| Symtable.Error e ->
error (fun () -> Symtable.report_error Format.err_formatter e);
| Typetexp.Error (loc, env, e) ->
Location.print_error Format.err_formatter loc;
error (fun () -> Typetexp.report_error env Format.err_formatter e);
| Typecore.Error (loc, env, e) ->
Location.print_error Format.err_formatter loc;
error (fun () -> Typecore.report_error env Format.err_formatter e);
| Syntaxerr.Error e ->
error (fun () -> Syntaxerr.report_error Format.err_formatter e);
| Lexer.Error (e, loc) ->
error (fun () ->
Location.print_error Format.err_formatter loc;
Lexer.report_error Format.err_formatter e;
);
;;
(** Inject a value into the top-level; the type must be provided. *)
let inject_value (name: string) (typ: string) (value: 'a): unit =
(* This is, ahem, not the cleanest possible way to achieve this. *)
let value = Obj.repr value in
(* Add [name] into the Symtable of the toplevel's value. *)
Toploop.setvalue name value;
(* Create a value descriptor suitable for injection into the type environment.
* The -1 makes sure it creates a weak type variable. *)
let vd =
let open Types in {
val_type = Btype.newty2 (Ctype.get_current_level () - 1) (Tvar None);
(* val_type = Ctype.newvar (); *)
val_kind = Val_reg;
val_loc = Location.none;
val_attributes = [];
}
in
(* Register [name] in the global type-checking environment for the top-level. *)
Toploop.toplevel_env :=
Env.add_value (Ident.create name) vd !Toploop.toplevel_env;
(* Disable the "this function application is partial" warning, since that's
* what our little trick with weak variables + ignore () above uses. *)
Warnings.parse_options false "-5";
(* Instantiate the weak type variable. *)
send_phrase (Printf.sprintf "ignore (%s: %s);;" name typ);
(* Re in-state the warning. *)
Warnings.parse_options false "+5";
;;
type state = Text | OCaml
let split haystack needle =
let r = Str.regexp needle in
Str.split r haystack
;;
let send_phrase_if phrase =
let phrase = String.trim phrase in
if String.length phrase > 0 then
send_phrase (phrase ^ "\n;;")
;;
(** Loop over the lines of a file. Enters OCaml code sections when faced with {%
* and exits them when faced with %}. *)
let iter_lines (ic: in_channel): unit =
let tok = Str.regexp "\\({%=\\|{%\\|%}\\)" in
let state = ref Text in
let buf = Buffer.create 2048 in
let process_token token =
match token, !state with
| "{%", Text ->
state := OCaml
| "%}", OCaml ->
let contents = Buffer.contents buf in
Buffer.clear buf;
let phrases = split contents ";;" in
List.iter send_phrase_if phrases;
state := Text
| _, Text ->
print_string token
| _, OCaml ->
Buffer.add_string buf token;
in
let rec process_line line =
try
let i = Str.search_forward tok line 0 in
let m = Str.matched_string line in
let l = String.length m in
let before = String.sub line 0 i in
let after = String.sub line (i + l) (String.length line - i - l) in
process_token before;
if m = "{%=" then begin
process_token "{%";
process_token "print_string ";
end else begin
process_token m;
end;
process_line after
with Not_found ->
process_token (line ^ "\n")
in
while true do
let line = input_line ic in
process_line line
done
;;
let _ =
if Array.length Sys.argv <> 2 then begin
print_endline "Usage: %s FILE\n\n Preprocesses FILE.";
exit 1
end;
(* Global initialization. *)
init ();
(* Bump the current level artificially. *)
send_phrase "let _ = ();;";
(* Test the [inject_value] function. *)
inject_value
"__version"
"unit -> unit"
(fun () ->
print_endline "This is pippo v0.1");
Chew lines one by one .
let f = Sys.argv.(1) in
let ic = open_in f in
try
iter_lines ic
with End_of_file ->
close_in ic;
()
;;
| null | https://raw.githubusercontent.com/protz/pippo/5ce9a117c8b3298d25dd3cc3927dacabee778b93/pippo.ml | ocaml | ***************************************************************************
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
***************************************************************************
* Initialize the top-level loop.
Toploop.set_paths ();
* Send a phrase to the top-level, and print any relevant type error.
Report an error message in a readable format.
* Inject a value into the top-level; the type must be provided.
This is, ahem, not the cleanest possible way to achieve this.
Add [name] into the Symtable of the toplevel's value.
Create a value descriptor suitable for injection into the type environment.
* The -1 makes sure it creates a weak type variable.
val_type = Ctype.newvar ();
Register [name] in the global type-checking environment for the top-level.
Disable the "this function application is partial" warning, since that's
* what our little trick with weak variables + ignore () above uses.
Instantiate the weak type variable.
Re in-state the warning.
* Loop over the lines of a file. Enters OCaml code sections when faced with {%
* and exits them when faced with %}.
Global initialization.
Bump the current level artificially.
Test the [inject_value] function. | pippo , a pretty interesting pre - processor using
Copyright ( C ) 2013
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
let init () =
Toploop.initialize_toplevel_env ();
Toploop.input_name := "//toplevel//";
Topdirs.dir_directory (Sys.getenv "OCAML_TOPLEVEL_PATH");
;;
let send_phrase (phrase: string): unit =
let error f =
f ();
Format.pp_print_newline Format.err_formatter ();
Format.pp_print_string Format.err_formatter "The offending phrase is:\n";
Format.pp_print_string Format.err_formatter phrase;
Format.pp_print_newline Format.err_formatter ();
exit 1
in
try
Parse the phrase . May raise Syntaxerr.error .
let p = !Toploop.parse_toplevel_phrase (Lexing.from_string phrase) in
Send it to the top - level . May raise Typecore.error .
ignore (Toploop.execute_phrase false Format.err_formatter p);
with
| Symtable.Error e ->
error (fun () -> Symtable.report_error Format.err_formatter e);
| Typetexp.Error (loc, env, e) ->
Location.print_error Format.err_formatter loc;
error (fun () -> Typetexp.report_error env Format.err_formatter e);
| Typecore.Error (loc, env, e) ->
Location.print_error Format.err_formatter loc;
error (fun () -> Typecore.report_error env Format.err_formatter e);
| Syntaxerr.Error e ->
error (fun () -> Syntaxerr.report_error Format.err_formatter e);
| Lexer.Error (e, loc) ->
error (fun () ->
Location.print_error Format.err_formatter loc;
Lexer.report_error Format.err_formatter e;
);
;;
let inject_value (name: string) (typ: string) (value: 'a): unit =
let value = Obj.repr value in
Toploop.setvalue name value;
let vd =
let open Types in {
val_type = Btype.newty2 (Ctype.get_current_level () - 1) (Tvar None);
val_kind = Val_reg;
val_loc = Location.none;
val_attributes = [];
}
in
Toploop.toplevel_env :=
Env.add_value (Ident.create name) vd !Toploop.toplevel_env;
Warnings.parse_options false "-5";
send_phrase (Printf.sprintf "ignore (%s: %s);;" name typ);
Warnings.parse_options false "+5";
;;
type state = Text | OCaml
let split haystack needle =
let r = Str.regexp needle in
Str.split r haystack
;;
let send_phrase_if phrase =
let phrase = String.trim phrase in
if String.length phrase > 0 then
send_phrase (phrase ^ "\n;;")
;;
let iter_lines (ic: in_channel): unit =
let tok = Str.regexp "\\({%=\\|{%\\|%}\\)" in
let state = ref Text in
let buf = Buffer.create 2048 in
let process_token token =
match token, !state with
| "{%", Text ->
state := OCaml
| "%}", OCaml ->
let contents = Buffer.contents buf in
Buffer.clear buf;
let phrases = split contents ";;" in
List.iter send_phrase_if phrases;
state := Text
| _, Text ->
print_string token
| _, OCaml ->
Buffer.add_string buf token;
in
let rec process_line line =
try
let i = Str.search_forward tok line 0 in
let m = Str.matched_string line in
let l = String.length m in
let before = String.sub line 0 i in
let after = String.sub line (i + l) (String.length line - i - l) in
process_token before;
if m = "{%=" then begin
process_token "{%";
process_token "print_string ";
end else begin
process_token m;
end;
process_line after
with Not_found ->
process_token (line ^ "\n")
in
while true do
let line = input_line ic in
process_line line
done
;;
let _ =
if Array.length Sys.argv <> 2 then begin
print_endline "Usage: %s FILE\n\n Preprocesses FILE.";
exit 1
end;
init ();
send_phrase "let _ = ();;";
inject_value
"__version"
"unit -> unit"
(fun () ->
print_endline "This is pippo v0.1");
Chew lines one by one .
let f = Sys.argv.(1) in
let ic = open_in f in
try
iter_lines ic
with End_of_file ->
close_in ic;
()
;;
|
5f848bbfa4ab261b94da829e9ffc5b0e7cf99857a17aacce408ef5d61ebee73c | torkve/melange-seawar | game.mli | type turn = Own | Opp;;
type state = GameOver | Wait | OwnTurn | OppTurn;;
type sendable_message = < xmit_statecall : Message.Message.statecalls >;;
type sm_constr = message_id:int -> Mpl_stdlib.env -> sendable_message;;
type t = <
tick : Protocol.s -> unit;
is_server : bool;
is_ai : bool;
message_display : string -> unit;
set_message_display : (string -> unit) -> unit;
turn_change : turn -> unit;
set_turn_change : (turn -> unit) -> unit;
shot : int -> int -> unit;
set_shot : (int -> int -> unit) -> unit;
set_state : state -> unit;
disconnect : ?exc_text:string -> ?raise_exc:bool -> bool -> unit;
send_message : sm_constr -> Protocol.s;
receive_message : Message.Message.o * Protocol.s;
set_exit_on_finish : bool -> unit;
check_finish : Board.t -> Board.t -> bool;
>
;;
val init_game : unit -> t;;
| null | https://raw.githubusercontent.com/torkve/melange-seawar/e1633ef088a2dbf893536a7ba2fa8a72eac198ec/game.mli | ocaml | type turn = Own | Opp;;
type state = GameOver | Wait | OwnTurn | OppTurn;;
type sendable_message = < xmit_statecall : Message.Message.statecalls >;;
type sm_constr = message_id:int -> Mpl_stdlib.env -> sendable_message;;
type t = <
tick : Protocol.s -> unit;
is_server : bool;
is_ai : bool;
message_display : string -> unit;
set_message_display : (string -> unit) -> unit;
turn_change : turn -> unit;
set_turn_change : (turn -> unit) -> unit;
shot : int -> int -> unit;
set_shot : (int -> int -> unit) -> unit;
set_state : state -> unit;
disconnect : ?exc_text:string -> ?raise_exc:bool -> bool -> unit;
send_message : sm_constr -> Protocol.s;
receive_message : Message.Message.o * Protocol.s;
set_exit_on_finish : bool -> unit;
check_finish : Board.t -> Board.t -> bool;
>
;;
val init_game : unit -> t;;
| |
2622acb3ef1ad3c060a9674a1e41cb96076de9ff1691fa320572432fa9934e7e | divs1210/streamer | core_test.clj | (ns streamer.core-test
(:require [clojure.test :refer :all]
[streamer.core :refer :all]
[net.cgrand.xforms :as x]))
(deftest tests
(testing "anaphoric macro bindings"
(is (= (filter odd? (range 10))
(=> (range 10)
(filter odd?)
(sequence %xform %coll)))
"`%xform` and `%coll` are accessible in last form"))
(testing "sequence!"
(is (= (->> (range 10)
(filter odd?)
(map inc))
(=> (range 10)
(filter odd?)
(map inc)
(sequence!)))
"`sequence!` translates to `sequence`"))
(testing "transduce!"
(is (= (->> (range 10)
(filter even?)
(map #(Math/sqrt %))
(reduce +))
(=> (range 10)
(filter even?)
(map #(Math/sqrt %))
(transduce! +)))
"`transduce!` translates to `transduce`")
(is (= (->> (range 10)
(filter even?)
(map #(Math/sqrt %))
(reduce + 5))
(=> (range 10)
(filter even?)
(map #(Math/sqrt %))
(transduce! + 5)))
"`transduce!` translates to `transduce`"))
(testing "into!"
(is (= (->> (range 10)
(partition 2)
(map vec)
(into {}))
(=> (range 10)
(x/partition 2)
(map vec)
(into! {})))
"`into!` translates to `into`")))
| null | https://raw.githubusercontent.com/divs1210/streamer/5e6976c5025007c7f6afa55251bc6081b43a78f4/test/streamer/core_test.clj | clojure | (ns streamer.core-test
(:require [clojure.test :refer :all]
[streamer.core :refer :all]
[net.cgrand.xforms :as x]))
(deftest tests
(testing "anaphoric macro bindings"
(is (= (filter odd? (range 10))
(=> (range 10)
(filter odd?)
(sequence %xform %coll)))
"`%xform` and `%coll` are accessible in last form"))
(testing "sequence!"
(is (= (->> (range 10)
(filter odd?)
(map inc))
(=> (range 10)
(filter odd?)
(map inc)
(sequence!)))
"`sequence!` translates to `sequence`"))
(testing "transduce!"
(is (= (->> (range 10)
(filter even?)
(map #(Math/sqrt %))
(reduce +))
(=> (range 10)
(filter even?)
(map #(Math/sqrt %))
(transduce! +)))
"`transduce!` translates to `transduce`")
(is (= (->> (range 10)
(filter even?)
(map #(Math/sqrt %))
(reduce + 5))
(=> (range 10)
(filter even?)
(map #(Math/sqrt %))
(transduce! + 5)))
"`transduce!` translates to `transduce`"))
(testing "into!"
(is (= (->> (range 10)
(partition 2)
(map vec)
(into {}))
(=> (range 10)
(x/partition 2)
(map vec)
(into! {})))
"`into!` translates to `into`")))
| |
efbe7927cbe18705db2f95b8f01be67caa62e85bf1484460e5bc72f2ad4d132f | owickstrom/gi-gtk-declarative | ManyBoxes.hs | {-# LANGUAGE OverloadedLabels #-}
# LANGUAGE OverloadedLists #
{-# LANGUAGE OverloadedStrings #-}
module ManyBoxes where
import Control.Monad ( void )
import Data.Functor ( (<&>) )
import Data.Text ( pack )
import Data.Vector ( Vector )
import qualified Data.Vector as Vector
import GI.Gtk ( Box(..)
, Button(..)
, ScrolledWindow(..)
, Window(..)
)
import GI.Gtk.Declarative
import GI.Gtk.Declarative.App.Simple
type State = Vector Int
data Event
= IncrAll
| Closed
view' :: State -> AppView Window Event
view' ns =
bin
Window
[ #title := "Many Boxes"
, on #deleteEvent (const (True, Closed))
, #widthRequest := 400
, #heightRequest := 300
]
$ bin ScrolledWindow []
$ container Box []
$ ns
<&> \n -> BoxChild defaultBoxChildProperties { padding = 10 }
$ widget Button [#label := pack (show n), on #clicked IncrAll]
update' :: State -> Event -> Transition State Event
update' ns IncrAll = Transition (succ <$> ns) (return Nothing)
update' _ Closed = Exit
main :: IO ()
main = void $ run App { view = view'
, update = update'
, inputs = []
, initialState = Vector.enumFromN 0 500
}
| null | https://raw.githubusercontent.com/owickstrom/gi-gtk-declarative/205351a54698be7b583dd34d189a89470ee2b11b/examples/ManyBoxes.hs | haskell | # LANGUAGE OverloadedLabels #
# LANGUAGE OverloadedStrings # | # LANGUAGE OverloadedLists #
module ManyBoxes where
import Control.Monad ( void )
import Data.Functor ( (<&>) )
import Data.Text ( pack )
import Data.Vector ( Vector )
import qualified Data.Vector as Vector
import GI.Gtk ( Box(..)
, Button(..)
, ScrolledWindow(..)
, Window(..)
)
import GI.Gtk.Declarative
import GI.Gtk.Declarative.App.Simple
type State = Vector Int
data Event
= IncrAll
| Closed
view' :: State -> AppView Window Event
view' ns =
bin
Window
[ #title := "Many Boxes"
, on #deleteEvent (const (True, Closed))
, #widthRequest := 400
, #heightRequest := 300
]
$ bin ScrolledWindow []
$ container Box []
$ ns
<&> \n -> BoxChild defaultBoxChildProperties { padding = 10 }
$ widget Button [#label := pack (show n), on #clicked IncrAll]
update' :: State -> Event -> Transition State Event
update' ns IncrAll = Transition (succ <$> ns) (return Nothing)
update' _ Closed = Exit
main :: IO ()
main = void $ run App { view = view'
, update = update'
, inputs = []
, initialState = Vector.enumFromN 0 500
}
|
72571eb9a930f33797cebf3cd8324b67eb13638195b3efda22d6d90bc4858a3a | ndmitchell/shake | Lexer.hs | # LANGUAGE PatternGuards #
{ - # OPTIONS_GHC -O2 # - } -- fails with GHC 7.10
-- {-# OPTIONS_GHC -ddump-simpl #-}
| Lexing is a slow point , the code below is optimised
module Development.Ninja.Lexer(Lexeme(..), lexerFile) where
import Data.Tuple.Extra
import Data.Char
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Unsafe as BS
import Development.Ninja.Type
import qualified Data.ByteString.Internal as Internal
import System.IO.Unsafe
import Data.Word
import Foreign.Ptr
import Foreign.Storable
import GHC.Exts
---------------------------------------------------------------------
LIBRARY BITS
newtype Str0 = Str0 Str -- null terminated
type S = Ptr Word8
char :: S -> Char
char x = Internal.w2c $ unsafePerformIO $ peek x
next :: S -> S
next x = x `plusPtr` 1
# INLINE dropWhile0 #
dropWhile0 :: (Char -> Bool) -> Str0 -> Str0
dropWhile0 f x = snd $ span0 f x
# INLINE span0 #
span0 :: (Char -> Bool) -> Str0 -> (Str, Str0)
span0 f = break0 (not . f)
# INLINE break0 #
break0 :: (Char -> Bool) -> Str0 -> (Str, Str0)
break0 f (Str0 bs) = (BS.unsafeTake i bs, Str0 $ BS.unsafeDrop i bs)
where
i = unsafePerformIO $ BS.unsafeUseAsCString bs $ \ptr -> do
let start = castPtr ptr :: S
let end = go start
pure $! Ptr end `minusPtr` start
go s@(Ptr a) | c == '\0' || f c = a
| otherwise = go (next s)
where c = char s
# INLINE break00 #
-- The predicate must return true for '\0'
break00 :: (Char -> Bool) -> Str0 -> (Str, Str0)
break00 f (Str0 bs) = (BS.unsafeTake i bs, Str0 $ BS.unsafeDrop i bs)
where
i = unsafePerformIO $ BS.unsafeUseAsCString bs $ \ptr -> do
let start = castPtr ptr :: S
let end = go start
pure $! Ptr end `minusPtr` start
go s@(Ptr a) | f c = a
| otherwise = go (next s)
where c = char s
head0 :: Str0 -> Char
head0 (Str0 x) = Internal.w2c $ BS.unsafeHead x
tail0 :: Str0 -> Str0
tail0 (Str0 x) = Str0 $ BS.unsafeTail x
list0 :: Str0 -> (Char, Str0)
list0 x = (head0 x, tail0 x)
take0 :: Int -> Str0 -> Str
take0 i (Str0 x) = BS.takeWhile (/= '\0') $ BS.take i x
---------------------------------------------------------------------
-- ACTUAL LEXER
Lex each line separately , rather than each lexeme
data Lexeme
= LexBind Str Expr -- [indent]foo = bar
build foo : bar | baz || qux ( | and || are represented as )
| LexInclude Expr -- include file
| LexSubninja Expr -- include file
| LexRule Str -- rule name
| LexPool Str -- pool name
| LexDefault [Expr] -- default foo bar
| LexDefine Str Expr -- foo = bar
deriving Show
isVar, isVarDot :: Char -> Bool
isVar x = x == '-' || x == '_' || isAsciiLower x || isAsciiUpper x || isDigit x
isVarDot x = x == '.' || isVar x
endsDollar :: Str -> Bool
endsDollar = BS.isSuffixOf (BS.singleton '$')
dropN :: Str0 -> Str0
dropN x = if head0 x == '\n' then tail0 x else x
dropSpace :: Str0 -> Str0
dropSpace = dropWhile0 (== ' ')
lexerFile :: Maybe FilePath -> IO [Lexeme]
lexerFile file = lexer <$> maybe BS.getContents BS.readFile file
lexer :: Str -> [Lexeme]
lexer x = lexerLoop $ Str0 $ x `BS.append` BS.pack "\n\n\0"
lexerLoop :: Str0 -> [Lexeme]
lexerLoop c_x | (c,x) <- list0 c_x = case c of
'\r' -> lexerLoop x
'\n' -> lexerLoop x
' ' -> lexBind $ dropSpace x
'#' -> lexerLoop $ dropWhile0 (/= '\n') x
'b' | Just x <- strip "uild " x -> lexBuild x
'r' | Just x <- strip "ule " x -> lexRule x
'd' | Just x <- strip "efault " x -> lexDefault x
'p' | Just x <- strip "ool " x -> lexPool x
'i' | Just x <- strip "nclude " x -> lexInclude x
's' | Just x <- strip "ubninja " x -> lexSubninja x
'\0' -> []
_ -> lexDefine c_x
where
strip str (Str0 x) = if b `BS.isPrefixOf` x then Just $ dropSpace $ Str0 $ BS.drop (BS.length b) x else Nothing
where b = BS.pack str
lexBind :: Str0 -> [Lexeme]
lexBind c_x | (c,x) <- list0 c_x = case c of
'\r' -> lexerLoop x
'\n' -> lexerLoop x
'#' -> lexerLoop $ dropWhile0 (/= '\n') x
'\0' -> []
_ -> lexxBind LexBind c_x
lexBuild :: Str0 -> [Lexeme]
lexBuild x
| (outputs,x) <- lexxExprs True x
, (rule,x) <- span0 isVarDot $ jumpCont $ dropSpace x
, (deps,x) <- lexxExprs False $ dropSpace x
= LexBuild outputs rule deps : lexerLoop x
lexDefault :: Str0 -> [Lexeme]
lexDefault x
| (files,x) <- lexxExprs False x
= LexDefault files : lexerLoop x
lexRule, lexPool, lexInclude, lexSubninja, lexDefine :: Str0 -> [Lexeme]
lexRule = lexxName LexRule
lexPool = lexxName LexPool
lexInclude = lexxFile LexInclude
lexSubninja = lexxFile LexSubninja
lexDefine = lexxBind LexDefine
lexxBind :: (Str -> Expr -> Lexeme) -> Str0 -> [Lexeme]
lexxBind ctor x
| (var,x) <- span0 isVarDot x
, ('=',x) <- list0 $ jumpCont $ dropSpace x
, (exp,x) <- lexxExpr False False $ jumpCont $ dropSpace x
= ctor var exp : lexerLoop x
lexxBind _ x = error $ "Ninja parse failed when parsing binding, " ++ show (take0 100 x)
lexxFile :: (Expr -> Lexeme) -> Str0 -> [Lexeme]
lexxFile ctor x
| (exp,rest) <- lexxExpr False False $ dropSpace x
= ctor exp : lexerLoop rest
lexxName :: (Str -> Lexeme) -> Str0 -> [Lexeme]
lexxName ctor x
| (name,rest) <- splitLineCont x
= ctor name : lexerLoop rest
lexxExprs :: Bool -> Str0 -> ([Expr], Str0)
lexxExprs stopColon x = case lexxExpr stopColon True x of
(a,c_x) | c <- head0 c_x, x <- tail0 c_x -> case c of
' ' -> add a $ lexxExprs stopColon $ dropSpace x
':' | stopColon -> new a x
_ | stopColon -> error "Ninja parsing, expected a colon"
'\r' -> new a $ dropN x
'\n' -> new a x
'\0' -> new a c_x
_ -> error "Ninja parsing, unexpected expression"
where
new a x = add a ([], x)
add (Exprs []) x = x
add a (as,x) = (a:as,x)
# NOINLINE lexxExpr #
snd will start with one of " : " or be empty
lexxExpr stopColon stopSpace = first exprs . f
where
exprs [x] = x
exprs xs = Exprs xs
special = case (stopColon, stopSpace) of
(True , True ) -> \x -> x <= ':' && (x == ':' || x == ' ' || x == '$' || x == '\r' || x == '\n' || x == '\0')
(True , False) -> \x -> x <= ':' && (x == ':' || x == '$' || x == '\r' || x == '\n' || x == '\0')
(False, True ) -> \x -> x <= '$' && ( x == ' ' || x == '$' || x == '\r' || x == '\n' || x == '\0')
(False, False) -> \x -> x <= '$' && ( x == '$' || x == '\r' || x == '\n' || x == '\0')
f x = case break00 special x of (a,x) -> if BS.null a then g x else Lit a $: g x
x $: (xs,y) = (x:xs,y)
g x | head0 x /= '$' = ([], x)
g x | c_x <- tail0 x, (c,x) <- list0 c_x = case c of
'$' -> Lit (BS.singleton '$') $: f x
' ' -> Lit (BS.singleton ' ') $: f x
':' -> Lit (BS.singleton ':') $: f x
'\n' -> f $ dropSpace x
'\r' -> f $ dropSpace $ dropN x
'{' | (name,x) <- span0 isVarDot x, not $ BS.null name, ('}',x) <- list0 x -> Var name $: f x
_ | (name,x) <- span0 isVar c_x, not $ BS.null name -> Var name $: f x
_ -> error "Ninja parsing, unexpect $ followed by unexpected stuff"
jumpCont :: Str0 -> Str0
jumpCont o
| '$' <- head0 o
, let x = tail0 o
= case head0 x of
'\n' -> dropSpace $ tail0 x
'\r' -> dropSpace $ dropN $ tail0 x
_ -> o
| otherwise = o
splitLineCont :: Str0 -> (Str, Str0)
splitLineCont x = first BS.concat $ f x
where
f x = if not $ endsDollar a then ([a], b) else let (c,d) = f $ dropSpace b in (BS.init a : c, d)
where (a,b) = splitLineCR x
splitLineCR :: Str0 -> (Str, Str0)
splitLineCR x = if BS.singleton '\r' `BS.isSuffixOf` a then (BS.init a, dropN b) else (a, dropN b)
where (a,b) = break0 (== '\n') x
| null | https://raw.githubusercontent.com/ndmitchell/shake/99c5a7a4dc1d5a069b13ed5c1bc8e4bc7f13f4a6/src/Development/Ninja/Lexer.hs | haskell | fails with GHC 7.10
{-# OPTIONS_GHC -ddump-simpl #-}
-------------------------------------------------------------------
null terminated
The predicate must return true for '\0'
-------------------------------------------------------------------
ACTUAL LEXER
[indent]foo = bar
include file
include file
rule name
pool name
default foo bar
foo = bar | # LANGUAGE PatternGuards #
| Lexing is a slow point , the code below is optimised
module Development.Ninja.Lexer(Lexeme(..), lexerFile) where
import Data.Tuple.Extra
import Data.Char
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Unsafe as BS
import Development.Ninja.Type
import qualified Data.ByteString.Internal as Internal
import System.IO.Unsafe
import Data.Word
import Foreign.Ptr
import Foreign.Storable
import GHC.Exts
LIBRARY BITS
type S = Ptr Word8
char :: S -> Char
char x = Internal.w2c $ unsafePerformIO $ peek x
next :: S -> S
next x = x `plusPtr` 1
# INLINE dropWhile0 #
dropWhile0 :: (Char -> Bool) -> Str0 -> Str0
dropWhile0 f x = snd $ span0 f x
# INLINE span0 #
span0 :: (Char -> Bool) -> Str0 -> (Str, Str0)
span0 f = break0 (not . f)
# INLINE break0 #
break0 :: (Char -> Bool) -> Str0 -> (Str, Str0)
break0 f (Str0 bs) = (BS.unsafeTake i bs, Str0 $ BS.unsafeDrop i bs)
where
i = unsafePerformIO $ BS.unsafeUseAsCString bs $ \ptr -> do
let start = castPtr ptr :: S
let end = go start
pure $! Ptr end `minusPtr` start
go s@(Ptr a) | c == '\0' || f c = a
| otherwise = go (next s)
where c = char s
# INLINE break00 #
break00 :: (Char -> Bool) -> Str0 -> (Str, Str0)
break00 f (Str0 bs) = (BS.unsafeTake i bs, Str0 $ BS.unsafeDrop i bs)
where
i = unsafePerformIO $ BS.unsafeUseAsCString bs $ \ptr -> do
let start = castPtr ptr :: S
let end = go start
pure $! Ptr end `minusPtr` start
go s@(Ptr a) | f c = a
| otherwise = go (next s)
where c = char s
head0 :: Str0 -> Char
head0 (Str0 x) = Internal.w2c $ BS.unsafeHead x
tail0 :: Str0 -> Str0
tail0 (Str0 x) = Str0 $ BS.unsafeTail x
list0 :: Str0 -> (Char, Str0)
list0 x = (head0 x, tail0 x)
take0 :: Int -> Str0 -> Str
take0 i (Str0 x) = BS.takeWhile (/= '\0') $ BS.take i x
Lex each line separately , rather than each lexeme
data Lexeme
build foo : bar | baz || qux ( | and || are represented as )
deriving Show
isVar, isVarDot :: Char -> Bool
isVar x = x == '-' || x == '_' || isAsciiLower x || isAsciiUpper x || isDigit x
isVarDot x = x == '.' || isVar x
endsDollar :: Str -> Bool
endsDollar = BS.isSuffixOf (BS.singleton '$')
dropN :: Str0 -> Str0
dropN x = if head0 x == '\n' then tail0 x else x
dropSpace :: Str0 -> Str0
dropSpace = dropWhile0 (== ' ')
lexerFile :: Maybe FilePath -> IO [Lexeme]
lexerFile file = lexer <$> maybe BS.getContents BS.readFile file
lexer :: Str -> [Lexeme]
lexer x = lexerLoop $ Str0 $ x `BS.append` BS.pack "\n\n\0"
lexerLoop :: Str0 -> [Lexeme]
lexerLoop c_x | (c,x) <- list0 c_x = case c of
'\r' -> lexerLoop x
'\n' -> lexerLoop x
' ' -> lexBind $ dropSpace x
'#' -> lexerLoop $ dropWhile0 (/= '\n') x
'b' | Just x <- strip "uild " x -> lexBuild x
'r' | Just x <- strip "ule " x -> lexRule x
'd' | Just x <- strip "efault " x -> lexDefault x
'p' | Just x <- strip "ool " x -> lexPool x
'i' | Just x <- strip "nclude " x -> lexInclude x
's' | Just x <- strip "ubninja " x -> lexSubninja x
'\0' -> []
_ -> lexDefine c_x
where
strip str (Str0 x) = if b `BS.isPrefixOf` x then Just $ dropSpace $ Str0 $ BS.drop (BS.length b) x else Nothing
where b = BS.pack str
lexBind :: Str0 -> [Lexeme]
lexBind c_x | (c,x) <- list0 c_x = case c of
'\r' -> lexerLoop x
'\n' -> lexerLoop x
'#' -> lexerLoop $ dropWhile0 (/= '\n') x
'\0' -> []
_ -> lexxBind LexBind c_x
lexBuild :: Str0 -> [Lexeme]
lexBuild x
| (outputs,x) <- lexxExprs True x
, (rule,x) <- span0 isVarDot $ jumpCont $ dropSpace x
, (deps,x) <- lexxExprs False $ dropSpace x
= LexBuild outputs rule deps : lexerLoop x
lexDefault :: Str0 -> [Lexeme]
lexDefault x
| (files,x) <- lexxExprs False x
= LexDefault files : lexerLoop x
lexRule, lexPool, lexInclude, lexSubninja, lexDefine :: Str0 -> [Lexeme]
lexRule = lexxName LexRule
lexPool = lexxName LexPool
lexInclude = lexxFile LexInclude
lexSubninja = lexxFile LexSubninja
lexDefine = lexxBind LexDefine
lexxBind :: (Str -> Expr -> Lexeme) -> Str0 -> [Lexeme]
lexxBind ctor x
| (var,x) <- span0 isVarDot x
, ('=',x) <- list0 $ jumpCont $ dropSpace x
, (exp,x) <- lexxExpr False False $ jumpCont $ dropSpace x
= ctor var exp : lexerLoop x
lexxBind _ x = error $ "Ninja parse failed when parsing binding, " ++ show (take0 100 x)
lexxFile :: (Expr -> Lexeme) -> Str0 -> [Lexeme]
lexxFile ctor x
| (exp,rest) <- lexxExpr False False $ dropSpace x
= ctor exp : lexerLoop rest
lexxName :: (Str -> Lexeme) -> Str0 -> [Lexeme]
lexxName ctor x
| (name,rest) <- splitLineCont x
= ctor name : lexerLoop rest
lexxExprs :: Bool -> Str0 -> ([Expr], Str0)
lexxExprs stopColon x = case lexxExpr stopColon True x of
(a,c_x) | c <- head0 c_x, x <- tail0 c_x -> case c of
' ' -> add a $ lexxExprs stopColon $ dropSpace x
':' | stopColon -> new a x
_ | stopColon -> error "Ninja parsing, expected a colon"
'\r' -> new a $ dropN x
'\n' -> new a x
'\0' -> new a c_x
_ -> error "Ninja parsing, unexpected expression"
where
new a x = add a ([], x)
add (Exprs []) x = x
add a (as,x) = (a:as,x)
# NOINLINE lexxExpr #
snd will start with one of " : " or be empty
lexxExpr stopColon stopSpace = first exprs . f
where
exprs [x] = x
exprs xs = Exprs xs
special = case (stopColon, stopSpace) of
(True , True ) -> \x -> x <= ':' && (x == ':' || x == ' ' || x == '$' || x == '\r' || x == '\n' || x == '\0')
(True , False) -> \x -> x <= ':' && (x == ':' || x == '$' || x == '\r' || x == '\n' || x == '\0')
(False, True ) -> \x -> x <= '$' && ( x == ' ' || x == '$' || x == '\r' || x == '\n' || x == '\0')
(False, False) -> \x -> x <= '$' && ( x == '$' || x == '\r' || x == '\n' || x == '\0')
f x = case break00 special x of (a,x) -> if BS.null a then g x else Lit a $: g x
x $: (xs,y) = (x:xs,y)
g x | head0 x /= '$' = ([], x)
g x | c_x <- tail0 x, (c,x) <- list0 c_x = case c of
'$' -> Lit (BS.singleton '$') $: f x
' ' -> Lit (BS.singleton ' ') $: f x
':' -> Lit (BS.singleton ':') $: f x
'\n' -> f $ dropSpace x
'\r' -> f $ dropSpace $ dropN x
'{' | (name,x) <- span0 isVarDot x, not $ BS.null name, ('}',x) <- list0 x -> Var name $: f x
_ | (name,x) <- span0 isVar c_x, not $ BS.null name -> Var name $: f x
_ -> error "Ninja parsing, unexpect $ followed by unexpected stuff"
jumpCont :: Str0 -> Str0
jumpCont o
| '$' <- head0 o
, let x = tail0 o
= case head0 x of
'\n' -> dropSpace $ tail0 x
'\r' -> dropSpace $ dropN $ tail0 x
_ -> o
| otherwise = o
splitLineCont :: Str0 -> (Str, Str0)
splitLineCont x = first BS.concat $ f x
where
f x = if not $ endsDollar a then ([a], b) else let (c,d) = f $ dropSpace b in (BS.init a : c, d)
where (a,b) = splitLineCR x
splitLineCR :: Str0 -> (Str, Str0)
splitLineCR x = if BS.singleton '\r' `BS.isSuffixOf` a then (BS.init a, dropN b) else (a, dropN b)
where (a,b) = break0 (== '\n') x
|
4b26ecb0d6044facc044967d90456dbfc6c1be30040cde2e4d6c65d005b9e49f | clojurecup2014/parade-route | genclass.clj | Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(assembly-load "clojure.test_clojure.genclass.examples.ExampleClass") ;;; added because we do not have automatic class loading
(ns ^{:doc "Tests for clojure.core/gen-class"
:author "Stuart Halloway, Daniel Solano Gómez"}
clojure.test-clojure.genclass
(:use clojure.test clojure.test-helper)
(:import [clojure.test_clojure.genclass.examples
ExampleClass
;;;ExampleAnnotationClass
;;;ProtectedFinalTester
ArrayDefInterface
ArrayGenInterface]
[ java.lang.annotation ElementType
;;; Retention
RetentionPolicy
));;; Target]))
(deftest arg-support
(let [example (ExampleClass.)
o (Object.)]
(is (= "foo with o, o" (.foo example o o)))
(is (= "foo with o, i" (.foo example o (int 1))))
(is (thrown? NotImplementedException (.foo example o))))) ;;; java.lang.UnsupportedOperationException
(deftest name-munging
(testing "mapping from Java fields to Clojure vars"
(is (= #'clojure.test-clojure.genclass.examples/-foo-Object-Int32 ;;; -foo-Object-int
(get-field ExampleClass 'foo_Object_Int32__var))) ;;; foo_Object_int__var
;;;(is (= #'clojure.test-clojure.genclass.examples/-ToString ;;; -toString
( get - field ExampleClass ' ToString__var ) ) ) ) ) ; ; ; toString__var ------ TODO : Figure out why JVM can find this var , we ca n't .
;todo - fix this, it depends on the order of things out of a hash-map
#_(deftest test-annotations
(let [annot-class ExampleAnnotationClass
foo-method (.getDeclaredMethod annot-class "foo" (into-array [String]))]
(testing "Class annotations:"
(is (= 2 (count (.getDeclaredAnnotations annot-class))))
(testing "@Deprecated"
(let [deprecated (.getAnnotation annot-class Deprecated)]
(is deprecated)))
(testing "@Target([])"
(let [resource (.getAnnotation annot-class Target)]
(is (= 0 (count (.value resource)))))))
(testing "Method annotations:"
(testing "@Deprecated void foo(String):"
(is (= 1 (count (.getDeclaredAnnotations foo-method))))
(is (.getAnnotation foo-method Deprecated))))
(testing "Parameter annotations:"
(let [param-annots (.getParameterAnnotations foo-method)]
(is (= 1 (alength param-annots)))
(let [first-param-annots (aget param-annots 0)]
(is (= 2 (alength first-param-annots)))
(testing "void foo(@Retention(…) String)"
(let [retention (aget first-param-annots 0)]
(is (instance? Retention retention))
(= RetentionPolicy/SOURCE (.value retention))))
(testing "void foo(@Target(…) String)"
(let [target (aget first-param-annots 1)]
(is (instance? Target target))
(is (= [ElementType/TYPE ElementType/PARAMETER] (seq (.value target)))))))))))
(deftest genclass-option-validation
IllegalArgumentException
(@#'clojure.core/validate-generate-class-options {:methods '[[fine [] void] [has-hyphen [] void]]}))))
;;;(deftest protected-final-access
;;; (let [obj (ProtectedFinalTester.)]
;;; (testing "Protected final method visibility"
( is ( thrown ? IllegalArgumentException ( .findSystemClass obj " java.lang . String " ) ) ) )
;;; (testing "Allow exposition of protected final method."
( is (= String ( .superFindSystemClass obj " java.lang . String " ) ) ) ) ) )
(deftest interface-array-type-hints
(let [array-types {:ints (class (int-array 0)) :uints (class (uint-array 0))
:bytes (class (byte-array 0)) :sbytes (class (sbyte-array 0))
:shorts (class (short-array 0)) :ushorts (class (ushort-array 0))
:chars (class (char-array 0))
:longs (class (long-array 0)) :ulongs (class (ulong-array 0))
:floats (class (float-array 0))
:doubles (class (double-array 0))
:booleans (class (boolean-array 0))
:maps (class (into-array System.Collections.Hashtable []))} ;;; java.util.Map
array-types (assoc array-types
:maps-2d (class (into-array (:maps array-types) [])))
method-with-name (fn [name methods] (first (filter #(= name (.Name %)) methods))) ;;; .getName
( first ( .getParameterTypes method ) ) )
return-type (fn [method] (.ReturnType method))] ;;; .getReturnType
(testing "definterface"
(let [method-with-name #(method-with-name % (.GetMethods ArrayDefInterface))] ;;; .getMethods
(testing "sugar primitive array hints"
(are [name type] (= (type array-types)
(parameter-type (method-with-name name)))
"takesByteArray" :bytes "takesSByteArray" :sbytes
"takesCharArray" :chars
"takesShortArray" :shorts "takesUShortArray" :ushorts
"takesIntArray" :ints "takesUIntArray" :uints
"takesLongArray" :longs "takesULongArray" :ulongs
"takesFloatArray" :floats
"takesDoubleArray" :doubles
"takesBooleanArray" :booleans))
(testing "raw primitive array hints"
(are [name type] (= (type array-types)
(return-type (method-with-name name)))
"returnsByteArray" :bytes "returnsSByteArray" :sbytes
"returnsCharArray" :chars
"returnsShortArray" :shorts "returnsUShortArray" :ushorts
"returnsIntArray" :ints "returnsUIntArray" :uints
"returnsLongArray" :longs "returnsULongArray" :ulongs
"returnsFloatArray" :floats
"returnsDoubleArray" :doubles
"returnsBooleanArray" :booleans))))
(testing "gen-interface"
(let [method-with-name #(method-with-name % (.GetMethods ArrayGenInterface))] ;;; .getMethods
(testing "sugar primitive array hints"
(are [name type] (= (type array-types)
(parameter-type (method-with-name name)))
"takesByteArray" :bytes "takesSByteArray" :sbytes
"takesCharArray" :chars
"takesShortArray" :shorts "takesUShortArray" :ushorts
"takesIntArray" :ints "takesUIntArray" :uints
"takesLongArray" :longs "takesULongArray" :ulongs
"takesFloatArray" :floats
"takesDoubleArray" :doubles
"takesBooleanArray" :booleans))
(testing "raw primitive array hints"
(are [name type] (= (type array-types)
(return-type (method-with-name name)))
"returnsByteArray" :bytes "returnsSByteArray" :sbytes
"returnsCharArray" :chars
"returnsShortArray" :shorts "returnsUShortArray" :ushorts
"returnsIntArray" :ints "returnsUIntArray" :uints
"returnsLongArray" :longs "returnsULongArray" :ulongs
"returnsFloatArray" :floats
"returnsDoubleArray" :doubles
"returnsBooleanArray" :booleans)))))) | null | https://raw.githubusercontent.com/clojurecup2014/parade-route/adb2e1ea202228e3da07902849dee08f0bb8d81c/Assets/Clojure/Internal/Plugins/clojure/test_clojure/genclass.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
added because we do not have automatic class loading
ExampleAnnotationClass
ProtectedFinalTester
Retention
Target]))
java.lang.UnsupportedOperationException
-foo-Object-int
foo_Object_int__var
(is (= #'clojure.test-clojure.genclass.examples/-ToString ;;; -toString
; ; toString__var ------ TODO : Figure out why JVM can find this var , we ca n't .
todo - fix this, it depends on the order of things out of a hash-map
(deftest protected-final-access
(let [obj (ProtectedFinalTester.)]
(testing "Protected final method visibility"
(testing "Allow exposition of protected final method."
java.util.Map
.getName
.getReturnType
.getMethods
.getMethods | Copyright ( c ) . All rights reserved .
(ns ^{:doc "Tests for clojure.core/gen-class"
:author "Stuart Halloway, Daniel Solano Gómez"}
clojure.test-clojure.genclass
(:use clojure.test clojure.test-helper)
(:import [clojure.test_clojure.genclass.examples
ExampleClass
ArrayDefInterface
ArrayGenInterface]
[ java.lang.annotation ElementType
RetentionPolicy
(deftest arg-support
(let [example (ExampleClass.)
o (Object.)]
(is (= "foo with o, o" (.foo example o o)))
(is (= "foo with o, i" (.foo example o (int 1))))
(deftest name-munging
(testing "mapping from Java fields to Clojure vars"
#_(deftest test-annotations
(let [annot-class ExampleAnnotationClass
foo-method (.getDeclaredMethod annot-class "foo" (into-array [String]))]
(testing "Class annotations:"
(is (= 2 (count (.getDeclaredAnnotations annot-class))))
(testing "@Deprecated"
(let [deprecated (.getAnnotation annot-class Deprecated)]
(is deprecated)))
(testing "@Target([])"
(let [resource (.getAnnotation annot-class Target)]
(is (= 0 (count (.value resource)))))))
(testing "Method annotations:"
(testing "@Deprecated void foo(String):"
(is (= 1 (count (.getDeclaredAnnotations foo-method))))
(is (.getAnnotation foo-method Deprecated))))
(testing "Parameter annotations:"
(let [param-annots (.getParameterAnnotations foo-method)]
(is (= 1 (alength param-annots)))
(let [first-param-annots (aget param-annots 0)]
(is (= 2 (alength first-param-annots)))
(testing "void foo(@Retention(…) String)"
(let [retention (aget first-param-annots 0)]
(is (instance? Retention retention))
(= RetentionPolicy/SOURCE (.value retention))))
(testing "void foo(@Target(…) String)"
(let [target (aget first-param-annots 1)]
(is (instance? Target target))
(is (= [ElementType/TYPE ElementType/PARAMETER] (seq (.value target)))))))))))
(deftest genclass-option-validation
IllegalArgumentException
(@#'clojure.core/validate-generate-class-options {:methods '[[fine [] void] [has-hyphen [] void]]}))))
( is ( thrown ? IllegalArgumentException ( .findSystemClass obj " java.lang . String " ) ) ) )
( is (= String ( .superFindSystemClass obj " java.lang . String " ) ) ) ) ) )
(deftest interface-array-type-hints
(let [array-types {:ints (class (int-array 0)) :uints (class (uint-array 0))
:bytes (class (byte-array 0)) :sbytes (class (sbyte-array 0))
:shorts (class (short-array 0)) :ushorts (class (ushort-array 0))
:chars (class (char-array 0))
:longs (class (long-array 0)) :ulongs (class (ulong-array 0))
:floats (class (float-array 0))
:doubles (class (double-array 0))
:booleans (class (boolean-array 0))
array-types (assoc array-types
:maps-2d (class (into-array (:maps array-types) [])))
( first ( .getParameterTypes method ) ) )
(testing "definterface"
(testing "sugar primitive array hints"
(are [name type] (= (type array-types)
(parameter-type (method-with-name name)))
"takesByteArray" :bytes "takesSByteArray" :sbytes
"takesCharArray" :chars
"takesShortArray" :shorts "takesUShortArray" :ushorts
"takesIntArray" :ints "takesUIntArray" :uints
"takesLongArray" :longs "takesULongArray" :ulongs
"takesFloatArray" :floats
"takesDoubleArray" :doubles
"takesBooleanArray" :booleans))
(testing "raw primitive array hints"
(are [name type] (= (type array-types)
(return-type (method-with-name name)))
"returnsByteArray" :bytes "returnsSByteArray" :sbytes
"returnsCharArray" :chars
"returnsShortArray" :shorts "returnsUShortArray" :ushorts
"returnsIntArray" :ints "returnsUIntArray" :uints
"returnsLongArray" :longs "returnsULongArray" :ulongs
"returnsFloatArray" :floats
"returnsDoubleArray" :doubles
"returnsBooleanArray" :booleans))))
(testing "gen-interface"
(testing "sugar primitive array hints"
(are [name type] (= (type array-types)
(parameter-type (method-with-name name)))
"takesByteArray" :bytes "takesSByteArray" :sbytes
"takesCharArray" :chars
"takesShortArray" :shorts "takesUShortArray" :ushorts
"takesIntArray" :ints "takesUIntArray" :uints
"takesLongArray" :longs "takesULongArray" :ulongs
"takesFloatArray" :floats
"takesDoubleArray" :doubles
"takesBooleanArray" :booleans))
(testing "raw primitive array hints"
(are [name type] (= (type array-types)
(return-type (method-with-name name)))
"returnsByteArray" :bytes "returnsSByteArray" :sbytes
"returnsCharArray" :chars
"returnsShortArray" :shorts "returnsUShortArray" :ushorts
"returnsIntArray" :ints "returnsUIntArray" :uints
"returnsLongArray" :longs "returnsULongArray" :ulongs
"returnsFloatArray" :floats
"returnsDoubleArray" :doubles
"returnsBooleanArray" :booleans)))))) |
58bb8eed795e1f062b2245299e856e5f15d1d82828f12ed3ff3acc1da00e0a33 | etoroxlabs/lira | LiraParserPropTest.hs |
MIT License
--
Copyright ( c ) 2019 eToroX Labs
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
--
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-- SOFTWARE.
module LiraParserPropTest
( tests
, hprop_ParsePrettyPrintIdentity
)
where
import qualified Data.Text as Text
import Data.Text (unpack)
import Lira.Contract
import Lira.Contract.Pretty (printContract)
import Lira.Contract.Parser (parseContract')
import LiraGen
import Hedgehog
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import Test.Hspec
import Test.Hspec.Hedgehog (PropertyT, forAll, hedgehog, (===))
-- | Hspec entry point wiring the parse/pretty-print round-trip property
-- into a spec tree.
tests :: Spec
tests = describe "parseContract/printContract" roundTrip
 where
  roundTrip = it "are inverses" (hedgehog hprop_ParsePrettyPrintIdentity)
-- | Round-trip property: pretty-printing any generated contract and then
-- parsing the rendered text must reproduce the original contract.
hprop_ParsePrettyPrintIdentity :: PropertyT IO ()
hprop_ParsePrettyPrintIdentity = do
  contract <- forAll contractGen
  let rendered = printContract contract
  parsed <- evalEither (parseContract' "" rendered)
  contract === parsed
| null | https://raw.githubusercontent.com/etoroxlabs/lira/33fae6d37c5467d0a59ab9e9759636f2468b3653/test/LiraParserPropTest.hs | haskell |
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE. |
MIT License
Copyright ( c ) 2019 eToroX Labs
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
module LiraParserPropTest
( tests
, hprop_ParsePrettyPrintIdentity
)
where
import qualified Data.Text as Text
import Data.Text (unpack)
import Lira.Contract
import Lira.Contract.Pretty (printContract)
import Lira.Contract.Parser (parseContract')
import LiraGen
import Hedgehog
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import Test.Hspec
import Test.Hspec.Hedgehog (PropertyT, forAll, hedgehog, (===))
tests :: Spec
tests =
describe "parseContract/printContract" $
it "are inverses" $
hedgehog hprop_ParsePrettyPrintIdentity
hprop_ParsePrettyPrintIdentity :: PropertyT IO ()
hprop_ParsePrettyPrintIdentity = do
c <- forAll contractGen
let s = printContract c
got <- evalEither (parseContract' "" s)
c === got
|
318e2bf60e6b20967b51dc322e2263271590171078ea242f2c11eab5135ccba6 | clash-lang/clash-compiler | Poly2.hs | module Poly2 where
import Clash.Prelude
-- NOTE(review): `b` is an unconstrained type variable and `y` is unused.
-- Per the source URL this module lives in Clash's tests/shouldfail suite,
-- i.e. the compiler is *expected* to reject this topEntity -- do not "fix".
topEntity :: (a ~ Bool) => a -> b -> a
topEntity x y = x
| null | https://raw.githubusercontent.com/clash-lang/clash-compiler/dd52247633ab73f1960b401ea953580cfaf4f6ef/tests/shouldfail/Poly2.hs | haskell | module Poly2 where
import Clash.Prelude
topEntity :: (a ~ Bool) => a -> b -> a
topEntity x y = x
| |
e394106117a1ad4235a14c962b6099d4707c17284c801432d926fb0d08509a69 | Dexterminator/imperimetric | core_card.cljs | (ns imperimetric.core-card
(:require-macros
[devcards.core :as dc])
(:require
[imperimetric.cards]))
(dc/start-devcard-ui!)
| null | https://raw.githubusercontent.com/Dexterminator/imperimetric/57e975c470490724f69cc43c2f5d0fa2359745d0/src/devcards/core_card.cljs | clojure | (ns imperimetric.core-card
(:require-macros
[devcards.core :as dc])
(:require
[imperimetric.cards]))
(dc/start-devcard-ui!)
| |
5602524bb10ef9f84cfa6ede3f0126af5f3468d94283ae4358c432814a8d3379 | facebookarchive/pfff | token_views_opa.mli |
(* Fix: the line between the Paren and Bracket variants had lost its
   comment delimiters (extraction artifact), leaving bare text that makes
   this interface file syntactically invalid; restored as a comment. *)

(** Token type, re-exported from the OPA token parser. *)
type token = Parser_opa.token

(** A shallow "view" of a token stream in which matching delimiters have
    been grouped, without building a full AST. *)
type tree =
  | T of token
  | Paren of tree list list (* grouped by comma *)
  (* grouped by comma too, as in type defs *)
  | Bracket of tree list list
  | Xml of tree list (* attributes *) * tree list (* children *)

(** Group a flat token list into delimiter-matched trees. *)
val mk_tree:
  Parser_opa.token list -> tree list

(** Dump a tree list as a generic OCaml value (debugging aid). *)
val vof_tree_list:
  tree list -> Ocaml.v
| null | https://raw.githubusercontent.com/facebookarchive/pfff/ec21095ab7d445559576513a63314e794378c367/lang_opa/parsing/token_views_opa.mli | ocaml | grouped by comma
attributes
children |
type token = Parser_opa.token
type tree =
| T of token
grouped by comma too , as in type defs
| Bracket of tree list list
val mk_tree:
Parser_opa.token list -> tree list
val vof_tree_list:
tree list -> Ocaml.v
|
b373318b52664eb7e0cf779b057afc8f20881653ba38b4a35fa17fffa8b57a70 | input-output-hk/cardano-ledger | EpochNumber.hs | {-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE UndecidableInstances #
module Cardano.Chain.Slotting.EpochNumber (
EpochNumber (..),
isBootstrapEra,
)
where
import Cardano.Ledger.Binary (
DecCBOR (..),
EncCBOR (..),
FromCBOR (..),
ToCBOR (..),
fromByronCBOR,
toByronCBOR,
)
import Cardano.Prelude
import qualified Data.Aeson as Aeson
import Data.Data (Data)
import Data.Ix (Ix)
import Formatting (bprint, int)
import Formatting.Buildable (Buildable (..))
import NoThunks.Class (NoThunks (..))
import Text.JSON.Canonical (FromJSON (..), ToJSON (..))
-- | Index of epoch.
newtype EpochNumber = EpochNumber
{ getEpochNumber :: Word64
}
deriving
( Show
, Data
, Eq
, Ord
, Num
, Enum
, Ix
, Integral
, Real
, Generic
, Bounded
, NFData
, NoThunks
)
instance Buildable EpochNumber where
build = bprint ("#" . int)
-- Used for debugging purposes only
instance Aeson.ToJSON EpochNumber
instance ToCBOR EpochNumber where
toCBOR = toByronCBOR
instance FromCBOR EpochNumber where
fromCBOR = fromByronCBOR
instance EncCBOR EpochNumber where
encCBOR (EpochNumber epoch) = encCBOR epoch
encodedSizeExpr size = encodedSizeExpr size . fmap getEpochNumber
instance DecCBOR EpochNumber where
decCBOR = EpochNumber <$> decCBOR
-- Note that it will be encoded as string, because 'EpochNumber' doesn't
-- necessarily fit into JS number.
instance Monad m => ToJSON m EpochNumber where
toJSON = toJSON . getEpochNumber
instance MonadError SchemaError m => FromJSON m EpochNumber where
fromJSON = fmap EpochNumber . fromJSON
-- | Whether the given epoch still belongs to the bootstrap era.
--
-- The bootstrap era lasts while stakes remain locked; the reward era begins
-- at the designated unlock epoch:
--
-- @
--                     [unlock stake epoch]
--                    /
-- Epoch: ... E-3 E-2 E-1 E+0 E+1 E+2 E+3 ...
-- ------------------ | -----------------------
--    Bootstrap era         Reward era
-- @
isBootstrapEra ::
  -- | Unlock stake epoch
  EpochNumber ->
  -- | Epoch in question (for which we determine whether it belongs to the
  -- bootstrap era)
  EpochNumber ->
  Bool
isBootstrapEra unlockStakeEpoch epoch = unlockStakeEpoch > epoch
| null | https://raw.githubusercontent.com/input-output-hk/cardano-ledger/b9aa1ad1728c0ceeca62657ec94d6d099896c052/eras/byron/ledger/impl/src/Cardano/Chain/Slotting/EpochNumber.hs | haskell | # LANGUAGE DeriveDataTypeable #
# LANGUAGE OverloadedStrings #
| Index of epoch.
Used for debugging purposes only
| Bootstrap era is ongoing until stakes are unlocked. The reward era starts
from the epoch specified as the epoch that unlocks stakes:
@
[unlock stake epoch]
/
------------------ | -----------------------
Bootstrap era Reward era
@
| Unlock stake epoch
| Epoch in question (for which we determine whether it belongs to the
bootstrap era) | # LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE UndecidableInstances #
module Cardano.Chain.Slotting.EpochNumber (
EpochNumber (..),
isBootstrapEra,
)
where
import Cardano.Ledger.Binary (
DecCBOR (..),
EncCBOR (..),
FromCBOR (..),
ToCBOR (..),
fromByronCBOR,
toByronCBOR,
)
import Cardano.Prelude
import qualified Data.Aeson as Aeson
import Data.Data (Data)
import Data.Ix (Ix)
import Formatting (bprint, int)
import Formatting.Buildable (Buildable (..))
import NoThunks.Class (NoThunks (..))
import Text.JSON.Canonical (FromJSON (..), ToJSON (..))
newtype EpochNumber = EpochNumber
{ getEpochNumber :: Word64
}
deriving
( Show
, Data
, Eq
, Ord
, Num
, Enum
, Ix
, Integral
, Real
, Generic
, Bounded
, NFData
, NoThunks
)
instance Buildable EpochNumber where
build = bprint ("#" . int)
instance Aeson.ToJSON EpochNumber
instance ToCBOR EpochNumber where
toCBOR = toByronCBOR
instance FromCBOR EpochNumber where
fromCBOR = fromByronCBOR
instance EncCBOR EpochNumber where
encCBOR (EpochNumber epoch) = encCBOR epoch
encodedSizeExpr size = encodedSizeExpr size . fmap getEpochNumber
instance DecCBOR EpochNumber where
decCBOR = EpochNumber <$> decCBOR
Note that it will be encoded as string , because ' EpochNumber ' does n't
necessary fit into JS number .
instance Monad m => ToJSON m EpochNumber where
toJSON = toJSON . getEpochNumber
instance MonadError SchemaError m => FromJSON m EpochNumber where
fromJSON = fmap EpochNumber . fromJSON
Epoch : ... E-3 E-2 E-1 E+0 E+1 E+2 E+3 ...
isBootstrapEra ::
EpochNumber ->
EpochNumber ->
Bool
isBootstrapEra unlockStakeEpoch epoch = epoch < unlockStakeEpoch
|
1cb950ac8485e3abf09ac6f1ca2b40c2f827cad81e4ca13b7a3cc4fc4bb5389b | scicloj/notespace | watch.clj | (ns scicloj.notespace.v4.watch
(:require [clojure.pprint :as pp]
[nextjournal.beholder :as beholder]
[scicloj.notespace.v4.events.pipeline :as v4.pipeline]
[scicloj.notespace.v4.path :as v4.path]
[scicloj.notespace.v4.config :as v4.config])
(:import sun.nio.fs.UnixPath))
(defn handle
  "Beholder file-event callback. Unless the config flag :ignore-file-changes?
  is set, and the changed path is a Clojure source file (per
  v4.path/clj-path?), emit a buffer-update event into the pipeline."
  [{:keys [^UnixPath path]}]
  (let [path-str (.toString path)]
    (when (and (not (:ignore-file-changes? @v4.config/*config))
               (v4.path/clj-path? path-str))
      (v4.pipeline/process-event
       {:path       (v4.path/real-path path-str)
        :event/type :scicloj.notespace.v4.events.handle/buffer-update}))))
(defn watch
  "Start watching the current directory for file changes with beholder;
  events are routed to `handle`. Returns the watcher (pass it to `stop`)."
  []
  (beholder/watch #'handle "."))
(defn stop
  "Stop a watcher previously returned by `watch`."
  [watcher]
  (beholder/stop watcher))
| null | https://raw.githubusercontent.com/scicloj/notespace/5235aefd2d9b472d1d06204128089d41885034f3/src/scicloj/notespace/v4/watch.clj | clojure | (ns scicloj.notespace.v4.watch
(:require [clojure.pprint :as pp]
[nextjournal.beholder :as beholder]
[scicloj.notespace.v4.events.pipeline :as v4.pipeline]
[scicloj.notespace.v4.path :as v4.path]
[scicloj.notespace.v4.config :as v4.config])
(:import sun.nio.fs.UnixPath))
(defn handle [{:keys [^UnixPath path]}]
(when-not (:ignore-file-changes? @v4.config/*config)
(let [path-str (.toString path)]
(when (v4.path/clj-path? path-str)
(v4.pipeline/process-event
{:path (v4.path/real-path path-str)
:event/type :scicloj.notespace.v4.events.handle/buffer-update})))))
(defn watch []
(beholder/watch #'handle "."))
(defn stop [watcher]
(beholder/stop watcher))
| |
31761eb0aa2b8852d954a92fb8c066ea475d63f528cd8547f190bf87bf75bd14 | softwarelanguageslab/maf | R5RS_ad_qstand-3.scm | ; Changes:
* removed : 0
* added : 1
* swaps : 1
; * negated predicates: 0
; * swapped branches: 0
* calls to i d fun : 1
(letrec ((quick-sort (lambda (vector)
(letrec ((swap (lambda (vector index1 index2)
(let ((temp (vector-ref vector index1)))
(vector-set! vector index1 (vector-ref vector index2))
(vector-set! vector index2 temp))))
(quick-sort-aux (lambda (low high)
(<change>
()
>)
(letrec ((quick-sort-aux-iter (lambda (mid-value from to)
(letrec ((quick-right (lambda (index1)
(if (< (vector-ref vector index1) mid-value)
(quick-right (+ index1 1))
index1)))
(quick-left (lambda (index2)
(if (> (vector-ref vector index2) mid-value)
(quick-left (- index2 1))
index2))))
(let ((index1 (quick-right (+ from 1)))
(index2 (quick-left to)))
(if (< index1 index2)
(begin
(swap vector index1 index2)
(quick-sort-aux-iter mid-value index1 index2))
index2))))))
(if (< low high)
(begin
(if (> (vector-ref vector low) (vector-ref vector high))
(swap vector low high)
#f)
(let ((mid-index (quick-sort-aux-iter (vector-ref vector low) low high)))
(<change>
(swap vector mid-index low)
((lambda (x) x) (swap vector mid-index low)))
(quick-sort-aux low (- mid-index 1))
(quick-sort-aux (+ mid-index 1) high)))
#f)))))
(quick-sort-aux 0 (- (vector-length vector) 1)))))
(test1 (vector 7 2 4 6 0 8 5 3 1)))
(quick-sort test1)
(letrec ((test2 (vector 8 1 4 9 6 3 5 2 7 0)))
(<change>
(quick-sort test2)
(letrec ((test3 (vector 8 3 6 6 1 5 4 2 9 6)))
(quick-sort test3)
(if (equal? test1 (vector 0 1 2 3 4 5 6 7 8))
(if (equal? test2 (vector 0 1 2 3 4 5 6 7 8 9))
(equal? test3 (vector 1 2 3 4 5 6 6 6 8 9))
#f)
#f)))
(<change>
(letrec ((test3 (vector 8 3 6 6 1 5 4 2 9 6)))
(quick-sort test3)
(if (equal? test1 (vector 0 1 2 3 4 5 6 7 8))
(if (equal? test2 (vector 0 1 2 3 4 5 6 7 8 9))
(equal? test3 (vector 1 2 3 4 5 6 6 6 8 9))
#f)
#f))
(quick-sort test2)))) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_ad_qstand-3.scm | scheme | Changes:
* negated predicates: 0
* swapped branches: 0 | * removed : 0
* added : 1
* swaps : 1
* calls to i d fun : 1
(letrec ((quick-sort (lambda (vector)
(letrec ((swap (lambda (vector index1 index2)
(let ((temp (vector-ref vector index1)))
(vector-set! vector index1 (vector-ref vector index2))
(vector-set! vector index2 temp))))
(quick-sort-aux (lambda (low high)
(<change>
()
>)
(letrec ((quick-sort-aux-iter (lambda (mid-value from to)
(letrec ((quick-right (lambda (index1)
(if (< (vector-ref vector index1) mid-value)
(quick-right (+ index1 1))
index1)))
(quick-left (lambda (index2)
(if (> (vector-ref vector index2) mid-value)
(quick-left (- index2 1))
index2))))
(let ((index1 (quick-right (+ from 1)))
(index2 (quick-left to)))
(if (< index1 index2)
(begin
(swap vector index1 index2)
(quick-sort-aux-iter mid-value index1 index2))
index2))))))
(if (< low high)
(begin
(if (> (vector-ref vector low) (vector-ref vector high))
(swap vector low high)
#f)
(let ((mid-index (quick-sort-aux-iter (vector-ref vector low) low high)))
(<change>
(swap vector mid-index low)
((lambda (x) x) (swap vector mid-index low)))
(quick-sort-aux low (- mid-index 1))
(quick-sort-aux (+ mid-index 1) high)))
#f)))))
(quick-sort-aux 0 (- (vector-length vector) 1)))))
(test1 (vector 7 2 4 6 0 8 5 3 1)))
(quick-sort test1)
(letrec ((test2 (vector 8 1 4 9 6 3 5 2 7 0)))
(<change>
(quick-sort test2)
(letrec ((test3 (vector 8 3 6 6 1 5 4 2 9 6)))
(quick-sort test3)
(if (equal? test1 (vector 0 1 2 3 4 5 6 7 8))
(if (equal? test2 (vector 0 1 2 3 4 5 6 7 8 9))
(equal? test3 (vector 1 2 3 4 5 6 6 6 8 9))
#f)
#f)))
(<change>
(letrec ((test3 (vector 8 3 6 6 1 5 4 2 9 6)))
(quick-sort test3)
(if (equal? test1 (vector 0 1 2 3 4 5 6 7 8))
(if (equal? test2 (vector 0 1 2 3 4 5 6 7 8 9))
(equal? test3 (vector 1 2 3 4 5 6 6 6 8 9))
#f)
#f))
(quick-sort test2)))) |
da6c56b00230667564f3d12705851f8fcdbb7812b12b19e252351e3da7cf2c86 | nc6/tabula | Options.hs |
Copyright ( c ) 2014 Genome Research Ltd.
Author : < >
This program is free software : you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation ; either version 3 of the License , or ( at your option ) any later
version .
This program is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU General Public License for more
details .
You should have received a copy of the GNU General Public License along with
this program . If not , see < / > .
Copyright (c) 2014 Genome Research Ltd.
Author: Nicholas A. Clarke <>
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General Public License along with
this program. If not, see </>.
-}
{-# LANGUAGE DataKinds, TypeOperators #-}
# LANGUAGE FlexibleContexts , NoMonomorphismRestriction #
# OPTIONS_GHC -fno - warn - missing - signatures #
module Tabula.Options (
options
, command
, verbosity
, quiet
, project
, resume
, db
, bufferSize
, global
, Command(..)
, Options
, RecordOptions
, CatOptions
, T_project
, T_db
, T_global
, readDestination
, readProjectName
, showAsHistory
) where
import Data.Char (toUpper)
import Data.Version (showVersion)
import Data.Vinyl
import Database.Redis (PortID(PortNumber))
import Options.Applicative hiding (command)
import qualified Options.Applicative as Opt
import System.Log (Priority(..))
import Tabula.Command.Cat (Format(..))
import Tabula.Destination
import Tabula.Destination.File
import Tabula.Destination.Redis
import qualified Text.Parsec as P
import qualified Paths_tabula as Paths (version)
-------------- Options ------------------
data Command = Record (PlainRec RecordOptions)
| Cat (PlainRec CatOptions)
| List (PlainRec ListOptions)
command = Field :: "command" ::: Command
type Options = "command" ::: Command ': CommonOptions
-- Common options --
type CommonOptions = ["verbosity" ::: Priority, "quiet" ::: Bool]
-- | Verbosity (logging level)
verbosity = Field :: "verbosity" ::: Priority
-- | Quiet mode (disable all logging)
quiet = Field :: "quiet" ::: Bool
-- Shared options --
-- | Specify which project
type T_project = "project" ::: String
project = Field :: T_project
| Project database .
type T_db = "db" ::: Maybe DestinationProvider
db = Field :: T_db
-- | Use global namespace?
type T_global = "global" ::: Bool
global = Field :: T_global
-- Default options --
type RecordOptions = [ "resume" ::: Bool
, T_db
, "bufferSize" ::: Int
, T_project
, T_global
]
-- | Resume a session
resume = Field :: "resume" ::: Bool
-- | Set buffer size
bufferSize = Field :: "bufferSize" ::: Int
| Cat options
type T_showAsHistory = "showAsHistory" ::: Format
showAsHistory = Field :: T_showAsHistory
type CatOptions = [ T_db, T_project, T_showAsHistory, T_global ]
type ListOptions = '[T_db]
------------- Parsers ------------------
-- | @--version@ flag parser: when given, prints the cabal-reported tabula
-- version (from Paths_tabula) and exits instead of running a command.
version :: Parser (a -> a)
version = infoOption ("Tabula version " ++ showVersion Paths.version)
  ( long "version"
  <> help "Print version information" )
-- Shared options
projectOption :: Parser String
projectOption = argument readProjectName (metavar "PROJECT" <> value "default")
destinationOption :: String -> Parser DestinationProvider
destinationOption helpText = nullOption (long "destination"
<> short 'd'
<> metavar "DESTINATION"
<> reader readDestination
<> help helpText)
globalOption :: Parser Bool
globalOption = switch (long "global"
<> short 'g'
<> help "Use global rather than user namespace.")
-- Option groups
recordOptions :: Rec RecordOptions Parser
recordOptions = resume <-: (switch (long "resume" <> help "Resume existing session."))
<+> db <-: optional (destinationOption "Destination to write logs to.")
<+> bufferSize <-: option (long "bufferSize"
<> metavar "SIZE"
<> value 64
<> help "Set buffer size (in multiples of 4096B)")
<+> project <-: projectOption
<+> global <-: globalOption
catOptions :: Rec CatOptions Parser
catOptions = db <-: optional (destinationOption "Destination to read logs from.")
<+> project <-: projectOption
<+> showAsHistory <-: (flag Full AsHistory (long "as-history"
<> help "Show in bash history format (e.g. only commands)"))
<+> global <-: globalOption
listOptions :: Rec ListOptions Parser
listOptions = db <-: optional (destinationOption "Destination to list projects.")
commonOptions :: Rec CommonOptions Parser
commonOptions = verbosity <-: (nullOption (long "verbosity"
<> short 'V'
<> metavar "LEVEL"
<> reader readPriority
<> value ERROR
<> help "Set logging level (default ERROR)"))
<+> quiet <-: (switch (long "quiet" <> short 'q' <> help "Disable logging"))
options = info (version <*> helper <*> commands <++> (dist commonOptions)) (
header "tabula - a utility for recording shell sessions."
<> progDesc "Open a recorded shell session for a specific project.")
where
commands = subparser (
Opt.command "start" (
info (fmap ((command =:) . Record) $ dist recordOptions)
(progDesc "Start or resume a project session."))
<> Opt.command "cat" (
info (fmap ((command =:) . Cat) $ dist catOptions)
(progDesc "Print a project session to stdout."))
<> Opt.command "ls" (
info (fmap ((command =:) . List) $ dist listOptions)
(progDesc "List all projects created at a destination."))
)
(<++>) a b = liftA2 (<+>) a b
--------------- Utility -------------------
-- | Case-insensitively parse a logging-level name into a 'Priority';
-- fails in @m@ with a descriptive message for unknown names.
readPriority :: Monad m => String -> m Priority
readPriority p = maybe unknown return (lookup normalized table)
  where
    normalized = map toUpper p
    unknown = fail ("Invalid logging level specified: " ++ normalized)
    table =
      [ ("DEBUG", DEBUG)
      , ("INFO", INFO)
      , ("NOTICE", NOTICE)
      , ("WARNING", WARNING)
      , ("ERROR", ERROR)
      , ("CRITICAL", CRITICAL)
      , ("ALERT", ALERT)
      , ("EMERGENCY", EMERGENCY)
      ]
-- | Validate a project name via the destination module's 'projectNameParser';
-- fails in @m@ with the parse error rendered into the message.
readProjectName :: Monad m => String -> m String
readProjectName s = case P.parse projectNameParser "Project name" s of
  Left err -> fail $ "Invalid project name: " ++ show err
  Right x -> return x
-- | Parse a destination URI of the form @file://PATH@ or
-- @redis://[HOST][:PORT]@ into a 'DestinationProvider'.
-- Missing redis host/port fall back to hedis' 'defaultConnectInfo'.
readDestination :: Monad m => String -> m DestinationProvider
readDestination s = let
    -- "://" separator between the protocol tag and the remainder
    protoSep = P.string "://"
    path = P.many1 (P.noneOf ":")
    -- file://<path>  ->  file-backed provider
    fileDest = P.string "file" >> protoSep >> do
      p <- path
      return $ fileProvider p
    -- redis://[host][:port]  ->  redis-backed provider
    redisDest = P.string "redis" >> protoSep >> do
      host <- P.option (connectHost defaultConnectInfo) $
        P.many1 (P.alphaNum <|> P.char '.')
      port <- P.option (connectPort defaultConnectInfo) $
        liftA (PortNumber . fromIntegral . readInt) (P.char ':' >> P.many1 (P.digit))
      let connInfo = defaultConnectInfo {
          connectHost = host
        , connectPort = port
      }
      return $ redisProvider connInfo
    destinations = fileDest <|> redisDest
    -- read is safe here: the parser guarantees a non-empty digit string
    readInt :: String -> Integer
    readInt = read
  in case P.parse destinations "Destination" s of
    Left err -> fail $ "Invalid destination: " ++ show err
    Right x -> return x
| null | https://raw.githubusercontent.com/nc6/tabula/f76524bbe56b45b125707cf1f4f9526817c4e6e8/Tabula/Options.hs | haskell | # LANGUAGE DataKinds, TypeOperators #
------------ Options ------------------
Common options --
| Verbosity (logging level)
| Quiet mode (disable all logging)
Shared options --
| Specify which project
| Use global namespace?
Default options --
| Resume a session
| Set buffer size
----------- Parsers ------------------
Shared options
Option groups
------------- Utility ------------------- |
Copyright ( c ) 2014 Genome Research Ltd.
Author : < >
This program is free software : you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation ; either version 3 of the License , or ( at your option ) any later
version .
This program is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU General Public License for more
details .
You should have received a copy of the GNU General Public License along with
this program . If not , see < / > .
Copyright (c) 2014 Genome Research Ltd.
Author: Nicholas A. Clarke <>
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General Public License along with
this program. If not, see </>.
-}
# LANGUAGE FlexibleContexts , NoMonomorphismRestriction #
# OPTIONS_GHC -fno - warn - missing - signatures #
module Tabula.Options (
options
, command
, verbosity
, quiet
, project
, resume
, db
, bufferSize
, global
, Command(..)
, Options
, RecordOptions
, CatOptions
, T_project
, T_db
, T_global
, readDestination
, readProjectName
, showAsHistory
) where
import Data.Char (toUpper)
import Data.Version (showVersion)
import Data.Vinyl
import Database.Redis (PortID(PortNumber))
import Options.Applicative hiding (command)
import qualified Options.Applicative as Opt
import System.Log (Priority(..))
import Tabula.Command.Cat (Format(..))
import Tabula.Destination
import Tabula.Destination.File
import Tabula.Destination.Redis
import qualified Text.Parsec as P
import qualified Paths_tabula as Paths (version)
data Command = Record (PlainRec RecordOptions)
| Cat (PlainRec CatOptions)
| List (PlainRec ListOptions)
command = Field :: "command" ::: Command
type Options = "command" ::: Command ': CommonOptions
type CommonOptions = ["verbosity" ::: Priority, "quiet" ::: Bool]
verbosity = Field :: "verbosity" ::: Priority
quiet = Field :: "quiet" ::: Bool
type T_project = "project" ::: String
project = Field :: T_project
| Project database .
type T_db = "db" ::: Maybe DestinationProvider
db = Field :: T_db
type T_global = "global" ::: Bool
global = Field :: T_global
type RecordOptions = [ "resume" ::: Bool
, T_db
, "bufferSize" ::: Int
, T_project
, T_global
]
resume = Field :: "resume" ::: Bool
bufferSize = Field :: "bufferSize" ::: Int
| Cat options
type T_showAsHistory = "showAsHistory" ::: Format
showAsHistory = Field :: T_showAsHistory
type CatOptions = [ T_db, T_project, T_showAsHistory, T_global ]
type ListOptions = '[T_db]
version :: Parser (a -> a)
version = infoOption ("Tabula version " ++ showVersion Paths.version)
( long "version"
<> help "Print version information" )
projectOption :: Parser String
projectOption = argument readProjectName (metavar "PROJECT" <> value "default")
destinationOption :: String -> Parser DestinationProvider
destinationOption helpText = nullOption (long "destination"
<> short 'd'
<> metavar "DESTINATION"
<> reader readDestination
<> help helpText)
globalOption :: Parser Bool
globalOption = switch (long "global"
<> short 'g'
<> help "Use global rather than user namespace.")
recordOptions :: Rec RecordOptions Parser
recordOptions = resume <-: (switch (long "resume" <> help "Resume existing session."))
<+> db <-: optional (destinationOption "Destination to write logs to.")
<+> bufferSize <-: option (long "bufferSize"
<> metavar "SIZE"
<> value 64
<> help "Set buffer size (in multiples of 4096B)")
<+> project <-: projectOption
<+> global <-: globalOption
catOptions :: Rec CatOptions Parser
catOptions = db <-: optional (destinationOption "Destination to read logs from.")
<+> project <-: projectOption
<+> showAsHistory <-: (flag Full AsHistory (long "as-history"
<> help "Show in bash history format (e.g. only commands)"))
<+> global <-: globalOption
listOptions :: Rec ListOptions Parser
listOptions = db <-: optional (destinationOption "Destination to list projects.")
commonOptions :: Rec CommonOptions Parser
commonOptions = verbosity <-: (nullOption (long "verbosity"
<> short 'V'
<> metavar "LEVEL"
<> reader readPriority
<> value ERROR
<> help "Set logging level (default ERROR)"))
<+> quiet <-: (switch (long "quiet" <> short 'q' <> help "Disable logging"))
options = info (version <*> helper <*> commands <++> (dist commonOptions)) (
header "tabula - a utility for recording shell sessions."
<> progDesc "Open a recorded shell session for a specific project.")
where
commands = subparser (
Opt.command "start" (
info (fmap ((command =:) . Record) $ dist recordOptions)
(progDesc "Start or resume a project session."))
<> Opt.command "cat" (
info (fmap ((command =:) . Cat) $ dist catOptions)
(progDesc "Print a project session to stdout."))
<> Opt.command "ls" (
info (fmap ((command =:) . List) $ dist listOptions)
(progDesc "List all projects created at a destination."))
)
(<++>) a b = liftA2 (<+>) a b
readPriority :: Monad m => String -> m Priority
readPriority p = case map toUpper p of
"DEBUG" -> return DEBUG
"INFO" -> return INFO
"NOTICE" -> return NOTICE
"WARNING" -> return WARNING
"ERROR" -> return ERROR
"CRITICAL" -> return CRITICAL
"ALERT" -> return ALERT
"EMERGENCY" -> return EMERGENCY
x -> fail $ "Invalid logging level specified: " ++ x
readProjectName :: Monad m => String -> m String
readProjectName s = case P.parse projectNameParser "Project name" s of
Left err -> fail $ "Invalid project name: " ++ show err
Right x -> return x
-- | Parse a destination URI of the form @file://PATH@ or
--   @redis://[HOST][:PORT]@ into a 'DestinationProvider'.
--   Missing redis host/port fall back to hedis' 'defaultConnectInfo'.
readDestination :: Monad m => String -> m DestinationProvider
readDestination s = let
    protoSep = P.string "://"
    path = P.many1 (P.noneOf ":")
    -- file://<path>
    fileDest = P.string "file" >> protoSep >> do
      p <- path
      return $ fileProvider p
    -- redis://[host][:port]
    redisDest = P.string "redis" >> protoSep >> do
      host <- P.option (connectHost defaultConnectInfo) $
        P.many1 (P.alphaNum <|> P.char '.')
      port <- P.option (connectPort defaultConnectInfo) $
        liftA (PortNumber . fromIntegral . readInt) (P.char ':' >> P.many1 (P.digit))
      let connInfo = defaultConnectInfo {
              connectHost = host
            , connectPort = port
            }
      return $ redisProvider connInfo
    destinations = fileDest <|> redisDest
    -- Partial 'read' is safe here: the parser guarantees digits only.
    -- NOTE(review): no bounds check before truncating to a port number.
    readInt :: String -> Integer
    readInt = read
  in case P.parse destinations "Destination" s of
    Left err -> fail $ "Invalid destination: " ++ show err
    Right x -> return x
import Data.Algorithm.MaximalCliques
-- | Tiny demo: print every maximal clique of a small hard-coded graph.
main = do
  print $ getMaximalCliques edges nodes

-- | Adjacency predicate; each edge is listed once, low vertex first.
--   NOTE(review): this predicate is not symmetric (edges 5 1 = False);
--   verify that getMaximalCliques treats it as an undirected relation.
edges 1 5 = True
edges 1 2 = True
edges 2 3 = True
edges 2 5 = True
edges 4 5 = True
edges 3 4 = True
edges 4 6 = True
edges _ _ = False
nodes = [1..6]
main = do
print $ getMaximalCliques edges nodes
edges 1 5 = True
edges 1 2 = True
edges 2 3 = True
edges 2 5 = True
edges 4 5 = True
edges 3 4 = True
edges 4 6 = True
edges _ _ = False
nodes = [1..6]
(ns fluree.db.ledger.api.open-test
(:require [clojure.test :refer :all]
[fluree.db.test-helpers :as test]
[org.httpkit.client :as http]
[fluree.db.util.json :as json]
[fluree.db.api :as fdb]
[fluree.db.query.http-signatures :as http-signatures])
(:import (java.util UUID)))
(use-fixtures :once test/test-system)
;; Utility vars and functions
;; Base URLs for the HTTP API under test; port is assigned by the
;; test-system fixture.
(def endpoint-url (str ":" @test/port "/fdb/" test/ledger-endpoints "/"))
(def endpoint-url-short (str ":" @test/port "/fdb/"))
(defn- rand-str
  "Returns a random string of uppercase ASCII letters (A-Z),
  between 5 and 24 characters long."
  []
  (apply str
         (take (+ 5 (rand-int 20)) ;; at least 5 characters
               (repeatedly #(char (+ (rand 26) 65))))))
(defn- get-unique-count
  "Returns a seq of at least `goal-count` distinct values, starting from
  `current` and generating additional candidates with the zero-arg
  function `gen-fn` until enough distinct values exist.
  NOTE: may recurse indefinitely if `gen-fn` cannot produce enough
  distinct values. (Param renamed from `fn`, which shadowed clojure.core/fn.)"
  [current goal-count gen-fn]
  (let [uniques  (distinct current)
        distance (- goal-count (count uniques))]
    (if (pos? distance)
      (get-unique-count (concat uniques (repeatedly distance gen-fn))
                        goal-count gen-fn)
      uniques)))
;; ENDPOINT TEST: /transact
;; Transacting the chat-alt schema into a fresh ledger should succeed and
;; create the expected number of predicates and collections.
(deftest add-chat-alt-schema-test
  (testing "adding chat-alt schema succeeds"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          {:keys [status body] :as schema-res} (test/transact-schema
                                                 ledger "chat-alt.edn")]
      (is (= 200 status))
      (is (test/contains-every? schema-res :opts :body :headers :status))
      (is (test/contains-every? body :t :id :auth :tempids :block :hash :fuel
                                :status :bytes :flakes))
      ;; schema transaction lands in block 2 of the fresh ledger
      (is (= 2 (:block body)))
      (is (= 59 (-> body :tempids (test/get-tempid-count :_predicate))))
      (is (= 4 (-> body :tempids (test/get-tempid-count :_collection)))))))
;; Transacting sample data (people, comments, chats) should return the
;; full set of expected tempids.
(deftest transact-people-comments-chats-test
  (testing "add data to chat alt succeeds"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          _ (test/transact-schema ledger "chat-alt.edn")
          {:keys [status body] :as new-data-res}
          (test/transact-data ledger "chat-alt-people-comments-chats.edn")]
      (is (= 200 status))
      (is (test/contains-every? new-data-res :opts :body :headers :status))
      (is (test/contains-every? body :tempids :block :hash :fuel :auth :status
                                :bytes :t :flakes))
      ;; the tempids should be _auth$chatUser, _auth$temp, comment$1 -> 12,
      ;; chat$1 -> 13, nestedComponent$1 -> 12, _user$jdoe, :_user$zsmith,
      ;; person$1 -> 4, _role$chatUser,
      ;; _rule$viewAllPeople, _rule$editOwnChats, _rule$viewAllChats
      (is (= (into #{:_auth$chatUser :_auth$temp :_rule$viewAllPeople
                     :_rule$editOwnChats :_rule$viewAllChats :_role$chatUser
                     :_user$jdoe :_user$zsmith :_fn$ownChats :person}
                   (concat
                     (map #(keyword (str "comment$" %)) (range 1 13))
                     (map #(keyword (str "chat$" %)) (range 1 14))
                     (map #(keyword (str "person$" %)) (range 1 4))
                     (map #(keyword (str "nestedComponent$" %)) (range 1 13))))
             (-> body :tempids keys set)))
      ;; check that 1 person (without tempid) was added
      (is (= 1 (-> body :tempids (test/get-tempid-count :person)))))))
;; ENDPOINT TEST: /query
;; /query should return every collection created by the chat-alt schema
;; plus the built-in system collections.
(deftest query-all-collections-test
  (testing "Querying all collections"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          _ (test/transact-schema ledger "chat-alt.edn")
          query {:select ["*"] :from "_collection"}
          {:keys [status body] :as query-res} @(http/post
                                                 (str endpoint-url-short
                                                      ledger "/query")
                                                 (test/standard-request query))
          results (json/parse body)
          collections (into #{} (map #(:_collection/name %) results))]
      (is (= 200 status))
      ;; The keys in the response are -> :opts :body :headers :status
      (is (test/contains-every? query-res :opts :body :headers :status))
      ;; Are all the collection names what we expect?
      (is (= #{"_rule" "_fn" "nestedComponent" "_predicate" "_setting" "chat"
               "_auth" "_user" "person" "_shard" "_tag" "comment" "_role"
               "_collection" "_ctx"}
             collections)))))
;; /query on _predicate should include the schema's custom predicates.
(deftest query-all-predicates-test
  (testing "Query all predicates"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          _ (test/transact-schema ledger "chat-alt.edn")
          query {:select ["*"] :from "_predicate"}
          {:keys [body status] :as query-res} @(http/post
                                                 (str endpoint-url-short
                                                      ledger "/query")
                                                 (test/standard-request query))
          results (json/parse body)
          predicates (into #{} (map #(:_predicate/name %) results))]
      (is (= 200 status))
      (is (test/contains-every? query-res :opts :body :headers :status))
      ;; Are some of the predicates we expect returned?
      (is (every? predicates ["comment/nestedComponent" "person/stringUnique"]))
      (is (< 30 (count predicates))))))
(deftest query-recursive-unlimited-test
  (testing "recursive query recurses"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          _ (test/transact-schema ledger "category.edn")
          _ (test/transact-data ledger "bike-categories.edn")
          ;; trailing + on the predicate follows subCategoryOf recursively
          ;; with no depth limit
          query {:select ["?categoryName"]
                 :where [["?c" "category/name" "Fixie"]
                         ["?c" "category/subCategoryOf+" "?s"]
                         ["?s" "category/name" "?categoryName"]]}
          {:keys [body status] :as query-res} @(http/post
                                                 (str endpoint-url-short
                                                      ledger "/query")
                                                 (test/standard-request query))
          results (json/parse body)]
      (is (= 200 status)
          (str "Query response was: " (pr-str query-res)))
      (is (= #{"Bikes" "Safety" "Road" "Hipster"} (-> results flatten set))
          (str "Query response was: " (pr-str query-res))))))
(deftest query-recursive-limited-test
  (testing "recursive query recurses"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          _ (test/transact-schema ledger "category.edn")
          _ (test/transact-data ledger "bike-categories.edn")
          ;; +2 caps the recursion depth at two hops, so only the nearest
          ;; ancestors are returned
          query {:select ["?categoryName"]
                 :where [["?c" "category/name" "Fixie"]
                         ["?c" "category/subCategoryOf+2" "?s"]
                         ["?s" "category/name" "?categoryName"]]}
          {:keys [body status] :as query-res} @(http/post
                                                 (str endpoint-url-short
                                                      ledger "/query")
                                                 (test/standard-request query))
          results (json/parse body)]
      (is (= 200 status)
          (str "Query response was: " (pr-str query-res)))
      (is (= #{"Road" "Hipster"} (-> results flatten set))
          (str "Query response was: " (pr-str query-res))))))
;; ENDPOINT TEST: /multi-query
;; Two independent queries issued as one /multi-query request; results
;; come back keyed by the query names (:coll, :pred).
(deftest query-collections-predicates-multiquery-test
  (testing "Querying all collections and predicates in multi-query"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          _ (test/transact-schema ledger "chat-alt.edn")
          query {:coll {:select ["*"] :from "_collection"}
                 :pred {:select ["*"] :from "_predicate"}}
          {:keys [body status] :as multi-res}
          @(http/post (str endpoint-url-short ledger "/multi-query")
                      (test/standard-request query))
          results (json/parse body)
          collections (into #{} (map #(:_collection/name %) (:coll results)))
          predicates (into #{} (map #(:_predicate/name %) (:pred results)))]
      (is (= 200 status))
      (is (test/contains-every? multi-res :opts :body :headers :status))
      (is (= collections #{"_rule" "nestedComponent" "_fn" "_predicate"
                           "_setting" "chat" "_auth" "_user" "person" "_shard"
                           "_tag" "comment" "_role" "_collection" "_ctx"}))
      ;; Are some of the predicates we expect returned?
      (is (every? predicates ["comment/nestedComponent" "person/stringUnique"])))))
(deftest sign-multi-query-test
(testing "sign multi-query where collections are not named in alphanumeric order"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger "chat-alt-people-comments-chats.edn")
private-key (slurp "default-private-key.txt")
qry-str (str "{\"collections\":{\"select\":[\"*\"],\"from\":\"_collection\"},\n "
" \"predicates\":{\"select\":[\"*\"],\"from\":\"_predicate\"},\n "
" \"_setting\":{\"select\":[\"*\"],\"from\":\"_setting\"},\n "
" \"_rule\":{\"select\":[\"*\"],\"from\":\"_rule\"},\n "
" \"_role\":{\"select\":[\"*\"],\"from\":\"_role\"},\n "
" \"_user\":{\"select\":[\"*\"],\"from\":\"_user\"}\n }")
request {:headers {"content-type" "application/json"}
:body qry-str}
q-endpoint (str endpoint-url-short ledger "/multi-query")
signed-req (http-signatures/sign-request :post q-endpoint request
private-key)
{:keys [status body] :as multi-res}
@(http/post q-endpoint signed-req)
results (json/parse body)
collections (into #{} (map #(:_collection/name %) (:collections results)))
predicates (into #{} (map #(:_predicate/name %) (:predicates results)))
roles (into #{} (map #(:_role/id %) (:_role results)))]
(is (= 200 status))
; The keys in the response are -> :opts :body :headers :status
(is (test/contains-every? multi-res :opts :body :headers :status))
; Are all the collections what we expect?
(is (test/contains-every? collections "_rule" "nestedComponent" "_fn"
"_predicate" "_setting" "chat" "_auth" "_user"
"person" "_shard" "_tag" "comment" "_role"
"_collection"))
; Are some of the predicates we expect returned?
(is (test/contains-every? predicates "comment/nestedComponent"
"person/stringUnique"))
; Are the expected roles returned?
(is (test/contains-every? roles "chatUser" "root")))))
;; ENDPOINT TEST: /transact
(deftest transacting-new-persons-test
  (testing "Creating 100 random persons"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          _ (test/transact-schema ledger "chat-alt.edn")
          random-person (fn []
                          {:_id "person"
                           :stringNotUnique (rand-str)})
          person-tx (repeatedly 100 random-person)
          {:keys [status body] :as tx-res}
          @(http/post (str endpoint-url-short ledger "/transact")
                      (test/standard-request person-tx))
          result (json/parse body)
          person-keys (-> result keys set)
          flakes (:flakes result)
          tempids (:tempids result)]
      ;; NOTE(review): `status` is destructured but never asserted here,
      ;; unlike sibling tests; consider adding (is (= 200 status)) — confirm.
      (is (every? person-keys [:tempids :block :hash :fuel :auth :status :flakes]))
      (is (< 100 (count flakes)))
      ;; one tempid per transacted person
      (is (= 100 (test/get-tempid-count tempids :person))))))
;; ENDPOINT TEST: /block
(deftest query-block-two-test
  (testing "Query block 2"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          _ (test/transact-schema ledger "chat-alt.edn")
          query {:block 2}
          {:keys [status body]} @(http/post
                                   (str endpoint-url-short ledger "/block")
                                   (test/standard-request query))
          results (json/parse body)
          block (first results)
          block-keys (keys block)]
      (is (= 200 status))
      (is (= 2 (:block block)))
      ;; every returned key must be one of the known block-map keys
      (is (every? #{:block :hash :instant :txns :block-bytes :cmd-types :t :sigs
                    :flakes}
                  block-keys)))))
;; ENDPOINT TEST: /history
(deftest history-query-collection-name-test
  (testing "Query history of flakes with _collection/name predicate"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          _ (test/transact-schema ledger "chat-alt.edn")
          ;; [nil 40] = any subject, predicate id 40 (_collection/name)
          history-query {:history [nil 40]}
          {:keys [status body]} @(http/post
                                   (str endpoint-url-short ledger "/history")
                                   (test/standard-request history-query))
          result (json/parse body)]
      (is (= 200 status))
      ;; predicate id 40 must appear in every returned flake
      (is (every? (fn [flakes]
                    (every? #(= 40 (second %)) flakes))
                  (map :flakes result))))))
;; ENDPOINT TEST: /graphql
(deftest query-all-collections-graphql-test
(testing "Querying all collections through the graphql endpoint"
(let [ledger (test/rand-ledger test/ledger-endpoints)
query {:query "{
graph {
_collection (sort: {predicate: \"name\", order: ASC}) {
_id name spec version doc
}
}
}"}
{:keys [status body]} @(http/post (str endpoint-url-short ledger "/graphql")
(test/standard-request query))
results (json/parse body)
collections (-> results :data :_collection)
collection-names (set (map :name collections))]
(is (= 200 status))
(is (every? #(test/contains-every? % :doc :version :name :_id)
collections))
(is (= #{"_rule" "_fn" "_predicate" "_setting" "_auth" "_user" "_shard"
"_tag" "_role" "_collection" "_ctx"}
collection-names)))))
(deftest sign-all-collections-graphql-test
(testing "sign a query for all collections through the graphql endpoint"
(let [ledger (test/rand-ledger test/ledger-endpoints)
private-key (slurp "default-private-key.txt")
graphql-str "{
graph {
_collection (sort: {predicate: \"name\", order: ASC}) {
_id name spec version doc
}
}
}"
qry-str (json/stringify {:query graphql-str})
request {:headers {"content-type" "application/json"}
:body qry-str}
q-endpoint (str endpoint-url-short ledger "/graphql")
signed-req (http-signatures/sign-request :post q-endpoint request private-key)
{:keys [status body]} @(http/post q-endpoint signed-req)
results (json/parse body)
collections (-> results :data :_collection)
collection-keys (reduce (fn [acc c] (apply conj acc (keys c)))
#{} collections)
collection-names (set (map :name collections))]
(is (= 200 status))
(is (test/contains-every? collection-keys :_id :name :version :doc))
(is (test/contains-every? collection-names "_rule" "_fn" "_predicate"
"_setting" "_auth" "_user" "_shard" "_tag"
"_role" "_collection")))))
;; ENDPOINT TEST: /graphql transaction
(deftest add-a-person-graphql-test
(testing "Add two new people with graphql"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
graphql {:query "mutation addPeople ($myPeopleTx: JSON) {
transact(tx: $myPeopleTx)
}"
:variables
{:myPeopleTx "[
{
\"_id\": \"person\",
\"stringNotUnique\": \"oRamirez\",
\"stringUnique\": \"Oscar Ramirez\"
},
{
\"_id\": \"person\",
\"stringNotUnique\": \"cStuart\",
\"stringUnique\": \"Chana Stuart\"
}
]"}}
{:keys [status body]}
@(http/post (str endpoint-url-short ledger "/graphql")
(test/standard-request graphql))
result (json/parse body)
result-keys (-> result :data keys set)
flakes (-> result :data :flakes)
flake-vals (set (map #(nth % 2) flakes))]
(is (= 200 status))
(is (= #{:tempids :block :hash :instant :type :duration :fuel :auth :status :id :bytes :t :flakes}
result-keys))
(is (= 11 (count flakes)))
(is (test/contains-every? flake-vals "Chana Stuart" "cStuart" "Oscar Ramirez" "oRamirez")))))
;; ENDPOINT TEST: /sparql
;; SPARQL query over the ledger's own _collection data.
(deftest query-collection-sparql-test
  (testing "Querying all collections through the sparql endpoint"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          query "SELECT ?name \nWHERE \n {\n ?collection fd:_collection/name ?name. \n}"
          {:keys [status body]} @(http/post (str endpoint-url-short ledger "/sparql")
                                            (test/standard-request query))
          results (json/parse body)
          collection-names (set (apply concat results))]
      (is (= 200 status))
      ;; Make sure we got results back
      (is (> (count results) 1))
      ;; Each result should be an array of 1 (?name)
      (is (every? #(= 1 (count %)) results))
      (is (test/contains-every? collection-names "_predicate" "_auth"
                                "_collection" "_fn" "_role" "_rule" "_setting"
                                "_tag" "_user")))))
;; Same SPARQL query as above, but issued as a signed request.
(deftest sign-query-collection-sparql-test
  (testing "sign a query for all collections through the sparql endpoint"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          private-key (slurp "default-private-key.txt")
          qry-str (json/stringify "SELECT ?name \nWHERE \n {\n ?collection fd:_collection/name ?name. \n}")
          request {:headers {"content-type" "application/json"}
                   :body qry-str}
          q-endpoint (str endpoint-url-short ledger "/sparql")
          signed-req (http-signatures/sign-request :post q-endpoint request private-key)
          {:keys [status body]} @(http/post q-endpoint signed-req)
          results (json/parse body)
          collection-names (set (apply concat results))]
      (is (= 200 status))
      ;; Make sure we got results back
      (is (> (count results) 1))
      ;; Each result should be an array of 1 (?name)
      (is (every? #(= 1 (count %)) results))
      (is (test/contains-every? collection-names "_predicate" "_auth"
                                "_collection" "_fn" "_role" "_rule" "_setting"
                                "_tag" "_user")))))
;; ENDPOINT TEST: /sparql
;; SPARQL federation: queries the public Wikidata endpoint (requires
;; network access from the test server).
(deftest query-wikidata-sparql-test
  (testing "Querying wikidata with sparql syntax"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          query "SELECT ?item ?itemLabel \nWHERE \n {\n ?item wdt:P31 wd:Q146. \n}"
          {:keys [status body]} @(http/post (str endpoint-url-short ledger "/sparql")
                                            (test/standard-request query))
          results (json/parse body)]
      (is (= 200 status))
      ;; Make sure we got results back
      (is (> (count results) 1))
      ;; Each result should be an array of 2 (?item and ?itemLabel)
      (is (every? #(= 2 (count %)) results)))))
;; ENDPOINT TEST: /sql
(deftest sign-sql-query-test
(testing "sign a query for all collections through the sql endpoint"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
private-key (slurp "default-private-key.txt")
qry-str (json/stringify "SELECT * FROM _collection")
request {:headers {"content-type" "application/json"}
:body qry-str}
q-endpoint (str endpoint-url-short ledger "/sql")
signed-req (http-signatures/sign-request :post q-endpoint request private-key)
{:keys [status body] :as sql-res} @(http/post q-endpoint signed-req)
results (json/parse body)
collections (set (map :_collection/name results))]
(is (= 200 status))
; The keys in the response are -> :opts :body :headers :status
(is (test/contains-every? sql-res :opts :body :headers :status))
; Are all the collections what we expect?
(is (test/contains-every? collections
"_rule" "nestedComponent" "_fn" "_predicate" "_setting"
"chat" "_auth" "_user" "person" "_shard" "_tag" "comment"
"_role" "_collection")))))
;; ENDPOINT TEST: /health
(deftest health-check-test
  (testing "Getting health status"
    (let [{:keys [status body]} @(http/post (str endpoint-url-short "health"))
          result (json/parse body)]
      (is (= 200 status))
      ;; server reports itself ready
      (is (:ready result)))))
;; ENDPOINT TEST: /ledgers
(deftest get-all-ledgers-test
  (testing "Get all ledgers"
    (test/init-ledgers! (conj test/all-ledgers "test/three"))
    (let [{:keys [status body]} @(http/post (str endpoint-url-short "ledgers"))
          result (-> body json/parse set)]
      (is (= 200 status))
      ;; every initialized ledger should be listed as a [network id] pair
      (is (test/contains-every?
            result
            ["fluree" "api"] ["fluree" "querytransact"] ["fluree" "invoice"]
            ["fluree" "chat"] ["fluree" "voting"] ["fluree" "crypto"]
            ["test" "three"] ["fluree" "supplychain"] ["fluree" "todo"])))))
;; ENDPOINT TEST: /transact
(deftest transacting-new-chats-test
(testing "Creating 100 random chat messages and adding them to existing persons"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger "chat-alt-people-comments-chats.edn")
query {:select ["*"] :from "person"}
{:keys [status body]} @(http/post (str endpoint-url-short ledger "/query")
(test/standard-request query))
results (json/parse body)
persons (map :_id results)
random-chat (fn []
{:_id "chat"
:stringNotUnique (rand-str)
:person (nth persons (rand-int (count persons)))
:instantUnique "#(now)"})
chat-tx (repeatedly 100 random-chat)
{chat-status :status, chat-body :body}
@(http/post (str endpoint-url-short ledger "/transact")
(test/standard-request chat-tx))
tx-result (json/parse chat-body)
tx-keys (-> tx-result keys set)
flakes (:flakes tx-result)
tempids (:tempids tx-result)]
(is (= 200 status))
(is (= 200 chat-status))
(is (test/contains-every? tx-keys :auth :tempids :block :hash :fuel
:status :bytes :flakes :instant :type :duration
:id :t))
(is (< 99 (count flakes)))
(is (= 100 (test/get-tempid-count tempids :chat))))))
;; ENDPOINT TEST: /transact
(deftest updating-persons-test
(testing "Updating all person/stringNotUniques"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger "chat-alt-people-comments-chats.edn")
query {:select ["*"] :from "person"}
{:keys [status body]} @(http/post
(str endpoint-url-short ledger "/query")
(test/standard-request query))
results (json/parse body)
persons (map :_id results)
person-tx (mapv (fn [n]
{:_id n
:stringNotUnique (rand-str)})
persons)
{tx-status :status, tx-body :body}
@(http/post (str endpoint-url-short ledger "/transact")
(test/standard-request person-tx))
tx-result (json/parse tx-body)
tx-keys (-> tx-result keys set)
flakes (:flakes tx-result)
tempids (:tempids tx-result)]
(is (= 200 status))
(is (= 200 tx-status))
(is (test/contains-every? tx-keys :auth :block :hash :fuel :status :bytes
:flakes :instant :type :duration :id :t))
#_(is (< 100 (count flakes))) ; TODO: why?
(is (nil? tempids))
;; Confirm all the predicates we expect to be featured in the flakes
(is (= #{101 106 99 100 1003 103 107}
(->> flakes (map second) set))))))
;; ENDPOINT TEST: /transact
;; TODO: This is idiomatic, but it fails b/c it's checking for things I don't
;; fully understand (like setting a ceiling of 100 tx-count and then asserting
;; that that should be smaller than the number of flakes (why?). So I'm not
;; marking it w/ for now so I can move on with the other tests.
(deftest transacting-new-stringUniqueMulti-test
(testing "Creating 300 random stringUniqueMulti (sum) and adding them to existing persons"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger "chat-alt-people-comments-chats.edn")
query {:select ["*"] :from "person"}
{:keys [status body]} @(http/post
(str endpoint-url-short ledger "/query")
(test/standard-request query))
results (json/parse body)
persons (map :_id results)
num-persons (count persons)
tx-count (if (> 100 num-persons)
num-persons
100)
random-sum (repeatedly 300 rand-str)
random-sum* (get-unique-count random-sum 300 rand-str)
[rand-sum-1 rand-sum-2 rand-sum-3] (partition 100 random-sum*)
sum-tx (map (fn [person sum1 sum2 sum3]
{:_id person
:stringUniqueMulti [sum1 sum2 sum3]})
persons rand-sum-1 rand-sum-2 rand-sum-3)
{tx-status :status, tx-body :body}
@(http/post (str endpoint-url-short ledger "/transact")
(test/standard-request sum-tx))
tx-result (json/parse tx-body)
tx-keys (-> tx-result keys set)
flakes (:flakes tx-result)
tempids (:tempids tx-result)]
(is (= 200 status))
(is (= 200 tx-status))
(is (test/contains-every? tx-keys :auth :block :hash :fuel :status
:flakes))
(is (< tx-count (count flakes)))
(is (nil? tempids)))))
;; ENDPOINT TEST: /new-db
(deftest create-ledger-test
  (testing "Creating a new ledger"
    (let [new-ledger-body {:ledger/id (str "test/three-" (UUID/randomUUID))}
          {:keys [status body]} @(http/post
                                   (str endpoint-url-short "new-ledger")
                                   (test/standard-request new-ledger-body))
          result (json/parse body)]
      (is (= 200 status))
      (is (string? result))
      ;; response body is the 64-character hex transaction id
      (is (= 64 (count result))))))
;; ENDPOINT TEST: /new-keys
(deftest new-keys-test
  (testing "Generating new keys"
    (let [{:keys [status body]} @(http/post (str endpoint-url-short "new-keys"))
          result (json/parse body)
          result-keys (-> result keys set)]
      (is (= 200 status))
      ;; exactly a keypair plus its derived account id
      (is (= #{:private :public :account-id} result-keys)))))
;; ENDPOINT TEST: /command
(deftest command-add-person-test
  (testing "Issue a signed command to add a person"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          _ (test/transact-schema ledger "chat-alt.edn")
          _ (test/transact-data ledger
                                "chat-alt-people-comments-chats.edn")
          priv-key (slurp "default-private-key.txt")
          ;; :txid-only makes the endpoint return just the tx id
          cmd-map (assoc (fdb/tx->command ledger
                                          [{:_id "person" :stringNotUnique "JoAnne"}]
                                          priv-key)
                         :txid-only true)
          {:keys [status body]} @(http/post
                                   (str endpoint-url-short ledger "/command")
                                   (test/standard-request cmd-map))
          result (json/parse body)]
      (is (= 200 status))
      (is (string? result))
      ;; 64-character hex transaction id
      (is (= 64 (count result))))))
(deftest command-add-person-verbose-test
  ;; Fix: the `testing` form previously closed immediately after its
  ;; description, leaving the whole body (and all assertions) outside its
  ;; scope; the body now lives inside `testing`.
  (testing "Issue a signed command to add a person"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          _ (test/transact-schema ledger "chat-alt.edn")
          _ (test/transact-data ledger
                                "chat-alt-people-comments-chats.edn")
          priv-key (slurp "default-private-key.txt")
          cmd-map (-> ledger
                      (fdb/tx->command [{:_id "person" :stringNotUnique "Sally"}]
                                       priv-key))
          {:keys [status body]} @(http/post (str endpoint-url-short ledger
                                                 "/command")
                                            (test/standard-request cmd-map))
          result (json/parse body)]
      (is (= 200 status))
      (is (map? result))
      ;; without :txid-only, the full transaction result map is returned
      (is (test/contains-every? result :tempids :block :hash :instant
                                :type :duration :fuel :auth :status :id
                                :bytes :t :flakes)))))
;; ENDPOINT TEST: signed /delete-db request
(deftest delete-ledger-test
  (testing "delete ledger - open api"
    (let [ledger (test/rand-ledger test/ledger-endpoints)
          {:keys [status body]} @(http/post
                                   (str endpoint-url-short "delete-ledger")
                                   (test/standard-request {:ledger/id ledger}))
          result (json/parse body)]
      (is (= 200 status))
      ;; response echoes the deleted ledger id
      (is (= ledger (:deleted result))))))
;; ENDPOINT TEST: /gen-flakes, /query-with, /test-transact-with
;; TODO: Fix this. It doesn't work even when run by itself on its own empty ledger
#_(deftest gen-flakes-query-transact-with-test
(testing "Issue a signed command to add a person."
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger
"chat-alt-people-comments-chats.edn")
txn [{:_id "person" :stringNotUnique "Josie"}
{:_id "person" :stringNotUnique "Georgine"}
{:_id "person" :stringNotUnique "Alan"}
{:_id "person" :stringNotUnique "Elaine"}]
{:keys [status body]} @(http/post
(str endpoint-url-short ledger
"/gen-flakes")
(test/standard-request txn))
_ (println "gen-flakes result:" (pr-str body))
flakes (-> body json/parse :flakes)
qw-test {:query {:select ["*"] :from "person"} :flakes flakes}
{qw-status :status, qw-body :body}
@(http/post (str endpoint-url-short ledger "/query-with")
(test/standard-request qw-test))
qw-result (json/parse qw-body)
{q-status :status, q-body :body}
@(http/post (str endpoint-url-short ledger "/query")
(test/standard-request {:select ["*"] :from "person"}))
q-result (json/parse q-body)]
(is (= 200 status))
(is (= 200 qw-status))
(is (= 200 q-status))
;; These names appear when selecting people in query-with
(is (= (->> qw-result (map :person/stringNotUnique) set)
#{"Josie" "Alan" "Georgine" "Elaine"}))
;; None of these names actually appear when just querying.
(is (not-any? (->> q-result (map :person/stringNotUnique) set)
["Josie" "Alan" "Georgine" "Elaine"])))))
| null | https://raw.githubusercontent.com/fluree/ledger/af93dd2f0261cabed58fadedbe215e828d38cb44/test/fluree/db/ledger/api/open_test.clj | clojure | Utility vars and functions
chat$1 -> 13, nestedComponent$1 ->12, _user$jdoe, :_user$zsmith,
_rule$viewAllPeople, _rule$editOwnChats, _rule$viewAllChats
The keys in the response are -> :opts :body :headers :status
Are all the collection names what we expect?
Are some of the predicates we expect returned?
Are some of the predicates we expect returned?
The keys in the response are -> :opts :body :headers :status
Are all the collections what we expect?
Are some of the predicates we expect returned?
Are the expected roles returned?
Make sure we got results back
Make sure we got results back
Make sure we got results back
The keys in the response are -> :opts :body :headers :status
Are all the collections what we expect?
TODO: why?
Confirm all the predicates we expect to be featured in the flakes
TODO: This is idiomatic, but it fails b/c it's checking for things I don't
that that should be smaller than the number of flakes (why?). So I'm not
marking it w/ for now so I can move on with the other tests.
TODO: Fix this. It doesn't work even when run by itself on its own empty ledger
These names appear when selecting people in query-with
None of these names actually appear when just querying. | (ns fluree.db.ledger.api.open-test
(:require [clojure.test :refer :all]
[fluree.db.test-helpers :as test]
[org.httpkit.client :as http]
[fluree.db.util.json :as json]
[fluree.db.api :as fdb]
[fluree.db.query.http-signatures :as http-signatures])
(:import (java.util UUID)))
(use-fixtures :once test/test-system)
(def endpoint-url (str ":" @test/port "/fdb/" test/ledger-endpoints "/"))
(def endpoint-url-short (str ":" @test/port "/fdb/"))
(defn- rand-str
[]
(apply str
at least 5 characters
(repeatedly #(char (+ (rand 26) 65))))))
(defn- get-unique-count
[current goal-count fn]
(let [current-count (count (distinct current))
distance (- goal-count current-count)]
(if (< 0 distance)
(get-unique-count (distinct (concat current (repeatedly distance fn))) goal-count fn)
(distinct current))))
ENDPOINT TEST : /transact
(deftest add-chat-alt-schema-test
(testing "adding chat-alt schema succeeds"
(let [ledger (test/rand-ledger test/ledger-endpoints)
{:keys [status body] :as schema-res} (test/transact-schema
ledger "chat-alt.edn")]
(is (= 200 status))
(is (test/contains-every? schema-res :opts :body :headers :status))
(is (test/contains-every? body :t :id :auth :tempids :block :hash :fuel
:status :bytes :flakes))
(is (= 2 (:block body)))
(is (= 59 (-> body :tempids (test/get-tempid-count :_predicate))))
(is (= 4 (-> body :tempids (test/get-tempid-count :_collection)))))))
(deftest transact-people-comments-chats-test
(testing "add data to chat alt succeeds"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
{:keys [status body] :as new-data-res}
(test/transact-data ledger "chat-alt-people-comments-chats.edn")]
(is (= 200 status))
(is (test/contains-every? new-data-res :opts :body :headers :status))
(is (test/contains-every? body :tempids :block :hash :fuel :auth :status
:bytes :t :flakes))
the tempids should be _ auth$chatUser , _ auth$temp , comment$1 - > 12
person$1 - > 4 , _ role$chatUser
(is (= (into #{:_auth$chatUser :_auth$temp :_rule$viewAllPeople
:_rule$editOwnChats :_rule$viewAllChats :_role$chatUser
:_user$jdoe :_user$zsmith :_fn$ownChats :person}
(concat
(map #(keyword (str "comment$" %)) (range 1 13))
(map #(keyword (str "chat$" %)) (range 1 14))
(map #(keyword (str "person$" %)) (range 1 4))
(map #(keyword (str "nestedComponent$" %)) (range 1 13))))
(-> body :tempids keys set)))
check that 1 person ( without tempid ) was added
(is (= 1 (-> body :tempids (test/get-tempid-count :person)))))))
ENDPOINT TEST : /query
(deftest query-all-collections-test
(testing "Querying all collections"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
query {:select ["*"] :from "_collection"}
{:keys [status body] :as query-res} @(http/post
(str endpoint-url-short
ledger "/query")
(test/standard-request query))
results (json/parse body)
collections (into #{} (map #(:_collection/name %) results))]
(is (= 200 status))
(is (test/contains-every? query-res :opts :body :headers :status))
(is (= #{"_rule" "_fn" "nestedComponent" "_predicate" "_setting" "chat"
"_auth" "_user" "person" "_shard" "_tag" "comment" "_role"
"_collection" "_ctx"}
collections)))))
(deftest query-all-predicates-test
(testing "Query all predicates"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
query {:select ["*"] :from "_predicate"}
{:keys [body status] :as query-res} @(http/post
(str endpoint-url-short
ledger "/query")
(test/standard-request query))
results (json/parse body)
predicates (into #{} (map #(:_predicate/name %) results))]
(is (= 200 status))
(is (test/contains-every? query-res :opts :body :headers :status))
(is (every? predicates ["comment/nestedComponent" "person/stringUnique"]))
(is (< 30 (count predicates))))))
(deftest query-recursive-unlimited-test
(testing "recursive query recurses"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "category.edn")
_ (test/transact-data ledger "bike-categories.edn")
query {:select ["?categoryName"]
:where [["?c" "category/name" "Fixie"]
["?c" "category/subCategoryOf+" "?s"]
["?s" "category/name" "?categoryName"]]}
{:keys [body status] :as query-res} @(http/post
(str endpoint-url-short
ledger "/query")
(test/standard-request query))
results (json/parse body)]
(is (= 200 status)
(str "Query response was: " (pr-str query-res)))
(is (= #{"Bikes" "Safety" "Road" "Hipster"} (-> results flatten set))
(str "Query response was: " (pr-str query-res))))))
(deftest query-recursive-limited-test
(testing "recursive query recurses"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "category.edn")
_ (test/transact-data ledger "bike-categories.edn")
query {:select ["?categoryName"]
:where [["?c" "category/name" "Fixie"]
["?c" "category/subCategoryOf+2" "?s"]
["?s" "category/name" "?categoryName"]]}
{:keys [body status] :as query-res} @(http/post
(str endpoint-url-short
ledger "/query")
(test/standard-request query))
results (json/parse body)]
(is (= 200 status)
(str "Query response was: " (pr-str query-res)))
(is (= #{"Road" "Hipster"} (-> results flatten set))
(str "Query response was: " (pr-str query-res))))))
ENDPOINT TEST : /multi - query
(deftest query-collections-predicates-multiquery-test
(testing "Querying all collections and predicates in multi-query"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
query {:coll {:select ["*"] :from "_collection"}
:pred {:select ["*"] :from "_predicate"}}
{:keys [body status] :as multi-res}
@(http/post (str endpoint-url-short ledger "/multi-query")
(test/standard-request query))
results (json/parse body)
collections (into #{} (map #(:_collection/name %) (:coll results)))
predicates (into #{} (map #(:_predicate/name %) (:pred results)))]
(is (= 200 status))
(is (test/contains-every? multi-res :opts :body :headers :status))
(is (= collections #{"_rule" "nestedComponent" "_fn" "_predicate"
"_setting" "chat" "_auth" "_user" "person" "_shard"
"_tag" "comment" "_role" "_collection" "_ctx"}))
(is (every? predicates ["comment/nestedComponent" "person/stringUnique"])))))
(deftest sign-multi-query-test
(testing "sign multi-query where collections are not named in alphanumeric order"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger "chat-alt-people-comments-chats.edn")
private-key (slurp "default-private-key.txt")
qry-str (str "{\"collections\":{\"select\":[\"*\"],\"from\":\"_collection\"},\n "
" \"predicates\":{\"select\":[\"*\"],\"from\":\"_predicate\"},\n "
" \"_setting\":{\"select\":[\"*\"],\"from\":\"_setting\"},\n "
" \"_rule\":{\"select\":[\"*\"],\"from\":\"_rule\"},\n "
" \"_role\":{\"select\":[\"*\"],\"from\":\"_role\"},\n "
" \"_user\":{\"select\":[\"*\"],\"from\":\"_user\"}\n }")
request {:headers {"content-type" "application/json"}
:body qry-str}
q-endpoint (str endpoint-url-short ledger "/multi-query")
signed-req (http-signatures/sign-request :post q-endpoint request
private-key)
{:keys [status body] :as multi-res}
@(http/post q-endpoint signed-req)
results (json/parse body)
collections (into #{} (map #(:_collection/name %) (:collections results)))
predicates (into #{} (map #(:_predicate/name %) (:predicates results)))
roles (into #{} (map #(:_role/id %) (:_role results)))]
(is (= 200 status))
(is (test/contains-every? multi-res :opts :body :headers :status))
(is (test/contains-every? collections "_rule" "nestedComponent" "_fn"
"_predicate" "_setting" "chat" "_auth" "_user"
"person" "_shard" "_tag" "comment" "_role"
"_collection"))
(is (test/contains-every? predicates "comment/nestedComponent"
"person/stringUnique"))
(is (test/contains-every? roles "chatUser" "root")))))
ENDPOINT TEST : /transact
(deftest transacting-new-persons-test
(testing "Creating 100 random persons"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
random-person (fn []
{:_id "person"
:stringNotUnique (rand-str)})
person-tx (repeatedly 100 random-person)
{:keys [status body] :as tx-res}
@(http/post (str endpoint-url-short ledger "/transact")
(test/standard-request person-tx))
result (json/parse body)
person-keys (-> result keys set)
flakes (:flakes result)
tempids (:tempids result)]
(is (every? person-keys [:tempids :block :hash :fuel :auth :status :flakes]))
(is (< 100 (count flakes)))
(is (= 100 (test/get-tempid-count tempids :person))))))
ENDPOINT TEST : /block
(deftest query-block-two-test
(testing "Query block 2"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
query {:block 2}
{:keys [status body]} @(http/post
(str endpoint-url-short ledger "/block")
(test/standard-request query))
results (json/parse body)
block (first results)
block-keys (keys block)]
(is (= 200 status))
(is (= 2 (:block block)))
(is (every? #{:block :hash :instant :txns :block-bytes :cmd-types :t :sigs
:flakes}
block-keys)))))
ENDPOINT TEST : /history
(deftest history-query-collection-name-test
(testing "Query history of flakes with _collection/name predicate"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
history-query {:history [nil 40]}
{:keys [status body]} @(http/post
(str endpoint-url-short ledger "/history")
(test/standard-request history-query))
result (json/parse body)]
(is (= 200 status))
(is (every? (fn [flakes]
(every? #(= 40 (second %)) flakes))
(map :flakes result))))))
ENDPOINT TEST : /graphql
(deftest query-all-collections-graphql-test
(testing "Querying all collections through the graphql endpoint"
(let [ledger (test/rand-ledger test/ledger-endpoints)
query {:query "{
graph {
_collection (sort: {predicate: \"name\", order: ASC}) {
_id name spec version doc
}
}
}"}
{:keys [status body]} @(http/post (str endpoint-url-short ledger "/graphql")
(test/standard-request query))
results (json/parse body)
collections (-> results :data :_collection)
collection-names (set (map :name collections))]
(is (= 200 status))
(is (every? #(test/contains-every? % :doc :version :name :_id)
collections))
(is (= #{"_rule" "_fn" "_predicate" "_setting" "_auth" "_user" "_shard"
"_tag" "_role" "_collection" "_ctx"}
collection-names)))))
(deftest sign-all-collections-graphql-test
(testing "sign a query for all collections through the graphql endpoint"
(let [ledger (test/rand-ledger test/ledger-endpoints)
private-key (slurp "default-private-key.txt")
graphql-str "{
graph {
_collection (sort: {predicate: \"name\", order: ASC}) {
_id name spec version doc
}
}
}"
qry-str (json/stringify {:query graphql-str})
request {:headers {"content-type" "application/json"}
:body qry-str}
q-endpoint (str endpoint-url-short ledger "/graphql")
signed-req (http-signatures/sign-request :post q-endpoint request private-key)
{:keys [status body]} @(http/post q-endpoint signed-req)
results (json/parse body)
collections (-> results :data :_collection)
collection-keys (reduce (fn [acc c] (apply conj acc (keys c)))
#{} collections)
collection-names (set (map :name collections))]
(is (= 200 status))
(is (test/contains-every? collection-keys :_id :name :version :doc))
(is (test/contains-every? collection-names "_rule" "_fn" "_predicate"
"_setting" "_auth" "_user" "_shard" "_tag"
"_role" "_collection")))))
ENDPOINT TEST : /graphql transaction
(deftest add-a-person-graphql-test
(testing "Add two new people with graphql"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
graphql {:query "mutation addPeople ($myPeopleTx: JSON) {
transact(tx: $myPeopleTx)
}"
:variables
{:myPeopleTx "[
{
\"_id\": \"person\",
\"stringNotUnique\": \"oRamirez\",
\"stringUnique\": \"Oscar Ramirez\"
},
{
\"_id\": \"person\",
\"stringNotUnique\": \"cStuart\",
\"stringUnique\": \"Chana Stuart\"
}
]"}}
{:keys [status body]}
@(http/post (str endpoint-url-short ledger "/graphql")
(test/standard-request graphql))
result (json/parse body)
result-keys (-> result :data keys set)
flakes (-> result :data :flakes)
flake-vals (set (map #(nth % 2) flakes))]
(is (= 200 status))
(is (= #{:tempids :block :hash :instant :type :duration :fuel :auth :status :id :bytes :t :flakes}
result-keys))
(is (= 11 (count flakes)))
(is (test/contains-every? flake-vals "Chana Stuart" "cStuart" "Oscar Ramirez" "oRamirez")))))
ENDPOINT TEST : /sparql
(deftest query-collection-sparql-test
(testing "Querying all collections through the sparql endpoint"
(let [ledger (test/rand-ledger test/ledger-endpoints)
query "SELECT ?name \nWHERE \n {\n ?collection fd:_collection/name ?name. \n}"
{:keys [status body]} @(http/post (str endpoint-url-short ledger "/sparql")
(test/standard-request query))
results (json/parse body)
collection-names (set (apply concat results))]
(is (= 200 status))
(is (> (count results) 1))
Each result should be an array of 1 ( ? name )
(is (every? #(= 1 (count %)) results))
(is (test/contains-every? collection-names "_predicate" "_auth"
"_collection" "_fn" "_role" "_rule" "_setting"
"_tag" "_user")))))
(deftest sign-query-collection-sparql-test
(testing "sign a query for all collections through the sparql endpoint"
(let [ledger (test/rand-ledger test/ledger-endpoints)
private-key (slurp "default-private-key.txt")
qry-str (json/stringify "SELECT ?name \nWHERE \n {\n ?collection fd:_collection/name ?name. \n}")
request {:headers {"content-type" "application/json"}
:body qry-str}
q-endpoint (str endpoint-url-short ledger "/sparql")
signed-req (http-signatures/sign-request :post q-endpoint request private-key)
{:keys [status body]} @(http/post q-endpoint signed-req)
results (json/parse body)
collection-names (set (apply concat results))]
(is (= 200 status))
(is (> (count results) 1))
Each result should be an array of 1 ( ? name )
(is (every? #(= 1 (count %)) results))
(is (test/contains-every? collection-names "_predicate" "_auth"
"_collection" "_fn" "_role" "_rule" "_setting"
"_tag" "_user")))))
ENDPOINT TEST : /sparql
(deftest query-wikidata-sparql-test
(testing "Querying wikidata with sparql syntax"
(let [ledger (test/rand-ledger test/ledger-endpoints)
query "SELECT ?item ?itemLabel \nWHERE \n {\n ?item wdt:P31 wd:Q146. \n}"
{:keys [status body]} @(http/post (str endpoint-url-short ledger "/sparql")
(test/standard-request query))
results (json/parse body)]
(is (= 200 status))
(is (> (count results) 1))
Each result should be an array of 2 ( ? item and ? )
(is (every? #(= 2 (count %)) results)))))
ENDPOINT TEST : /sql
(deftest sign-sql-query-test
(testing "sign a query for all collections through the sql endpoint"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
private-key (slurp "default-private-key.txt")
qry-str (json/stringify "SELECT * FROM _collection")
request {:headers {"content-type" "application/json"}
:body qry-str}
q-endpoint (str endpoint-url-short ledger "/sql")
signed-req (http-signatures/sign-request :post q-endpoint request private-key)
{:keys [status body] :as sql-res} @(http/post q-endpoint signed-req)
results (json/parse body)
collections (set (map :_collection/name results))]
(is (= 200 status))
(is (test/contains-every? sql-res :opts :body :headers :status))
(is (test/contains-every? collections
"_rule" "nestedComponent" "_fn" "_predicate" "_setting"
"chat" "_auth" "_user" "person" "_shard" "_tag" "comment"
"_role" "_collection")))))
ENDPOINT TEST : /health
(deftest health-check-test
(testing "Getting health status"
(let [{:keys [status body]} @(http/post (str endpoint-url-short "health"))
result (json/parse body)]
(is (= 200 status))
(is (:ready result)))))
ENDPOINT TEST : /ledgers
(deftest get-all-ledgers-test
(testing "Get all ledgers"
(test/init-ledgers! (conj test/all-ledgers "test/three"))
(let [{:keys [status body]} @(http/post (str endpoint-url-short "ledgers"))
result (-> body json/parse set)]
(is (= 200 status))
(is (test/contains-every?
result
["fluree" "api"] ["fluree" "querytransact"] ["fluree" "invoice"]
["fluree" "chat"] ["fluree" "voting"] ["fluree" "crypto"]
["test" "three"] ["fluree" "supplychain"] ["fluree" "todo"])))))
ENDPOINT TEST : /transact
(deftest transacting-new-chats-test
(testing "Creating 100 random chat messages and adding them to existing persons"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger "chat-alt-people-comments-chats.edn")
query {:select ["*"] :from "person"}
{:keys [status body]} @(http/post (str endpoint-url-short ledger "/query")
(test/standard-request query))
results (json/parse body)
persons (map :_id results)
random-chat (fn []
{:_id "chat"
:stringNotUnique (rand-str)
:person (nth persons (rand-int (count persons)))
:instantUnique "#(now)"})
chat-tx (repeatedly 100 random-chat)
{chat-status :status, chat-body :body}
@(http/post (str endpoint-url-short ledger "/transact")
(test/standard-request chat-tx))
tx-result (json/parse chat-body)
tx-keys (-> tx-result keys set)
flakes (:flakes tx-result)
tempids (:tempids tx-result)]
(is (= 200 status))
(is (= 200 chat-status))
(is (test/contains-every? tx-keys :auth :tempids :block :hash :fuel
:status :bytes :flakes :instant :type :duration
:id :t))
(is (< 99 (count flakes)))
(is (= 100 (test/get-tempid-count tempids :chat))))))
ENDPOINT TEST : /transact
(deftest updating-persons-test
(testing "Updating all person/stringNotUniques"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger "chat-alt-people-comments-chats.edn")
query {:select ["*"] :from "person"}
{:keys [status body]} @(http/post
(str endpoint-url-short ledger "/query")
(test/standard-request query))
results (json/parse body)
persons (map :_id results)
person-tx (mapv (fn [n]
{:_id n
:stringNotUnique (rand-str)})
persons)
{tx-status :status, tx-body :body}
@(http/post (str endpoint-url-short ledger "/transact")
(test/standard-request person-tx))
tx-result (json/parse tx-body)
tx-keys (-> tx-result keys set)
flakes (:flakes tx-result)
tempids (:tempids tx-result)]
(is (= 200 status))
(is (= 200 tx-status))
(is (test/contains-every? tx-keys :auth :block :hash :fuel :status :bytes
:flakes :instant :type :duration :id :t))
(is (nil? tempids))
(is (= #{101 106 99 100 1003 103 107}
(->> flakes (map second) set))))))
ENDPOINT TEST : /transact
fully understand ( like setting a ceiling of 100 tx - count and then asserting
(deftest transacting-new-stringUniqueMulti-test
(testing "Creating 300 random stringUniqueMulti (sum) and adding them to existing persons"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger "chat-alt-people-comments-chats.edn")
query {:select ["*"] :from "person"}
{:keys [status body]} @(http/post
(str endpoint-url-short ledger "/query")
(test/standard-request query))
results (json/parse body)
persons (map :_id results)
num-persons (count persons)
tx-count (if (> 100 num-persons)
num-persons
100)
random-sum (repeatedly 300 rand-str)
random-sum* (get-unique-count random-sum 300 rand-str)
[rand-sum-1 rand-sum-2 rand-sum-3] (partition 100 random-sum*)
sum-tx (map (fn [person sum1 sum2 sum3]
{:_id person
:stringUniqueMulti [sum1 sum2 sum3]})
persons rand-sum-1 rand-sum-2 rand-sum-3)
{tx-status :status, tx-body :body}
@(http/post (str endpoint-url-short ledger "/transact")
(test/standard-request sum-tx))
tx-result (json/parse tx-body)
tx-keys (-> tx-result keys set)
flakes (:flakes tx-result)
tempids (:tempids tx-result)]
(is (= 200 status))
(is (= 200 tx-status))
(is (test/contains-every? tx-keys :auth :block :hash :fuel :status
:flakes))
(is (< tx-count (count flakes)))
(is (nil? tempids)))))
ENDPOINT TEST : /new - db
(deftest create-ledger-test
(testing "Creating a new ledger"
(let [new-ledger-body {:ledger/id (str "test/three-" (UUID/randomUUID))}
{:keys [status body]} @(http/post
(str endpoint-url-short "new-ledger")
(test/standard-request new-ledger-body))
result (json/parse body)]
(is (= 200 status))
(is (string? result))
(is (= 64 (count result))))))
ENDPOINT TEST : /new - keys
(deftest new-keys-test
(testing "Generating new keys"
(let [{:keys [status body]} @(http/post (str endpoint-url-short "new-keys"))
result (json/parse body)
result-keys (-> result keys set)]
(is (= 200 status))
(is (= #{:private :public :account-id} result-keys)))))
ENDPOINT TEST : /command
(deftest command-add-person-test
(testing "Issue a signed command to add a person"
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger
"chat-alt-people-comments-chats.edn")
priv-key (slurp "default-private-key.txt")
cmd-map (assoc (fdb/tx->command ledger
[{:_id "person" :stringNotUnique "JoAnne"}]
priv-key)
:txid-only true)
{:keys [status body]} @(http/post
(str endpoint-url-short ledger "/command")
(test/standard-request cmd-map))
result (json/parse body)]
(is (= 200 status))
(is (string? result))
(is (= 64 (count result))))))
(deftest command-add-person-verbose-test
(testing "Issue a signed command to add a person")
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger
"chat-alt-people-comments-chats.edn")
priv-key (slurp "default-private-key.txt")
cmd-map (-> ledger
(fdb/tx->command [{:_id "person" :stringNotUnique "Sally"}]
priv-key))
{:keys [status body]} @(http/post (str endpoint-url-short ledger
"/command")
(test/standard-request cmd-map))
result (json/parse body)]
(is (= 200 status))
(is (map? result))
(is (test/contains-every? result :tempids :block :hash :instant
:type :duration :fuel :auth :status :id
:bytes :t :flakes))))
ENDPOINT TEST : signed /delete - db request
(deftest delete-ledger-test
(testing "delete ledger - open api"
(let [ledger (test/rand-ledger test/ledger-endpoints)
{:keys [status body]} @(http/post
(str endpoint-url-short "delete-ledger")
(test/standard-request {:ledger/id ledger}))
result (json/parse body)]
(is (= 200 status))
(is (= ledger (:deleted result))))))
ENDPOINT TEST : /gen - flakes , /query - with , /test - transact - with
#_(deftest gen-flakes-query-transact-with-test
(testing "Issue a signed command to add a person."
(let [ledger (test/rand-ledger test/ledger-endpoints)
_ (test/transact-schema ledger "chat-alt.edn")
_ (test/transact-data ledger
"chat-alt-people-comments-chats.edn")
txn [{:_id "person" :stringNotUnique "Josie"}
{:_id "person" :stringNotUnique "Georgine"}
{:_id "person" :stringNotUnique "Alan"}
{:_id "person" :stringNotUnique "Elaine"}]
{:keys [status body]} @(http/post
(str endpoint-url-short ledger
"/gen-flakes")
(test/standard-request txn))
_ (println "gen-flakes result:" (pr-str body))
flakes (-> body json/parse :flakes)
qw-test {:query {:select ["*"] :from "person"} :flakes flakes}
{qw-status :status, qw-body :body}
@(http/post (str endpoint-url-short ledger "/query-with")
(test/standard-request qw-test))
qw-result (json/parse qw-body)
{q-status :status, q-body :body}
@(http/post (str endpoint-url-short ledger "/query")
(test/standard-request {:select ["*"] :from "person"}))
q-result (json/parse q-body)]
(is (= 200 status))
(is (= 200 qw-status))
(is (= 200 q-status))
(is (= (->> qw-result (map :person/stringNotUnique) set)
#{"Josie" "Alan" "Georgine" "Elaine"}))
(is (not-any? (->> q-result (map :person/stringNotUnique) set)
["Josie" "Alan" "Georgine" "Elaine"])))))
|
53bfa40c9b3499bdc728dac339f8d905c78b0408967f9056bf2fa749b8312d74 | euhmeuh/web-galaxy | _base.rkt | #lang racket/base
(provide
base-page)
(require
racket/string
web-galaxy/entities
web-galaxy/translate)
(define basic-links
(list
(link "/" "Home")
(link "#about" "About")))
(define (render-navigation links)
`(nav ([role "navigation"])
(ul ,@(map (lambda (link)
`(li ,(render-element link)))
links))))
(define (render-title title)
`(title ,(string-append title " | Pony Blog")))
(define (base-page title links renderer)
`(html ([lang ,(symbol->string (current-language))])
(head
(meta ([charset "utf-8"]))
(link ([rel "stylesheet"] [type "text/css"] [href "/common.css"]))
,(render-title title))
(body
(header
(h1 "Pony Blog")
(p "The blog about colorful ponies")
,(render-navigation (append basic-links links)))
,(renderer)
(footer
(p (small "This blog was made with web-galaxy, the Racket framework for the web!"))))))
| null | https://raw.githubusercontent.com/euhmeuh/web-galaxy/2d9d5710aec25d961dcfc37a2e88c3c0f435021f/web-galaxy-test/tests/web-galaxy/pony-blog/pages/_base.rkt | racket | #lang racket/base
(provide
base-page)
(require
racket/string
web-galaxy/entities
web-galaxy/translate)
(define basic-links
(list
(link "/" "Home")
(link "#about" "About")))
(define (render-navigation links)
`(nav ([role "navigation"])
(ul ,@(map (lambda (link)
`(li ,(render-element link)))
links))))
(define (render-title title)
`(title ,(string-append title " | Pony Blog")))
(define (base-page title links renderer)
`(html ([lang ,(symbol->string (current-language))])
(head
(meta ([charset "utf-8"]))
(link ([rel "stylesheet"] [type "text/css"] [href "/common.css"]))
,(render-title title))
(body
(header
(h1 "Pony Blog")
(p "The blog about colorful ponies")
,(render-navigation (append basic-links links)))
,(renderer)
(footer
(p (small "This blog was made with web-galaxy, the Racket framework for the web!"))))))
| |
5b1686f8c21da0e671f5749f8f92d3bef0291bc3c6bb50da0962fd41246be331 | haroldcarr/learn-haskell-coq-ml-etc | Lib.hs | module Lib where
-- /#free-monads
import Control.Monad.Free
Pure : : a - > Free f a
Free : : f ( Free f a ) - > Free f a
liftF : : ( Functor f , ) = > f a - > m a
retract : : Monad f = > Free f a - > f a
Free monads : monads that
- instead of having a join operation that combines computations
- forms composite computations from application of a functor
join : : Monad m = > m ( m a ) - > m a
wrap : : = > f ( m a ) - > m a
example : Partiality monad : models computations which can diverge
create a free monad from the Maybe functor
used to fix the call - depth of function ( e.g. , )
Pure :: a -> Free f a
Free :: f (Free f a) -> Free f a
liftF :: (Functor f, MonadFree f m) => f a -> m a
retract :: Monad f => Free f a -> f a
Free monads : monads that
- instead of having a join operation that combines computations
- forms composite computations from application of a functor
join :: Monad m => m (m a) -> m a
wrap :: MonadFree f m => f (m a) -> m a
example : Partiality monad : models computations which can diverge
create a free monad from the Maybe functor
used to fix the call-depth of function (e.g., Ackermann)
-}
type Partiality a = Free Maybe a
-- Non-termination.
never :: Partiality a
never = fix (Free . Just)
where
fix :: (a -> a) -> a
fix f = let x = f x in x
fromMaybe :: Maybe a -> Partiality a
fromMaybe (Just x) = Pure x
fromMaybe Nothing = Free Nothing
runPartiality :: Int -> Partiality a -> Maybe a
runPartiality 0 _ = Nothing
runPartiality _ (Pure a) = Just a
runPartiality _ (Free Nothing) = Nothing
runPartiality n (Free (Just a)) = runPartiality (n - 1) a
ack :: Int -> Int -> Partiality Int
ack 0 n = Pure (n + 1)
ack m 0 = Free (Just $ ack (m-1) 1)
ack m n = Free (Just $ ack m (n-1)) >>= ack (m-1)
runack :: IO ()
runack = do
let diverge = never :: Partiality ()
print $ runPartiality 1000 diverge
print $ runPartiality 1000 (ack 3 4)
print $ runPartiality 5500 (ack 3 4)
print $ show (ack 1 2)
print $ runPartiality 3 (ack 1 2)
print $ runPartiality 4 (ack 1 2)
| null | https://raw.githubusercontent.com/haroldcarr/learn-haskell-coq-ml-etc/66dc7ac85e46635daa8b225cff2bc7f6f23a3d06/haskell/topic/fix-free/stephen-diehl-free-monds/src/Lib.hs | haskell | /#free-monads
Non-termination. | module Lib where
import Control.Monad.Free
Pure : : a - > Free f a
Free : : f ( Free f a ) - > Free f a
liftF : : ( Functor f , ) = > f a - > m a
retract : : Monad f = > Free f a - > f a
Free monads : monads that
- instead of having a join operation that combines computations
- forms composite computations from application of a functor
join : : Monad m = > m ( m a ) - > m a
wrap : : = > f ( m a ) - > m a
example : Partiality monad : models computations which can diverge
create a free monad from the Maybe functor
used to fix the call - depth of function ( e.g. , )
Pure :: a -> Free f a
Free :: f (Free f a) -> Free f a
liftF :: (Functor f, MonadFree f m) => f a -> m a
retract :: Monad f => Free f a -> f a
Free monads : monads that
- instead of having a join operation that combines computations
- forms composite computations from application of a functor
join :: Monad m => m (m a) -> m a
wrap :: MonadFree f m => f (m a) -> m a
example : Partiality monad : models computations which can diverge
create a free monad from the Maybe functor
used to fix the call-depth of function (e.g., Ackermann)
-}
type Partiality a = Free Maybe a
never :: Partiality a
never = fix (Free . Just)
where
fix :: (a -> a) -> a
fix f = let x = f x in x
fromMaybe :: Maybe a -> Partiality a
fromMaybe (Just x) = Pure x
fromMaybe Nothing = Free Nothing
runPartiality :: Int -> Partiality a -> Maybe a
runPartiality 0 _ = Nothing
runPartiality _ (Pure a) = Just a
runPartiality _ (Free Nothing) = Nothing
runPartiality n (Free (Just a)) = runPartiality (n - 1) a
ack :: Int -> Int -> Partiality Int
ack 0 n = Pure (n + 1)
ack m 0 = Free (Just $ ack (m-1) 1)
ack m n = Free (Just $ ack m (n-1)) >>= ack (m-1)
runack :: IO ()
runack = do
let diverge = never :: Partiality ()
print $ runPartiality 1000 diverge
print $ runPartiality 1000 (ack 3 4)
print $ runPartiality 5500 (ack 3 4)
print $ show (ack 1 2)
print $ runPartiality 3 (ack 1 2)
print $ runPartiality 4 (ack 1 2)
|
a37ac023bbb3118316cd8b78747d332ff36b5af7b79d95083dfdc16d53bb4ccf | amnh/poy5 | build.ml | POY 5.1.1 . A phylogenetic analysis program using Dynamic Homologies .
Copyright ( C ) 2014 , , , Ward Wheeler ,
and the American Museum of Natural History .
(* *)
(* This program is free software; you can redistribute it and/or modify *)
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
(* (at your option) any later version. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston ,
USA
let () = SadmanOutput.register "Build" "$Revision: 3649 $"
let debug_profile_memory = false
let (-->) a b = b a
let current_snapshot x =
if debug_profile_memory then
let () = Printf.printf "%s\n%!" x in
MemProfiler.current_snapshot x
else ()
let rec build_features (meth:Methods.build) =
match meth with
| `Prebuilt fn ->
let fn =
match fn with
| `Local x | `Remote x -> x
in
("type", "prebuilt") ::
("filename", fn) :: []
| `Nj -> ("type", "neighbor joining") :: []
TODO : report type of iteration patterns ; here , BB and RT
let other_features str max_n =
[ ("type", str);
("number of trees to keep", string_of_int max_n)]
in
("number of trees", string_of_int n) ::
(match meth with
| `Wagner_Rnd (max_n, _, _, _, _) ->
other_features "randomized wagner" max_n
| `Wagner_Ordered (max_n, _, _, _, _) ->
other_features "ordered wagner" max_n
| `Wagner_Mst (max_n, _, _, _, _) ->
other_features "minimum spanning tree wagner" max_n
| `Wagner_Distances (max_n, _, _, _, _) ->
other_features "distances based tree wagner" max_n
| `Nj
| (`Branch_and_Bound _)
| (`Prebuilt _) as x ->
build_features x
| _ -> [] )
| `Branch_and_Bound ((bound, threshold, keep_method, max_trees, _),_) ->
[("type", "Branch and bound"); ("initial_bound", match bound with | None ->
"none" | Some x -> string_of_float x);
("threshold", match threshold with
| None -> "0.0" | Some x -> string_of_float x);
("number of trees to keep", string_of_int max_trees);
("keep method", match keep_method with
| `Last -> "last"
| `First -> "first"
| `Keep_Random -> "random")]
| `Build_Random _ ->
[("type", "Uniformly at random")]
let remove_exact (meth : Methods.cost_calculation) (acc : Methods.transform
list) : Methods.transform list =
match meth with
| #Methods.transform as meth -> meth :: acc
let rec get_transformations (meth : Methods.build) : Methods.transform list =
match meth with
| `Nj
| `Prebuilt _ -> []
| `Build (_, build_meth, trans,_) ->
List.fold_right remove_exact (trans @
(match build_meth with
| `Branch_and_Bound ((_, _, _, _, trans),_)
| `Wagner_Distances (_, _, _, trans, _)
| `Wagner_Mst (_, _, _, trans, _)
| `Wagner_Rnd (_, _, _, trans, _)
| `Wagner_Ordered (_, _, _, trans, _)
| `Constraint (_, _, _, trans)
| `Build_Random ((_, _, _, trans, _),_) -> trans
| `Nj
| `Prebuilt _ -> [])) []
| `Branch_and_Bound ((_, _, _, _, trans),_)
| `Build_Random ((_, _, _, trans, _),_) ->
List.fold_right remove_exact trans []
module type S = sig
type a
type b
val report_mst : Data.d -> a list -> string option -> unit
(** [prebuilt a b] generates a list of trees using the list of trees [a] and the
* node data [b]. It is required that the names of the taxa in [a] have data
* associated in [b]. [b] is generated by the {!Node.load_data} function. *)
val prebuilt :
(string option * Tree.Parse.tree_types list) list ->
Data.d * a list ->
[> `Set of [> `Single of (a, b) Ptree.p_tree ] list ]
val build_initial_trees :
(a, b) Ptree.p_tree Sexpr.t -> Data.d ->
a list -> Methods.build -> (a, b) Ptree.p_tree Sexpr.t
end
module MakeNormal (Node : NodeSig.S) (Edge : Edge.EdgeSig with type n = Node.n)
(TreeOps : Ptree.Tree_Operations with type a = Node.n with type b = Edge.e)
= struct
module PtreeSearch = Ptree.Search (Node) (Edge) (TreeOps)
module BuildTabus = Tabus.Make (Node) (Edge)
module Queues = Queues.Make (Node) (Edge)
type a = PtreeSearch.a
type b = PtreeSearch.b
type phylogeny = (a, b) Ptree.p_tree
let map_of_list f x =
List.fold_left (fun acc x -> All_sets.IntegerMap.add (f x) x acc)
All_sets.IntegerMap.empty x
let randomize lst =
let arr = Array.of_list lst in
Array_ops.randomize arr;
Array.to_list arr
let single_wagner_search_manager = new Queues.wagner_srch_mgr true 1 0.0
let set_of_leafs data =
map_of_list (fun x -> Node.taxon_code x) data
let disjoin_tree data node =
let leafs = set_of_leafs node in
let tree = Ptree.make_disjoint_tree data leafs in
tree --> PtreeSearch.downpass --> PtreeSearch.uppass
let edges_of_tree tree =
Tree.EdgeSet.fold (fun x acc -> x :: acc) tree.Ptree.tree.Tree.d_edges []
let random_tree (data : Data.d) (nodes : a list) adj_mgr =
let tree : phylogeny =
{ (Ptree.empty data) with
Ptree.tree = Tree.random (List.map Node.taxon_code nodes);
Ptree.node_data = map_of_list Node.taxon_code nodes; }
in
tree --> PtreeSearch.downpass --> PtreeSearch.uppass
let branch_and_bound keep_method max_trees threshold data nodes bound adj_mgr =
let select_appropriate bound_plus_threshold lst =
let lst = List.filter (fun x -> bound_plus_threshold >=
Ptree.get_cost `Adjusted x) lst in
let len = List.length lst in
if len <= max_trees then lst
else
let () = assert (len = max_trees + 1) in
match keep_method with
| `Last ->
(match List.rev lst with
| h :: lst -> List.rev lst
| [] -> assert false)
| `First ->
(match lst with
| h :: lst -> lst
| [] -> assert false)
| `Keep_Random ->
(let arr = Array.of_list lst in
Array_ops.randomize arr;
match Array.to_list arr with
| h :: lst -> lst
| [] -> assert false)
in
(* calculate the initial bound if nto provided; /2 for threshold *)
let bound = match bound with
| None -> max_float /. 2.
| Some x -> x
(* create status for ncurses *)
and st = Status.create "Branch and Bound Build" (Some 100) "percent complete" in
let () = Status.full_report ~adv:(0) st in
(* We need to present some output; find a decent depth and that percentage done *)
let report_depth,report_percent =
(* the number of possibilities at level n *)
let n t =
let rec n acc t = match t with
| 0 | 1 | 2 | 3 -> acc
| t -> n (acc*(2*t-5)) (t-1)
in
n 1 t
depth=6 will prune ~1 % of the tree ( having 105 possibilities )
and depth = max 1 (min ((List.length nodes)-1) 6) and p = ref 0.0 in
depth,
(fun depth ->
let p_incr = (1.0 /. float (n depth)) *. 100.0 in
p := p_incr +. !p;
Status.full_report ~adv:(int_of_float !p) st)
in
let rec aux_branch_and_bound depth ((bound, best_trees) as acc) tree
edges cur_handle other_handles =
match edges with
| (Tree.Edge (x, y)) :: t ->
if report_depth = depth then report_percent depth;
let new_tree, _ =
TreeOps.join_fn adj_mgr [] (Tree.Edge_Jxn (x,y)) cur_handle tree
in
let new_cost = Ptree.get_cost `Adjusted new_tree in
if new_cost > bound +. threshold then begin
if depth < report_depth then report_percent depth;
aux_branch_and_bound depth acc tree t cur_handle other_handles
end else begin
let acc = match other_handles with
| nh :: oh ->
aux_branch_and_bound (depth+1) acc new_tree
(edges_of_tree new_tree) (Tree.Single_Jxn nh) oh
| [] ->
let realbound = bound +. threshold in
if new_cost < bound then
new_cost, select_appropriate (new_cost +. threshold) (new_tree :: best_trees)
else if new_cost <= realbound then
(bound, select_appropriate realbound (new_tree::best_trees))
else
acc
in
aux_branch_and_bound depth acc tree t cur_handle other_handles
end
| [] -> acc
in
let initial_tree = disjoin_tree data nodes in
let _, trees =
if max_trees < 1 then 0., []
else begin match List.map Node.taxon_code nodes with
| f :: s :: tail ->
let new_tree, _ =
TreeOps.join_fn adj_mgr [] (Tree.Single_Jxn f) (Tree.Single_Jxn s) initial_tree
in
begin match tail with
| t :: tail ->
let edges = edges_of_tree new_tree in
aux_branch_and_bound 3 (bound,[]) new_tree edges (Tree.Single_Jxn t) tail
| [] ->
Ptree.get_cost `Adjusted new_tree, [new_tree]
end
| _ -> 0., [initial_tree]
end
in
let () = Status.finished st in
Sexpr.of_list (List.map PtreeSearch.uppass trees)
(* Order a list of trees from cheapest to most expensive adjusted cost. *)
let sort_list_of_trees ptrees =
    let weight t = Ptree.get_cost `Adjusted t in
    List.sort (fun x y -> compare (weight x) (weight y)) ptrees
(* [constrained_build cg data n constraint_tree nodes adj_mgr] builds [n]
   trees whose topologies are compatible with [constraint_tree]: each
   internal node of the constraint is resolved by a constrained Wagner
   search over a randomized child order, so every replicate explores a
   different resolution. *)
let constrained_build cg data n constraint_tree nodes adj_mgr =
    (* Shuffle the child lists of every internal node of the parsed tree. *)
    let rec randomize_tree tree =
        match tree with
        | Tree.Parse.Leafp _ -> tree
        | Tree.Parse.Nodep (lst, res) ->
            let arr = Array.map randomize_tree (Array.of_list lst) in
            Array_ops.randomize arr;
            Tree.Parse.Nodep ((Array.to_list arr), res)
    in
    let ptree = disjoin_tree data nodes in
    (* Bottom-up constructor: resolve each constraint node by joining the
       handles produced by its children through a constrained Wagner
       search; returns the updated tree and the handle of the subtree. *)
    let rec aux_constructor fn ptree tree =
        match tree with
        | Tree.Parse.Leafp x ->
            ptree, (Data.taxon_code (fn x) data)
        | Tree.Parse.Nodep ([x], _) ->
            (* A single-child node adds no constraint; recurse through it. *)
            aux_constructor fn ptree x
        | Tree.Parse.Nodep (lst, _) ->
            let ptree, handles =
                List.fold_left (fun (ptree, acc) item ->
                    let ptree, nh = aux_constructor fn ptree item in
                    ptree, (nh :: acc)) (ptree, []) lst
            in
            let handles =
                List.map
                    (fun (x : int) -> Ptree.handle_of x ptree)
                    handles
            in
            (* The search may only touch these handles (and their parents,
               when they have one). *)
            let constraints = List.fold_left (fun acc x ->
                let acc = All_sets.Integers.add x acc in
                try
                    let parent = Ptree.get_parent x ptree in
                    All_sets.Integers.add parent acc
                with
                | _ -> acc) All_sets.Integers.empty handles
            in
            let ptrees =
                PtreeSearch.make_wagner_tree ~sequence:handles
                    ptree adj_mgr single_wagner_search_manager
                    (BuildTabus.wagner_constraint
                        constraints)
            in
            let ptrees = sort_list_of_trees ptrees in
            match ptrees, handles with
            | (ptree :: _), (h :: _) ->
                ptree, h
            | _ -> assert false
    in
    let st = Status.create "Constrained Wagner Replicate" (Some n)
        "Constrained Wagner tree replicate building" in
    (* Build replicates [0 .. n-1], reporting progress after each one. *)
    let rec total_builder res blt =
        Status.full_report ~adv:(n - blt) st;
        if blt = n then
            let () = Status.finished st in
            `Set res
        else
            (* Unwrap the parse-tree variant and run the constructor with
               the appropriate leaf-name projection. *)
            let rec deal_with_tree = function
                | Tree.Parse.Annotated (t,_) ->
                    deal_with_tree t
                | Tree.Parse.Flat t ->
                    let tree,_= aux_constructor (fun x-> x) ptree (randomize_tree t) in
                    tree
                | Tree.Parse.Branches t ->
                    let tree,_= aux_constructor (fst) ptree (randomize_tree t) in
                    tree
                | Tree.Parse.Characters t ->
                    let tree,_ = aux_constructor (fst) ptree (randomize_tree t) in
                    tree
            in
            let tree = deal_with_tree constraint_tree in
            let tree = TreeOps.uppass tree in
            total_builder ((`Single tree) :: res) (blt + 1)
    in
    if n < 0 then `Empty
    else
        total_builder [] 0
(* Build Wagner trees over all of [nodes] with the given search and tabu
   managers, and return the single cheapest result after an uppass. *)
let single_wagner data tabu_mgr wmgr cg nodes adj_mgr =
    let start_tree = disjoin_tree data nodes in
    let sequence = List.map Node.taxon_code nodes in
    let built =
        PtreeSearch.make_wagner_tree ~sequence start_tree adj_mgr wmgr tabu_mgr
    in
    match sort_list_of_trees built with
    | best :: _ -> `Single (PtreeSearch.uppass best)
    | [] -> failwith "No wagner trees built!"
(* [wagner data tabu_mgr cg nodes adj_mgr] builds one Wagner tree,
   keeping only the single best tree at every step. *)
let wagner data tabu_mgr cg nodes adj_mgr =
    single_wagner data tabu_mgr single_wagner_search_manager cg nodes adj_mgr
(* Same as [single_wagner], but the leaves (and hence the addition
   sequence) are drawn freshly from [randomize ()]. *)
let randomized_single_wagner data tabu_mgr randomize wmgr cg adj_mgr =
    let nodes = randomize () in
    single_wagner data tabu_mgr wmgr cg nodes adj_mgr
(** [rand_wagner a b] creates a fresh wagner tree, with its corresponding
 * cost, using the initial set of leaves as generated by [b ()] and the
 * code generator [a]. The addition sequence depends on the list produced
 * by [b] before creating the tree. *)
let rand_wagner data tabu_mgr data_generator cg =
    randomized_single_wagner data tabu_mgr data_generator
        single_wagner_search_manager cg
(* Build [n] independent Wagner trees; equivalent to running [n]
   replicates, each with a fresh addition sequence from
   [data_generator]. *)
let n_independent_wagner data_generator tabu_mgr cg data n =
    let st =
        Status.create "Wagner Replicate" (Some n)
            "Wagner tree replicate building"
    in
    let mgr = single_wagner_search_manager in
    let rec loop remaining acc =
        if remaining <= 0 then begin
            Status.finished st;
            `Set acc
        end else begin
            Status.full_report ~adv:(n - remaining) st;
            let tree =
                randomized_single_wagner data tabu_mgr data_generator mgr cg
            in
            loop (remaining - 1) (tree :: acc)
        end
    in
    loop n []
(* [create_adjust_manager (m, b)] decodes the iteration method [m] into
   an optional threshold and an optional iteration count, then selects
   the branch-adjustment tabu requested by [b]. *)
let create_adjust_manager (m, b) =
    let thrsh, count = match m with
        | `Threshold f -> Some f, None
        | `MaxCount c -> None, Some c
        | `Both (f, c) -> Some f, Some c
        | `Always -> Some 0.0, Some 0
        | `Null -> None, None
    in
    let mgr = match b with
        | `Null -> BuildTabus.simple_nm_none count thrsh
        | `AllBranches -> BuildTabus.simple_nm_all count thrsh
        | `JoinDelta -> BuildTabus.complex_nm_delta count thrsh
        | `Neighborhood x -> BuildTabus.complex_nm_neighborhood x count thrsh
    in
    Some mgr
(* compose the iteration manager tabu *)
(* [pick_tabu_manager meth] selects the Wagner tabu manager that matches
   the requested neighborhood evaluation method. *)
let pick_tabu_manager = function
    | `UnionBased _ -> (* Maximum distance is useless in this case *)
        BuildTabus.wagner_tabu
    | `AllBased _
    | `Partition _ ->
        BuildTabus.distance_dfs_wagner
(** [max_n_wagner a b n] creates a list of at most [n] wagner trees using
 * those other trees that have the same cost of the current best tree.
 * It uses the original addition sequence as generated by [b ()]. *)
let max_n_wagner data threshold tabu_mgr data_generator cg n adj_mgr =
    let wmgr = new Queues.wagner_srch_mgr true n threshold in
    let nodes = data_generator () in
    let sequence = List.map Node.taxon_code nodes in
    let built =
        PtreeSearch.make_wagner_tree ~sequence
            (disjoin_tree data nodes) adj_mgr wmgr tabu_mgr
    in
    `Set (List.map (fun t -> `Single (TreeOps.uppass t)) built)
(* [make_distances_table ?both nodes] precomputes the pairwise distance
   between all [nodes], keyed by [(taxon_code a, taxon_code b)].
   The [both = true] branch has always stored the same value under both
   orientations, i.e. distances are treated as symmetric; we rely on that
   to reuse a previously measured mirror entry instead of calling
   [Node.distance] a second time for the reversed pair (previously, with
   [both = false], every pair was measured twice). The resulting table
   contents are unchanged. *)
let make_distances_table ?(both=true) nodes =
    let tmp = Hashtbl.create 99991 in
    List.iter (fun a ->
        List.iter (fun b ->
            let ca = Node.taxon_code a
            and cb = Node.taxon_code b in
            if ca = cb || Hashtbl.mem tmp (ca, cb) then ()
            else if Hashtbl.mem tmp (cb, ca) then
                (* Already measured in the opposite orientation; reuse. *)
                Hashtbl.add tmp (ca, cb) (Hashtbl.find tmp (cb, ca))
            else begin
                let d = Node.distance 100000. a b in
                Hashtbl.add tmp (ca, cb) d;
                if both then Hashtbl.add tmp (cb, ca) d;
            end) nodes) nodes;
    tmp
(* Pairs of terminal codes tagged with their distance, ordered by the
   distance alone so the heap below always pops the closest pair. *)
module OrderedPairs = struct
    type t = (float * (int * int))
    let compare a b = compare ((fst a) : float) (fst b)
end
(* Min-heap of candidate pairs keyed by pairwise distance. *)
module H = Heap.Make (OrderedPairs)
(* Map each taxon code of [nodes] to a singleton leaf tree carrying that
   same code. *)
let table_of_trees nodes =
    let tbl = Hashtbl.create 1667 in
    let register node =
        let code = Node.taxon_code node in
        Hashtbl.add tbl code (Tree.Parse.Leafp code)
    in
    List.iter register nodes;
    tbl
(* Collect the taxon codes of [nodes] into an integer set. *)
let set_of_trees nodes =
    let add_code acc node = All_sets.Integers.add (Node.taxon_code node) acc in
    List.fold_left add_code All_sets.Integers.empty nodes
(* Reverse application: [x --> f] is [f x]; used to write pipelines. *)
let (-->) a b = b a
(* [nj_qtable table terminals] computes the neighbor-joining Q values:
   Q(i,j) = (r - 2) * d(i,j) - sum_k d(i,k) - sum_k d(j,k),
   where [r] is the number of active terminals. The pair minimizing Q is
   the next to be joined. *)
let nj_qtable table terminals =
    (* Distances are looked up under the (min, max) key orientation. *)
    let d x y = Hashtbl.find table ((min x y), (max x y)) in
    let r = float_of_int (All_sets.Integers.cardinal terminals) in
    let q_table = Hashtbl.create 99991 in
    Hashtbl.iter (fun ((i, j) as p) dist ->
        (* -(sum of distances from i) - (sum from j), skipping the zero
           self terms. *)
        let sum =
            All_sets.Integers.fold (fun c sum ->
                sum -.
                (if c = i then 0. else d c i) -.
                (if c = j then 0. else d c j))
                terminals 0.
        in
        Hashtbl.add q_table p (((r -. 2.) *. dist) +. sum)) table;
    q_table
(* [nj_distance_to_ancestor table terminals f g] is the NJ branch length
   from terminal [f] to the new ancestor created by joining [f] and [g]:
   d(f,u) = d(f,g)/2 + (sum_k d(f,k) - sum_k d(g,k)) / (2 (r - 2)),
   with [r] the number of active terminals. *)
let nj_distance_to_ancestor table terminals f g =
    let d x y = Hashtbl.find table ((min x y), (max x y)) in
    let sum_of_distance_to x =
        All_sets.Integers.fold (fun y sum -> sum +. (d x y)) terminals 0.
    in
    let r = float_of_int (All_sets.Integers.cardinal terminals) in
    ((0.5 *. (d f g))
        +. ((1. /. (2. *. (r -. 2.))) *.
            ((sum_of_distance_to f) -. (sum_of_distance_to g))))
(* [nj_new_distance table distance_fu distance_gu f g k] is the NJ
   distance from terminal [k] to the ancestor [u] of the newly joined
   pair [f], [g]: d(u,k) = (d(f,k) - d(f,u))/2 + (d(g,k) - d(g,u))/2. *)
let nj_new_distance table distance_fu distance_gu f g k =
    let d x y = Hashtbl.find table ((min x y), (max x y)) in
    let from_f = 0.5 *. ((d f k) -. distance_fu) in
    let from_g = 0.5 *. ((d g k) -. distance_gu) in
    from_f +. from_g
(* [join_trees code distance_table a b tree_table trees heap] joins the
   subtrees rooted at [a] and [b] under a new internal node labeled
   [code], updates the distance table and candidate heap with the
   distances from every remaining terminal to the new node, and returns
   the updated terminal set, the next (decreasing) internal code, and the
   heap. *)
let join_trees code distance_table a b tree_table trees heap =
    (* [a] and [b] stop being active terminals once joined. *)
    let trees =
        trees
        --> All_sets.Integers.remove a
        --> All_sets.Integers.remove b
    in
    let ta = Hashtbl.find tree_table a
    and tb = Hashtbl.find tree_table b in
    let tab = Tree.Parse.Nodep ([ta; tb], code) in
    Hashtbl.add tree_table code tab;
    let heap =
        (* Branch lengths from [a] and [b] to their new ancestor. *)
        let distance_acode =
            nj_distance_to_ancestor distance_table trees a b
        and distance_bcode =
            nj_distance_to_ancestor distance_table trees b a
        in
        (* Record the distance from every remaining terminal to [code]
           and push it as a join candidate. *)
        All_sets.Integers.fold (fun c heap ->
            let dc =
                nj_new_distance distance_table distance_acode
                    distance_bcode a b c
            in
            let pair = (code, c) in
            Hashtbl.add distance_table pair dc;
            H.insert (dc, pair) heap) trees heap
    in
    let trees = All_sets.Integers.add code trees in
    trees, code - 1, heap
(* Pop candidate pairs off the heap until one whose two members are both
   still active terminals is found, then join them. Stale entries (pairs
   already consumed by an earlier join) are simply discarded. *)
let rec merge_nj_trees code distance_table tree_table trees heap =
    let (_, (a, b)) = H.findMin heap in
    let rest = H.deleteMin heap in
    let both_active =
        All_sets.Integers.mem a trees && All_sets.Integers.mem b trees
    in
    if both_active then
        join_trees code distance_table a b tree_table trees rest
    else
        merge_nj_trees code distance_table tree_table trees rest
(* [nj data nodes] builds a neighbor-joining tree over [nodes], returning
   it as a parsed tree whose leaves carry taxon names. *)
let nj data nodes =
    let distance_table =
        let both = false in
        make_distances_table ~both nodes
    in
    (* Seed the candidate heap with every stored pairwise distance. *)
    let heap =
        Hashtbl.fold
            (fun x y acc ->
                H.insert (y, x) acc)
            distance_table H.empty
    in
    let tree_table = table_of_trees nodes in
    let trees = set_of_trees nodes in
    (* Repeatedly join the closest pair of active subtrees until only one
       remains; internal nodes receive negative codes starting at -1. *)
    let rec complete_merge code trees heap =
        if 1 = All_sets.Integers.cardinal trees then
            Hashtbl.find tree_table (All_sets.Integers.choose trees)
        else
            let trees, code, heap =
                merge_nj_trees code distance_table tree_table trees heap
            in
            complete_merge code trees heap
    in
    let tree = complete_merge (-1) trees heap in
    (* Relabel leaves with their taxon names; internal (negative) codes
       become anonymous nodes. *)
    Tree.Parse.map
        (fun x ->
            if x >= 0 then Data.code_taxon x data
            else "") tree
(* [distances_ordered nodes] returns a generator of addition sequences:
   the base order lists taxa by first appearance when pairs are visited
   from closest to farthest, and every call perturbs that order with a
   random local shuffle. *)
let distances_ordered nodes =
    let distances_table = make_distances_table nodes in
    (* All (pair, distance) entries sorted by increasing distance. *)
    let distances_list =
        let tmp =
            Hashtbl.fold (fun a b acc ->
                (a, b) :: acc) distances_table []
        in
        List.sort (fun (_, a) (_, b) -> compare a b) tmp
    in
    (* Keep the first appearance of each taxon across the sorted pairs. *)
    let _, addition_list =
        let add_one visited acc x =
            if All_sets.Integers.mem x visited then visited, acc
            else All_sets.Integers.add x visited, x :: acc
        in
        List.fold_left (fun (visited, acc) ((x, y), _) ->
            let v, a = add_one visited acc x in
            add_one v a y) (All_sets.Integers.empty, []) distances_list
    in
    let addition_list = List.rev addition_list in
    (* Random local shuffle: at each step either keep the head or bubble
       the second element ahead of it, with probability 1/2 each. *)
    let rec addition_function lst =
        match lst with
        | h1 :: ((h2 :: t) as rest) ->
            if 0 = Random.int 2 then
                h1 :: (addition_function rest)
            else h2 :: (addition_function (h1 :: t))
        | _ -> lst
    in
    (* Resolve codes back to the node values before returning the list. *)
    let create_list () =
        let lst = addition_function addition_list in
        List.rev (List.map (fun x ->
            List.find (fun y ->
                x = Node.taxon_code y) nodes) lst)
    in
    create_list
(* [mst data nodes] returns a generator of addition sequences obtained by
   a BFS traversal of the minimum spanning tree over the pairwise
   distances of [nodes]. *)
let mst data nodes =
    let table = make_distances_table nodes in
    let dist a b = Hashtbl.find table (a, b) in
    let codes = List.map Node.taxon_code nodes in
    let spanning = Mst.kruskal Mst.Closest dist codes in
    fun () ->
        let order = Mst.bfs_traversal Mst.Closest2 spanning in
        List.map
            (fun code -> List.find (fun nd -> code = Node.taxon_code nd) nodes)
            order
(* Compute the minimum spanning tree over the pairwise distances of
   [nodes] and print it (with taxon names) to [filename]. *)
let report_mst data nodes filename =
    let table = make_distances_table nodes in
    let dist a b = Hashtbl.find table (a, b) in
    let codes = List.map Node.taxon_code nodes in
    let spanning = Mst.kruskal Mst.Closest dist codes in
    Mst.print_mst_tree (fun code -> Data.code_taxon code data) spanning filename
(* [max_n_dg_p_wagner data threshold tabu_mgr data_generator cg n p adj_mgr]
   performs [p] independent Wagner builds, keeping up to [n] best trees in
   each, with addition sequences drawn from [data_generator ()]. *)
let max_n_dg_p_wagner data threshold tabu_mgr data_generator cg n p adj_mgr: phylogeny Sexpr.t =
    match p, n with
    | 0, _ -> `Empty
    | 1, 1 ->
        (* One build, one tree kept. *)
        let st = Status.create "Building Wagner Tree" None "" in
        let res = rand_wagner data tabu_mgr data_generator cg adj_mgr in
        Status.finished st;
        res
    | 1, n ->
        (* One build, keeping up to [n] equally good trees. *)
        let st = Status.create "Building Wagner Tree" None "" in
        let res = max_n_wagner data threshold tabu_mgr data_generator cg n adj_mgr in
        Status.finished st;
        res
    | p, _ ->
        (* [p] independent builds; progress reported per replicate. *)
        let builder cnt acc =
            let next = max_n_wagner data threshold tabu_mgr data_generator cg n adj_mgr in
            next :: acc
        in
        `Set (Sexpr.compose_status "Wagner build" builder p [])
(** [max_n_randomized_p_wagner a b n p] generates [p] independent
 * wagner trees, on each keeping the best [n] trees found on each step,
 * following the addition sequence as specified by the node generating
 * function [b ()]. *)
(* Addition sequences drawn uniformly at random from [nodes]. *)
let max_n_randomized_p_wagner data threshold tabu_mgr cg nodes n p adj =
    let data_generator () = randomize nodes in
    max_n_dg_p_wagner data threshold tabu_mgr data_generator cg n p adj
(* Addition sequences from a BFS of the minimum spanning tree. *)
let max_n_mst_p_wagner data threshold tabu_mgr node_data cg nodes n p adj =
    let mst = mst node_data nodes in
    max_n_dg_p_wagner data threshold tabu_mgr mst cg n p adj
(* Addition sequences ordered by increasing pairwise distance. *)
let max_n_distances_p_wagner data threshold tabu_mgr cg nodes n p adj =
    let dord = distances_ordered nodes in
    max_n_dg_p_wagner data threshold tabu_mgr dord cg n p adj
(* [split_in_forests trees] classifies every named tree list as either a
   set of alternative trees over the same taxa (split into individual
   entries) or a forest over pairwise-disjoint taxa (kept together);
   inputs that are neither are rejected with an error message. *)
let split_in_forests trees =
    (* Set of taxon names appearing in one parsed tree. *)
    let make_tree_set_of_taxa acc x =
        let rec make_tree_set_of_taxa acc x =
            match x with
            | Tree.Parse.Leafp name -> All_sets.Strings.add name acc
            | Tree.Parse.Nodep (chld, _) ->
                List.fold_left make_tree_set_of_taxa acc chld
        in
        make_tree_set_of_taxa acc (Tree.Parse.strip_tree x)
    in
    let are_different a acc b =
        acc && (All_sets.Strings.is_empty (All_sets.Strings.inter a b))
    in
    let are_same a acc b =
        acc && (0 = (All_sets.Strings.compare a b))
    in
    (* Fold [pairwise_comparison] over all unordered pairs of [lst]. *)
    let rec are_something pairwise_comparison acc lst =
        match lst with
        | h :: t ->
            let acc = List.fold_left (pairwise_comparison h) acc t in
            are_something pairwise_comparison acc t
        | [] -> acc
    in
    let are_all_the_same_set lst =
        are_something are_same true lst
    and are_all_different lst =
        are_something are_different true lst
    in
    List.fold_left (fun acc (name,x) ->
        if 1 = List.length x then (name,x) :: acc
        else
            let taxa =
                List.map (make_tree_set_of_taxa All_sets.Strings.empty) x
            in
            (* Same taxa everywhere: each tree is an independent entry. *)
            if are_all_the_same_set taxa then
                let x = List.map (fun x -> name,[x]) x in
                x @ acc
            (* Pairwise-disjoint taxa: keep the whole list as one forest. *)
            else if are_all_different taxa then (name,x) :: acc
            else
                let _ =
                    Status.user_message Status.Error
                        ("While@ trying@ to@ read@ the@ trees@ from@ the@ "
                        ^ "input@ files@ I@ have@ found@ some@ internal@ "
                        ^ "inconsistencies:@ POY@ can@ read@ either@ forests@ "
                        ^ "or@ trees,@ and@ recognize@ each@ by@ comparing@ "
                        ^ "the@ trees@ in@ memory,@ either@ all@ the@ trees@ "
                        ^ "between@ separators@ (, or ;)@ share@ same@ taxa@ "
                        ^ "in@ which@ case@ I@ treat@ them@ as@ just@ trees@ "
                        ^ "or@ they@ are@ disjoint@, and@ I@ treat@ them@ as@ "
                        ^ "a@ forest.@ Your@ input@ don't@ have@ one@ of@ "
                        ^ "those@ properties.@ I@ think@ you@ intend@ to@ "
                        ^ "read@ just@ trees@, but@ there@ is@ some@ tree@ "
                        ^ "with@ taxa@ that@ doesn't@ appear@ in@ some@ other@ "
                        ^ "tree.@ Sorry,@ I@ can't@ recover@ from@ this,@ and@ "
                        ^ "won't@ load@ the@ trees@ you@ gave@ me.")
                in
                failwith "Illegal tree input")
        [] trees
(* [prebuilt trees sumdata] turns already-parsed [trees] into phylogenies
   over the node data in [sumdata]: each tree is converted, then refreshed
   with a downpass followed by an uppass. The taxa named in [trees] must
   have data in [sumdata]. *)
let prebuilt (trees: (string option * Tree.Parse.tree_types list) list) ((data,_) as sumdata) =
    let trees = split_in_forests trees in
    let st = Status.create "Loading Trees" (Some (List.length trees)) "" in
    let constructor (cnt, lst) x =
        Status.full_report ~adv:cnt st;
        let t =
            (* Profiling snapshots bracket each conversion stage. *)
            current_snapshot "Build.prebuilt.constructor begin";
            let tree = PtreeSearch.convert_to x sumdata in
            current_snapshot "Build.prebuilt.constructor converted";
            let tree = PtreeSearch.downpass tree in
            current_snapshot "Build.prebuilt.constructor downpass";
            let tree = PtreeSearch.uppass tree in
            current_snapshot "Build.prebuilt.constructor uppass";
            tree
        in
        cnt + 1, (`Single t) :: lst
    in
    let res =
        let _, res = List.fold_left constructor (1, []) trees in
        `Set res
    in
    Status.finished st;
    res
(* Shadowing wrapper: short-circuit an empty tree list (no status bar,
   no work) before delegating to the converter above. *)
let prebuilt trees sumdata = match trees with
    | [] -> `Set []
    | xs -> prebuilt xs sumdata
(* [build_initial_trees trees data nodes meth] produces the initial set
   of trees requested by the build method [meth], recording timing and
   per-tree cost information through Sadman. *)
let rec build_initial_trees trees data nodes (meth : Methods.build) =
    let d = (data, nodes) in
    (* Generator of fresh internal-vertex codes, starting after the taxa. *)
    let cg =
        let code = ref data.Data.number_of_taxa in
        fun () -> incr code; !code
    in
    (* Accumulate one ("tree_<i>_cost", cost) report entry per tree. *)
    let built_tree_report acc trees =
        let builder (acc, cnt) t =
            let cost = Ptree.get_cost `Adjusted t in
            let hd = ("tree_" ^ string_of_int cnt ^ "_cost", string_of_float
                cost) in
            hd :: acc, cnt + 1
        in
        builder acc trees
    in
    (* The constraint tree is the consensus of the current trees when no
       file is given, or a single verified tree read from [file]. *)
    let do_constraint file = match file with
        | None ->
            let hd, tree_list = match Sexpr.to_list trees with
                | (h :: _) as t -> h, t
                | [] -> failwith "No trees for constraint"
            in
            let maj = float_of_int (List.length tree_list) in
            Ptree.consensus
                (PtreeSearch.get_collapse_function None)
                (fun code -> Data.code_taxon code data)
                (maj)
                (Sexpr.to_list trees)
                (match data.Data.root_at with
                | Some v -> v
                | None ->
                    let f = Sexpr.first trees in
                    Ptree.choose_leaf f)
        | Some file ->
            begin match (Data.process_trees data file).Data.trees with
                | [((_,[t]), _, _) as one] when Data.verify_trees data one -> t
                | _ -> failwith "Illegal input constraint file"
            end
    in
    let perform_build () = match meth with
        | `Branch_and_Bound ((bound, threshold, keep_method, max_trees, _),adj_meth) ->
            let threshold = match threshold with
                | None -> 0.
                | Some x -> x
            and adj_mgr = create_adjust_manager adj_meth in
            branch_and_bound keep_method max_trees threshold data nodes bound adj_mgr
        | `Prebuilt file ->
            (* Read the trees from [file], keeping only verifiable ones. *)
            let data = Data.process_trees data file in
            let trees = List.filter (Data.verify_trees data) data.Data.trees in
            let trees = List.map (fun (a, _, id) -> a) trees in
            prebuilt trees d
        | `Nj ->
            let tree = None, [Tree.Parse.Flat (nj data nodes)] in
            prebuilt [tree] d
        | `Build (n, build_meth, lst, adj_meth) ->
            let new_nodes = nodes
            and adj_mgr = create_adjust_manager adj_meth in
            (** TODO: Add different cost calculation heuristic methods *)
            (** TODO: Add different keep methods *)
            if n < 1 then trees
            else
                begin match build_meth with
                | `Constraint (_, threshold, file, _) ->
                    let constraint_tree = do_constraint file in
                    constrained_build cg data n constraint_tree nodes adj_mgr
                | `Branch_and_Bound ((bound, threshold, keep_method, max_trees, _),_) ->
                    let threshold =
                        match threshold with
                        | None -> 0.
                        | Some x -> x
                    in
                    branch_and_bound keep_method (n * max_trees) threshold data nodes bound adj_mgr
                | `Wagner_Rnd (max_n, threshold, _, lst, tabu_mgr) ->
                    let tabu_mgr = pick_tabu_manager tabu_mgr in
                    let res =
                        max_n_randomized_p_wagner data threshold
                            tabu_mgr cg new_nodes max_n n adj_mgr
                    in
                    Sexpr.of_list (Sexpr.to_list res)
                | `Wagner_Ordered (max_n, threshold, keep_method, lst, tabu_mgr) ->
                    (* One tree built with the given input order, then
                       [n - 1] randomized builds. *)
                    let tabu_mgr = pick_tabu_manager tabu_mgr in
                    let status = Status.create "Wagner ordered build" None "" in
                    let () =
                        Status.full_report ~msg:"Building ordered tree" status
                    in
                    let hd =
                        max_n_wagner data threshold tabu_mgr
                            (fun () -> new_nodes) cg max_n adj_mgr
                    in
                    let () =
                        Status.full_report ~msg:"Building random trees"
                            status in
                    let tl =
                        max_n_randomized_p_wagner data threshold tabu_mgr
                            cg new_nodes max_n (n - 1) adj_mgr
                    in
                    let () = Status.finished status in
                    Sexpr.of_list (Sexpr.to_list (`Set [hd; tl]))
                | `Wagner_Distances (max_n, threshold, keep_method, lst, tabu_mgr) ->
                    let tabu_mgr = pick_tabu_manager tabu_mgr in
                    let status =
                        Status.create "Wagner Distances-ordered build"
                            None ""
                    in
                    let () =
                        Status.full_report ~msg:"Building trees"
                            status in
                    let lst =
                        max_n_distances_p_wagner data threshold tabu_mgr
                            cg new_nodes max_n n adj_mgr
                    in
                    let () = Status.finished status in
                    Sexpr.of_list (Sexpr.to_list lst)
                | `Wagner_Mst (max_n, threshold, keep_method, lst, tabu_mgr) ->
                    let tabu_mgr = pick_tabu_manager tabu_mgr in
                    let status =
                        Status.create "Wagner MST-ordered build"
                            None ""
                    in
                    let () =
                        Status.full_report ~msg:"Building trees"
                            status in
                    let lst =
                        max_n_mst_p_wagner data threshold tabu_mgr data
                            cg new_nodes max_n n adj_mgr
                    in
                    let () = Status.finished status in
                    Sexpr.of_list (Sexpr.to_list lst)
                | `Nj
                | (`Prebuilt _) as x -> build_initial_trees trees data nodes x
                | `Build_Random _ ->
                    let st = Status.create "Random Trees build" (Some n) "" in
                    let arr =
                        Array.init n
                            (fun x ->
                                Status.full_report ~adv:x st;
                                random_tree data nodes adj_mgr)
                    in
                    Status.finished st;
                    Sexpr.of_list (Array.to_list arr)
                end;
        | `Build_Random ((n, _, _, _, _),adj_meth) ->
            let st = Status.create "Random Trees build" (Some n) ""
            and adj_mgr = create_adjust_manager adj_meth in
            let arr =
                Array.init n
                    (fun x ->
                        Status.full_report ~adv:x st;
                        random_tree data nodes adj_mgr)
            in
            Status.finished st;
            Sexpr.of_list (Array.to_list arr)
    in
    Sadman.start "build" (build_features meth);
    let timer = Timer.start () in
    let res = perform_build () in
    let time = Timer.get_user timer in
    let report, n =
        Sexpr.fold_left (fun acc x -> built_tree_report acc x) ([], 0) res
    in
    Sadman.finish ((TreeSearch.search_time_and_trees_considered time n) @ report);
    res
end
(* Heterogeneous builder: uses the fast standard-node implementation when
   the data allows it, converting node contents back and forth; falls
   back to the full heterogeneous build otherwise. *)
module Make (NodeH : NodeSig.S with type other_n = Node.Standard.n)
    (EdgeH : Edge.EdgeSig with type n = NodeH.n)
    (TreeOps : Ptree.Tree_Operations with type a = NodeH.n with type b = EdgeH.e) =
struct
    type a = NodeH.n
    type b = EdgeH.e
    module TOH = TreeOps
    module TOS = Chartree.TreeOps
    module NodeS = Node.Standard
    (* DH builds with the heterogeneous nodes, SH with standard nodes. *)
    module DH = MakeNormal (NodeH) (EdgeH) (TreeOps)
    module SH = MakeNormal (NodeS) (Edge.SelfEdge) (TOS)
    let report_mst = DH.report_mst
    let prebuilt = DH.prebuilt
    (* Replace the node data of [ptree] with [nodes], then refresh the
       tree with a downpass followed by an uppass. *)
    let replace_contents downpass uppass get_code nodes data ptree =
        let nt = { (Ptree.empty data) with Ptree.tree = ptree.Ptree.tree } in
        nodes
        --> List.fold_left
            (fun nt node ->
                Ptree.add_node_data (get_code node) node nt) nt
        --> downpass
        --> uppass
    let from_s_to_h = replace_contents TOH.downpass TOH.uppass NodeH.taxon_code
    let from_h_to_s = replace_contents TOS.downpass TOS.uppass NodeS.taxon_code
    (* Build with standard nodes only when the data has no dynamic and no
       static-likelihood characters. *)
    let build_initial_trees trees data n b =
        let has_dyn = Data.has_dynamic data in
        let has_lik = Data.has_static_likelihood data in
        if has_dyn || has_lik then
            DH.build_initial_trees trees data n b
        else
            let s_nodes = List.map NodeH.to_other n in
            let trees = Sexpr.map (from_h_to_s s_nodes data) trees in
            let trees = SH.build_initial_trees trees data s_nodes b in
            Sexpr.map (from_s_to_h n data) trees
end
(* Fast heuristics used *)
| null | https://raw.githubusercontent.com/amnh/poy5/da563a2339d3fa9c0110ae86cc35fad576f728ab/src/build.ml | ocaml |
This program is free software; you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
* [prebuilt a b] generates a list of trees using the list of trees [a] and the
* node data [b]. It is required that the names of the taxa in [a] have data
* associated in [b]. [b] is generated by the {!Node.load_data} function.
calculate the initial bound if nto provided; /2 for threshold
create status for ncurses
We need to present some output; find a decent depth and that percentage done
the number of possibilities at level n
* [rand_wagner a b] creates a fresh wagner tree with its
* corresponding cost using
* the initial set of leaves as generated by [b ()] and the code
* generator [a]. The addition
* sequence depends on the list as produced by [b]
* before creating the tree.
This is equivalent to having multiple replicates
compose the iteration manager tabu
Maximum distance is useless in this case
* [max_n_wagner a b n] creates a list of at most [n] wagner trees using those
* other trees that have the same cost of the current best tree. It uses the
* original addition sequence as generated by [b ()].
* TODO: Add different cost calculation heuristic methods
* TODO: Add different keep methods
Fast heuristics used | POY 5.1.1 . A phylogenetic analysis program using Dynamic Homologies .
Copyright ( C ) 2014 , , , Ward Wheeler ,
and the American Museum of Natural History .
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston ,
USA
let () = SadmanOutput.register "Build" "$Revision: 3649 $"
let debug_profile_memory = false
let (-->) a b = b a
let current_snapshot x =
if debug_profile_memory then
let () = Printf.printf "%s\n%!" x in
MemProfiler.current_snapshot x
else ()
let rec build_features (meth:Methods.build) =
match meth with
| `Prebuilt fn ->
let fn =
match fn with
| `Local x | `Remote x -> x
in
("type", "prebuilt") ::
("filename", fn) :: []
| `Nj -> ("type", "neighbor joining") :: []
TODO : report type of iteration patterns ; here , BB and RT
let other_features str max_n =
[ ("type", str);
("number of trees to keep", string_of_int max_n)]
in
("number of trees", string_of_int n) ::
(match meth with
| `Wagner_Rnd (max_n, _, _, _, _) ->
other_features "randomized wagner" max_n
| `Wagner_Ordered (max_n, _, _, _, _) ->
other_features "ordered wagner" max_n
| `Wagner_Mst (max_n, _, _, _, _) ->
other_features "minimum spanning tree wagner" max_n
| `Wagner_Distances (max_n, _, _, _, _) ->
other_features "distances based tree wagner" max_n
| `Nj
| (`Branch_and_Bound _)
| (`Prebuilt _) as x ->
build_features x
| _ -> [] )
| `Branch_and_Bound ((bound, threshold, keep_method, max_trees, _),_) ->
[("type", "Branch and bound"); ("initial_bound", match bound with | None ->
"none" | Some x -> string_of_float x);
("threshold", match threshold with
| None -> "0.0" | Some x -> string_of_float x);
("number of trees to keep", string_of_int max_trees);
("keep method", match keep_method with
| `Last -> "last"
| `First -> "first"
| `Keep_Random -> "random")]
| `Build_Random _ ->
[("type", "Uniformly at random")]
let remove_exact (meth : Methods.cost_calculation) (acc : Methods.transform
list) : Methods.transform list =
match meth with
| #Methods.transform as meth -> meth :: acc
let rec get_transformations (meth : Methods.build) : Methods.transform list =
match meth with
| `Nj
| `Prebuilt _ -> []
| `Build (_, build_meth, trans,_) ->
List.fold_right remove_exact (trans @
(match build_meth with
| `Branch_and_Bound ((_, _, _, _, trans),_)
| `Wagner_Distances (_, _, _, trans, _)
| `Wagner_Mst (_, _, _, trans, _)
| `Wagner_Rnd (_, _, _, trans, _)
| `Wagner_Ordered (_, _, _, trans, _)
| `Constraint (_, _, _, trans)
| `Build_Random ((_, _, _, trans, _),_) -> trans
| `Nj
| `Prebuilt _ -> [])) []
| `Branch_and_Bound ((_, _, _, _, trans),_)
| `Build_Random ((_, _, _, trans, _),_) ->
List.fold_right remove_exact trans []
module type S = sig
type a
type b
val report_mst : Data.d -> a list -> string option -> unit
val prebuilt :
(string option * Tree.Parse.tree_types list) list ->
Data.d * a list ->
[> `Set of [> `Single of (a, b) Ptree.p_tree ] list ]
val build_initial_trees :
(a, b) Ptree.p_tree Sexpr.t -> Data.d ->
a list -> Methods.build -> (a, b) Ptree.p_tree Sexpr.t
end
module MakeNormal (Node : NodeSig.S) (Edge : Edge.EdgeSig with type n = Node.n)
(TreeOps : Ptree.Tree_Operations with type a = Node.n with type b = Edge.e)
= struct
module PtreeSearch = Ptree.Search (Node) (Edge) (TreeOps)
module BuildTabus = Tabus.Make (Node) (Edge)
module Queues = Queues.Make (Node) (Edge)
type a = PtreeSearch.a
type b = PtreeSearch.b
type phylogeny = (a, b) Ptree.p_tree
let map_of_list f x =
List.fold_left (fun acc x -> All_sets.IntegerMap.add (f x) x acc)
All_sets.IntegerMap.empty x
let randomize lst =
let arr = Array.of_list lst in
Array_ops.randomize arr;
Array.to_list arr
let single_wagner_search_manager = new Queues.wagner_srch_mgr true 1 0.0
let set_of_leafs data =
map_of_list (fun x -> Node.taxon_code x) data
let disjoin_tree data node =
let leafs = set_of_leafs node in
let tree = Ptree.make_disjoint_tree data leafs in
tree --> PtreeSearch.downpass --> PtreeSearch.uppass
let edges_of_tree tree =
Tree.EdgeSet.fold (fun x acc -> x :: acc) tree.Ptree.tree.Tree.d_edges []
let random_tree (data : Data.d) (nodes : a list) adj_mgr =
let tree : phylogeny =
{ (Ptree.empty data) with
Ptree.tree = Tree.random (List.map Node.taxon_code nodes);
Ptree.node_data = map_of_list Node.taxon_code nodes; }
in
tree --> PtreeSearch.downpass --> PtreeSearch.uppass
let branch_and_bound keep_method max_trees threshold data nodes bound adj_mgr =
let select_appropriate bound_plus_threshold lst =
let lst = List.filter (fun x -> bound_plus_threshold >=
Ptree.get_cost `Adjusted x) lst in
let len = List.length lst in
if len <= max_trees then lst
else
let () = assert (len = max_trees + 1) in
match keep_method with
| `Last ->
(match List.rev lst with
| h :: lst -> List.rev lst
| [] -> assert false)
| `First ->
(match lst with
| h :: lst -> lst
| [] -> assert false)
| `Keep_Random ->
(let arr = Array.of_list lst in
Array_ops.randomize arr;
match Array.to_list arr with
| h :: lst -> lst
| [] -> assert false)
in
let bound = match bound with
| None -> max_float /. 2.
| Some x -> x
and st = Status.create "Branch and Bound Build" (Some 100) "percent complete" in
let () = Status.full_report ~adv:(0) st in
let report_depth,report_percent =
let n t =
let rec n acc t = match t with
| 0 | 1 | 2 | 3 -> acc
| t -> n (acc*(2*t-5)) (t-1)
in
n 1 t
depth=6 will prune ~1 % of the tree ( having 105 possibilities )
and depth = max 1 (min ((List.length nodes)-1) 6) and p = ref 0.0 in
depth,
(fun depth ->
let p_incr = (1.0 /. float (n depth)) *. 100.0 in
p := p_incr +. !p;
Status.full_report ~adv:(int_of_float !p) st)
in
let rec aux_branch_and_bound depth ((bound, best_trees) as acc) tree
edges cur_handle other_handles =
match edges with
| (Tree.Edge (x, y)) :: t ->
if report_depth = depth then report_percent depth;
let new_tree, _ =
TreeOps.join_fn adj_mgr [] (Tree.Edge_Jxn (x,y)) cur_handle tree
in
let new_cost = Ptree.get_cost `Adjusted new_tree in
if new_cost > bound +. threshold then begin
if depth < report_depth then report_percent depth;
aux_branch_and_bound depth acc tree t cur_handle other_handles
end else begin
let acc = match other_handles with
| nh :: oh ->
aux_branch_and_bound (depth+1) acc new_tree
(edges_of_tree new_tree) (Tree.Single_Jxn nh) oh
| [] ->
let realbound = bound +. threshold in
if new_cost < bound then
new_cost, select_appropriate (new_cost +. threshold) (new_tree :: best_trees)
else if new_cost <= realbound then
(bound, select_appropriate realbound (new_tree::best_trees))
else
acc
in
aux_branch_and_bound depth acc tree t cur_handle other_handles
end
| [] -> acc
in
let initial_tree = disjoin_tree data nodes in
let _, trees =
if max_trees < 1 then 0., []
else begin match List.map Node.taxon_code nodes with
| f :: s :: tail ->
let new_tree, _ =
TreeOps.join_fn adj_mgr [] (Tree.Single_Jxn f) (Tree.Single_Jxn s) initial_tree
in
begin match tail with
| t :: tail ->
let edges = edges_of_tree new_tree in
aux_branch_and_bound 3 (bound,[]) new_tree edges (Tree.Single_Jxn t) tail
| [] ->
Ptree.get_cost `Adjusted new_tree, [new_tree]
end
| _ -> 0., [initial_tree]
end
in
let () = Status.finished st in
Sexpr.of_list (List.map PtreeSearch.uppass trees)
let sort_list_of_trees ptrees =
let cost = Ptree.get_cost `Adjusted in
List.sort (fun a b -> compare (cost a) (cost b)) ptrees
let constrained_build cg data n constraint_tree nodes adj_mgr =
let rec randomize_tree tree =
match tree with
| Tree.Parse.Leafp _ -> tree
| Tree.Parse.Nodep (lst, res) ->
let arr = Array.map randomize_tree (Array.of_list lst) in
Array_ops.randomize arr;
Tree.Parse.Nodep ((Array.to_list arr), res)
in
let ptree = disjoin_tree data nodes in
let rec aux_constructor fn ptree tree =
match tree with
| Tree.Parse.Leafp x ->
ptree, (Data.taxon_code (fn x) data)
| Tree.Parse.Nodep ([x], _) ->
aux_constructor fn ptree x
| Tree.Parse.Nodep (lst, _) ->
let ptree, handles =
List.fold_left (fun (ptree, acc) item ->
let ptree, nh = aux_constructor fn ptree item in
ptree, (nh :: acc)) (ptree, []) lst
in
let handles =
List.map
(fun (x : int) -> Ptree.handle_of x ptree)
handles
in
let constraints = List.fold_left (fun acc x ->
let acc = All_sets.Integers.add x acc in
try
let parent = Ptree.get_parent x ptree in
All_sets.Integers.add parent acc
with
| _ -> acc) All_sets.Integers.empty handles
in
let ptrees =
PtreeSearch.make_wagner_tree ~sequence:handles
ptree adj_mgr single_wagner_search_manager
(BuildTabus.wagner_constraint
constraints)
in
let ptrees = sort_list_of_trees ptrees in
match ptrees, handles with
| (ptree :: _), (h :: _) ->
ptree, h
| _ -> assert false
in
let st = Status.create "Constrained Wagner Replicate" (Some n)
"Constrained Wagner tree replicate building" in
let rec total_builder res blt =
Status.full_report ~adv:(n - blt) st;
if blt = n then
let () = Status.finished st in
`Set res
else
let rec deal_with_tree = function
| Tree.Parse.Annotated (t,_) ->
deal_with_tree t
| Tree.Parse.Flat t ->
let tree,_= aux_constructor (fun x-> x) ptree (randomize_tree t) in
tree
| Tree.Parse.Branches t ->
let tree,_= aux_constructor (fst) ptree (randomize_tree t) in
tree
| Tree.Parse.Characters t ->
let tree,_ = aux_constructor (fst) ptree (randomize_tree t) in
tree
in
let tree = deal_with_tree constraint_tree in
let tree = TreeOps.uppass tree in
total_builder ((`Single tree) :: res) (blt + 1)
in
if n < 0 then `Empty
else
total_builder [] 0
(* Build one Wagner tree by sequential leaf addition.
   [data]      taxon/character data used to translate nodes to codes;
   [tabu_mgr]  tabu manager constraining which joins are attempted;
   [wmgr]      wagner search manager (controls how many trees are kept);
   [cg]        code generator (unused here but kept for a uniform interface);
   [nodes]     leaves to add, in the order given;
   [adj_mgr]   optional branch-adjustment manager.
   Returns [`Single t] where [t] is the cheapest tree found, after a full
   uppass; fails if the search produced no tree at all. *)
let single_wagner data tabu_mgr wmgr cg nodes adj_mgr =
    let disjoin_tree = disjoin_tree data nodes
    and nodes = List.map (fun x -> Node.taxon_code x) nodes in
    let ptrees =
        PtreeSearch.make_wagner_tree ~sequence:nodes
            disjoin_tree adj_mgr wmgr tabu_mgr
    in
    (* Cheapest tree first (by adjusted cost). *)
    let ptrees = sort_list_of_trees ptrees in
    match ptrees with
    | hd :: _ -> `Single (PtreeSearch.uppass hd)
    | _ -> failwith "No wagner trees built!"
(* Wagner build with the default (single-tree) search manager; the leaf
   addition order is the order of [nodes] as given. *)
let wagner data tabu_mgr cg nodes adj_mgr =
    single_wagner data tabu_mgr single_wagner_search_manager cg nodes adj_mgr
(* Like [single_wagner], but the addition sequence is produced on demand by
   the [randomize] thunk (e.g. a fresh random permutation per call). *)
let randomized_single_wagner data tabu_mgr randomize wmgr cg adj_mgr =
    let nodes = randomize () in
    single_wagner data tabu_mgr wmgr cg nodes adj_mgr
(* Randomized Wagner build using the default single-tree search manager.
   [data_generator ()] supplies the (randomized) leaf addition sequence. *)
let rand_wagner data tabu_mgr data_generator cg =
    randomized_single_wagner data tabu_mgr data_generator
        single_wagner_search_manager cg
let n_independent_wagner data_generator tabu_mgr cg data n =
let st = Status.create "Wagner Replicate" (Some n) "Wagner tree replicate \
building" in
let mgr = single_wagner_search_manager in
let rec builder cnt acc =
if cnt > 0 then begin
Status.full_report ~adv:(n - cnt) st;
let next =
(randomized_single_wagner data tabu_mgr data_generator mgr cg) :: acc
in
builder (cnt - 1) next
end else begin
Status.finished st;
`Set acc
end
in
builder n []
(* Translate a user-level branch-adjustment specification [(m, b)] into a
   BuildTabus adjustment manager.
   [m] selects the iteration limits: an optional convergence threshold
   and an optional maximum iteration count (`Both carries both, `Always
   means "no limits" in the sense of threshold 0.0 / count 0, `Null none);
   [b] selects which branches are adjusted and by what strategy.
   Always returns [Some mgr]. *)
let create_adjust_manager (m,b) =
    (* Optional convergence threshold extracted from [m]. *)
    let thrsh = match m with
        | `Threshold f
        | `Both (f,_) -> Some f
        | `Always -> Some 0.0
        | `Null
        | `MaxCount _ -> None
    (* Optional iteration-count bound extracted from [m]. *)
    and count = match m with
        | `MaxCount m
        | `Both (_,m) -> Some m
        | `Always -> Some 0
        | `Null
        | `Threshold _ -> None
    in
    (* Branch-selection strategy. *)
    let mgr = match b with
        | `Null -> BuildTabus.simple_nm_none count thrsh
        | `AllBranches -> BuildTabus.simple_nm_all count thrsh
        | `JoinDelta -> BuildTabus.complex_nm_delta count thrsh
        | `Neighborhood x -> BuildTabus.complex_nm_neighborhood x count thrsh
    in
    Some mgr
let pick_tabu_manager = function
BuildTabus.wagner_tabu
| `AllBased _
| `Partition _ ->
BuildTabus.distance_dfs_wagner
let max_n_wagner data threshold tabu_mgr data_generator cg n adj_mgr =
let wmgr = new Queues.wagner_srch_mgr true n threshold in
let nodes = data_generator () in
let nodesl = List.map (fun x -> Node.taxon_code x) nodes in
let res =
PtreeSearch.make_wagner_tree ~sequence:nodesl
(disjoin_tree data nodes) adj_mgr wmgr tabu_mgr
in
`Set (List.map (fun x -> `Single (TreeOps.uppass x)) res)
(* Build a hash table of pairwise distances between all distinct nodes in
   [nodes], keyed by (taxon_code_a, taxon_code_b).
   When [both] is true (the default) each distance is stored under both
   orientations, so lookups need not normalize the key order.
   Note: the dedup check [Hashtbl.mem tmp (ca, cb)] only tests one
   orientation, so with [both = false] the symmetric pair is recomputed
   when the outer loops reach it. *)
let make_distances_table ?(both=true) nodes =
    let tmp = Hashtbl.create 99991 in
    List.iter (fun a ->
        List.iter (fun b ->
            let ca = Node.taxon_code a
            and cb = Node.taxon_code b in
            if ca = cb then ()   (* skip self-distances *)
            else
                if Hashtbl.mem tmp (ca, cb) then ()
                else begin
                    (* 100000. is the cost bound passed to Node.distance;
                       presumably an "effectively unbounded" cap — TODO confirm. *)
                    let d = Node.distance 100000. a b in
                    Hashtbl.add tmp (ca, cb) d;
                    if both then Hashtbl.add tmp (cb, ca) d;
                end) nodes) nodes;
    tmp
(* Heap elements: a float priority paired with an (int * int) payload.
   The ordering looks at the priority alone; the payload never affects it. *)
module OrderedPairs = struct
    type t = (float * (int * int))
    (* Compare priorities only; the annotation pins the comparison to float. *)
    let compare (pa, _) (pb, _) = compare (pa : float) pb
end
module H = Heap.Make (OrderedPairs)
(* Initialize the NJ working table: one singleton leaf tree per node,
   keyed by the node's taxon code. *)
let table_of_trees nodes =
    let tbl = Hashtbl.create 1667 in
    List.iter (fun n -> Hashtbl.add tbl (Node.taxon_code n)
        (Tree.Parse.Leafp (Node.taxon_code n))) nodes;
    tbl
(* The set of taxon codes of [nodes]; these are the initially "active"
   trees for the neighbour-joining merge loop. *)
let set_of_trees nodes =
    List.fold_left (fun acc x ->
        All_sets.Integers.add (Node.taxon_code x) acc)
        All_sets.Integers.empty nodes
(* Reverse application ("pipeline") operator: [x --> f] is [f x]. *)
let (-->) x f = f x
let nj_qtable table terminals =
let d x y = Hashtbl.find table ((min x y), (max x y)) in
let r = float_of_int (All_sets.Integers.cardinal terminals) in
let q_table = Hashtbl.create 99991 in
Hashtbl.iter (fun ((i, j) as p) dist ->
let sum =
All_sets.Integers.fold (fun c sum ->
sum -.
(if c = i then 0. else d c i) -.
(if c = j then 0. else d c j))
terminals 0.
in
Hashtbl.add q_table p (((r -. 2.) *. dist) +. sum)) table;
q_table
(* Neighbour-joining branch length from taxon [f] to the new ancestor of
   [f] and [g]:  0.5*d(f,g) + (1 / (2*(r-2))) * (sum_k d(f,k) - sum_k d(g,k))
   where [r] is the number of active terminals and the sums range over
   [terminals]. Assumes [f] and [g] are not themselves in [terminals]
   (otherwise the self-pair lookup would raise Not_found) — the caller
   (join_trees) removes them first. *)
let nj_distance_to_ancestor table terminals f g =
    (* Distance lookup with the key normalized to (min, max) order. *)
    let d x y = Hashtbl.find table ((min x y), (max x y)) in
    let sum_of_distance_to x =
        All_sets.Integers.fold (fun y sum -> sum +. (d x y)) terminals 0.
    in
    let r = float_of_int (All_sets.Integers.cardinal terminals) in
    ((0.5 *. (d f g))
        +. ((1. /. (2. *. (r -. 2.))) *.
            ((sum_of_distance_to f) -. (sum_of_distance_to g))))
(* Distance from the new NJ ancestor [u] of [f] and [g] to another taxon
   [k]:  0.5*(d(f,k) - d(f,u)) + 0.5*(d(g,k) - d(g,u)),
   where [distance_fu] = d(f,u) and [distance_gu] = d(g,u) were computed
   by [nj_distance_to_ancestor]. *)
let nj_new_distance table distance_fu distance_gu f g k =
    (* Distance lookup with the key normalized to (min, max) order. *)
    let d x y = Hashtbl.find table ((min x y), (max x y)) in
    ((0.5 *. ((d f k) -. distance_fu)) +.
        (0.5 *. ((d g k) -. distance_gu)))
let join_trees code distance_table a b tree_table trees heap =
let trees =
trees
--> All_sets.Integers.remove a
--> All_sets.Integers.remove b
in
let ta = Hashtbl.find tree_table a
and tb = Hashtbl.find tree_table b in
let tab = Tree.Parse.Nodep ([ta; tb], code) in
Hashtbl.add tree_table code tab;
let heap =
let distance_acode =
nj_distance_to_ancestor distance_table trees a b
and distance_bcode =
nj_distance_to_ancestor distance_table trees b a
in
All_sets.Integers.fold (fun c heap ->
let dc =
nj_new_distance distance_table distance_acode
distance_bcode a b c
in
let pair = (code, c) in
Hashtbl.add distance_table pair dc;
H.insert (dc, pair) heap) trees heap
in
let trees = All_sets.Integers.add code trees in
trees, code - 1, heap
(* Pop candidate pairs off the min-heap until one is found whose two
   endpoints are both still active (unmerged), then join them.
   Stale heap entries — pairs mentioning an already-merged tree — are
   simply discarded, which is why membership in [trees] is re-checked. *)
let rec merge_nj_trees code distance_table tree_table trees heap =
    let (_, (a, b)) = H.findMin heap in
    let heap = H.deleteMin heap in
    if All_sets.Integers.mem a trees &&
        All_sets.Integers.mem b trees then
        join_trees code distance_table a b tree_table trees heap
    else
        merge_nj_trees code distance_table tree_table trees heap
let nj data nodes =
let distance_table =
let both = false in
make_distances_table ~both nodes
in
let heap =
Hashtbl.fold
(fun x y acc ->
H.insert (y, x) acc)
distance_table H.empty
in
let tree_table = table_of_trees nodes in
let trees = set_of_trees nodes in
let rec complete_merge code trees heap =
if 1 = All_sets.Integers.cardinal trees then
Hashtbl.find tree_table (All_sets.Integers.choose trees)
else
let trees, code, heap =
merge_nj_trees code distance_table tree_table trees heap
in
complete_merge code trees heap
in
let tree = complete_merge (-1) trees heap in
Tree.Parse.map
(fun x ->
if x >= 0 then Data.code_taxon x data
else "") tree
let distances_ordered nodes =
let distances_table = make_distances_table nodes in
let distances_list =
let tmp =
Hashtbl.fold (fun a b acc ->
(a, b) :: acc) distances_table []
in
List.sort (fun (_, a) (_, b) -> compare a b) tmp
in
let _, addition_list =
let add_one visited acc x =
if All_sets.Integers.mem x visited then visited, acc
else All_sets.Integers.add x visited, x :: acc
in
List.fold_left (fun (visited, acc) ((x, y), _) ->
let v, a = add_one visited acc x in
add_one v a y) (All_sets.Integers.empty, []) distances_list
in
let addition_list = List.rev addition_list in
let rec addition_function lst =
match lst with
| h1 :: ((h2 :: t) as rest) ->
if 0 = Random.int 2 then
h1 :: (addition_function rest)
else h2 :: (addition_function (h1 :: t))
| _ -> lst
in
let create_list () =
let lst = addition_function addition_list in
List.rev (List.map (fun x ->
List.find (fun y ->
x = Node.taxon_code y) nodes) lst)
in
create_list
(* Build a minimum spanning tree over [nodes] (Kruskal, on the pairwise
   distance table) and return a thunk that, when called, yields the nodes
   in BFS order over that MST — used as a Wagner addition sequence.
   The lookup [Hashtbl.find distances_table (a, b)] relies on both
   orientations being present, which make_distances_table's default
   [both = true] guarantees. *)
let mst data nodes =
    let distances_table = make_distances_table nodes in
    let distance_fn a b = Hashtbl.find distances_table (a, b)
    and codes = List.map Node.taxon_code nodes in
    let mst = Mst.kruskal Mst.Closest distance_fn codes in
    let do_mst () =
        (* BFS traversal yields taxon codes; map each back to its node. *)
        let data = Mst.bfs_traversal Mst.Closest2 mst in
        List.map (fun x ->
            List.find (fun y ->
                x = Node.taxon_code y) nodes) data
    in
    do_mst
(* Compute the minimum spanning tree over [nodes] (same construction as
   [mst]) and print it to [filename], rendering taxon codes back into
   taxon names via [data]. *)
let report_mst data nodes filename =
    let distances_table = make_distances_table nodes in
    let distance_fn a b = Hashtbl.find distances_table (a, b)
    and codes = List.map Node.taxon_code nodes in
    let mst = Mst.kruskal Mst.Closest distance_fn codes in
    Mst.print_mst_tree (fun x -> Data.code_taxon x data) mst filename
let max_n_dg_p_wagner data threshold tabu_mgr data_generator cg n p adj_mgr: phylogeny Sexpr.t =
match p, n with
| 0, _ -> `Empty
| 1, 1 ->
let st = Status.create "Building Wagner Tree" None "" in
let res = rand_wagner data tabu_mgr data_generator cg adj_mgr in
Status.finished st;
res
| 1, n ->
let st = Status.create "Building Wagner Tree" None "" in
let res = max_n_wagner data threshold tabu_mgr data_generator cg n adj_mgr in
Status.finished st;
res
| p, _ ->
let builder cnt acc =
let next = max_n_wagner data threshold tabu_mgr data_generator cg n adj_mgr in
next :: acc
in
`Set (Sexpr.compose_status "Wagner build" builder p [])
(* [max_n_dg_p_wagner a b n p] generates [p] independent
 * wagner trees, on each keeping the best [n] trees found on each step,
 * following the addition sequence as specified by the node generating
 * function [b ()]. *)
(* [p] independent Wagner builds, each over a fresh random permutation of
   [nodes], keeping the best [n] trees per step. *)
let max_n_randomized_p_wagner data threshold tabu_mgr cg nodes n p adj =
    let data_generator () = randomize nodes in
    max_n_dg_p_wagner data threshold tabu_mgr data_generator cg n p adj
(* [p] independent Wagner builds whose addition sequence is the BFS order
   of a minimum spanning tree over [nodes]. *)
let max_n_mst_p_wagner data threshold tabu_mgr node_data cg nodes n p adj =
    let mst = mst node_data nodes in
    max_n_dg_p_wagner data threshold tabu_mgr mst cg n p adj
(* [p] independent Wagner builds whose addition sequence follows the
   pairwise-distance ordering produced by [distances_ordered]. *)
let max_n_distances_p_wagner data threshold tabu_mgr cg nodes n p adj =
    let dord = distances_ordered nodes in
    max_n_dg_p_wagner data threshold tabu_mgr dord cg n p adj
let split_in_forests trees =
let make_tree_set_of_taxa acc x =
let rec make_tree_set_of_taxa acc x =
match x with
| Tree.Parse.Leafp name -> All_sets.Strings.add name acc
| Tree.Parse.Nodep (chld, _) ->
List.fold_left make_tree_set_of_taxa acc chld
in
make_tree_set_of_taxa acc (Tree.Parse.strip_tree x)
in
let are_different a acc b =
acc && (All_sets.Strings.is_empty (All_sets.Strings.inter a b))
in
let are_same a acc b =
acc && (0 = (All_sets.Strings.compare a b))
in
let rec are_something pairwise_comparison acc lst =
match lst with
| h :: t ->
let acc = List.fold_left (pairwise_comparison h) acc t in
are_something pairwise_comparison acc t
| [] -> acc
in
let are_all_the_same_set lst =
are_something are_same true lst
and are_all_different lst =
are_something are_different true lst
in
List.fold_left (fun acc (name,x) ->
if 1 = List.length x then (name,x) :: acc
else
let taxa =
List.map (make_tree_set_of_taxa All_sets.Strings.empty) x
in
if are_all_the_same_set taxa then
let x = List.map (fun x -> name,[x]) x in
x @ acc
else if are_all_different taxa then (name,x) :: acc
else
let _ =
Status.user_message Status.Error
("While@ trying@ to@ read@ the@ trees@ from@ the@ "
^ "input@ files@ I@ have@ found@ some@ internal@ "
^ "inconsistencies:@ POY@ can@ read@ either@ forests@ "
^ "or@ trees,@ and@ recognize@ each@ by@ comparing@ "
^ "the@ trees@ in@ memory,@ either@ all@ the@ trees@ "
^ "between@ separators@ (, or ;)@ share@ same@ taxa@ "
^ "in@ which@ case@ I@ treat@ them@ as@ just@ trees@ "
^ "or@ they@ are@ disjoint@, and@ I@ treat@ them@ as@ "
^ "a@ forest.@ Your@ input@ don't@ have@ one@ of@ "
^ "those@ properties.@ I@ think@ you@ intend@ to@ "
^ "read@ just@ trees@, but@ there@ is@ some@ tree@ "
^ "with@ taxa@ that@ doesn't@ appear@ in@ some@ other@ "
^ "tree.@ Sorry,@ I@ can't@ recover@ from@ this,@ and@ "
^ "won't@ load@ the@ trees@ you@ gave@ me.")
in
failwith "Illegal tree input")
[] trees
let prebuilt (trees: (string option * Tree.Parse.tree_types list) list) ((data,_) as sumdata) =
let trees = split_in_forests trees in
let st = Status.create "Loading Trees" (Some (List.length trees)) "" in
let constructor (cnt, lst) x =
Status.full_report ~adv:cnt st;
let t =
current_snapshot "Build.prebuilt.constructor begin";
let tree = PtreeSearch.convert_to x sumdata in
current_snapshot "Build.prebuilt.constructor converted";
let tree = PtreeSearch.downpass tree in
current_snapshot "Build.prebuilt.constructor downpass";
let tree = PtreeSearch.uppass tree in
current_snapshot "Build.prebuilt.constructor uppass";
tree
in
cnt + 1, (`Single t) :: lst
in
let res =
let _, res = List.fold_left constructor (1, []) trees in
`Set res
in
Status.finished st;
res
(* Shadowing wrapper: short-circuit the empty input to an empty tree set
   so the inner [prebuilt] (and its Status bar) only runs on real input. *)
let prebuilt trees sumdata = match trees with
    | [] -> `Set []
    | xs -> prebuilt xs sumdata
let rec build_initial_trees trees data nodes (meth : Methods.build) =
let d = (data, nodes) in
let cg =
let code = ref data.Data.number_of_taxa in
fun () -> incr code; !code
in
let built_tree_report acc trees =
let builder (acc, cnt) t =
let cost = Ptree.get_cost `Adjusted t in
let hd = ("tree_" ^ string_of_int cnt ^ "_cost", string_of_float
cost) in
hd :: acc, cnt + 1
in
builder acc trees
in
let do_constraint file = match file with
| None ->
let hd, tree_list = match Sexpr.to_list trees with
| (h :: _) as t -> h, t
| [] -> failwith "No trees for constraint"
in
let maj = float_of_int (List.length tree_list) in
Ptree.consensus
(PtreeSearch.get_collapse_function None)
(fun code -> Data.code_taxon code data)
(maj)
(Sexpr.to_list trees)
(match data.Data.root_at with
| Some v -> v
| None ->
let f = Sexpr.first trees in
Ptree.choose_leaf f)
| Some file ->
begin match (Data.process_trees data file).Data.trees with
| [((_,[t]), _, _) as one] when Data.verify_trees data one -> t
| _ -> failwith "Illegal input constraint file"
end
in
let perform_build () = match meth with
| `Branch_and_Bound ((bound, threshold, keep_method, max_trees, _),adj_meth) ->
let threshold = match threshold with
| None -> 0.
| Some x -> x
and adj_mgr = create_adjust_manager adj_meth in
branch_and_bound keep_method max_trees threshold data nodes bound adj_mgr
| `Prebuilt file ->
let data = Data.process_trees data file in
let trees = List.filter (Data.verify_trees data) data.Data.trees in
let trees = List.map (fun (a, _, id) -> a) trees in
prebuilt trees d
| `Nj ->
let tree = None, [Tree.Parse.Flat (nj data nodes)] in
prebuilt [tree] d
| `Build (n, build_meth, lst, adj_meth) ->
let new_nodes = nodes
and adj_mgr = create_adjust_manager adj_meth in
if n < 1 then trees
else
begin match build_meth with
| `Constraint (_, threshold, file, _) ->
let constraint_tree = do_constraint file in
constrained_build cg data n constraint_tree nodes adj_mgr
| `Branch_and_Bound ((bound, threshold, keep_method, max_trees, _),_) ->
let threshold =
match threshold with
| None -> 0.
| Some x -> x
in
branch_and_bound keep_method (n * max_trees) threshold data nodes bound adj_mgr
| `Wagner_Rnd (max_n, threshold, _, lst, tabu_mgr) ->
let tabu_mgr = pick_tabu_manager tabu_mgr in
let res =
max_n_randomized_p_wagner data threshold
tabu_mgr cg new_nodes max_n n adj_mgr
in
Sexpr.of_list (Sexpr.to_list res)
| `Wagner_Ordered (max_n, threshold, keep_method, lst, tabu_mgr) ->
let tabu_mgr = pick_tabu_manager tabu_mgr in
let status = Status.create "Wagner ordered build" None "" in
let () =
Status.full_report ~msg:"Building ordered tree" status
in
let hd =
max_n_wagner data threshold tabu_mgr
(fun () -> new_nodes) cg max_n adj_mgr
in
let () =
Status.full_report ~msg:"Building random trees"
status in
let tl =
max_n_randomized_p_wagner data threshold tabu_mgr
cg new_nodes max_n (n - 1) adj_mgr
in
let () = Status.finished status in
Sexpr.of_list (Sexpr.to_list (`Set [hd; tl]))
| `Wagner_Distances (max_n, threshold, keep_method, lst, tabu_mgr) ->
let tabu_mgr = pick_tabu_manager tabu_mgr in
let status =
Status.create "Wagner Distances-ordered build"
None ""
in
let () =
Status.full_report ~msg:"Building trees"
status in
let lst =
max_n_distances_p_wagner data threshold tabu_mgr
cg new_nodes max_n n adj_mgr
in
let () = Status.finished status in
Sexpr.of_list (Sexpr.to_list lst)
| `Wagner_Mst (max_n, threshold, keep_method, lst, tabu_mgr) ->
let tabu_mgr = pick_tabu_manager tabu_mgr in
let status =
Status.create "Wagner MST-ordered build"
None ""
in
let () =
Status.full_report ~msg:"Building trees"
status in
let lst =
max_n_mst_p_wagner data threshold tabu_mgr data
cg new_nodes max_n n adj_mgr
in
let () = Status.finished status in
Sexpr.of_list (Sexpr.to_list lst)
| `Nj
| (`Prebuilt _) as x -> build_initial_trees trees data nodes x
| `Build_Random _ ->
let st = Status.create "Random Trees build" (Some n) "" in
let arr =
Array.init n
(fun x ->
Status.full_report ~adv:x st;
random_tree data nodes adj_mgr)
in
Status.finished st;
Sexpr.of_list (Array.to_list arr)
end;
| `Build_Random ((n, _, _, _, _),adj_meth) ->
let st = Status.create "Random Trees build" (Some n) ""
and adj_mgr = create_adjust_manager adj_meth in
let arr =
Array.init n
(fun x ->
Status.full_report ~adv:x st;
random_tree data nodes adj_mgr)
in
Status.finished st;
Sexpr.of_list (Array.to_list arr)
in
Sadman.start "build" (build_features meth);
let timer = Timer.start () in
let res = perform_build () in
let time = Timer.get_user timer in
let report, n =
Sexpr.fold_left (fun acc x -> built_tree_report acc x) ([], 0) res
in
Sadman.finish ((TreeSearch.search_time_and_trees_considered time n) @ report);
res
end
module Make (NodeH : NodeSig.S with type other_n = Node.Standard.n)
(EdgeH : Edge.EdgeSig with type n = NodeH.n)
(TreeOps : Ptree.Tree_Operations with type a = NodeH.n with type b = EdgeH.e) =
struct
type a = NodeH.n
type b = EdgeH.e
module TOH = TreeOps
module TOS = Chartree.TreeOps
module NodeS = Node.Standard
module DH = MakeNormal (NodeH) (EdgeH) (TreeOps)
module SH = MakeNormal (NodeS) (Edge.SelfEdge) (TOS)
let report_mst = DH.report_mst
let prebuilt = DH.prebuilt
let replace_contents downpass uppass get_code nodes data ptree =
let nt = { (Ptree.empty data) with Ptree.tree = ptree.Ptree.tree } in
nodes
--> List.fold_left
(fun nt node ->
Ptree.add_node_data (get_code node) node nt) nt
--> downpass
--> uppass
let from_s_to_h = replace_contents TOH.downpass TOH.uppass NodeH.taxon_code
let from_h_to_s = replace_contents TOS.downpass TOS.uppass NodeS.taxon_code
let build_initial_trees trees data n b =
let has_dyn = Data.has_dynamic data in
let has_lik = Data.has_static_likelihood data in
if has_dyn || has_lik then
DH.build_initial_trees trees data n b
else
let s_nodes = List.map NodeH.to_other n in
let trees = Sexpr.map (from_h_to_s s_nodes data) trees in
let trees = SH.build_initial_trees trees data s_nodes b in
Sexpr.map (from_s_to_h n data) trees
end
|
0ed8288761ddeae678c358bb0d72396d86a4c7ced2377beeb54ba6c94e2aacb9 | AbstractMachinesLab/caramel | uri0.mli | open! Import
include Json.Jsonable.S
val equal : t -> t -> bool
val to_dyn : t -> Dyn.t
val hash : t -> int
val to_path : t -> string
val of_path : string -> t
val to_string : t -> string
val pp : Format.formatter -> t -> unit
| null | https://raw.githubusercontent.com/AbstractMachinesLab/caramel/7d4e505d6032e22a630d2e3bd7085b77d0efbb0c/vendor/ocaml-lsp-1.4.0/lsp/src/uri0.mli | ocaml | open! Import
include Json.Jsonable.S
val equal : t -> t -> bool
val to_dyn : t -> Dyn.t
val hash : t -> int
val to_path : t -> string
val of_path : string -> t
val to_string : t -> string
val pp : Format.formatter -> t -> unit
| |
701692e9ea646abf39cefd36689b80cec75f67229f605119535f3cb7261a4067 | stuarthalloway/programming-clojure | chain_4.clj | (ns examples.test.macros.chain-4
(:use clojure.test examples.macros.chain-4))
;; Checks one level of expansion of the `chain` macro:
;; - with two arguments it expands directly to a Java interop form (. a b);
;; - with three it peels one step, re-emitting `chain` on the partial result.
(deftest test-chain-4
  (are [x y] (= x y)
       ;; two-argument case: plain member access
       (macroexpand-1 '(examples.macros.chain-4/chain a b)) '(. a b)
       ;; three-argument case: one expansion step, recursion left in place
       (macroexpand-1 '(examples.macros.chain-4/chain a b c)) '(examples.macros.chain-4/chain (. a b) (c))))
| null | https://raw.githubusercontent.com/stuarthalloway/programming-clojure/192e2f28d797fd70e50778aabd031b3ff55bd2b9/test/examples/test/macros/chain_4.clj | clojure | (ns examples.test.macros.chain-4
(:use clojure.test examples.macros.chain-4))
(deftest test-chain-4
(are [x y] (= x y)
(macroexpand-1 '(examples.macros.chain-4/chain a b)) '(. a b)
(macroexpand-1 '(examples.macros.chain-4/chain a b c)) '(examples.macros.chain-4/chain (. a b) (c))))
| |
68e359d7b5498a731645eedd5735053904b82ed703206dc7878b8d388ca53e63 | yansh/MoanaML | moana.ml |
* Copyright ( c ) 2014
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2014 Yan Shvartzshnaider
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
(* SIGNATURES *)
(* Backend storage interface: a persistent store of tuples. Implementations
   of STORE are lifted into the GRAPH abstraction by the Make functor below. *)
module type STORE =
sig
type t
(* the empty store *)
val empty : t
(* human-readable name of the storage backend (used in log messages) *)
val name : string
(* build a store from a tuple list; [?query] optionally records the query
   the tuples answer — its exact role depends on the backend *)
val init : ?query:Config.tuple list -> Config.tuple list -> t
(* add a single tuple, returning the updated store *)
val add : t -> Config.tuple -> t
(* provide a graph query as list of tuples and returns list of tuples *)
(* matching it *)
val query : t -> Config.tuple list -> Config.tuple list list
(* return stored graph as set of tuples *)
val to_list: t -> Config.tuple list
end;;
Signature for the abstraction which will support many types of
(* backend storage. *)
(* Graph abstraction presented to clients; backed by any STORE via Make. *)
module type GRAPH =
sig
(*type tuple*)
type t
(* build a graph from a tuple list (optionally tagged with the query
   those tuples answer) *)
val init : ?query:Config.tuple list -> Config.tuple list -> t
(* add fact as a tuple *)
val add : t -> Config.tuple -> t
(* run the query given as [tuples] against the graph and return the
   matching results as a new graph *)
val map : t -> tuples:(Config.tuple list) -> t
(* return the graph's contents as a tuple list *)
val to_list: t -> Config.tuple list
end;;
Functor from STORE to GRAPH
module Make(S: STORE):(GRAPH with type t = S.t) = struct
type t = S.t
let init = S.init
let add g (tuple : Config.tuple) =
let s = Printf.sprintf "Adding fact to %s" S.name in
print_endline s;
S.add g tuple ;;
let map g ~tuples:query= S.query g query |> Helper.flatten_tuple_list |> S.init ~query
let to_list = S.to_list
let to_string graph =
let dbList = S.to_list graph in
let rec
match dbList with
| [ ] - > " Finished\n "
| head : : rest - >
Helper.to_string head ^ " \n " ^
string_lst rest in
string_lst dbList ; ;
let dbList = S.to_list graph in
let rec string_lst dbList =
match dbList with
| [] -> "Finished\n"
| head :: rest ->
Helper.to_string head ^ "\n" ^
string_lst rest in
string_lst dbList ;;*)
end ;;
| null | https://raw.githubusercontent.com/yansh/MoanaML/c9843c10a0624e1c06e185e3dd1e7d877270d0d7/moana.ml | ocaml | SIGNATURES
storage name
provide a graph query as list of tuples and returns list of tuples
matching it
return stored graph as set of tuples
backend storage.
type tuple
add fact as a tuple
specify a query as list of tuple, this will return a matching list of |
* Copyright ( c ) 2014
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2014 Yan Shvartzshnaider
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
module type STORE =
sig
type t
val empty : t
val name : string
val init : ?query:Config.tuple list -> Config.tuple list -> t
val add : t -> Config.tuple -> t
val query : t -> Config.tuple list -> Config.tuple list list
val to_list: t -> Config.tuple list
end;;
Signature for the abstraction which will support many types of
module type GRAPH =
sig
type t
val init : ?query:Config.tuple list -> Config.tuple list -> t
val add : t -> Config.tuple -> t
val map : t -> tuples:(Config.tuple list) -> t
val to_list: t -> Config.tuple list
end;;
Functor from STORE to GRAPH
module Make(S: STORE):(GRAPH with type t = S.t) = struct
type t = S.t
let init = S.init
let add g (tuple : Config.tuple) =
let s = Printf.sprintf "Adding fact to %s" S.name in
print_endline s;
S.add g tuple ;;
let map g ~tuples:query= S.query g query |> Helper.flatten_tuple_list |> S.init ~query
let to_list = S.to_list
let to_string graph =
let dbList = S.to_list graph in
let rec
match dbList with
| [ ] - > " Finished\n "
| head : : rest - >
Helper.to_string head ^ " \n " ^
string_lst rest in
string_lst dbList ; ;
let dbList = S.to_list graph in
let rec string_lst dbList =
match dbList with
| [] -> "Finished\n"
| head :: rest ->
Helper.to_string head ^ "\n" ^
string_lst rest in
string_lst dbList ;;*)
end ;;
|
a02a805c813d1025bb7770e747dd1849fd21a3e8e9fabe21c5c079fefdd004ba | shterrett/haskell-road | IAR.hs | # LANGUAGE NoMonomorphismRestriction #
module Book.IAR
where
import Data.List
import Book.STAL (display)
-- | Sum of the first @n@ odd numbers, computed directly from the definition.
sumOdds' :: Integer -> Integer
sumOdds' n = sum (map (\k -> 2 * k - 1) [1 .. n])
-- | Closed form for the sum of the first @n@ odd numbers: @n^2@
-- (provable by induction; see 'sumOdds'' for the direct version).
sumOdds :: Integer -> Integer
sumOdds n = n * n
-- | Sum of the first @n@ even numbers, computed directly from the definition.
sumEvens' :: Integer -> Integer
sumEvens' n = sum (map (2 *) [1 .. n])
-- | Closed form for the sum of the first @n@ even numbers: @n*(n+1)@,
-- written here in expanded form.
sumEvens :: Integer -> Integer
sumEvens n = n * n + n
-- | Gauss's closed form for 1 + 2 + ... + n.
sumInts :: Integer -> Integer
sumInts n = div (n * (n + 1)) 2
-- | Sum of the first @n@ squares, computed directly from the definition.
sumSquares' :: Integer -> Integer
sumSquares' n = sum (map (^ 2) [1 .. n])
-- | Closed form for the sum of the first @n@ squares: n(n+1)(2n+1)/6.
sumSquares :: Integer -> Integer
sumSquares n = div (n * (n + 1) * (2 * n + 1)) 6
-- | Sum of the first @n@ cubes, computed directly from the definition.
sumCubes' :: Integer -> Integer
sumCubes' n = sum (map (^ 3) [1 .. n])
-- | Closed form for the sum of the first @n@ cubes: the square of the
-- triangular number n(n+1)/2 (Nicomachus's theorem).
sumCubes :: Integer -> Integer
sumCubes n = sq * sq
  where sq = div (n * (n + 1)) 2
-- | Peano naturals: zero and successor.
data Natural = Z | S Natural
  deriving (Eq, Show)

-- | Addition, by recursion on the second argument.
plus :: Natural -> Natural -> Natural
plus m n = case n of
  Z    -> m
  S n' -> S (plus m n')

-- | Multiplication as iterated addition.
mult :: Natural -> Natural -> Natural
mult m n = case n of
  Z    -> Z
  S n' -> plus (mult m n') m

-- | Exponentiation as iterated multiplication; @expn m Z = S Z@ (i.e. 1).
expn :: Natural -> Natural -> Natural
expn m n = case n of
  Z    -> S Z
  S n' -> mult (expn m n') m

-- | Less-than-or-equal, by simultaneous recursion.
leq :: Natural -> Natural -> Bool
leq Z     _     = True
leq (S _) Z     = False
leq (S a) (S b) = leq a b

-- | The remaining comparisons, all defined via 'leq'.
geq, gt, lt :: Natural -> Natural -> Bool
geq m n = leq n m
gt  m n = not (leq m n)
lt  m n = gt n m

-- | Primitive iteration: apply @step@ to @base@ as many times as @n@.
foldn :: (a -> a) -> a -> Natural -> a
foldn step base n = case n of
  Z    -> base
  S n' -> step (foldn step base n')

-- | One exclamation mark per unit of the argument.
exclaim :: Natural -> String
exclaim = foldn ('!' :) ""
-- | Accept bit lists with no two consecutive zeros except possibly a
-- trailing single 0. NOTE: clause order matters — @(1:rest)@ is tried
-- before @(0:1:rest)@, and @[0]@ before both, exactly as in the original.
bittest :: [Int] -> Bool
bittest bits = case bits of
  []         -> True
  [0]        -> True
  (1:rest)   -> bittest rest
  (0:1:rest) -> bittest rest
  _          -> False
-- | Fibonacci numbers by the naive doubly-recursive definition
-- (exponential time; kept deliberately simple).
fib n
  | n == 0    = 0
  | n == 1    = 1
  | otherwise = fib (n - 1) + fib (n - 2)
-- | Fibonacci numbers by accumulation: linear time, constant stack of
-- live accumulators (a, b) holding consecutive Fibonacci values.
fib' n = go 0 1 n
  where
    go a _ 0 = a
    go a b k = go b (a + b) (k - 1)
-- | Binary trees: a leaf @L@ or an internal node with two children.
data BinTree = L | N BinTree BinTree deriving Show

-- | The complete (perfectly balanced) binary tree of depth @n@:
-- 2^(n+1) - 1 nodes in total.
makeBinTree :: Integer -> BinTree
makeBinTree 0 = L
makeBinTree n = N (makeBinTree (n-1)) (makeBinTree (n-1))

-- | Total number of nodes (leaves included).
count :: BinTree -> Integer
count L = 1
count (N t1 t2) = 1 + count t1 + count t2

-- | Depth: 0 for a leaf, otherwise one more than the deeper child.
depth :: BinTree -> Integer
depth L = 0
depth (N t1 t2) = (max (depth t1) (depth t2)) + 1

-- | A tree is balanced when, at every node, both subtrees are balanced
-- and have equal depth.
-- FIXED: the original recomputed 'depth' at every node on top of the
-- recursive 'balanced' calls (O(n^2) worst case); this version makes a
-- single pass that returns the depth when the subtree is balanced and
-- fails fast otherwise. Same results, same interface.
balanced :: BinTree -> Bool
balanced t = balancedDepth t /= Nothing
  where
    -- Just d when the subtree is balanced with depth d; Nothing otherwise.
    balancedDepth :: BinTree -> Maybe Integer
    balancedDepth L = Just 0
    balancedDepth (N l r) = do
      dl <- balancedDepth l
      dr <- balancedDepth r
      if dl == dr then Just (dl + 1) else Nothing
-- | Int-labelled binary trees: empty leaf 'Lf' or a labelled node 'Nd'.
data Tree = Lf | Nd Int Tree Tree deriving Show
-- | Polymorphic binary trees with labels of type @a@ on internal nodes.
data Tr a = Nil | T a (Tr a) (Tr a) deriving (Eq,Show)
-- | A dictionary represented as a tree of (key, value) string pairs.
type Dict = Tr (String,String)
split :: [a] -> ([a],a,[a])
split xs = (ys1,y,ys2)
where
ys1 = take n xs
(y:ys2) = drop n xs
n = length xs `div` 2
-- | Binary trees that carry data only at the leaves.
data LeafTree a = Leaf a
                | Node (LeafTree a) (LeafTree a) deriving Show

-- | Example leaf tree: ("I" ("love" "you")).
ltree :: LeafTree String
ltree = Node
            (Leaf "I")
            (Node
                (Leaf "love")
                (Leaf "you"))
-- | Rose trees: a leaf 'Bud' with a value, or a branch over any number
-- of subtrees.
data Rose a = Bud a | Br [Rose a] deriving (Eq,Show)

-- | Example rose tree with nested branching.
rose = Br [Bud 1, Br [Bud 2, Bud 3, Br [Bud 4, Bud 5, Bud 6]]]
-- | Length of a list with a polymorphic numeric result (unlike
-- 'Prelude.length', which is pinned to Int).
len xs = case xs of
  []      -> 0
  (_ : t) -> 1 + len t
-- | List concatenation, expressed as a right fold: rebuild the first
-- list's spine with the second list as the final tail (same as '++').
cat :: [a] -> [a] -> [a]
cat xs ys = foldr (:) ys xs
-- | Sum of a list of Peano naturals (fold of 'plus' with unit Z).
add = foldr plus Z
-- | Product of a list of Peano naturals (fold of 'mult' with unit S Z).
mlt = foldr mult (S Z)
-- | List length as a Peano natural: one successor per element.
ln :: [a] -> Natural
ln = foldr (\ _ n -> S n) Z
-- | Reverse via a left fold, consing each element onto the accumulator
-- (linear time).
rev = foldl (flip (:)) []
-- | Reverse via a right fold, appending each element at the end
-- (quadratic time; kept as the reference implementation).
rev' = foldr (\x acc -> acc ++ [x]) []
-- | The three pegs of the Towers of Hanoi.
data Peg = A | B | C
-- | A configuration: the disc stacks on pegs A, B, C, each with the
-- topmost (smallest) disc at the head of its list.
type Tower = ([Int], [Int], [Int])

-- | Move the top disc from the first peg to the second.
-- Partial: there is no case for equal pegs or an empty source stack.
move :: Peg -> Peg -> Tower -> Tower
move A B (x:xs,ys,zs) = (xs,x:ys,zs)
move B A (xs,y:ys,zs) = (y:xs,ys,zs)
move A C (x:xs,ys,zs) = (xs,ys,x:zs)
move C A (xs,ys,z:zs) = (z:xs,ys,zs)
move B C (xs,y:ys,zs) = (xs,ys,y:zs)
move C B (xs,ys,z:zs) = (xs,z:ys,zs)

-- | Classic recursive solution: the list of configurations (starting
-- configuration included) reached while moving the top @n@ discs from
-- @p@ to @q@ using @r@ as spare. Note that @tower'@ re-runs the first
-- recursive call just to obtain its last state, so the work is
-- duplicated at each level.
transfer :: Peg -> Peg -> Peg -> Int -> Tower -> [Tower]
transfer _ _ _ 0 tower = [tower]
transfer p q r n tower = transfer p r q (n-1) tower
                         ++
                         transfer r q p (n-1) (move p q tower')
  where tower' = last (transfer p r q (n-1) tower)

-- | All configurations while moving @n@ discs from peg A to peg C.
hanoi :: Int -> [Tower]
hanoi n = transfer A C B n ([1..n],[],[])
check :: Int -> Tower -> Bool
check 0 t = t == ([],[],[])
check n (xs,ys,zs)
| xs /= [] && last xs == n = check (n-1) (init xs, zs, ys)
| zs /= [] && last zs == n = check (n-1) (ys, xs, init zs)
| otherwise = False
maxT :: Tower -> Int
maxT (xs, ys, zs) = foldr max 0 (xs ++ ys ++ zs)
checkT :: Tower -> Bool
checkT t = check (maxT t) t
parity :: Tower -> (Int,Int,Int)
parity (xs,ys,zs) = par (xs ++ [n+1], ys ++ [n],zs ++ [n+1])
where
n = maxT (xs, ys, zs)
par (x:xs,y:ys,z:zs) = (mod x 2, mod y 2, mod z 2)
target :: Tower -> Peg
target t@(xs,ys,zs) | parity t == (0,1,1) = A
| parity t == (1,0,1) = B
| parity t == (1,1,0) = C
-- | Move disc 1 (wherever it sits on top) to the peg given by 'target'.
-- Partial: assumes disc 1 is on top of some peg.
move1 :: Tower -> Tower
move1 t@(1:_,ys,zs) = move A (target t) t
move1 t@(xs,1:_,zs) = move B (target t) t
move1 t@(xs,ys,1:_) = move C (target t) t

-- | The unique legal move that does not involve disc 1: between the two
-- pegs not holding disc 1, move the smaller top disc onto the other
-- (empty pegs are handled first; otherwise the list comparison picks
-- the stack with the smaller head).
move2 :: Tower -> Tower
move2 t@(1:xs,[],zs) = move C B t
move2 t@(1:xs,ys,[]) = move B C t
move2 t@(1:xs,ys,zs) = if ys < zs then move B C t else move C B t
move2 t@([],1:ys,zs) = move C A t
move2 t@(xs,1:ys,[]) = move A C t
move2 t@(xs,1:ys,zs) = if xs < zs then move A C t else move C A t
move2 t@([],ys,1:zs) = move B A t
move2 t@(xs,[],1:zs) = move A B t
move2 t@(xs,ys,1:zs) = if xs < ys then move A B t else move B A t

-- | Finished when pegs A and B are empty (all discs are on C).
done :: Tower -> Bool
done ([],[], _) = True
done (xs,ys,zs) = False

-- | Iterative solution: alternate disc-1 moves with the forced other
-- move, collecting every intermediate configuration.
transfer1, transfer2 :: Tower -> [Tower]
transfer1 t = t : transfer2 (move1 t)
transfer2 t = if done t then [t] else t : transfer1 (move2 t)

-- | All configurations of the iterative solution starting from @n@
-- discs stacked on peg A.
hanoi' :: Int -> [Tower]
hanoi' n = transfer1 ([1..n],[],[])

-- | The monks' 64-disc tower: a lazily produced list of 2^64 states.
zazen :: [Tower]
zazen = hanoi' 64
-- | The configuration of an @n@-disc game after exactly @k@ moves of the
-- optimal solution, computed directly (without replaying the moves) by
-- recursing on which half of the 2^n - 1 move sequence @k@ falls in.
-- Errors when @k@ is negative or exceeds 2^n - 1 (the guards below).
hanoiCount :: Int -> Integer -> Tower
hanoiCount n k | k < 0 = error "argument negative"
               | k > 2^n - 1 = error "argument not in range"
               | k == 0 = ([1..n],[],[])
               | k == 2^n - 1 = ([],[],[1..n])
               -- first half: disc n still on A; sub-game solved towards C,
               -- so the roles of the last two pegs are swapped
               | k < 2^(n-1) = (xs ++ [n], zs, ys)
               -- second half: disc n already moved to C
               | k >= 2^(n-1) = (ys', xs', zs' ++ [n])
  where
    (xs,ys,zs) = hanoiCount (n-1) k
    (xs',ys',zs') = hanoiCount (n-1) (k - 2^(n-1))

-- | Enumerate all tower configurations across game sizes: derives a disc
-- count @k@ and a move offset @m@ from the single index @n@ via base-2
-- logarithms, then defers to 'hanoiCount'. NOTE(review): correctness
-- depends on floating-point 'logBase'/'truncate' behaving exactly at
-- powers of two — not verifiable from this file alone.
toTower :: Integer -> Tower
toTower n = hanoiCount k m
  where
    n' = fromInteger (n+1)
    k = truncate (logBase 2 n')
    m = truncate (n' - 2^k)
-- | Propositional formulas over atoms P0, P1, ... built from
-- conjunction, disjunction and negation.
data Form = P Int | Conj Form Form | Disj Form Form | Neg Form

-- Render atoms as "Pn", negation with a prefix '~', and wrap every
-- binary connective ('&' / 'v') in parentheses.
instance Show Form where
  show (P n)      = "P" ++ show n
  show (Conj l r) = "(" ++ show l ++ " & " ++ show r ++ ")"
  show (Disj l r) = "(" ++ show l ++ " v " ++ show r ++ ")"
  show (Neg g)    = "~" ++ show g

-- | All subformulas, the formula itself first; duplicates are kept.
subforms :: Form -> [Form]
subforms f = f : below f
  where
    below (P _)      = []
    below (Conj l r) = subforms l ++ subforms r
    below (Disj l r) = subforms l ++ subforms r
    below (Neg g)    = subforms g

-- | Number of connectives (Conj, Disj, Neg) in a formula.
ccount :: Form -> Int
ccount f = case f of
  P _      -> 0
  Conj l r -> 1 + ccount l + ccount r
  Disj l r -> 1 + ccount l + ccount r
  Neg g    -> 1 + ccount g

-- | Number of atom occurrences in a formula.
acount :: Form -> Int
acount f = case f of
  P _      -> 1
  Conj l r -> acount l + acount r
  Disj l r -> acount l + acount r
  Neg g    -> acount g
| null | https://raw.githubusercontent.com/shterrett/haskell-road/4af8253fc4475e5689b0ce59da468a3a6c92872e/src/Book/IAR.hs | haskell | # LANGUAGE NoMonomorphismRestriction #
module Book.IAR
where
import Data.List
import Book.STAL (display)
sumOdds' :: Integer -> Integer
sumOdds' n = sum [ 2*k - 1 | k <- [1..n] ]
sumOdds :: Integer -> Integer
sumOdds n = n^2
sumEvens' :: Integer -> Integer
sumEvens' n = sum [ 2*k | k <- [1..n] ]
sumEvens :: Integer -> Integer
sumEvens n = n * (n+1)
sumInts :: Integer -> Integer
sumInts n = (n * (n+1)) `div` 2
sumSquares' :: Integer -> Integer
sumSquares' n = sum [ k^2 | k <- [1..n] ]
sumSquares :: Integer -> Integer
sumSquares n = (n*(n+1)*(2*n+1)) `div` 6
sumCubes' :: Integer -> Integer
sumCubes' n = sum [ k^3 | k <- [1..n] ]
sumCubes :: Integer -> Integer
sumCubes n = (n*(n+1) `div` 2)^2
data Natural = Z | S Natural
deriving (Eq, Show)
plus m Z = m
plus m (S n) = S (plus m n)
m `mult` Z = Z
m `mult` (S n) = (m `mult` n) `plus` m
expn m Z = (S Z)
expn m (S n) = (expn m n) `mult` m
leq Z _ = True
leq (S _) Z = False
leq (S m) (S n) = leq m n
geq m n = leq n m
gt m n = not (leq m n)
lt m n = gt n m
foldn :: (a -> a) -> a -> Natural -> a
foldn h c Z = c
foldn h c (S n) = h (foldn h c n)
exclaim :: Natural -> String
exclaim = foldn ('!':) []
bittest :: [Int] -> Bool
bittest [] = True
bittest [0] = True
bittest (1:xs) = bittest xs
bittest (0:1:xs) = bittest xs
bittest _ = False
fib 0 = 0
fib 1 = 1
fib n = fib (n-1) + fib (n-2)
fib' n = fib2 0 1 n where
fib2 a b 0 = a
fib2 a b n = fib2 b (a+b) (n-1)
data BinTree = L | N BinTree BinTree deriving Show
makeBinTree :: Integer -> BinTree
makeBinTree 0 = L
makeBinTree n = N (makeBinTree (n-1)) (makeBinTree (n-1))
count :: BinTree -> Integer
count L = 1
count (N t1 t2) = 1 + count t1 + count t2
depth :: BinTree -> Integer
depth L = 0
depth (N t1 t2) = (max (depth t1) (depth t2)) + 1
balanced :: BinTree -> Bool
balanced L = True
balanced (N t1 t2) = (balanced t1)
&& (balanced t2)
&& depth t1 == depth t2
data Tree = Lf | Nd Int Tree Tree deriving Show
data Tr a = Nil | T a (Tr a) (Tr a) deriving (Eq,Show)
type Dict = Tr (String,String)
split :: [a] -> ([a],a,[a])
split xs = (ys1,y,ys2)
where
ys1 = take n xs
(y:ys2) = drop n xs
n = length xs `div` 2
data LeafTree a = Leaf a
| Node (LeafTree a) (LeafTree a) deriving Show
ltree :: LeafTree String
ltree = Node
(Leaf "I")
(Node
(Leaf "love")
(Leaf "you"))
data Rose a = Bud a | Br [Rose a] deriving (Eq,Show)
rose = Br [Bud 1, Br [Bud 2, Bud 3, Br [Bud 4, Bud 5, Bud 6]]]
len [] = 0
len (x:xs) = 1 + len xs
cat :: [a] -> [a] -> [a]
cat [] ys = ys
cat (x:xs) ys = x : (cat xs ys)
add = foldr plus Z
mlt = foldr mult (S Z)
ln :: [a] -> Natural
ln = foldr (\ _ n -> S n) Z
rev = foldl (\ xs x -> x:xs) []
rev' = foldr (\ x xs -> xs ++ [x]) []
data Peg = A | B | C
type Tower = ([Int], [Int], [Int])
move :: Peg -> Peg -> Tower -> Tower
move A B (x:xs,ys,zs) = (xs,x:ys,zs)
move B A (xs,y:ys,zs) = (y:xs,ys,zs)
move A C (x:xs,ys,zs) = (xs,ys,x:zs)
move C A (xs,ys,z:zs) = (z:xs,ys,zs)
move B C (xs,y:ys,zs) = (xs,ys,y:zs)
move C B (xs,ys,z:zs) = (xs,z:ys,zs)
transfer :: Peg -> Peg -> Peg -> Int -> Tower -> [Tower]
transfer _ _ _ 0 tower = [tower]
transfer p q r n tower = transfer p r q (n-1) tower
++
transfer r q p (n-1) (move p q tower')
where tower' = last (transfer p r q (n-1) tower)
hanoi :: Int -> [Tower]
hanoi n = transfer A C B n ([1..n],[],[])
check :: Int -> Tower -> Bool
check 0 t = t == ([],[],[])
check n (xs,ys,zs)
| xs /= [] && last xs == n = check (n-1) (init xs, zs, ys)
| zs /= [] && last zs == n = check (n-1) (ys, xs, init zs)
| otherwise = False
maxT :: Tower -> Int
maxT (xs, ys, zs) = foldr max 0 (xs ++ ys ++ zs)
checkT :: Tower -> Bool
checkT t = check (maxT t) t
parity :: Tower -> (Int,Int,Int)
parity (xs,ys,zs) = par (xs ++ [n+1], ys ++ [n],zs ++ [n+1])
where
n = maxT (xs, ys, zs)
par (x:xs,y:ys,z:zs) = (mod x 2, mod y 2, mod z 2)
target :: Tower -> Peg
target t@(xs,ys,zs) | parity t == (0,1,1) = A
| parity t == (1,0,1) = B
| parity t == (1,1,0) = C
move1 :: Tower -> Tower
move1 t@(1:_,ys,zs) = move A (target t) t
move1 t@(xs,1:_,zs) = move B (target t) t
move1 t@(xs,ys,1:_) = move C (target t) t
move2 :: Tower -> Tower
move2 t@(1:xs,[],zs) = move C B t
move2 t@(1:xs,ys,[]) = move B C t
move2 t@(1:xs,ys,zs) = if ys < zs then move B C t else move C B t
move2 t@([],1:ys,zs) = move C A t
move2 t@(xs,1:ys,[]) = move A C t
move2 t@(xs,1:ys,zs) = if xs < zs then move A C t else move C A t
move2 t@([],ys,1:zs) = move B A t
move2 t@(xs,[],1:zs) = move A B t
move2 t@(xs,ys,1:zs) = if xs < ys then move A B t else move B A t
done :: Tower -> Bool
done ([],[], _) = True
done (xs,ys,zs) = False
transfer1, transfer2 :: Tower -> [Tower]
transfer1 t = t : transfer2 (move1 t)
transfer2 t = if done t then [t] else t : transfer1 (move2 t)
hanoi' :: Int -> [Tower]
hanoi' n = transfer1 ([1..n],[],[])
zazen :: [Tower]
zazen = hanoi' 64
hanoiCount :: Int -> Integer -> Tower
hanoiCount n k | k < 0 = error "argument negative"
| k > 2^n - 1 = error "argument not in range"
| k == 0 = ([1..n],[],[])
| k == 2^n - 1 = ([],[],[1..n])
| k < 2^(n-1) = (xs ++ [n], zs, ys)
| k >= 2^(n-1) = (ys', xs', zs' ++ [n])
where
(xs,ys,zs) = hanoiCount (n-1) k
(xs',ys',zs') = hanoiCount (n-1) (k - 2^(n-1))
toTower :: Integer -> Tower
toTower n = hanoiCount k m
where
n' = fromInteger (n+1)
k = truncate (logBase 2 n')
m = truncate (n' - 2^k)
data Form = P Int | Conj Form Form | Disj Form Form | Neg Form
instance Show Form where
show (P i) = 'P':show i
show (Conj f1 f2) = "(" ++ show f1 ++ " & " ++ show f2 ++ ")"
show (Disj f1 f2) = "(" ++ show f1 ++ " v " ++ show f2 ++ ")"
show (Neg f) = "~" ++ show f
subforms :: Form -> [Form]
subforms (P n) = [(P n)]
subforms (Conj f1 f2) = (Conj f1 f2):(subforms f1 ++ subforms f2)
subforms (Disj f1 f2) = (Disj f1 f2):(subforms f1 ++ subforms f2)
subforms (Neg f) = (Neg f):(subforms f)
ccount :: Form -> Int
ccount (P n) = 0
ccount (Conj f1 f2) = 1 + (ccount f1) + (ccount f2)
ccount (Disj f1 f2) = 1 + (ccount f1) + (ccount f2)
ccount (Neg f) = 1 + (ccount f)
acount :: Form -> Int
acount (P n) = 1
acount (Conj f1 f2) = (acount f1) + (acount f2)
acount (Disj f1 f2) = (acount f1) + (acount f2)
acount (Neg f) = acount f
| |
2297de22512d87fb57d3492158f43bb80b57f76be972a48f10dd4b405c0ea13e | cky/guile | server.scm | ;;; Web server
Copyright ( C ) 2010 , 2011 , 2012 , 2013 , 2015 Free Software Foundation , Inc.
;; This library is free software; you can redistribute it and/or
;; modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 3 of the License , or ( at your option ) any later version .
;;
;; This library is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;; Lesser General Public License for more details.
;;
You should have received a copy of the GNU Lesser General Public
;; License along with this library; if not, write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA
02110 - 1301 USA
;;; Commentary:
;;;
;;; (web server) is a generic web server interface, along with a main
loop implementation for web servers controlled by .
;;;
;;; The lowest layer is the <server-impl> object, which defines a set of
;;; hooks to open a server, read a request from a client, write a
;;; response to a client, and close a server. These hooks -- open,
;;; read, write, and close, respectively -- are bound together in a
;;; <server-impl> object. Procedures in this module take a
;;; <server-impl> object, if needed.
;;;
;;; A <server-impl> may also be looked up by name. If you pass the
` http ' symbol to ` run - server ' , looks for a variable named
;;; `http' in the `(web server http)' module, which should be bound to a
;;; <server-impl> object. Such a binding is made by instantiation of
;;; the `define-server-impl' syntax. In this way the run-server loop can
;;; automatically load other backends if available.
;;;
;;; The life cycle of a server goes as follows:
;;;
* The ` open ' hook is called , to open the server . ` open ' takes 0 or
;;; more arguments, depending on the backend, and returns an opaque
;;; server socket object, or signals an error.
;;;
;;; * The `read' hook is called, to read a request from a new client.
The ` read ' hook takes one arguments , the server socket . It
should return three values : an opaque client socket , the
;;; request, and the request body. The request should be a
;;; `<request>' object, from `(web request)'. The body should be a
;;; string or a bytevector, or `#f' if there is no body.
;;;
;;; If the read failed, the `read' hook may return #f for the client
;;; socket, request, and body.
;;;
;;; * A user-provided handler procedure is called, with the request
and body as its arguments . The handler should return two
;;; values: the response, as a `<response>' record from `(web
;;; response)', and the response body as a string, bytevector, or
;;; `#f' if not present. We also allow the reponse to be simply an
;;; alist of headers, in which case a default response object is
;;; constructed with those headers.
;;;
* The ` write ' hook is called with three arguments : the client
;;; socket, the response, and the body. The `write' hook returns no
;;; values.
;;;
;;; * At this point the request handling is complete. For a loop, we
;;; loop back and try to read a new request.
;;;
;;; * If the user interrupts the loop, the `close' hook is called on
;;; the server socket.
;;;
;;; Code:
(define-module (web server)
#:use-module (srfi srfi-9)
#:use-module (srfi srfi-9 gnu)
#:use-module (rnrs bytevectors)
#:use-module (ice-9 binary-ports)
#:use-module (web request)
#:use-module (web response)
#:use-module (system repl error-handling)
#:use-module (ice-9 control)
#:use-module (ice-9 iconv)
#:export (define-server-impl
lookup-server-impl
make-server-impl
server-impl?
server-impl-name
server-impl-open
server-impl-read
server-impl-write
server-impl-close
open-server
read-client
handle-request
sanitize-response
write-client
close-server
serve-one-client
run-server))
(define *timer* (gettimeofday))
(define (print-elapsed who)
(let ((t (gettimeofday)))
(pk who (+ (* (- (car t) (car *timer*)) 1000000)
(- (cdr t) (cdr *timer*))))
(set! *timer* t)))
(eval-when (expand)
(define *time-debug?* #f))
(define-syntax debug-elapsed
(lambda (x)
(syntax-case x ()
((_ who)
(if *time-debug?*
#'(print-elapsed who)
#'*unspecified*)))))
(define-record-type server-impl
(make-server-impl name open read write close)
server-impl?
(name server-impl-name)
(open server-impl-open)
(read server-impl-read)
(write server-impl-write)
(close server-impl-close))
(define-syntax-rule (define-server-impl name open read write close)
(define name
(make-server-impl 'name open read write close)))
(define (lookup-server-impl impl)
"Look up a server implementation. If IMPL is a server
implementation already, it is returned directly. If it is a symbol, the
binding named IMPL in the ‘(web server IMPL)’ module is
looked up. Otherwise an error is signaled.
Currently a server implementation is a somewhat opaque type, useful only
for passing to other procedures in this module, like
‘read-client’."
(cond
((server-impl? impl) impl)
((symbol? impl)
(let ((impl (module-ref (resolve-module `(web server ,impl)) impl)))
(if (server-impl? impl)
impl
(error "expected a server impl in module" `(web server ,impl)))))
(else
(error "expected a server-impl or a symbol" impl))))
;; -> server
(define (open-server impl open-params)
"Open a server for the given implementation. Return one value, the
new server object. The implementation's ‘open’ procedure is
applied to OPEN-PARAMS, which should be a list."
(apply (server-impl-open impl) open-params))
;; -> (client request body | #f #f #f)
(define (read-client impl server)
"Read a new client from SERVER, by applying the implementation's
‘read’ procedure to the server. If successful, return three
values: an object corresponding to the client, a request object, and the
request body. If any exception occurs, return ‘#f’ for all three
values."
(call-with-error-handling
(lambda ()
((server-impl-read impl) server))
#:pass-keys '(quit interrupt)
#:on-error (if (batch-mode?) 'backtrace 'debug)
#:post-error (lambda _ (values #f #f #f))))
(define (extend-response r k v . additional)
(let ((r (set-field r (response-headers)
(assoc-set! (copy-tree (response-headers r))
k v))))
(if (null? additional)
r
(apply extend-response r additional))))
;; -> response body
(define (sanitize-response request response body)
"\"Sanitize\" the given response and body, making them appropriate for
the given request.
As a convenience to web handler authors, RESPONSE may be given as
an alist of headers, in which case it is used to construct a default
response. Ensures that the response version corresponds to the request
version. If BODY is a string, encodes the string to a bytevector,
in an encoding appropriate for RESPONSE. Adds a
‘content-length’ and ‘content-type’ header, as necessary.
If BODY is a procedure, it is called with a port as an argument,
and the output collected as a bytevector. In the future we might try to
instead use a compressing, chunk-encoded port, and call this procedure
later, in the write-client procedure. Authors are advised not to rely
on the procedure being called at any particular time."
(cond
((list? response)
(sanitize-response request
(build-response #:version (request-version request)
#:headers response)
body))
((not (equal? (request-version request) (response-version response)))
(sanitize-response request
(adapt-response-version response
(request-version request))
body))
((not body)
(values response #vu8()))
((string? body)
(let* ((type (response-content-type response
'(text/plain)))
(declared-charset (assq-ref (cdr type) 'charset))
(charset (or declared-charset "utf-8")))
(sanitize-response
request
(if declared-charset
response
(extend-response response 'content-type
`(,@type (charset . ,charset))))
(string->bytevector body charset))))
((procedure? body)
(let* ((type (response-content-type response
'(text/plain)))
(declared-charset (assq-ref (cdr type) 'charset))
(charset (or declared-charset "utf-8")))
(sanitize-response
request
(if declared-charset
response
(extend-response response 'content-type
`(,@type (charset . ,charset))))
(call-with-encoded-output-string charset body))))
((not (bytevector? body))
(error "unexpected body type"))
((and (response-must-not-include-body? response)
body
FIXME make this stricter : even an empty body should be prohibited .
(not (zero? (bytevector-length body))))
(error "response with this status code must not include body" response))
(else
;; check length; assert type; add other required fields?
(values (let ((rlen (response-content-length response))
(blen (bytevector-length body)))
(cond
(rlen (if (= rlen blen)
response
(error "bad content-length" rlen blen)))
(else (extend-response response 'content-length blen))))
(if (eq? (request-method request) 'HEAD)
;; Responses to HEAD requests must not include bodies.
;; We could raise an error here, but it seems more
;; appropriate to just do something sensible.
#f
body)))))
;; -> response body state
(define (handle-request handler request body state)
"Handle a given request, returning the response and body.
The response and response body are produced by calling the given
HANDLER with REQUEST and BODY as arguments.
The elements of STATE are also passed to HANDLER as
arguments, and may be returned as additional values. The new
STATE, collected from the HANDLER's return values, is then
returned as a list. The idea is that a server loop receives a handler
from the user, along with whatever state values the user is interested
in, allowing the user's handler to explicitly manage its state."
(call-with-error-handling
(lambda ()
(call-with-values (lambda ()
(with-stack-and-prompt
(lambda ()
(apply handler request body state))))
(lambda (response body . state)
(call-with-values (lambda ()
(debug-elapsed 'handler)
(sanitize-response request response body))
(lambda (response body)
(debug-elapsed 'sanitize)
(values response body state))))))
#:pass-keys '(quit interrupt)
#:on-error (if (batch-mode?) 'backtrace 'debug)
#:post-error (lambda _
(values (build-response #:code 500) #f state))))
;; -> unspecified values
(define (write-client impl server client response body)
"Write an HTTP response and body to CLIENT. If the server and
client support persistent connections, it is the implementation's
responsibility to keep track of the client thereafter, presumably by
attaching it to the SERVER argument somehow."
(call-with-error-handling
(lambda ()
((server-impl-write impl) server client response body))
#:pass-keys '(quit interrupt)
#:on-error (if (batch-mode?) 'backtrace 'debug)
#:post-error (lambda _ (values))))
;; -> unspecified values
(define (close-server impl server)
"Release resources allocated by a previous invocation of
‘open-server’."
((server-impl-close impl) server))
(define call-with-sigint
(if (not (provided? 'posix))
(lambda (thunk handler-thunk) (thunk))
(lambda (thunk handler-thunk)
(let ((handler #f))
(catch 'interrupt
(lambda ()
(dynamic-wind
(lambda ()
(set! handler
(sigaction SIGINT (lambda (sig) (throw 'interrupt)))))
thunk
(lambda ()
(if handler
;; restore Scheme handler, SIG_IGN or SIG_DFL.
(sigaction SIGINT (car handler) (cdr handler))
;; restore original C handler.
(sigaction SIGINT #f)))))
(lambda (k . _) (handler-thunk)))))))
(define (with-stack-and-prompt thunk)
(call-with-prompt (default-prompt-tag)
(lambda () (start-stack #t (thunk)))
(lambda (k proc)
(with-stack-and-prompt (lambda () (proc k))))))
;; -> new-state
(define (serve-one-client handler impl server state)
"Read one request from SERVER, call HANDLER on the request
and body, and write the response to the client. Return the new state
produced by the handler procedure."
(debug-elapsed 'serve-again)
(call-with-values
(lambda ()
(read-client impl server))
(lambda (client request body)
(debug-elapsed 'read-client)
(if client
(call-with-values
(lambda ()
(handle-request handler request body state))
(lambda (response body state)
(debug-elapsed 'handle-request)
(write-client impl server client response body)
(debug-elapsed 'write-client)
state))
state))))
(define* (run-server handler #:optional (impl 'http) (open-params '())
. state)
"Run Guile's built-in web server.
HANDLER should be a procedure that takes two or more arguments,
the HTTP request and request body, and returns two or more values, the
response and response body.
For example, here is a simple \"Hello, World!\" server:
@example
(define (handler request body)
(values '((content-type . (text/plain)))
\"Hello, World!\"))
(run-server handler)
@end example
The response and body will be run through ‘sanitize-response’
before sending back to the client.
Additional arguments to HANDLER are taken from
STATE. Additional return values are accumulated into a new
STATE, which will be used for subsequent requests. In this way a
handler can explicitly manage its state.
The default server implementation is ‘http’, which accepts
OPEN-PARAMS like ‘(#:port 8081)’, among others. See \"Web
Server\" in the manual, for more information."
(let* ((impl (lookup-server-impl impl))
(server (open-server impl open-params)))
(call-with-sigint
(lambda ()
(let lp ((state state))
(lp (serve-one-client handler impl server state))))
(lambda ()
(close-server impl server)
(values)))))
| null | https://raw.githubusercontent.com/cky/guile/89ce9fb31b00f1f243fe6f2450db50372cc0b86d/module/web/server.scm | scheme | Web server
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
Commentary:
(web server) is a generic web server interface, along with a main
The lowest layer is the <server-impl> object, which defines a set of
hooks to open a server, read a request from a client, write a
response to a client, and close a server. These hooks -- open,
read, write, and close, respectively -- are bound together in a
<server-impl> object. Procedures in this module take a
<server-impl> object, if needed.
A <server-impl> may also be looked up by name. If you pass the
`http' in the `(web server http)' module, which should be bound to a
<server-impl> object. Such a binding is made by instantiation of
the `define-server-impl' syntax. In this way the run-server loop can
automatically load other backends if available.
The life cycle of a server goes as follows:
more arguments, depending on the backend, and returns an opaque
server socket object, or signals an error.
* The `read' hook is called, to read a request from a new client.
request, and the request body. The request should be a
`<request>' object, from `(web request)'. The body should be a
string or a bytevector, or `#f' if there is no body.
If the read failed, the `read' hook may return #f for the client
socket, request, and body.
* A user-provided handler procedure is called, with the request
values: the response, as a `<response>' record from `(web
response)', and the response body as a string, bytevector, or
`#f' if not present. We also allow the reponse to be simply an
alist of headers, in which case a default response object is
constructed with those headers.
socket, the response, and the body. The `write' hook returns no
values.
* At this point the request handling is complete. For a loop, we
loop back and try to read a new request.
* If the user interrupts the loop, the `close' hook is called on
the server socket.
Code:
-> server
-> (client request body | #f #f #f)
-> response body
check length; assert type; add other required fields?
Responses to HEAD requests must not include bodies.
We could raise an error here, but it seems more
appropriate to just do something sensible.
-> response body state
-> unspecified values
-> unspecified values
restore Scheme handler, SIG_IGN or SIG_DFL.
restore original C handler.
-> new-state |
Copyright ( C ) 2010 , 2011 , 2012 , 2013 , 2015 Free Software Foundation , Inc.
version 3 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA
02110 - 1301 USA
loop implementation for web servers controlled by .
` http ' symbol to ` run - server ' , looks for a variable named
* The ` open ' hook is called , to open the server . ` open ' takes 0 or
The ` read ' hook takes one arguments , the server socket . It
should return three values : an opaque client socket , the
and body as its arguments . The handler should return two
* The ` write ' hook is called with three arguments : the client
(define-module (web server)
#:use-module (srfi srfi-9)
#:use-module (srfi srfi-9 gnu)
#:use-module (rnrs bytevectors)
#:use-module (ice-9 binary-ports)
#:use-module (web request)
#:use-module (web response)
#:use-module (system repl error-handling)
#:use-module (ice-9 control)
#:use-module (ice-9 iconv)
#:export (define-server-impl
lookup-server-impl
make-server-impl
server-impl?
server-impl-name
server-impl-open
server-impl-read
server-impl-write
server-impl-close
open-server
read-client
handle-request
sanitize-response
write-client
close-server
serve-one-client
run-server))
(define *timer* (gettimeofday))
(define (print-elapsed who)
(let ((t (gettimeofday)))
(pk who (+ (* (- (car t) (car *timer*)) 1000000)
(- (cdr t) (cdr *timer*))))
(set! *timer* t)))
(eval-when (expand)
(define *time-debug?* #f))
(define-syntax debug-elapsed
(lambda (x)
(syntax-case x ()
((_ who)
(if *time-debug?*
#'(print-elapsed who)
#'*unspecified*)))))
(define-record-type server-impl
(make-server-impl name open read write close)
server-impl?
(name server-impl-name)
(open server-impl-open)
(read server-impl-read)
(write server-impl-write)
(close server-impl-close))
(define-syntax-rule (define-server-impl name open read write close)
(define name
(make-server-impl 'name open read write close)))
(define (lookup-server-impl impl)
"Look up a server implementation. If IMPL is a server
implementation already, it is returned directly. If it is a symbol, the
binding named IMPL in the ‘(web server IMPL)’ module is
looked up. Otherwise an error is signaled.
Currently a server implementation is a somewhat opaque type, useful only
for passing to other procedures in this module, like
‘read-client’."
(cond
((server-impl? impl) impl)
((symbol? impl)
(let ((impl (module-ref (resolve-module `(web server ,impl)) impl)))
(if (server-impl? impl)
impl
(error "expected a server impl in module" `(web server ,impl)))))
(else
(error "expected a server-impl or a symbol" impl))))
(define (open-server impl open-params)
"Open a server for the given implementation. Return one value, the
new server object. The implementation's ‘open’ procedure is
applied to OPEN-PARAMS, which should be a list."
(apply (server-impl-open impl) open-params))
(define (read-client impl server)
"Read a new client from SERVER, by applying the implementation's
‘read’ procedure to the server. If successful, return three
values: an object corresponding to the client, a request object, and the
request body. If any exception occurs, return ‘#f’ for all three
values."
(call-with-error-handling
(lambda ()
((server-impl-read impl) server))
#:pass-keys '(quit interrupt)
#:on-error (if (batch-mode?) 'backtrace 'debug)
#:post-error (lambda _ (values #f #f #f))))
(define (extend-response r k v . additional)
(let ((r (set-field r (response-headers)
(assoc-set! (copy-tree (response-headers r))
k v))))
(if (null? additional)
r
(apply extend-response r additional))))
(define (sanitize-response request response body)
"\"Sanitize\" the given response and body, making them appropriate for
the given request.
As a convenience to web handler authors, RESPONSE may be given as
an alist of headers, in which case it is used to construct a default
response. Ensures that the response version corresponds to the request
version. If BODY is a string, encodes the string to a bytevector,
in an encoding appropriate for RESPONSE. Adds a
‘content-length’ and ‘content-type’ header, as necessary.
If BODY is a procedure, it is called with a port as an argument,
and the output collected as a bytevector. In the future we might try to
instead use a compressing, chunk-encoded port, and call this procedure
later, in the write-client procedure. Authors are advised not to rely
on the procedure being called at any particular time."
(cond
((list? response)
(sanitize-response request
(build-response #:version (request-version request)
#:headers response)
body))
((not (equal? (request-version request) (response-version response)))
(sanitize-response request
(adapt-response-version response
(request-version request))
body))
((not body)
(values response #vu8()))
((string? body)
(let* ((type (response-content-type response
'(text/plain)))
(declared-charset (assq-ref (cdr type) 'charset))
(charset (or declared-charset "utf-8")))
(sanitize-response
request
(if declared-charset
response
(extend-response response 'content-type
`(,@type (charset . ,charset))))
(string->bytevector body charset))))
((procedure? body)
(let* ((type (response-content-type response
'(text/plain)))
(declared-charset (assq-ref (cdr type) 'charset))
(charset (or declared-charset "utf-8")))
(sanitize-response
request
(if declared-charset
response
(extend-response response 'content-type
`(,@type (charset . ,charset))))
(call-with-encoded-output-string charset body))))
((not (bytevector? body))
(error "unexpected body type"))
((and (response-must-not-include-body? response)
body
FIXME make this stricter : even an empty body should be prohibited .
(not (zero? (bytevector-length body))))
(error "response with this status code must not include body" response))
(else
(values (let ((rlen (response-content-length response))
(blen (bytevector-length body)))
(cond
(rlen (if (= rlen blen)
response
(error "bad content-length" rlen blen)))
(else (extend-response response 'content-length blen))))
(if (eq? (request-method request) 'HEAD)
#f
body)))))
(define (handle-request handler request body state)
"Handle a given request, returning the response and body.
The response and response body are produced by calling the given
HANDLER with REQUEST and BODY as arguments.
The elements of STATE are also passed to HANDLER as
arguments, and may be returned as additional values. The new
STATE, collected from the HANDLER's return values, is then
returned as a list. The idea is that a server loop receives a handler
from the user, along with whatever state values the user is interested
in, allowing the user's handler to explicitly manage its state."
(call-with-error-handling
(lambda ()
(call-with-values (lambda ()
(with-stack-and-prompt
(lambda ()
(apply handler request body state))))
(lambda (response body . state)
(call-with-values (lambda ()
(debug-elapsed 'handler)
(sanitize-response request response body))
(lambda (response body)
(debug-elapsed 'sanitize)
(values response body state))))))
#:pass-keys '(quit interrupt)
#:on-error (if (batch-mode?) 'backtrace 'debug)
#:post-error (lambda _
(values (build-response #:code 500) #f state))))
(define (write-client impl server client response body)
"Write an HTTP response and body to CLIENT. If the server and
client support persistent connections, it is the implementation's
responsibility to keep track of the client thereafter, presumably by
attaching it to the SERVER argument somehow."
(call-with-error-handling
(lambda ()
((server-impl-write impl) server client response body))
#:pass-keys '(quit interrupt)
#:on-error (if (batch-mode?) 'backtrace 'debug)
#:post-error (lambda _ (values))))
(define (close-server impl server)
"Release resources allocated by a previous invocation of
‘open-server’."
((server-impl-close impl) server))
(define call-with-sigint
(if (not (provided? 'posix))
(lambda (thunk handler-thunk) (thunk))
(lambda (thunk handler-thunk)
(let ((handler #f))
(catch 'interrupt
(lambda ()
(dynamic-wind
(lambda ()
(set! handler
(sigaction SIGINT (lambda (sig) (throw 'interrupt)))))
thunk
(lambda ()
(if handler
(sigaction SIGINT (car handler) (cdr handler))
(sigaction SIGINT #f)))))
(lambda (k . _) (handler-thunk)))))))
(define (with-stack-and-prompt thunk)
(call-with-prompt (default-prompt-tag)
(lambda () (start-stack #t (thunk)))
(lambda (k proc)
(with-stack-and-prompt (lambda () (proc k))))))
(define (serve-one-client handler impl server state)
"Read one request from SERVER, call HANDLER on the request
and body, and write the response to the client. Return the new state
produced by the handler procedure."
(debug-elapsed 'serve-again)
(call-with-values
(lambda ()
(read-client impl server))
(lambda (client request body)
(debug-elapsed 'read-client)
(if client
(call-with-values
(lambda ()
(handle-request handler request body state))
(lambda (response body state)
(debug-elapsed 'handle-request)
(write-client impl server client response body)
(debug-elapsed 'write-client)
state))
state))))
(define* (run-server handler #:optional (impl 'http) (open-params '())
                     . state)
  "Run Guile's built-in web server.
HANDLER should be a procedure that takes two or more arguments,
the HTTP request and request body, and returns two or more values, the
response and response body.
For example, here is a simple \"Hello, World!\" server:
@example
 (define (handler request body)
   (values '((content-type . (text/plain)))
           \"Hello, World!\"))
 (run-server handler)
@end example
The response and body will be run through ‘sanitize-response’
before sending back to the client.
Additional arguments to HANDLER are taken from
STATE.  Additional return values are accumulated into a new
STATE, which will be used for subsequent requests.  In this way a
handler can explicitly manage its state.
The default server implementation is ‘http’, which accepts
OPEN-PARAMS like ‘(#:port 8081)’, among others.  See \"Web
Server\" in the manual, for more information."
  ;; Resolve the implementation symbol to its record, open the
  ;; listening server, then serve forever; SIGINT closes the server.
  (let* ((impl (lookup-server-impl impl))
         (server (open-server impl open-params)))
    (call-with-sigint
     (lambda ()
       ;; Infinite serve loop threading the handler state through.
       (let lp ((state state))
         (lp (serve-one-client handler impl server state))))
     (lambda ()
       (close-server impl server)
       (values)))))
|
70cb244781ed79c42c147baa5d5e7d9eabf1e1af3de4c208e4ae75e536ea5fce | nd/sicp | interpreter.scm | (define apply-in-underlying-scheme apply)
;; Core dispatch of the metacircular evaluator: classify EXP by its
;; syntax and evaluate it in ENV.  Clause order matters:
;; ‘application?’ accepts any pair, so it must remain last, after every
;; special form has had a chance to match.
(define (eval exp env)
  (cond ((self-evaluating? exp) exp)
        ((variable? exp) (lookup-variable-value exp env))
        ((quoted? exp) (text-of-quotation exp))
        ((assignment? exp) (eval-assignment exp env))
        ((definition? exp) (eval-definition exp env))
        ((if? exp) (eval-if exp env))
        ;; ‘let’ is handled by desugaring into a lambda application.
        ((let? exp) (eval (let->combination exp) env))
        ((lambda? exp) (make-procedure (lambda-parameters exp)
                                       (lambda-body exp)
                                       env))
        ((begin? exp) (eval-sequence (begin-actions exp) env))
        ;; ‘cond’ is handled by rewriting into nested ‘if’s.
        ((cond? exp) (eval (cond->if exp) env))
        ((application? exp) (apply (eval (operator exp) env)
                                   (list-of-values (operands exp) env)))
        (else
         (error "Unknown expression type -- EVAL" exp))))
;; Apply PROCEDURE to ARGUMENTS.  Primitives are delegated to the host
;; Scheme; compound procedures evaluate their body in a new frame that
;; binds the parameters to the arguments, extending the environment
;; captured when the procedure was made (lexical scoping).
(define (apply procedure arguments)
  (cond ((primitive-procedure? procedure) (apply-primitive-procedure procedure arguments))
        ((compound-procedure? procedure)
         (eval-sequence (procedure-body procedure)
                        (extend-environment (procedure-parameters procedure)
                                            arguments
                                            (procedure-environment procedure))))
        (else (error "Unknown procedure type -- APPLY" procedure))))
;; Evaluate every operand expression and return the list of values.
;; The original built the list with a bare (cons (eval ...) (recur)),
;; so the order in which operands were evaluated depended on the host
;; Scheme's unspecified argument-evaluation order.  Binding the first
;; value in a ‘let’ pins evaluation to left-to-right (cf. SICP
;; exercise 4.1) without changing the list that is returned.
(define (list-of-values exps env)
  (if (no-operands? exps)
      '()
      (let ((first-value (eval (first-operand exps) env)))
        (cons first-value
              (list-of-values (rest-operands exps) env)))))
;; Evaluate an ‘if’: test the predicate under the interpreted
;; language's notion of truth (‘true?’), then evaluate exactly one arm.
(define (eval-if exp env)
  (if (true? (eval (if-predicate exp) env))
      (eval (if-consequent exp) env)
      (eval (if-alternative exp) env)))
;; Evaluate EXPS in order; the value of the last one is the value of
;; the whole sequence.
(define (eval-sequence exps env)
  (cond ((last-exp? exps) (eval (first-exp exps) env))
        (else (eval (first-exp exps) env)
              (eval-sequence (rest-exps exps) env))))
;; (set! var val): mutate an existing binding, returning the symbol ok.
(define (eval-assignment exp env)
  (set-variable-value! (assignment-variable exp)
                       (eval (assignment-value exp) env)
                       env)
  'ok)
;; (define var val): create or overwrite a binding in the current
;; frame, returning the symbol ok.
(define (eval-definition exp env)
  (define-variable! (definition-variable exp)
    (eval (definition-value exp) env)
    env)
  'ok)
;; --- Expression syntax: predicates and accessors ------------------
;; Numbers and strings evaluate to themselves.
(define (self-evaluating? exp)
  (cond ((number? exp) true)
        ((string? exp) true)
        (else false)))
;; Variables are represented as symbols.
(define (variable? exp) (symbol? exp))
(define (quoted? exp) (tagged-list? exp 'quote))
(define (text-of-quotation exp) (cadr exp))
;; True when EXP is a list whose head is TAG; safe on non-pairs.
(define (tagged-list? exp tag)
  (if (pair? exp)
      (eq? (car exp) tag)
      false))
;; (set! <var> <value>)
(define (assignment? exp) (tagged-list? exp 'set!))
(define (assignment-variable exp) (cadr exp))
(define (assignment-value exp) (caddr exp))
(define (make-assignment var val) (list 'set! var val))
(define (make-definition var val) (list 'define var val))
;; (define <var> <value>) or (define (<var> <params>...) <body>...)
(define (definition? exp) (tagged-list? exp 'define))
(define (definition-variable exp)
  (if (symbol? (cadr exp))
      (cadr exp)
      (caadr exp)))
;; Procedure-style definitions desugar to a lambda.
(define (definition-value exp)
  (if (symbol? (cadr exp))
      (caddr exp)
      (make-lambda (cdadr exp) ; formal parameters
                   (cddr exp)))) ; body
;; (lambda (<params>...) <body>...)
(define (lambda? exp) (tagged-list? exp 'lambda))
(define (lambda-parameters exp) (cadr exp))
(define (lambda-body exp) (cddr exp))
(define (make-lambda parameters body) (cons 'lambda (cons parameters body)))
;; (if <pred> <consequent> [<alternative>]); a missing alternative
;; defaults to the symbol false.
(define (if? exp) (tagged-list? exp 'if))
(define (if-predicate exp) (cadr exp))
(define (if-consequent exp) (caddr exp))
(define (if-alternative exp)
  (if (not (null? (cdddr exp)))
      (cadddr exp)
      'false))
(define (make-if predicate consequent alternative) (list 'if predicate consequent alternative))
;; (begin <exp>...) and helpers over expression sequences.
(define (begin? exp) (tagged-list? exp 'begin))
(define (begin-actions exp) (cdr exp))
(define (last-exp? seq) (null? (cdr seq)))
(define (first-exp seq) (car seq))
(define (rest-exps seq) (cdr seq))
;; Turn a sequence into a single expression, wrapping in ‘begin’ only
;; when there is more than one expression.
(define (sequence->exp seq)
  (cond ((null? seq) seq)
        ((last-exp? seq) (first-exp seq))
        (else (make-begin seq))))
(define (make-begin seq) (cons 'begin seq))
;; Any other pair is a procedure application (checked last in eval).
(define (application? exp) (pair? exp))
(define (operator exp) (car exp))
(define (operands exp) (cdr exp))
(define (no-operands? ops) (null? ops))
(define (first-operand ops) (car ops))
(define (rest-operands ops) (cdr ops))
;; --- Environments -------------------------------------------------
;; An environment is a list of frames; a frame is a pair of parallel
;; lists: (variables . values).
(define (enclosing-environment env) (cdr env))
(define (first-frame env) (car env))
(define the-empty-environment '())
(define (make-frame variables values) (cons variables values))
(define (frame-variables frame) (car frame))
(define (frame-values frame) (cdr frame))
;; Destructively push a new binding onto the front of FRAME.
(define (add-binding-to-frame! var val frame)
  (set-car! frame (cons var (car frame)))
  (set-cdr! frame (cons val (cdr frame))))
;; Extend BASE-ENV with a new frame binding VARS to VALS, signalling an
;; error when the lists differ in length.  Each length is computed
;; exactly once (the original walked the lists again on the error
;; path), and the three outcomes are one ‘cond’ dispatch.
(define (extend-environment vars vals base-env)
  (let ((n-vars (length vars))
        (n-vals (length vals)))
    (cond ((= n-vars n-vals)
           (cons (make-frame vars vals) base-env))
          ((< n-vars n-vals)
           (error "Too many arguments supplied" vars vals))
          (else
           (error "Too few arguments supplied" vars vals)))))
;; Find the value bound to VAR in ENV: scan the innermost frame's
;; parallel variable/value lists, and on a miss retry in the enclosing
;; environment; error out at the empty environment.
(define (lookup-variable-value var env)
  (define (env-loop env)
    (define (scan vars vals)
      (cond ((null? vars)
             (env-loop (enclosing-environment env)))
            ((eq? var (car vars))
             (car vals))
            (else (scan (cdr vars) (cdr vals)))))
    (if (eq? env the-empty-environment)
        (error "Unbound variable" var)
        (let ((frame (first-frame env)))
          (scan (frame-variables frame)
                (frame-values frame)))))
  (env-loop env))
;; Mutate the nearest existing binding of VAR to VAL; same frame-scan
;; structure as lookup-variable-value, but unbound variables are an
;; error rather than being created.
(define (set-variable-value! var val env)
  (define (env-loop env)
    (define (scan vars vals)
      (cond ((null? vars)
             (env-loop (enclosing-environment env)))
            ((eq? var (car vars))
             (set-car! vals val))
            (else (scan (cdr vars) (cdr vals)))))
    (if (eq? env the-empty-environment)
        (error "Unbound variable -- SET!" var)
        (let ((frame (first-frame env)))
          (scan (frame-variables frame)
                (frame-values frame)))))
  (env-loop env))
;; Bind VAR to VAL in the *first* frame only: overwrite an existing
;; binding there, or add a fresh one.  Enclosing frames are never
;; consulted, giving ‘define’ its local-to-current-scope semantics.
(define (define-variable! var val env)
  (let ((frame (first-frame env)))
    (define (scan vars vals)
      (cond ((null? vars)
             (add-binding-to-frame! var val frame))
            ((eq? var (car vars))
             (set-car! vals val))
            (else (scan (cdr vars) (cdr vals)))))
    (scan (frame-variables frame)
          (frame-values frame))))
;; Build the initial global environment: one frame holding the
;; primitive procedures plus the symbols true and false.
(define (setup-environment)
  (let ((initial-env
         (extend-environment (primitive-procedure-names)
                             (primitive-procedure-objects)
                             the-empty-environment)))
    (define-variable! 'true true initial-env)
    (define-variable! 'false false initial-env)
    initial-env))
;; Primitives are represented as (primitive <host-procedure>).
(define (primitive-procedure? proc) (tagged-list? proc 'primitive))
(define (primitive-implementation proc) (cadr proc))
;; The table of primitives exposed to interpreted programs.
(define primitive-procedures
  (list (list 'car car)
        (list 'cdr cdr)
        (list 'cons cons)
        (list 'null? null?)
        (list '= =)
        (list '+ +)
        (list '- -)
        (list '* *)))
(define (primitive-procedure-names) (map car primitive-procedures))
(define (primitive-procedure-objects)
  (map (lambda (proc) (list 'primitive (cadr proc)))
       primitive-procedures))
;; Run a primitive via the host Scheme's apply (saved before this
;; file's own ‘apply’ shadowed it).
(define (apply-primitive-procedure proc args)
  (apply-in-underlying-scheme (primitive-implementation proc) args))
(define input-prompt ";;; M-Eval input:")
(define output-prompt ";;; M-Eval value:")
(define (driver-loop)
(prompt-for-input input-prompt)
(let ((input (read)))
(let ((output (eval input the-global-environment)))
(announce-output output-prompt)
(user-print output)))
(driver-loop))
(define (prompt-for-input string)
(newline) (newline) (display string) (newline))
(define (announce-output string)
(newline) (display string) (newline))
(define (user-print object)
(if (compound-procedure? object)
(display (list 'compound-procedure
(procedure-parameters object)
(procedure-body object)
'<procedure-env>))
(display object)))
;;====
;; Short-circuiting ‘and’: evaluate subexpressions left to right; stop
;; at the first false value, otherwise yield the last value.  An empty
;; (and) is true.
(define (eval-and exp env)
  (define (iter exps)
    (let ((first-value (eval (first-exp exps) env)))
      (if (false? first-value)
          false
          (if (last-exp? exps)
              first-value
              (iter (rest-exps exps))))))
  (if (null? (and-expressions exp))
      true
      (iter (and-expressions exp))))
;; Short-circuiting ‘or’: yield the first true value; an empty (or) or
;; all-false operands yield false.
(define (eval-or exp env)
  (define (iter exps)
    (let ((first-value (eval (first-exp exps) env)))
      (if (true? first-value)
          first-value
          (if (last-exp? exps)
              false
              (iter (rest-exps exps))))))
  (if (null? (or-expressions exp))
      false
      (iter (or-expressions exp))))
(define (and? exp) (tagged-list? exp 'and))
(define (or? exp) (tagged-list? exp 'or))
(define (and-expressions exp) (cdr exp))
(define (or-expressions exp) (cdr exp))
;;---
;;let
;;---
;; Desugar (let ((v e) ...) body...) into the equivalent application
;; ((lambda (v ...) body...) e ...).  The application is built with
;; ‘cons’ directly; the original wrote (append (list <lambda>) values),
;; which allocates a throwaway one-element list to do the same thing.
(define (let->combination exp)
  (let ((parameters (map let-assignment-var (let-assignments exp)))
        (values (map let-assignment-value (let-assignments exp)))
        (body (let-body exp)))
    (cons (make-lambda parameters body) values)))
;; Syntax of ‘let’: (let ((var val) ...) body...).
;; ‘let?’ now goes through tagged-list? like every other syntax
;; predicate in this file; the original called (car exp) directly and
;; would error on a non-pair expression instead of answering false.
(define (let? exp) (tagged-list? exp 'let))
(define (make-let assignments body) (cons 'let (cons assignments body)))
(define (let-assignments exp) (cadr exp))
(define (let-body exp) (cddr exp))
(define (first-assignment assignments) (car assignments))
(define (rest-assignment assignments) (cdr assignments))
(define (make-let-assignment var val) (list var val))
(define (let-assignment-var assignment) (car assignment))
(define (let-assignment-value assignment) (cadr assignment))
;; Evaluate a ‘let’ by desugaring and applying directly.
;; NOTE(review): ‘eval’ above handles let via (eval (let->combination
;; exp) env) and does not call this procedure; it appears to be an
;; unused alternative kept for reference.
(define (eval-let exp env)
  (let ((combintation (let->combination exp)))
    (apply (eval (operator combintation) env)
           (list-of-values (operands combintation) env))))
;; Syntax of ‘cond’ and its translation into nested ‘if’s.
(define (cond? exp) (tagged-list? exp 'cond))
(define (cond-clauses exp) (cdr exp))
(define (cond-else-clause? clause) (eq? (cond-predicate clause) 'else))
(define (cond-predicate clause) (car clause))
(define (cond-actions clause) (cdr clause))
(define (cond->if exp) (expand-clauses (cond-clauses exp)))
;; Rewrite the clause list recursively: each clause becomes the test of
;; an ‘if’ whose alternative expands the remaining clauses.  An ‘else’
;; clause must be last and supplies the final alternative.
(define (expand-clauses clauses)
  (if (null? clauses)
      'false ; no `else' clause
      (let ((first (car clauses))
            (rest (cdr clauses)))
        (if (cond-else-clause? first)
            (if (null? rest)
                (sequence->exp (cond-actions first))
                (error "ELSE clause isn't last -- COND->IF" clauses))
            (make-if (cond-predicate first)
                     (sequence->exp (cond-actions first))
                     (expand-clauses rest))))))
;; Hoist internal definitions (SICP exercise 4.16): rewrite a body with
;; interleaved (define ...)s into a single ‘let’ that first binds every
;; defined name to the marker '*unassigned*, then set!s each to its
;; real value, then runs the remaining expressions.  This gives all
;; internal definitions simultaneous scope.  Bodies with no internal
;; defines are returned unchanged.
(define (scan-out-defines body)
  (let* ((defs (filter definition? body))
         (non-defs (filter (lambda (x) (not (definition? x))) body))
         (assignments (map (lambda (def) (make-let-assignment (definition-variable def) ''*unassigned*)) defs))
         (sets (map (lambda (def) (make-assignment (definition-variable def) (definition-value def))) defs)))
    (if (not (null? defs))
        (list (let->combination (make-let assignments (append sets non-defs))))
        body)))
;; Compound procedures: (procedure <params> <body> <env>).
(define (make-procedure parameters body env) (list 'procedure parameters body env))
(define (compound-procedure? p) (tagged-list? p 'procedure))
(define (procedure-parameters p) (cadr p))
;; NOTE(review): scan-out-defines runs on every body access; doing it
;; once in make-procedure would avoid repeating the rewrite per call.
(define (procedure-body p) (scan-out-defines (caddr p)))
(define (procedure-environment p) (cadddr p))
;; Truthiness of the interpreted language: anything but false is true.
(define (true? x) (not (eq? x false)))
(define (false? x) (eq? x false))
(define the-global-environment (setup-environment))
| null | https://raw.githubusercontent.com/nd/sicp/d8587a0403d95af7c7bcf59b812f98c4f8550afd/ch04/interpreter.scm | scheme | formal parameters
body
====
---
let
---
no `else' clause | (define apply-in-underlying-scheme apply)
(define (eval exp env)
(cond ((self-evaluating? exp) exp)
((variable? exp) (lookup-variable-value exp env))
((quoted? exp) (text-of-quotation exp))
((assignment? exp) (eval-assignment exp env))
((definition? exp) (eval-definition exp env))
((if? exp) (eval-if exp env))
((let? exp) (eval (let->combination exp) env))
((lambda? exp) (make-procedure (lambda-parameters exp)
(lambda-body exp)
env))
((begin? exp) (eval-sequence (begin-actions exp) env))
((cond? exp) (eval (cond->if exp) env))
((application? exp) (apply (eval (operator exp) env)
(list-of-values (operands exp) env)))
(else
(error "Unknown expression type -- EVAL" exp))))
(define (apply procedure arguments)
(cond ((primitive-procedure? procedure) (apply-primitive-procedure procedure arguments))
((compound-procedure? procedure)
(eval-sequence (procedure-body procedure)
(extend-environment (procedure-parameters procedure)
arguments
(procedure-environment procedure))))
(else (error "Unknown procedure type -- APPLY" procedure))))
(define (list-of-values exps env)
(if (no-operands? exps)
'()
(cons (eval (first-operand exps) env)
(list-of-values (rest-operands exps) env))))
(define (eval-if exp env)
(if (true? (eval (if-predicate exp) env))
(eval (if-consequent exp) env)
(eval (if-alternative exp) env)))
(define (eval-sequence exps env)
(cond ((last-exp? exps) (eval (first-exp exps) env))
(else (eval (first-exp exps) env)
(eval-sequence (rest-exps exps) env))))
(define (eval-assignment exp env)
(set-variable-value! (assignment-variable exp)
(eval (assignment-value exp) env)
env)
'ok)
(define (eval-definition exp env)
(define-variable! (definition-variable exp)
(eval (definition-value exp) env)
env)
'ok)
(define (self-evaluating? exp)
(cond ((number? exp) true)
((string? exp) true)
(else false)))
(define (variable? exp) (symbol? exp))
(define (quoted? exp) (tagged-list? exp 'quote))
(define (text-of-quotation exp) (cadr exp))
(define (tagged-list? exp tag)
(if (pair? exp)
(eq? (car exp) tag)
false))
(define (assignment? exp) (tagged-list? exp 'set!))
(define (assignment-variable exp) (cadr exp))
(define (assignment-value exp) (caddr exp))
(define (make-assignment var val) (list 'set! var val))
(define (make-definition var val) (list 'define var val))
(define (definition? exp) (tagged-list? exp 'define))
(define (definition-variable exp)
(if (symbol? (cadr exp))
(cadr exp)
(caadr exp)))
(define (definition-value exp)
(if (symbol? (cadr exp))
(caddr exp)
(define (lambda? exp) (tagged-list? exp 'lambda))
(define (lambda-parameters exp) (cadr exp))
(define (lambda-body exp) (cddr exp))
(define (make-lambda parameters body) (cons 'lambda (cons parameters body)))
(define (if? exp) (tagged-list? exp 'if))
(define (if-predicate exp) (cadr exp))
(define (if-consequent exp) (caddr exp))
(define (if-alternative exp)
(if (not (null? (cdddr exp)))
(cadddr exp)
'false))
(define (make-if predicate consequent alternative) (list 'if predicate consequent alternative))
(define (begin? exp) (tagged-list? exp 'begin))
(define (begin-actions exp) (cdr exp))
(define (last-exp? seq) (null? (cdr seq)))
(define (first-exp seq) (car seq))
(define (rest-exps seq) (cdr seq))
(define (sequence->exp seq)
(cond ((null? seq) seq)
((last-exp? seq) (first-exp seq))
(else (make-begin seq))))
(define (make-begin seq) (cons 'begin seq))
(define (application? exp) (pair? exp))
(define (operator exp) (car exp))
(define (operands exp) (cdr exp))
(define (no-operands? ops) (null? ops))
(define (first-operand ops) (car ops))
(define (rest-operands ops) (cdr ops))
(define (enclosing-environment env) (cdr env))
(define (first-frame env) (car env))
(define the-empty-environment '())
(define (make-frame variables values) (cons variables values))
(define (frame-variables frame) (car frame))
(define (frame-values frame) (cdr frame))
(define (add-binding-to-frame! var val frame)
(set-car! frame (cons var (car frame)))
(set-cdr! frame (cons val (cdr frame))))
(define (extend-environment vars vals base-env)
(if (= (length vars) (length vals))
(cons (make-frame vars vals) base-env)
(if (< (length vars) (length vals))
(error "Too many arguments supplied" vars vals)
(error "Too few arguments supplied" vars vals))))
(define (lookup-variable-value var env)
(define (env-loop env)
(define (scan vars vals)
(cond ((null? vars)
(env-loop (enclosing-environment env)))
((eq? var (car vars))
(car vals))
(else (scan (cdr vars) (cdr vals)))))
(if (eq? env the-empty-environment)
(error "Unbound variable" var)
(let ((frame (first-frame env)))
(scan (frame-variables frame)
(frame-values frame)))))
(env-loop env))
(define (set-variable-value! var val env)
(define (env-loop env)
(define (scan vars vals)
(cond ((null? vars)
(env-loop (enclosing-environment env)))
((eq? var (car vars))
(set-car! vals val))
(else (scan (cdr vars) (cdr vals)))))
(if (eq? env the-empty-environment)
(error "Unbound variable -- SET!" var)
(let ((frame (first-frame env)))
(scan (frame-variables frame)
(frame-values frame)))))
(env-loop env))
(define (define-variable! var val env)
(let ((frame (first-frame env)))
(define (scan vars vals)
(cond ((null? vars)
(add-binding-to-frame! var val frame))
((eq? var (car vars))
(set-car! vals val))
(else (scan (cdr vars) (cdr vals)))))
(scan (frame-variables frame)
(frame-values frame))))
(define (setup-environment)
(let ((initial-env
(extend-environment (primitive-procedure-names)
(primitive-procedure-objects)
the-empty-environment)))
(define-variable! 'true true initial-env)
(define-variable! 'false false initial-env)
initial-env))
(define (primitive-procedure? proc) (tagged-list? proc 'primitive))
(define (primitive-implementation proc) (cadr proc))
(define primitive-procedures
(list (list 'car car)
(list 'cdr cdr)
(list 'cons cons)
(list 'null? null?)
(list '= =)
(list '+ +)
(list '- -)
(list '* *)))
(define (primitive-procedure-names) (map car primitive-procedures))
(define (primitive-procedure-objects)
(map (lambda (proc) (list 'primitive (cadr proc)))
primitive-procedures))
(define (apply-primitive-procedure proc args)
(apply-in-underlying-scheme (primitive-implementation proc) args))
(define input-prompt ";;; M-Eval input:")
(define output-prompt ";;; M-Eval value:")
(define (driver-loop)
(prompt-for-input input-prompt)
(let ((input (read)))
(let ((output (eval input the-global-environment)))
(announce-output output-prompt)
(user-print output)))
(driver-loop))
(define (prompt-for-input string)
(newline) (newline) (display string) (newline))
(define (announce-output string)
(newline) (display string) (newline))
(define (user-print object)
(if (compound-procedure? object)
(display (list 'compound-procedure
(procedure-parameters object)
(procedure-body object)
'<procedure-env>))
(display object)))
(define (eval-and exp env)
(define (iter exps)
(let ((first-value (eval (first-exp exps) env)))
(if (false? first-value)
false
(if (last-exp? exps)
first-value
(iter (rest-exps exps))))))
(if (null? (and-expressions exp))
true
(iter (and-expressions exp))))
(define (eval-or exp env)
(define (iter exps)
(let ((first-value (eval (first-exp exps) env)))
(if (true? first-value)
first-value
(if (last-exp? exps)
false
(iter (rest-exps exps))))))
(if (null? (or-expressions exp))
false
(iter (or-expressions exp))))
(define (and? exp) (tagged-list? exp 'and))
(define (or? exp) (tagged-list? exp 'or))
(define (and-expressions exp) (cdr exp))
(define (or-expressions exp) (cdr exp))
(define (let->combination exp)
(let ((parameters (map let-assignment-var (let-assignments exp)))
(values (map let-assignment-value (let-assignments exp)))
(body (let-body exp)))
(append (list (make-lambda parameters body)) values)))
(define (let? exp) (eq? (car exp) 'let))
(define (make-let assignments body) (cons 'let (cons assignments body)))
(define (let-assignments exp) (cadr exp))
(define (let-body exp) (cddr exp))
(define (first-assignment assignments) (car assignments))
(define (rest-assignment assignments) (cdr assignments))
(define (make-let-assignment var val) (list var val))
(define (let-assignment-var assignment) (car assignment))
(define (let-assignment-value assignment) (cadr assignment))
(define (eval-let exp env)
(let ((combintation (let->combination exp)))
(apply (eval (operator combintation) env)
(list-of-values (operands combintation) env))))
(define (cond? exp) (tagged-list? exp 'cond))
(define (cond-clauses exp) (cdr exp))
(define (cond-else-clause? clause) (eq? (cond-predicate clause) 'else))
(define (cond-predicate clause) (car clause))
(define (cond-actions clause) (cdr clause))
(define (cond->if exp) (expand-clauses (cond-clauses exp)))
(define (expand-clauses clauses)
(if (null? clauses)
(let ((first (car clauses))
(rest (cdr clauses)))
(if (cond-else-clause? first)
(if (null? rest)
(sequence->exp (cond-actions first))
(error "ELSE clause isn't last -- COND->IF" clauses))
(make-if (cond-predicate first)
(sequence->exp (cond-actions first))
(expand-clauses rest))))))
(define (scan-out-defines body)
(let* ((defs (filter definition? body))
(non-defs (filter (lambda (x) (not (definition? x))) body))
(assignments (map (lambda (def) (make-let-assignment (definition-variable def) ''*unassigned*)) defs))
(sets (map (lambda (def) (make-assignment (definition-variable def) (definition-value def))) defs)))
(if (not (null? defs))
(list (let->combination (make-let assignments (append sets non-defs))))
body)))
(define (make-procedure parameters body env) (list 'procedure parameters body env))
(define (compound-procedure? p) (tagged-list? p 'procedure))
(define (procedure-parameters p) (cadr p))
(define (procedure-body p) (scan-out-defines (caddr p)))
(define (procedure-environment p) (cadddr p))
(define (true? x) (not (eq? x false)))
(define (false? x) (eq? x false))
(define the-global-environment (setup-environment))
|
788f39c9da2b6d9247aeddef493bd81e1687ab09e3bf0d13975e877ae68054ce | v-kolesnikov/sicp | 1_44_test.clj | (ns sicp.chapter01.1-44-test
(:require [clojure.test :refer :all]
[sicp.test-helper :refer :all]
[sicp.common :refer [square]]
[sicp.chapter01.1-44 :refer :all]))
;; Smoothing the square function should leave values at integer points
;; essentially unchanged (dx is tiny), within assert-equal's tolerance.
(deftest test-smooth
  (let [dx 0.00001]
    (assert-equal 4.0 ((smooth square dx) 2))
    (assert-equal 9.0 ((smooth square dx) 3))))
;; Repeated (n-fold) smoothing should likewise preserve these values.
(deftest test-smooth-fold
  (let [dx 0.00001
        n 10]
    (assert-equal 4.0 ((smooth-fold square dx n) 2))
    (assert-equal 9.0 ((smooth-fold square dx n) 3))))
| null | https://raw.githubusercontent.com/v-kolesnikov/sicp/4298de6083440a75898e97aad658025a8cecb631/test/sicp/chapter01/1_44_test.clj | clojure | (ns sicp.chapter01.1-44-test
(:require [clojure.test :refer :all]
[sicp.test-helper :refer :all]
[sicp.common :refer [square]]
[sicp.chapter01.1-44 :refer :all]))
(deftest test-smooth
(let [dx 0.00001]
(assert-equal 4.0 ((smooth square dx) 2))
(assert-equal 9.0 ((smooth square dx) 3))))
(deftest test-smooth-fold
(let [dx 0.00001
n 10]
(assert-equal 4.0 ((smooth-fold square dx n) 2))
(assert-equal 9.0 ((smooth-fold square dx n) 3))))
| |
8cb3f9b54dc7398eda31976b5e61d80aad3173c26d3deb8be8f02c923a33e446 | OCamlPro/ez_pgocaml | thread.ml | module type E = sig type err val from_exn : exn -> err end
module Make(E : E) = struct
type 'a t = ('a, E.err) Result.t Lwt.t
let (>>=) p f = Lwt.bind p (function Error e -> Lwt.return_error e | Ok x -> f x)
let fail exn = Lwt.return_error (E.from_exn exn)
let catch = Lwt.catch
let return = Lwt.return_ok
type in_channel = Lwt_io.input_channel
type out_channel = Lwt_io.output_channel
let open_connection sockaddr =
let sock = Lwt_unix.socket (Unix.domain_of_sockaddr sockaddr) Lwt_unix.SOCK_STREAM 0 in
catch
(fun () ->
Lwt.bind (Lwt_unix.connect sock sockaddr)
(fun () ->
Lwt_unix.set_close_on_exec sock;
return (Lwt_io.of_fd ~mode:Lwt_io.input sock,
Lwt_io.of_fd ~mode:Lwt_io.output sock)))
(fun exn -> Lwt.bind (Lwt_unix.close sock) (fun () -> fail exn))
let output_char oc c = Lwt.bind (Lwt_io.write_char oc c) Lwt.return_ok
let output_string oc s = Lwt.bind (Lwt_io.write oc s) Lwt.return_ok
let flush oc = Lwt.bind (Lwt_io.flush oc) Lwt.return_ok
let input_char ic = Lwt.bind (Lwt_io.read_char ic) Lwt.return_ok
let really_input ic b n m = Lwt.bind (Lwt_io.read_into_exactly ic b n m) Lwt.return_ok
let close_in ic = Lwt.bind (Lwt_io.close ic) Lwt.return_ok
let output_binary_int oc n =
output_char oc (Char.chr (n lsr 24)) >>= fun () ->
output_char oc (Char.chr ((n lsr 16) land 255)) >>= fun () ->
output_char oc (Char.chr ((n lsr 8) land 255)) >>= fun () ->
output_char oc (Char.chr (n land 255))
let input_binary_int ic =
input_char ic >>= fun a ->
input_char ic >>= fun b ->
input_char ic >>= fun c ->
input_char ic >>= fun d ->
return ((Char.code a lsl 24)
lor (Char.code b lsl 16)
lor (Char.code c lsl 8)
lor (Char.code d))
end
| null | https://raw.githubusercontent.com/OCamlPro/ez_pgocaml/e84e6835e6048a27fcdfdc58403ca3a8ce16d744/src/impl/rp/thread.ml | ocaml | module type E = sig type err val from_exn : exn -> err end
module Make(E : E) = struct
type 'a t = ('a, E.err) Result.t Lwt.t
let (>>=) p f = Lwt.bind p (function Error e -> Lwt.return_error e | Ok x -> f x)
let fail exn = Lwt.return_error (E.from_exn exn)
let catch = Lwt.catch
let return = Lwt.return_ok
type in_channel = Lwt_io.input_channel
type out_channel = Lwt_io.output_channel
let open_connection sockaddr =
let sock = Lwt_unix.socket (Unix.domain_of_sockaddr sockaddr) Lwt_unix.SOCK_STREAM 0 in
catch
(fun () ->
Lwt.bind (Lwt_unix.connect sock sockaddr)
(fun () ->
Lwt_unix.set_close_on_exec sock;
return (Lwt_io.of_fd ~mode:Lwt_io.input sock,
Lwt_io.of_fd ~mode:Lwt_io.output sock)))
(fun exn -> Lwt.bind (Lwt_unix.close sock) (fun () -> fail exn))
let output_char oc c = Lwt.bind (Lwt_io.write_char oc c) Lwt.return_ok
let output_string oc s = Lwt.bind (Lwt_io.write oc s) Lwt.return_ok
let flush oc = Lwt.bind (Lwt_io.flush oc) Lwt.return_ok
let input_char ic = Lwt.bind (Lwt_io.read_char ic) Lwt.return_ok
let really_input ic b n m = Lwt.bind (Lwt_io.read_into_exactly ic b n m) Lwt.return_ok
let close_in ic = Lwt.bind (Lwt_io.close ic) Lwt.return_ok
let output_binary_int oc n =
output_char oc (Char.chr (n lsr 24)) >>= fun () ->
output_char oc (Char.chr ((n lsr 16) land 255)) >>= fun () ->
output_char oc (Char.chr ((n lsr 8) land 255)) >>= fun () ->
output_char oc (Char.chr (n land 255))
let input_binary_int ic =
input_char ic >>= fun a ->
input_char ic >>= fun b ->
input_char ic >>= fun c ->
input_char ic >>= fun d ->
return ((Char.code a lsl 24)
lor (Char.code b lsl 16)
lor (Char.code c lsl 8)
lor (Char.code d))
end
| |
310b0ea99cd38ed49444d6b6a9f7d355a7be5053811b84a33f090b5986aeac45 | DavidAlphaFox/RabbitMQ | rabbit_mgmt_wm_definitions.erl | The contents of this file are subject to the Mozilla Public License
Version 1.1 ( the " License " ) ; you may not use this file except in
%% compliance with the License. You may obtain a copy of the License at
%% /
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
%% License for the specific language governing rights and limitations
%% under the License.
%%
The Original Code is RabbitMQ Management Plugin .
%%
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2010 - 2014 GoPivotal , Inc. All rights reserved .
%%
-module(rabbit_mgmt_wm_definitions).
-export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
-export([content_types_accepted/2, allowed_methods/2, accept_json/2]).
-export([post_is_create/2, create_path/2, accept_multipart/2]).
-export([apply_defs/3]).
-import(rabbit_misc, [pget/2, pget/3]).
-include("rabbit_mgmt.hrl").
-include_lib("webmachine/include/webmachine.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
%%--------------------------------------------------------------------
%% Webmachine resource callbacks: fresh empty context per request.
init(_Config) -> {ok, #context{}}.
%% GET renders JSON via to_json/2.
content_types_provided(ReqData, Context) ->
   {[{"application/json", to_json}], ReqData, Context}.
%% POST accepts raw JSON or a browser multipart upload.
content_types_accepted(ReqData, Context) ->
   {[{"application/json", accept_json},
     {"multipart/form-data", accept_multipart}], ReqData, Context}.
allowed_methods(ReqData, Context) ->
    {['HEAD', 'GET', 'POST'], ReqData, Context}.
%% POST creates; the path is irrelevant since nothing is stored at it.
post_is_create(ReqData, Context) ->
    {true, ReqData, Context}.
create_path(ReqData, Context) ->
    {"dummy", ReqData, Context}.
%% Export the broker's definitions as one JSON document: users, vhosts,
%% permissions, parameters, policies, and the exportable subset of
%% queues, exchanges and bindings, tagged with the broker version.
to_json(ReqData, Context) ->
    %% Only user-created exchanges/queues are exported; bindings are
    %% kept only when both endpoints survive the filtering.
    Xs = [X || X <- rabbit_mgmt_wm_exchanges:basic(ReqData),
               export_exchange(X)],
    Qs = [Q || Q <- rabbit_mgmt_wm_queues:basic(ReqData),
               export_queue(Q)],
    QNames = [{pget(name, Q), pget(vhost, Q)} || Q <- Qs],
    Bs = [B || B <- rabbit_mgmt_wm_bindings:basic(ReqData),
               export_binding(B, QNames)],
    {ok, Vsn} = application:get_key(rabbit, vsn),
    rabbit_mgmt_util:reply(
      [{rabbit_version, list_to_binary(Vsn)}] ++
          filter(
            [{users, rabbit_mgmt_wm_users:users()},
             {vhosts, rabbit_mgmt_wm_vhosts:basic()},
             {permissions, rabbit_mgmt_wm_permissions:permissions()},
             {parameters, rabbit_mgmt_wm_parameters:basic(ReqData)},
             {policies, rabbit_mgmt_wm_policies:basic(ReqData)},
             {queues, Qs},
             {exchanges, Xs},
             {bindings, Bs}]),
      %% ?download=<name> makes browsers save the JSON as a file.
      case wrq:get_qs_value("download", ReqData) of
          undefined -> ReqData;
          Filename -> rabbit_mgmt_util:set_resp_header(
                        "Content-Disposition",
                        "attachment; filename=" ++
                            mochiweb_util:unquote(Filename), ReqData)
      end,
      Context).
%% Import definitions POSTed as a raw JSON body.
accept_json(ReqData, Context) ->
    accept(wrq:req_body(ReqData), ReqData, Context).
%% Import definitions uploaded from the web UI as multipart form data:
%% the JSON lives in the "file" part; an optional "redirect" part gives
%% a URL to send the browser to on success.
accept_multipart(ReqData, Context) ->
    Parts = webmachine_multipart:get_all_parts(
              wrq:req_body(ReqData),
              webmachine_multipart:find_boundary(ReqData)),
    Redirect = get_part("redirect", Parts),
    Json = get_part("file", Parts),
    Resp = {Res, _, _} = accept(Json, ReqData, Context),
    case Res of
        true ->
            %% Success: optionally redirect, otherwise plain 2xx.
            ReqData1 =
                case Redirect of
                    unknown -> ReqData;
                    _       -> rabbit_mgmt_util:redirect(Redirect, ReqData)
                end,
            {true, ReqData1, Context};
        _ ->
            %% Failure: return the error response from accept/3 as-is.
            Resp
    end.
%% Admin-only resource.  Credentials normally arrive in the
%% Authorization header; a ?auth=<base64> query parameter is also
%% accepted (see below).
is_authorized(ReqData, Context) ->
    case wrq:get_qs_value("auth", ReqData) of
        undefined -> rabbit_mgmt_util:is_authorized_admin(ReqData, Context);
        Auth      -> is_authorized_qs(ReqData, Context, Auth)
    end.
%% Support for the web UI - it can't add a normal "authorization"
%% header for a file download.
is_authorized_qs(ReqData, Context, Auth) ->
    case rabbit_web_dispatch_util:parse_auth_header("Basic " ++ Auth) of
        [Username, Password] -> rabbit_mgmt_util:is_authorized_admin(
                                  ReqData, Context, Username, Password);
        _                    -> {?AUTH_REALM, ReqData, Context}
    end.
%%--------------------------------------------------------------------
%% Apply a definitions document, mapping success/failure onto
%% webmachine responses.
accept(Body, ReqData, Context) ->
    apply_defs(Body, fun() -> {true, ReqData, Context} end,
               fun(E) -> rabbit_mgmt_util:bad_request(E, ReqData, Context) end).
%% Decode Body as JSON and create every listed object, in dependency
%% order (users/vhosts/permissions before queues/exchanges/bindings).
%% Calls SuccessFun() on success; ErrorFun(Reason) on decode failure or
%% on any error thrown/exited while applying an object.
apply_defs(Body, SuccessFun, ErrorFun) ->
    case rabbit_mgmt_util:decode([], Body) of
        {error, E} ->
            ErrorFun(E);
        {ok, _, All} ->
            try
                for_all(users, All, fun add_user/1),
                for_all(vhosts, All, fun add_vhost/1),
                for_all(permissions, All, fun add_permission/1),
                for_all(parameters, All, fun add_parameter/1),
                for_all(policies, All, fun add_policy/1),
                for_all(queues, All, fun add_queue/1),
                for_all(exchanges, All, fun add_exchange/1),
                for_all(bindings, All, fun add_binding/1),
                SuccessFun()
            catch {error, E} -> ErrorFun(format(E));
                  exit:E     -> ErrorFun(format(E))
            end
    end.
%% Render an error term as a binary for the HTTP error response;
%% AMQP errors get their name and explanation, others ~p.
format(#amqp_error{name = Name, explanation = Explanation}) ->
    list_to_binary(rabbit_misc:format("~s: ~s", [Name, Explanation]));
format(E) ->
    list_to_binary(rabbit_misc:format("~p", [E])).
%% Return the value of the multipart part named Name, or 'unknown' if
%% no such part exists.  Uses lists:keyfind/3, resolving the TODO in
%% the original: one pass over the list, and -- unlike the old
%% list-comprehension-plus-case version, which raised a case_clause
%% error when several parts shared a name -- duplicates are tolerated,
%% with the first matching part winning.
get_part(Name, Parts) ->
    case lists:keyfind(Name, 1, Parts) of
        false               -> unknown;
        {_, _Meta, Value}   -> Value
    end.
%% Only export queues with no owner (exclusive queues die with their
%% connection, so exporting them is pointless).
export_queue(Queue) ->
    pget(owner_pid, Queue) == none.
%% Export a binding only when both endpoints will exist after import:
%% the source is a named (non-default) exchange, and the destination is
%% either an exported queue (present in Qs) or a named exchange.
export_binding(Binding, Qs) ->
    Src = pget(source, Binding),
    Dest = pget(destination, Binding),
    DestType = pget(destination_type, Binding),
    VHost = pget(vhost, Binding),
    Src =/= <<"">>
        andalso
          ( (DestType =:= queue andalso lists:member({Dest, VHost}, Qs))
            orelse (DestType =:= exchange andalso Dest =/= <<"">>) ).
export_exchange(Exchange) ->
    export_name(pget(name, Exchange)).
%% Skip the default exchange and the broker-predeclared amq.* ones.
export_name(<<>>)                 -> false;
export_name(<<"amq.", _/binary>>) -> false;
export_name(_Name)                -> true.
%%--------------------------------------------------------------------
%% Whitelist of keys exported per object type; anything else (rates,
%% runtime statistics, etc.) is stripped from the dump.
rw_state() ->
    [{users, [name, password_hash, tags]},
     {vhosts, [name]},
     {permissions, [user, vhost, configure, write, read]},
     {parameters, [vhost, component, name, value]},
     {policies, [vhost, name, pattern, definition, priority, 'apply-to']},
     {queues, [name, vhost, durable, auto_delete, arguments]},
     {exchanges, [name, vhost, type, durable, auto_delete, internal,
                  arguments]},
     {bindings, [source, vhost, destination, destination_type, routing_key,
                 arguments]}].
%% Apply the whitelist to each {Type, Items} section.
filter(Items) ->
    [filter_items(N, V, proplists:get_value(N, rw_state())) || {N, V} <- Items].
filter_items(Name, List, Allowed) ->
    {Name, [filter_item(I, Allowed) || I <- List]}.
filter_item(Item, Allowed) ->
    [{K, Fact} || {K, Fact} <- Item, lists:member(K, Allowed)].
%% Run Fun over every object in section Name of the decoded document
%% (a no-op when the section is absent), converting each object's
%% binary keys to atoms first.
for_all(Name, All, Fun) ->
    case pget(Name, All) of
        undefined -> ok;
        List      -> [Fun([{atomise_name(K), V} || {K, V} <- I]) ||
                         {struct, I} <- List]
    end.
%% NOTE(review): list_to_atom on document-supplied keys grows the atom
%% table; admin-only access limits the exposure here.
atomise_name(N) -> list_to_atom(binary_to_list(N)).
%%--------------------------------------------------------------------
%% Create one runtime parameter; an {error_string, E} result is turned
%% into an exit (caught in apply_defs) with the vhost/component/key
%% appended for context.
add_parameter(Param) ->
    VHost = pget(vhost, Param),
    Comp = pget(component, Param),
    Key = pget(name, Param),
    Term = rabbit_misc:json_to_term(pget(value, Param)),
    case rabbit_runtime_parameters:set(VHost, Comp, Key, Term, none) of
        ok                -> ok;
        {error_string, E} -> S = rabbit_misc:format(" (~s/~s/~s)",
                                                    [VHost, Comp, Key]),
                             exit(list_to_binary(E ++ S))
    end.
%% Create one policy; 'apply-to' defaults to <<"all">> when absent.
add_policy(Param) ->
    VHost = pget(vhost, Param),
    Key = pget(name, Param),
    case rabbit_policy:set(
           VHost, Key, pget(pattern, Param),
           rabbit_misc:json_to_term(pget(definition, Param)),
           pget(priority, Param),
           pget('apply-to', Param, <<"all">>)) of
        ok                -> ok;
        {error_string, E} -> S = rabbit_misc:format(" (~s/~s)", [VHost, Key]),
                             exit(list_to_binary(E ++ S))
    end.
%% Object creators, one per section of the definitions document.
add_user(User) ->
    rabbit_mgmt_wm_user:put_user(User).
add_vhost(VHost) ->
    VHostName = pget(name, VHost),
    VHostTrace = pget(tracing, VHost),
    rabbit_mgmt_wm_vhost:put_vhost(VHostName, VHostTrace).
add_permission(Permission) ->
    rabbit_auth_backend_internal:set_permissions(pget(user, Permission),
                                                 pget(vhost, Permission),
                                                 pget(configure, Permission),
                                                 pget(write, Permission),
                                                 pget(read, Permission)).
%% Declare a queue with no owner pid (last argument 'none'), so it is
%% never exclusive to the importing connection.
add_queue(Queue) ->
    rabbit_amqqueue:declare(r(queue, Queue),
                            pget(durable, Queue),
                            pget(auto_delete, Queue),
                            rabbit_mgmt_util:args(pget(arguments, Queue)),
                            none).
%% Declare one exchange from the definitions document.  Dumps produced
%% by brokers =< 2.2.0 predate the 'internal' flag and omit the key, so
%% an absent value defaults to false.  (This restores the
%% `undefined -> false' clause that was lost to comment-stripping in
%% this copy of the file, which left the case expression malformed.)
add_exchange(Exchange) ->
    Internal = case pget(internal, Exchange) of
                   undefined -> false; %% =< 2.2.0
                   I         -> I
               end,
    rabbit_exchange:declare(r(exchange, Exchange),
                            rabbit_exchange:check_type(pget(type, Exchange)),
                            pget(durable, Exchange),
                            pget(auto_delete, Exchange),
                            Internal,
                            rabbit_mgmt_util:args(pget(arguments, Exchange))).
%% Create one binding; destination_type arrives as a binary ("queue" or
%% "exchange") and is converted to the atom the #binding record needs.
add_binding(Binding) ->
    DestType = list_to_atom(binary_to_list(pget(destination_type, Binding))),
    rabbit_binding:add(
      #binding{source      = r(exchange, source, Binding),
               destination = r(DestType, destination, Binding),
               key         = pget(routing_key, Binding),
               args        = rabbit_mgmt_util:args(pget(arguments, Binding))}).
%% Build a #resource{} name of the given Type from a proplist, taking
%% the vhost from the proplist and the name from key Name.
r(Type, Props) -> r(Type, name, Props).
r(Type, Name, Props) ->
    rabbit_misc:r(pget(vhost, Props), Type, pget(Name, Props)).
| null | https://raw.githubusercontent.com/DavidAlphaFox/RabbitMQ/0a64e6f0464a9a4ce85c6baa52fb1c584689f49a/plugins-src/rabbitmq-management/src/rabbit_mgmt_wm_definitions.erl | erlang | compliance with the License. You may obtain a copy of the License at
/
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
License for the specific language governing rights and limitations
under the License.
--------------------------------------------------------------------
Support for the web UI - it can't add a normal "authorization"
header for a file download.
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
-------------------------------------------------------------------- | The contents of this file are subject to the Mozilla Public License
Version 1.1 ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
The Original Code is RabbitMQ Management Plugin .
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2010 - 2014 GoPivotal , Inc. All rights reserved .
-module(rabbit_mgmt_wm_definitions).
-export([init/1, to_json/2, content_types_provided/2, is_authorized/2]).
-export([content_types_accepted/2, allowed_methods/2, accept_json/2]).
-export([post_is_create/2, create_path/2, accept_multipart/2]).
-export([apply_defs/3]).
-import(rabbit_misc, [pget/2, pget/3]).
-include("rabbit_mgmt.hrl").
-include_lib("webmachine/include/webmachine.hrl").
-include_lib("amqp_client/include/amqp_client.hrl").
init(_Config) -> {ok, #context{}}.
content_types_provided(ReqData, Context) ->
{[{"application/json", to_json}], ReqData, Context}.
content_types_accepted(ReqData, Context) ->
{[{"application/json", accept_json},
{"multipart/form-data", accept_multipart}], ReqData, Context}.
allowed_methods(ReqData, Context) ->
{['HEAD', 'GET', 'POST'], ReqData, Context}.
post_is_create(ReqData, Context) ->
{true, ReqData, Context}.
create_path(ReqData, Context) ->
{"dummy", ReqData, Context}.
to_json(ReqData, Context) ->
Xs = [X || X <- rabbit_mgmt_wm_exchanges:basic(ReqData),
export_exchange(X)],
Qs = [Q || Q <- rabbit_mgmt_wm_queues:basic(ReqData),
export_queue(Q)],
QNames = [{pget(name, Q), pget(vhost, Q)} || Q <- Qs],
Bs = [B || B <- rabbit_mgmt_wm_bindings:basic(ReqData),
export_binding(B, QNames)],
{ok, Vsn} = application:get_key(rabbit, vsn),
rabbit_mgmt_util:reply(
[{rabbit_version, list_to_binary(Vsn)}] ++
filter(
[{users, rabbit_mgmt_wm_users:users()},
{vhosts, rabbit_mgmt_wm_vhosts:basic()},
{permissions, rabbit_mgmt_wm_permissions:permissions()},
{parameters, rabbit_mgmt_wm_parameters:basic(ReqData)},
{policies, rabbit_mgmt_wm_policies:basic(ReqData)},
{queues, Qs},
{exchanges, Xs},
{bindings, Bs}]),
case wrq:get_qs_value("download", ReqData) of
undefined -> ReqData;
Filename -> rabbit_mgmt_util:set_resp_header(
"Content-Disposition",
"attachment; filename=" ++
mochiweb_util:unquote(Filename), ReqData)
end,
Context).
accept_json(ReqData, Context) ->
accept(wrq:req_body(ReqData), ReqData, Context).
accept_multipart(ReqData, Context) ->
Parts = webmachine_multipart:get_all_parts(
wrq:req_body(ReqData),
webmachine_multipart:find_boundary(ReqData)),
Redirect = get_part("redirect", Parts),
Json = get_part("file", Parts),
Resp = {Res, _, _} = accept(Json, ReqData, Context),
case Res of
true ->
ReqData1 =
case Redirect of
unknown -> ReqData;
_ -> rabbit_mgmt_util:redirect(Redirect, ReqData)
end,
{true, ReqData1, Context};
_ ->
Resp
end.
is_authorized(ReqData, Context) ->
case wrq:get_qs_value("auth", ReqData) of
undefined -> rabbit_mgmt_util:is_authorized_admin(ReqData, Context);
Auth -> is_authorized_qs(ReqData, Context, Auth)
end.
is_authorized_qs(ReqData, Context, Auth) ->
case rabbit_web_dispatch_util:parse_auth_header("Basic " ++ Auth) of
[Username, Password] -> rabbit_mgmt_util:is_authorized_admin(
ReqData, Context, Username, Password);
_ -> {?AUTH_REALM, ReqData, Context}
end.
accept(Body, ReqData, Context) ->
apply_defs(Body, fun() -> {true, ReqData, Context} end,
fun(E) -> rabbit_mgmt_util:bad_request(E, ReqData, Context) end).
apply_defs(Body, SuccessFun, ErrorFun) ->
case rabbit_mgmt_util:decode([], Body) of
{error, E} ->
ErrorFun(E);
{ok, _, All} ->
try
for_all(users, All, fun add_user/1),
for_all(vhosts, All, fun add_vhost/1),
for_all(permissions, All, fun add_permission/1),
for_all(parameters, All, fun add_parameter/1),
for_all(policies, All, fun add_policy/1),
for_all(queues, All, fun add_queue/1),
for_all(exchanges, All, fun add_exchange/1),
for_all(bindings, All, fun add_binding/1),
SuccessFun()
catch {error, E} -> ErrorFun(format(E));
exit:E -> ErrorFun(format(E))
end
end.
format(#amqp_error{name = Name, explanation = Explanation}) ->
list_to_binary(rabbit_misc:format("~s: ~s", [Name, Explanation]));
format(E) ->
list_to_binary(rabbit_misc:format("~p", [E])).
get_part(Name, Parts) ->
TODO any reason not to use lists : instead ?
Filtered = [Value || {N, _Meta, Value} <- Parts, N == Name],
case Filtered of
[] -> unknown;
[F] -> F
end.
export_queue(Queue) ->
pget(owner_pid, Queue) == none.
export_binding(Binding, Qs) ->
Src = pget(source, Binding),
Dest = pget(destination, Binding),
DestType = pget(destination_type, Binding),
VHost = pget(vhost, Binding),
Src =/= <<"">>
andalso
( (DestType =:= queue andalso lists:member({Dest, VHost}, Qs))
orelse (DestType =:= exchange andalso Dest =/= <<"">>) ).
export_exchange(Exchange) ->
export_name(pget(name, Exchange)).
export_name(<<>>) -> false;
export_name(<<"amq.", _/binary>>) -> false;
export_name(_Name) -> true.
rw_state() ->
[{users, [name, password_hash, tags]},
{vhosts, [name]},
{permissions, [user, vhost, configure, write, read]},
{parameters, [vhost, component, name, value]},
{policies, [vhost, name, pattern, definition, priority, 'apply-to']},
{queues, [name, vhost, durable, auto_delete, arguments]},
{exchanges, [name, vhost, type, durable, auto_delete, internal,
arguments]},
{bindings, [source, vhost, destination, destination_type, routing_key,
arguments]}].
filter(Items) ->
[filter_items(N, V, proplists:get_value(N, rw_state())) || {N, V} <- Items].
filter_items(Name, List, Allowed) ->
{Name, [filter_item(I, Allowed) || I <- List]}.
filter_item(Item, Allowed) ->
[{K, Fact} || {K, Fact} <- Item, lists:member(K, Allowed)].
for_all(Name, All, Fun) ->
case pget(Name, All) of
undefined -> ok;
List -> [Fun([{atomise_name(K), V} || {K, V} <- I]) ||
{struct, I} <- List]
end.
atomise_name(N) -> list_to_atom(binary_to_list(N)).
add_parameter(Param) ->
VHost = pget(vhost, Param),
Comp = pget(component, Param),
Key = pget(name, Param),
Term = rabbit_misc:json_to_term(pget(value, Param)),
case rabbit_runtime_parameters:set(VHost, Comp, Key, Term, none) of
ok -> ok;
{error_string, E} -> S = rabbit_misc:format(" (~s/~s/~s)",
[VHost, Comp, Key]),
exit(list_to_binary(E ++ S))
end.
add_policy(Param) ->
VHost = pget(vhost, Param),
Key = pget(name, Param),
case rabbit_policy:set(
VHost, Key, pget(pattern, Param),
rabbit_misc:json_to_term(pget(definition, Param)),
pget(priority, Param),
pget('apply-to', Param, <<"all">>)) of
ok -> ok;
{error_string, E} -> S = rabbit_misc:format(" (~s/~s)", [VHost, Key]),
exit(list_to_binary(E ++ S))
end.
add_user(User) ->
rabbit_mgmt_wm_user:put_user(User).
add_vhost(VHost) ->
VHostName = pget(name, VHost),
VHostTrace = pget(tracing, VHost),
rabbit_mgmt_wm_vhost:put_vhost(VHostName, VHostTrace).
add_permission(Permission) ->
rabbit_auth_backend_internal:set_permissions(pget(user, Permission),
pget(vhost, Permission),
pget(configure, Permission),
pget(write, Permission),
pget(read, Permission)).
add_queue(Queue) ->
rabbit_amqqueue:declare(r(queue, Queue),
pget(durable, Queue),
pget(auto_delete, Queue),
rabbit_mgmt_util:args(pget(arguments, Queue)),
none).
add_exchange(Exchange) ->
Internal = case pget(internal, Exchange) of
= < 2.2.0
I -> I
end,
rabbit_exchange:declare(r(exchange, Exchange),
rabbit_exchange:check_type(pget(type, Exchange)),
pget(durable, Exchange),
pget(auto_delete, Exchange),
Internal,
rabbit_mgmt_util:args(pget(arguments, Exchange))).
add_binding(Binding) ->
DestType = list_to_atom(binary_to_list(pget(destination_type, Binding))),
rabbit_binding:add(
#binding{source = r(exchange, source, Binding),
destination = r(DestType, destination, Binding),
key = pget(routing_key, Binding),
args = rabbit_mgmt_util:args(pget(arguments, Binding))}).
r(Type, Props) -> r(Type, name, Props).
r(Type, Name, Props) ->
rabbit_misc:r(pget(vhost, Props), Type, pget(Name, Props)).
|
486cca85404a3dcce68b6f6638ff58a7a01c0120299e50e6bdf81197ba4733cc | metosin/komponentit | core.cljs | (ns example.core
(:require [clojure.string :as str]))
(defn header [module module-name]
(str
"# "
module-name
"\n## [view source]("
"/" (name module) ".cljs"
"), [view example source]("
"-src/cljs/example/" (name module) ".cljs"
")\n"))
| null | https://raw.githubusercontent.com/metosin/komponentit/d962ce1d69ccc3800db0d6b4fc18fc2fd30b494a/example-src/cljs/example/core.cljs | clojure | (ns example.core
(:require [clojure.string :as str]))
(defn header [module module-name]
(str
"# "
module-name
"\n## [view source]("
"/" (name module) ".cljs"
"), [view example source]("
"-src/cljs/example/" (name module) ".cljs"
")\n"))
| |
2dab33215a78fed768333857e953c6809cd0126fa47db13a91603aa6b1e6d1ba | haskell-suite/haskell-src-exts | ForeignImportJavascript.hs | # LANGUAGE JavascriptFFI #
foreign import javascript unsafe "somethingUseful_ = $1"
js_set_somethingUseful :: JSFun a -> IO ()
| null | https://raw.githubusercontent.com/haskell-suite/haskell-src-exts/84a4930e0e5c051b7d9efd20ef7c822d5fc1c33b/tests/examples/ForeignImportJavascript.hs | haskell | # LANGUAGE JavascriptFFI #
foreign import javascript unsafe "somethingUseful_ = $1"
js_set_somethingUseful :: JSFun a -> IO ()
| |
dd93cf09768e97e1518894349d0d48aedf77958140406a0e7e47f577b1b3e079 | WorksHub/client | views.cljs | (ns wh.admin.tags.views
(:require
[reagent.core :as r]
[wh.admin.tags.events :as events]
[wh.admin.tags.subs :as subs]
[wh.common.text :as text]
[wh.common.subs]
[wh.components.forms.views :refer [text-field select-field]]
[wh.components.icons :refer [icon]]
[wh.components.not-found :as not-found]
[wh.components.pagination :as pagination]
[wh.components.tag :as tag]
[wh.re-frame.events :refer [dispatch dispatch-sync]]
[wh.re-frame.subs :refer [<sub]]))
(defn tag-row
[_tag]
(let [temp-label (r/atom nil)
temp-weight (r/atom nil)
editing? (r/atom false)]
(fn [{:keys [_id label slug type subtype weight] :as tag}]
[:div.edit-tags__tag-row
{:class (when @editing? "edit-tags__tag-row--editing")}
[:div.edit-tags__tag-row__primary
[:div.is-flex.is-full-width
[:ul.tags [tag/tag :li tag]]
[:span (pr-str (dissoc tag :id))]]
[:a.a--underlined
{:on-click #(swap! editing? not)}
(if @editing? "Hide" "Edit")]]
(when @editing?
[:div.edit-tags__tag-row__editable
[:div.is-flex.tag-label [:strong "Label"] [text-field (or @temp-label label)
{:on-change #(reset! temp-label %)}]
(when (and @temp-label (not= @temp-label label))
[:div.edit-tags__tag-row__info__icons
[icon "tick"
:on-click #(dispatch [::events/set-tag-label tag @temp-label])]
[icon "close"
:on-click #(reset! temp-label nil)]])]
[:div.is-flex.tag-type [:strong "Type"] [select-field type
{:options (<sub [::subs/tag-types])
:on-change [::events/set-tag-type tag]}]]
(when-let [subtypes (<sub [::subs/tag-subtypes type])]
[:div.is-flex.tag-subtype [:strong "Subtype"] [select-field subtype
{:options subtypes
:on-change [::events/set-tag-subtype tag]}]])
[:div.is-flex.tag-slug [:strong "Slug"] [text-field slug
{:read-only true}]]
[:div.is-flex.tag-weight [:strong "Weight"] [text-field (or @temp-weight weight)
{:type :number
:maxv 1.0
:minv 0.0
:step 0.001
:on-change #(reset! temp-weight (or % ""))}]
(when (and @temp-weight
(text/not-blank @temp-weight)
(not= @temp-weight weight))
[:div.edit-tags__tag-row__info__icons
[icon "tick"
:on-click #(dispatch [::events/set-tag-weight tag @temp-weight])]
[icon "close"
:on-click #(reset! temp-weight nil)]])]
[:div.is-flex.tag-weight-slider [:strong ""]
[:input {:type :range
:min 0.0
:max 1.0
:step 0.001
:value (or @temp-weight weight)
:on-change #(reset! temp-weight (js/parseFloat (.. % -target -value)))}]]])])))
(def page-limit 30)
(defn main []
(fn []
(let [all-tags (<sub [::subs/all-tags])
query-params (<sub [:wh/query-params])
current-page (js/parseInt (get query-params "page" "1"))
{:keys [tags total]} (<sub [::subs/all-tags--filtered current-page page-limit])]
[:div.main.edit-tags
[:h1 "Edit Tags"]
[:section
[:form.wh-formx.is-flex
[:div.text-field-control
[:input.input
{:name "search"
:type "text"
:autoComplete "off"
:placeholder "Search tags..."
:value (<sub [::subs/search-term])
:on-change #(dispatch-sync [::events/set-search-term (-> % .-target .-value)])}]]
[select-field (<sub [::subs/type-filter])
{:options (into [{:id nil :label "All tags"}] (<sub [::subs/tag-types]))
:on-change [::events/set-type-filter]}]]]
[:span "Showing " (inc (* (dec current-page) page-limit)) "-" (min (* current-page page-limit)
(count all-tags)) " of " (count all-tags)]
[:section.edit-tags__list
(if all-tags
(if (not-empty tags)
[:ul
(doall
(for [tag tags]
^{:key (:id tag)}
[:li [tag-row tag]]))]
[:h2 "No matching tags"])
[:h2 "Loading..."])]
(when (and (not-empty tags) (> total page-limit))
[pagination/pagination
current-page
(pagination/generate-pagination current-page (int (js/Math.ceil (/ total page-limit))))
:tags-edit
query-params])])))
(defn page []
(if (<sub [:user/admin?])
[main]
[:div.dashboard
[not-found/not-found-page]]))
| null | https://raw.githubusercontent.com/WorksHub/client/77e4212a69dad049a9e784143915058acd918982/client/src/wh/admin/tags/views.cljs | clojure | (ns wh.admin.tags.views
(:require
[reagent.core :as r]
[wh.admin.tags.events :as events]
[wh.admin.tags.subs :as subs]
[wh.common.text :as text]
[wh.common.subs]
[wh.components.forms.views :refer [text-field select-field]]
[wh.components.icons :refer [icon]]
[wh.components.not-found :as not-found]
[wh.components.pagination :as pagination]
[wh.components.tag :as tag]
[wh.re-frame.events :refer [dispatch dispatch-sync]]
[wh.re-frame.subs :refer [<sub]]))
(defn tag-row
[_tag]
(let [temp-label (r/atom nil)
temp-weight (r/atom nil)
editing? (r/atom false)]
(fn [{:keys [_id label slug type subtype weight] :as tag}]
[:div.edit-tags__tag-row
{:class (when @editing? "edit-tags__tag-row--editing")}
[:div.edit-tags__tag-row__primary
[:div.is-flex.is-full-width
[:ul.tags [tag/tag :li tag]]
[:span (pr-str (dissoc tag :id))]]
[:a.a--underlined
{:on-click #(swap! editing? not)}
(if @editing? "Hide" "Edit")]]
(when @editing?
[:div.edit-tags__tag-row__editable
[:div.is-flex.tag-label [:strong "Label"] [text-field (or @temp-label label)
{:on-change #(reset! temp-label %)}]
(when (and @temp-label (not= @temp-label label))
[:div.edit-tags__tag-row__info__icons
[icon "tick"
:on-click #(dispatch [::events/set-tag-label tag @temp-label])]
[icon "close"
:on-click #(reset! temp-label nil)]])]
[:div.is-flex.tag-type [:strong "Type"] [select-field type
{:options (<sub [::subs/tag-types])
:on-change [::events/set-tag-type tag]}]]
(when-let [subtypes (<sub [::subs/tag-subtypes type])]
[:div.is-flex.tag-subtype [:strong "Subtype"] [select-field subtype
{:options subtypes
:on-change [::events/set-tag-subtype tag]}]])
[:div.is-flex.tag-slug [:strong "Slug"] [text-field slug
{:read-only true}]]
[:div.is-flex.tag-weight [:strong "Weight"] [text-field (or @temp-weight weight)
{:type :number
:maxv 1.0
:minv 0.0
:step 0.001
:on-change #(reset! temp-weight (or % ""))}]
(when (and @temp-weight
(text/not-blank @temp-weight)
(not= @temp-weight weight))
[:div.edit-tags__tag-row__info__icons
[icon "tick"
:on-click #(dispatch [::events/set-tag-weight tag @temp-weight])]
[icon "close"
:on-click #(reset! temp-weight nil)]])]
[:div.is-flex.tag-weight-slider [:strong ""]
[:input {:type :range
:min 0.0
:max 1.0
:step 0.001
:value (or @temp-weight weight)
:on-change #(reset! temp-weight (js/parseFloat (.. % -target -value)))}]]])])))
(def page-limit 30)
(defn main []
(fn []
(let [all-tags (<sub [::subs/all-tags])
query-params (<sub [:wh/query-params])
current-page (js/parseInt (get query-params "page" "1"))
{:keys [tags total]} (<sub [::subs/all-tags--filtered current-page page-limit])]
[:div.main.edit-tags
[:h1 "Edit Tags"]
[:section
[:form.wh-formx.is-flex
[:div.text-field-control
[:input.input
{:name "search"
:type "text"
:autoComplete "off"
:placeholder "Search tags..."
:value (<sub [::subs/search-term])
:on-change #(dispatch-sync [::events/set-search-term (-> % .-target .-value)])}]]
[select-field (<sub [::subs/type-filter])
{:options (into [{:id nil :label "All tags"}] (<sub [::subs/tag-types]))
:on-change [::events/set-type-filter]}]]]
[:span "Showing " (inc (* (dec current-page) page-limit)) "-" (min (* current-page page-limit)
(count all-tags)) " of " (count all-tags)]
[:section.edit-tags__list
(if all-tags
(if (not-empty tags)
[:ul
(doall
(for [tag tags]
^{:key (:id tag)}
[:li [tag-row tag]]))]
[:h2 "No matching tags"])
[:h2 "Loading..."])]
(when (and (not-empty tags) (> total page-limit))
[pagination/pagination
current-page
(pagination/generate-pagination current-page (int (js/Math.ceil (/ total page-limit))))
:tags-edit
query-params])])))
(defn page []
(if (<sub [:user/admin?])
[main]
[:div.dashboard
[not-found/not-found-page]]))
| |
697ae7d569507eeddc3b9620af743c30002fdf83eb74f38273ddc7c1bd6185d3 | marigold-dev/deku | v128.mli | (* Types *)
type t
type bits = string
type ('i8x16, 'i16x8, 'i32x4, 'i64x2, 'f32x4, 'f64x2) laneop =
| I8x16 of 'i8x16
| I16x8 of 'i16x8
| I32x4 of 'i32x4
| I64x2 of 'i64x2
| F32x4 of 'f32x4
| F64x2 of 'f64x2
type shape = (unit, unit, unit, unit, unit, unit) laneop
(* Basics *)
val bitwidth : int
val num_lanes : ('a, 'b, 'c, 'd, 'e, 'f) laneop -> int
val type_of_lane : ('a, 'b, 'c, 'd, 'e, 'f) laneop -> Types.num_type
val string_of_shape : ('a, 'b, 'c, 'd, 'e, 'f) laneop -> string
val zero : t
val of_bits : bits -> t
val to_bits : t -> bits
(* String conversion *)
val to_string : t -> string
val to_hex_string : t -> string
val of_strings : shape -> string list -> t
(* Shape-based operations *)
module type IntShape = sig
type lane
val num_lanes : int
val to_lanes : t -> lane list
val of_lanes : lane list -> t
val splat : lane -> t
val extract_lane_s : int -> t -> lane
val extract_lane_u : int -> t -> lane
val replace_lane : int -> t -> lane -> t
val eq : t -> t -> t
val ne : t -> t -> t
val lt_s : t -> t -> t
val lt_u : t -> t -> t
val le_s : t -> t -> t
val le_u : t -> t -> t
val gt_s : t -> t -> t
val gt_u : t -> t -> t
val ge_s : t -> t -> t
val ge_u : t -> t -> t
val abs : t -> t
val neg : t -> t
val popcnt : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val min_s : t -> t -> t
val min_u : t -> t -> t
val max_s : t -> t -> t
val max_u : t -> t -> t
val mul : t -> t -> t
val avgr_u : t -> t -> t
val any_true : t -> bool
val all_true : t -> bool
val bitmask : t -> Int32.t
val shl : t -> I32.t -> t
val shr_s : t -> I32.t -> t
val shr_u : t -> I32.t -> t
val add_sat_s : t -> t -> t
val add_sat_u : t -> t -> t
val sub_sat_s : t -> t -> t
val sub_sat_u : t -> t -> t
val q15mulr_sat_s : t -> t -> t
end
module type FloatShape = sig
type lane
val num_lanes : int
val to_lanes : t -> lane list
val of_lanes : lane list -> t
val splat : lane -> t
val extract_lane : int -> t -> lane
val replace_lane : int -> t -> lane -> t
val eq : t -> t -> t
val ne : t -> t -> t
val lt : t -> t -> t
val le : t -> t -> t
val gt : t -> t -> t
val ge : t -> t -> t
val abs : t -> t
val neg : t -> t
val sqrt : t -> t
val ceil : t -> t
val floor : t -> t
val trunc : t -> t
val nearest : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val min : t -> t -> t
val max : t -> t -> t
val pmin : t -> t -> t
val pmax : t -> t -> t
end
module I8x16 : IntShape with type lane = I8.t
module I16x8 : IntShape with type lane = I16.t
module I32x4 : IntShape with type lane = I32.t
module I64x2 : IntShape with type lane = I64.t
module F32x4 : FloatShape with type lane = F32.t
module F64x2 : FloatShape with type lane = F64.t
(* Special shapes *)
module V1x128 : sig
val lognot : t -> t
val and_ : t -> t -> t
val or_ : t -> t -> t
val xor : t -> t -> t
val andnot : t -> t -> t
val bitselect : t -> t -> t -> t
end
module V8x16 : sig
val swizzle : t -> t -> t
val shuffle : t -> t -> int list -> t
end
(* Conversions *)
module I8x16_convert : sig
val narrow_s : t -> t -> t
val narrow_u : t -> t -> t
end
module I16x8_convert : sig
val narrow_s : t -> t -> t
val narrow_u : t -> t -> t
val extend_low_s : t -> t
val extend_high_s : t -> t
val extend_low_u : t -> t
val extend_high_u : t -> t
val extmul_low_s : t -> t -> t
val extmul_high_s : t -> t -> t
val extmul_low_u : t -> t -> t
val extmul_high_u : t -> t -> t
val extadd_pairwise_s : t -> t
val extadd_pairwise_u : t -> t
end
module I32x4_convert : sig
val trunc_sat_f32x4_s : t -> t
val trunc_sat_f32x4_u : t -> t
val trunc_sat_f64x2_s_zero : t -> t
val trunc_sat_f64x2_u_zero : t -> t
val extend_low_s : t -> t
val extend_high_s : t -> t
val extend_low_u : t -> t
val extend_high_u : t -> t
val dot_s : t -> t -> t
val extmul_low_s : t -> t -> t
val extmul_high_s : t -> t -> t
val extmul_low_u : t -> t -> t
val extmul_high_u : t -> t -> t
val extadd_pairwise_s : t -> t
val extadd_pairwise_u : t -> t
end
module I64x2_convert : sig
val extend_low_s : t -> t
val extend_high_s : t -> t
val extend_low_u : t -> t
val extend_high_u : t -> t
val extmul_low_s : t -> t -> t
val extmul_high_s : t -> t -> t
val extmul_low_u : t -> t -> t
val extmul_high_u : t -> t -> t
end
module F32x4_convert : sig
val convert_i32x4_s : t -> t
val convert_i32x4_u : t -> t
val demote_f64x2_zero : t -> t
end
module F64x2_convert : sig
val promote_low_f32x4 : t -> t
val convert_i32x4_s : t -> t
val convert_i32x4_u : t -> t
end
| null | https://raw.githubusercontent.com/marigold-dev/deku/a26f31e0560ad12fd86cf7fa4667bb147247c7ef/deku-c/interpreter/exec/v128.mli | ocaml | Types
Basics
String conversion
Shape-based operations
Special shapes
Conversions |
type t
type bits = string
type ('i8x16, 'i16x8, 'i32x4, 'i64x2, 'f32x4, 'f64x2) laneop =
| I8x16 of 'i8x16
| I16x8 of 'i16x8
| I32x4 of 'i32x4
| I64x2 of 'i64x2
| F32x4 of 'f32x4
| F64x2 of 'f64x2
type shape = (unit, unit, unit, unit, unit, unit) laneop
val bitwidth : int
val num_lanes : ('a, 'b, 'c, 'd, 'e, 'f) laneop -> int
val type_of_lane : ('a, 'b, 'c, 'd, 'e, 'f) laneop -> Types.num_type
val string_of_shape : ('a, 'b, 'c, 'd, 'e, 'f) laneop -> string
val zero : t
val of_bits : bits -> t
val to_bits : t -> bits
val to_string : t -> string
val to_hex_string : t -> string
val of_strings : shape -> string list -> t
module type IntShape = sig
type lane
val num_lanes : int
val to_lanes : t -> lane list
val of_lanes : lane list -> t
val splat : lane -> t
val extract_lane_s : int -> t -> lane
val extract_lane_u : int -> t -> lane
val replace_lane : int -> t -> lane -> t
val eq : t -> t -> t
val ne : t -> t -> t
val lt_s : t -> t -> t
val lt_u : t -> t -> t
val le_s : t -> t -> t
val le_u : t -> t -> t
val gt_s : t -> t -> t
val gt_u : t -> t -> t
val ge_s : t -> t -> t
val ge_u : t -> t -> t
val abs : t -> t
val neg : t -> t
val popcnt : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val min_s : t -> t -> t
val min_u : t -> t -> t
val max_s : t -> t -> t
val max_u : t -> t -> t
val mul : t -> t -> t
val avgr_u : t -> t -> t
val any_true : t -> bool
val all_true : t -> bool
val bitmask : t -> Int32.t
val shl : t -> I32.t -> t
val shr_s : t -> I32.t -> t
val shr_u : t -> I32.t -> t
val add_sat_s : t -> t -> t
val add_sat_u : t -> t -> t
val sub_sat_s : t -> t -> t
val sub_sat_u : t -> t -> t
val q15mulr_sat_s : t -> t -> t
end
module type FloatShape = sig
type lane
val num_lanes : int
val to_lanes : t -> lane list
val of_lanes : lane list -> t
val splat : lane -> t
val extract_lane : int -> t -> lane
val replace_lane : int -> t -> lane -> t
val eq : t -> t -> t
val ne : t -> t -> t
val lt : t -> t -> t
val le : t -> t -> t
val gt : t -> t -> t
val ge : t -> t -> t
val abs : t -> t
val neg : t -> t
val sqrt : t -> t
val ceil : t -> t
val floor : t -> t
val trunc : t -> t
val nearest : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val min : t -> t -> t
val max : t -> t -> t
val pmin : t -> t -> t
val pmax : t -> t -> t
end
module I8x16 : IntShape with type lane = I8.t
module I16x8 : IntShape with type lane = I16.t
module I32x4 : IntShape with type lane = I32.t
module I64x2 : IntShape with type lane = I64.t
module F32x4 : FloatShape with type lane = F32.t
module F64x2 : FloatShape with type lane = F64.t
module V1x128 : sig
val lognot : t -> t
val and_ : t -> t -> t
val or_ : t -> t -> t
val xor : t -> t -> t
val andnot : t -> t -> t
val bitselect : t -> t -> t -> t
end
module V8x16 : sig
val swizzle : t -> t -> t
val shuffle : t -> t -> int list -> t
end
module I8x16_convert : sig
val narrow_s : t -> t -> t
val narrow_u : t -> t -> t
end
module I16x8_convert : sig
val narrow_s : t -> t -> t
val narrow_u : t -> t -> t
val extend_low_s : t -> t
val extend_high_s : t -> t
val extend_low_u : t -> t
val extend_high_u : t -> t
val extmul_low_s : t -> t -> t
val extmul_high_s : t -> t -> t
val extmul_low_u : t -> t -> t
val extmul_high_u : t -> t -> t
val extadd_pairwise_s : t -> t
val extadd_pairwise_u : t -> t
end
module I32x4_convert : sig
val trunc_sat_f32x4_s : t -> t
val trunc_sat_f32x4_u : t -> t
val trunc_sat_f64x2_s_zero : t -> t
val trunc_sat_f64x2_u_zero : t -> t
val extend_low_s : t -> t
val extend_high_s : t -> t
val extend_low_u : t -> t
val extend_high_u : t -> t
val dot_s : t -> t -> t
val extmul_low_s : t -> t -> t
val extmul_high_s : t -> t -> t
val extmul_low_u : t -> t -> t
val extmul_high_u : t -> t -> t
val extadd_pairwise_s : t -> t
val extadd_pairwise_u : t -> t
end
module I64x2_convert : sig
val extend_low_s : t -> t
val extend_high_s : t -> t
val extend_low_u : t -> t
val extend_high_u : t -> t
val extmul_low_s : t -> t -> t
val extmul_high_s : t -> t -> t
val extmul_low_u : t -> t -> t
val extmul_high_u : t -> t -> t
end
module F32x4_convert : sig
val convert_i32x4_s : t -> t
val convert_i32x4_u : t -> t
val demote_f64x2_zero : t -> t
end
module F64x2_convert : sig
val promote_low_f32x4 : t -> t
val convert_i32x4_s : t -> t
val convert_i32x4_u : t -> t
end
|
55f46fc47a70a45900b4102eb4a4eea9e93573643b7fdada1bec30f5ab99ead1 | serperu/secer | roman.erl | -module(roman).
-compile(export_all).
to_roman(0) -> [];
to_roman(X) when X >= 1000 -> [$M | to_roman(X - 1000)];
to_roman(X) when X >= 100 ->
digit(X div 100, $C, $D, $M)
++ to_roman(X rem 100);
to_roman(X) when X >= 10 ->
digit(X div 10, $X, $L, $C)
++ to_roman(X rem 10);
to_roman(X) when X >= 1 -> digit(X, $I, $V, $X).
digit(1, X, _, _) -> [X];
digit(2, X, _, _) -> [X, X];
digit(3, X, _, _) -> [X, X, X];
digit(4, X, Y, _) -> [X, Y];
digit(5, _, Y, _) -> [Y];
digit(6, X, Y, _) -> [Y, X];
digit(7, X, Y, _) -> [Y, X, X];
%digit(8, X, Y, _) -> [Y, X, X, X]; %RIGHT
digit(8, X, Y, _) -> [Y, Y, X, X]; %WRONG
digit(9, X, _, Z) -> [X, Z].
main(N) ->
to_roman(N). | null | https://raw.githubusercontent.com/serperu/secer/ffe2f9602356c34c9465c44534c82cc758b809b5/benchmarks/roman/roman.erl | erlang | digit(8, X, Y, _) -> [Y, X, X, X]; %RIGHT
WRONG | -module(roman).
-compile(export_all).
to_roman(0) -> [];
to_roman(X) when X >= 1000 -> [$M | to_roman(X - 1000)];
to_roman(X) when X >= 100 ->
digit(X div 100, $C, $D, $M)
++ to_roman(X rem 100);
to_roman(X) when X >= 10 ->
digit(X div 10, $X, $L, $C)
++ to_roman(X rem 10);
to_roman(X) when X >= 1 -> digit(X, $I, $V, $X).
digit(1, X, _, _) -> [X];
digit(2, X, _, _) -> [X, X];
digit(3, X, _, _) -> [X, X, X];
digit(4, X, Y, _) -> [X, Y];
digit(5, _, Y, _) -> [Y];
digit(6, X, Y, _) -> [Y, X];
digit(7, X, Y, _) -> [Y, X, X];
digit(9, X, _, Z) -> [X, Z].
main(N) ->
to_roman(N). |
4ed7c496efc4f9628998da79601acc146e684151a48516b10170f39b32b00f19 | melange-re/melange | NumberAdder.ml | let addNumbers a b = a + b | null | https://raw.githubusercontent.com/melange-re/melange/246e6df78fe3b6cc124cb48e5a37fdffd99379ed/jscomp/build_tests/monorepo/libs/number-adder/src/NumberAdder.ml | ocaml | let addNumbers a b = a + b | |
f55a83808693f11d6c7f1c85ef73f6a11724b439ec1b310b4a9484b9e8be300e | AbstractMachinesLab/caramel | env.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Environment handling *)
open Cmi_format
open Misc
open Asttypes
open Longident
open Path
open Types
open Btype
module String = Misc.Stdlib.String
let add_delayed_check_forward = ref (fun _ -> assert false)
let value_declarations : ((string * Location.t), (unit -> unit)) Hashtbl.t =
Hashtbl.create 16
This table is used to usage of value declarations . A declaration is
identified with its name and location . The callback attached to a
declaration is called whenever the value is used explicitly
( lookup_value ) or implicitly ( inclusion test between signatures ,
cf ) .
identified with its name and location. The callback attached to a
declaration is called whenever the value is used explicitly
(lookup_value) or implicitly (inclusion test between signatures,
cf Includemod.value_descriptions). *)
let type_declarations = Hashtbl.create 16
let module_declarations = Hashtbl.create 16
type constructor_usage = Positive | Pattern | Privatize
type constructor_usages =
{
mutable cu_positive: bool;
mutable cu_pattern: bool;
mutable cu_privatize: bool;
}
let add_constructor_usage cu = function
| Positive -> cu.cu_positive <- true
| Pattern -> cu.cu_pattern <- true
| Privatize -> cu.cu_privatize <- true
let constructor_usages () =
{cu_positive = false; cu_pattern = false; cu_privatize = false}
let used_constructors :
(string * Location.t * string, (constructor_usage -> unit)) Hashtbl.t
= Hashtbl.create 16
type error =
| Missing_module of Location.t * Path.t * Path.t
| Illegal_value_name of Location.t * string
exception Error of error
let error err = raise (Error err)
(** Map indexed by the name of module components. *)
module NameMap = String.Map
type summary =
Env_empty
| Env_value of summary * Ident.t * value_description
| Env_type of summary * Ident.t * type_declaration
| Env_extension of summary * Ident.t * extension_constructor
| Env_module of summary * Ident.t * module_presence * module_declaration
| Env_modtype of summary * Ident.t * modtype_declaration
| Env_class of summary * Ident.t * class_declaration
| Env_cltype of summary * Ident.t * class_type_declaration
| Env_open of summary * Path.t
| Env_functor_arg of summary * Ident.t
| Env_constraints of summary * type_declaration Path.Map.t
| Env_copy_types of summary * string list
| Env_persistent of summary * Ident.t
type address =
| Aident of Ident.t
| Adot of address * int
module TycompTbl =
  struct
    (** This module is used to store components of types (i.e. labels
        and constructors). We keep a representation of each nested
        "open" and the set of local bindings between each of them. *)

    type 'a t = {
      current: 'a Ident.tbl;
      (** Local bindings since the last open. *)

      opened: 'a opened option;
      (** Symbolic representation of the last (innermost) open, if any. *)
    }

    and 'a opened = {
      components: ('a list) NameMap.t;
      (** Components from the opened module. We keep a list of
          bindings for each name, as in comp_labels and
          comp_constrs. *)

      using: (string -> ('a * 'a) option -> unit) option;
      (** A callback to be applied when a component is used from this
          "open". This is used to detect unused "opens". The
          arguments are used to detect shadowing. *)

      next: 'a t;
      (** The table before opening the module. *)
    }

    let empty = { current = Ident.empty; opened = None }

    let add id x tbl =
      {tbl with current = Ident.add id x tbl.current}

    let add_open slot wrap components next =
      let using =
        match slot with
        | None -> None
        | Some f -> Some (fun s x -> f s (wrap x))
      in
      {
        current = Ident.empty;
        opened = Some {using; components; next};
      }

    (* Lookup by identity; falls back through the chain of opens. *)
    let rec find_same id tbl =
      try Ident.find_same id tbl.current
      with Not_found as exn ->
        begin match tbl.opened with
        | Some {next; _} -> find_same id next
        | None -> raise exn
        end

    let nothing = fun () -> ()

    (* Build the "used" callback attached to one [find_all] result:
       it reports [name] to the open's [using] callback, with the
       shadowed binding from [rest] (if any). *)
    let mk_callback rest name desc = function
      | None -> nothing
      | Some f ->
          (fun () ->
             match rest with
             | [] -> f name None
             | (hidden, _) :: _ -> f name (Some (desc, hidden))
          )

    (* All bindings for [name], innermost first, each paired with the
       callback to run if that particular binding is the one used. *)
    let rec find_all name tbl =
      List.map (fun (_id, desc) -> desc, nothing)
        (Ident.find_all name tbl.current) @
      match tbl.opened with
      | None -> []
      | Some {using; next; components} ->
          let rest = find_all name next in
          match NameMap.find name components with
          | exception Not_found -> rest
          | opened ->
              List.map
                (fun desc -> desc, mk_callback rest name desc using)
                opened
              @ rest

    let rec fold_name f tbl acc =
      let acc = Ident.fold_name (fun _id d -> f d) tbl.current acc in
      match tbl.opened with
      | Some {using = _; next; components} ->
          acc
          |> NameMap.fold
            (fun _name -> List.fold_right f)
            components
          |> fold_name f next
      | None ->
          acc

    (* Keys bound directly (not via the components of an open), across
       all nesting levels. *)
    let rec local_keys tbl acc =
      let acc = Ident.fold_all (fun k _ accu -> k::accu) tbl.current acc in
      match tbl.opened with
      | Some o -> local_keys o.next acc
      | None -> acc

    (* Keys of [tbl2] that satisfy [is_local] and are absent from
       [tbl1]. *)
    let diff_keys is_local tbl1 tbl2 =
      let keys2 = local_keys tbl2 [] in
      List.filter
        (fun id ->
           is_local (find_same id tbl2) &&
           try ignore (find_same id tbl1); false
           with Not_found -> true)
        keys2

  end
module IdTbl =
  struct
    (** This module is used to store all kinds of components except
        (labels and constructors) in environments.  We keep a
        representation of each nested "open" and the set of local
        bindings between each of them. *)

    type 'a t = {
      current: 'a Ident.tbl;
      (** Local bindings since the last open *)

      opened: 'a opened option;
      (** Symbolic representation of the last (innermost) open, if any. *)
    }

    and 'a opened = {
      root: Path.t;
      (** The path of the opened module, to be prefixed in front of
          its local names to produce a valid path in the current
          environment. *)

      components: 'a NameMap.t;
      (** Components from the opened module. *)

      using: (string -> ('a * 'a) option -> unit) option;
      (** A callback to be applied when a component is used from this
          "open". This is used to detect unused "opens". The
          arguments are used to detect shadowing. *)

      next: 'a t;
      (** The table before opening the module. *)
    }

    let empty = { current = Ident.empty; opened = None }

    let add id x tbl =
      {tbl with current = Ident.add id x tbl.current}

    let remove id tbl =
      {tbl with current = Ident.remove id tbl.current}

    let add_open slot wrap root components next =
      let using =
        match slot with
        | None -> None
        | Some f -> Some (fun s x -> f s (wrap x))
      in
      {
        current = Ident.empty;
        opened = Some {using; root; components; next};
      }

    (* Lookup by identity; falls back through the chain of opens. *)
    let rec find_same id tbl =
      try Ident.find_same id tbl.current
      with Not_found as exn ->
        begin match tbl.opened with
        | Some {next; _} -> find_same id next
        | None -> raise exn
        end

    (* Lookup by name, returning the binding's path.  When [mark] is
       true and the binding comes from an open, the open's [using]
       callback is fired with the shadowed binding (if any). *)
    let rec find_name ~mark name tbl =
      try
        let (id, desc) = Ident.find_name name tbl.current in
        Pident id, desc
      with Not_found as exn ->
        begin match tbl.opened with
        | Some {using; root; next; components} ->
            begin try
              let descr = NameMap.find name components in
              let res = Pdot (root, name), descr in
              if mark then begin match using with
              | None -> ()
              | Some f -> begin
                  match find_name ~mark:false name next with
                  | exception Not_found -> f name None
                  | _, descr' -> f name (Some (descr', descr))
                end
              end;
              res
            with Not_found ->
              find_name ~mark name next
            end
        | None ->
            raise exn
        end

    (* Apply [f] to the innermost binding of [name]; returns the table
       unchanged when the name is unbound anywhere. *)
    let rec update name f tbl =
      try
        let (id, desc) = Ident.find_name name tbl.current in
        let new_desc = f desc in
        {tbl with current = Ident.add id new_desc tbl.current}
      with Not_found ->
        begin match tbl.opened with
        | Some {root; using; next; components} ->
            begin try
              let desc = NameMap.find name components in
              let new_desc = f desc in
              let components = NameMap.add name new_desc components in
              {tbl with opened = Some {root; using; next; components}}
            with Not_found ->
              let next = update name f next in
              {tbl with opened = Some {root; using; next; components}}
            end
        | None ->
            tbl
        end

    (* All bindings for [name] with their paths, innermost first. *)
    let rec find_all name tbl =
      List.map
        (fun (id, desc) -> Pident id, desc)
        (Ident.find_all name tbl.current) @
      match tbl.opened with
      | None -> []
      | Some {root; using = _; next; components} ->
          try
            let desc = NameMap.find name components in
            (Pdot (root, name), desc) :: find_all name next
          with Not_found ->
            find_all name next

    let rec fold_name f tbl acc =
      let acc =
        Ident.fold_name
          (fun id d -> f (Ident.name id) (Pident id, d))
          tbl.current acc
      in
      match tbl.opened with
      | Some {root; using = _; next; components} ->
          acc
          |> NameMap.fold
            (fun name desc -> f name (Pdot (root, name), desc))
            components
          |> fold_name f next
      | None ->
          acc

    (* Keys bound directly (not via the components of an open), across
       all nesting levels. *)
    let rec local_keys tbl acc =
      let acc = Ident.fold_all (fun k _ accu -> k::accu) tbl.current acc in
      match tbl.opened with
      | Some o -> local_keys o.next acc
      | None -> acc

    let rec iter f tbl =
      Ident.iter (fun id desc -> f id (Pident id, desc)) tbl.current;
      match tbl.opened with
      | Some {root; using = _; next; components} ->
          NameMap.iter
            (fun s x ->
               let root_scope = Path.scope root in
               f (Ident.create_scoped ~scope:root_scope s)
                 (Pdot (root, s), x))
            components;
          iter f next
      | None -> ()

    (* Keys of [tbl2] absent from [tbl1]. *)
    let diff_keys tbl1 tbl2 =
      let keys2 = local_keys tbl2 [] in
      List.filter
        (fun id ->
           try ignore (find_same id tbl1); false
           with Not_found -> true)
        keys2

  end
(* Datatype descriptions attached to a type declaration: its
   constructor and label descriptions. *)
type type_descriptions =
    constructor_description list * label_description list

let in_signature_flag = 0x01

(* A binding that is either a real entry or a marker standing for a
   persistent compilation unit whose contents are loaded on demand. *)
type 'a value_or_persistent =
  | Value of 'a
  | Persistent

(* The type of typing environments. *)
type t = {
  values: (value_description * address_lazy) IdTbl.t;
  constrs: (constructor_description * address_lazy option) TycompTbl.t;
  labels: label_description TycompTbl.t;
  types: (type_declaration * type_descriptions) IdTbl.t;
  modules: (module_declaration_lazy * address_lazy) value_or_persistent IdTbl.t;
  modtypes: modtype_declaration IdTbl.t;
  components: (module_components * address_lazy) value_or_persistent IdTbl.t;
  classes: (class_declaration * address_lazy) IdTbl.t;
  cltypes: class_type_declaration IdTbl.t;
  functor_args: unit Ident.tbl;
  summary: summary;
  local_constraints: type_declaration Path.Map.t;
  flags: int;
}

(* A module declaration under a pending substitution, forced by
   [subst_modtype_maker] below. *)
and module_declaration_lazy =
  (Subst.t * Subst.scoping * module_declaration, module_declaration) EnvLazy.t

and module_components =
  {
    alerts: alerts;
    loc: Location.t;
    comps: (components_maker, module_components_repr option) EnvLazy.t;
  }

(* Everything needed to (lazily) build the components of a module. *)
and components_maker = {
  cm_env: t;
  cm_freshening_subst: Subst.t option;
  cm_prefixing_subst: Subst.t;
  cm_path: Path.t;
  cm_addr: address_lazy;
  cm_mty: Types.module_type;
}

and module_components_repr =
    Structure_comps of structure_components
  | Functor_comps of functor_components

and structure_components = {
  mutable comp_values: (value_description * address_lazy) NameMap.t;
  mutable comp_constrs:
    ((constructor_description * address_lazy option) list) NameMap.t;
  mutable comp_labels: label_description list NameMap.t;
  mutable comp_types: (type_declaration * type_descriptions) NameMap.t;
  mutable comp_modules: (module_declaration_lazy * address_lazy) NameMap.t;
  mutable comp_modtypes: modtype_declaration NameMap.t;
  mutable comp_components: (module_components * address_lazy) NameMap.t;
  mutable comp_classes: (class_declaration * address_lazy) NameMap.t;
  mutable comp_cltypes: class_type_declaration NameMap.t;
}

and functor_components = {
  fcomp_param: Ident.t;  (* Formal parameter *)
  fcomp_arg: module_type option;  (* Argument signature *)
  fcomp_res: module_type;  (* Result signature *)
  fcomp_cache: (Path.t, module_components) Hashtbl.t;  (* For memoization *)
  fcomp_subst_cache: (Path.t, module_type) Hashtbl.t
}

(* A not-yet-computed address, forced by [force_address] below. *)
and address_unforced =
  | Projection of { parent : address_lazy; pos : int; }
  | ModAlias of { env : t; path : Path.t; }

and address_lazy = (address_unforced, address) EnvLazy.t

let empty_structure =
  Structure_comps {
    comp_values = NameMap.empty;
    comp_constrs = NameMap.empty;
    comp_labels = NameMap.empty;
    comp_types = NameMap.empty;
    comp_modules = NameMap.empty; comp_modtypes = NameMap.empty;
    comp_components = NameMap.empty; comp_classes = NameMap.empty;
    comp_cltypes = NameMap.empty }

(* Keep [env]'s tables but take the local constraints and flags from
   [from]. *)
let copy_local ~from env =
  { env with
    local_constraints = from.local_constraints;
    flags = from.flags }

(* Forward references, filled in elsewhere to break module cycles. *)
let same_constr = ref (fun _ _ _ -> assert false)

let check_well_formed_module = ref (fun _ -> assert false)
(* Helper to decide whether to report an identifier shadowing
   by some 'open'. For labels and constructors, we do not report
   if the two elements are from the same re-exported declaration.

   Later, one could also interpret some attributes on value and
   type declarations to silence the shadowing warnings. *)
let check_shadowing env = function
  | `Constructor (Some ((c1, _), (c2, _)))
    when not (!same_constr env c1.cstr_res c2.cstr_res) ->
      Some "constructor"
  | `Label (Some (l1, l2))
    when not (!same_constr env l1.lbl_res l2.lbl_res) ->
      Some "label"
  | `Value (Some _) -> Some "value"
  | `Type (Some _) -> Some "type"
  | `Module (Some _) | `Component (Some _) -> Some "module"
  | `Module_type (Some _) -> Some "module type"
  | `Class (Some _) -> Some "class"
  | `Class_type (Some _) -> Some "class type"
  | `Constructor _ | `Label _
  | `Value None | `Type None | `Module None | `Module_type None
  | `Class None | `Class_type None | `Component None ->
      None

(* Force a lazily-substituted module declaration (see
   [module_declaration_lazy]). *)
let subst_modtype_maker (subst, scoping, md) =
  {md with md_type = Subst.modtype scoping subst md.md_type}

(* The empty environment. *)
let empty = {
  values = IdTbl.empty; constrs = TycompTbl.empty;
  labels = TycompTbl.empty; types = IdTbl.empty;
  modules = IdTbl.empty; modtypes = IdTbl.empty;
  components = IdTbl.empty; classes = IdTbl.empty;
  cltypes = IdTbl.empty;
  summary = Env_empty; local_constraints = Path.Map.empty;
  flags = 0;
  functor_args = Ident.empty;
 }
(* Set or clear the "inside a signature" flag. *)
let in_signature b env =
  let flags =
    if b then env.flags lor in_signature_flag
    else env.flags land (lnot in_signature_flag)
  in
  {env with flags}

let is_in_signature env = env.flags land in_signature_flag <> 0

let is_ident = function
    Pident _ -> true
  | Pdot _ | Papply _ -> false

(* An extension constructor whose path is a bare identifier, i.e. a
   locally-bound one. *)
let is_local_ext = function
  | {cstr_tag = Cstr_extension(p, _)}, _ -> is_ident p
  | _ -> false

(* Identifiers bound in [env1] but not in [env2], over the values,
   local extension constructors, modules and classes tables. *)
let diff env1 env2 =
  IdTbl.diff_keys env1.values env2.values @
  TycompTbl.diff_keys is_local_ext env1.constrs env2.constrs @
  IdTbl.diff_keys env1.modules env2.modules @
  IdTbl.diff_keys env1.classes env2.classes

(* Forward declarations *)

let components_of_module' =
  ref ((fun ~alerts:_ ~loc:_ _env _fsub _psub _path _addr _mty -> assert false):
         alerts:alerts -> loc:Location.t -> t ->
       Subst.t option -> Subst.t -> Path.t -> address_lazy -> module_type ->
       module_components)
let components_of_module_maker' =
  ref ((fun _ -> assert false) :
         components_maker -> module_components_repr option)
let components_of_functor_appl' =
  ref ((fun _f _env _p1 _p2 -> assert false) :
         functor_components -> t -> Path.t -> Path.t -> module_components)
let check_modtype_inclusion =
  (* to be filled with Includemod.check_modtype_inclusion *)
  ref ((fun ~loc:_ _env _mty1 _path1 _mty2 -> assert false) :
         loc:Location.t -> t -> module_type -> Path.t -> module_type -> unit)
let strengthen =
  (* to be filled with Mtype.strengthen *)
  ref ((fun ~aliasable:_ _env _mty _path -> assert false) :
         aliasable:bool -> t -> module_type -> Path.t -> module_type)

(* Module declaration with the given type, no attributes, no location. *)
let md md_type =
  {md_type; md_attributes=[]; md_loc=Location.none}

(* Print addresses *)

let rec print_address ppf = function
  | Aident id -> Format.fprintf ppf "%s" (Ident.name id)
  | Adot(a, pos) -> Format.fprintf ppf "%a.[%i]" print_address a pos
(* The name of the compilation unit currently compiled.
   "" if outside a compilation unit. *)
module Current_unit_name : sig
  val get : unit -> modname
  val set : modname -> unit
  val is : modname -> bool
  val is_name_of : Ident.t -> bool
end = struct
  let current_unit =
    ref ""
  let get () =
    !current_unit
  let set name =
    current_unit := name
  let is name =
    !current_unit = name
  let is_name_of id =
    is (Ident.name id)
end

let set_unit_name = Current_unit_name.set
let get_unit_name = Current_unit_name.get

(* Lookup of a module binding by identity; an unbound persistent
   identifier (other than the current unit itself) maps to
   [Persistent] rather than raising [Not_found]. *)
let find_same_module id tbl =
  match IdTbl.find_same id tbl with
  | x -> x
  | exception Not_found
    when Ident.persistent id && not (Current_unit_name.is_name_of id) ->
      Persistent
(* signature of persistent compilation units *)
type persistent_module = {
  pm_signature: signature Lazy.t;
  pm_components: module_components;
}

(* Bind a persistent identifier in [env] as a [Persistent] marker (its
   cmi is loaded on first use).  Binding the current unit's own name
   is a no-op. *)
let add_persistent_structure id env =
  if not (Ident.persistent id) then invalid_arg "Env.add_persistent_structure";
  if not (Current_unit_name.is_name_of id) then
    { env with
      modules = IdTbl.add id Persistent env.modules;
      components = IdTbl.add id Persistent env.components;
      summary = Env_persistent (env.summary, id);
    }
  else
    env

(* Build the in-memory representation of a persistent unit from its
   cmi: its lazily-substituted signature and its components. *)
let sign_of_cmi ~freshen { Persistent_env.Persistent_signature.cmi; _ } =
  let name = cmi.cmi_name in
  let sign = cmi.cmi_sign in
  let flags = cmi.cmi_flags in
  let id = Ident.create_persistent name in
  let path = Pident id in
  let addr = EnvLazy.create_forced (Aident id) in
  (* Collect the alert attributes recorded in the cmi flags. *)
  let alerts =
    List.fold_left (fun acc -> function Alerts s -> s | _ -> acc)
      Misc.Stdlib.String.Map.empty
      flags
  in
  let loc = Location.none in
  let pm_signature = lazy (Subst.signature Make_local Subst.identity sign) in
  let pm_components =
    let freshening_subst =
      if freshen then (Some Subst.identity) else None in
    !components_of_module' ~alerts ~loc
      empty freshening_subst Subst.identity path addr (Mty_signature sign) in
  {
    pm_signature;
    pm_components;
  }

let read_sign_of_cmi = sign_of_cmi ~freshen:true

let save_sign_of_cmi = sign_of_cmi ~freshen:false

let persistent_env : persistent_module Persistent_env.t =
  Persistent_env.empty ()

let without_cmis f x =
  Persistent_env.without_cmis persistent_env f x

let imports () = Persistent_env.imports persistent_env

let import_crcs ~source crcs =
  Persistent_env.import_crcs persistent_env ~source crcs

let read_pers_mod modname filename =
  Persistent_env.read persistent_env read_sign_of_cmi modname filename

let find_pers_mod name =
  Persistent_env.find persistent_env read_sign_of_cmi name

let check_pers_mod ~loc name =
  Persistent_env.check persistent_env read_sign_of_cmi ~loc name

let crc_of_unit name =
  Persistent_env.crc_of_unit persistent_env read_sign_of_cmi name

let is_imported_opaque modname =
  Persistent_env.is_imported_opaque persistent_env modname

(* Empty out the per-declaration "used" callback tables. *)
let reset_declaration_caches () =
  Hashtbl.clear value_declarations;
  Hashtbl.clear type_declarations;
  Hashtbl.clear module_declarations;
  Hashtbl.clear used_constructors;
  ()

let reset_cache () =
  Current_unit_name.set "";
  Persistent_env.clear persistent_env;
  reset_declaration_caches ();
  ()

(* Lighter reset for the toplevel: only clear the "missing" entries of
   the persistent environment (Persistent_env.clear_missing). *)
let reset_cache_toplevel () =
  Persistent_env.clear_missing persistent_env;
  reset_declaration_caches ();
  ()
(* get_components *)

(* Force the lazy components of a module; when cmis cannot be loaded,
   failures are logged instead of raised (force_logged). *)
let get_components_opt c =
  match Persistent_env.can_load_cmis persistent_env with
  | Persistent_env.Can_load_cmis ->
      EnvLazy.force !components_of_module_maker' c.comps
  | Persistent_env.Cannot_load_cmis log ->
      EnvLazy.force_logged log !components_of_module_maker' c.comps

(* As [get_components_opt], defaulting to an empty structure. *)
let get_components c =
  match get_components_opt c with
  | None -> empty_structure
  | Some c -> c

(* Lookup by identifier *)

(* Components of the module denoted by [path]; raises [Not_found] on
   ill-formed paths (e.g. projecting out of a functor). *)
let rec find_module_descr path env =
  match path with
    Pident id ->
      begin match find_same_module id env.components with
      | Value x -> fst x
      | Persistent -> (find_pers_mod (Ident.name id)).pm_components
      end
  | Pdot(p, s) ->
      begin match get_components (find_module_descr p env) with
        Structure_comps c ->
          fst (NameMap.find s c.comp_components)
      | Functor_comps _ ->
          raise Not_found
      end
  | Papply(p1, p2) ->
      begin match get_components (find_module_descr p1 env) with
        Functor_comps f ->
          !components_of_functor_appl' f env p1 p2
      | Structure_comps _ ->
          raise Not_found
      end

(* Generic lookup by path: [proj1] selects the environment table,
   [proj2] the matching field of structure components. *)
let find proj1 proj2 path env =
  match path with
    Pident id -> IdTbl.find_same id (proj1 env)
  | Pdot(p, s) ->
      begin match get_components (find_module_descr p env) with
        Structure_comps c -> NameMap.find s (proj2 c)
      | Functor_comps _ ->
          raise Not_found
      end
  | Papply _ ->
      raise Not_found
(* Instantiations of the generic [find] for each component kind. *)
let find_value_full =
  find (fun env -> env.values) (fun sc -> sc.comp_values)
and find_type_full =
  find (fun env -> env.types) (fun sc -> sc.comp_types)
and find_modtype =
  find (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes)
and find_class_full =
  find (fun env -> env.classes) (fun sc -> sc.comp_classes)
and find_cltype =
  find (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes)

let find_value p env =
  fst (find_value_full p env)
let find_class p env =
  fst (find_class_full p env)

(* Declaration and label descriptions for the inline record type of a
   constructor; the constructor must have one ([cstr_inlined]). *)
let type_of_cstr path = function
  | {cstr_inlined = Some d; _} ->
      (d, ([], List.map snd (Datarepr.labels_of_type path d)))
  | _ ->
      assert false

(* Refinement of [find_type_full] above that also resolves local
   constraints and the path forms classified by
   [Path.constructor_typath] (inline records of constructors and
   extension constructors). *)
let find_type_full path env =
  match Path.constructor_typath path with
  | Regular p ->
      (try (Path.Map.find p env.local_constraints, ([], []))
       with Not_found -> find_type_full p env)
  | Cstr (ty_path, s) ->
      let (_, (cstrs, _)) =
        try find_type_full ty_path env
        with Not_found -> assert false
      in
      let cstr =
        try List.find (fun cstr -> cstr.cstr_name = s) cstrs
        with Not_found -> assert false
      in
      type_of_cstr path cstr
  | LocalExt id ->
      let cstr =
        try fst (TycompTbl.find_same id env.constrs)
        with Not_found -> assert false
      in
      type_of_cstr path cstr
  | Ext (mod_path, s) ->
      let comps =
        try find_module_descr mod_path env
        with Not_found -> assert false
      in
      let comps =
        match get_components comps with
        | Structure_comps c -> c
        | Functor_comps _ -> assert false
      in
      (* There must be exactly one extension constructor named [s]. *)
      let exts =
        List.filter
          (function ({cstr_tag=Cstr_extension _}, _) -> true | _ -> false)
          (try NameMap.find s comps.comp_constrs
           with Not_found -> assert false)
      in
      match exts with
      | [(cstr, _)] -> type_of_cstr path cstr
      | _ -> assert false

let find_type p env =
  fst (find_type_full p env)
let find_type_descrs p env =
  snd (find_type_full p env)
(* Declaration of the module denoted by [path].  For a functor
   application, [alias] being true returns the functor's result type
   without substituting the actual argument. *)
let find_module ~alias path env =
  match path with
    Pident id ->
      begin
        match find_same_module id env.modules with
        | Value (data, _) -> EnvLazy.force subst_modtype_maker data
        | Persistent ->
            let pm = find_pers_mod (Ident.name id) in
            md (Mty_signature(Lazy.force pm.pm_signature))
      end
  | Pdot(p, s) ->
      begin match get_components (find_module_descr p env) with
        Structure_comps c ->
          let data, _ = NameMap.find s c.comp_modules in
          EnvLazy.force subst_modtype_maker data
      | Functor_comps _ ->
          raise Not_found
      end
  | Papply(p1, p2) ->
      let desc1 = find_module_descr p1 env in
      begin match get_components desc1 with
        Functor_comps f ->
          let mty =
            match f.fcomp_res with
            | Mty_alias _ as mty -> mty
            | mty ->
                if alias then mty else
                (* Substitute the actual argument for the formal
                   parameter, memoized per argument path. *)
                try
                  Hashtbl.find f.fcomp_subst_cache p2
                with Not_found ->
                  let mty =
                    Subst.modtype (Rescope (Path.scope path))
                      (Subst.add_module f.fcomp_param p2 Subst.identity)
                      f.fcomp_res in
                  Hashtbl.add f.fcomp_subst_cache p2 mty;
                  mty
          in
          md mty
      | Structure_comps _ ->
          raise Not_found
      end
(* Runtime address of the module denoted by [path]. *)
let rec find_module_address path env =
  match path with
  | Pident id ->
      begin
        match find_same_module id env.modules with
        | Value (_, addr) -> get_address addr
        | Persistent -> Aident id
      end
  | Pdot(p, s) -> begin
      match get_components (find_module_descr p env) with
      | Structure_comps c ->
          let _, addr = NameMap.find s c.comp_modules in
          get_address addr
      | Functor_comps _ ->
          raise Not_found
    end
  | Papply _ -> raise Not_found

(* Force a lazy address (see [address_unforced]). *)
and force_address = function
  | Projection { parent; pos } -> Adot(get_address parent, pos)
  | ModAlias { env; path } -> find_module_address path env

and get_address a =
  EnvLazy.force force_address a

let find_value_address p env =
  get_address (snd (find_value_full p env))

let find_class_address p env =
  get_address (snd (find_class_full p env))

(* Address of the first constructor in the list that carries one. *)
let rec get_constrs_address = function
  | [] -> raise Not_found
  | (_, None) :: rest -> get_constrs_address rest
  | (_, Some a) :: _ -> get_address a

let find_constructor_address path env =
  match path with
  | Pident id -> begin
      match TycompTbl.find_same id env.constrs with
      | _, None -> raise Not_found
      | _, Some addr -> get_address addr
    end
  | Pdot(p, s) -> begin
      match get_components (find_module_descr p env) with
      | Structure_comps c ->
          get_constrs_address (NameMap.find s c.comp_constrs)
      | Functor_comps _ ->
          raise Not_found
    end
  | Papply _ ->
      raise Not_found

(* Global identifiers recorded while expanding module aliases (see
   [expand_module_path]); kept duplicate-free. *)
let required_globals = ref []
let reset_required_globals () = required_globals := []
let get_required_globals () = !required_globals
let add_required_global id =
  if Ident.global id && not !Clflags.transparent_modules
  && not (List.exists (Ident.same id) !required_globals)
  then required_globals := id :: !required_globals
(* Normalize a module path by expanding module aliases.  In [lax]
   mode, missing modules are tolerated and the path is returned
   unchanged. *)
let rec normalize_module_path lax env = function
  | Pident id as path when lax && Ident.persistent id ->
      path (* fast path (avoids lookup) *)
  | Pdot (p, s) as path ->
      let p' = normalize_module_path lax env p in
      if p == p' then expand_module_path lax env path
      else expand_module_path lax env (Pdot(p', s))
  | Papply (p1, p2) as path ->
      let p1' = normalize_module_path lax env p1 in
      let p2' = normalize_module_path true env p2 in
      if p1 == p1' && p2 == p2' then expand_module_path lax env path
      else expand_module_path lax env (Papply(p1', p2'))
  | Pident _ as path ->
      expand_module_path lax env path

(* One step of alias expansion; records required globals when not in
   lax/-trans-mod mode. *)
and expand_module_path lax env path =
  try match find_module ~alias:true path env with
    {md_type=Mty_alias path1} ->
      let path' = normalize_module_path lax env path1 in
      if lax || !Clflags.transparent_modules then path' else
      let id = Path.head path in
      if Ident.global id && not (Ident.same id (Path.head path'))
      then add_required_global id;
      path'
  | _ -> path
  with Not_found when lax
  || (match path with Pident id -> not (Ident.persistent id) | _ -> true) ->
      path

(* As above, but a missing module is reported as a [Missing_module]
   error when a location [oloc] is available. *)
let normalize_module_path oloc env path =
  try normalize_module_path (oloc = None) env path
  with Not_found ->
    match oloc with None -> assert false
    | Some loc ->
        error (Missing_module(loc, path,
                              normalize_module_path true env path))

(* Normalize only the module prefix [p] of a dotted path [p.s]. *)
let normalize_path_prefix oloc env path =
  match path with
    Pdot(p, s) ->
      let p2 = normalize_module_path oloc env p in
      if p == p2 then path else Pdot(p2, s)
  | Pident _ ->
      path
  | Papply _ ->
      assert false
(* True when [s] starts with an uppercase ASCII letter.  Like the
   [s.[0]] access it relies on, this raises [Invalid_argument] on the
   empty string. *)
let is_uident s =
  let c = s.[0] in
  'A' <= c && c <= 'Z'
let normalize_type_path oloc env path =
  (* Inlined version of Path.is_constructor_typath:
     constructor type paths (i.e. paths pointing to an inline
     record argument of a constructor) are built as a regular
     type path followed by a capitalized constructor name. *)
  match path with
  | Pident _ ->
      path
  | Pdot(p, s) ->
      let p2 =
        if is_uident s && not (is_uident (Path.last p)) then
          (* Cstr M.t.C *)
          normalize_path_prefix oloc env p
        else
          (* Regular M.t, Ext M.C *)
          normalize_module_path oloc env p
      in
      if p == p2 then path else Pdot (p2, s)
  | Papply _ ->
      assert false

(* The exported [find_module] always substitutes functor arguments. *)
let find_module path env =
  find_module ~alias:false path env
(* Find the manifest type associated to a type when appropriate:
   - the type should be public or should have a private row,
   - the type should have an associated manifest type. *)
let find_type_expansion path env =
  let decl = find_type path env in
  match decl.type_manifest with
  | Some body when decl.type_private = Public
              || decl.type_kind <> Type_abstract
              || Btype.has_constr_row body ->
      (decl.type_params, body, decl.type_expansion_scope)
  (* The manifest type of Private abstract data types without
     private row are still considered unknown to the type system.
     Hence, this case is caught by the following clause that also handles
     purely abstract data types without manifest type definition. *)
  | _ -> raise Not_found

(* Find the manifest type information associated to a type, i.e.
   the necessary information for the compiler's type-based optimisations.
   In particular, the manifest type associated to a private abstract type
   is revealed for the sake of compiler's type-based optimisations. *)
let find_type_expansion_opt path env =
  let decl = find_type path env in
  match decl.type_manifest with
  (* The manifest type of Private abstract data types can still get
     an approximation using their manifest type. *)
  | Some body ->
      (decl.type_params, body, decl.type_expansion_scope)
  | _ -> raise Not_found

(* Expansion of a module type path; raises [Not_found] when the
   module type is abstract. *)
let find_modtype_expansion path env =
  match (find_modtype path env).mtd_type with
  | None -> raise Not_found
  | Some mty -> mty
(* Whether [path] denotes (a component of) a functor argument: its
   head identifier was registered in [env.functor_args], or the path
   contains a functor application. *)
let rec is_functor_arg path env =
  match path with
  | Papply _ -> true
  | Pdot (p, _s) -> is_functor_arg p env
  | Pident id ->
      (match Ident.find_same id env.functor_args with
       | () -> true
       | exception Not_found -> false)
(* Lookup by name *)

exception Recmodule

(* Emit an alert for each alert attached to module [p], at location
   [loc] when one is given; silent otherwise. *)
let report_alerts ?loc p alerts =
  match loc with
  | Some loc ->
      Misc.Stdlib.String.Map.iter
        (fun kind message ->
           let message = if message = "" then "" else "\n" ^ message in
           Location.alert ~kind loc
             (Printf.sprintf "module %s%s" (Path.name p) message)
        )
        alerts
  | _ -> ()

(* Fire the "used" callback registered for the module declaration at
   (name, loc), if any. *)
let mark_module_used name loc =
  try Hashtbl.find module_declarations (name, loc) ()
  with Not_found -> ()

(* Resolve [lid] to a module path and its components, without the
   marking/alert-reporting done by [lookup_module_descr] below. *)
let rec lookup_module_descr_aux ?loc ~mark lid env =
  match lid with
    Lident s ->
      let find_components s = (find_pers_mod s).pm_components in
      begin match IdTbl.find_name ~mark s env.components with
      | exception Not_found when not (Current_unit_name.is s) ->
          (* Unbound name other than the current unit: assume a
             persistent unit. *)
          let p = Path.Pident (Ident.create_persistent s) in
          (p, find_components s)
      | (p, data) ->
          (p,
           match data with
           | Value (comp, _) -> comp
           | Persistent -> find_components s)
      end
  | Ldot(l, s) ->
      let (p, descr) = lookup_module_descr ?loc ~mark l env in
      begin match get_components descr with
        Structure_comps c ->
          let (descr, _addr) = NameMap.find s c.comp_components in
          (Pdot(p, s), descr)
      | Functor_comps _ ->
          raise Not_found
      end
  | Lapply(l1, l2) ->
      let (p1, desc1) = lookup_module_descr ?loc ~mark l1 env in
      let p2 = lookup_module ~load:true ~mark ?loc l2 env in
      let {md_type=mty2} = find_module p2 env in
      begin match get_components desc1 with
        Functor_comps f ->
          let loc = match loc with Some l -> l | None -> Location.none in
          (match f.fcomp_arg with
           | None -> raise Not_found (* PR#7611 *)
           | Some arg -> !check_modtype_inclusion ~loc env mty2 p2 arg);
          (Papply(p1, p2), !components_of_functor_appl' f env p1 p2)
      | Structure_comps _ ->
          raise Not_found
      end

(* As [lookup_module_descr_aux], plus usage marking and alerts. *)
and lookup_module_descr ?loc ~mark lid env =
  let (p, comps) as res = lookup_module_descr_aux ?loc ~mark lid env in
  if mark then mark_module_used (Path.last p) comps.loc;
(*
  Format.printf "USE module %s at %a@." (Path.last p)
    Location.print comps.loc;
*)
  report_alerts ?loc p comps.alerts;
  res
(* Resolve [lid] to a module path.  When [load] is false and
   -trans-mod is set, a persistent unit is only checked for existence
   ([check_pers_mod]) instead of being loaded. *)
and lookup_module ~load ?loc ~mark lid env : Path.t =
  match lid with
    Lident s ->
      begin match IdTbl.find_name ~mark s env.modules with
      | exception Not_found
        when not (Current_unit_name.is s)
          && !Clflags.transparent_modules
          && not load ->
          check_pers_mod s
            ~loc:(Option.value loc ~default:Location.none);
          Path.Pident (Ident.create_persistent s)
      | p, data ->
          begin match data with
          | Value (data, _) ->
              let {md_loc; md_attributes; md_type} =
                EnvLazy.force subst_modtype_maker data
              in
              if mark then mark_module_used s md_loc;
              begin match md_type with
              | Mty_ident (Path.Pident id) when Ident.name id = "#recmod#" ->
                  (* see #5965: placeholder for a recursive module
                     whose signature is not yet known *)
                  raise Recmodule
              | _ -> ()
              end;
              report_alerts ?loc p
                (Builtin_attributes.alerts_of_attrs md_attributes)
          | Persistent ->
              if !Clflags.transparent_modules && not load then
                check_pers_mod s
                  ~loc:(Option.value loc ~default:Location.none)
              else begin
                let pm = find_pers_mod s in
                report_alerts ?loc p pm.pm_components.alerts
              end
          end;
          p
      end
  | Ldot(l, s) ->
      let (p, descr) = lookup_module_descr ?loc ~mark l env in
      begin match get_components descr with
        Structure_comps c ->
          let (comps, _) = NameMap.find s c.comp_components in
          if mark then mark_module_used s comps.loc;
          let p = Pdot(p, s) in
          report_alerts ?loc p comps.alerts;
          p
      | Functor_comps _ ->
          raise Not_found
      end
  | Lapply(l1, l2) ->
      let (p1, desc1) = lookup_module_descr ?loc ~mark l1 env in
      let p2 = lookup_module ~load:true ?loc ~mark l2 env in
      let {md_type=mty2} = find_module p2 env in
      let p = Papply(p1, p2) in
      begin match get_components desc1 with
        Functor_comps f ->
          let loc = match loc with Some l -> l | None -> Location.none in
          (match f.fcomp_arg with
           | None -> raise Not_found (* PR#7611 *)
           | Some arg -> (!check_modtype_inclusion ~loc env mty2 p2) arg);
          p
      | Structure_comps _ ->
          raise Not_found
      end
(* Generic lookup of a single component by long identifier. *)
let lookup proj1 proj2 ?loc ~mark lid env =
  match lid with
  | Lident s -> IdTbl.find_name ~mark s (proj1 env)
  | Ldot(l, s) ->
      let path, desc = lookup_module_descr ?loc ~mark l env in
      begin match get_components desc with
        Structure_comps c ->
          let data = NameMap.find s (proj2 c) in
          (Pdot(path, s), data)
      | Functor_comps _ ->
          raise Not_found
      end
  | Lapply _ ->
      raise Not_found

(* Lookup of all bindings for [lid] (constructors/labels), each with
   its "used" callback.  [shadow] decides which later bindings are
   hidden by an earlier one. *)
let lookup_all_simple proj1 proj2 shadow ?loc ~mark lid env =
  match lid with
    Lident s ->
      let xl = TycompTbl.find_all s (proj1 env) in
      let rec do_shadow =
        function
        | [] -> []
        | ((x, f) :: xs) ->
            (x, f) ::
            (do_shadow (List.filter (fun (y, _) -> not (shadow x y)) xs))
      in
      do_shadow xl
  | Ldot(l, s) ->
      let (_p, desc) = lookup_module_descr ?loc ~mark l env in
      begin match get_components desc with
        Structure_comps c ->
          let comps =
            try NameMap.find s (proj2 c) with Not_found -> []
          in
          List.map
            (fun data -> (data, (fun () -> ())))
            comps
      | Functor_comps _ ->
          raise Not_found
      end
  | Lapply _ ->
      raise Not_found

let has_local_constraints env = not (Path.Map.is_empty env.local_constraints)

(* An extension constructor shadows another extension constructor. *)
let cstr_shadow (cstr1, _) (cstr2, _) =
  match cstr1.cstr_tag, cstr2.cstr_tag with
  | Cstr_extension _, Cstr_extension _ -> true
  | _ -> false

(* Labels never shadow each other. *)
let lbl_shadow _lbl1 _lbl2 = false

(* Drop the address component of a lookup result. *)
let ignore_address (path, (desc, _addr)) = (path, desc)

(* Per-kind instantiations of [lookup]/[lookup_all_simple]. *)
let lookup_value ?loc ~mark lid env =
  ignore_address
    (lookup (fun env -> env.values) (fun sc -> sc.comp_values)
       ?loc ~mark lid env)
let lookup_all_constructors ?loc ~mark lid env =
  lookup_all_simple (fun env -> env.constrs) (fun sc -> sc.comp_constrs)
    cstr_shadow ?loc ~mark lid env
let lookup_all_labels ?loc ~mark lid env =
  lookup_all_simple (fun env -> env.labels) (fun sc -> sc.comp_labels)
    lbl_shadow ?loc ~mark lid env
let lookup_type ?loc ~mark lid env =
  lookup (fun env -> env.types) (fun sc -> sc.comp_types)
    ?loc ~mark lid env
let lookup_modtype ?loc ~mark lid env =
  lookup (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes)
    ?loc ~mark lid env
let lookup_class ?loc ~mark lid env =
  ignore_address
    (lookup (fun env -> env.classes) (fun sc -> sc.comp_classes)
       ?loc ~mark lid env)
let lookup_cltype ?loc ~mark lid env =
  lookup (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes)
    ?loc ~mark lid env
type copy_of_types = {
to_copy: string list;
initial_values: (value_description * address_lazy) IdTbl.t;
new_values: (value_description * address_lazy) IdTbl.t;
}
let make_copy_of_types l env : copy_of_types =
let f (desc, addr) =
{desc with val_type = Subst.type_expr Subst.identity desc.val_type}, addr
in
let values =
List.fold_left (fun env s -> IdTbl.update s f env) env.values l
in
{to_copy = l; initial_values = env.values; new_values = values}
let do_copy_types { to_copy = l; initial_values; new_values = values } env =
if initial_values != env.values then fatal_error "Env.do_copy_types";
{env with values; summary = Env_copy_types (env.summary, l)}
(* Run the "used" callback registered (if any) for this value
   declaration; a missing registration is not an error. *)
let mark_value_used name vd =
  try Hashtbl.find value_declarations (name, vd.val_loc) ()
  with Not_found -> ()
(* Same as [mark_value_used], for type declarations. *)
let mark_type_used name vd =
  try Hashtbl.find type_declarations (name, vd.type_loc) ()
  with Not_found -> ()
(* Record a use (of kind [usage]) of constructor [constr] belonging to
   the type declaration [vd]. *)
let mark_constructor_used usage name vd constr =
  try Hashtbl.find used_constructors (name, vd.type_loc, constr) usage
  with Not_found -> ()
(* Record a use (of kind [usage]) of the extension constructor [ext]. *)
let mark_extension_used usage ext name =
  let ty_name = Path.last ext.ext_type_path in
  try Hashtbl.find used_constructors (ty_name, ext.ext_loc, name) usage
  with Not_found -> ()
(* Register [callback] to be run when the value [name] declared at
   [vd.val_loc] is marked as used (see [mark_value_used]). *)
let set_value_used_callback name vd callback =
  let key = (name, vd.val_loc) in
  try
    let old = Hashtbl.find value_declarations key in
    Hashtbl.replace value_declarations key (fun () -> old (); callback ())
      (* this is to support cases like:
               let x = let x = 1 in x in x
         where the two declarations have the same location
         (e.g. resulting from Camlp4 expansion of grammar entries) *)
  with Not_found ->
    Hashtbl.add value_declarations key callback
(* Register [callback] to run when the type [name] declared at
   [td.type_loc] is used.  If a callback is already registered for the
   same key, the new one receives the old one so it can chain it;
   ghost locations are ignored. *)
let set_type_used_callback name td callback =
  let loc = td.type_loc in
  if loc.Location.loc_ghost then ()
  else let key = (name, loc) in
  let old =
    try Hashtbl.find type_declarations key
    with Not_found -> ignore
  in
  Hashtbl.replace type_declarations key (fun () -> callback old)
(* User-facing [lookup_value]: wraps the internal lookup and, unless
   [mark] is false, records the value as used. *)
let lookup_value ?loc ?(mark = true) lid env =
  let (_, desc) as r = lookup_value ?loc ~mark lid env in
  if mark then mark_value_used (Longident.last lid) desc;
  r
(* User-facing [lookup_type]: returns only the path, marking the type
   as used unless [mark] is false. *)
let lookup_type ?loc ?(mark = true) lid env =
  let (path, (decl, _)) = lookup_type ?loc ~mark lid env in
  if mark then mark_type_used (Longident.last lid) decl;
  path
(* Mark the type at [path] as used; unknown paths are silently ignored. *)
let mark_type_path env path =
  try
    let decl = find_type path env in
    mark_type_used (Path.last path) decl
  with Not_found -> ()
(* Head type-constructor path of [t]; [t] must reduce to a [Tconstr]. *)
let ty_path t =
  match (repr t).desc with
  | Tconstr (p, _, _) -> p
  | _ -> assert false
(* Look up one constructor (the first match); marks the result type's
   path as used unless [mark] is false. *)
let lookup_constructor ?loc ?(mark = true) lid env =
  match lookup_all_constructors ?loc ~mark lid env with
    [] -> raise Not_found
  | ((desc, _), use) :: _ ->
      if mark then begin
        mark_type_path env (ty_path desc.cstr_res);
        use ()
      end;
      desc
(* Is this long identifier a plain (unqualified) identifier? *)
let is_lident = function
    Lident _ -> true
  | _ -> false
(* User-facing [lookup_all_constructors]: wraps each "use" callback so
   that invoking it also marks the constructor's result type as used.
   An unbound plain identifier yields [] instead of raising, letting
   the caller produce a better error message. *)
let lookup_all_constructors ?loc ?(mark = true) lid env =
  try
    let cstrs = lookup_all_constructors ?loc ~mark lid env in
    let wrap_use desc use () =
      if mark then begin
        mark_type_path env (ty_path desc.cstr_res);
        use ()
      end
    in
    List.map (fun ((cstr, _), use) -> (cstr, wrap_use cstr use)) cstrs
  with
    Not_found when is_lident lid -> []
(* Record a use of constructor [name] described by [desc].  Extension
   constructors are keyed by their own location; ordinary constructors
   go through their type's declaration. *)
let mark_constructor usage env name desc =
  match desc.cstr_tag with
  | Cstr_extension _ ->
      begin
        let ty_path = ty_path desc.cstr_res in
        let ty_name = Path.last ty_path in
        try Hashtbl.find used_constructors (ty_name, desc.cstr_loc, name) usage
        with Not_found -> ()
      end
  | _ ->
      let ty_path = ty_path desc.cstr_res in
      (* the type of a non-extension constructor must be in [env] *)
      let ty_decl = try find_type ty_path env with Not_found -> assert false in
      let ty_name = Path.last ty_path in
      mark_constructor_used usage ty_name ty_decl name
(* Look up one label (the first match); marks its record type as used
   unless [mark] is false. *)
let lookup_label ?loc ?(mark = true) lid env =
  match lookup_all_labels ?loc ~mark lid env with
    [] -> raise Not_found
  | (desc, use) :: _ ->
      if mark then begin
        mark_type_path env (ty_path desc.lbl_res);
        use ()
      end;
      desc
(* User-facing [lookup_all_labels]; mirrors [lookup_all_constructors]
   above, including the [] result for unbound plain identifiers. *)
let lookup_all_labels ?loc ?(mark = true) lid env =
  try
    let lbls = lookup_all_labels ?loc ~mark lid env in
    let wrap_use desc use () =
      if mark then begin
        mark_type_path env (ty_path desc.lbl_res);
        use ()
      end
    in
    List.map (fun (lbl, use) -> (lbl, wrap_use lbl use)) lbls
  with
    Not_found when is_lident lid -> []
(* User-facing wrappers that merely default [mark] to [true]. *)
let lookup_module ~load ?loc ?(mark = true) lid env =
  lookup_module ~load ?loc ~mark lid env
let lookup_modtype ?loc ?(mark = true) lid env =
  lookup_modtype ?loc ~mark lid env
(* User-facing [lookup_class]. *)
let lookup_class ?loc ?(mark = true) lid env =
  let (_, desc) as r = lookup_class ?loc ~mark lid env in
  (* special support for unbound classes: an empty class path means the
     class is unbound, so re-do the lookup as a type to report the
     proper error -- TODO confirm exact original comment wording *)
  if Path.name desc.cty_path = "" then ignore (lookup_type ?loc ~mark lid env)
  else if mark then mark_type_path env desc.cty_path;
  r
(* User-facing [lookup_cltype]; mirrors [lookup_class] above. *)
let lookup_cltype ?loc ?(mark = true) lid env =
  let (_, desc) as r = lookup_cltype ?loc ~mark lid env in
  if Path.name desc.clty_path = "" then ignore (lookup_type ?loc lid env)
  else mark_type_path env desc.clty_path;
  (* NOTE(review): this second call re-marks [clty_path] unconditionally
     and, unlike the branch above, does not consult [mark] -- confirm
     this double marking is intentional. *)
  mark_type_path env desc.clty_path;
  r
(* [may_subst f sub x] applies [f sub] to [x]; a [None] substitution
   acts as the identity, returning [x] untouched. *)
let may_subst subst_f sub x =
  match sub with
  | Some s -> subst_f s x
  | None -> x
(* Iter on an environment (ignoring the body of functors and
   not yet evaluated structures) *)
type iter_cont = unit -> unit
(* Pending continuations registered during iteration, paired with the
   path of the module they will descend into. *)
let iter_env_cont = ref []
(* Decide whether a module of type [mty] should be visited by
   [iter_env]: skip aliases to persistent units that were never
   loaded, so iteration does not force extra .cmi files. *)
let rec scrape_alias_for_visit env sub mty =
  match mty with
  | Mty_alias path ->
      begin match may_subst Subst.module_path sub path with
      | Pident id
        when Ident.persistent id
          && not (Persistent_env.looked_up persistent_env (Ident.name id)) ->
          false
      | path -> (* PR#6600: find_module may raise Not_found *)
          try scrape_alias_for_visit env sub (find_module path env).md_type
          with Not_found -> false
      end
  | _ -> true
(* Apply [f] to every binding reachable through [proj1] (environment
   table) / [proj2] (component table).  Sub-modules are not descended
   into eagerly: a continuation per module is queued on
   [iter_env_cont] and only forces components when run. *)
let iter_env proj1 proj2 f env () =
  IdTbl.iter (fun id x -> f (Pident id) x) (proj1 env);
  let rec iter_components path path' mcomps =
    let cont () =
      (* skip aliases to never-loaded persistent units *)
      let visit =
        match EnvLazy.get_arg mcomps.comps with
        | None -> true
        | Some { cm_mty; cm_freshening_subst; _ } ->
            scrape_alias_for_visit env cm_freshening_subst cm_mty
      in
      if not visit then () else
      match get_components mcomps with
        Structure_comps comps ->
          NameMap.iter
            (fun s d -> f (Pdot (path, s)) (Pdot (path', s), d))
            (proj2 comps);
          NameMap.iter
            (fun s (c, _) ->
               iter_components (Pdot (path, s)) (Pdot (path', s)) c)
            comps.comp_components
      | Functor_comps _ -> ()
    in iter_env_cont := (path, cont) :: !iter_env_cont
  in
  IdTbl.iter
    (fun id (path, comps) ->
       match comps with
       | Value (comps, _) -> iter_components (Pident id) path comps
       | Persistent ->
           (* only iterate persistent units already in the cache *)
           let modname = Ident.name id in
           match Persistent_env.find_in_cache persistent_env modname with
           | None -> ()
           | Some pm -> iter_components (Pident id) path pm.pm_components)
    env.components
(* Run the given iteration continuations and return, in registration
   order, the new continuations they pushed onto [iter_env_cont]. *)
let run_iter_cont conts =
  iter_env_cont := [];
  List.iter (fun k -> k ()) conts;
  let pending = List.rev !iter_env_cont in
  iter_env_cont := [];
  pending
(* Iterate [f] over all type declarations reachable from [env]. *)
let iter_types f = iter_env (fun env -> env.types) (fun sc -> sc.comp_types) f
(* Cheap (physical-equality) check that two environments expose the
   same type and component tables. *)
let same_types env1 env2 =
  env1.types == env2.types && env1.components == env2.components
(* Names of all persistent units consulted so far. *)
let used_persistent () =
  Persistent_env.fold persistent_env
    (fun s _m r -> Concr.add s r)
    Concr.empty
(* Look up component [s] through [proj] inside the components [mcomps]
   at path [p]; functors expose no named components. *)
let find_all_comps proj s (p,(mcomps, _)) =
  match get_components mcomps with
    Functor_comps _ -> []
  | Structure_comps comps ->
      try
        let c = NameMap.find s (proj comps) in
        [Pdot(p,s), c]
      with Not_found -> []
(* All component tables bound to the same name as the module at
   [path] (i.e. shadowed by it), outermost last. *)
let rec find_shadowed_comps path env =
  match path with
    Pident id ->
      List.filter_map
        (fun (p, data) ->
           match data with
           | Value x -> Some (p, x)
           | Persistent -> None)
        (IdTbl.find_all (Ident.name id) env.components)
  | Pdot (p, s) ->
      let l = find_shadowed_comps p env in
      let l' =
        List.map (find_all_comps (fun comps -> comps.comp_components) s) l
      in
      List.flatten l'
  | Papply _ -> []
(* All bindings shadowed by the one at [path], looked up through
   [proj1] (environment table) / [proj2] (component table). *)
let find_shadowed proj1 proj2 path env =
  match path with
    Pident id ->
      IdTbl.find_all (Ident.name id) (proj1 env)
  | Pdot (p, s) ->
      let l = find_shadowed_comps p env in
      let l' = List.map (find_all_comps proj2 s) l in
      List.flatten l'
  | Papply _ -> []
(* Paths of all type declarations shadowed by the one at [path]. *)
let find_shadowed_types path env =
  List.map fst
    (find_shadowed
       (fun env -> env.types) (fun comps -> comps.comp_types) path env)
(* Expand manifest module type names at the top of the given module type *)
(* [sub] is an optional freshening substitution applied before looking
   anything up; [path], when given, is the path of the module whose
   type [mty] is, and triggers strengthening of the final result. *)
let rec scrape_alias env sub ?path mty =
  match mty, path with
    Mty_ident _, _ ->
      let p =
        match may_subst (Subst.modtype Keep) sub mty with
        | Mty_ident p -> p
        | _ -> assert false (* only [Mty_ident]s in [sub] *)
      in
      begin try
        scrape_alias env sub (find_modtype_expansion p env) ?path
      with Not_found ->
        mty
      end
  | Mty_alias path, _ ->
      let path = may_subst Subst.module_path sub path in
      begin try
        scrape_alias env sub (find_module path env).md_type ~path
      with Not_found ->
        (*Location.prerr_warning Location.none
          (Warnings.No_cmi_file (Path.name path));*)
        mty
      end
  | mty, Some path ->
      !strengthen ~aliasable:true env mty path
  | _ -> mty
(* Given a signature and a root path, prefix all idents in the signature
   by the root path and build the corresponding substitution. *)
(* Returns the signature items paired with their prefixed paths, plus
   the extended freshening and prefixing substitutions.  The optional
   [freshening_sub] renames every bound ident; [prefixing_sub] maps the
   (possibly renamed) idents to their [root]-prefixed paths. *)
let prefix_idents root freshening_sub prefixing_sub sg =
  (* Rename [id] and record the renaming in the freshening
     substitution, when there is one. *)
  let refresh id add_fn = function
    | None -> id, None
    | Some sub ->
        let id' = Ident.rename id in
        id', Some (add_fn id (Pident id') sub)
  in
  let rec prefix_idents root items_and_paths freshening_sub prefixing_sub =
    function
    | [] -> (List.rev items_and_paths, freshening_sub, prefixing_sub)
    | Sig_value(id, _, _) as item :: rem ->
        (* values need no substitution entry, only a path *)
        let p = Pdot(root, Ident.name id) in
        prefix_idents root
          ((item, p) :: items_and_paths) freshening_sub prefixing_sub rem
    | Sig_type(id, td, rs, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub = refresh id Subst.add_type freshening_sub in
        prefix_idents root
          ((Sig_type(id', td, rs, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_type id' p prefixing_sub)
          rem
    | Sig_typext(id, ec, es, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub = refresh id Subst.add_type freshening_sub in
        (* we extend the substitution in case of an inlined record *)
        prefix_idents root
          ((Sig_typext(id', ec, es, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_type id' p prefixing_sub)
          rem
    | Sig_module(id, pres, md, rs, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub = refresh id Subst.add_module freshening_sub in
        prefix_idents root
          ((Sig_module(id', pres, md, rs, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_module id' p prefixing_sub)
          rem
    | Sig_modtype(id, mtd, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub =
          refresh id (fun i p s -> Subst.add_modtype i (Mty_ident p) s)
            freshening_sub
        in
        prefix_idents root
          ((Sig_modtype(id', mtd, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_modtype id' (Mty_ident p) prefixing_sub)
          rem
    | Sig_class(id, cd, rs, vis) :: rem ->
        (* pretend this is a type, cf. PR#6650 *)
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub = refresh id Subst.add_type freshening_sub in
        prefix_idents root
          ((Sig_class(id', cd, rs, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_type id' p prefixing_sub)
          rem
    | Sig_class_type(id, ctd, rs, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub = refresh id Subst.add_type freshening_sub in
        prefix_idents root
          ((Sig_class_type(id', ctd, rs, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_type id' p prefixing_sub)
          rem
  in
  prefix_idents root [] freshening_sub prefixing_sub sg
(* Compute structure descriptions *)
(* Prepend [decl] to the list of declarations recorded under [id]. *)
let add_to_tbl id decl tbl =
  let decls =
    match NameMap.find_opt id tbl with
    | Some ds -> ds
    | None -> []
  in
  NameMap.add id (decl :: decls) tbl
(* Address of a value binding: primitives have no runtime address at
   all; other values live directly behind their identifier. *)
let value_declaration_address (_ : t) id decl =
  match decl.val_kind with
  | Val_prim _ -> EnvLazy.create_failed Not_found
  | _ -> EnvLazy.create_forced (Aident id)
(* Extension constructors are always addressed by their identifier. *)
let extension_declaration_address (_ : t) id (_ : extension_constructor) =
  EnvLazy.create_forced (Aident id)
(* Classes are always addressed by their identifier. *)
let class_declaration_address (_ : t) id (_ : class_declaration) =
  EnvLazy.create_forced (Aident id)
(* Address of a module: an absent module must be an alias, resolved
   lazily through the aliased path; a present one is its identifier. *)
let module_declaration_address env id presence md =
  match presence with
  | Mp_absent -> begin
      match md.md_type with
      | Mty_alias path -> EnvLazy.create (ModAlias {env; path})
      | _ -> assert false
    end
  | Mp_present ->
      EnvLazy.create_forced (Aident id)
(* Build the components of the module [mty] at [path].  Nothing is
   computed here: the work is deferred until [comps] is forced, which
   runs [components_of_module_maker] below on the saved arguments. *)
let rec components_of_module ~alerts ~loc env fs ps path addr mty =
  {
    alerts;
    loc;
    comps = EnvLazy.create {
      cm_env = env;
      cm_freshening_subst = fs;
      cm_prefixing_subst = ps;
      cm_path = path;
      cm_addr = addr;
      cm_mty = mty
    }
  }
and components_of_module_maker {cm_env; cm_freshening_subst; cm_prefixing_subst;
                                cm_path; cm_addr; cm_mty} =
  (* Force the components of a module: build the name -> description
     tables for every kind of item in the module's signature.
     Returns [None] for unexpanded [Mty_ident] and for aliases. *)
  match scrape_alias cm_env cm_freshening_subst cm_mty with
    Mty_signature sg ->
      let c =
        { comp_values = NameMap.empty;
          comp_constrs = NameMap.empty;
          comp_labels = NameMap.empty; comp_types = NameMap.empty;
          comp_modules = NameMap.empty; comp_modtypes = NameMap.empty;
          comp_components = NameMap.empty; comp_classes = NameMap.empty;
          comp_cltypes = NameMap.empty } in
      let items_and_paths, freshening_sub, prefixing_sub =
        prefix_idents cm_path cm_freshening_subst cm_prefixing_subst sg
      in
      let env = ref cm_env in
      (* runtime addresses of the structure's fields, numbered in order *)
      let pos = ref 0 in
      let next_address () =
        let addr : address_unforced =
          Projection { parent = cm_addr; pos = !pos }
        in
        incr pos;
        EnvLazy.create addr
      in
      let sub = may_subst Subst.compose freshening_sub prefixing_sub in
      List.iter (fun (item, path) ->
          match item with
            Sig_value(id, decl, _) ->
              let decl' = Subst.value_description sub decl in
              let addr =
                match decl.val_kind with
                | Val_prim _ -> EnvLazy.create_failed Not_found
                | _ -> next_address ()
              in
              c.comp_values <-
                NameMap.add (Ident.name id) (decl', addr) c.comp_values;
          | Sig_type(id, decl, _, _) ->
              let fresh_decl =
                may_subst Subst.type_declaration freshening_sub decl
              in
              let final_decl = Subst.type_declaration prefixing_sub fresh_decl in
              Datarepr.set_row_name final_decl
                (Subst.type_path prefixing_sub (Path.Pident id));
              let constructors =
                List.map snd (Datarepr.constructors_of_type path final_decl) in
              let labels =
                List.map snd (Datarepr.labels_of_type path final_decl) in
              c.comp_types <-
                NameMap.add (Ident.name id)
                  (final_decl, (constructors, labels))
                  c.comp_types;
              List.iter
                (fun descr ->
                   c.comp_constrs <-
                     add_to_tbl descr.cstr_name (descr, None) c.comp_constrs)
                constructors;
              List.iter
                (fun descr ->
                   c.comp_labels <-
                     add_to_tbl descr.lbl_name descr c.comp_labels)
                labels;
              env := store_type_infos id fresh_decl !env
          | Sig_typext(id, ext, _, _) ->
              let ext' = Subst.extension_constructor sub ext in
              let descr = Datarepr.extension_descr path ext' in
              let addr = next_address () in
              c.comp_constrs <-
                add_to_tbl (Ident.name id) (descr, Some addr) c.comp_constrs
          | Sig_module(id, pres, md, _, _) ->
              let md' =
                (* The prefixed items get the same scope as [cm_path], which is
                   the prefix. *)
                EnvLazy.create (sub, Subst.Rescope (Path.scope cm_path), md)
              in
              let addr =
                match pres with
                | Mp_absent -> begin
                    match md.md_type with
                    | Mty_alias p ->
                        let path = may_subst Subst.module_path freshening_sub p in
                        EnvLazy.create (ModAlias {env = !env; path})
                    | _ -> assert false
                  end
                | Mp_present -> next_address ()
              in
              c.comp_modules <-
                NameMap.add (Ident.name id) (md', addr) c.comp_modules;
              let alerts =
                Builtin_attributes.alerts_of_attrs md.md_attributes
              in
              let comps =
                components_of_module ~alerts ~loc:md.md_loc !env freshening_sub
                  prefixing_sub path addr md.md_type
              in
              c.comp_components <-
                NameMap.add (Ident.name id) (comps, addr) c.comp_components;
              env :=
                store_module ~freshening_sub ~check:false id addr pres md !env
          | Sig_modtype(id, decl, _) ->
              let fresh_decl =
                (* the fresh_decl is only going in the local temporary env, and
                   shouldn't be used for anything. So we make the items local. *)
                may_subst (Subst.modtype_declaration Make_local) freshening_sub
                  decl
              in
              let final_decl =
                (* The prefixed items get the same scope as [cm_path], which is
                   the prefix. *)
                Subst.modtype_declaration (Rescope (Path.scope cm_path))
                  prefixing_sub fresh_decl
              in
              c.comp_modtypes <-
                NameMap.add (Ident.name id) final_decl c.comp_modtypes;
              env := store_modtype id fresh_decl !env
          | Sig_class(id, decl, _, _) ->
              let decl' = Subst.class_declaration sub decl in
              c.comp_classes <-
                NameMap.add (Ident.name id) (decl', next_address ())
                  c.comp_classes
          | Sig_class_type(id, decl, _, _) ->
              let decl' = Subst.cltype_declaration sub decl in
              c.comp_cltypes <-
                NameMap.add (Ident.name id) decl' c.comp_cltypes)
        items_and_paths;
      Some (Structure_comps c)
  | Mty_functor(param, ty_arg, ty_res) ->
      let sub =
        may_subst Subst.compose cm_freshening_subst cm_prefixing_subst
      in
      let scoping = Subst.Rescope (Path.scope cm_path) in
      Some (Functor_comps {
          fcomp_param = param;
          (* fcomp_arg and fcomp_res must be prefixed eagerly, because
             they are interpreted in the outer environment *)
          fcomp_arg = may_map (Subst.modtype scoping sub) ty_arg;
          fcomp_res = Subst.modtype scoping sub ty_res;
          fcomp_cache = Hashtbl.create 17;
          fcomp_subst_cache = Hashtbl.create 17 })
  | Mty_ident _
  | Mty_alias _ -> None
(* Insertion of bindings by identifier + path *)
and check_usage loc id warn tbl =
  (* Arrange for the warning [warn name] to be emitted at the end of
     the compilation unit if the declaration of [id] at [loc] is never
     marked as used; [tbl] maps (name, loc) to the marking callback. *)
  if not loc.Location.loc_ghost && Warnings.is_active (warn "") then begin
    let name = Ident.name id in
    let key = (name, loc) in
    if Hashtbl.mem tbl key then ()
    else let used = ref false in
    Hashtbl.add tbl key (fun () -> used := true);
    (* names starting with '_' or '#' are deliberately exempt *)
    if not (name = "" || name.[0] = '_' || name.[0] = '#')
    then
      !add_delayed_check_forward
        (fun () -> if not !used then Location.prerr_warning loc (warn name))
  end;
and check_value_name name loc =
  (* Note: we could also check here general validity of the
     identifier, to protect against bad identifiers forged by -pp or
     -ppx preprocessors. *)
  (* '#' is only tolerated as the first character of a value name *)
  if String.length name > 0 && (name.[0] = '#') then
    for i = 1 to String.length name - 1 do
      if name.[i] = '#' then
        error (Illegal_value_name(loc, name))
    done
and store_value ?check id addr decl env =
  (* Bind the value [id] in [env]; [check], when given, registers an
     unused-value warning via [check_usage]. *)
  check_value_name (Ident.name id) decl.val_loc;
  may (fun f -> check_usage decl.val_loc id f value_declarations) check;
  { env with
    values = IdTbl.add id (decl, addr) env.values;
    summary = Env_value(env.summary, id, decl) }
and store_type ~check id info env =
  (* Bind type [id] together with all of its constructors and labels;
     when [check] holds, also register unused-declaration and
     unused-constructor warnings fired at the end of the unit. *)
  let loc = info.type_loc in
  if check then
    check_usage loc id (fun s -> Warnings.Unused_type_declaration s)
      type_declarations;
  let path = Pident id in
  let constructors = Datarepr.constructors_of_type path info in
  let labels = Datarepr.labels_of_type path info in
  let descrs = (List.map snd constructors, List.map snd labels) in
  if check && not loc.Location.loc_ghost &&
     Warnings.is_active (Warnings.Unused_constructor ("", false, false))
  then begin
    let ty = Ident.name id in
    List.iter
      begin fun (_, {cstr_name = c; _}) ->
        let k = (ty, loc, c) in
        if not (Hashtbl.mem used_constructors k) then
          let used = constructor_usages () in
          Hashtbl.add used_constructors k (add_constructor_usage used);
          (* type names starting with '_' are deliberately exempt *)
          if not (ty = "" || ty.[0] = '_')
          then !add_delayed_check_forward
              (fun () ->
                 if not (is_in_signature env) && not used.cu_positive then
                   Location.prerr_warning loc
                     (Warnings.Unused_constructor
                        (c, used.cu_pattern, used.cu_privatize)))
      end
      constructors
  end;
  { env with
    constrs =
      List.fold_right
        (fun (id, descr) constrs -> TycompTbl.add id (descr, None) constrs)
        constructors
        env.constrs;
    labels =
      List.fold_right
        (fun (id, descr) labels -> TycompTbl.add id descr labels)
        labels
        env.labels;
    types = IdTbl.add id (info, descrs) env.types;
    summary = Env_type(env.summary, id, info) }
and store_type_infos id info env =
  (* Simplified version of store_type that doesn't compute and store
     constructor and label infos, but simply record the arity and
     manifest-ness of the type. Used in components_of_module to
     keep track of type abbreviations (e.g. type t = float) in the
     computation of label representations. *)
  { env with
    (* the empty constructor/label lists stand for "not computed" *)
    types = IdTbl.add id (info,([],[])) env.types;
    summary = Env_type(env.summary, id, info) }
and store_extension ~check id addr ext env =
  (* Bind the extension constructor [id] in [env]; when [check] holds,
     register an unused-extension warning fired at end of unit. *)
  let loc = ext.ext_loc in
  if check && not loc.Location.loc_ghost &&
     Warnings.is_active (Warnings.Unused_extension ("", false, false, false))
  then begin
    let is_exception = Path.same ext.ext_type_path Predef.path_exn in
    let ty = Path.last ext.ext_type_path in
    let n = Ident.name id in
    let k = (ty, loc, n) in
    if not (Hashtbl.mem used_constructors k) then begin
      let used = constructor_usages () in
      Hashtbl.add used_constructors k (add_constructor_usage used);
      !add_delayed_check_forward
        (fun () ->
           if not (is_in_signature env) && not used.cu_positive then
             Location.prerr_warning loc
               (Warnings.Unused_extension
                  (n, is_exception, used.cu_pattern, used.cu_privatize)
               )
        )
    end;
  end;
  let desc = Datarepr.extension_descr (Pident id) ext in
  { env with
    constrs = TycompTbl.add id (desc, Some addr) env.constrs;
    summary = Env_extension(env.summary, id, ext) }
and store_module ~check ~freshening_sub id addr presence md env =
  (* Bind module [id] in [env].  Both the declaration and its
     components are stored lazily; [freshening_sub], when present, is
     applied on forcing. *)
  let loc = md.md_loc in
  if check then
    check_usage loc id (fun s -> Warnings.Unused_module s)
      module_declarations;
  let alerts = Builtin_attributes.alerts_of_attrs md.md_attributes in
  let module_decl_lazy =
    match freshening_sub with
    | None -> EnvLazy.create_forced md
    | Some s -> EnvLazy.create (s, Subst.Rescope (Ident.scope id), md)
  in
  { env with
    modules = IdTbl.add id (Value (module_decl_lazy, addr)) env.modules;
    components =
      IdTbl.add id
        (Value
           (components_of_module ~alerts ~loc:md.md_loc
              env freshening_sub Subst.identity (Pident id) addr md.md_type,
            addr))
        env.components;
    summary = Env_module(env.summary, id, presence, md) }
and store_modtype id info env =
  (* Bind the module type [id] in [env]. *)
  { env with
    modtypes = IdTbl.add id info env.modtypes;
    summary = Env_modtype(env.summary, id, info) }
and store_class id addr desc env =
  (* Bind the class [id] in [env]. *)
  { env with
    classes = IdTbl.add id (desc, addr) env.classes;
    summary = Env_class(env.summary, id, desc) }
and store_cltype id desc env =
  (* Bind the class type [id] in [env]. *)
  { env with
    cltypes = IdTbl.add id desc env.cltypes;
    summary = Env_cltype(env.summary, id, desc) }
(* Public [scrape_alias]: the internal one with no freshening substitution. *)
let scrape_alias env mty = scrape_alias env None mty
(* Compute the components of a functor application in a path. *)
let components_of_functor_appl f env p1 p2 =
  (* Components of the application of functor [p1] to argument [p2],
     memoized per argument path in [f.fcomp_cache]. *)
  try
    Hashtbl.find f.fcomp_cache p2
  with Not_found ->
    let p = Papply(p1, p2) in
    let sub = Subst.add_module f.fcomp_param p2 Subst.identity in
    (* we have to apply eagerly instead of passing sub to [components_of_module]
       because of the call to [check_well_formed_module]. *)
    let mty = Subst.modtype (Rescope (Path.scope p)) sub f.fcomp_res in
    let addr = EnvLazy.create_failed Not_found in
    !check_well_formed_module env Location.(in_file !input_name)
      ("the signature of " ^ Path.name p) mty;
    let comps =
      components_of_module ~alerts:Misc.Stdlib.String.Map.empty
        ~loc:Location.none
        (*???*)
        env None Subst.identity p addr mty
    in
    Hashtbl.add f.fcomp_cache p2 comps;
    comps
(* Define forward functions *)
(* Tie the forward references declared earlier in the file, now that
   the recursive component-building functions exist. *)
let _ =
  components_of_module' := components_of_module;
  components_of_functor_appl' := components_of_functor_appl;
  components_of_module_maker' := components_of_module_maker
(* Insertion of bindings by identifier *)
(* Record [id] as a functor argument in [env]. *)
let add_functor_arg id env =
  {env with
   functor_args = Ident.add id () env.functor_args;
   summary = Env_functor_arg (env.summary, id)}
(* The [add_*] functions below bind one item under a given identifier,
   computing its address where relevant. *)
let add_value ?check id desc env =
  let addr = value_declaration_address env id desc in
  store_value ?check id addr desc env
let add_type ~check id info env =
  store_type ~check id info env
and add_extension ~check id ext env =
  let addr = extension_declaration_address env id ext in
  store_extension ~check id addr ext env
and add_module_declaration ?(arg=false) ~check id presence md env =
  let addr = module_declaration_address env id presence md in
  let env = store_module ~freshening_sub:None ~check id addr presence md env in
  if arg then add_functor_arg id env else env
and add_modtype id info env =
  store_modtype id info env
and add_class id ty env =
  let addr = class_declaration_address env id ty in
  store_class id addr ty env
and add_cltype id ty env =
  store_cltype id ty env
(* Convenience wrapper: bind a module from its type alone, no checks. *)
let add_module ?arg id presence mty env =
  add_module_declaration ~check:false ?arg id presence (md mty) env
(* Record a local type constraint on [path]. *)
let add_local_type path info env =
  { env with
    local_constraints = Path.Map.add path info env.local_constraints }
(* Insertion of bindings by name *)
(* The [enter_*] functions create a fresh identifier for [name], bind
   it, and return the identifier together with the extended env. *)
let enter_value ?check name desc env =
  let id = Ident.create_local name in
  let addr = value_declaration_address env id desc in
  let env = store_value ?check id addr desc env in
  (id, env)
let enter_type ~scope name info env =
  let id = Ident.create_scoped ~scope name in
  let env = store_type ~check:true id info env in
  (id, env)
let enter_extension ~scope name ext env =
  let id = Ident.create_scoped ~scope name in
  let addr = extension_declaration_address env id ext in
  let env = store_extension ~check:true id addr ext env in
  (id, env)
let enter_module_declaration ?arg id presence md env =
  add_module_declaration ?arg ~check:true id presence md env
let enter_modtype ~scope name mtd env =
  let id = Ident.create_scoped ~scope name in
  let env = store_modtype id mtd env in
  (id, env)
let enter_class ~scope name desc env =
  let id = Ident.create_scoped ~scope name in
  let addr = class_declaration_address env id desc in
  let env = store_class id addr desc env in
  (id, env)
let enter_cltype ~scope name desc env =
  let id = Ident.create_scoped ~scope name in
  let env = store_cltype id desc env in
  (id, env)
let enter_module ~scope ?arg s presence mty env =
  let id = Ident.create_scoped ~scope s in
  let env = enter_module_declaration ?arg id presence (md mty) env in
  (id, env)
(* Insertion of all components of a signature *)
(* Bind one signature item under its own identifier, without checks. *)
let add_item comp env =
  match comp with
    Sig_value(id, decl, _) -> add_value id decl env
  | Sig_type(id, decl, _, _) -> add_type ~check:false id decl env
  | Sig_typext(id, ext, _, _) -> add_extension ~check:false id ext env
  | Sig_module(id, presence, md, _, _) ->
      add_module_declaration ~check:false id presence md env
  | Sig_modtype(id, decl, _) -> add_modtype id decl env
  | Sig_class(id, decl, _, _) -> add_class id decl env
  | Sig_class_type(id, decl, _, _) -> add_cltype id decl env
(* Extend [env] with every item of the signature, in order. *)
let add_signature sg env =
  List.fold_left (fun env comp -> add_item comp env) env sg
(* Rescope the signature's bound idents to [scope], then bind all of
   its items; returns the rescoped signature too. *)
let enter_signature ~scope sg env =
  let sg = Subst.signature (Rescope scope) Subst.identity sg in
  sg, add_signature sg env
(* Open a signature path *)
(* Merge every component table of [comps] (a structure at path [root])
   into [env0], recording the open in the summary.  [slot] is the
   callback invoked when an opened binding is actually used (see
   [open_signature] below). *)
let add_components slot root env0 comps =
  let add_l w comps env0 =
    TycompTbl.add_open slot w comps env0
  in
  let add w comps env0 = IdTbl.add_open slot w root comps env0 in
  let constrs =
    add_l (fun x -> `Constructor x) comps.comp_constrs env0.constrs
  in
  let labels =
    add_l (fun x -> `Label x) comps.comp_labels env0.labels
  in
  let values =
    add (fun x -> `Value x) comps.comp_values env0.values
  in
  let types =
    add (fun x -> `Type x) comps.comp_types env0.types
  in
  let modtypes =
    add (fun x -> `Module_type x) comps.comp_modtypes env0.modtypes
  in
  let classes =
    add (fun x -> `Class x) comps.comp_classes env0.classes
  in
  let cltypes =
    add (fun x -> `Class_type x) comps.comp_cltypes env0.cltypes
  in
  (* modules and their components are wrapped in [Value]: an opened
     module is never a persistent placeholder *)
  let components =
    let components =
      NameMap.map (fun x -> Value x) comps.comp_components
    in
    add (fun x -> `Component x) components env0.components
  in
  let modules =
    let modules =
      NameMap.map (fun x -> Value x) comps.comp_modules
    in
    add (fun x -> `Module x) modules env0.modules
  in
  { env0 with
    summary = Env_open(env0.summary, root);
    constrs;
    labels;
    values;
    types;
    modtypes;
    classes;
    cltypes;
    components;
    modules;
  }
(* Open the structure at [root], merging its components into [env0];
   functors cannot be opened, hence the [option] result. *)
let open_signature slot root env0 =
  match get_components (find_module_descr root env0) with
  | Functor_comps _ -> None
  | Structure_comps comps ->
      Some (add_components slot root env0 comps)
(* Open a signature from a file *)
let open_pers_signature name env =
  match open_signature None (Pident(Ident.create_persistent name)) env with
  | Some env -> env
  | None -> assert false (* a compilation unit cannot refer to a functor *)
(* User-facing [open_signature]: open [root] with warning support —
   unused-open, plus shadowing warnings for identifiers and for
   labels/constructors (shadowing is only reported for [Fresh] opens,
   i.e. not for [open!]). *)
let open_signature
    ?(used_slot = ref false)
    ?(loc = Location.none) ?(toplevel = false)
    ovf root env =
  let unused =
    match ovf with
    | Asttypes.Fresh -> Warnings.Unused_open (Path.name root)
    | Asttypes.Override -> Warnings.Unused_open_bang (Path.name root)
  in
  let warn_unused =
    Warnings.is_active unused
  and warn_shadow_id =
    Warnings.is_active (Warnings.Open_shadow_identifier ("", ""))
  and warn_shadow_lc =
    Warnings.is_active (Warnings.Open_shadow_label_constructor ("",""))
  in
  if not toplevel && not loc.Location.loc_ghost
     && (warn_unused || warn_shadow_id || warn_shadow_lc)
  then begin
    let used = used_slot in
    if warn_unused then
      !add_delayed_check_forward
        (fun () ->
           if not !used then begin
             used := true;
             Location.prerr_warning loc unused
           end
        );
    let shadowed = ref [] in
    (* [slot] runs on each use of an opened item: it reports shadowing
       (once per kind/name pair) and marks the open as used. *)
    let slot s b =
      begin match check_shadowing env b with
      | Some kind when
          ovf = Asttypes.Fresh && not (List.mem (kind, s) !shadowed) ->
          shadowed := (kind, s) :: !shadowed;
          let w =
            match kind with
            | "label" | "constructor" ->
                Warnings.Open_shadow_label_constructor (kind, s)
            | _ -> Warnings.Open_shadow_identifier (kind, s)
          in
          Location.prerr_warning loc w
      | _ -> ()
      end;
      used := true
    in
    open_signature (Some slot) root env
  end
  else open_signature None root env
(* Read a signature from a file *)
(* Signature of the compilation unit [modname] stored in [filename]. *)
let read_signature modname filename =
  let pm = read_pers_mod modname filename in
  Lazy.force pm.pm_signature
(* Is [c] a valid character of a Latin-1 OCaml identifier?  Letters
   (including the Latin-1 accented ranges), digits, '_' and '\''. *)
let is_identchar_latin1 c =
  match c with
  | 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '\''
  | '\192'..'\214' | '\216'..'\246' | '\248'..'\255' -> true
  | _ -> false
(* Module name provided by the file [fn]: [Some unit] when [fn] is a
   .cmi file whose capitalized basename is a valid Latin-1 identifier,
   [None] otherwise. *)
let unit_name_of_filename fn =
  match Filename.extension fn with
  | ".cmi" ->
      let unit = String.capitalize_ascii (Filename.remove_extension fn) in
      if String.for_all is_identchar_latin1 unit then Some unit else None
  | _ -> None
(* Set of compilation-unit names provided by the .cmi files of [dir]. *)
let persistent_structures_of_dir dir =
  Load_path.Dir.files dir
  |> List.to_seq
  |> Seq.filter_map unit_name_of_filename
  |> String.Set.of_seq
(* Save a signature to a file *)
let save_signature_with_transform cmi_transform ~alerts sg modname filename =
  (* Write [sg] as unit [modname]'s cmi into [filename], applying
     [cmi_transform] to the cmi before saving; returns the saved cmi. *)
  Btype.cleanup_abbrev ();
  Subst.reset_for_saving ();
  let sg = Subst.signature Make_local (Subst.for_saving Subst.identity) sg in
  let cmi =
    Persistent_env.make_cmi persistent_env modname sg alerts
    |> cmi_transform in
  let pm = save_sign_of_cmi
      { Persistent_env.Persistent_signature.cmi; filename } in
  Persistent_env.save_cmi persistent_env
    { Persistent_env.Persistent_signature.filename; cmi } pm;
  cmi
(* Save with the cmi unchanged. *)
let save_signature ~alerts sg modname filename =
  save_signature_with_transform (fun cmi -> cmi)
    ~alerts sg modname filename
(* Save, overriding the recorded imports with [imports]. *)
let save_signature_with_imports ~alerts sg modname filename imports =
  let with_imports cmi = { cmi with cmi_crcs = imports } in
  save_signature_with_transform with_imports
    ~alerts sg modname filename
(* Folding on environments *)
(* Fold [f] over items of one kind: over the whole environment when
   [lid] is [None], otherwise over the components of the module [lid]
   (functors contribute nothing). *)
let find_all proj1 proj2 f lid env acc =
  match lid with
  | None ->
      IdTbl.fold_name
        (fun name (p, data) acc -> f name p data acc)
        (proj1 env) acc
  | Some l ->
      let p, desc = lookup_module_descr ~mark:true l env in
      begin match get_components desc with
        Structure_comps c ->
          NameMap.fold
            (fun s data acc -> f s (Pdot (p, s)) data acc)
            (proj2 c) acc
      | Functor_comps _ ->
          acc
      end
(* Like [find_all] for the "simple" tables (constructors, labels);
   within a module only the head of each name's declaration list is
   folded. *)
let find_all_simple_list proj1 proj2 f lid env acc =
  match lid with
  | None ->
      TycompTbl.fold_name
        (fun data acc -> f data acc)
        (proj1 env) acc
  | Some l ->
      let (_p, desc) = lookup_module_descr ~mark:true l env in
      begin match get_components desc with
        Structure_comps c ->
          NameMap.fold
            (fun _s comps acc ->
               match comps with
               | [] -> acc
               | data :: _ -> f data acc)
            (proj2 c) acc
      | Functor_comps _ ->
          acc
      end
(* Fold [f] over the modules of [env] (or of the module [lid]).
   Persistent placeholders only contribute when their unit is already
   in the cache — folding never forces new .cmi files. *)
let fold_modules f lid env acc =
  match lid with
  | None ->
      IdTbl.fold_name
        (fun name (p, data) acc ->
           match data with
           | Value (data, _) ->
               let data = EnvLazy.force subst_modtype_maker data in
               f name p data acc
           | Persistent ->
               match Persistent_env.find_in_cache persistent_env name with
               | None -> acc
               | Some pm ->
                   let data = md (Mty_signature (Lazy.force pm.pm_signature)) in
                   f name p data acc)
        env.modules
        acc
  | Some l ->
      let p, desc = lookup_module_descr ~mark:true l env in
      begin match get_components desc with
      | Structure_comps c ->
          NameMap.fold
            (fun s (data, _) acc ->
               f s (Pdot (p, s))
                 (EnvLazy.force subst_modtype_maker data) acc)
            c.comp_modules
            acc
      | Functor_comps _ ->
          acc
      end
(* Per-kind folds, all built on [find_all] / [find_all_simple_list]. *)
let fold_values f =
  find_all (fun env -> env.values) (fun sc -> sc.comp_values)
    (fun k p (vd, _) acc -> f k p vd acc)
and fold_constructors f =
  find_all_simple_list (fun env -> env.constrs) (fun sc -> sc.comp_constrs)
    (fun (cd, _) acc -> f cd acc)
and fold_labels f =
  find_all_simple_list (fun env -> env.labels) (fun sc -> sc.comp_labels) f
and fold_types f =
  find_all (fun env -> env.types) (fun sc -> sc.comp_types) f
and fold_modtypes f =
  find_all (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes) f
and fold_classes f =
  find_all (fun env -> env.classes) (fun sc -> sc.comp_classes)
    (fun k p (vd, _) acc -> f k p vd acc)
and fold_cltypes f =
  find_all (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes) f
let filter_non_loaded_persistent f env =
  (* Remove from [env] every not-yet-loaded persistent module rejected
     by [f], dropping the corresponding [Env_persistent] summary items
     as well. *)
  let to_remove =
    IdTbl.fold_name
      (fun name (_, data) acc ->
         match data with
         | Value _ -> acc
         | Persistent ->
             match Persistent_env.find_in_cache persistent_env name with
             | Some _ -> acc
             | None ->
                 if f (Ident.create_persistent name) then
                   acc
                 else
                   String.Set.add name acc)
      env.modules
      String.Set.empty
  in
  let remove_ids tbl ids =
    String.Set.fold
      (fun name tbl -> IdTbl.remove (Ident.create_persistent name) tbl)
      ids
      tbl
  in
  (* rebuild the summary without the removed [Env_persistent] nodes;
     stops early once every targeted name has been seen *)
  let rec filter_summary summary ids =
    if String.Set.is_empty ids then
      summary
    else
      match summary with
      | Env_empty -> summary
      | Env_value (s, id, vd) ->
          Env_value (filter_summary s ids, id, vd)
      | Env_type (s, id, td) ->
          Env_type (filter_summary s ids, id, td)
      | Env_extension (s, id, ec) ->
          Env_extension (filter_summary s ids, id, ec)
      | Env_module (s, id, mp, md) ->
          Env_module (filter_summary s ids, id, mp, md)
      | Env_modtype (s, id, md) ->
          Env_modtype (filter_summary s ids, id, md)
      | Env_class (s, id, cd) ->
          Env_class (filter_summary s ids, id, cd)
      | Env_cltype (s, id, ctd) ->
          Env_cltype (filter_summary s ids, id, ctd)
      | Env_open (s, p) ->
          Env_open (filter_summary s ids, p)
      | Env_functor_arg (s, id) ->
          Env_functor_arg (filter_summary s ids, id)
      | Env_constraints (s, cstrs) ->
          Env_constraints (filter_summary s ids, cstrs)
      | Env_copy_types (s, types) ->
          Env_copy_types (filter_summary s ids, types)
      | Env_persistent (s, id) ->
          if String.Set.mem (Ident.name id) ids then
            filter_summary s (String.Set.remove (Ident.name id) ids)
          else
            Env_persistent (filter_summary s ids, id)
  in
  { env with
    modules = remove_ids env.modules to_remove;
    components = remove_ids env.components to_remove;
    summary = filter_summary env.summary to_remove;
  }
(* Make the initial environment *)
(* Build the two initial environments containing the predefined types and
   exceptions.  NOTE(review): presumably the two variants correspond to
   the -safe-string / -unsafe-string modes — confirm against
   [Predef.build_initial_env]. *)
let (initial_safe_string, initial_unsafe_string) =
  Predef.build_initial_env
    (add_type ~check:false)
    (add_extension ~check:false)
    empty
(* Return the environment summary *)
(* Return the summary of [env].  When local constraints are present they
   are wrapped around the summary in an [Env_constraints] node so that
   they survive summarisation. *)
let summary env =
  let s = env.summary in
  let cstrs = env.local_constraints in
  if Path.Map.is_empty cstrs then s
  else Env_constraints (s, cstrs)
(* One-slot memoisation cache for [keep_only_summary], keyed by physical
   equality on the input environment. *)
let last_env = ref empty
let last_reduced_env = ref empty
(* [keep_only_summary env] returns a lightweight copy of [env] that keeps
   only the summary, the local constraints and the flags, dropping every
   lookup table.  The result for the most recent input is cached: the
   [==] test makes repeated calls on the same environment cheap. *)
let keep_only_summary env =
  if !last_env == env then !last_reduced_env
  else begin
    let new_env =
      {
        empty with
        summary = env.summary;
        local_constraints = env.local_constraints;
        flags = env.flags;
      }
    in
    last_env := env;
    last_reduced_env := new_env;
    new_env
  end
(* [env_of_only_summary env_from_summary env] rebuilds a full environment
   from the summary of [env] (the inverse of [keep_only_summary]), using
   the provided reconstruction function, then restores the local
   constraints and flags which are not part of the summary itself. *)
let env_of_only_summary env_from_summary env =
  let new_env = env_from_summary env.summary Subst.identity in
  { new_env with
    local_constraints = env.local_constraints;
    flags = env.flags;
  }
(* Error report *)
open Format
(* Print a human-readable description of an [error] on [ppf].  The
   format strings use Format boxes (@[ ... @]) and break hints (@ ) so
   the message reflows with the margin. *)
let report_error ppf = function
  | Missing_module(_, path1, path2) ->
      fprintf ppf "@[@[<hov>";
      (* [path2] is the expansion of [path1]; mention both only when
         they differ. *)
      if Path.same path1 path2 then
        fprintf ppf "Internal path@ %s@ is dangling." (Path.name path1)
      else
        fprintf ppf "Internal path@ %s@ expands to@ %s@ which is dangling."
          (Path.name path1) (Path.name path2);
      fprintf ppf "@]@ @[%s@ %s@ %s.@]@]"
        "The compiled interface for module" (Ident.name (Path.head path2))
        "was not found"
  | Illegal_value_name(_loc, name) ->
      fprintf ppf "'%s' is not a valid value identifier."
        name
(* Register [report_error] with the generic error-reporting machinery so
   that [Error] exceptions escaping this module are rendered nicely. *)
let () =
  Location.register_error_of_exn
    (function
      | Error err ->
          let loc = match err with
            (Missing_module (loc, _, _) | Illegal_value_name (loc, _)) -> loc
          in
          (* Without a location, report against the current input file;
             otherwise attach the error to [loc]. *)
          let error_of_printer =
            if loc = Location.none
            then Location.error_of_printer_file
            else Location.error_of_printer ~loc ?sub:None in
          Some (error_of_printer report_error err)
      | _ ->
          (* Not ours: let other registered handlers try. *)
          None
    )
| null | https://raw.githubusercontent.com/AbstractMachinesLab/caramel/7d4e505d6032e22a630d2e3bd7085b77d0efbb0c/vendor/ocaml-lsp-1.4.0/ocaml-lsp-server/vendor/merlin/upstream/ocaml_409/typing/env.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Environment handling
* Map indexed by the name of module components.
* This module is used to store components of types (i.e. labels
and constructors). We keep a representation of each nested
"open" and the set of local bindings between each of them.
* Local bindings since the last open.
* Symbolic representation of the last (innermost) open, if any.
* A callback to be applied when a component is used from this
"open". This is used to detect unused "opens". The
arguments are used to detect shadowing.
* The table before opening the module.
* This module is used to store all kinds of components except
(labels and constructors) in environments. We keep a
representation of each nested "open" and the set of local
bindings between each of them.
* Local bindings since the last open
* Symbolic representation of the last (innermost) open, if any.
* The path of the opened module, to be prefixed in front of
its local names to produce a valid path in the current
environment.
* Components from the opened module.
* A callback to be applied when a component is used from this
"open". This is used to detect unused "opens". The
arguments are used to detect shadowing.
* The table before opening the module.
Formal parameter
Argument signature
Result signature
For memoization
Forward declarations
to be filled with Includemod.check_modtype_inclusion
to be filled with Mtype.strengthen
Print addresses
The name of the compilation unit currently compiled.
"" if outside a compilation unit.
signature of persistent compilation units
get_components
Lookup by identifier
fast path (avoids lookup)
Cstr M.t.C
Regular M.t, Ext M.C
Find the manifest type associated to a type when appropriate:
- the type should be public or should have a private row,
- the type should have an associated manifest type.
The manifest type of Private abstract data types without
private row are still considered unknown to the type system.
Hence, this case is caught by the following clause that also handles
purely abstract data types without manifest type definition.
Find the manifest type information associated to a type, i.e.
the necessary information for the compiler's type-based optimisations.
In particular, the manifest type associated to a private abstract type
is revealed for the sake of compiler's type-based optimisations.
The manifest type of Private abstract data types can still get
an approximation using their manifest type.
Lookup by name
PR#7611
Format.printf "USE module %s at %a@." (Path.last p)
Location.print comps.loc;
PR#7611
Helper to handle optional substitutions.
Iter on an environment (ignoring the body of functors and
not yet evaluated structures)
Expand manifest module type names at the top of the given module type
Location.prerr_warning Location.none
(Warnings.No_cmi_file (Path.name path));
Given a signature and a root path, prefix all idents in the signature
by the root path and build the corresponding substitution.
we extend the substitution in case of an inlined record
pretend this is a type, cf. PR#6650
Compute structure descriptions
The prefixed items get the same scope as [cm_path], which is
the prefix.
The prefixed items get the same scope as [cm_path], which is
the prefix.
fcomp_arg and fcomp_res must be prefixed eagerly, because
they are interpreted in the outer environment
Insertion of bindings by identifier + path
Note: we could also check here general validity of the
identifier, to protect against bad identifiers forged by -pp or
-ppx preprocessors.
Simplified version of store_type that doesn't compute and store
constructor and label infos, but simply record the arity and
manifest-ness of the type. Used in components_of_module to
keep track of type abbreviations (e.g. type t = float) in the
computation of label representations.
Compute the components of a functor application in a path.
we have to apply eagerly instead of passing sub to [components_of_module]
because of the call to [check_well_formed_module].
???
Define forward functions
Insertion of bindings by identifier
Insertion of bindings by name
Insertion of all components of a signature
Open a signature path
Open a signature from a file
a compilation unit cannot refer to a functor
Read a signature from a file
Save a signature to a file
Folding on environments
Make the initial environment
Return the environment summary
Error report | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Cmi_format
open Misc
open Asttypes
open Longident
open Path
open Types
open Btype
module String = Misc.Stdlib.String
let add_delayed_check_forward = ref (fun _ -> assert false)
let value_declarations : ((string * Location.t), (unit -> unit)) Hashtbl.t =
Hashtbl.create 16
(* This table is used to track usage of value declarations.  A declaration is
   identified with its name and location.  The callback attached to a
   declaration is called whenever the value is used explicitly
   (lookup_value) or implicitly (inclusion test between signatures,
   cf Includemod.value_descriptions). *)
let type_declarations = Hashtbl.create 16
let module_declarations = Hashtbl.create 16
type constructor_usage = Positive | Pattern | Privatize
type constructor_usages =
{
mutable cu_positive: bool;
mutable cu_pattern: bool;
mutable cu_privatize: bool;
}
let add_constructor_usage cu = function
| Positive -> cu.cu_positive <- true
| Pattern -> cu.cu_pattern <- true
| Privatize -> cu.cu_privatize <- true
let constructor_usages () =
{cu_positive = false; cu_pattern = false; cu_privatize = false}
let used_constructors :
(string * Location.t * string, (constructor_usage -> unit)) Hashtbl.t
= Hashtbl.create 16
type error =
| Missing_module of Location.t * Path.t * Path.t
| Illegal_value_name of Location.t * string
exception Error of error
let error err = raise (Error err)
module NameMap = String.Map
type summary =
Env_empty
| Env_value of summary * Ident.t * value_description
| Env_type of summary * Ident.t * type_declaration
| Env_extension of summary * Ident.t * extension_constructor
| Env_module of summary * Ident.t * module_presence * module_declaration
| Env_modtype of summary * Ident.t * modtype_declaration
| Env_class of summary * Ident.t * class_declaration
| Env_cltype of summary * Ident.t * class_type_declaration
| Env_open of summary * Path.t
| Env_functor_arg of summary * Ident.t
| Env_constraints of summary * type_declaration Path.Map.t
| Env_copy_types of summary * string list
| Env_persistent of summary * Ident.t
type address =
| Aident of Ident.t
| Adot of address * int
module TycompTbl =
struct
type 'a t = {
current: 'a Ident.tbl;
opened: 'a opened option;
}
and 'a opened = {
components: ('a list) NameMap.t;
(** Components from the opened module.  We keep a list of
    bindings for each name, as in comp_labels and
    comp_constrs. *)
using: (string -> ('a * 'a) option -> unit) option;
next: 'a t;
}
let empty = { current = Ident.empty; opened = None }
let add id x tbl =
{tbl with current = Ident.add id x tbl.current}
let add_open slot wrap components next =
let using =
match slot with
| None -> None
| Some f -> Some (fun s x -> f s (wrap x))
in
{
current = Ident.empty;
opened = Some {using; components; next};
}
let rec find_same id tbl =
try Ident.find_same id tbl.current
with Not_found as exn ->
begin match tbl.opened with
| Some {next; _} -> find_same id next
| None -> raise exn
end
let nothing = fun () -> ()
let mk_callback rest name desc = function
| None -> nothing
| Some f ->
(fun () ->
match rest with
| [] -> f name None
| (hidden, _) :: _ -> f name (Some (desc, hidden))
)
let rec find_all name tbl =
List.map (fun (_id, desc) -> desc, nothing)
(Ident.find_all name tbl.current) @
match tbl.opened with
| None -> []
| Some {using; next; components} ->
let rest = find_all name next in
match NameMap.find name components with
| exception Not_found -> rest
| opened ->
List.map
(fun desc -> desc, mk_callback rest name desc using)
opened
@ rest
let rec fold_name f tbl acc =
let acc = Ident.fold_name (fun _id d -> f d) tbl.current acc in
match tbl.opened with
| Some {using = _; next; components} ->
acc
|> NameMap.fold
(fun _name -> List.fold_right f)
components
|> fold_name f next
| None ->
acc
let rec local_keys tbl acc =
let acc = Ident.fold_all (fun k _ accu -> k::accu) tbl.current acc in
match tbl.opened with
| Some o -> local_keys o.next acc
| None -> acc
let diff_keys is_local tbl1 tbl2 =
let keys2 = local_keys tbl2 [] in
List.filter
(fun id ->
is_local (find_same id tbl2) &&
try ignore (find_same id tbl1); false
with Not_found -> true)
keys2
end
module IdTbl =
struct
type 'a t = {
current: 'a Ident.tbl;
opened: 'a opened option;
}
and 'a opened = {
root: Path.t;
components: 'a NameMap.t;
using: (string -> ('a * 'a) option -> unit) option;
next: 'a t;
}
let empty = { current = Ident.empty; opened = None }
let add id x tbl =
{tbl with current = Ident.add id x tbl.current}
let remove id tbl =
{tbl with current = Ident.remove id tbl.current}
let add_open slot wrap root components next =
let using =
match slot with
| None -> None
| Some f -> Some (fun s x -> f s (wrap x))
in
{
current = Ident.empty;
opened = Some {using; root; components; next};
}
let rec find_same id tbl =
try Ident.find_same id tbl.current
with Not_found as exn ->
begin match tbl.opened with
| Some {next; _} -> find_same id next
| None -> raise exn
end
let rec find_name ~mark name tbl =
try
let (id, desc) = Ident.find_name name tbl.current in
Pident id, desc
with Not_found as exn ->
begin match tbl.opened with
| Some {using; root; next; components} ->
begin try
let descr = NameMap.find name components in
let res = Pdot (root, name), descr in
if mark then begin match using with
| None -> ()
| Some f -> begin
match find_name ~mark:false name next with
| exception Not_found -> f name None
| _, descr' -> f name (Some (descr', descr))
end
end;
res
with Not_found ->
find_name ~mark name next
end
| None ->
raise exn
end
let rec update name f tbl =
try
let (id, desc) = Ident.find_name name tbl.current in
let new_desc = f desc in
{tbl with current = Ident.add id new_desc tbl.current}
with Not_found ->
begin match tbl.opened with
| Some {root; using; next; components} ->
begin try
let desc = NameMap.find name components in
let new_desc = f desc in
let components = NameMap.add name new_desc components in
{tbl with opened = Some {root; using; next; components}}
with Not_found ->
let next = update name f next in
{tbl with opened = Some {root; using; next; components}}
end
| None ->
tbl
end
let rec find_all name tbl =
List.map
(fun (id, desc) -> Pident id, desc)
(Ident.find_all name tbl.current) @
match tbl.opened with
| None -> []
| Some {root; using = _; next; components} ->
try
let desc = NameMap.find name components in
(Pdot (root, name), desc) :: find_all name next
with Not_found ->
find_all name next
let rec fold_name f tbl acc =
let acc =
Ident.fold_name
(fun id d -> f (Ident.name id) (Pident id, d))
tbl.current acc
in
match tbl.opened with
| Some {root; using = _; next; components} ->
acc
|> NameMap.fold
(fun name desc -> f name (Pdot (root, name), desc))
components
|> fold_name f next
| None ->
acc
let rec local_keys tbl acc =
let acc = Ident.fold_all (fun k _ accu -> k::accu) tbl.current acc in
match tbl.opened with
| Some o -> local_keys o.next acc
| None -> acc
let rec iter f tbl =
Ident.iter (fun id desc -> f id (Pident id, desc)) tbl.current;
match tbl.opened with
| Some {root; using = _; next; components} ->
NameMap.iter
(fun s x ->
let root_scope = Path.scope root in
f (Ident.create_scoped ~scope:root_scope s)
(Pdot (root, s), x))
components;
iter f next
| None -> ()
let diff_keys tbl1 tbl2 =
let keys2 = local_keys tbl2 [] in
List.filter
(fun id ->
try ignore (find_same id tbl1); false
with Not_found -> true)
keys2
end
type type_descriptions =
constructor_description list * label_description list
let in_signature_flag = 0x01
type 'a value_or_persistent =
| Value of 'a
| Persistent
type t = {
values: (value_description * address_lazy) IdTbl.t;
constrs: (constructor_description * address_lazy option) TycompTbl.t;
labels: label_description TycompTbl.t;
types: (type_declaration * type_descriptions) IdTbl.t;
modules: (module_declaration_lazy * address_lazy) value_or_persistent IdTbl.t;
modtypes: modtype_declaration IdTbl.t;
components: (module_components * address_lazy) value_or_persistent IdTbl.t;
classes: (class_declaration * address_lazy) IdTbl.t;
cltypes: class_type_declaration IdTbl.t;
functor_args: unit Ident.tbl;
summary: summary;
local_constraints: type_declaration Path.Map.t;
flags: int;
}
and module_declaration_lazy =
(Subst.t * Subst.scoping * module_declaration, module_declaration) EnvLazy.t
and module_components =
{
alerts: alerts;
loc: Location.t;
comps: (components_maker, module_components_repr option) EnvLazy.t;
}
and components_maker = {
cm_env: t;
cm_freshening_subst: Subst.t option;
cm_prefixing_subst: Subst.t;
cm_path: Path.t;
cm_addr: address_lazy;
cm_mty: Types.module_type;
}
and module_components_repr =
Structure_comps of structure_components
| Functor_comps of functor_components
and structure_components = {
mutable comp_values: (value_description * address_lazy) NameMap.t;
mutable comp_constrs:
((constructor_description * address_lazy option) list) NameMap.t;
mutable comp_labels: label_description list NameMap.t;
mutable comp_types: (type_declaration * type_descriptions) NameMap.t;
mutable comp_modules: (module_declaration_lazy * address_lazy) NameMap.t;
mutable comp_modtypes: modtype_declaration NameMap.t;
mutable comp_components: (module_components * address_lazy) NameMap.t;
mutable comp_classes: (class_declaration * address_lazy) NameMap.t;
mutable comp_cltypes: class_type_declaration NameMap.t;
}
and functor_components = {
fcomp_subst_cache: (Path.t, module_type) Hashtbl.t
}
and address_unforced =
| Projection of { parent : address_lazy; pos : int; }
| ModAlias of { env : t; path : Path.t; }
and address_lazy = (address_unforced, address) EnvLazy.t
let empty_structure =
Structure_comps {
comp_values = NameMap.empty;
comp_constrs = NameMap.empty;
comp_labels = NameMap.empty;
comp_types = NameMap.empty;
comp_modules = NameMap.empty; comp_modtypes = NameMap.empty;
comp_components = NameMap.empty; comp_classes = NameMap.empty;
comp_cltypes = NameMap.empty }
let copy_local ~from env =
{ env with
local_constraints = from.local_constraints;
flags = from.flags }
let same_constr = ref (fun _ _ _ -> assert false)
let check_well_formed_module = ref (fun _ -> assert false)
(* Helper to decide whether to report an identifier shadowing
   by some 'open'.  For labels and constructors, we do not report
   if the two elements are from the same re-exported declaration.
   Later, one could also interpret some attributes on value and
   type declarations to silence the shadowing warnings. *)
let check_shadowing env = function
| `Constructor (Some ((c1, _), (c2, _)))
when not (!same_constr env c1.cstr_res c2.cstr_res) ->
Some "constructor"
| `Label (Some (l1, l2))
when not (!same_constr env l1.lbl_res l2.lbl_res) ->
Some "label"
| `Value (Some _) -> Some "value"
| `Type (Some _) -> Some "type"
| `Module (Some _) | `Component (Some _) -> Some "module"
| `Module_type (Some _) -> Some "module type"
| `Class (Some _) -> Some "class"
| `Class_type (Some _) -> Some "class type"
| `Constructor _ | `Label _
| `Value None | `Type None | `Module None | `Module_type None
| `Class None | `Class_type None | `Component None ->
None
let subst_modtype_maker (subst, scoping, md) =
{md with md_type = Subst.modtype scoping subst md.md_type}
let empty = {
values = IdTbl.empty; constrs = TycompTbl.empty;
labels = TycompTbl.empty; types = IdTbl.empty;
modules = IdTbl.empty; modtypes = IdTbl.empty;
components = IdTbl.empty; classes = IdTbl.empty;
cltypes = IdTbl.empty;
summary = Env_empty; local_constraints = Path.Map.empty;
flags = 0;
functor_args = Ident.empty;
}
let in_signature b env =
let flags =
if b then env.flags lor in_signature_flag
else env.flags land (lnot in_signature_flag)
in
{env with flags}
let is_in_signature env = env.flags land in_signature_flag <> 0
let is_ident = function
Pident _ -> true
| Pdot _ | Papply _ -> false
let is_local_ext = function
| {cstr_tag = Cstr_extension(p, _)}, _ -> is_ident p
| _ -> false
let diff env1 env2 =
IdTbl.diff_keys env1.values env2.values @
TycompTbl.diff_keys is_local_ext env1.constrs env2.constrs @
IdTbl.diff_keys env1.modules env2.modules @
IdTbl.diff_keys env1.classes env2.classes
let components_of_module' =
ref ((fun ~alerts:_ ~loc:_ _env _fsub _psub _path _addr _mty -> assert false):
alerts:alerts -> loc:Location.t -> t ->
Subst.t option -> Subst.t -> Path.t -> address_lazy -> module_type ->
module_components)
let components_of_module_maker' =
ref ((fun _ -> assert false) :
components_maker -> module_components_repr option)
let components_of_functor_appl' =
ref ((fun _f _env _p1 _p2 -> assert false) :
functor_components -> t -> Path.t -> Path.t -> module_components)
let check_modtype_inclusion =
ref ((fun ~loc:_ _env _mty1 _path1 _mty2 -> assert false) :
loc:Location.t -> t -> module_type -> Path.t -> module_type -> unit)
let strengthen =
ref ((fun ~aliasable:_ _env _mty _path -> assert false) :
aliasable:bool -> t -> module_type -> Path.t -> module_type)
let md md_type =
{md_type; md_attributes=[]; md_loc=Location.none}
let rec print_address ppf = function
| Aident id -> Format.fprintf ppf "%s" (Ident.name id)
| Adot(a, pos) -> Format.fprintf ppf "%a.[%i]" print_address a pos
module Current_unit_name : sig
val get : unit -> modname
val set : modname -> unit
val is : modname -> bool
val is_name_of : Ident.t -> bool
end = struct
let current_unit =
ref ""
let get () =
!current_unit
let set name =
current_unit := name
let is name =
!current_unit = name
let is_name_of id =
is (Ident.name id)
end
let set_unit_name = Current_unit_name.set
let get_unit_name = Current_unit_name.get
let find_same_module id tbl =
match IdTbl.find_same id tbl with
| x -> x
| exception Not_found
when Ident.persistent id && not (Current_unit_name.is_name_of id) ->
Persistent
type persistent_module = {
pm_signature: signature Lazy.t;
pm_components: module_components;
}
let add_persistent_structure id env =
if not (Ident.persistent id) then invalid_arg "Env.add_persistent_structure";
if not (Current_unit_name.is_name_of id) then
{ env with
modules = IdTbl.add id Persistent env.modules;
components = IdTbl.add id Persistent env.components;
summary = Env_persistent (env.summary, id);
}
else
env
let sign_of_cmi ~freshen { Persistent_env.Persistent_signature.cmi; _ } =
let name = cmi.cmi_name in
let sign = cmi.cmi_sign in
let flags = cmi.cmi_flags in
let id = Ident.create_persistent name in
let path = Pident id in
let addr = EnvLazy.create_forced (Aident id) in
let alerts =
List.fold_left (fun acc -> function Alerts s -> s | _ -> acc)
Misc.Stdlib.String.Map.empty
flags
in
let loc = Location.none in
let pm_signature = lazy (Subst.signature Make_local Subst.identity sign) in
let pm_components =
let freshening_subst =
if freshen then (Some Subst.identity) else None in
!components_of_module' ~alerts ~loc
empty freshening_subst Subst.identity path addr (Mty_signature sign) in
{
pm_signature;
pm_components;
}
let read_sign_of_cmi = sign_of_cmi ~freshen:true
let save_sign_of_cmi = sign_of_cmi ~freshen:false
let persistent_env : persistent_module Persistent_env.t =
Persistent_env.empty ()
let without_cmis f x =
Persistent_env.without_cmis persistent_env f x
let imports () = Persistent_env.imports persistent_env
let import_crcs ~source crcs =
Persistent_env.import_crcs persistent_env ~source crcs
let read_pers_mod modname filename =
Persistent_env.read persistent_env read_sign_of_cmi modname filename
let find_pers_mod name =
Persistent_env.find persistent_env read_sign_of_cmi name
let check_pers_mod ~loc name =
Persistent_env.check persistent_env read_sign_of_cmi ~loc name
let crc_of_unit name =
Persistent_env.crc_of_unit persistent_env read_sign_of_cmi name
let is_imported_opaque modname =
Persistent_env.is_imported_opaque persistent_env modname
let reset_declaration_caches () =
Hashtbl.clear value_declarations;
Hashtbl.clear type_declarations;
Hashtbl.clear module_declarations;
Hashtbl.clear used_constructors;
()
let reset_cache () =
Current_unit_name.set "";
Persistent_env.clear persistent_env;
reset_declaration_caches ();
()
let reset_cache_toplevel () =
Persistent_env.clear_missing persistent_env;
reset_declaration_caches ();
()
let get_components_opt c =
match Persistent_env.can_load_cmis persistent_env with
| Persistent_env.Can_load_cmis ->
EnvLazy.force !components_of_module_maker' c.comps
| Persistent_env.Cannot_load_cmis log ->
EnvLazy.force_logged log !components_of_module_maker' c.comps
let get_components c =
match get_components_opt c with
| None -> empty_structure
| Some c -> c
let rec find_module_descr path env =
match path with
Pident id ->
begin match find_same_module id env.components with
| Value x -> fst x
| Persistent -> (find_pers_mod (Ident.name id)).pm_components
end
| Pdot(p, s) ->
begin match get_components (find_module_descr p env) with
Structure_comps c ->
fst (NameMap.find s c.comp_components)
| Functor_comps _ ->
raise Not_found
end
| Papply(p1, p2) ->
begin match get_components (find_module_descr p1 env) with
Functor_comps f ->
!components_of_functor_appl' f env p1 p2
| Structure_comps _ ->
raise Not_found
end
let find proj1 proj2 path env =
match path with
Pident id -> IdTbl.find_same id (proj1 env)
| Pdot(p, s) ->
begin match get_components (find_module_descr p env) with
Structure_comps c -> NameMap.find s (proj2 c)
| Functor_comps _ ->
raise Not_found
end
| Papply _ ->
raise Not_found
let find_value_full =
find (fun env -> env.values) (fun sc -> sc.comp_values)
and find_type_full =
find (fun env -> env.types) (fun sc -> sc.comp_types)
and find_modtype =
find (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes)
and find_class_full =
find (fun env -> env.classes) (fun sc -> sc.comp_classes)
and find_cltype =
find (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes)
let find_value p env =
fst (find_value_full p env)
let find_class p env =
fst (find_class_full p env)
let type_of_cstr path = function
| {cstr_inlined = Some d; _} ->
(d, ([], List.map snd (Datarepr.labels_of_type path d)))
| _ ->
assert false
let find_type_full path env =
match Path.constructor_typath path with
| Regular p ->
(try (Path.Map.find p env.local_constraints, ([], []))
with Not_found -> find_type_full p env)
| Cstr (ty_path, s) ->
let (_, (cstrs, _)) =
try find_type_full ty_path env
with Not_found -> assert false
in
let cstr =
try List.find (fun cstr -> cstr.cstr_name = s) cstrs
with Not_found -> assert false
in
type_of_cstr path cstr
| LocalExt id ->
let cstr =
try fst (TycompTbl.find_same id env.constrs)
with Not_found -> assert false
in
type_of_cstr path cstr
| Ext (mod_path, s) ->
let comps =
try find_module_descr mod_path env
with Not_found -> assert false
in
let comps =
match get_components comps with
| Structure_comps c -> c
| Functor_comps _ -> assert false
in
let exts =
List.filter
(function ({cstr_tag=Cstr_extension _}, _) -> true | _ -> false)
(try NameMap.find s comps.comp_constrs
with Not_found -> assert false)
in
match exts with
| [(cstr, _)] -> type_of_cstr path cstr
| _ -> assert false
let find_type p env =
fst (find_type_full p env)
let find_type_descrs p env =
snd (find_type_full p env)
let find_module ~alias path env =
match path with
Pident id ->
begin
match find_same_module id env.modules with
| Value (data, _) -> EnvLazy.force subst_modtype_maker data
| Persistent ->
let pm = find_pers_mod (Ident.name id) in
md (Mty_signature(Lazy.force pm.pm_signature))
end
| Pdot(p, s) ->
begin match get_components (find_module_descr p env) with
Structure_comps c ->
let data, _ = NameMap.find s c.comp_modules in
EnvLazy.force subst_modtype_maker data
| Functor_comps _ ->
raise Not_found
end
| Papply(p1, p2) ->
let desc1 = find_module_descr p1 env in
begin match get_components desc1 with
Functor_comps f ->
let mty =
match f.fcomp_res with
| Mty_alias _ as mty -> mty
| mty ->
if alias then mty else
try
Hashtbl.find f.fcomp_subst_cache p2
with Not_found ->
let mty =
Subst.modtype (Rescope (Path.scope path))
(Subst.add_module f.fcomp_param p2 Subst.identity)
f.fcomp_res in
Hashtbl.add f.fcomp_subst_cache p2 mty;
mty
in
md mty
| Structure_comps _ ->
raise Not_found
end
let rec find_module_address path env =
match path with
| Pident id ->
begin
match find_same_module id env.modules with
| Value (_, addr) -> get_address addr
| Persistent -> Aident id
end
| Pdot(p, s) -> begin
match get_components (find_module_descr p env) with
| Structure_comps c ->
let _, addr = NameMap.find s c.comp_modules in
get_address addr
| Functor_comps _ ->
raise Not_found
end
| Papply _ -> raise Not_found
and force_address = function
| Projection { parent; pos } -> Adot(get_address parent, pos)
| ModAlias { env; path } -> find_module_address path env
and get_address a =
EnvLazy.force force_address a
let find_value_address p env =
get_address (snd (find_value_full p env))
let find_class_address p env =
get_address (snd (find_class_full p env))
let rec get_constrs_address = function
| [] -> raise Not_found
| (_, None) :: rest -> get_constrs_address rest
| (_, Some a) :: _ -> get_address a
let find_constructor_address path env =
match path with
| Pident id -> begin
match TycompTbl.find_same id env.constrs with
| _, None -> raise Not_found
| _, Some addr -> get_address addr
end
| Pdot(p, s) -> begin
match get_components (find_module_descr p env) with
| Structure_comps c ->
get_constrs_address (NameMap.find s c.comp_constrs)
| Functor_comps _ ->
raise Not_found
end
| Papply _ ->
raise Not_found
let required_globals = ref []
let reset_required_globals () = required_globals := []
let get_required_globals () = !required_globals
let add_required_global id =
if Ident.global id && not !Clflags.transparent_modules
&& not (List.exists (Ident.same id) !required_globals)
then required_globals := id :: !required_globals
let rec normalize_module_path lax env = function
| Pident id as path when lax && Ident.persistent id ->
| Pdot (p, s) as path ->
let p' = normalize_module_path lax env p in
if p == p' then expand_module_path lax env path
else expand_module_path lax env (Pdot(p', s))
| Papply (p1, p2) as path ->
let p1' = normalize_module_path lax env p1 in
let p2' = normalize_module_path true env p2 in
if p1 == p1' && p2 == p2' then expand_module_path lax env path
else expand_module_path lax env (Papply(p1', p2'))
| Pident _ as path ->
expand_module_path lax env path
and expand_module_path lax env path =
try match find_module ~alias:true path env with
{md_type=Mty_alias path1} ->
let path' = normalize_module_path lax env path1 in
if lax || !Clflags.transparent_modules then path' else
let id = Path.head path in
if Ident.global id && not (Ident.same id (Path.head path'))
then add_required_global id;
path'
| _ -> path
with Not_found when lax
|| (match path with Pident id -> not (Ident.persistent id) | _ -> true) ->
path
let normalize_module_path oloc env path =
try normalize_module_path (oloc = None) env path
with Not_found ->
match oloc with None -> assert false
| Some loc ->
error (Missing_module(loc, path,
normalize_module_path true env path))
let normalize_path_prefix oloc env path =
match path with
Pdot(p, s) ->
let p2 = normalize_module_path oloc env p in
if p == p2 then path else Pdot(p2, s)
| Pident _ ->
path
| Papply _ ->
assert false
let is_uident s =
match s.[0] with
| 'A'..'Z' -> true
| _ -> false
let normalize_type_path oloc env path =
(* Inlined version of Path.is_constructor_typath:
   constructor type paths (i.e. paths pointing to an inline
   record argument of a constructor) are built as a regular
   type path followed by a capitalized constructor name. *)
match path with
| Pident _ ->
path
| Pdot(p, s) ->
let p2 =
if is_uident s && not (is_uident (Path.last p)) then
normalize_path_prefix oloc env p
else
normalize_module_path oloc env p
in
if p == p2 then path else Pdot (p2, s)
| Papply _ ->
assert false
(* Shadow the internal [find_module] with the public version that does not
   follow aliases. *)
let find_module path env =
  find_module ~alias:false path env

(* Manifest expansion of the type at [path]; raises [Not_found] when the
   manifest is private-abstract and has no constrained row. *)
let find_type_expansion path env =
  let decl = find_type path env in
  match decl.type_manifest with
  | Some body when decl.type_private = Public
                || decl.type_kind <> Type_abstract
                || Btype.has_constr_row body ->
      (decl.type_params, body, decl.type_expansion_scope)
  | _ -> raise Not_found

(* Like [find_type_expansion], but also returns private manifests. *)
let find_type_expansion_opt path env =
  let decl = find_type path env in
  match decl.type_manifest with
  | Some body ->
      (decl.type_params, body, decl.type_expansion_scope)
  | _ -> raise Not_found

(* Expansion of a module type name; [Not_found] for abstract ones. *)
let find_modtype_expansion path env =
  match (find_modtype path env).mtd_type with
  | None -> raise Not_found
  | Some mty -> mty

(* Whether the head of [path] is a functor argument recorded in [env]. *)
let rec is_functor_arg path env =
  match path with
    Pident id ->
      begin try Ident.find_same id env.functor_args; true
      with Not_found -> false
      end
  | Pdot (p, _s) -> is_functor_arg p env
  | Papply _ -> true
(* Raised by [lookup_module] when a recursive module is consulted before its
   signature is known (its module type is the "#recmod#" placeholder). *)
exception Recmodule

(* Emit an alert at [loc] (when provided) for every alert attached to the
   module at path [p]. *)
let report_alerts ?loc p alerts =
  match loc with
  | Some loc ->
      Misc.Stdlib.String.Map.iter
        (fun kind message ->
           let message = if message = "" then "" else "\n" ^ message in
           Location.alert ~kind loc
             (Printf.sprintf "module %s%s" (Path.name p) message)
        )
        alerts
  | _ -> ()

(* Run the usage callback registered for module [name] at [loc], if any. *)
let mark_module_used name loc =
  try Hashtbl.find module_declarations (name, loc) ()
  with Not_found -> ()
(* Resolve [lid] to a module path and its component table.
   Fix: the match on [f.fcomp_arg] had lost its [None] case — the dropped
   line carried a (* PR#7611 *) comment; a generative functor applied to an
   argument must fail the lookup, not fall through non-exhaustively. *)
let rec lookup_module_descr_aux ?loc ~mark lid env =
  match lid with
    Lident s ->
      let find_components s = (find_pers_mod s).pm_components in
      begin match IdTbl.find_name ~mark s env.components with
      | exception Not_found when not (Current_unit_name.is s) ->
          (* Unbound in the environment: fall back to a persistent unit. *)
          let p = Path.Pident (Ident.create_persistent s) in
          (p, find_components s)
      | (p, data) ->
          (p,
           match data with
           | Value (comp, _) -> comp
           | Persistent -> find_components s)
      end
  | Ldot(l, s) ->
      let (p, descr) = lookup_module_descr ?loc ~mark l env in
      begin match get_components descr with
        Structure_comps c ->
          let (descr, _addr) = NameMap.find s c.comp_components in
          (Pdot(p, s), descr)
      | Functor_comps _ ->
          raise Not_found
      end
  | Lapply(l1, l2) ->
      let (p1, desc1) = lookup_module_descr ?loc ~mark l1 env in
      let p2 = lookup_module ~load:true ~mark ?loc l2 env in
      let {md_type=mty2} = find_module p2 env in
      begin match get_components desc1 with
        Functor_comps f ->
          let loc = match loc with Some l -> l | None -> Location.none in
          (match f.fcomp_arg with
           | None -> raise Not_found (* PR#7611 *)
           | Some arg -> !check_modtype_inclusion ~loc env mty2 p2 arg);
          (Papply(p1, p2), !components_of_functor_appl' f env p1 p2)
      | Structure_comps _ ->
          raise Not_found
      end
(* Wrapper around [lookup_module_descr_aux] that also records usage and
   reports alerts for the resolved module. *)
and lookup_module_descr ?loc ~mark lid env =
  let (p, comps) as res = lookup_module_descr_aux ?loc ~mark lid env in
  if mark then mark_module_used (Path.last p) comps.loc;
  report_alerts ?loc p comps.alerts;
  res
(* Resolve [lid] to a module path.  [load] forces loading the persistent
   unit (otherwise, with -trans-mod, only the .cmi's existence is checked).
   Fixes: restored the "(* see #5965 *)" comment that had lost its
   delimiters, and the missing [None] case of the [fcomp_arg] match (the
   dropped line carried a (* PR#7611 *) comment). *)
and lookup_module ~load ?loc ~mark lid env : Path.t =
  match lid with
    Lident s ->
      begin match IdTbl.find_name ~mark s env.modules with
      | exception Not_found
        when not (Current_unit_name.is s)
          && !Clflags.transparent_modules
          && not load ->
          (* Unbound foreign unit: verify the .cmi exists without loading. *)
          check_pers_mod s
            ~loc:(Option.value loc ~default:Location.none);
          Path.Pident (Ident.create_persistent s)
      | p, data ->
          begin match data with
          | Value (data, _) ->
              let {md_loc; md_attributes; md_type} =
                EnvLazy.force subst_modtype_maker data
              in
              if mark then mark_module_used s md_loc;
              begin match md_type with
              | Mty_ident (Path.Pident id) when Ident.name id = "#recmod#" ->
                  (* see #5965 *)
                  raise Recmodule
              | _ -> ()
              end;
              report_alerts ?loc p
                (Builtin_attributes.alerts_of_attrs md_attributes)
          | Persistent ->
              if !Clflags.transparent_modules && not load then
                check_pers_mod s
                  ~loc:(Option.value loc ~default:Location.none)
              else begin
                let pm = find_pers_mod s in
                report_alerts ?loc p pm.pm_components.alerts
              end
          end;
          p
      end
  | Ldot(l, s) ->
      let (p, descr) = lookup_module_descr ?loc ~mark l env in
      begin match get_components descr with
        Structure_comps c ->
          let (comps, _) = NameMap.find s c.comp_components in
          if mark then mark_module_used s comps.loc;
          let p = Pdot(p, s) in
          report_alerts ?loc p comps.alerts;
          p
      | Functor_comps _ ->
          raise Not_found
      end
  | Lapply(l1, l2) ->
      let (p1, desc1) = lookup_module_descr ?loc ~mark l1 env in
      let p2 = lookup_module ~load:true ?loc ~mark l2 env in
      let {md_type=mty2} = find_module p2 env in
      let p = Papply(p1, p2) in
      begin match get_components desc1 with
        Functor_comps f ->
          let loc = match loc with Some l -> l | None -> Location.none in
          (match f.fcomp_arg with
           | None -> raise Not_found (* PR#7611 *)
           | Some arg -> (!check_modtype_inclusion ~loc env mty2 p2) arg);
          p
      | Structure_comps _ ->
          raise Not_found
      end
(* Generic lookup of a named component: [proj1] selects the table in the
   environment, [proj2] the matching field of a structure's components. *)
let lookup proj1 proj2 ?loc ~mark lid env =
  match lid with
  | Lident s -> IdTbl.find_name ~mark s (proj1 env)
  | Ldot(l, s) ->
      let path, desc = lookup_module_descr ?loc ~mark l env in
      begin match get_components desc with
        Structure_comps c ->
          let data = NameMap.find s (proj2 c) in
          (Pdot(path, s), data)
      | Functor_comps _ ->
          raise Not_found
      end
  | Lapply _ ->
      raise Not_found

(* Lookup returning all homonyms of a constructor or label; [shadow]
   decides when an earlier binding hides a later one. *)
let lookup_all_simple proj1 proj2 shadow ?loc ~mark lid env =
  match lid with
    Lident s ->
      let xl = TycompTbl.find_all s (proj1 env) in
      (* Keep each binding but drop everything it shadows further down. *)
      let rec do_shadow =
        function
        | [] -> []
        | ((x, f) :: xs) ->
            (x, f) ::
              (do_shadow (List.filter (fun (y, _) -> not (shadow x y)) xs))
      in
      do_shadow xl
  | Ldot(l, s) ->
      let (_p, desc) = lookup_module_descr ?loc ~mark l env in
      begin match get_components desc with
        Structure_comps c ->
          let comps =
            try NameMap.find s (proj2 c) with Not_found -> []
          in
          List.map
            (fun data -> (data, (fun () -> ())))
            comps
      | Functor_comps _ ->
          raise Not_found
      end
  | Lapply _ ->
      raise Not_found
(* Whether [env] carries any local type constraints. *)
let has_local_constraints env = not (Path.Map.is_empty env.local_constraints)

(* Two extension constructors of the same name shadow one another; ordinary
   constructors are kept as homonyms. *)
let cstr_shadow (cstr1, _) (cstr2, _) =
  match cstr1.cstr_tag, cstr2.cstr_tag with
  | Cstr_extension _, Cstr_extension _ -> true
  | _ -> false

(* Labels never shadow each other. *)
let lbl_shadow _lbl1 _lbl2 = false

(* Strip the address kept alongside a description. *)
let ignore_address (path, (desc, _addr)) = (path, desc)

(* Per-namespace instantiations of the generic lookups above. *)
let lookup_value ?loc ~mark lid env =
  ignore_address
    (lookup (fun env -> env.values) (fun sc -> sc.comp_values)
       ?loc ~mark lid env)

let lookup_all_constructors ?loc ~mark lid env =
  lookup_all_simple (fun env -> env.constrs) (fun sc -> sc.comp_constrs)
    cstr_shadow ?loc ~mark lid env

let lookup_all_labels ?loc ~mark lid env =
  lookup_all_simple (fun env -> env.labels) (fun sc -> sc.comp_labels)
    lbl_shadow ?loc ~mark lid env

let lookup_type ?loc ~mark lid env =
  lookup (fun env -> env.types) (fun sc -> sc.comp_types)
    ?loc ~mark lid env

let lookup_modtype ?loc ~mark lid env =
  lookup (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes)
    ?loc ~mark lid env

let lookup_class ?loc ~mark lid env =
  ignore_address
    (lookup (fun env -> env.classes) (fun sc -> sc.comp_classes)
       ?loc ~mark lid env)

let lookup_cltype ?loc ~mark lid env =
  lookup (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes)
    ?loc ~mark lid env
(* Snapshot used to copy value types: the copied names, the value table
   before the copy, and the table holding the refreshed types. *)
type copy_of_types = {
  to_copy: string list;
  initial_values: (value_description * address_lazy) IdTbl.t;
  new_values: (value_description * address_lazy) IdTbl.t;
}

(* Refresh (copy via the identity substitution) the type of every value
   named in [l]. *)
let make_copy_of_types l env : copy_of_types =
  let f (desc, addr) =
    {desc with val_type = Subst.type_expr Subst.identity desc.val_type}, addr
  in
  let values =
    List.fold_left (fun env s -> IdTbl.update s f env) env.values l
  in
  {to_copy = l; initial_values = env.values; new_values = values}

(* Install a previously made copy; the value table must not have changed
   since the copy was made. *)
let do_copy_types { to_copy = l; initial_values; new_values = values } env =
  if initial_values != env.values then fatal_error "Env.do_copy_types";
  {env with values; summary = Env_copy_types (env.summary, l)}
(* Each [mark_*_used] helper runs the callback registered in the matching
   declarations table (if any) to flag the item as used. *)
let mark_value_used name vd =
  try Hashtbl.find value_declarations (name, vd.val_loc) ()
  with Not_found -> ()

let mark_type_used name vd =
  try Hashtbl.find type_declarations (name, vd.type_loc) ()
  with Not_found -> ()

let mark_constructor_used usage name vd constr =
  try Hashtbl.find used_constructors (name, vd.type_loc, constr) usage
  with Not_found -> ()

let mark_extension_used usage ext name =
  let ty_name = Path.last ext.ext_type_path in
  try Hashtbl.find used_constructors (ty_name, ext.ext_loc, name) usage
  with Not_found -> ()
(* Register [callback] to run when value [vd] named [name] is marked used.
   Fix: the explanatory comment had been duplicated by extraction with the
   first copy missing its '(*'; restored as one well-delimited comment. *)
let set_value_used_callback name vd callback =
  let key = (name, vd.val_loc) in
  try
    let old = Hashtbl.find value_declarations key in
    (* chain the callbacks; this is to support cases like:
         let x = let x = 1 in x in x
       where the two declarations have the same location
       (e.g. resulting from Camlp4 expansion of grammar entries) *)
    Hashtbl.replace value_declarations key (fun () -> old (); callback ())
  with Not_found ->
    Hashtbl.add value_declarations key callback
(* Register [callback] for the type declaration [td] named [name]; ghost
   locations are skipped.  The previous callback (or [ignore]) is handed to
   the new one so callbacks compose. *)
let set_type_used_callback name td callback =
  let loc = td.type_loc in
  if loc.Location.loc_ghost then ()
  else let key = (name, loc) in
  let old =
    try Hashtbl.find type_declarations key
    with Not_found -> ignore
  in
  Hashtbl.replace type_declarations key (fun () -> callback old)
(* Public lookups: like the internal ones, but also mark the result as used
   (unless [mark] is false). *)
let lookup_value ?loc ?(mark = true) lid env =
  let (_, desc) as r = lookup_value ?loc ~mark lid env in
  if mark then mark_value_used (Longident.last lid) desc;
  r

let lookup_type ?loc ?(mark = true) lid env =
  let (path, (decl, _)) = lookup_type ?loc ~mark lid env in
  if mark then mark_type_used (Longident.last lid) decl;
  path

(* Mark the type at [path] as used; best-effort, ignores missing types. *)
let mark_type_path env path =
  try
    let decl = find_type path env in
    mark_type_used (Path.last path) decl
  with Not_found -> ()

(* Head type path of a type expression; must be a [Tconstr]. *)
let ty_path t =
  match repr t with
  | {desc=Tconstr(path, _, _)} -> path
  | _ -> assert false
(* Look up a single constructor; marks its result type as used. *)
let lookup_constructor ?loc ?(mark = true) lid env =
  match lookup_all_constructors ?loc ~mark lid env with
    [] -> raise Not_found
  | ((desc, _), use) :: _ ->
      if mark then begin
        mark_type_path env (ty_path desc.cstr_res);
        use ()
      end;
      desc

let is_lident = function
    Lident _ -> true
  | _ -> false

(* All constructors named [lid]; an unbound simple name yields []. *)
let lookup_all_constructors ?loc ?(mark = true) lid env =
  try
    let cstrs = lookup_all_constructors ?loc ~mark lid env in
    (* Delay the marking until the chosen homonym is actually used. *)
    let wrap_use desc use () =
      if mark then begin
        mark_type_path env (ty_path desc.cstr_res);
        use ()
      end
    in
    List.map (fun ((cstr, _), use) -> (cstr, wrap_use cstr use)) cstrs
  with
    Not_found when is_lident lid -> []
(* Record a use of constructor [name] with [usage]; extension constructors
   are keyed by their own location rather than their type declaration's. *)
let mark_constructor usage env name desc =
  match desc.cstr_tag with
  | Cstr_extension _ ->
      begin
        let ty_path = ty_path desc.cstr_res in
        let ty_name = Path.last ty_path in
        try Hashtbl.find used_constructors (ty_name, desc.cstr_loc, name) usage
        with Not_found -> ()
      end
  | _ ->
      let ty_path = ty_path desc.cstr_res in
      let ty_decl = try find_type ty_path env with Not_found -> assert false in
      let ty_name = Path.last ty_path in
      mark_constructor_used usage ty_name ty_decl name
(* Look up a single label; marks its record type as used. *)
let lookup_label ?loc ?(mark = true) lid env =
  match lookup_all_labels ?loc ~mark lid env with
    [] -> raise Not_found
  | (desc, use) :: _ ->
      if mark then begin
        mark_type_path env (ty_path desc.lbl_res);
        use ()
      end;
      desc

(* All labels named [lid]; an unbound simple name yields []. *)
let lookup_all_labels ?loc ?(mark = true) lid env =
  try
    let lbls = lookup_all_labels ?loc ~mark lid env in
    (* Delay the marking until the chosen homonym is actually used. *)
    let wrap_use desc use () =
      if mark then begin
        mark_type_path env (ty_path desc.lbl_res);
        use ()
      end
    in
    List.map (fun (lbl, use) -> (lbl, wrap_use lbl use)) lbls
  with
    Not_found when is_lident lid -> []
(* Re-export with [mark] made optional (defaulting to true). *)
let lookup_module ~load ?loc ?(mark = true) lid env =
  lookup_module ~load ?loc ~mark lid env

let lookup_modtype ?loc ?(mark = true) lid env =
  lookup_modtype ?loc ~mark lid env
(* Look up a class, marking its path (or, for unbound-class placeholders
   with an empty path name, the corresponding type) as used.
   Fix: the comment on the placeholder check had been truncated to the bare
   words "special support for"; restored with proper delimiters. *)
let lookup_class ?loc ?(mark = true) lid env =
  let (_, desc) as r = lookup_class ?loc ~mark lid env in
  (* special support for Typeclass.unbound_class *)
  if Path.name desc.cty_path = "" then ignore (lookup_type ?loc ~mark lid env)
  else if mark then mark_type_path env desc.cty_path;
  r
(* Look up a class type, marking its path as used. *)
let lookup_cltype ?loc ?(mark = true) lid env =
  let (_, desc) as r = lookup_cltype ?loc ~mark lid env in
  if Path.name desc.clty_path = "" then ignore (lookup_type ?loc lid env)
  else mark_type_path env desc.clty_path;
  (* NOTE(review): the path is marked again unconditionally below, so the
     else-branch marks it twice — looks like extraction damage or a latent
     redundancy; confirm against upstream before changing. *)
  mark_type_path env desc.clty_path;
  r
(* Apply [subst_f sub x] when a substitution is present; otherwise return
   [x] unchanged. *)
let may_subst subst_f sub x =
  Option.fold ~none:x ~some:(fun s -> subst_f s x) sub
(* Continuations pending from an interrupted [iter_env] traversal. *)
type iter_cont = unit -> unit
let iter_env_cont = ref []
(* Whether a module of type [mty] should be visited by [iter_env]: skip
   aliases to persistent units that were never loaded.
   Fix: the second match case had lost its pattern "| path ->" — the line
   carried a trailing (* PR#6600 *) comment and only the bare comment text
   survived extraction. *)
let rec scrape_alias_for_visit env sub mty =
  match mty with
  | Mty_alias path ->
      begin match may_subst Subst.module_path sub path with
      | Pident id
        when Ident.persistent id
          && not (Persistent_env.looked_up persistent_env (Ident.name id)) ->
          false
      | path -> (* PR#6600: find_module may raise Not_found *)
          try scrape_alias_for_visit env sub (find_module path env).md_type
          with Not_found -> false
      end
  | _ -> true
(* Iterate [f] over every component selected by [proj1]/[proj2]: first the
   identifiers bound directly in [env], then — lazily, via continuations
   queued in [iter_env_cont] — the components of each visible sub-module. *)
let iter_env proj1 proj2 f env () =
  IdTbl.iter (fun id x -> f (Pident id) x) (proj1 env);
  let rec iter_components path path' mcomps =
    let cont () =
      (* Skip modules whose alias target was never loaded. *)
      let visit =
        match EnvLazy.get_arg mcomps.comps with
        | None -> true
        | Some { cm_mty; cm_freshening_subst; _ } ->
            scrape_alias_for_visit env cm_freshening_subst cm_mty
      in
      if not visit then () else
      match get_components mcomps with
        Structure_comps comps ->
          NameMap.iter
            (fun s d -> f (Pdot (path, s)) (Pdot (path', s), d))
            (proj2 comps);
          NameMap.iter
            (fun s (c, _) ->
               iter_components (Pdot (path, s)) (Pdot (path', s)) c)
            comps.comp_components
      | Functor_comps _ -> ()
    in iter_env_cont := (path, cont) :: !iter_env_cont
  in
  IdTbl.iter
    (fun id (path, comps) ->
       match comps with
       | Value (comps, _) -> iter_components (Pident id) path comps
       | Persistent ->
           let modname = Ident.name id in
           match Persistent_env.find_in_cache persistent_env modname with
           | None -> ()
           | Some pm -> iter_components (Pident id) path pm.pm_components)
    env.components
(* Run the queued traversal continuations and return (in queueing order)
   any continuations they enqueued in turn. *)
let run_iter_cont callbacks =
  iter_env_cont := [];
  List.iter (fun k -> k ()) callbacks;
  let pending = List.rev !iter_env_cont in
  iter_env_cont := [];
  pending
let iter_types f = iter_env (fun env -> env.types) (fun sc -> sc.comp_types) f

(* Cheap physical-equality check that no type definition changed between
   two environments. *)
let same_types env1 env2 =
  env1.types == env2.types && env1.components == env2.components

(* Set of persistent-unit names consulted so far. *)
let used_persistent () =
  Persistent_env.fold persistent_env
    (fun s _m r -> Concr.add s r)
    Concr.empty
(* Component named [s] reached through [proj] in the structure [mcomps]
   rooted at [p]; empty for functors or when [s] is absent. *)
let find_all_comps proj s (p, (mcomps, _)) =
  match get_components mcomps with
  | Functor_comps _ -> []
  | Structure_comps comps ->
      match NameMap.find_opt s (proj comps) with
      | Some c -> [Pdot (p, s), c]
      | None -> []
(* All (shadowed) component tables reachable under [path]. *)
let rec find_shadowed_comps path env =
  match path with
    Pident id ->
      List.filter_map
        (fun (p, data) ->
           match data with
           | Value x -> Some (p, x)
           | Persistent -> None)
        (IdTbl.find_all (Ident.name id) env.components)
  | Pdot (p, s) ->
      let l = find_shadowed_comps p env in
      let l' =
        List.map (find_all_comps (fun comps -> comps.comp_components) s) l
      in
      List.flatten l'
  | Papply _ -> []

(* All (shadowed) bindings of [path] in the table chosen by [proj1]/[proj2]. *)
let find_shadowed proj1 proj2 path env =
  match path with
    Pident id ->
      IdTbl.find_all (Ident.name id) (proj1 env)
  | Pdot (p, s) ->
      let l = find_shadowed_comps p env in
      let l' = List.map (find_all_comps proj2 s) l in
      List.flatten l'
  | Papply _ -> []

let find_shadowed_types path env =
  List.map fst
    (find_shadowed
       (fun env -> env.types) (fun comps -> comps.comp_types) path env)
(* Expand module-type names and aliases in [mty] until a concrete module
   type remains; [path], when known, is used for strengthening.
   Fix: the inner match had lost its catch-all "| _ -> assert false" — the
   dropped line carried the (* only [Mty_ident]s in [sub] *) comment and
   only the bare comment text survived extraction. *)
let rec scrape_alias env sub ?path mty =
  match mty, path with
    Mty_ident _, _ ->
      let p =
        match may_subst (Subst.modtype Keep) sub mty with
        | Mty_ident p -> p
        | _ -> assert false (* only [Mty_ident]s in [sub] *)
      in
      begin try
        scrape_alias env sub (find_modtype_expansion p env) ?path
      with Not_found ->
        mty
      end
  | Mty_alias path, _ ->
      let path = may_subst Subst.module_path sub path in
      begin try
        scrape_alias env sub (find_module path env).md_type ~path
      with Not_found ->
        mty
      end
  | mty, Some path ->
      !strengthen ~aliasable:true env mty path
  | _ -> mty
(* Prefix every identifier bound in [sg] with the path [root], threading two
   substitutions: [freshening_sub] (when present) renames the bound
   identifiers, and [prefixing_sub] maps them to their dotted paths.
   Returns each (rewritten item, path) pair plus the extended substitutions. *)
let prefix_idents root freshening_sub prefixing_sub sg =
  (* Rename [id] in the freshening substitution, if one is being built. *)
  let refresh id add_fn = function
    | None -> id, None
    | Some sub ->
        let id' = Ident.rename id in
        id', Some (add_fn id (Pident id') sub)
  in
  let rec prefix_idents root items_and_paths freshening_sub prefixing_sub =
    function
    | [] -> (List.rev items_and_paths, freshening_sub, prefixing_sub)
    | Sig_value(id, _, _) as item :: rem ->
        (* Values need no freshening: nothing else in [sg] refers to them. *)
        let p = Pdot(root, Ident.name id) in
        prefix_idents root
          ((item, p) :: items_and_paths) freshening_sub prefixing_sub rem
    | Sig_type(id, td, rs, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub = refresh id Subst.add_type freshening_sub in
        prefix_idents root
          ((Sig_type(id', td, rs, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_type id' p prefixing_sub)
          rem
    | Sig_typext(id, ec, es, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub = refresh id Subst.add_type freshening_sub in
        prefix_idents root
          ((Sig_typext(id', ec, es, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_type id' p prefixing_sub)
          rem
    | Sig_module(id, pres, md, rs, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub = refresh id Subst.add_module freshening_sub in
        prefix_idents root
          ((Sig_module(id', pres, md, rs, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_module id' p prefixing_sub)
          rem
    | Sig_modtype(id, mtd, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub =
          refresh id (fun i p s -> Subst.add_modtype i (Mty_ident p) s)
            freshening_sub
        in
        prefix_idents root
          ((Sig_modtype(id', mtd, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_modtype id' (Mty_ident p) prefixing_sub)
          rem
    | Sig_class(id, cd, rs, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub = refresh id Subst.add_type freshening_sub in
        prefix_idents root
          ((Sig_class(id', cd, rs, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_type id' p prefixing_sub)
          rem
    | Sig_class_type(id, ctd, rs, vis) :: rem ->
        let p = Pdot(root, Ident.name id) in
        let id', freshening_sub = refresh id Subst.add_type freshening_sub in
        prefix_idents root
          ((Sig_class_type(id', ctd, rs, vis), p) :: items_and_paths)
          freshening_sub
          (Subst.add_type id' p prefixing_sub)
          rem
  in
  prefix_idents root [] freshening_sub prefixing_sub sg
(* Cons [decl] onto the list of declarations already stored under [id]. *)
let add_to_tbl id decl tbl =
  let previous =
    match NameMap.find_opt id tbl with
    | Some decls -> decls
    | None -> []
  in
  NameMap.add id (decl :: previous) tbl
(* Address builders for freshly stored items: most items live at their own
   identifier; primitives have no address; absent module aliases resolve
   their address lazily through the alias path. *)
let value_declaration_address (_ : t) id decl =
  match decl.val_kind with
  | Val_prim _ -> EnvLazy.create_failed Not_found
  | _ -> EnvLazy.create_forced (Aident id)

let extension_declaration_address (_ : t) id (_ : extension_constructor) =
  EnvLazy.create_forced (Aident id)

let class_declaration_address (_ : t) id (_ : class_declaration) =
  EnvLazy.create_forced (Aident id)

let module_declaration_address env id presence md =
  match presence with
  | Mp_absent -> begin
      match md.md_type with
      | Mty_alias path -> EnvLazy.create (ModAlias {env; path})
      | _ -> assert false
    end
  | Mp_present ->
      EnvLazy.create_forced (Aident id)
(* Build the (lazy) component table for the module [mty] rooted at [path];
   the actual work happens in [components_of_module_maker] on first force. *)
let rec components_of_module ~alerts ~loc env fs ps path addr mty =
  {
    alerts;
    loc;
    comps = EnvLazy.create {
      cm_env = env;
      cm_freshening_subst = fs;
      cm_prefixing_subst = ps;
      cm_path = path;
      cm_addr = addr;
      cm_mty = mty
    }
  }
(* Force a component table: scrape the module type and, for a signature,
   build per-namespace maps of its (prefixed, substituted) items.
   Fix: the comment in the [Sig_modtype] case had been duplicated by
   extraction, with the first copy missing its '(*' delimiter (bare prose
   plus an orphan '*)'); restored as a single well-delimited comment. *)
and components_of_module_maker {cm_env; cm_freshening_subst; cm_prefixing_subst;
                                cm_path; cm_addr; cm_mty} =
  match scrape_alias cm_env cm_freshening_subst cm_mty with
    Mty_signature sg ->
      let c =
        { comp_values = NameMap.empty;
          comp_constrs = NameMap.empty;
          comp_labels = NameMap.empty; comp_types = NameMap.empty;
          comp_modules = NameMap.empty; comp_modtypes = NameMap.empty;
          comp_components = NameMap.empty; comp_classes = NameMap.empty;
          comp_cltypes = NameMap.empty } in
      let items_and_paths, freshening_sub, prefixing_sub =
        prefix_idents cm_path cm_freshening_subst cm_prefixing_subst sg
      in
      let env = ref cm_env in
      let pos = ref 0 in
      (* Consecutive projection addresses off the module's own address. *)
      let next_address () =
        let addr : address_unforced =
          Projection { parent = cm_addr; pos = !pos }
        in
        incr pos;
        EnvLazy.create addr
      in
      let sub = may_subst Subst.compose freshening_sub prefixing_sub in
      List.iter (fun (item, path) ->
        match item with
          Sig_value(id, decl, _) ->
            let decl' = Subst.value_description sub decl in
            let addr =
              match decl.val_kind with
              | Val_prim _ -> EnvLazy.create_failed Not_found
              | _ -> next_address ()
            in
            c.comp_values <-
              NameMap.add (Ident.name id) (decl', addr) c.comp_values;
        | Sig_type(id, decl, _, _) ->
            let fresh_decl =
              may_subst Subst.type_declaration freshening_sub decl
            in
            let final_decl = Subst.type_declaration prefixing_sub fresh_decl in
            Datarepr.set_row_name final_decl
              (Subst.type_path prefixing_sub (Path.Pident id));
            let constructors =
              List.map snd (Datarepr.constructors_of_type path final_decl) in
            let labels =
              List.map snd (Datarepr.labels_of_type path final_decl) in
            c.comp_types <-
              NameMap.add (Ident.name id)
                (final_decl, (constructors, labels))
                c.comp_types;
            List.iter
              (fun descr ->
                 c.comp_constrs <-
                   add_to_tbl descr.cstr_name (descr, None) c.comp_constrs)
              constructors;
            List.iter
              (fun descr ->
                 c.comp_labels <-
                   add_to_tbl descr.lbl_name descr c.comp_labels)
              labels;
            env := store_type_infos id fresh_decl !env
        | Sig_typext(id, ext, _, _) ->
            let ext' = Subst.extension_constructor sub ext in
            let descr = Datarepr.extension_descr path ext' in
            let addr = next_address () in
            c.comp_constrs <-
              add_to_tbl (Ident.name id) (descr, Some addr) c.comp_constrs
        | Sig_module(id, pres, md, _, _) ->
            let md' =
              EnvLazy.create (sub, Subst.Rescope (Path.scope cm_path), md)
            in
            let addr =
              match pres with
              | Mp_absent -> begin
                  match md.md_type with
                  | Mty_alias p ->
                      let path = may_subst Subst.module_path freshening_sub p in
                      EnvLazy.create (ModAlias {env = !env; path})
                  | _ -> assert false
                end
              | Mp_present -> next_address ()
            in
            c.comp_modules <-
              NameMap.add (Ident.name id) (md', addr) c.comp_modules;
            let alerts =
              Builtin_attributes.alerts_of_attrs md.md_attributes
            in
            let comps =
              components_of_module ~alerts ~loc:md.md_loc !env freshening_sub
                prefixing_sub path addr md.md_type
            in
            c.comp_components <-
              NameMap.add (Ident.name id) (comps, addr) c.comp_components;
            env :=
              store_module ~freshening_sub ~check:false id addr pres md !env
        | Sig_modtype(id, decl, _) ->
            let fresh_decl =
              (* the fresh_decl is only going in the local temporary env, and
                 shouldn't be used for anything. So we make the items local. *)
              may_subst (Subst.modtype_declaration Make_local) freshening_sub
                decl
            in
            let final_decl =
              Subst.modtype_declaration (Rescope (Path.scope cm_path))
                prefixing_sub fresh_decl
            in
            c.comp_modtypes <-
              NameMap.add (Ident.name id) final_decl c.comp_modtypes;
            env := store_modtype id fresh_decl !env
        | Sig_class(id, decl, _, _) ->
            let decl' = Subst.class_declaration sub decl in
            c.comp_classes <-
              NameMap.add (Ident.name id) (decl', next_address ())
                c.comp_classes
        | Sig_class_type(id, decl, _, _) ->
            let decl' = Subst.cltype_declaration sub decl in
            c.comp_cltypes <-
              NameMap.add (Ident.name id) decl' c.comp_cltypes)
        items_and_paths;
      Some (Structure_comps c)
  | Mty_functor(param, ty_arg, ty_res) ->
      let sub =
        may_subst Subst.compose cm_freshening_subst cm_prefixing_subst
      in
      let scoping = Subst.Rescope (Path.scope cm_path) in
      Some (Functor_comps {
        fcomp_param = param;
        fcomp_arg = may_map (Subst.modtype scoping sub) ty_arg;
        fcomp_res = Subst.modtype scoping sub ty_res;
        fcomp_cache = Hashtbl.create 17;
        fcomp_subst_cache = Hashtbl.create 17 })
  | Mty_ident _
  | Mty_alias _ -> None
(* Register an unused-item check for [id] declared at [loc]: a callback in
   [tbl] flips [used] when the item is referenced, and a delayed check warns
   otherwise.  Names starting with '_' or '#' are exempt. *)
and check_usage loc id warn tbl =
  if not loc.Location.loc_ghost && Warnings.is_active (warn "") then begin
    let name = Ident.name id in
    let key = (name, loc) in
    if Hashtbl.mem tbl key then ()
    else let used = ref false in
    Hashtbl.add tbl key (fun () -> used := true);
    if not (name = "" || name.[0] = '_' || name.[0] = '#')
    then
      !add_delayed_check_forward
        (fun () -> if not !used then Location.prerr_warning loc (warn name))
  end;
  (* NOTE(review): the trailing ';' after 'end' immediately before the next
     'and' looks like an extraction artifact; left untouched pending a check
     against the upstream file. *)
(* Reject a compiler-generated value name (leading '#') that contains a
   second '#' anywhere after the first character. *)
and check_value_name name loc =
  if String.length name > 0 && name.[0] = '#' then
    String.iteri
      (fun i c ->
         if i > 0 && c = '#' then error (Illegal_value_name(loc, name)))
      name
(* Bind a value, optionally registering an unused-value check via [check]. *)
and store_value ?check id addr decl env =
  check_value_name (Ident.name id) decl.val_loc;
  may (fun f -> check_usage decl.val_loc id f value_declarations) check;
  { env with
    values = IdTbl.add id (decl, addr) env.values;
    summary = Env_value(env.summary, id, decl) }
(* Bind a type declaration together with its constructors and labels.  When
   [check] is set, register unused-declaration and unused-constructor
   warnings. *)
and store_type ~check id info env =
  let loc = info.type_loc in
  if check then
    check_usage loc id (fun s -> Warnings.Unused_type_declaration s)
      type_declarations;
  let path = Pident id in
  let constructors = Datarepr.constructors_of_type path info in
  let labels = Datarepr.labels_of_type path info in
  let descrs = (List.map snd constructors, List.map snd labels) in
  if check && not loc.Location.loc_ghost &&
     Warnings.is_active (Warnings.Unused_constructor ("", false, false))
  then begin
    let ty = Ident.name id in
    List.iter
      begin fun (_, {cstr_name = c; _}) ->
        let k = (ty, loc, c) in
        if not (Hashtbl.mem used_constructors k) then
          let used = constructor_usages () in
          Hashtbl.add used_constructors k (add_constructor_usage used);
          if not (ty = "" || ty.[0] = '_')
          then !add_delayed_check_forward
              (fun () ->
                 if not (is_in_signature env) && not used.cu_positive then
                   Location.prerr_warning loc
                     (Warnings.Unused_constructor
                        (c, used.cu_pattern, used.cu_privatize)))
      end
      constructors
  end;
  { env with
    constrs =
      List.fold_right
        (fun (id, descr) constrs -> TycompTbl.add id (descr, None) constrs)
        constructors
        env.constrs;
    labels =
      List.fold_right
        (fun (id, descr) labels -> TycompTbl.add id descr labels)
        labels
        env.labels;
    types = IdTbl.add id (info, descrs) env.types;
    summary = Env_type(env.summary, id, info) }
(* Bind a type without constructor/label descriptions (used while building
   component tables, where only the declaration itself is needed). *)
and store_type_infos id info env =
  { env with
    types = IdTbl.add id (info,([],[])) env.types;
    summary = Env_type(env.summary, id, info) }
(* Bind an extension constructor; when [check] is set, register an
   unused-extension warning keyed by (type name, loc, constructor name). *)
and store_extension ~check id addr ext env =
  let loc = ext.ext_loc in
  if check && not loc.Location.loc_ghost &&
     Warnings.is_active (Warnings.Unused_extension ("", false, false, false))
  then begin
    let is_exception = Path.same ext.ext_type_path Predef.path_exn in
    let ty = Path.last ext.ext_type_path in
    let n = Ident.name id in
    let k = (ty, loc, n) in
    if not (Hashtbl.mem used_constructors k) then begin
      let used = constructor_usages () in
      Hashtbl.add used_constructors k (add_constructor_usage used);
      !add_delayed_check_forward
        (fun () ->
           if not (is_in_signature env) && not used.cu_positive then
             Location.prerr_warning loc
               (Warnings.Unused_extension
                  (n, is_exception, used.cu_pattern, used.cu_privatize)
               )
        )
    end;
  end;
  let desc = Datarepr.extension_descr (Pident id) ext in
  { env with
    constrs = TycompTbl.add id (desc, Some addr) env.constrs;
    summary = Env_extension(env.summary, id, ext) }
(* Bind a module.  [freshening_sub], when present, delays substitution of
   the declaration; the module's component table is cached alongside. *)
and store_module ~check ~freshening_sub id addr presence md env =
  let loc = md.md_loc in
  if check then
    check_usage loc id (fun s -> Warnings.Unused_module s)
      module_declarations;
  let alerts = Builtin_attributes.alerts_of_attrs md.md_attributes in
  let module_decl_lazy =
    match freshening_sub with
    | None -> EnvLazy.create_forced md
    | Some s -> EnvLazy.create (s, Subst.Rescope (Ident.scope id), md)
  in
  { env with
    modules = IdTbl.add id (Value (module_decl_lazy, addr)) env.modules;
    components =
      IdTbl.add id
        (Value
           (components_of_module ~alerts ~loc:md.md_loc
              env freshening_sub Subst.identity (Pident id) addr md.md_type,
            addr))
        env.components;
    summary = Env_module(env.summary, id, presence, md) }
(* Simple stores: bind the item and extend the summary. *)
and store_modtype id info env =
  { env with
    modtypes = IdTbl.add id info env.modtypes;
    summary = Env_modtype(env.summary, id, info) }

and store_class id addr desc env =
  { env with
    classes = IdTbl.add id (desc, addr) env.classes;
    summary = Env_class(env.summary, id, desc) }

and store_cltype id desc env =
  { env with
    cltypes = IdTbl.add id desc env.cltypes;
    summary = Env_cltype(env.summary, id, desc) }
(* Public [scrape_alias]: no pending substitution. *)
let scrape_alias env mty = scrape_alias env None mty

(* Components of functor [f] applied to [p2], memoized in [f.fcomp_cache]. *)
let components_of_functor_appl f env p1 p2 =
  try
    Hashtbl.find f.fcomp_cache p2
  with Not_found ->
    let p = Papply(p1, p2) in
    let sub = Subst.add_module f.fcomp_param p2 Subst.identity in
    let mty = Subst.modtype (Rescope (Path.scope p)) sub f.fcomp_res in
    let addr = EnvLazy.create_failed Not_found in
    !check_well_formed_module env Location.(in_file !input_name)
      ("the signature of " ^ Path.name p) mty;
    let comps =
      components_of_module ~alerts:Misc.Stdlib.String.Map.empty
        ~loc:Location.none
        env None Subst.identity p addr mty
    in
    Hashtbl.add f.fcomp_cache p2 comps;
    comps
(* Tie the forward references used before these functions were defined. *)
let _ =
  components_of_module' := components_of_module;
  components_of_functor_appl' := components_of_functor_appl;
  components_of_module_maker' := components_of_module_maker

(* Record [id] as a functor argument. *)
let add_functor_arg id env =
  {env with
   functor_args = Ident.add id () env.functor_args;
   summary = Env_functor_arg (env.summary, id)}
(* Insertion functions: compute an address, then store the item. *)
let add_value ?check id desc env =
  let addr = value_declaration_address env id desc in
  store_value ?check id addr desc env

let add_type ~check id info env =
  store_type ~check id info env

and add_extension ~check id ext env =
  let addr = extension_declaration_address env id ext in
  store_extension ~check id addr ext env

and add_module_declaration ?(arg=false) ~check id presence md env =
  let addr = module_declaration_address env id presence md in
  let env = store_module ~freshening_sub:None ~check id addr presence md env in
  if arg then add_functor_arg id env else env

and add_modtype id info env =
  store_modtype id info env

and add_class id ty env =
  let addr = class_declaration_address env id ty in
  store_class id addr ty env

and add_cltype id ty env =
  store_cltype id ty env

let add_module ?arg id presence mty env =
  add_module_declaration ~check:false ?arg id presence (md mty) env

(* Record a local type constraint for [path]. *)
let add_local_type path info env =
  { env with
    local_constraints = Path.Map.add path info env.local_constraints }
(* [enter_*]: create a fresh (local or scoped) identifier for [name], store
   the item, and return the identifier with the new environment. *)
let enter_value ?check name desc env =
  let id = Ident.create_local name in
  let addr = value_declaration_address env id desc in
  let env = store_value ?check id addr desc env in
  (id, env)

let enter_type ~scope name info env =
  let id = Ident.create_scoped ~scope name in
  let env = store_type ~check:true id info env in
  (id, env)

let enter_extension ~scope name ext env =
  let id = Ident.create_scoped ~scope name in
  let addr = extension_declaration_address env id ext in
  let env = store_extension ~check:true id addr ext env in
  (id, env)

let enter_module_declaration ?arg id presence md env =
  add_module_declaration ?arg ~check:true id presence md env

let enter_modtype ~scope name mtd env =
  let id = Ident.create_scoped ~scope name in
  let env = store_modtype id mtd env in
  (id, env)

let enter_class ~scope name desc env =
  let id = Ident.create_scoped ~scope name in
  let addr = class_declaration_address env id desc in
  let env = store_class id addr desc env in
  (id, env)

let enter_cltype ~scope name desc env =
  let id = Ident.create_scoped ~scope name in
  let env = store_cltype id desc env in
  (id, env)

let enter_module ~scope ?arg s presence mty env =
  let id = Ident.create_scoped ~scope s in
  let env = enter_module_declaration ?arg id presence (md mty) env in
  (id, env)
(* Insert one signature item into [env]. *)
let add_item comp env =
  match comp with
    Sig_value(id, decl, _) -> add_value id decl env
  | Sig_type(id, decl, _, _) -> add_type ~check:false id decl env
  | Sig_typext(id, ext, _, _) -> add_extension ~check:false id ext env
  | Sig_module(id, presence, md, _, _) ->
      add_module_declaration ~check:false id presence md env
  | Sig_modtype(id, decl, _) -> add_modtype id decl env
  | Sig_class(id, decl, _, _) -> add_class id decl env
  | Sig_class_type(id, decl, _, _) -> add_cltype id decl env

(* Insert all items of [sg], in order. *)
let rec add_signature sg env =
  match sg with
    [] -> env
  | comp :: rem -> add_signature rem (add_item comp env)

(* Rescope [sg] to [scope], add it, and return the rescoped signature. *)
let enter_signature ~scope sg env =
  let sg = Subst.signature (Rescope scope) Subst.identity sg in
  sg, add_signature sg env
(* Add every component of an opened structure rooted at [root] to [env0],
   wrapping each addition with [slot] (shadowing/unused-open bookkeeping). *)
let add_components slot root env0 comps =
  let add_l w comps env0 =
    TycompTbl.add_open slot w comps env0
  in
  let add w comps env0 = IdTbl.add_open slot w root comps env0 in
  let constrs =
    add_l (fun x -> `Constructor x) comps.comp_constrs env0.constrs
  in
  let labels =
    add_l (fun x -> `Label x) comps.comp_labels env0.labels
  in
  let values =
    add (fun x -> `Value x) comps.comp_values env0.values
  in
  let types =
    add (fun x -> `Type x) comps.comp_types env0.types
  in
  let modtypes =
    add (fun x -> `Module_type x) comps.comp_modtypes env0.modtypes
  in
  let classes =
    add (fun x -> `Class x) comps.comp_classes env0.classes
  in
  let cltypes =
    add (fun x -> `Class_type x) comps.comp_cltypes env0.cltypes
  in
  let components =
    let components =
      NameMap.map (fun x -> Value x) comps.comp_components
    in
    add (fun x -> `Component x) components env0.components
  in
  let modules =
    let modules =
      NameMap.map (fun x -> Value x) comps.comp_modules
    in
    add (fun x -> `Module x) modules env0.modules
  in
  { env0 with
    summary = Env_open(env0.summary, root);
    constrs;
    labels;
    values;
    types;
    modtypes;
    classes;
    cltypes;
    components;
    modules;
  }
(* Open the structure at [root]: add its components to [env0], tagged with
   [slot].  Functors cannot be opened. *)
let open_signature slot root env0 =
  match get_components (find_module_descr root env0) with
  | Functor_comps _ -> None
  | Structure_comps comps ->
      Some (add_components slot root env0 comps)
let open_pers_signature name env =
match open_signature None (Pident(Ident.create_persistent name)) env with
| Some env -> env
let open_signature
?(used_slot = ref false)
?(loc = Location.none) ?(toplevel = false)
ovf root env =
let unused =
match ovf with
| Asttypes.Fresh -> Warnings.Unused_open (Path.name root)
| Asttypes.Override -> Warnings.Unused_open_bang (Path.name root)
in
let warn_unused =
Warnings.is_active unused
and warn_shadow_id =
Warnings.is_active (Warnings.Open_shadow_identifier ("", ""))
and warn_shadow_lc =
Warnings.is_active (Warnings.Open_shadow_label_constructor ("",""))
in
if not toplevel && not loc.Location.loc_ghost
&& (warn_unused || warn_shadow_id || warn_shadow_lc)
then begin
let used = used_slot in
if warn_unused then
!add_delayed_check_forward
(fun () ->
if not !used then begin
used := true;
Location.prerr_warning loc unused
end
);
let shadowed = ref [] in
let slot s b =
begin match check_shadowing env b with
| Some kind when
ovf = Asttypes.Fresh && not (List.mem (kind, s) !shadowed) ->
shadowed := (kind, s) :: !shadowed;
let w =
match kind with
| "label" | "constructor" ->
Warnings.Open_shadow_label_constructor (kind, s)
| _ -> Warnings.Open_shadow_identifier (kind, s)
in
Location.prerr_warning loc w
| _ -> ()
end;
used := true
in
open_signature (Some slot) root env
end
else open_signature None root env
let read_signature modname filename =
let pm = read_pers_mod modname filename in
Lazy.force pm.pm_signature
let is_identchar_latin1 = function
| 'A'..'Z' | 'a'..'z' | '_' | '\192'..'\214' | '\216'..'\246'
| '\248'..'\255' | '\'' | '0'..'9' -> true
| _ -> false
let unit_name_of_filename fn =
match Filename.extension fn with
| ".cmi" -> begin
let unit =
String.capitalize_ascii (Filename.remove_extension fn)
in
if String.for_all is_identchar_latin1 unit then
Some unit
else
None
end
| _ -> None
let persistent_structures_of_dir dir =
Load_path.Dir.files dir
|> List.to_seq
|> Seq.filter_map unit_name_of_filename
|> String.Set.of_seq
let save_signature_with_transform cmi_transform ~alerts sg modname filename =
Btype.cleanup_abbrev ();
Subst.reset_for_saving ();
let sg = Subst.signature Make_local (Subst.for_saving Subst.identity) sg in
let cmi =
Persistent_env.make_cmi persistent_env modname sg alerts
|> cmi_transform in
let pm = save_sign_of_cmi
{ Persistent_env.Persistent_signature.cmi; filename } in
Persistent_env.save_cmi persistent_env
{ Persistent_env.Persistent_signature.filename; cmi } pm;
cmi
let save_signature ~alerts sg modname filename =
save_signature_with_transform (fun cmi -> cmi)
~alerts sg modname filename
let save_signature_with_imports ~alerts sg modname filename imports =
let with_imports cmi = { cmi with cmi_crcs = imports } in
save_signature_with_transform with_imports
~alerts sg modname filename
let find_all proj1 proj2 f lid env acc =
match lid with
| None ->
IdTbl.fold_name
(fun name (p, data) acc -> f name p data acc)
(proj1 env) acc
| Some l ->
let p, desc = lookup_module_descr ~mark:true l env in
begin match get_components desc with
Structure_comps c ->
NameMap.fold
(fun s data acc -> f s (Pdot (p, s)) data acc)
(proj2 c) acc
| Functor_comps _ ->
acc
end
let find_all_simple_list proj1 proj2 f lid env acc =
match lid with
| None ->
TycompTbl.fold_name
(fun data acc -> f data acc)
(proj1 env) acc
| Some l ->
let (_p, desc) = lookup_module_descr ~mark:true l env in
begin match get_components desc with
Structure_comps c ->
NameMap.fold
(fun _s comps acc ->
match comps with
| [] -> acc
| data :: _ -> f data acc)
(proj2 c) acc
| Functor_comps _ ->
acc
end
let fold_modules f lid env acc =
match lid with
| None ->
IdTbl.fold_name
(fun name (p, data) acc ->
match data with
| Value (data, _) ->
let data = EnvLazy.force subst_modtype_maker data in
f name p data acc
| Persistent ->
match Persistent_env.find_in_cache persistent_env name with
| None -> acc
| Some pm ->
let data = md (Mty_signature (Lazy.force pm.pm_signature)) in
f name p data acc)
env.modules
acc
| Some l ->
let p, desc = lookup_module_descr ~mark:true l env in
begin match get_components desc with
| Structure_comps c ->
NameMap.fold
(fun s (data, _) acc ->
f s (Pdot (p, s))
(EnvLazy.force subst_modtype_maker data) acc)
c.comp_modules
acc
| Functor_comps _ ->
acc
end
let fold_values f =
find_all (fun env -> env.values) (fun sc -> sc.comp_values)
(fun k p (vd, _) acc -> f k p vd acc)
and fold_constructors f =
find_all_simple_list (fun env -> env.constrs) (fun sc -> sc.comp_constrs)
(fun (cd, _) acc -> f cd acc)
and fold_labels f =
find_all_simple_list (fun env -> env.labels) (fun sc -> sc.comp_labels) f
and fold_types f =
find_all (fun env -> env.types) (fun sc -> sc.comp_types) f
and fold_modtypes f =
find_all (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes) f
and fold_classes f =
find_all (fun env -> env.classes) (fun sc -> sc.comp_classes)
(fun k p (vd, _) acc -> f k p vd acc)
and fold_cltypes f =
find_all (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes) f
let filter_non_loaded_persistent f env =
let to_remove =
IdTbl.fold_name
(fun name (_, data) acc ->
match data with
| Value _ -> acc
| Persistent ->
match Persistent_env.find_in_cache persistent_env name with
| Some _ -> acc
| None ->
if f (Ident.create_persistent name) then
acc
else
String.Set.add name acc)
env.modules
String.Set.empty
in
let remove_ids tbl ids =
String.Set.fold
(fun name tbl -> IdTbl.remove (Ident.create_persistent name) tbl)
ids
tbl
in
let rec filter_summary summary ids =
if String.Set.is_empty ids then
summary
else
match summary with
| Env_empty -> summary
| Env_value (s, id, vd) ->
Env_value (filter_summary s ids, id, vd)
| Env_type (s, id, td) ->
Env_type (filter_summary s ids, id, td)
| Env_extension (s, id, ec) ->
Env_extension (filter_summary s ids, id, ec)
| Env_module (s, id, mp, md) ->
Env_module (filter_summary s ids, id, mp, md)
| Env_modtype (s, id, md) ->
Env_modtype (filter_summary s ids, id, md)
| Env_class (s, id, cd) ->
Env_class (filter_summary s ids, id, cd)
| Env_cltype (s, id, ctd) ->
Env_cltype (filter_summary s ids, id, ctd)
| Env_open (s, p) ->
Env_open (filter_summary s ids, p)
| Env_functor_arg (s, id) ->
Env_functor_arg (filter_summary s ids, id)
| Env_constraints (s, cstrs) ->
Env_constraints (filter_summary s ids, cstrs)
| Env_copy_types (s, types) ->
Env_copy_types (filter_summary s ids, types)
| Env_persistent (s, id) ->
if String.Set.mem (Ident.name id) ids then
filter_summary s (String.Set.remove (Ident.name id) ids)
else
Env_persistent (filter_summary s ids, id)
in
{ env with
modules = remove_ids env.modules to_remove;
components = remove_ids env.components to_remove;
summary = filter_summary env.summary to_remove;
}
let (initial_safe_string, initial_unsafe_string) =
Predef.build_initial_env
(add_type ~check:false)
(add_extension ~check:false)
empty
let summary env =
if Path.Map.is_empty env.local_constraints then env.summary
else Env_constraints (env.summary, env.local_constraints)
let last_env = ref empty
let last_reduced_env = ref empty
let keep_only_summary env =
if !last_env == env then !last_reduced_env
else begin
let new_env =
{
empty with
summary = env.summary;
local_constraints = env.local_constraints;
flags = env.flags;
}
in
last_env := env;
last_reduced_env := new_env;
new_env
end
let env_of_only_summary env_from_summary env =
let new_env = env_from_summary env.summary Subst.identity in
{ new_env with
local_constraints = env.local_constraints;
flags = env.flags;
}
open Format
let report_error ppf = function
| Missing_module(_, path1, path2) ->
fprintf ppf "@[@[<hov>";
if Path.same path1 path2 then
fprintf ppf "Internal path@ %s@ is dangling." (Path.name path1)
else
fprintf ppf "Internal path@ %s@ expands to@ %s@ which is dangling."
(Path.name path1) (Path.name path2);
fprintf ppf "@]@ @[%s@ %s@ %s.@]@]"
"The compiled interface for module" (Ident.name (Path.head path2))
"was not found"
| Illegal_value_name(_loc, name) ->
fprintf ppf "'%s' is not a valid value identifier."
name
let () =
Location.register_error_of_exn
(function
| Error err ->
let loc = match err with
(Missing_module (loc, _, _) | Illegal_value_name (loc, _)) -> loc
in
let error_of_printer =
if loc = Location.none
then Location.error_of_printer_file
else Location.error_of_printer ~loc ?sub:None in
Some (error_of_printer report_error err)
| _ ->
None
)
|
0d0d6835c1af8c615e2018d6ce7ac56714be10f45bf172fb7dfb2ea5004d034b | b0-system/b0 | test.ml |
let main () =
let tid = Thread.create (fun () -> print_endline "Ha!") () in
Thread.join tid;
print_endline "Ho!"
let () = main ()
| null | https://raw.githubusercontent.com/b0-system/b0/2e7fa30e7e565e8edf831ea5e62c578eeca7c626/examples/ocaml-threads/test.ml | ocaml |
let main () =
let tid = Thread.create (fun () -> print_endline "Ha!") () in
Thread.join tid;
print_endline "Ho!"
let () = main ()
| |
73efe9191e6caeb77aa26a6fe0cdc83920dc8bc21d751f6942c80e811ebc4bc0 | karlhof26/gimp-scheme | AutoColorize_FlavorE_6_02.scm |
; Auto colorize image into random number of colors of random hues
author :
date : 2015
(define (script-fu-auto-colorize-e image layer
hatches
)
(let*
(
(color-map 0)
(colors 0)
(image-width)
(image-height)
( R 0.2126 ) ; constants for calculating luminance
( G 0.7152 )
;(B 0.0722)
;(0.299*R + 0.587*G + 0.114*B)
( R 0.299 )
;(G 0.587)
( B 0.114 )
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
;(B (/ 18.0 255))
( R ( / 54.0 255 ) )
( G ( / 182.0 255 ) )
wikipedia
(B 0.0722)
(R 0.2126)
(G 0.7152)
;how my camera sees black and white
;(B (/ 147 479))
;(R (/ 138 479))
( G ( / 194 479 ) )
(r) ;randomly generated r g b values
(g)
(b)
(l-original) ;luminance original
(l-new)
(red 0)
(green 0)
(blue 0)
(y 0)
(hue)
(floating)
(difference)
)
;(gimp-image-undo-disable image); DN = NO UNDO
undo - group in one step
;convert to indexed
(set! image-width (car (gimp-image-width image)))
(set! image-height (car (gimp-image-height image)))
(gimp-image-convert-indexed image CONVERT-DITHER-NONE CONVERT-PALETTE-GENERATE hatches FALSE FALSE "unused palette name")
;grabs color map
(set! colors (vector->list (cadr (gimp-image-get-colormap image))))
(gimp-image-convert-rgb image) ;converts it to rgb before we call hatch loop
(set! y hatches) ;loop hatches number of times
(srand (car (gettimeofday)))
(gimp-context-set-sample-threshold 0)
(while (> y 0)
;do work here
(set! red (car colors))
(set! green (cadr colors))
(set! blue (caddr colors))
;select each color
(gimp-image-set-active-layer image layer)
(gimp-image-select-color image CHANNEL-OP-REPLACE layer (list red green blue))
( set ! hue ( rand 360 ) )
( gimp - colorize layer hue 100 0 )
;(gimp-edit-copy layer)
;(set! floating (car(gimp-edit-paste layer TRUE)))
;(gimp-floating-sel-to-layer floating)
;(gimp-image-set-active-layer image floating)
(set! floating (car (gimp-layer-new image image-width image-height
RGBA-IMAGE "Colorize" 100 LAYER-MODE-NORMAL))) ;creates layer
;insert above current layer
;(gimp-image-insert-layer image new-layer 0 (car (gimp-image-get-item-position image layer)))
(gimp-image-insert-layer image floating 0 0)
;set that layer to be active layer
(gimp-image-set-active-layer image floating)
(set! hue (rand 360))
(gimp-drawable-colorize-hsl floating hue 100 10)
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
( set ! l - original ( sqrt(+ ( pow ( * red R ) 2 ) ( pow ( * green G ) 2 ) ( pow ( * blue B ) 2 ) ) ) )
(set! l-original (+ (* red R) (* green G) (* blue B)))
(set! difference 10)
;just randomly pick a color until we find a color of similar luminance
;absolutely not the ideal way of getting a color
(while (> difference 1)
(if (< l-original 10)
(begin
(set! r (rand 21))
(set! g (rand 21))
(set! b (rand 21))
)
(begin
(if (> l-original 245)
(begin
(set! r (+ (rand 20) 235))
(set! g (+ (rand 20) 235))
(set! b (+ (rand 20) 235))
)
(begin
(set! r (- (rand 255) 1))
(set! g (- (rand 255) 1))
(set! b (- (rand 255) 1))
)
)
)
)
( set ! l - new ( sqrt(+ ( pow ( * r R ) 2 ) ( pow ( * g G ) 2 ) ( pow ( * b B ) 2 ) ) ) )
(set! l-new (+ (* r R) (* g G) (* b B)))
(set! difference (abs (- l-new l-original)))
)
( script - fu - colorize image floating ( list b ) 100 )
(gimp-context-set-foreground (list r g b))
(gimp-edit-fill floating FILL-FOREGROUND)
(if (> y 1) ;if y is still valid we set colors to the next colors
(begin
(set! colors (cdddr colors))
)
(begin ;else
)
)
;loop control
(set! y (- y 1))
);end of while
(gimp-selection-none image)
;(gimp-image-undo-enable image) ;DN = NO UNDO
undo group in one step
(gimp-displays-flush)
(gc) ; garbage cleanup
)
) ;end of define
(script-fu-register
"script-fu-auto-colorize-e" ;function name
"<Image>/Script-Fu2/Create from Image/Auto Colorize Flavor E" ;menu register
"Randomly colorize image with specified number of colors. \nfile: AutoColorize_FlavorE_6_02.scm" ;description
"Tin Tran" ;author name
"copyright info and description" ;copyright info or description
"2015" ;date
"RGB*, GRAY*" ;mode
SF-IMAGE "Image" 0
SF-DRAWABLE "Layer" 0
SF-ADJUSTMENT "Number of colors" '(5 2 255 1 10 0 0)
)
; end of file | null | https://raw.githubusercontent.com/karlhof26/gimp-scheme/b1e836958cd24a085b245e7a7b9dbce50b6a8a70/AutoColorize_FlavorE_6_02.scm | scheme | Auto colorize image into random number of colors of random hues
constants for calculating luminance
(B 0.0722)
(0.299*R + 0.587*G + 0.114*B)
(G 0.587)
(B (/ 18.0 255))
how my camera sees black and white
(B (/ 147 479))
(R (/ 138 479))
randomly generated r g b values
luminance original
(gimp-image-undo-disable image); DN = NO UNDO
convert to indexed
grabs color map
converts it to rgb before we call hatch loop
loop hatches number of times
do work here
select each color
(gimp-edit-copy layer)
(set! floating (car(gimp-edit-paste layer TRUE)))
(gimp-floating-sel-to-layer floating)
(gimp-image-set-active-layer image floating)
creates layer
insert above current layer
(gimp-image-insert-layer image new-layer 0 (car (gimp-image-get-item-position image layer)))
set that layer to be active layer
just randomly pick a color until we find a color of similar luminance
absolutely not the ideal way of getting a color
if y is still valid we set colors to the next colors
else
loop control
end of while
(gimp-image-undo-enable image) ;DN = NO UNDO
garbage cleanup
end of define
function name
menu register
description
author name
copyright info or description
date
mode
end of file |
author :
date : 2015
(define (script-fu-auto-colorize-e image layer
hatches
)
(let*
(
(color-map 0)
(colors 0)
(image-width)
(image-height)
( G 0.7152 )
( R 0.299 )
( B 0.114 )
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
( R ( / 54.0 255 ) )
( G ( / 182.0 255 ) )
wikipedia
(B 0.0722)
(R 0.2126)
(G 0.7152)
( G ( / 194 479 ) )
(g)
(b)
(l-new)
(red 0)
(green 0)
(blue 0)
(y 0)
(hue)
(floating)
(difference)
)
undo - group in one step
(set! image-width (car (gimp-image-width image)))
(set! image-height (car (gimp-image-height image)))
(gimp-image-convert-indexed image CONVERT-DITHER-NONE CONVERT-PALETTE-GENERATE hatches FALSE FALSE "unused palette name")
(set! colors (vector->list (cadr (gimp-image-get-colormap image))))
(srand (car (gettimeofday)))
(gimp-context-set-sample-threshold 0)
(while (> y 0)
(set! red (car colors))
(set! green (cadr colors))
(set! blue (caddr colors))
(gimp-image-set-active-layer image layer)
(gimp-image-select-color image CHANNEL-OP-REPLACE layer (list red green blue))
( set ! hue ( rand 360 ) )
( gimp - colorize layer hue 100 0 )
(set! floating (car (gimp-layer-new image image-width image-height
(gimp-image-insert-layer image floating 0 0)
(gimp-image-set-active-layer image floating)
(set! hue (rand 360))
(gimp-drawable-colorize-hsl floating hue 100 10)
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
( set ! l - original ( sqrt(+ ( pow ( * red R ) 2 ) ( pow ( * green G ) 2 ) ( pow ( * blue B ) 2 ) ) ) )
(set! l-original (+ (* red R) (* green G) (* blue B)))
(set! difference 10)
(while (> difference 1)
(if (< l-original 10)
(begin
(set! r (rand 21))
(set! g (rand 21))
(set! b (rand 21))
)
(begin
(if (> l-original 245)
(begin
(set! r (+ (rand 20) 235))
(set! g (+ (rand 20) 235))
(set! b (+ (rand 20) 235))
)
(begin
(set! r (- (rand 255) 1))
(set! g (- (rand 255) 1))
(set! b (- (rand 255) 1))
)
)
)
)
( set ! l - new ( sqrt(+ ( pow ( * r R ) 2 ) ( pow ( * g G ) 2 ) ( pow ( * b B ) 2 ) ) ) )
(set! l-new (+ (* r R) (* g G) (* b B)))
(set! difference (abs (- l-new l-original)))
)
( script - fu - colorize image floating ( list b ) 100 )
(gimp-context-set-foreground (list r g b))
(gimp-edit-fill floating FILL-FOREGROUND)
(begin
(set! colors (cdddr colors))
)
)
)
(set! y (- y 1))
(gimp-selection-none image)
undo group in one step
(gimp-displays-flush)
)
(script-fu-register
SF-IMAGE "Image" 0
SF-DRAWABLE "Layer" 0
SF-ADJUSTMENT "Number of colors" '(5 2 255 1 10 0 0)
)
|
b581c6fbcda5be5eb5f12f0d623c96a178b4004881e0a6e8660e1261019dcdab | whamtet/ctmx | project.clj | (defproject ctmx "1.4.9"
:description "Backend helpers for htmx"
:url ""
:license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0"
:url "-2.0/"}
:dependencies [[org.clojure/clojure "1.10.0"]
[hiccup "2.0.0-alpha2"]
[org.clojure/clojurescript "1.10.773"]
[macchiato/hiccups "0.4.1"]
;; TODO reitit-ring
[metosin/reitit "0.5.11"]]
:repositories [["clojars" {:url "/"
:sign-releases false}]]
:resource-paths ["src/resources"]
:plugins [[lein-auto "0.1.3"]]
:repl-options {:init-ns ctmx.core}
:profiles {:test {:dependencies [[ring/ring-mock "0.4.0"]]}
:test-repl [:test :leiningen/default]})
| null | https://raw.githubusercontent.com/whamtet/ctmx/dc2324658d3da702b98c5014ba5f733e972957fb/project.clj | clojure | TODO reitit-ring | (defproject ctmx "1.4.9"
:description "Backend helpers for htmx"
:url ""
:license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0"
:url "-2.0/"}
:dependencies [[org.clojure/clojure "1.10.0"]
[hiccup "2.0.0-alpha2"]
[org.clojure/clojurescript "1.10.773"]
[macchiato/hiccups "0.4.1"]
[metosin/reitit "0.5.11"]]
:repositories [["clojars" {:url "/"
:sign-releases false}]]
:resource-paths ["src/resources"]
:plugins [[lein-auto "0.1.3"]]
:repl-options {:init-ns ctmx.core}
:profiles {:test {:dependencies [[ring/ring-mock "0.4.0"]]}
:test-repl [:test :leiningen/default]})
|
8bebcd49be15c9b8d6a992a6d92bb399fe3e7e23e3d19f65adaac8199dfc1f64 | xray-tech/xorc-xray | persistence.clj | (ns re.stage.persistence
(:require [integrant.core :as ig]
[re.boundary.states :as states]
[re.effects :as effects]))
(defmethod ig/init-key :re.stage/persistence [_ _]
{:enter (fn [{:keys [:core/state-id :core/state-meta
:core/program :oam/state] :as data}]
[(if state
(effects/enqueue data :db (states/save-statement {:id state-id
:program_id (:id program)
:state state
:meta (merge (:core/state state) state-meta)}))
(effects/enqueue data :db [{:delete :states
:where {:id state-id}}]))])})
| null | https://raw.githubusercontent.com/xray-tech/xorc-xray/ee1c841067207c5952473dc8fb1f0b7d237976cb/src/re/stage/persistence.clj | clojure | (ns re.stage.persistence
(:require [integrant.core :as ig]
[re.boundary.states :as states]
[re.effects :as effects]))
(defmethod ig/init-key :re.stage/persistence [_ _]
{:enter (fn [{:keys [:core/state-id :core/state-meta
:core/program :oam/state] :as data}]
[(if state
(effects/enqueue data :db (states/save-statement {:id state-id
:program_id (:id program)
:state state
:meta (merge (:core/state state) state-meta)}))
(effects/enqueue data :db [{:delete :states
:where {:id state-id}}]))])})
| |
88ff880b49bae737b2b6530cf20dd15e64e682707d11e355c42565b39f193f2a | stchang/macrotypes | stlc+rec-iso-tests.rkt | #lang s-exp turnstile/examples/optimize/stlc+rec-iso
(require rackunit/turnstile)
(define-type-alias IntList (μ (X) (∨ [nil : Unit] [cons : (× Int X)])))
(define-type-alias ILBody (∨ [nil : Unit] [cons : (× Int IntList)]))
;; nil
(define nil (fld {IntList} (var nil = (void) as ILBody)))
(check-type nil : IntList)
;; cons
(define cons (λ ([n : Int] [lst : IntList]) (fld {IntList} (var cons = (tup n lst) as ILBody))))
(check-type cons : (→ Int IntList IntList))
(check-type (cons 1 nil) : IntList)
(typecheck-fail (cons 1 2))
(typecheck-fail (cons "1" nil))
;; isnil
(define isnil
(λ ([lst : IntList])
(case (unfld {IntList} lst)
[nil n => #t]
[cons p => #f])))
(check-type isnil : (→ IntList Bool))
(check-type (isnil nil) : Bool ⇒ #t)
(check-type (isnil (cons 1 nil)) : Bool ⇒ #f)
(typecheck-fail (isnil 1))
(typecheck-fail (isnil (cons 1 2)))
(check-type (λ ([f : (→ IntList Bool)]) (f nil)) : (→ (→ IntList Bool) Bool))
(check-type ((λ ([f : (→ IntList Bool)]) (f nil)) isnil) : Bool ⇒ #t)
;; hd
(define hd
(λ ([lst : IntList])
(case (unfld {IntList} lst)
[nil n => 0]
[cons p => (proj p 0)])))
(check-type hd : (→ IntList Int))
(check-type (hd nil) : Int ⇒ 0)
(typecheck-fail (hd 1))
(check-type (hd (cons 11 nil)) : Int ⇒ 11)
;; tl
(define tl
(λ ([lst : IntList])
(case (unfld {IntList} lst)
[nil n => lst]
[cons p => (proj p 1)])))
(check-type tl : (→ IntList IntList))
(check-type (tl nil) : IntList ⇒ nil)
(check-type (tl (cons 1 nil)) : IntList ⇒ nil)
(check-type (tl (cons 1 (cons 2 nil))) : IntList ⇒ (cons 2 nil))
(typecheck-fail (tl 1))
some typecheck failure
(typecheck-fail
(fld {Int} 1)
#:with-msg
"Expected μ type, got: Int")
(typecheck-fail
(unfld {Int} 1)
#:with-msg
"Expected μ type, got: Int")
previous stlc+var tests ----------------------------------------------------
;; define-type-alias
(define-type-alias Integer Int)
(define-type-alias ArithBinOp (→ Int Int Int))
( define - type - alias C Complex ) ; error , Complex undefined
(check-type ((λ ([x : Int]) (+ x 2)) 3) : Integer)
(check-type ((λ ([x : Integer]) (+ x 2)) 3) : Int)
(check-type ((λ ([x : Integer]) (+ x 2)) 3) : Integer)
(check-type + : ArithBinOp)
(check-type (λ ([f : ArithBinOp]) (f 1 2)) : (→ (→ Int Int Int) Int))
;; records (ie labeled tuples)
; no records, only tuples
(check-type "Stephen" : String)
( check - type ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) :
( × [: " name " String ] [: " phone " Int ] [: " male ? " ] ) )
( check - type ( proj ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) " name " )
: String ⇒ " " )
( check - type ( proj ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) " name " )
: String ⇒ " " )
( check - type ( proj ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) " phone " )
: Int ⇒ 781 )
( check - type ( proj ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) " male ? " )
: ⇒ # t )
( check - not - type ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) :
( × [: " my - name " String ] [: " phone " Int ] [: " male ? " ] ) )
( check - not - type ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) :
( × [: " name " String ] [: " my - phone " Int ] [: " male ? " ] ) )
( check - not - type ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) :
( × [: " name " String ] [: " phone " Int ] [: " is - male ? " ] ) )
;; variants
(check-type (var coffee = (void) as (∨ [coffee : Unit])) : (∨ [coffee : Unit]))
(check-not-type (var coffee = (void) as (∨ [coffee : Unit])) : (∨ [coffee : Unit] [tea : Unit]))
(typecheck-fail ((λ ([x : (∨ [coffee : Unit] [tea : Unit])]) x)
(var coffee = (void) as (∨ [coffee : Unit]))))
(check-type (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit])) : (∨ [coffee : Unit] [tea : Unit]))
(check-type (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit] [coke : Unit]))
: (∨ [coffee : Unit] [tea : Unit] [coke : Unit]))
(typecheck-fail
(case (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit]))
[coffee x => 1])) ; not enough clauses
(typecheck-fail
(case (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit]))
[coffee x => 1]
[teaaaaaa x => 2])) ; wrong clause
(typecheck-fail
(case (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit]))
[coffee x => 1]
[tea x => 2]
[coke x => 3])) ; too many clauses
(typecheck-fail
(case (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit]))
[coffee x => "1"]
[tea x => 2])) ; mismatched branch types
(check-type
(case (var coffee = 1 as (∨ [coffee : Int] [tea : Unit]))
[coffee x => x]
[tea x => 2]) : Int ⇒ 1)
(define-type-alias Drink (∨ [coffee : Int] [tea : Unit] [coke : Bool]))
(check-type ((λ ([x : Int]) (+ x x)) 10) : Int ⇒ 20)
(check-type (λ ([x : Int]) (+ (+ x x) (+ x x))) : (→ Int Int))
(check-type
(case ((λ ([d : Drink]) d)
(var coffee = 1 as (∨ [coffee : Int] [tea : Unit] [coke : Bool])))
[coffee x => (+ (+ x x) (+ x x))]
[tea x => 2]
[coke y => 3])
: Int ⇒ 4)
(check-type
(case ((λ ([d : Drink]) d) (var coffee = 1 as Drink))
[coffee x => (+ (+ x x) (+ x x))]
[tea x => 2]
[coke y => 3])
: Int ⇒ 4)
;; previous tests: ------------------------------------------------------------
;; tests for tuples -----------------------------------------------------------
(check-type (tup 1 2 3) : (× Int Int Int))
(check-type (tup 1 "1" #f +) : (× Int String Bool (→ Int Int Int)))
(check-not-type (tup 1 "1" #f +) : (× Unit String Bool (→ Int Int Int)))
(check-not-type (tup 1 "1" #f +) : (× Int Unit Bool (→ Int Int Int)))
(check-not-type (tup 1 "1" #f +) : (× Int String Unit (→ Int Int Int)))
(check-not-type (tup 1 "1" #f +) : (× Int String Bool (→ Int Int Unit)))
(check-type (proj (tup 1 "2" #f) 0) : Int ⇒ 1)
(check-type (proj (tup 1 "2" #f) 1) : String ⇒ "2")
(check-type (proj (tup 1 "2" #f) 2) : Bool ⇒ #f)
(typecheck-fail (proj (tup 1 "2" #f) 3)) ; index too large
(typecheck-fail
(proj 1 2)
#:with-msg
"Expected × type, got: Int")
;; ext-stlc.rkt tests ---------------------------------------------------------
;; should still pass
;; new literals and base types
(check-type "one" : String) ; literal now supported
(check-type #f : Bool) ; literal now supported
(check-type (λ ([x : Bool]) x) : (→ Bool Bool)) ; Bool is now valid type
;; Unit
(check-type (void) : Unit)
(check-type void : (→ Unit))
(typecheck-fail ((λ ([x : Unit]) x) 2))
(typecheck-fail ((λ ([x : Unit])) void))
(check-type ((λ ([x : Unit]) x) (void)) : Unit)
;; begin
(typecheck-fail (begin))
(check-type (begin 1) : Int)
( typecheck - fail ( begin 1 2 3 ) )
(check-type (begin (void) 1) : Int ⇒ 1)
;;ascription
(typecheck-fail (ann 1 : Bool))
(check-type (ann 1 : Int) : Int ⇒ 1)
(check-type ((λ ([x : Int]) (ann x : Int)) 10) : Int ⇒ 10)
; let
(check-type (let () (+ 1 1)) : Int ⇒ 2)
(check-type (let ([x 10]) (+ 1 2)) : Int)
(typecheck-fail (let ([x #f]) (+ x 1)))
(check-type (let ([x 10] [y 20]) ((λ ([z : Int] [a : Int]) (+ a z)) x y)) : Int ⇒ 30)
(typecheck-fail (let ([x 10] [y (+ x 1)]) (+ x y))) ; unbound identifier
(check-type (let* ([x 10] [y (+ x 1)]) (+ x y)) : Int ⇒ 21)
(typecheck-fail (let* ([x #t] [y (+ x 1)]) 1))
; letrec
(typecheck-fail (letrec ([(x : Int) #f] [(y : Int) 1]) y))
(typecheck-fail (letrec ([(y : Int) 1] [(x : Int) #f]) x))
(check-type (letrec ([(x : Int) 1] [(y : Int) (+ x 1)]) (+ x y)) : Int ⇒ 3)
;; recursive
(check-type
(letrec ([(countdown : (→ Int String))
(λ ([i : Int])
(if (= i 0)
"liftoff"
(countdown (- i 1))))])
(countdown 10)) : String ⇒ "liftoff")
;; mutually recursive
(check-type
(letrec ([(is-even? : (→ Int Bool))
(λ ([n : Int])
(or (zero? n)
(is-odd? (sub1 n))))]
[(is-odd? : (→ Int Bool))
(λ ([n : Int])
(and (not (zero? n))
(is-even? (sub1 n))))])
(is-odd? 11)) : Bool ⇒ #t)
tests from stlc+lit - tests.rkt --------------------------
; most should pass, some failing may now pass due to added types/forms
(check-type 1 : Int)
( check - not - type 1 : ( Int → Int ) )
;(typecheck-fail "one") ; literal now supported
;(typecheck-fail #f) ; literal now supported
(check-type (λ ([x : Int] [y : Int]) x) : (→ Int Int Int))
(check-not-type (λ ([x : Int]) x) : Int)
(check-type (λ ([x : Int]) x) : (→ Int Int))
(check-type (λ ([f : (→ Int Int)]) 1) : (→ (→ Int Int) Int))
(check-type ((λ ([x : Int]) x) 1) : Int ⇒ 1)
(typecheck-fail ((λ ([x : Bool]) x) 1)) ; Bool now valid type, but arg has wrong type
( typecheck - fail ( λ ( [ x : Bool ] ) x ) ) ; is now valid type
(typecheck-fail (λ ([f : Int]) (f 1 2))) ; applying f with non-fn type
(check-type (λ ([f : (→ Int Int Int)] [x : Int] [y : Int]) (f x y))
: (→ (→ Int Int Int) Int Int Int))
(check-type ((λ ([f : (→ Int Int Int)] [x : Int] [y : Int]) (f x y)) + 1 2) : Int ⇒ 3)
adding non - Int
(typecheck-fail (λ ([x : (→ Int Int)]) (+ x x))) ; x should be Int
(typecheck-fail ((λ ([x : Int] [y : Int]) y) 1)) ; wrong number of args
(check-type ((λ ([x : Int]) (+ x x)) 10) : Int ⇒ 20)
| null | https://raw.githubusercontent.com/stchang/macrotypes/05ec31f2e1fe0ddd653211e041e06c6c8071ffa6/turnstile-test/tests/turnstile/optimize/stlc%2Brec-iso-tests.rkt | racket | nil
cons
isnil
hd
tl
define-type-alias
error , Complex undefined
records (ie labeled tuples)
no records, only tuples
variants
not enough clauses
wrong clause
too many clauses
mismatched branch types
previous tests: ------------------------------------------------------------
tests for tuples -----------------------------------------------------------
index too large
ext-stlc.rkt tests ---------------------------------------------------------
should still pass
new literals and base types
literal now supported
literal now supported
Bool is now valid type
Unit
begin
ascription
let
unbound identifier
letrec
recursive
mutually recursive
most should pass, some failing may now pass due to added types/forms
(typecheck-fail "one") ; literal now supported
(typecheck-fail #f) ; literal now supported
Bool now valid type, but arg has wrong type
is now valid type
applying f with non-fn type
x should be Int
wrong number of args | #lang s-exp turnstile/examples/optimize/stlc+rec-iso
(require rackunit/turnstile)
(define-type-alias IntList (μ (X) (∨ [nil : Unit] [cons : (× Int X)])))
(define-type-alias ILBody (∨ [nil : Unit] [cons : (× Int IntList)]))
(define nil (fld {IntList} (var nil = (void) as ILBody)))
(check-type nil : IntList)
(define cons (λ ([n : Int] [lst : IntList]) (fld {IntList} (var cons = (tup n lst) as ILBody))))
(check-type cons : (→ Int IntList IntList))
(check-type (cons 1 nil) : IntList)
(typecheck-fail (cons 1 2))
(typecheck-fail (cons "1" nil))
(define isnil
(λ ([lst : IntList])
(case (unfld {IntList} lst)
[nil n => #t]
[cons p => #f])))
(check-type isnil : (→ IntList Bool))
(check-type (isnil nil) : Bool ⇒ #t)
(check-type (isnil (cons 1 nil)) : Bool ⇒ #f)
(typecheck-fail (isnil 1))
(typecheck-fail (isnil (cons 1 2)))
(check-type (λ ([f : (→ IntList Bool)]) (f nil)) : (→ (→ IntList Bool) Bool))
(check-type ((λ ([f : (→ IntList Bool)]) (f nil)) isnil) : Bool ⇒ #t)
(define hd
(λ ([lst : IntList])
(case (unfld {IntList} lst)
[nil n => 0]
[cons p => (proj p 0)])))
(check-type hd : (→ IntList Int))
(check-type (hd nil) : Int ⇒ 0)
(typecheck-fail (hd 1))
(check-type (hd (cons 11 nil)) : Int ⇒ 11)
(define tl
(λ ([lst : IntList])
(case (unfld {IntList} lst)
[nil n => lst]
[cons p => (proj p 1)])))
(check-type tl : (→ IntList IntList))
(check-type (tl nil) : IntList ⇒ nil)
(check-type (tl (cons 1 nil)) : IntList ⇒ nil)
(check-type (tl (cons 1 (cons 2 nil))) : IntList ⇒ (cons 2 nil))
(typecheck-fail (tl 1))
some typecheck failure
(typecheck-fail
(fld {Int} 1)
#:with-msg
"Expected μ type, got: Int")
(typecheck-fail
(unfld {Int} 1)
#:with-msg
"Expected μ type, got: Int")
previous stlc+var tests ----------------------------------------------------
(define-type-alias Integer Int)
(define-type-alias ArithBinOp (→ Int Int Int))
(check-type ((λ ([x : Int]) (+ x 2)) 3) : Integer)
(check-type ((λ ([x : Integer]) (+ x 2)) 3) : Int)
(check-type ((λ ([x : Integer]) (+ x 2)) 3) : Integer)
(check-type + : ArithBinOp)
(check-type (λ ([f : ArithBinOp]) (f 1 2)) : (→ (→ Int Int Int) Int))
(check-type "Stephen" : String)
( check - type ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) :
( × [: " name " String ] [: " phone " Int ] [: " male ? " ] ) )
( check - type ( proj ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) " name " )
: String ⇒ " " )
( check - type ( proj ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) " name " )
: String ⇒ " " )
( check - type ( proj ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) " phone " )
: Int ⇒ 781 )
( check - type ( proj ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) " male ? " )
: ⇒ # t )
( check - not - type ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) :
( × [: " my - name " String ] [: " phone " Int ] [: " male ? " ] ) )
( check - not - type ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) :
( × [: " name " String ] [: " my - phone " Int ] [: " male ? " ] ) )
( check - not - type ( tup [ " name " = " " ] [ " phone " = 781 ] [ " male ? " = # t ] ) :
( × [: " name " String ] [: " phone " Int ] [: " is - male ? " ] ) )
(check-type (var coffee = (void) as (∨ [coffee : Unit])) : (∨ [coffee : Unit]))
(check-not-type (var coffee = (void) as (∨ [coffee : Unit])) : (∨ [coffee : Unit] [tea : Unit]))
(typecheck-fail ((λ ([x : (∨ [coffee : Unit] [tea : Unit])]) x)
(var coffee = (void) as (∨ [coffee : Unit]))))
(check-type (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit])) : (∨ [coffee : Unit] [tea : Unit]))
(check-type (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit] [coke : Unit]))
: (∨ [coffee : Unit] [tea : Unit] [coke : Unit]))
(typecheck-fail
(case (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit]))
(typecheck-fail
(case (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit]))
[coffee x => 1]
(typecheck-fail
(case (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit]))
[coffee x => 1]
[tea x => 2]
(typecheck-fail
(case (var coffee = (void) as (∨ [coffee : Unit] [tea : Unit]))
[coffee x => "1"]
(check-type
(case (var coffee = 1 as (∨ [coffee : Int] [tea : Unit]))
[coffee x => x]
[tea x => 2]) : Int ⇒ 1)
(define-type-alias Drink (∨ [coffee : Int] [tea : Unit] [coke : Bool]))
(check-type ((λ ([x : Int]) (+ x x)) 10) : Int ⇒ 20)
(check-type (λ ([x : Int]) (+ (+ x x) (+ x x))) : (→ Int Int))
(check-type
(case ((λ ([d : Drink]) d)
(var coffee = 1 as (∨ [coffee : Int] [tea : Unit] [coke : Bool])))
[coffee x => (+ (+ x x) (+ x x))]
[tea x => 2]
[coke y => 3])
: Int ⇒ 4)
(check-type
(case ((λ ([d : Drink]) d) (var coffee = 1 as Drink))
[coffee x => (+ (+ x x) (+ x x))]
[tea x => 2]
[coke y => 3])
: Int ⇒ 4)
(check-type (tup 1 2 3) : (× Int Int Int))
(check-type (tup 1 "1" #f +) : (× Int String Bool (→ Int Int Int)))
(check-not-type (tup 1 "1" #f +) : (× Unit String Bool (→ Int Int Int)))
(check-not-type (tup 1 "1" #f +) : (× Int Unit Bool (→ Int Int Int)))
(check-not-type (tup 1 "1" #f +) : (× Int String Unit (→ Int Int Int)))
(check-not-type (tup 1 "1" #f +) : (× Int String Bool (→ Int Int Unit)))
(check-type (proj (tup 1 "2" #f) 0) : Int ⇒ 1)
(check-type (proj (tup 1 "2" #f) 1) : String ⇒ "2")
(check-type (proj (tup 1 "2" #f) 2) : Bool ⇒ #f)
(typecheck-fail
(proj 1 2)
#:with-msg
"Expected × type, got: Int")
(check-type (void) : Unit)
(check-type void : (→ Unit))
(typecheck-fail ((λ ([x : Unit]) x) 2))
(typecheck-fail ((λ ([x : Unit])) void))
(check-type ((λ ([x : Unit]) x) (void)) : Unit)
(typecheck-fail (begin))
(check-type (begin 1) : Int)
(typecheck-fail (begin 1 2 3))
(check-type (begin (void) 1) : Int ⇒ 1)
(typecheck-fail (ann 1 : Bool))
(check-type (ann 1 : Int) : Int ⇒ 1)
(check-type ((λ ([x : Int]) (ann x : Int)) 10) : Int ⇒ 10)
(check-type (let () (+ 1 1)) : Int ⇒ 2)
(check-type (let ([x 10]) (+ 1 2)) : Int)
(typecheck-fail (let ([x #f]) (+ x 1)))
(check-type (let ([x 10] [y 20]) ((λ ([z : Int] [a : Int]) (+ a z)) x y)) : Int ⇒ 30)
(check-type (let* ([x 10] [y (+ x 1)]) (+ x y)) : Int ⇒ 21)
(typecheck-fail (let* ([x #t] [y (+ x 1)]) 1))
(typecheck-fail (letrec ([(x : Int) #f] [(y : Int) 1]) y))
(typecheck-fail (letrec ([(y : Int) 1] [(x : Int) #f]) x))
(check-type (letrec ([(x : Int) 1] [(y : Int) (+ x 1)]) (+ x y)) : Int ⇒ 3)
(check-type
(letrec ([(countdown : (→ Int String))
(λ ([i : Int])
(if (= i 0)
"liftoff"
(countdown (- i 1))))])
(countdown 10)) : String ⇒ "liftoff")
(check-type
(letrec ([(is-even? : (→ Int Bool))
(λ ([n : Int])
(or (zero? n)
(is-odd? (sub1 n))))]
[(is-odd? : (→ Int Bool))
(λ ([n : Int])
(and (not (zero? n))
(is-even? (sub1 n))))])
(is-odd? 11)) : Bool ⇒ #t)
;; tests from stlc+lit-tests.rkt --------------------------
(check-type 1 : Int)
(check-not-type 1 : (Int → Int))
(check-type (λ ([x : Int] [y : Int]) x) : (→ Int Int Int))
(check-not-type (λ ([x : Int]) x) : Int)
(check-type (λ ([x : Int]) x) : (→ Int Int))
(check-type (λ ([f : (→ Int Int)]) 1) : (→ (→ Int Int) Int))
(check-type ((λ ([x : Int]) x) 1) : Int ⇒ 1)
(check-type (λ ([f : (→ Int Int Int)] [x : Int] [y : Int]) (f x y))
: (→ (→ Int Int Int) Int Int Int))
(check-type ((λ ([f : (→ Int Int Int)] [x : Int] [y : Int]) (f x y)) + 1 2) : Int ⇒ 3)
;; adding non-Int
(check-type ((λ ([x : Int]) (+ x x)) 10) : Int ⇒ 20)
|
4fb96b0ca89325fbaeab13b4f9641eb80a8aa1089986f04afdd684ad220d68bd | janestreet/hardcaml_verify | uid.ml | open Base
(** Interface of a unique-identifier type: sexp-convertible, comparable,
    stringable, with a staged generator of fresh ids. *)
module type S = sig
  type t [@@deriving sexp]
  include Comparable.S with type t := t
  include Stringable.S with type t := t
  (** [create starts_at] stages a function producing fresh uids,
      the first being [starts_at]. *)
  val create : int -> (unit -> t) Staged.t
end
module Int = struct
  include Int

  (* [create starts_at] yields a staged generator handing out consecutive
     integer uids, beginning at [starts_at]. *)
  let create starts_at =
    let next = ref starts_at in
    Staged.stage (fun () ->
      let current = !next in
      next := current + 1;
      current)
  ;;
end
| null | https://raw.githubusercontent.com/janestreet/hardcaml_verify/5d4f1622335caa2ec7db67de2adcddb13aa7c855/src/uid.ml | ocaml | open Base
(* Interface of a unique-identifier type: sexp-convertible, comparable,
   stringable, with a staged generator of fresh ids. *)
module type S = sig
  type t [@@deriving sexp]
  include Comparable.S with type t := t
  include Stringable.S with type t := t
  (* [create starts_at] stages a generator of fresh uids. *)
  val create : int -> (unit -> t) Staged.t
end
(* Integer uids: [create starts_at] hands out consecutive ints. *)
module Int = struct
  include Int
  let create starts_at =
    let uid = ref starts_at in
    Staged.stage (fun () ->
      let ret = !uid in
      Int.incr uid;
      ret)
  ;;
end
| |
a405dd94e35fc89f0a728af2c4a1ece1700338dd317f8c5d33879b001f57f9c3 | jackdoe/bzzz | term.clj | (ns bzzz.queries.term
(use bzzz.util)
(:import (org.apache.lucene.search TermQuery)
(org.apache.lucene.index Term)))
(defn parse
  "Build a Lucene TermQuery from the :field/:value pair of `input`.
   An optional :boost (default 1) is applied to the query; :field is
   mandatory (checked via `need`)."
  [generic input analyzer]
  (let [{fld :field, v :value, boost :boost, :or {boost 1}} input
        term (Term. ^String (need fld "need <field>") ^String v)]
    (doto (TermQuery. term)
      (.setBoost boost))))
| null | https://raw.githubusercontent.com/jackdoe/bzzz/ae98708056e39ada28f22aad9e43ea91695b346b/src/bzzz/queries/term.clj | clojure | (ns bzzz.queries.term
(use bzzz.util)
(:import (org.apache.lucene.search TermQuery)
(org.apache.lucene.index Term)))
(defn parse
[generic input analyzer]
(let [{:keys [field value boost]
:or {boost 1}} input
q (TermQuery. (Term. ^String (need field "need <field>") ^String value))]
(.setBoost q boost)
q))
| |
8f395a2d6354dddc796a46f7fae87b30fab54265e49bd4c9dde3c640294e1445 | xvw/ocamlectron | Versions.mli | (** Versions allowed *)
open Js_of_ocaml
open Js
(** JS object exposing two read-only string fields, [chrome] and
    [electron] — presumably the corresponding version strings (cf.
    Electron's [process.versions]); TODO confirm against the bindings'
    usage. *)
class type versions = object
  method chrome : (js_string t) readonly_prop
  method electron : (js_string t) readonly_prop
end
(** A JavaScript value of class type [versions]. *)
type t = versions Js.t
| null | https://raw.githubusercontent.com/xvw/ocamlectron/3e0cb9575975e69ab34cb7e0e3549d31c07141c2/lib/electron_plumbing/Versions.mli | ocaml | * Versions allowed | open Js_of_ocaml
open Js
class type versions = object
method chrome : (js_string t) readonly_prop
method electron : (js_string t) readonly_prop
end
type t = versions Js.t
|
19cf12115cb030189a328157f1c4082d7705153cdf48420f32dc26321445ef16 | TrustInSoft/tis-kernel | unify.mli | (**************************************************************************)
(* *)
This file is part of .
(* *)
is a fork of Frama - C. All the differences are :
Copyright ( C ) 2016 - 2017
(* *)
is released under GPLv2
(* *)
(**************************************************************************)
(**************************************************************************)
(* *)
This file is part of WP plug - in of Frama - C.
(* *)
Copyright ( C ) 2007 - 2015
CEA ( Commissariat a l'energie atomique et aux energies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
(* -------------------------------------------------------------------------- *)
*
(* -------------------------------------------------------------------------- *)
open Logic
(** Functor building a unification environment over the given ADT and
    field implementations. *)
module Make(ADT:Data)(Field:Field) :
sig
  (* Unification state — the name suggests a most-general-unifier
     context; all operations below thread it explicitly. *)
  type mgu
  (* A type term as manipulated by the unifier. *)
  type t
  type tau = (Field.t,ADT.t) datatype
  type signature = (Field.t,ADT.t) funtype
  (* [create resolve] makes a fresh state; [resolve] maps an ADT to its
     definition when one exists. *)
  val create : (ADT.t -> tau option) -> mgu
  (* A fresh type variable in the given state. *)
  val fresh : mgu -> t
  val int : t
  val real : t
  val bool : t
  val prop : t
  val quoted : mgu -> string -> t
  val array : t -> t -> t
  val record : (Field.t * t) list -> t
  val data : ADT.t -> t list -> t
  val typedef : t array -> tau -> t
  (* Convert between concrete [tau]/[signature] and unifier terms. *)
  val of_tau : mgu -> tau -> t
  val of_sig : mgu -> signature -> t * t list
  (* Unify two terms in place; side-effects the [mgu] state. *)
  val unify : mgu -> t -> t -> unit
  val sort : mgu -> t -> sort
  val fields : mgu -> t -> (Field.t * t) list
  val generalize : mgu -> t -> tau
  val final_degree : mgu -> int
  (** Number of polymorphic variables yet computed by [generalize] *)
  val pretty : mgu -> Format.formatter -> t -> unit
end
| null | https://raw.githubusercontent.com/TrustInSoft/tis-kernel/748d28baba90c03c0f5f4654d2e7bb47dfbe4e7d/src/plugins/wp/qed/top/unify.mli | ocaml | ************************************************************************
************************************************************************
************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
--------------------------------------------------------------------------
--------------------------------------------------------------------------
* Number of polymorphic variables yet computed by [generalize] | This file is part of .
is a fork of Frama - C. All the differences are :
Copyright ( C ) 2016 - 2017
is released under GPLv2
This file is part of WP plug - in of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat a l'energie atomique et aux energies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
*
open Logic
module Make(ADT:Data)(Field:Field) :
sig
type mgu
type t
type tau = (Field.t,ADT.t) datatype
type signature = (Field.t,ADT.t) funtype
val create : (ADT.t -> tau option) -> mgu
val fresh : mgu -> t
val int : t
val real : t
val bool : t
val prop : t
val quoted : mgu -> string -> t
val array : t -> t -> t
val record : (Field.t * t) list -> t
val data : ADT.t -> t list -> t
val typedef : t array -> tau -> t
val of_tau : mgu -> tau -> t
val of_sig : mgu -> signature -> t * t list
val unify : mgu -> t -> t -> unit
val sort : mgu -> t -> sort
val fields : mgu -> t -> (Field.t * t) list
val generalize : mgu -> t -> tau
val final_degree : mgu -> int
val pretty : mgu -> Format.formatter -> t -> unit
end
|
ab645942770feee7c93fcfa0d233f6a52809835cede963d16c7b89e1ba1c0195 | tomjridge/tjr_simple_earley | examples.ml | (** Some examples *)
(** Internal: grammars defined abstractly *)
module Internal = struct
  (* [example_grammars p] builds the five example grammars using the
     abstract rule/grammar constructors supplied by the object [p]. *)
  let example_grammars p =
    let ( --> ) = p#make_rule in
    let _1,_2,_3 = p#_1,p#_2,p#_3 in
    let _E,_S,a = p#_E,p#_S,p#a in
    (* Bind the three terminals directly; the previous refutable list
       pattern [let [one;eps;x] = List.map a ...] raised a
       partial-match warning (warning 8). *)
    let one = a "1" in
    let eps = a "" in
    let x = a "x" in
    let _EEE =
      p#grammar
        ~name:"EEE"
        ~descr:"Very ambiguous grammar, for testing Earley"
        ~initial_nt:_E
        ~rules:[
          _E -->_3 (_E,_E,_E);
          _E -->_1 one;
          _E -->_1 eps;
        ]
    in
    let aho_s =
      p#grammar
        ~name:"aho_s"
        ~descr:"Aho et al. example grammar"
        ~initial_nt:_S
        ~rules:[
          _S -->_3 (x,_S,_S);
          _S -->_1 eps
        ]
    in
    let aho_sml =
      p#grammar
        ~name:"aho_sml"
        ~descr:"Aho et al. example grammar 2"
        ~initial_nt:_S
        ~rules:[
          _S -->_3 (_S,_S,x);
          _S -->_1 eps
        ]
    in
    let brackets =
      p#grammar
        ~name:"brackets"
        ~descr:
          "Well-bracketed expressions, in a particular nasty form for parsing"
        ~initial_nt:_E
        ~rules:[
          _E -->_2 (_E,_E);
          _E -->_3 (a"(",_E,a")");
          _E -->_1 eps
        ]
    in
    let _S_xSx =
      p#grammar
        ~name:"S_xSx"
        ~descr:"Unambiguous grammar that favours right-most parsers"
        ~initial_nt:_S
        ~rules:[
          _S -->_3 (x,_S,x);
          _S -->_1 x
        ]
    in
    [_EEE;aho_s;aho_sml;brackets;_S_xSx]
end
(** A named tuple for tagging grammars in a slightly more digestible
form than a plain tuple *)
type ('a,'b) grammar = {
  name:string;       (* short identifier, e.g. "EEE" *)
  descr:string;      (* human-readable description *)
  initial_nt:'a;     (* start nonterminal *)
  rules:'b           (* the production rules *)
}
(** Example instantiation with strings for symbols *)
module Example_instantiation = struct
  (* Concrete representation: every symbol is a string; a rule is an
     (nt, rhs) pair. *)
  type nt = string
  type tm = string
  type sym = string
  type rule = nt * sym list
  let make_rule nt rhs = (nt,rhs)
  (* rhs builders for rules of length 1, 2 and 3 *)
  let _1 s = [s]
  let _2 (s1,s2) = [s1;s2]
  let _3 (s1,s2,s3) = [s1;s2;s3]
  (* the symbols used by the example grammars *)
  let _E = "E"
  let _S = "S"
  let a s = s
  let eps = ""
  let one = "1"
  let x = "x"
  let grammar ~name ~descr ~initial_nt ~rules = {name;descr;initial_nt;rules}
  (* Instantiate [Internal]'s abstract constructors with the
     string-based representation above. *)
  let example_grammars =
    let p = object
      method _1 = _1
      method _2 = _2
      method _3 = _3
      method make_rule = make_rule
      method grammar = grammar
      method _E = _E
      method _S = _S
      method a = a
      method eps = eps
      method one = one
      method x = x
    end
    in
    Internal.example_grammars p
  let _ = example_grammars
  module Export = struct
    (** NOTE nonterminals and terminals are represented by strings *)
    let grammar_names = ["EEE";"aho_s";"aho_sml";"brackets";"S_xSx"]
    (* Raises [Not_found] (via [List.find]) for an unknown name. *)
    let get_grammar_by_name name =
      example_grammars |> List.find (fun g -> g.name = name)
    (** We also want to get grammars with type [grammar_etc] *)
    open Prelude
    (** Hack to determine nt/tm based on string repr starting with a
        capital letter *)
    let is_nt nt = nt <> "" && (String.get nt 0 |> function
      | 'A' .. 'Z' -> true
      | _ -> false)
    open Spec_types
    let string_to_sym s = match is_nt s with
      | true -> Nt s
      | false -> Tm s
    (** NOTE this returns a partial [grammar_etc] (input and
        input_length are dummies), and nt_items are a tuple
        [(nt,i,k,bs)] *)
    let _get_grammar_etc_by_name name =
      get_grammar_by_name name
      |> fun { rules; _ } ->
      (* all rules for [nt], as items with the dot at [pos] *)
      let new_items ~nt ~input ~pos =
        rules |> Misc.rev_filter_map (function (nt',rhs) ->
          match nt'=nt with
          | false -> None
          | true ->
            let bs = List.map string_to_sym rhs in
            Some {nt;i_=pos;k_=pos;bs});
      in
      (* literal-prefix match of terminal [tm] at [pos] *)
      let parse_tm ~tm ~input ~pos ~input_length =
        match Misc.string_matches_at ~string:input ~sub:tm ~pos with
        | true -> [pos+(String.length tm)]
        | false -> []
      in
      { new_items; parse_tm; input=""; input_length=(-1) }
    let get_grammar_etc_by_name ~name ~input ~input_length =
      _get_grammar_etc_by_name name |> fun g ->
      { g with input; input_length }
    (** Returns a non-partial [grammar_etc] *)
  end
end
* Package example grammars as a function from grammar name . Example
names are : EEE , aho_s , aho_sml , brackets , S_xSx
{ % html :
< pre >
let _ EEE =
p#grammar
~name:"EEE "
~descr:"Very ambiguous grammar , for testing "
~rules : [
_ E -->_3 ( _ E,_E,_E ) ;
_ E -->_1 one ;
_ E -->_1 eps ;
]
in
let aho_s =
p#grammar
~name:"aho_s "
~descr:"Aho et al . example grammar "
~rules : [
_ S -->_3 ( x,_S,_S ) ;
_ S -->_1 eps
]
in
let aho_sml =
p#grammar
~name:"aho_sml "
~descr:"Aho et al . example grammar 2 "
~rules : [
_ S -->_3 ( _ S,_S , x ) ;
_ S -->_1 eps
]
in
let brackets =
p#grammar
~name:"brackets "
~descr :
" Well - bracketed expressions , in a particular nasty form for parsing "
~rules : [
_ E -->_2 ( _ E,_E ) ;
_ E -->_3 ( a"(",_E , a " ) " ) ;
_ E -->_1 eps
]
in
let _ S_xSx =
p#grammar
~name:"S_xSx "
~descr:"Unambiguous grammar that favours right - most parsers "
~rules : [
_ S -->_3 ( one,_S , one ) ;
_ S -->_1 one
]
< /pre >
% }
names are: EEE, aho_s, aho_sml, brackets, S_xSx
{%html:
<pre>
let _EEE =
p#grammar
~name:"EEE"
~descr:"Very ambiguous grammar, for testing Earley"
~rules:[
_E -->_3 (_E,_E,_E);
_E -->_1 one;
_E -->_1 eps;
]
in
let aho_s =
p#grammar
~name:"aho_s"
~descr:"Aho et al. example grammar"
~rules:[
_S -->_3 (x,_S,_S);
_S -->_1 eps
]
in
let aho_sml =
p#grammar
~name:"aho_sml"
~descr:"Aho et al. example grammar 2"
~rules:[
_S -->_3 (_S,_S,x);
_S -->_1 eps
]
in
let brackets =
p#grammar
~name:"brackets"
~descr:
"Well-bracketed expressions, in a particular nasty form for parsing"
~rules:[
_E -->_2 (_E,_E);
_E -->_3 (a"(",_E,a")");
_E -->_1 eps
]
in
let _S_xSx =
p#grammar
~name:"S_xSx"
~descr:"Unambiguous grammar that favours right-most parsers"
~rules:[
_S -->_3 (one,_S,one);
_S -->_1 one
]
</pre>
%}
*)
include Example_instantiation.Export
| null | https://raw.githubusercontent.com/tomjridge/tjr_simple_earley/ca558e0e7f4ddba4cd6573bf180710cd02f25ba4/_archive/.dockerfile/tjr_simple_earley/src/examples.ml | ocaml | * Some examples
* Internal: grammmars defined abstractly
* A named tuple for tagging grammars in a slightly more digestible
form than a plain tuple
* Example instantiation with strings for symbols
* NOTE nonterminals and terminals are represented by strings
* We also want to get grammars with type [grammar_etc]
* Hack to determine nt/tm based on string repr starting with a
capital letter
* NOTE this returns a partial [grammar_etc] (input and
input_length are dummies), and nt_items are a tuple
[(nt,i,k,bs)]
* Returns a non-partial [grammar_etc] |
module Internal = struct
let example_grammars p =
let ( --> ) = p#make_rule in
let _1,_2,_3 = p#_1,p#_2,p#_3 in
let _E,_S,a = p#_E,p#_S,p#a in
let [one;eps;x] = List.map a ["1";"";"x"] in
let _EEE =
p#grammar
~name:"EEE"
~descr:"Very ambiguous grammar, for testing Earley"
~initial_nt:_E
~rules:[
_E -->_3 (_E,_E,_E);
_E -->_1 one;
_E -->_1 eps;
]
in
let aho_s =
p#grammar
~name:"aho_s"
~descr:"Aho et al. example grammar"
~initial_nt:_S
~rules:[
_S -->_3 (x,_S,_S);
_S -->_1 eps
]
in
let aho_sml =
p#grammar
~name:"aho_sml"
~descr:"Aho et al. example grammar 2"
~initial_nt:_S
~rules:[
_S -->_3 (_S,_S,x);
_S -->_1 eps
]
in
let brackets =
p#grammar
~name:"brackets"
~descr:
"Well-bracketed expressions, in a particular nasty form for parsing"
~initial_nt:_E
~rules:[
_E -->_2 (_E,_E);
_E -->_3 (a"(",_E,a")");
_E -->_1 eps
]
in
let _S_xSx =
p#grammar
~name:"S_xSx"
~descr:"Unambiguous grammar that favours right-most parsers"
~initial_nt:_S
~rules:[
_S -->_3 (x,_S,x);
_S -->_1 x
]
in
[_EEE;aho_s;aho_sml;brackets;_S_xSx]
end
type ('a,'b) grammar = {
name:string;
descr:string;
initial_nt:'a;
rules:'b
}
module Example_instantiation = struct
type nt = string
type tm = string
type sym = string
type rule = nt * sym list
let make_rule nt rhs = (nt,rhs)
let _1 s = [s]
let _2 (s1,s2) = [s1;s2]
let _3 (s1,s2,s3) = [s1;s2;s3]
let _E = "E"
let _S = "S"
let a s = s
let eps = ""
let one = "1"
let x = "x"
let grammar ~name ~descr ~initial_nt ~rules = {name;descr;initial_nt;rules}
let example_grammars =
let p = object
method _1 = _1
method _2 = _2
method _3 = _3
method make_rule = make_rule
method grammar = grammar
method _E = _E
method _S = _S
method a = a
method eps = eps
method one = one
method x = x
end
in
Internal.example_grammars p
let _ = example_grammars
module Export = struct
let grammar_names = ["EEE";"aho_s";"aho_sml";"brackets";"S_xSx"]
let get_grammar_by_name name =
example_grammars |> List.find (fun g -> g.name = name)
open Prelude
let is_nt nt = nt <> "" && (String.get nt 0 |> function
| 'A' .. 'Z' -> true
| _ -> false)
open Spec_types
let string_to_sym s = match is_nt s with
| true -> Nt s
| false -> Tm s
let _get_grammar_etc_by_name name =
get_grammar_by_name name
|> fun { rules; _ } ->
let new_items ~nt ~input ~pos =
rules |> Misc.rev_filter_map (function (nt',rhs) ->
match nt'=nt with
| false -> None
| true ->
let bs = List.map string_to_sym rhs in
Some {nt;i_=pos;k_=pos;bs});
in
let parse_tm ~tm ~input ~pos ~input_length =
match Misc.string_matches_at ~string:input ~sub:tm ~pos with
| true -> [pos+(String.length tm)]
| false -> []
in
{ new_items; parse_tm; input=""; input_length=(-1) }
let get_grammar_etc_by_name ~name ~input ~input_length =
_get_grammar_etc_by_name name |> fun g ->
{ g with input; input_length }
end
end
* Package example grammars as a function from grammar name . Example
names are : EEE , aho_s , aho_sml , brackets , S_xSx
{ % html :
< pre >
let _ EEE =
p#grammar
~name:"EEE "
~descr:"Very ambiguous grammar , for testing "
~rules : [
_ E -->_3 ( _ E,_E,_E ) ;
_ E -->_1 one ;
_ E -->_1 eps ;
]
in
let aho_s =
p#grammar
~name:"aho_s "
~descr:"Aho et al . example grammar "
~rules : [
_ S -->_3 ( x,_S,_S ) ;
_ S -->_1 eps
]
in
let aho_sml =
p#grammar
~name:"aho_sml "
~descr:"Aho et al . example grammar 2 "
~rules : [
_ S -->_3 ( _ S,_S , x ) ;
_ S -->_1 eps
]
in
let brackets =
p#grammar
~name:"brackets "
~descr :
" Well - bracketed expressions , in a particular nasty form for parsing "
~rules : [
_ E -->_2 ( _ E,_E ) ;
_ E -->_3 ( a"(",_E , a " ) " ) ;
_ E -->_1 eps
]
in
let _ S_xSx =
p#grammar
~name:"S_xSx "
~descr:"Unambiguous grammar that favours right - most parsers "
~rules : [
_ S -->_3 ( one,_S , one ) ;
_ S -->_1 one
]
< /pre >
% }
names are: EEE, aho_s, aho_sml, brackets, S_xSx
{%html:
<pre>
let _EEE =
p#grammar
~name:"EEE"
~descr:"Very ambiguous grammar, for testing Earley"
~rules:[
_E -->_3 (_E,_E,_E);
_E -->_1 one;
_E -->_1 eps;
]
in
let aho_s =
p#grammar
~name:"aho_s"
~descr:"Aho et al. example grammar"
~rules:[
_S -->_3 (x,_S,_S);
_S -->_1 eps
]
in
let aho_sml =
p#grammar
~name:"aho_sml"
~descr:"Aho et al. example grammar 2"
~rules:[
_S -->_3 (_S,_S,x);
_S -->_1 eps
]
in
let brackets =
p#grammar
~name:"brackets"
~descr:
"Well-bracketed expressions, in a particular nasty form for parsing"
~rules:[
_E -->_2 (_E,_E);
_E -->_3 (a"(",_E,a")");
_E -->_1 eps
]
in
let _S_xSx =
p#grammar
~name:"S_xSx"
~descr:"Unambiguous grammar that favours right-most parsers"
~rules:[
_S -->_3 (one,_S,one);
_S -->_1 one
]
</pre>
%}
*)
include Example_instantiation.Export
|
941dc35686b6bae4830d84a4edc34f6ce5dd6573a7128a7394526858462d7428 | GracielaUSB/graciela | Monad.hs | |
Module : Language . . . Monad
Description : The parsing monad for Graciela
Copyright : © 2015 - 2016 moises+
Stability : experimental
Portability : POSIX
This is a modified ParsecT monad with a custom state , operating on a
stream of TokenPos .
Module : Language.Graciela.Parser.Monad
Description : The parsing monad for Graciela
Copyright : © 2015-2016 Graciela USB
Maintainer : moises+
Stability : experimental
Portability : POSIX
This is a modified ParsecT monad with a custom state, operating on a
stream of TokenPos.
-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts           #-}
{-# LANGUAGE FlexibleInstances          #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses      #-}
{-# LANGUAGE NamedFieldPuns             #-}
{-# LANGUAGE TupleSections              #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_HADDOCK show-extensions #-}
module Language.Graciela.Parser.Monad
( ParserT
, Parser
, MonadParser (..)
, evalParserT
, execParserT
, runParserT
, evalParser
, execParser
, runParser
, satisfy
, match
, anyToken
, oneOf
, noneOf
, followedBy
, boolLit
, charLit
, integerLit
, floatLit
, stringLit
, identifier
, safeIdentifier
, identifierAndLoc
, parens
, brackets
-- , block
, percents
, beginEnd
, some
, many
, endBy
, endBy1
, sepBy
, sepBy1
, sepEndBy
, sepEndBy1
, some'
, many'
, sepBy1'
, declarative
) where
--------------------------------------------------------------------------------
import Language.Graciela.AST.Struct
import Language.Graciela.AST.Type (Type)
import Language.Graciela.Common
import Language.Graciela.Error
import Language.Graciela.Parser.Config (Config (..), defaultConfig)
import Language.Graciela.Parser.Prim ()
import Language.Graciela.Parser.State hiding (State)
import qualified Language.Graciela.Parser.State as Parser (State)
import Language.Graciela.Token (Token (..), TokenPos (..))
--------------------------------------------------------------------------------
import Control.Applicative (Alternative)
import Control.Lens (use, view, (%=), (.=), (<<.=),
(<~), (^.), _1, _2)
import Control.Monad (MonadPlus)
import Control.Monad.Identity (Identity (..))
import Control.Monad.Reader (MonadReader (..), asks)
import Control.Monad.State (MonadState)
import Control.Monad.Trans.Except (ExceptT (..), catchE,
runExceptT, throwE)
import Control.Monad.Trans.Reader (ReaderT (..), runReaderT)
import Control.Monad.Trans.State (StateT (..), evalStateT)
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NE (fromList)
import qualified Data.Map.Strict as Map (empty, lookup)
import Data.Sequence (Seq, (<|), (|>))
import qualified Data.Sequence as Seq (empty, singleton)
import qualified Data.Set as Set (empty, singleton)
import Data.Text (Text)
import Text.Megaparsec (ErrorItem (..),
ParseError (..), ParsecT,
between, getPosition,
lookAhead, manyTill,
withRecovery, (<|>))
import qualified Text.Megaparsec as Mega (runParserT)
import Text.Megaparsec.Error (parseErrorTextPretty)
import Text.Megaparsec.Prim (MonadParsec (..))
--------------------------------------------------------------------------------
-- | Graciela parser monad transformer.
-- A megaparsec 'ParsecT' over a stream of 'TokenPos', with a read-only
-- 'Config' (ReaderT) and a mutable 'Parser.State' (StateT) underneath.
newtype ParserT m a = ParserT
  { unParserT :: ParsecT Error [TokenPos] (ReaderT Config (StateT Parser.State m)) a }
  deriving ( Functor, Applicative, Monad
           , MonadState Parser.State
           , MonadParsec Error [TokenPos]
           , MonadReader Config
           , MonadPlus, Alternative)
-- | Graciela parser monad.
type Parser = ParserT Identity
--------------------------------------------------------------------------------
-- | Evaluate a parser computation with the given filename, stream of tokens,
-- and initial state, and return a tuple with the final value and state.
runParserT :: Monad m
           => ParserT m (Maybe a)
           -> FilePath
           -> Parser.State
           -> [TokenPos]
           -> m (Either (ParseError TokenPos Error) a, Parser.State)
runParserT p fp s input = runStateT (runReaderT flatten cfg) s
  where
    -- Trace / address-of support is switched on by pragmas recorded in
    -- the initial state.
    cfg = defaultConfig (EnableTrace `elem` s^.pragmas) (GetAddressOf `elem` s^.pragmas)
    flatten = do
      -- Seed the mutable state with the native definitions and symbols
      -- coming from the configuration.
      definitions <~ asks nativeFunctions
      symbolTable <~ asks nativeSymbols
      x <- Mega.runParserT (unParserT p) fp input
      -- NOTE(review): a successful parse yielding 'Nothing'
      -- (@Right Nothing@) is not matched below and would crash with a
      -- pattern-match failure; confirm callers' parsers always return
      -- 'Just' on success.
      pure $ case x of
        Right (Just v) -> Right v
        Left e -> Left e
-- | Evaluate a parser computation with the given filename, stream of tokens,
-- and initial state, discarding the final state.
evalParserT :: Monad m
            => ParserT m (Maybe a)
            -> FilePath
            -> Parser.State
            -> [TokenPos]
            -> m (Either (ParseError TokenPos Error) a)
-- Keep only the parse outcome; the final state is dropped.
evalParserT p fp s input = fst <$> runParserT p fp s input
-- | Evaluate a parser computation with the given filename, stream of tokens,
-- and initial state, discarding the final value.
execParserT :: Monad m
            => ParserT m (Maybe a)
            -> FilePath
            -> Parser.State
            -> [TokenPos]
            -> m Parser.State
-- Keep only the final state; the parse outcome is dropped.
execParserT p fp s input = snd <$> runParserT p fp s input
--------------------------------------------------------------------------------
-- Pure ('Identity') specialisations of the *T runners above.
-- | Evaluate a parser computation with the given filename, stream of tokens,
-- and initial state, and return a tuple with the final value and state.
runParser :: Parser (Maybe a)
          -> FilePath
          -> Parser.State
          -> [TokenPos]
          -> (Either (ParseError TokenPos Error) a, Parser.State)
runParser p fp s input = runIdentity $ runParserT p fp s input
-- | Evaluate a parser computation with the given filename, stream of tokens,
-- and initial state, discarding the final state.
evalParser :: Parser (Maybe a)
          -> FilePath
          -> Parser.State
          -> [TokenPos]
          -> Either (ParseError TokenPos Error) a
evalParser p fp s input = runIdentity $ evalParserT p fp s input
-- | Evaluate a parser computation with the given filename, stream of tokens,
-- and initial state, discarding the final value.
execParser :: Parser (Maybe a)
          -> FilePath
          -> Parser.State
          -> [TokenPos]
          -> Parser.State
execParser p fp s input = runIdentity $ execParserT p fp s input
--------------------------------------------------------------------------------
infixl 3 <!>
infixl 3 <!!>
-- | Primitives shared by 'ParserT' and monads stacked on top of it
-- (see the 'StateT' instance below).
class MonadParsec Error [TokenPos] p => MonadParser p where
  -- Record a (non-fatal) compile error at the given position.
  putError :: SourcePos -> Error -> p ()
  -- Look up a native (built-in) type by name.
  getType :: Text -> p (Maybe Type)
  -- Look up a user-declared data type by name.
  getStruct :: Text -> p (Maybe Struct)
  -- Consume one token satisfying the predicate, with its source span.
  satisfy' :: (Token -> Bool) -> p TokenPos
  -- Match a specific token, recovering (with a recorded error) when absent.
  match' :: Token -> p Location
  -- Try the parser; on alternative failure record the error and yield Nothing.
  (<!>) :: p (Maybe a) -> (SourcePos, Error) -> p (Maybe a)
  a <!> (p, e) = a <|> (putError p e *> pure Nothing)
  -- Like '<!>', but wraps a plain parser's result in 'Just' first.
  (<!!>) :: p a -> (SourcePos, Error) -> p (Maybe a)
  a <!!> b = Just <$> a <!> b
-- | Canonical implementation: delegate to the p* primitives below.
instance Monad m => MonadParser (ParserT m) where
  putError = pPutError
  getType = pGetType
  getStruct = pGetStruct
  satisfy' = pSatisfy'
  match' = pMatch'
-- | Lift through a 'StateT' layer, so stateful sub-parsers keep access
-- to the same primitives.
instance MonadParser g => MonadParser (StateT s g) where
  putError l e = lift $ putError l e
  getType = lift . getType
  getStruct = lift . getStruct
  satisfy' = lift . satisfy'
  match' = lift . match'
-- | Append a 'ParseError' (carrying one custom 'Error' at the given
-- position) to the accumulated error sequence; parsing continues.
pPutError :: Monad m => SourcePos -> Error -> ParserT m ()
pPutError from e = ParserT $ do
  let
    err = ParseError (NE.fromList [from]) Set.empty Set.empty (Set.singleton e)
  errors %= (|> err)
-- | Look up a built-in ("native") type by name in the read-only
-- configuration; 'Nothing' when the name does not denote one.
pGetType :: (Monad m)
         => Text -> ParserT m (Maybe Type)
pGetType name = do
  nativeTys <- asks nativeTypes
  -- the map stores (type, location) pairs; only the type is wanted here
  pure (fst <$> Map.lookup name nativeTys)
-- | Look up a user-declared data type by name in the mutable parser state.
pGetStruct :: (Monad m)
           => Text -> ParserT m (Maybe Struct)
pGetStruct name = do
  structs <- use dataTypes
  pure (Map.lookup name structs)
-- | Recovery strategy used by 'followedBy': report the parse error at
-- the current position, skip tokens until the follow parser (or end of
-- input) would match, and yield 'Nothing'.
pRecover :: MonadParser m
         => m b
         -> ParseError TokenPos Error
         -> m (Maybe a)
pRecover follow e = do
  pos <- getPosition
  putError pos . UnknownError . init $ parseErrorTextPretty e
  -- drop tokens up to (but without consuming) the follow set
  void $ anyToken `manyTill` (void (lookAhead follow) <|> eof)
  pure Nothing
-- | Primitive token parser: succeed with the full 'TokenPos' when the
-- predicate accepts the token, otherwise fail marking it as unexpected.
pSatisfy' :: Monad m
          => (Token -> Bool) -> ParserT m TokenPos
pSatisfy' f = token test Nothing
  where
    test tp @ TokenPos { tok } =
      if f tok
        then Right tp
        else Left . unex $ tp
    -- package the offending token as megaparsec "unexpected" data
    unex = (, Set.empty, Set.empty) . Set.singleton . Tokens . (:|[])
-- | Match a specific token; when it is absent, record the failure
-- (tagged with the expected token) and return a zero-width location at
-- the current position so parsing can continue.
pMatch' :: Monad m
        => Token-> ParserT m Location
pMatch' t = withRecovery recover (match t)
  where
    recover e = do
      pos <- getPosition
      -- Modify the error, so it knows the expected token (there is
      -- obviously a better way, IDK right now)
      let
        from :| _ = errorPos e
        expected = Set.singleton . Tokens . NE.fromList $ [TokenPos from from t]
        loc = Location (pos, pos)
      errors %= (|> e { errorExpected = expected } )
      pure loc
--------------------------------------------------------------------------------
-- | Consume one token satisfying the predicate (position discarded).
satisfy :: MonadParser m
        => (Token -> Bool) -> m Token
satisfy f = tok <$> satisfy' f
-- | Match one specific token and return its source span.
match :: MonadParser m
      => Token -> m Location
match t = do
  TokenPos { start, end } <- satisfy' (== t)
  pure $ Location (start, end)
-- | Consume any single token.
anyToken :: MonadParser m => m Token
anyToken = satisfy (const True)
-- | Consume a token that is a member of the given collection.
oneOf :: (Foldable f, MonadParser m)
      => f Token -> m Token
oneOf ts = satisfy (`elem` ts)
-- | Consume a token that is not a member of the given collection.
noneOf :: (Foldable f, MonadParser m)
       => f Token -> m Token
noneOf ts = satisfy (`notElem` ts)
-- | Run the parser, requiring (without consuming) the follow parser
-- afterwards; on failure recover via 'pRecover', skipping to the follow
-- set and yielding 'Nothing'.
followedBy :: (MonadParser m)
           => m (Maybe a) -> m b -> m (Maybe a)
followedBy p follow =
  withRecovery (pRecover follow) (p <* lookAhead follow)
--------------------------------------------------------------------------------
-- | Parse a boolean literal token.
boolLit :: MonadParser m
        => m Bool
boolLit = unTokBool <$> satisfy bool
  where
    bool TokBool {} = True
    bool _ = False
-- | Parse a character literal token.
charLit :: MonadParser m
        => m Char
charLit = unTokChar <$> satisfy char
  where
    char TokChar {} = True
    char _ = False
-- | Parse an integer literal token.
integerLit :: MonadParser m
        => m Int32
integerLit = unTokInteger <$> satisfy string
  where
    string TokInteger {} = True
    string _ = False
-- | Parse a floating-point literal token.
floatLit :: MonadParser m
        => m Double
floatLit = unTokFloat <$> satisfy float
  where
    float TokFloat {} = True
    float _ = False
-- | Parse a string literal token.
stringLit :: MonadParser m
        => m Text
stringLit = unTokString <$> satisfy string
  where
    string TokString {} = True
    string _ = False
identifier :: MonadParser m
=> m Text
identifier = unTokId <$> satisfy ident
where
ident TokId {} = True
ident _ = False
safeIdentifier :: MonadParser m
=> m (Maybe Text)
safeIdentifier = withRecovery recover (Just <$> identifier)
where
recover e = do
pos <- getPosition
putError pos . UnknownError $
"An identifier was expected but none was given."
pure Nothing
-- | Match an identifier and return both its name and location
identifierAndLoc :: MonadParser m
=> m (Text, Location)
identifierAndLoc = do
TokenPos { tok = TokId name, start, end } <- satisfy' ident
pure (name, Location (start, end))
where
ident TokId {} = True
ident _ = False
--------------------------------------------------------------------------------
parens :: MonadParser m
=> m a -> m a
parens = between
(match TokLeftPar )
(match' TokRightPar)
brackets :: MonadParser m
=> m a -> m a
brackets = between
(match TokLeftBracket )
(match' TokRightBracket)
block : : MonadParser m
-- => m a -> m a
-- block = between
-- (match TokOpenBlock )
-- (match TokCloseBlock)
percents :: MonadParser m
=> m a -> m a
percents = between
(match TokLeftPercent )
(match' TokRightPercent)
beginEnd :: MonadParser m
=> m a -> m a
beginEnd = between
(match TokBegin)
(match' TokEnd )
--------------------------------------------------------------------------------
-- | One or more.
some :: Alternative m => m a -> m (Seq a)
some v = some_v
where
many_v = some_v <|> pure Seq.empty
some_v = fmap (<|) v <*> many_v
# INLINE some #
-- | Zero or more.
many :: Alternative m => m a -> m (Seq a)
many v = many_v
where
many_v = some_v <|> pure Seq.empty
some_v = fmap (<|) v <*> many_v
# INLINE many #
-- | One or more, carrying a state.
some' :: (Monad m, Alternative m) => (a -> m a) -> a -> m a
some' v s = v s >>= many' v
-- | Zero or more, carrying a state
many' :: (Monad m, Alternative m) => (a -> m a) -> a -> m a
many' v s = some' v s <|> pure s
| @endBy p sep@ parses or more occurrences of @p@ , separated
and ended by @sep@. Returns a sequence of values returned by @p@.
--
-- > cStatements = cStatement `endBy` semicolon
endBy :: Alternative m => m a -> m sep -> m (Seq a)
endBy p sep = many (p <* sep)
# INLINE endBy #
| @endBy1 p sep@ parses /one/ or more occurrences of @p@ , separated
and ended by @sep@. Returns a sequence of values returned by @p@.
endBy1 :: Alternative m => m a -> m sep -> m (Seq a)
endBy1 p sep = some (p <* sep)
# INLINE endBy1 #
| @sepBy p sep@ parses or more occurrences of @p@ , separated
by @sep@. Returns a sequence of values returned by @p@.
--
> commaSep p = p ` sepBy ` comma
sepBy :: Alternative m => m a -> m sep -> m (Seq a)
sepBy p sep = sepBy1 p sep <|> pure Seq.empty
# INLINE sepBy #
| @sepBy1 p sep@ parses /one/ or more occurrences of @p@ , separated
by @sep@. Returns a sequence of values returned by @p@.
sepBy1 :: Alternative m => m a -> m sep -> m (Seq a)
sepBy1 p sep = (<|) <$> p <*> many (sep *> p)
# INLINE sepBy1 #
-- | @sepBy1' s p sep@ parses /one/ or more occurrences of @p s@, separated
by @sep@. Returns a sequence of values returned by @p@.
sepBy1' :: (Monad m, Alternative m) => (a -> m a) -> m sep -> a -> m a
sepBy1' p sep s = p s >>= many' (\t -> sep *> p t)
| @sepEndBy p sep@ parses or more occurrences of @p@ ,
separated and optionally ended by @sep@. Returns a sequence of values
returned by @p@.
sepEndBy :: Alternative m => m a -> m sep -> m (Seq a)
sepEndBy p sep = sepEndBy1 p sep <|> pure Seq.empty
# INLINE sepEndBy #
| @sepEndBy1 p sep@ parses /one/ or more occurrences of @p@ ,
separated and optionally ended by @sep@. Returns a list of values
returned by @p@.
sepEndBy1 :: Alternative m => m a -> m sep -> m (Seq a)
sepEndBy1 p sep = (<|) <$> p <*> ((sep *> sepEndBy p sep) <|> pure Seq.empty)
--------------------------------------------------------------------------------
declarative :: (MonadParser m, MonadState Parser.State m)
=> m a -> m a
declarative p = (isDeclarative <<.= True) >>= \x -> (p <* (isDeclarative .= x))
--------------------------------------------------------------------------------
| null | https://raw.githubusercontent.com/GracielaUSB/graciela/db69c8b225d6172aaa0ff90a67f4a997e4d8a0c6/src/Haskell/Language/Graciela/Parser/Monad.hs | haskell | # LANGUAGE DeriveFunctor #
# LANGUAGE UndecidableInstances #
# OPTIONS_HADDOCK show-extensions #
, block
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Evaluate a parser computation with the given filename, stream of tokens,
and initial state, and return a tuple with the final value and state.
| Evaluate a parser computation with the given filename, stream of tokens,
and initial state, discarding the final state.
| Evaluate a parser computation with the given filename, stream of tokens,
and initial state, discarding the final value.
------------------------------------------------------------------------------
| Evaluate a parser computation with the given filename, stream of tokens,
and initial state, and return a tuple with the final value and state.
| Evaluate a parser computation with the given filename, stream of tokens,
and initial state, discarding the final state.
| Evaluate a parser computation with the given filename, stream of tokens,
and initial state, discarding the final value.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Match an identifier and return both its name and location
------------------------------------------------------------------------------
=> m a -> m a
block = between
(match TokOpenBlock )
(match TokCloseBlock)
------------------------------------------------------------------------------
| One or more.
| Zero or more.
| One or more, carrying a state.
| Zero or more, carrying a state
> cStatements = cStatement `endBy` semicolon
| @sepBy1' s p sep@ parses /one/ or more occurrences of @p s@, separated
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | |
Module : Language . . . Monad
Description : The parsing monad for Graciela
Copyright : © 2015 - 2016 moises+
Stability : experimental
Portability : POSIX
This is a modified ParsecT monad with a custom state , operating on a
stream of TokenPos .
Module : Language.Graciela.Parser.Monad
Description : The parsing monad for Graciela
Copyright : © 2015-2016 Graciela USB
Maintainer : moises+
Stability : experimental
Portability : POSIX
This is a modified ParsecT monad with a custom state, operating on a
stream of TokenPos.
-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NamedFieldPuns #
# LANGUAGE TupleSections #
module Language.Graciela.Parser.Monad
( ParserT
, Parser
, MonadParser (..)
, evalParserT
, execParserT
, runParserT
, evalParser
, execParser
, runParser
, satisfy
, match
, anyToken
, oneOf
, noneOf
, followedBy
, boolLit
, charLit
, integerLit
, floatLit
, stringLit
, identifier
, safeIdentifier
, identifierAndLoc
, parens
, brackets
, percents
, beginEnd
, some
, many
, endBy
, endBy1
, sepBy
, sepBy1
, sepEndBy
, sepEndBy1
, some'
, many'
, sepBy1'
, declarative
) where
import Language.Graciela.AST.Struct
import Language.Graciela.AST.Type (Type)
import Language.Graciela.Common
import Language.Graciela.Error
import Language.Graciela.Parser.Config (Config (..), defaultConfig)
import Language.Graciela.Parser.Prim ()
import Language.Graciela.Parser.State hiding (State)
import qualified Language.Graciela.Parser.State as Parser (State)
import Language.Graciela.Token (Token (..), TokenPos (..))
import Control.Applicative (Alternative)
import Control.Lens (use, view, (%=), (.=), (<<.=),
(<~), (^.), _1, _2)
import Control.Monad (MonadPlus)
import Control.Monad.Identity (Identity (..))
import Control.Monad.Reader (MonadReader (..), asks)
import Control.Monad.State (MonadState)
import Control.Monad.Trans.Except (ExceptT (..), catchE,
runExceptT, throwE)
import Control.Monad.Trans.Reader (ReaderT (..), runReaderT)
import Control.Monad.Trans.State (StateT (..), evalStateT)
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NE (fromList)
import qualified Data.Map.Strict as Map (empty, lookup)
import Data.Sequence (Seq, (<|), (|>))
import qualified Data.Sequence as Seq (empty, singleton)
import qualified Data.Set as Set (empty, singleton)
import Data.Text (Text)
import Text.Megaparsec (ErrorItem (..),
ParseError (..), ParsecT,
between, getPosition,
lookAhead, manyTill,
withRecovery, (<|>))
import qualified Text.Megaparsec as Mega (runParserT)
import Text.Megaparsec.Error (parseErrorTextPretty)
import Text.Megaparsec.Prim (MonadParsec (..))
| monad transformer .
newtype ParserT m a = ParserT
{ unParserT :: ParsecT Error [TokenPos] (ReaderT Config (StateT Parser.State m)) a }
deriving ( Functor, Applicative, Monad
, MonadState Parser.State
, MonadParsec Error [TokenPos]
, MonadReader Config
, MonadPlus, Alternative)
| monad .
type Parser = ParserT Identity
runParserT :: Monad m
=> ParserT m (Maybe a)
-> FilePath
-> Parser.State
-> [TokenPos]
-> m (Either (ParseError TokenPos Error) a, Parser.State)
runParserT p fp s input = runStateT (runReaderT flatten cfg) s
where
cfg = defaultConfig (EnableTrace `elem` s^.pragmas) (GetAddressOf `elem` s^.pragmas)
flatten = do
definitions <~ asks nativeFunctions
symbolTable <~ asks nativeSymbols
x <- Mega.runParserT (unParserT p) fp input
pure $ case x of
Right (Just v) -> Right v
Left e -> Left e
evalParserT :: Monad m
=> ParserT m (Maybe a)
-> FilePath
-> Parser.State
-> [TokenPos]
-> m (Either (ParseError TokenPos Error) a)
evalParserT p fp s input = view _1 <$> runParserT p fp s input
execParserT :: Monad m
=> ParserT m (Maybe a)
-> FilePath
-> Parser.State
-> [TokenPos]
-> m Parser.State
execParserT p fp s input = view _2 <$> runParserT p fp s input
runParser :: Parser (Maybe a)
-> FilePath
-> Parser.State
-> [TokenPos]
-> (Either (ParseError TokenPos Error) a, Parser.State)
runParser p fp s input = runIdentity $ runParserT p fp s input
evalParser :: Parser (Maybe a)
-> FilePath
-> Parser.State
-> [TokenPos]
-> Either (ParseError TokenPos Error) a
evalParser p fp s input = runIdentity $ evalParserT p fp s input
execParser :: Parser (Maybe a)
-> FilePath
-> Parser.State
-> [TokenPos]
-> Parser.State
execParser p fp s input = runIdentity $ execParserT p fp s input
infixl 3 <!>
infixl 3 <!!>
class MonadParsec Error [TokenPos] p => MonadParser p where
putError :: SourcePos -> Error -> p ()
getType :: Text -> p (Maybe Type)
getStruct :: Text -> p (Maybe Struct)
satisfy' :: (Token -> Bool) -> p TokenPos
match' :: Token -> p Location
(<!>) :: p (Maybe a) -> (SourcePos, Error) -> p (Maybe a)
a <!> (p, e) = a <|> (putError p e *> pure Nothing)
(<!!>) :: p a -> (SourcePos, Error) -> p (Maybe a)
a <!!> b = Just <$> a <!> b
instance Monad m => MonadParser (ParserT m) where
putError = pPutError
getType = pGetType
getStruct = pGetStruct
satisfy' = pSatisfy'
match' = pMatch'
instance MonadParser g => MonadParser (StateT s g) where
putError l e = lift $ putError l e
getType = lift . getType
getStruct = lift . getStruct
satisfy' = lift . satisfy'
match' = lift . match'
pPutError :: Monad m => SourcePos -> Error -> ParserT m ()
pPutError from e = ParserT $ do
let
err = ParseError (NE.fromList [from]) Set.empty Set.empty (Set.singleton e)
errors %= (|> err)
pGetType :: (Monad m)
=> Text -> ParserT m (Maybe Type)
pGetType name = do
types <- asks nativeTypes
case name `Map.lookup` types of
Just (t, loc) -> return $ Just t
Nothing -> return Nothing
pGetStruct :: (Monad m)
=> Text -> ParserT m (Maybe Struct)
pGetStruct name = Map.lookup name <$> use dataTypes
pRecover :: MonadParser m
=> m b
-> ParseError TokenPos Error
-> m (Maybe a)
pRecover follow e = do
pos <- getPosition
putError pos . UnknownError . init $ parseErrorTextPretty e
void $ anyToken `manyTill` (void (lookAhead follow) <|> eof)
pure Nothing
pSatisfy' :: Monad m
=> (Token -> Bool) -> ParserT m TokenPos
pSatisfy' f = token test Nothing
where
test tp @ TokenPos { tok } =
if f tok
then Right tp
else Left . unex $ tp
unex = (, Set.empty, Set.empty) . Set.singleton . Tokens . (:|[])
pMatch' :: Monad m
=> Token-> ParserT m Location
pMatch' t = withRecovery recover (match t)
where
recover e = do
pos <- getPosition
Modify the error , so it knows the expected token ( there is obviously a better way , IDK right now )
let
from :| _ = errorPos e
expected = Set.singleton . Tokens . NE.fromList $ [TokenPos from from t]
loc = Location (pos, pos)
errors %= (|> e { errorExpected = expected } )
pure loc
satisfy :: MonadParser m
=> (Token -> Bool) -> m Token
satisfy f = tok <$> satisfy' f
match :: MonadParser m
=> Token -> m Location
match t = do
TokenPos { start, end } <- satisfy' (== t)
pure $ Location (start, end)
anyToken :: MonadParser m => m Token
anyToken = satisfy (const True)
oneOf :: (Foldable f, MonadParser m)
=> f Token -> m Token
oneOf ts = satisfy (`elem` ts)
noneOf :: (Foldable f, MonadParser m)
=> f Token -> m Token
noneOf ts = satisfy (`notElem` ts)
followedBy :: (MonadParser m)
=> m (Maybe a) -> m b -> m (Maybe a)
followedBy p follow =
withRecovery (pRecover follow) (p <* lookAhead follow)
boolLit :: MonadParser m
=> m Bool
boolLit = unTokBool <$> satisfy bool
where
bool TokBool {} = True
bool _ = False
charLit :: MonadParser m
=> m Char
charLit = unTokChar <$> satisfy char
where
char TokChar {} = True
char _ = False
integerLit :: MonadParser m
=> m Int32
integerLit = unTokInteger <$> satisfy string
where
string TokInteger {} = True
string _ = False
floatLit :: MonadParser m
=> m Double
floatLit = unTokFloat <$> satisfy float
where
float TokFloat {} = True
float _ = False
stringLit :: MonadParser m
=> m Text
stringLit = unTokString <$> satisfy string
where
string TokString {} = True
string _ = False
identifier :: MonadParser m
=> m Text
identifier = unTokId <$> satisfy ident
where
ident TokId {} = True
ident _ = False
safeIdentifier :: MonadParser m
=> m (Maybe Text)
safeIdentifier = withRecovery recover (Just <$> identifier)
where
recover e = do
pos <- getPosition
putError pos . UnknownError $
"An identifier was expected but none was given."
pure Nothing
identifierAndLoc :: MonadParser m
=> m (Text, Location)
identifierAndLoc = do
TokenPos { tok = TokId name, start, end } <- satisfy' ident
pure (name, Location (start, end))
where
ident TokId {} = True
ident _ = False
parens :: MonadParser m
=> m a -> m a
parens = between
(match TokLeftPar )
(match' TokRightPar)
brackets :: MonadParser m
=> m a -> m a
brackets = between
(match TokLeftBracket )
(match' TokRightBracket)
block : : MonadParser m
percents :: MonadParser m
=> m a -> m a
percents = between
(match TokLeftPercent )
(match' TokRightPercent)
beginEnd :: MonadParser m
=> m a -> m a
beginEnd = between
(match TokBegin)
(match' TokEnd )
some :: Alternative m => m a -> m (Seq a)
some v = some_v
where
many_v = some_v <|> pure Seq.empty
some_v = fmap (<|) v <*> many_v
# INLINE some #
many :: Alternative m => m a -> m (Seq a)
many v = many_v
where
many_v = some_v <|> pure Seq.empty
some_v = fmap (<|) v <*> many_v
# INLINE many #
some' :: (Monad m, Alternative m) => (a -> m a) -> a -> m a
some' v s = v s >>= many' v
many' :: (Monad m, Alternative m) => (a -> m a) -> a -> m a
many' v s = some' v s <|> pure s
| @endBy p sep@ parses or more occurrences of @p@ , separated
and ended by @sep@. Returns a sequence of values returned by @p@.
endBy :: Alternative m => m a -> m sep -> m (Seq a)
endBy p sep = many (p <* sep)
# INLINE endBy #
| @endBy1 p sep@ parses /one/ or more occurrences of @p@ , separated
and ended by @sep@. Returns a sequence of values returned by @p@.
endBy1 :: Alternative m => m a -> m sep -> m (Seq a)
endBy1 p sep = some (p <* sep)
# INLINE endBy1 #
| @sepBy p sep@ parses or more occurrences of @p@ , separated
by @sep@. Returns a sequence of values returned by @p@.
> commaSep p = p ` sepBy ` comma
sepBy :: Alternative m => m a -> m sep -> m (Seq a)
sepBy p sep = sepBy1 p sep <|> pure Seq.empty
# INLINE sepBy #
| @sepBy1 p sep@ parses /one/ or more occurrences of @p@ , separated
by @sep@. Returns a sequence of values returned by @p@.
sepBy1 :: Alternative m => m a -> m sep -> m (Seq a)
sepBy1 p sep = (<|) <$> p <*> many (sep *> p)
# INLINE sepBy1 #
by @sep@. Returns a sequence of values returned by @p@.
sepBy1' :: (Monad m, Alternative m) => (a -> m a) -> m sep -> a -> m a
sepBy1' p sep s = p s >>= many' (\t -> sep *> p t)
| @sepEndBy p sep@ parses or more occurrences of @p@ ,
separated and optionally ended by @sep@. Returns a sequence of values
returned by @p@.
sepEndBy :: Alternative m => m a -> m sep -> m (Seq a)
sepEndBy p sep = sepEndBy1 p sep <|> pure Seq.empty
# INLINE sepEndBy #
| @sepEndBy1 p sep@ parses /one/ or more occurrences of @p@ ,
separated and optionally ended by @sep@. Returns a list of values
returned by @p@.
sepEndBy1 :: Alternative m => m a -> m sep -> m (Seq a)
sepEndBy1 p sep = (<|) <$> p <*> ((sep *> sepEndBy p sep) <|> pure Seq.empty)
declarative :: (MonadParser m, MonadState Parser.State m)
=> m a -> m a
declarative p = (isDeclarative <<.= True) >>= \x -> (p <* (isDeclarative .= x))
|
06581c1f8f6e055c477b497080feab6d38c07b49133cadfbe9a5a4ce57c44294 | stuartsierra/mapgraph | readme.clj | (ns com.stuartsierra.mapgraph.readme
"Examples used in documentation"
(:require [com.stuartsierra.mapgraph :as mg]
[com.stuartsierra.mapgraph.examples :as examples]))
(def db (atom (mg/new-db)))
(swap! db mg/add-id-attr :user/id :color/hex)
(swap! db mg/add
{:user/id 1
:user/name "Pat"
:user/favorite-color {:color/hex "9C27B0"
:color/name "Purple"}}
{:user/id 2
:user/name "Reese"
:user/favorite-color {:color/hex "D50000"
:color/name "Red"}})
(get @db [:user/id 2])
= > { : user / id 2 ,
: user / name " Reese " ,
;; :user/favorite-color [:color/hex "D50000"]}
(mg/pull @db
[:user/name {:user/favorite-color [:color/name]}]
[:user/id 2])
= > { : user / name " Reese " ,
;; :user/favorite-color {:color/name "Red"}}
(swap! db
mg/add
{:user/id 1
:user/profession "Programmer"})
(mg/pull @db
[:user/id :user/name :user/profession]
[:user/id 1])
{ : user / id 1 ,
: user / name " " ,
;; :user/profession "Programmer"}
(swap! db
mg/add
{:user/id 1
:user/friends #{{:user/id 2}}}
{:user/id 2
:user/friends #{{:user/id 1}}})
(mg/pull @db
[:user/name
{:user/friends [:user/name
{:user/friends [:user/name]}]}]
[:user/id 1])
= > { : user / name " " ,
: user / friends # { { : user / name " Reese " ,
: user / friends # { { : user / name " " } } } } }
(swap! db dissoc [:user/id 2])
(mg/pull @db '[*] [:user/id 2])
;;=> nil
(mg/pull @db
[:user/name
{:user/friends [:user/name]}]
[:user/id 1])
= > { : user / name " " ,
;; :user/friends #{}}
(swap! db mg/add
{:user/id 1
:user/favorite-sports '(hockey tennis golf)})
(mg/pull @db
[:user/name :user/favorite-sports]
[:user/id 1])
= > { : user / name " " , : user / favorite - sports ( hockey tennis golf ) }
(swap! db mg/add
{:user/id 1
:user/favorite-sports '(tennis polo)})
(mg/pull @db
[:user/name :user/favorite-sports]
[:user/id 1])
= > { : user / name " " , : user / favorite - sports ( tennis polo ) }
(mg/pull examples/hosts
[:host/ip
:host/rules
{:host/gateway [:host/ip]
:host/peers [:host/ip]
:host/connections [:host/name]}]
[:host/ip "10.10.1.1"])
= > { : host / ip " 10.10.1.1 " ,
: host / rules { " input " { " block " " * " , " allow " 80 } ,
" output " { " allow " 80 } } ,
: host / gateway { : host / ip " 10.10.10.1 " } ,
: host / peers # { { : host / ip " 10.10.1.3 " }
{ : host / ip " 10.10.1.2 " } } ,
;; :host/connections {"database" {:host/name "db"},
[ " cache " " level2 " ] { : host / name " cache " } } }
(try (swap! db mg/add {:user/id 3 :user/friends [{:user/id 1} "Bob"]})
(catch Throwable t t))
;; #error
;; {:reason ::mg/mixed-collection,
;; ::mg/attribute :user/friends,
: : mg / value [ { : user / id 1 } " " ] }
| null | https://raw.githubusercontent.com/stuartsierra/mapgraph/15886ad6f2f5b1c49df0970c9cccc9e72f2afe6e/test/com/stuartsierra/mapgraph/readme.clj | clojure | :user/favorite-color [:color/hex "D50000"]}
:user/favorite-color {:color/name "Red"}}
:user/profession "Programmer"}
=> nil
:user/friends #{}}
:host/connections {"database" {:host/name "db"},
#error
{:reason ::mg/mixed-collection,
::mg/attribute :user/friends, | (ns com.stuartsierra.mapgraph.readme
"Examples used in documentation"
(:require [com.stuartsierra.mapgraph :as mg]
[com.stuartsierra.mapgraph.examples :as examples]))
(def db (atom (mg/new-db)))
(swap! db mg/add-id-attr :user/id :color/hex)
(swap! db mg/add
{:user/id 1
:user/name "Pat"
:user/favorite-color {:color/hex "9C27B0"
:color/name "Purple"}}
{:user/id 2
:user/name "Reese"
:user/favorite-color {:color/hex "D50000"
:color/name "Red"}})
(get @db [:user/id 2])
= > { : user / id 2 ,
: user / name " Reese " ,
(mg/pull @db
[:user/name {:user/favorite-color [:color/name]}]
[:user/id 2])
= > { : user / name " Reese " ,
(swap! db
mg/add
{:user/id 1
:user/profession "Programmer"})
(mg/pull @db
[:user/id :user/name :user/profession]
[:user/id 1])
{ : user / id 1 ,
: user / name " " ,
(swap! db
mg/add
{:user/id 1
:user/friends #{{:user/id 2}}}
{:user/id 2
:user/friends #{{:user/id 1}}})
(mg/pull @db
[:user/name
{:user/friends [:user/name
{:user/friends [:user/name]}]}]
[:user/id 1])
= > { : user / name " " ,
: user / friends # { { : user / name " Reese " ,
: user / friends # { { : user / name " " } } } } }
(swap! db dissoc [:user/id 2])
(mg/pull @db '[*] [:user/id 2])
(mg/pull @db
[:user/name
{:user/friends [:user/name]}]
[:user/id 1])
= > { : user / name " " ,
(swap! db mg/add
{:user/id 1
:user/favorite-sports '(hockey tennis golf)})
(mg/pull @db
[:user/name :user/favorite-sports]
[:user/id 1])
= > { : user / name " " , : user / favorite - sports ( hockey tennis golf ) }
(swap! db mg/add
{:user/id 1
:user/favorite-sports '(tennis polo)})
(mg/pull @db
[:user/name :user/favorite-sports]
[:user/id 1])
= > { : user / name " " , : user / favorite - sports ( tennis polo ) }
(mg/pull examples/hosts
[:host/ip
:host/rules
{:host/gateway [:host/ip]
:host/peers [:host/ip]
:host/connections [:host/name]}]
[:host/ip "10.10.1.1"])
= > { : host / ip " 10.10.1.1 " ,
: host / rules { " input " { " block " " * " , " allow " 80 } ,
" output " { " allow " 80 } } ,
: host / gateway { : host / ip " 10.10.10.1 " } ,
: host / peers # { { : host / ip " 10.10.1.3 " }
{ : host / ip " 10.10.1.2 " } } ,
[ " cache " " level2 " ] { : host / name " cache " } } }
(try (swap! db mg/add {:user/id 3 :user/friends [{:user/id 1} "Bob"]})
(catch Throwable t t))
: : mg / value [ { : user / id 1 } " " ] }
|
cbbd284c84ae7ef1e9a9e48d0fb8177605143977c5ba163b01909e033e47b55d | ssardina/ergo | basic-elevator.scm | This is a version of the elevator domain , formalized as in
original in , where the actions take numeric arguments .
The basic action theory : two fluents and three actions
(define-fluents
floor 7 ; where the elevator is located
on-buttons '(3 5)) ; the list of call buttons that are on
(define-action (up n) ; go up to floor n
#:prereq (< floor n)
floor n)
(define-action (down n) ; go down to floor n
#:prereq (> floor n)
floor n)
(define-action (turnoff n) ; turn off the call button for floor n
on-buttons (remove n on-buttons))
;; Get to floor n using an up action, a down action, or no action
(define (go-floor n)
(:choose (:act (up n)) (:test (= floor n)) (:act (down n))))
Serve all the floors and then park
(define (serve-floors)
(:begin
(:until (null? on-buttons)
(:for-some n on-buttons (go-floor n) (:act (turnoff n))))
(go-floor 1)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Main program: run the elevator using the above procedure
(define (main) (display (ergo-do #:mode 'first (serve-floors))))
| null | https://raw.githubusercontent.com/ssardina/ergo/4225ebb95779d1748f377cf2e4d0a593d6a2a103/Examples/basic-elevator.scm | scheme | where the elevator is located
the list of call buttons that are on
go up to floor n
go down to floor n
turn off the call button for floor n
Get to floor n using an up action, a down action, or no action
Main program: run the elevator using the above procedure | This is a version of the elevator domain , formalized as in
original in , where the actions take numeric arguments .
The basic action theory : two fluents and three actions
(define-fluents
#:prereq (< floor n)
floor n)
#:prereq (> floor n)
floor n)
on-buttons (remove n on-buttons))
(define (go-floor n)
(:choose (:act (up n)) (:test (= floor n)) (:act (down n))))
Serve all the floors and then park
(define (serve-floors)
(:begin
(:until (null? on-buttons)
(:for-some n on-buttons (go-floor n) (:act (turnoff n))))
(go-floor 1)))
(define (main) (display (ergo-do #:mode 'first (serve-floors))))
|
da2d5e213a6868856bfc66cad5b8853b970e7c331d6d570dc9a86320420eb37f | jimcrayne/jhc | tc230.hs | {-# OPTIONS -fglasgow-exts #-}
Trac # 1445
module Bug where
f :: () -> (?p :: ()) => () -> ()
f _ _ = ()
g :: (?p :: ()) => ()
g = f () ()
| null | https://raw.githubusercontent.com/jimcrayne/jhc/1ff035af3d697f9175f8761c8d08edbffde03b4e/regress/tests/1_typecheck/2_pass/ghc/uncat/tc230.hs | haskell | # OPTIONS -fglasgow-exts # |
Trac # 1445
module Bug where
f :: () -> (?p :: ()) => () -> ()
f _ _ = ()
g :: (?p :: ()) => ()
g = f () ()
|
02871c9806ee6a9c2e8d8f49af967a71ee9cf5b415c532de80a25e3c485a58e9 | diagrams/diagrams-lib | Direction.hs | # LANGUAGE DeriveFunctor #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE TypeFamilies #-}
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
-----------------------------------------------------------------------------
-- |
-- Module : Diagrams.Direction
Copyright : ( c ) 2014 diagrams - lib team ( see LICENSE )
-- License : BSD-style (see LICENSE)
-- Maintainer :
--
-- Type for representing directions, polymorphic in vector space
--
-----------------------------------------------------------------------------
module Diagrams.Direction
( Direction
, _Dir
, direction, dir, fromDirection, fromDir
, angleBetweenDirs
, dirBetween
) where
import Control.Lens (Iso', iso)
import Diagrams.Angle
import Diagrams.Core
import Linear.Affine
import Linear.Metric
import Linear.Vector
--------------------------------------------------------------------------------
-- Direction
-- | A vector is described by a @Direction@ and a magnitude. So we
-- can think of a @Direction@ as a vector that has forgotten its
-- magnitude. @Direction@s can be used with 'fromDirection' and the
-- lenses provided by its instances.
newtype Direction v n = Dir (v n)
deriving (Read, Show, Eq, Ord, Functor) -- todo: special instances
type instance V (Direction v n) = v
type instance N (Direction v n) = n
instance (V (v n) ~ v, N (v n) ~ n, Transformable (v n)) => Transformable (Direction v n) where
transform t (Dir v) = Dir (transform t v)
instance HasTheta v => HasTheta (Direction v) where
_theta = _Dir . _theta
instance HasPhi v => HasPhi (Direction v) where
_phi = _Dir . _phi
-- | _Dir is provided to allow efficient implementations of functions
-- in particular vector-spaces, but should be used with care as it
-- exposes too much information.
_Dir :: Iso' (Direction v n) (v n)
_Dir = iso (\(Dir v) -> v) Dir
| @direction v@ is the direction in which @v@ points . Returns an
unspecified value when given the zero vector as input .
direction :: v n -> Direction v n
direction = Dir
-- | Synonym for 'direction'.
dir :: v n -> Direction v n
dir = Dir
| @fromDirection d@ is the unit vector in the direction @d@.
fromDirection :: (Metric v, Floating n) => Direction v n -> v n
fromDirection (Dir v) = signorm v
-- | Synonym for 'fromDirection'.
fromDir :: (Metric v, Floating n) => Direction v n -> v n
fromDir (Dir v) = signorm v
| compute the positive angle between the two directions in their common plane
angleBetweenDirs :: (Metric v, Floating n, Ord n)
=> Direction v n -> Direction v n -> Angle n
angleBetweenDirs d1 d2 = angleBetween (fromDirection d1) (fromDirection d2)
| @dirBetween p q@ returns the direction from @p@ to @q@.
dirBetween :: (Additive v, Num n) => Point v n -> Point v n -> Direction v n
dirBetween p q = dir $ q .-. p
| null | https://raw.githubusercontent.com/diagrams/diagrams-lib/ed8276e7babecace51aad34b3dfd608847be2c47/src/Diagrams/Direction.hs | haskell | # LANGUAGE TypeFamilies #
---------------------------------------------------------------------------
|
Module : Diagrams.Direction
License : BSD-style (see LICENSE)
Maintainer :
Type for representing directions, polymorphic in vector space
---------------------------------------------------------------------------
------------------------------------------------------------------------------
Direction
| A vector is described by a @Direction@ and a magnitude. So we
can think of a @Direction@ as a vector that has forgotten its
magnitude. @Direction@s can be used with 'fromDirection' and the
lenses provided by its instances.
todo: special instances
| _Dir is provided to allow efficient implementations of functions
in particular vector-spaces, but should be used with care as it
exposes too much information.
| Synonym for 'direction'.
| Synonym for 'fromDirection'. | # LANGUAGE DeriveFunctor #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
Copyright : ( c ) 2014 diagrams - lib team ( see LICENSE )
module Diagrams.Direction
( Direction
, _Dir
, direction, dir, fromDirection, fromDir
, angleBetweenDirs
, dirBetween
) where
import Control.Lens (Iso', iso)
import Diagrams.Angle
import Diagrams.Core
import Linear.Affine
import Linear.Metric
import Linear.Vector
newtype Direction v n = Dir (v n)
type instance V (Direction v n) = v
type instance N (Direction v n) = n
instance (V (v n) ~ v, N (v n) ~ n, Transformable (v n)) => Transformable (Direction v n) where
transform t (Dir v) = Dir (transform t v)
instance HasTheta v => HasTheta (Direction v) where
_theta = _Dir . _theta
instance HasPhi v => HasPhi (Direction v) where
_phi = _Dir . _phi
_Dir :: Iso' (Direction v n) (v n)
_Dir = iso (\(Dir v) -> v) Dir
| @direction v@ is the direction in which @v@ points . Returns an
unspecified value when given the zero vector as input .
direction :: v n -> Direction v n
direction = Dir
dir :: v n -> Direction v n
dir = Dir
| @fromDirection d@ is the unit vector in the direction @d@.
fromDirection :: (Metric v, Floating n) => Direction v n -> v n
fromDirection (Dir v) = signorm v
fromDir :: (Metric v, Floating n) => Direction v n -> v n
fromDir (Dir v) = signorm v
| compute the positive angle between the two directions in their common plane
angleBetweenDirs :: (Metric v, Floating n, Ord n)
=> Direction v n -> Direction v n -> Angle n
angleBetweenDirs d1 d2 = angleBetween (fromDirection d1) (fromDirection d2)
| @dirBetween p q@ returns the direction from @p@ to @q@.
dirBetween :: (Additive v, Num n) => Point v n -> Point v n -> Direction v n
dirBetween p q = dir $ q .-. p
|
d788e3bae172ff6ebae51f5fe645e7bb5e7cb6937e137441d4c476bc2f8deff1 | sealchain-project/sealchain | Repl.hs | # LANGUAGE CPP #
-- |
Copyright : ( C ) 2016
-- License : BSD-style (see the file LICENSE)
--
module Main where
#if !defined(ghcjs_HOST_OS)
import qualified Pact.Main as Repl
#endif
-- | Entry point: runs the REPL from @Pact.Main@, except under GHCJS,
--   where no command-line REPL exists and we fail with an error.
main :: IO ()
main =
#if defined(ghcjs_HOST_OS)
  error "Error: command line REPL does not exist in GHCJS mode"
#else
  Repl.main
#endif
| null | https://raw.githubusercontent.com/sealchain-project/sealchain/e97b4bac865fb147979cb14723a12c716a62e51e/pact/executables/Repl.hs | haskell | |
License : BSD-style (see the file LICENSE)
| # LANGUAGE CPP #
Copyright : ( C ) 2016
module Main where
#if !defined(ghcjs_HOST_OS)
import qualified Pact.Main as Repl
#endif
main :: IO ()
main =
#if defined(ghcjs_HOST_OS)
error "Error: command line REPL does not exist in GHCJS mode"
#else
Repl.main
#endif
|
22f761b4994b1261271df71422bc791a12c20e66742c8a43a3964e90b02539c5 | input-output-hk/marlowe-cardano | Codec.hs | -----------------------------------------------------------------------------
--
-- Module : $Headers
-- License     : Apache 2.0
--
-- Stability : Experimental
-- Portability : Portable
--
-- | Coding and decoding.
--
-----------------------------------------------------------------------------
{-# LANGUAGE FlexibleContexts #-}
module Language.Marlowe.CLI.Codec
( -- * Codecs
decodeBech32
, encodeBech32
) where
import Control.Monad.Except (MonadError, MonadIO, liftIO)
import Language.Marlowe.CLI.IO (liftCli, liftCliMaybe)
import Language.Marlowe.CLI.Types (CliError)
import System.IO (hPutStrLn, stderr)
import qualified Codec.Binary.Bech32 as Bech32
(dataPartFromBytes, dataPartToBytes, decodeLenient, encodeLenient, humanReadablePartFromText, humanReadablePartToText)
import qualified Data.ByteString.Base16 as Base16 (decode, encode)
import qualified Data.ByteString.Char8 as BS (pack, unpack)
import qualified Data.Text as T (pack, unpack)
-- | Decode Bech32 data: the human-readable part is reported on stderr
-- and the base-16 rendering of the data part is printed to stdout.
decodeBech32 :: MonadError CliError m
             => MonadIO m
             => String  -- ^ The Bech32 data.
             -> m ()    -- ^ Action to print the decoded data.
decodeBech32 text =
  do
    (hrp, dp) <-
      liftCli
        . Bech32.decodeLenient
        $ T.pack text
    hex <-
      liftCliMaybe "Failed decoding data part."
        $ BS.unpack . Base16.encode
          <$> Bech32.dataPartToBytes dp
    let
      hrp' = T.unpack $ Bech32.humanReadablePartToText hrp
    liftIO . hPutStrLn stderr $ "Human-readable part: " <> hrp'
    liftIO $ putStrLn hex
-- | Encode Bech32 data from a human-readable prefix and base-16 data,
-- printing the encoded text to stdout.
encodeBech32 :: MonadError CliError m
             => MonadIO m
             => String  -- ^ The human-readable prefix.
             -> String  -- ^ The base 16 data to be encoded.
             -> m ()    -- ^ Action to print the encoded data.
encodeBech32 humanReadablePart dataPart =
  do
    -- Validate the prefix as a Bech32 human-readable part.
    humanReadablePart' <-
      liftCli
        . Bech32.humanReadablePartFromText
        $ T.pack humanReadablePart
    -- Decode the hex input into bytes and wrap it as a Bech32 data part.
    datapart' <-
      liftCli
        . fmap Bech32.dataPartFromBytes
        . Base16.decode
        $ BS.pack dataPart
    let
      encoded =
        T.unpack
          $ Bech32.encodeLenient humanReadablePart' datapart'
    liftIO $ putStrLn encoded
| null | https://raw.githubusercontent.com/input-output-hk/marlowe-cardano/78a3dbb1cd692146b7d1a32e1e66faed884f2432/marlowe-cli/src/Language/Marlowe/CLI/Codec.hs | haskell | ---------------------------------------------------------------------------
Module : $Headers
Stability : Experimental
| Coding and decoding.
---------------------------------------------------------------------------
* Codecs
| Decode Bech32 data.
^ The Bech32 data.
^ Action to print the decoded data.
| Encode Bech32 data.
^ The human-readable prefix.
^ Acction to print the encoded data. | License : Apache 2.0
Portability : Portable
# LANGUAGE FlexibleContexts #
module Language.Marlowe.CLI.Codec
decodeBech32
, encodeBech32
) where
import Control.Monad.Except (MonadError, MonadIO, liftIO)
import Language.Marlowe.CLI.IO (liftCli, liftCliMaybe)
import Language.Marlowe.CLI.Types (CliError)
import System.IO (hPutStrLn, stderr)
import qualified Codec.Binary.Bech32 as Bech32
(dataPartFromBytes, dataPartToBytes, decodeLenient, encodeLenient, humanReadablePartFromText, humanReadablePartToText)
import qualified Data.ByteString.Base16 as Base16 (decode, encode)
import qualified Data.ByteString.Char8 as BS (pack, unpack)
import qualified Data.Text as T (pack, unpack)
decodeBech32 :: MonadError CliError m
=> MonadIO m
decodeBech32 text =
do
(humanReadablePart, dataPart) <-
liftCli
. Bech32.decodeLenient
$ T.pack text
let
humanReadablePart' =
T.unpack
$ Bech32.humanReadablePartToText humanReadablePart
dataPart' <-
liftCliMaybe "Failed decoding data part."
$ BS.unpack . Base16.encode
<$> Bech32.dataPartToBytes dataPart
liftIO . hPutStrLn stderr $ "Human-readable part: " <> humanReadablePart'
liftIO $ putStrLn dataPart'
encodeBech32 :: MonadError CliError m
=> MonadIO m
^ The base 16 data to be encoded .
encodeBech32 humanReadablePart dataPart =
do
humanReadablePart' <-
liftCli
. Bech32.humanReadablePartFromText
$ T.pack humanReadablePart
datapart' <-
liftCli
. fmap Bech32.dataPartFromBytes
. Base16.decode
$ BS.pack dataPart
let
encoded =
T.unpack
$ Bech32.encodeLenient humanReadablePart' datapart'
liftIO $ putStrLn encoded
|
24fdbba51101517b80d52b550fd97901d284bf0e5b083d489e69a43b25c929a0 | cnuernber/avclj | av_pixfmt.clj | (ns avclj.av-pixfmt
  "Autogenerated from avclj.av-pixfmt-data--DO NOT EDIT"
(:require [avclj.av-pixfmt-data]))
(def ^{:tag 'long} AV_PIX_FMT_0BGR avclj.av-pixfmt-data/AV_PIX_FMT_0BGR)
(def ^{:tag 'long} AV_PIX_FMT_0BGR32 avclj.av-pixfmt-data/AV_PIX_FMT_0BGR32)
(def ^{:tag 'long} AV_PIX_FMT_0RGB avclj.av-pixfmt-data/AV_PIX_FMT_0RGB)
(def ^{:tag 'long} AV_PIX_FMT_0RGB32 avclj.av-pixfmt-data/AV_PIX_FMT_0RGB32)
(def ^{:tag 'long} AV_PIX_FMT_ABGR avclj.av-pixfmt-data/AV_PIX_FMT_ABGR)
(def ^{:tag 'long} AV_PIX_FMT_ARGB avclj.av-pixfmt-data/AV_PIX_FMT_ARGB)
(def ^{:tag 'long} AV_PIX_FMT_AYUV64 avclj.av-pixfmt-data/AV_PIX_FMT_AYUV64)
(def ^{:tag 'long} AV_PIX_FMT_AYUV64BE avclj.av-pixfmt-data/AV_PIX_FMT_AYUV64BE)
(def ^{:tag 'long} AV_PIX_FMT_AYUV64LE avclj.av-pixfmt-data/AV_PIX_FMT_AYUV64LE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_BGGR16 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_BGGR16)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_BGGR16BE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_BGGR16BE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_BGGR16LE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_BGGR16LE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_BGGR8 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_BGGR8)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GBRG16 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GBRG16)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GBRG16BE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GBRG16BE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GBRG16LE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GBRG16LE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GBRG8 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GBRG8)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GRBG16 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GRBG16)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GRBG16BE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GRBG16BE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GRBG16LE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GRBG16LE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GRBG8 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GRBG8)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_RGGB16 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_RGGB16)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_RGGB16BE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_RGGB16BE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_RGGB16LE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_RGGB16LE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_RGGB8 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_RGGB8)
(def ^{:tag 'long} AV_PIX_FMT_BGR0 avclj.av-pixfmt-data/AV_PIX_FMT_BGR0)
(def ^{:tag 'long} AV_PIX_FMT_BGR24 avclj.av-pixfmt-data/AV_PIX_FMT_BGR24)
(def ^{:tag 'long} AV_PIX_FMT_BGR32 avclj.av-pixfmt-data/AV_PIX_FMT_BGR32)
(def ^{:tag 'long} AV_PIX_FMT_BGR32_1 avclj.av-pixfmt-data/AV_PIX_FMT_BGR32_1)
(def ^{:tag 'long} AV_PIX_FMT_BGR4 avclj.av-pixfmt-data/AV_PIX_FMT_BGR4)
(def ^{:tag 'long} AV_PIX_FMT_BGR444 avclj.av-pixfmt-data/AV_PIX_FMT_BGR444)
(def ^{:tag 'long} AV_PIX_FMT_BGR444BE avclj.av-pixfmt-data/AV_PIX_FMT_BGR444BE)
(def ^{:tag 'long} AV_PIX_FMT_BGR444LE avclj.av-pixfmt-data/AV_PIX_FMT_BGR444LE)
(def ^{:tag 'long} AV_PIX_FMT_BGR48 avclj.av-pixfmt-data/AV_PIX_FMT_BGR48)
(def ^{:tag 'long} AV_PIX_FMT_BGR48BE avclj.av-pixfmt-data/AV_PIX_FMT_BGR48BE)
(def ^{:tag 'long} AV_PIX_FMT_BGR48LE avclj.av-pixfmt-data/AV_PIX_FMT_BGR48LE)
(def ^{:tag 'long} AV_PIX_FMT_BGR4_BYTE avclj.av-pixfmt-data/AV_PIX_FMT_BGR4_BYTE)
(def ^{:tag 'long} AV_PIX_FMT_BGR555 avclj.av-pixfmt-data/AV_PIX_FMT_BGR555)
(def ^{:tag 'long} AV_PIX_FMT_BGR555BE avclj.av-pixfmt-data/AV_PIX_FMT_BGR555BE)
(def ^{:tag 'long} AV_PIX_FMT_BGR555LE avclj.av-pixfmt-data/AV_PIX_FMT_BGR555LE)
(def ^{:tag 'long} AV_PIX_FMT_BGR565 avclj.av-pixfmt-data/AV_PIX_FMT_BGR565)
(def ^{:tag 'long} AV_PIX_FMT_BGR565BE avclj.av-pixfmt-data/AV_PIX_FMT_BGR565BE)
(def ^{:tag 'long} AV_PIX_FMT_BGR565LE avclj.av-pixfmt-data/AV_PIX_FMT_BGR565LE)
(def ^{:tag 'long} AV_PIX_FMT_BGR8 avclj.av-pixfmt-data/AV_PIX_FMT_BGR8)
(def ^{:tag 'long} AV_PIX_FMT_BGRA avclj.av-pixfmt-data/AV_PIX_FMT_BGRA)
(def ^{:tag 'long} AV_PIX_FMT_BGRA64 avclj.av-pixfmt-data/AV_PIX_FMT_BGRA64)
(def ^{:tag 'long} AV_PIX_FMT_BGRA64BE avclj.av-pixfmt-data/AV_PIX_FMT_BGRA64BE)
(def ^{:tag 'long} AV_PIX_FMT_BGRA64LE avclj.av-pixfmt-data/AV_PIX_FMT_BGRA64LE)
(def ^{:tag 'long} AV_PIX_FMT_CUDA avclj.av-pixfmt-data/AV_PIX_FMT_CUDA)
(def ^{:tag 'long} AV_PIX_FMT_D3D11 avclj.av-pixfmt-data/AV_PIX_FMT_D3D11)
(def ^{:tag 'long} AV_PIX_FMT_D3D11VA_VLD avclj.av-pixfmt-data/AV_PIX_FMT_D3D11VA_VLD)
(def ^{:tag 'long} AV_PIX_FMT_DRM_PRIME avclj.av-pixfmt-data/AV_PIX_FMT_DRM_PRIME)
(def ^{:tag 'long} AV_PIX_FMT_DXVA2_VLD avclj.av-pixfmt-data/AV_PIX_FMT_DXVA2_VLD)
(def ^{:tag 'long} AV_PIX_FMT_GBR24P avclj.av-pixfmt-data/AV_PIX_FMT_GBR24P)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP10 avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP10)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP10BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP10BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP10LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP10LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP12 avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP12)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP12BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP12BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP12LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP12LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP16 avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP16)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP16BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP16BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP16LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP16LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAPF32 avclj.av-pixfmt-data/AV_PIX_FMT_GBRAPF32)
(def ^{:tag 'long} AV_PIX_FMT_GBRAPF32BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAPF32BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAPF32LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAPF32LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP avclj.av-pixfmt-data/AV_PIX_FMT_GBRP)
(def ^{:tag 'long} AV_PIX_FMT_GBRP10 avclj.av-pixfmt-data/AV_PIX_FMT_GBRP10)
(def ^{:tag 'long} AV_PIX_FMT_GBRP10BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP10BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP10LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP10LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP12 avclj.av-pixfmt-data/AV_PIX_FMT_GBRP12)
(def ^{:tag 'long} AV_PIX_FMT_GBRP12BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP12BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP12LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP12LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP14 avclj.av-pixfmt-data/AV_PIX_FMT_GBRP14)
(def ^{:tag 'long} AV_PIX_FMT_GBRP14BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP14BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP14LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP14LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP16 avclj.av-pixfmt-data/AV_PIX_FMT_GBRP16)
(def ^{:tag 'long} AV_PIX_FMT_GBRP16BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP16BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP16LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP16LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP9 avclj.av-pixfmt-data/AV_PIX_FMT_GBRP9)
(def ^{:tag 'long} AV_PIX_FMT_GBRP9BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP9BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP9LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP9LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRPF32 avclj.av-pixfmt-data/AV_PIX_FMT_GBRPF32)
(def ^{:tag 'long} AV_PIX_FMT_GBRPF32BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRPF32BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRPF32LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRPF32LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY10 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY10)
(def ^{:tag 'long} AV_PIX_FMT_GRAY10BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY10BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY10LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY10LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY12 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY12)
(def ^{:tag 'long} AV_PIX_FMT_GRAY12BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY12BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY12LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY12LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY14 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY14)
(def ^{:tag 'long} AV_PIX_FMT_GRAY14BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY14BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY14LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY14LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY16 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY16)
(def ^{:tag 'long} AV_PIX_FMT_GRAY16BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY16BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY16LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY16LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY8 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY8)
(def ^{:tag 'long} AV_PIX_FMT_GRAY8A avclj.av-pixfmt-data/AV_PIX_FMT_GRAY8A)
(def ^{:tag 'long} AV_PIX_FMT_GRAY9 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY9)
(def ^{:tag 'long} AV_PIX_FMT_GRAY9BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY9BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY9LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY9LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAYF32 avclj.av-pixfmt-data/AV_PIX_FMT_GRAYF32)
(def ^{:tag 'long} AV_PIX_FMT_GRAYF32BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAYF32BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAYF32LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAYF32LE)
(def ^{:tag 'long} AV_PIX_FMT_MEDIACODEC avclj.av-pixfmt-data/AV_PIX_FMT_MEDIACODEC)
(def ^{:tag 'long} AV_PIX_FMT_MMAL avclj.av-pixfmt-data/AV_PIX_FMT_MMAL)
(def ^{:tag 'long} AV_PIX_FMT_MONOBLACK avclj.av-pixfmt-data/AV_PIX_FMT_MONOBLACK)
(def ^{:tag 'long} AV_PIX_FMT_MONOWHITE avclj.av-pixfmt-data/AV_PIX_FMT_MONOWHITE)
(def ^{:tag 'long} AV_PIX_FMT_NB avclj.av-pixfmt-data/AV_PIX_FMT_NB)
(def ^{:tag 'long} AV_PIX_FMT_NONE avclj.av-pixfmt-data/AV_PIX_FMT_NONE)
(def ^{:tag 'long} AV_PIX_FMT_NV12 avclj.av-pixfmt-data/AV_PIX_FMT_NV12)
(def ^{:tag 'long} AV_PIX_FMT_NV16 avclj.av-pixfmt-data/AV_PIX_FMT_NV16)
(def ^{:tag 'long} AV_PIX_FMT_NV20 avclj.av-pixfmt-data/AV_PIX_FMT_NV20)
(def ^{:tag 'long} AV_PIX_FMT_NV20BE avclj.av-pixfmt-data/AV_PIX_FMT_NV20BE)
(def ^{:tag 'long} AV_PIX_FMT_NV20LE avclj.av-pixfmt-data/AV_PIX_FMT_NV20LE)
(def ^{:tag 'long} AV_PIX_FMT_NV21 avclj.av-pixfmt-data/AV_PIX_FMT_NV21)
(def ^{:tag 'long} AV_PIX_FMT_NV24 avclj.av-pixfmt-data/AV_PIX_FMT_NV24)
(def ^{:tag 'long} AV_PIX_FMT_NV42 avclj.av-pixfmt-data/AV_PIX_FMT_NV42)
(def ^{:tag 'long} AV_PIX_FMT_OPENCL avclj.av-pixfmt-data/AV_PIX_FMT_OPENCL)
(def ^{:tag 'long} AV_PIX_FMT_P010 avclj.av-pixfmt-data/AV_PIX_FMT_P010)
(def ^{:tag 'long} AV_PIX_FMT_P010BE avclj.av-pixfmt-data/AV_PIX_FMT_P010BE)
(def ^{:tag 'long} AV_PIX_FMT_P010LE avclj.av-pixfmt-data/AV_PIX_FMT_P010LE)
(def ^{:tag 'long} AV_PIX_FMT_P016 avclj.av-pixfmt-data/AV_PIX_FMT_P016)
(def ^{:tag 'long} AV_PIX_FMT_P016BE avclj.av-pixfmt-data/AV_PIX_FMT_P016BE)
(def ^{:tag 'long} AV_PIX_FMT_P016LE avclj.av-pixfmt-data/AV_PIX_FMT_P016LE)
(def ^{:tag 'long} AV_PIX_FMT_PAL8 avclj.av-pixfmt-data/AV_PIX_FMT_PAL8)
(def ^{:tag 'long} AV_PIX_FMT_QSV avclj.av-pixfmt-data/AV_PIX_FMT_QSV)
(def ^{:tag 'long} AV_PIX_FMT_RGB0 avclj.av-pixfmt-data/AV_PIX_FMT_RGB0)
(def ^{:tag 'long} AV_PIX_FMT_RGB24 avclj.av-pixfmt-data/AV_PIX_FMT_RGB24)
(def ^{:tag 'long} AV_PIX_FMT_RGB32 avclj.av-pixfmt-data/AV_PIX_FMT_RGB32)
(def ^{:tag 'long} AV_PIX_FMT_RGB32_1 avclj.av-pixfmt-data/AV_PIX_FMT_RGB32_1)
(def ^{:tag 'long} AV_PIX_FMT_RGB4 avclj.av-pixfmt-data/AV_PIX_FMT_RGB4)
(def ^{:tag 'long} AV_PIX_FMT_RGB444 avclj.av-pixfmt-data/AV_PIX_FMT_RGB444)
(def ^{:tag 'long} AV_PIX_FMT_RGB444BE avclj.av-pixfmt-data/AV_PIX_FMT_RGB444BE)
(def ^{:tag 'long} AV_PIX_FMT_RGB444LE avclj.av-pixfmt-data/AV_PIX_FMT_RGB444LE)
(def ^{:tag 'long} AV_PIX_FMT_RGB48 avclj.av-pixfmt-data/AV_PIX_FMT_RGB48)
(def ^{:tag 'long} AV_PIX_FMT_RGB48BE avclj.av-pixfmt-data/AV_PIX_FMT_RGB48BE)
(def ^{:tag 'long} AV_PIX_FMT_RGB48LE avclj.av-pixfmt-data/AV_PIX_FMT_RGB48LE)
(def ^{:tag 'long} AV_PIX_FMT_RGB4_BYTE avclj.av-pixfmt-data/AV_PIX_FMT_RGB4_BYTE)
(def ^{:tag 'long} AV_PIX_FMT_RGB555 avclj.av-pixfmt-data/AV_PIX_FMT_RGB555)
(def ^{:tag 'long} AV_PIX_FMT_RGB555BE avclj.av-pixfmt-data/AV_PIX_FMT_RGB555BE)
(def ^{:tag 'long} AV_PIX_FMT_RGB555LE avclj.av-pixfmt-data/AV_PIX_FMT_RGB555LE)
(def ^{:tag 'long} AV_PIX_FMT_RGB565 avclj.av-pixfmt-data/AV_PIX_FMT_RGB565)
(def ^{:tag 'long} AV_PIX_FMT_RGB565BE avclj.av-pixfmt-data/AV_PIX_FMT_RGB565BE)
(def ^{:tag 'long} AV_PIX_FMT_RGB565LE avclj.av-pixfmt-data/AV_PIX_FMT_RGB565LE)
(def ^{:tag 'long} AV_PIX_FMT_RGB8 avclj.av-pixfmt-data/AV_PIX_FMT_RGB8)
(def ^{:tag 'long} AV_PIX_FMT_RGBA avclj.av-pixfmt-data/AV_PIX_FMT_RGBA)
(def ^{:tag 'long} AV_PIX_FMT_RGBA64 avclj.av-pixfmt-data/AV_PIX_FMT_RGBA64)
(def ^{:tag 'long} AV_PIX_FMT_RGBA64BE avclj.av-pixfmt-data/AV_PIX_FMT_RGBA64BE)
(def ^{:tag 'long} AV_PIX_FMT_RGBA64LE avclj.av-pixfmt-data/AV_PIX_FMT_RGBA64LE)
(def ^{:tag 'long} AV_PIX_FMT_UYVY422 avclj.av-pixfmt-data/AV_PIX_FMT_UYVY422)
(def ^{:tag 'long} AV_PIX_FMT_UYYVYY411 avclj.av-pixfmt-data/AV_PIX_FMT_UYYVYY411)
(def ^{:tag 'long} AV_PIX_FMT_VAAPI avclj.av-pixfmt-data/AV_PIX_FMT_VAAPI)
(def ^{:tag 'long} AV_PIX_FMT_VDPAU avclj.av-pixfmt-data/AV_PIX_FMT_VDPAU)
(def ^{:tag 'long} AV_PIX_FMT_VIDEOTOOLBOX avclj.av-pixfmt-data/AV_PIX_FMT_VIDEOTOOLBOX)
(def ^{:tag 'long} AV_PIX_FMT_VULKAN avclj.av-pixfmt-data/AV_PIX_FMT_VULKAN)
(def ^{:tag 'long} AV_PIX_FMT_X2RGB10 avclj.av-pixfmt-data/AV_PIX_FMT_X2RGB10)
(def ^{:tag 'long} AV_PIX_FMT_X2RGB10BE avclj.av-pixfmt-data/AV_PIX_FMT_X2RGB10BE)
(def ^{:tag 'long} AV_PIX_FMT_X2RGB10LE avclj.av-pixfmt-data/AV_PIX_FMT_X2RGB10LE)
(def ^{:tag 'long} AV_PIX_FMT_XVMC avclj.av-pixfmt-data/AV_PIX_FMT_XVMC)
(def ^{:tag 'long} AV_PIX_FMT_XYZ12 avclj.av-pixfmt-data/AV_PIX_FMT_XYZ12)
(def ^{:tag 'long} AV_PIX_FMT_XYZ12BE avclj.av-pixfmt-data/AV_PIX_FMT_XYZ12BE)
(def ^{:tag 'long} AV_PIX_FMT_XYZ12LE avclj.av-pixfmt-data/AV_PIX_FMT_XYZ12LE)
(def ^{:tag 'long} AV_PIX_FMT_Y210 avclj.av-pixfmt-data/AV_PIX_FMT_Y210)
(def ^{:tag 'long} AV_PIX_FMT_Y210BE avclj.av-pixfmt-data/AV_PIX_FMT_Y210BE)
(def ^{:tag 'long} AV_PIX_FMT_Y210LE avclj.av-pixfmt-data/AV_PIX_FMT_Y210LE)
(def ^{:tag 'long} AV_PIX_FMT_Y400A avclj.av-pixfmt-data/AV_PIX_FMT_Y400A)
(def ^{:tag 'long} AV_PIX_FMT_YA16 avclj.av-pixfmt-data/AV_PIX_FMT_YA16)
(def ^{:tag 'long} AV_PIX_FMT_YA16BE avclj.av-pixfmt-data/AV_PIX_FMT_YA16BE)
(def ^{:tag 'long} AV_PIX_FMT_YA16LE avclj.av-pixfmt-data/AV_PIX_FMT_YA16LE)
(def ^{:tag 'long} AV_PIX_FMT_YA8 avclj.av-pixfmt-data/AV_PIX_FMT_YA8)
(def ^{:tag 'long} AV_PIX_FMT_YUV410P avclj.av-pixfmt-data/AV_PIX_FMT_YUV410P)
(def ^{:tag 'long} AV_PIX_FMT_YUV411P avclj.av-pixfmt-data/AV_PIX_FMT_YUV411P)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P10)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P12)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P14 avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P14)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P14BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P14BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P14LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P14LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P16)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P9)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P10)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P12)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P14 avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P14)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P14BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P14BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P14LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P14LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P16)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P9)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P10)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P12)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P10)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P12)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P14 avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P14)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P14BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P14BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P14LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P14LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P16)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P9)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P10)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P16)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P9)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P10)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P12)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P16)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P9)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P10)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P12)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P16)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P9)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVJ411P avclj.av-pixfmt-data/AV_PIX_FMT_YUVJ411P)
(def ^{:tag 'long} AV_PIX_FMT_YUVJ420P avclj.av-pixfmt-data/AV_PIX_FMT_YUVJ420P)
(def ^{:tag 'long} AV_PIX_FMT_YUVJ422P avclj.av-pixfmt-data/AV_PIX_FMT_YUVJ422P)
(def ^{:tag 'long} AV_PIX_FMT_YUVJ440P avclj.av-pixfmt-data/AV_PIX_FMT_YUVJ440P)
(def ^{:tag 'long} AV_PIX_FMT_YUVJ444P avclj.av-pixfmt-data/AV_PIX_FMT_YUVJ444P)
(def ^{:tag 'long} AV_PIX_FMT_YUYV422 avclj.av-pixfmt-data/AV_PIX_FMT_YUYV422)
(def ^{:tag 'long} AV_PIX_FMT_YVYU422 avclj.av-pixfmt-data/AV_PIX_FMT_YVYU422)
(defn pixfmt->value
  "Look up the integer value of pixel format `pixfmt`; delegates to
  avclj.av-pixfmt-data/pixfmt->value."
  ([pixfmt]
   (avclj.av-pixfmt-data/pixfmt->value pixfmt)))
;; Re-export of the name->value map from avclj.av-pixfmt-data.
(def pixfmt-name-value-map avclj.av-pixfmt-data/pixfmt-name-value-map)
;; Re-export of the value->names multi-map from avclj.av-pixfmt-data
;; (presumably one value can carry several aliases -- verify in data ns).
(def pixfmt-value-name-multi-map avclj.av-pixfmt-data/pixfmt-value-name-multi-map)
(defn value->pixfmt
  "Look up the pixel format for integer value `pixval`; delegates to
  avclj.av-pixfmt-data/value->pixfmt."
  ([pixval]
   (avclj.av-pixfmt-data/value->pixfmt pixval)))
| null | https://raw.githubusercontent.com/cnuernber/avclj/d0ea7338110712f29d3f0b6f127d71d59fa4552b/src/avclj/av_pixfmt.clj | clojure | (ns avclj.av-pixfmt
Autogenerated from avclj.av - pixfmt - data-- DO NOT EDIT
""
(:require [avclj.av-pixfmt-data]))
(def ^{:tag 'long} AV_PIX_FMT_0BGR avclj.av-pixfmt-data/AV_PIX_FMT_0BGR)
(def ^{:tag 'long} AV_PIX_FMT_0BGR32 avclj.av-pixfmt-data/AV_PIX_FMT_0BGR32)
(def ^{:tag 'long} AV_PIX_FMT_0RGB avclj.av-pixfmt-data/AV_PIX_FMT_0RGB)
(def ^{:tag 'long} AV_PIX_FMT_0RGB32 avclj.av-pixfmt-data/AV_PIX_FMT_0RGB32)
(def ^{:tag 'long} AV_PIX_FMT_ABGR avclj.av-pixfmt-data/AV_PIX_FMT_ABGR)
(def ^{:tag 'long} AV_PIX_FMT_ARGB avclj.av-pixfmt-data/AV_PIX_FMT_ARGB)
(def ^{:tag 'long} AV_PIX_FMT_AYUV64 avclj.av-pixfmt-data/AV_PIX_FMT_AYUV64)
(def ^{:tag 'long} AV_PIX_FMT_AYUV64BE avclj.av-pixfmt-data/AV_PIX_FMT_AYUV64BE)
(def ^{:tag 'long} AV_PIX_FMT_AYUV64LE avclj.av-pixfmt-data/AV_PIX_FMT_AYUV64LE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_BGGR16 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_BGGR16)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_BGGR16BE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_BGGR16BE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_BGGR16LE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_BGGR16LE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_BGGR8 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_BGGR8)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GBRG16 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GBRG16)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GBRG16BE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GBRG16BE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GBRG16LE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GBRG16LE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GBRG8 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GBRG8)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GRBG16 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GRBG16)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GRBG16BE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GRBG16BE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GRBG16LE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GRBG16LE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_GRBG8 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_GRBG8)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_RGGB16 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_RGGB16)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_RGGB16BE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_RGGB16BE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_RGGB16LE avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_RGGB16LE)
(def ^{:tag 'long} AV_PIX_FMT_BAYER_RGGB8 avclj.av-pixfmt-data/AV_PIX_FMT_BAYER_RGGB8)
(def ^{:tag 'long} AV_PIX_FMT_BGR0 avclj.av-pixfmt-data/AV_PIX_FMT_BGR0)
(def ^{:tag 'long} AV_PIX_FMT_BGR24 avclj.av-pixfmt-data/AV_PIX_FMT_BGR24)
(def ^{:tag 'long} AV_PIX_FMT_BGR32 avclj.av-pixfmt-data/AV_PIX_FMT_BGR32)
(def ^{:tag 'long} AV_PIX_FMT_BGR32_1 avclj.av-pixfmt-data/AV_PIX_FMT_BGR32_1)
(def ^{:tag 'long} AV_PIX_FMT_BGR4 avclj.av-pixfmt-data/AV_PIX_FMT_BGR4)
(def ^{:tag 'long} AV_PIX_FMT_BGR444 avclj.av-pixfmt-data/AV_PIX_FMT_BGR444)
(def ^{:tag 'long} AV_PIX_FMT_BGR444BE avclj.av-pixfmt-data/AV_PIX_FMT_BGR444BE)
(def ^{:tag 'long} AV_PIX_FMT_BGR444LE avclj.av-pixfmt-data/AV_PIX_FMT_BGR444LE)
(def ^{:tag 'long} AV_PIX_FMT_BGR48 avclj.av-pixfmt-data/AV_PIX_FMT_BGR48)
(def ^{:tag 'long} AV_PIX_FMT_BGR48BE avclj.av-pixfmt-data/AV_PIX_FMT_BGR48BE)
(def ^{:tag 'long} AV_PIX_FMT_BGR48LE avclj.av-pixfmt-data/AV_PIX_FMT_BGR48LE)
(def ^{:tag 'long} AV_PIX_FMT_BGR4_BYTE avclj.av-pixfmt-data/AV_PIX_FMT_BGR4_BYTE)
(def ^{:tag 'long} AV_PIX_FMT_BGR555 avclj.av-pixfmt-data/AV_PIX_FMT_BGR555)
(def ^{:tag 'long} AV_PIX_FMT_BGR555BE avclj.av-pixfmt-data/AV_PIX_FMT_BGR555BE)
(def ^{:tag 'long} AV_PIX_FMT_BGR555LE avclj.av-pixfmt-data/AV_PIX_FMT_BGR555LE)
(def ^{:tag 'long} AV_PIX_FMT_BGR565 avclj.av-pixfmt-data/AV_PIX_FMT_BGR565)
(def ^{:tag 'long} AV_PIX_FMT_BGR565BE avclj.av-pixfmt-data/AV_PIX_FMT_BGR565BE)
(def ^{:tag 'long} AV_PIX_FMT_BGR565LE avclj.av-pixfmt-data/AV_PIX_FMT_BGR565LE)
(def ^{:tag 'long} AV_PIX_FMT_BGR8 avclj.av-pixfmt-data/AV_PIX_FMT_BGR8)
(def ^{:tag 'long} AV_PIX_FMT_BGRA avclj.av-pixfmt-data/AV_PIX_FMT_BGRA)
(def ^{:tag 'long} AV_PIX_FMT_BGRA64 avclj.av-pixfmt-data/AV_PIX_FMT_BGRA64)
(def ^{:tag 'long} AV_PIX_FMT_BGRA64BE avclj.av-pixfmt-data/AV_PIX_FMT_BGRA64BE)
(def ^{:tag 'long} AV_PIX_FMT_BGRA64LE avclj.av-pixfmt-data/AV_PIX_FMT_BGRA64LE)
(def ^{:tag 'long} AV_PIX_FMT_CUDA avclj.av-pixfmt-data/AV_PIX_FMT_CUDA)
(def ^{:tag 'long} AV_PIX_FMT_D3D11 avclj.av-pixfmt-data/AV_PIX_FMT_D3D11)
(def ^{:tag 'long} AV_PIX_FMT_D3D11VA_VLD avclj.av-pixfmt-data/AV_PIX_FMT_D3D11VA_VLD)
(def ^{:tag 'long} AV_PIX_FMT_DRM_PRIME avclj.av-pixfmt-data/AV_PIX_FMT_DRM_PRIME)
(def ^{:tag 'long} AV_PIX_FMT_DXVA2_VLD avclj.av-pixfmt-data/AV_PIX_FMT_DXVA2_VLD)
(def ^{:tag 'long} AV_PIX_FMT_GBR24P avclj.av-pixfmt-data/AV_PIX_FMT_GBR24P)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP10 avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP10)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP10BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP10BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP10LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP10LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP12 avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP12)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP12BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP12BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP12LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP12LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP16 avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP16)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP16BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP16BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAP16LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAP16LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAPF32 avclj.av-pixfmt-data/AV_PIX_FMT_GBRAPF32)
(def ^{:tag 'long} AV_PIX_FMT_GBRAPF32BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAPF32BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRAPF32LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRAPF32LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP avclj.av-pixfmt-data/AV_PIX_FMT_GBRP)
(def ^{:tag 'long} AV_PIX_FMT_GBRP10 avclj.av-pixfmt-data/AV_PIX_FMT_GBRP10)
(def ^{:tag 'long} AV_PIX_FMT_GBRP10BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP10BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP10LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP10LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP12 avclj.av-pixfmt-data/AV_PIX_FMT_GBRP12)
(def ^{:tag 'long} AV_PIX_FMT_GBRP12BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP12BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP12LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP12LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP14 avclj.av-pixfmt-data/AV_PIX_FMT_GBRP14)
(def ^{:tag 'long} AV_PIX_FMT_GBRP14BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP14BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP14LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP14LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP16 avclj.av-pixfmt-data/AV_PIX_FMT_GBRP16)
(def ^{:tag 'long} AV_PIX_FMT_GBRP16BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP16BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP16LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP16LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP9 avclj.av-pixfmt-data/AV_PIX_FMT_GBRP9)
(def ^{:tag 'long} AV_PIX_FMT_GBRP9BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP9BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRP9LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRP9LE)
(def ^{:tag 'long} AV_PIX_FMT_GBRPF32 avclj.av-pixfmt-data/AV_PIX_FMT_GBRPF32)
(def ^{:tag 'long} AV_PIX_FMT_GBRPF32BE avclj.av-pixfmt-data/AV_PIX_FMT_GBRPF32BE)
(def ^{:tag 'long} AV_PIX_FMT_GBRPF32LE avclj.av-pixfmt-data/AV_PIX_FMT_GBRPF32LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY10 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY10)
(def ^{:tag 'long} AV_PIX_FMT_GRAY10BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY10BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY10LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY10LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY12 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY12)
(def ^{:tag 'long} AV_PIX_FMT_GRAY12BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY12BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY12LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY12LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY14 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY14)
(def ^{:tag 'long} AV_PIX_FMT_GRAY14BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY14BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY14LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY14LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY16 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY16)
(def ^{:tag 'long} AV_PIX_FMT_GRAY16BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY16BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY16LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY16LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY8 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY8)
(def ^{:tag 'long} AV_PIX_FMT_GRAY8A avclj.av-pixfmt-data/AV_PIX_FMT_GRAY8A)
(def ^{:tag 'long} AV_PIX_FMT_GRAY9 avclj.av-pixfmt-data/AV_PIX_FMT_GRAY9)
(def ^{:tag 'long} AV_PIX_FMT_GRAY9BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY9BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAY9LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAY9LE)
(def ^{:tag 'long} AV_PIX_FMT_GRAYF32 avclj.av-pixfmt-data/AV_PIX_FMT_GRAYF32)
(def ^{:tag 'long} AV_PIX_FMT_GRAYF32BE avclj.av-pixfmt-data/AV_PIX_FMT_GRAYF32BE)
(def ^{:tag 'long} AV_PIX_FMT_GRAYF32LE avclj.av-pixfmt-data/AV_PIX_FMT_GRAYF32LE)
(def ^{:tag 'long} AV_PIX_FMT_MEDIACODEC avclj.av-pixfmt-data/AV_PIX_FMT_MEDIACODEC)
(def ^{:tag 'long} AV_PIX_FMT_MMAL avclj.av-pixfmt-data/AV_PIX_FMT_MMAL)
(def ^{:tag 'long} AV_PIX_FMT_MONOBLACK avclj.av-pixfmt-data/AV_PIX_FMT_MONOBLACK)
(def ^{:tag 'long} AV_PIX_FMT_MONOWHITE avclj.av-pixfmt-data/AV_PIX_FMT_MONOWHITE)
(def ^{:tag 'long} AV_PIX_FMT_NB avclj.av-pixfmt-data/AV_PIX_FMT_NB)
(def ^{:tag 'long} AV_PIX_FMT_NONE avclj.av-pixfmt-data/AV_PIX_FMT_NONE)
(def ^{:tag 'long} AV_PIX_FMT_NV12 avclj.av-pixfmt-data/AV_PIX_FMT_NV12)
(def ^{:tag 'long} AV_PIX_FMT_NV16 avclj.av-pixfmt-data/AV_PIX_FMT_NV16)
(def ^{:tag 'long} AV_PIX_FMT_NV20 avclj.av-pixfmt-data/AV_PIX_FMT_NV20)
(def ^{:tag 'long} AV_PIX_FMT_NV20BE avclj.av-pixfmt-data/AV_PIX_FMT_NV20BE)
(def ^{:tag 'long} AV_PIX_FMT_NV20LE avclj.av-pixfmt-data/AV_PIX_FMT_NV20LE)
(def ^{:tag 'long} AV_PIX_FMT_NV21 avclj.av-pixfmt-data/AV_PIX_FMT_NV21)
(def ^{:tag 'long} AV_PIX_FMT_NV24 avclj.av-pixfmt-data/AV_PIX_FMT_NV24)
(def ^{:tag 'long} AV_PIX_FMT_NV42 avclj.av-pixfmt-data/AV_PIX_FMT_NV42)
(def ^{:tag 'long} AV_PIX_FMT_OPENCL avclj.av-pixfmt-data/AV_PIX_FMT_OPENCL)
(def ^{:tag 'long} AV_PIX_FMT_P010 avclj.av-pixfmt-data/AV_PIX_FMT_P010)
(def ^{:tag 'long} AV_PIX_FMT_P010BE avclj.av-pixfmt-data/AV_PIX_FMT_P010BE)
(def ^{:tag 'long} AV_PIX_FMT_P010LE avclj.av-pixfmt-data/AV_PIX_FMT_P010LE)
(def ^{:tag 'long} AV_PIX_FMT_P016 avclj.av-pixfmt-data/AV_PIX_FMT_P016)
(def ^{:tag 'long} AV_PIX_FMT_P016BE avclj.av-pixfmt-data/AV_PIX_FMT_P016BE)
(def ^{:tag 'long} AV_PIX_FMT_P016LE avclj.av-pixfmt-data/AV_PIX_FMT_P016LE)
(def ^{:tag 'long} AV_PIX_FMT_PAL8 avclj.av-pixfmt-data/AV_PIX_FMT_PAL8)
(def ^{:tag 'long} AV_PIX_FMT_QSV avclj.av-pixfmt-data/AV_PIX_FMT_QSV)
(def ^{:tag 'long} AV_PIX_FMT_RGB0 avclj.av-pixfmt-data/AV_PIX_FMT_RGB0)
(def ^{:tag 'long} AV_PIX_FMT_RGB24 avclj.av-pixfmt-data/AV_PIX_FMT_RGB24)
(def ^{:tag 'long} AV_PIX_FMT_RGB32 avclj.av-pixfmt-data/AV_PIX_FMT_RGB32)
(def ^{:tag 'long} AV_PIX_FMT_RGB32_1 avclj.av-pixfmt-data/AV_PIX_FMT_RGB32_1)
(def ^{:tag 'long} AV_PIX_FMT_RGB4 avclj.av-pixfmt-data/AV_PIX_FMT_RGB4)
(def ^{:tag 'long} AV_PIX_FMT_RGB444 avclj.av-pixfmt-data/AV_PIX_FMT_RGB444)
(def ^{:tag 'long} AV_PIX_FMT_RGB444BE avclj.av-pixfmt-data/AV_PIX_FMT_RGB444BE)
(def ^{:tag 'long} AV_PIX_FMT_RGB444LE avclj.av-pixfmt-data/AV_PIX_FMT_RGB444LE)
(def ^{:tag 'long} AV_PIX_FMT_RGB48 avclj.av-pixfmt-data/AV_PIX_FMT_RGB48)
(def ^{:tag 'long} AV_PIX_FMT_RGB48BE avclj.av-pixfmt-data/AV_PIX_FMT_RGB48BE)
(def ^{:tag 'long} AV_PIX_FMT_RGB48LE avclj.av-pixfmt-data/AV_PIX_FMT_RGB48LE)
(def ^{:tag 'long} AV_PIX_FMT_RGB4_BYTE avclj.av-pixfmt-data/AV_PIX_FMT_RGB4_BYTE)
(def ^{:tag 'long} AV_PIX_FMT_RGB555 avclj.av-pixfmt-data/AV_PIX_FMT_RGB555)
(def ^{:tag 'long} AV_PIX_FMT_RGB555BE avclj.av-pixfmt-data/AV_PIX_FMT_RGB555BE)
(def ^{:tag 'long} AV_PIX_FMT_RGB555LE avclj.av-pixfmt-data/AV_PIX_FMT_RGB555LE)
(def ^{:tag 'long} AV_PIX_FMT_RGB565 avclj.av-pixfmt-data/AV_PIX_FMT_RGB565)
(def ^{:tag 'long} AV_PIX_FMT_RGB565BE avclj.av-pixfmt-data/AV_PIX_FMT_RGB565BE)
(def ^{:tag 'long} AV_PIX_FMT_RGB565LE avclj.av-pixfmt-data/AV_PIX_FMT_RGB565LE)
(def ^{:tag 'long} AV_PIX_FMT_RGB8 avclj.av-pixfmt-data/AV_PIX_FMT_RGB8)
(def ^{:tag 'long} AV_PIX_FMT_RGBA avclj.av-pixfmt-data/AV_PIX_FMT_RGBA)
(def ^{:tag 'long} AV_PIX_FMT_RGBA64 avclj.av-pixfmt-data/AV_PIX_FMT_RGBA64)
(def ^{:tag 'long} AV_PIX_FMT_RGBA64BE avclj.av-pixfmt-data/AV_PIX_FMT_RGBA64BE)
(def ^{:tag 'long} AV_PIX_FMT_RGBA64LE avclj.av-pixfmt-data/AV_PIX_FMT_RGBA64LE)
(def ^{:tag 'long} AV_PIX_FMT_UYVY422 avclj.av-pixfmt-data/AV_PIX_FMT_UYVY422)
(def ^{:tag 'long} AV_PIX_FMT_UYYVYY411 avclj.av-pixfmt-data/AV_PIX_FMT_UYYVYY411)
(def ^{:tag 'long} AV_PIX_FMT_VAAPI avclj.av-pixfmt-data/AV_PIX_FMT_VAAPI)
(def ^{:tag 'long} AV_PIX_FMT_VDPAU avclj.av-pixfmt-data/AV_PIX_FMT_VDPAU)
(def ^{:tag 'long} AV_PIX_FMT_VIDEOTOOLBOX avclj.av-pixfmt-data/AV_PIX_FMT_VIDEOTOOLBOX)
(def ^{:tag 'long} AV_PIX_FMT_VULKAN avclj.av-pixfmt-data/AV_PIX_FMT_VULKAN)
(def ^{:tag 'long} AV_PIX_FMT_X2RGB10 avclj.av-pixfmt-data/AV_PIX_FMT_X2RGB10)
(def ^{:tag 'long} AV_PIX_FMT_X2RGB10BE avclj.av-pixfmt-data/AV_PIX_FMT_X2RGB10BE)
(def ^{:tag 'long} AV_PIX_FMT_X2RGB10LE avclj.av-pixfmt-data/AV_PIX_FMT_X2RGB10LE)
(def ^{:tag 'long} AV_PIX_FMT_XVMC avclj.av-pixfmt-data/AV_PIX_FMT_XVMC)
(def ^{:tag 'long} AV_PIX_FMT_XYZ12 avclj.av-pixfmt-data/AV_PIX_FMT_XYZ12)
(def ^{:tag 'long} AV_PIX_FMT_XYZ12BE avclj.av-pixfmt-data/AV_PIX_FMT_XYZ12BE)
(def ^{:tag 'long} AV_PIX_FMT_XYZ12LE avclj.av-pixfmt-data/AV_PIX_FMT_XYZ12LE)
(def ^{:tag 'long} AV_PIX_FMT_Y210 avclj.av-pixfmt-data/AV_PIX_FMT_Y210)
(def ^{:tag 'long} AV_PIX_FMT_Y210BE avclj.av-pixfmt-data/AV_PIX_FMT_Y210BE)
(def ^{:tag 'long} AV_PIX_FMT_Y210LE avclj.av-pixfmt-data/AV_PIX_FMT_Y210LE)
(def ^{:tag 'long} AV_PIX_FMT_Y400A avclj.av-pixfmt-data/AV_PIX_FMT_Y400A)
(def ^{:tag 'long} AV_PIX_FMT_YA16 avclj.av-pixfmt-data/AV_PIX_FMT_YA16)
(def ^{:tag 'long} AV_PIX_FMT_YA16BE avclj.av-pixfmt-data/AV_PIX_FMT_YA16BE)
(def ^{:tag 'long} AV_PIX_FMT_YA16LE avclj.av-pixfmt-data/AV_PIX_FMT_YA16LE)
(def ^{:tag 'long} AV_PIX_FMT_YA8 avclj.av-pixfmt-data/AV_PIX_FMT_YA8)
(def ^{:tag 'long} AV_PIX_FMT_YUV410P avclj.av-pixfmt-data/AV_PIX_FMT_YUV410P)
(def ^{:tag 'long} AV_PIX_FMT_YUV411P avclj.av-pixfmt-data/AV_PIX_FMT_YUV411P)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P10)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P12)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P14 avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P14)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P14BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P14BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P14LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P14LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P16)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P9)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV420P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV420P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P10)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P12)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P14 avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P14)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P14BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P14BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P14LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P14LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P16)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P9)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV422P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV422P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P10)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P12)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV440P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV440P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P10)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P12)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P14 avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P14)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P14BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P14BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P14LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P14LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P16)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P9)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUV444P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUV444P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P10)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P16)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P9)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA420P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA420P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P10)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P12)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P16)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P9)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA422P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA422P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P10 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P10)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P10BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P10BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P10LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P10LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P12 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P12)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P12BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P12BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P12LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P12LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P16 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P16)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P16BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P16BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P16LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P16LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P9 avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P9)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P9BE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P9BE)
(def ^{:tag 'long} AV_PIX_FMT_YUVA444P9LE avclj.av-pixfmt-data/AV_PIX_FMT_YUVA444P9LE)
(def ^{:tag 'long} AV_PIX_FMT_YUVJ411P avclj.av-pixfmt-data/AV_PIX_FMT_YUVJ411P)
(def ^{:tag 'long} AV_PIX_FMT_YUVJ420P avclj.av-pixfmt-data/AV_PIX_FMT_YUVJ420P)
(def ^{:tag 'long} AV_PIX_FMT_YUVJ422P avclj.av-pixfmt-data/AV_PIX_FMT_YUVJ422P)
(def ^{:tag 'long} AV_PIX_FMT_YUVJ440P avclj.av-pixfmt-data/AV_PIX_FMT_YUVJ440P)
(def ^{:tag 'long} AV_PIX_FMT_YUVJ444P avclj.av-pixfmt-data/AV_PIX_FMT_YUVJ444P)
(def ^{:tag 'long} AV_PIX_FMT_YUYV422 avclj.av-pixfmt-data/AV_PIX_FMT_YUYV422)
(def ^{:tag 'long} AV_PIX_FMT_YVYU422 avclj.av-pixfmt-data/AV_PIX_FMT_YVYU422)
(defn pixfmt->value
  "Return the numeric FFmpeg pixel-format value for `pixfmt`.
  Thin delegation to avclj.av-pixfmt-data/pixfmt->value."
  [pixfmt]
  (avclj.av-pixfmt-data/pixfmt->value pixfmt))
(def pixfmt-name-value-map avclj.av-pixfmt-data/pixfmt-name-value-map)
(def pixfmt-value-name-multi-map avclj.av-pixfmt-data/pixfmt-value-name-multi-map)
(defn value->pixfmt
  "Return the pixel-format name corresponding to numeric value `pixval`.
  Thin delegation to avclj.av-pixfmt-data/value->pixfmt."
  [pixval]
  (avclj.av-pixfmt-data/value->pixfmt pixval))
| |
873640d609b2b13ca408d0c6e3a09bffbe8f8222e623bc96375dbf2782662d9f | matsen/pplacer | r_plots.ml |
open Ppatteries
open Multiset
(* the percent extra to stretch the x limits *)
let relax_factor = 0.05
(* Integer division with a float result. *)
let int_div x y = (float_of_int x) /. (float_of_int y)
(* Smallest element of a list (List.reduce: not defined for an empty list). *)
let min_list l = List.reduce min l
(* Largest element of a list (List.reduce: not defined for an empty list). *)
let max_list l = List.reduce max l
(* Lower x-axis limit: minimum distance stretched down by relax_factor. *)
let min_x all_dists = (1. -. relax_factor) *. (min_list all_dists)
(* Upper x-axis limit: maximum distance stretched up by relax_factor. *)
let max_x all_dists = (1. +. relax_factor) *. (max_list all_dists)
(* density shows where the sample sits in the shuffled distances *)
(* Write <prefix>.dat containing the shuffled (null) distances, plus
   <prefix>.R, an R script that plots their density with the observed
   sample distance marked as a vertical red line. Side effects only. *)
let write_density p type_str name1 name2 sample_dist null_dists =
  (* the data: one null-distribution distance per line *)
  let prefix = type_str^"."^name1^".VS."^name2 in
  let dat_name = prefix^".dat" in
  let dat_ch = open_out dat_name in
  List.iter
    (fun x -> Printf.fprintf dat_ch "%g\n" x)
    null_dists;
  close_out dat_ch;
  (* the r file: density plot; x-limits include the observed distance *)
  let r_ch = open_out (prefix^".R") in
  Printf.fprintf r_ch "pdf(\"%s\")\n" (prefix^".pdf");
  Printf.fprintf r_ch "data <- read.table(\"%s\")\n" dat_name;
  let all_dists = sample_dist::null_dists in
  Printf.fprintf r_ch
    "plot(density(data[,1]), main=\"d(%s,%s) = %f\", xlab=expression(KR~Z[%g]~distance), xlim=c(%g,%g))\n"
    name1 name2 sample_dist p (min_x all_dists) (max_x all_dists);
  Printf.fprintf r_ch "abline(v=%g, col=\"red\")\n" sample_dist;
  Printf.fprintf r_ch "dev.off()\n";
  close_out r_ch;
  ()
| null | https://raw.githubusercontent.com/matsen/pplacer/f40a363e962cca7131f1f2d372262e0081ff1190/pplacer_src/r_plots.ml | ocaml | the percent extra to stretch the x limits
density shows where the sample sits in the shuffled distances
the data
the r file |
open Ppatteries
open Multiset
let relax_factor = 0.05
let int_div x y = (float_of_int x) /. (float_of_int y)
let min_list l = List.reduce min l
let max_list l = List.reduce max l
let min_x all_dists = (1. -. relax_factor) *. (min_list all_dists)
let max_x all_dists = (1. +. relax_factor) *. (max_list all_dists)
let write_density p type_str name1 name2 sample_dist null_dists =
let prefix = type_str^"."^name1^".VS."^name2 in
let dat_name = prefix^".dat" in
let dat_ch = open_out dat_name in
List.iter
(fun x -> Printf.fprintf dat_ch "%g\n" x)
null_dists;
close_out dat_ch;
let r_ch = open_out (prefix^".R") in
Printf.fprintf r_ch "pdf(\"%s\")\n" (prefix^".pdf");
Printf.fprintf r_ch "data <- read.table(\"%s\")\n" dat_name;
let all_dists = sample_dist::null_dists in
Printf.fprintf r_ch
"plot(density(data[,1]), main=\"d(%s,%s) = %f\", xlab=expression(KR~Z[%g]~distance), xlim=c(%g,%g))\n"
name1 name2 sample_dist p (min_x all_dists) (max_x all_dists);
Printf.fprintf r_ch "abline(v=%g, col=\"red\")\n" sample_dist;
Printf.fprintf r_ch "dev.off()\n";
close_out r_ch;
()
|
6389b95babf785153c5cf8b597d5e3fa91c668d62ef184843eadad42ccf3e8a4 | evturn/haskellbook | 18.02-bind.hs | import Control.Monad (join)
-- Write `bind` in terms of `fmap` and `join`.
bind :: Monad m => (a -> m b) -> m a -> m b
bind f x = join $ f <$> x
| null | https://raw.githubusercontent.com/evturn/haskellbook/3d310d0ddd4221ffc5b9fd7ec6476b2a0731274a/18/18.02-bind.hs | haskell | Write `bind` in terms of `fmap` and `join`. | import Control.Monad (join)
bind :: Monad m => (a -> m b) -> m a -> m b
bind f x = join $ f <$> x
|
279bd2f6f3504d1d623c23b6cae80d7e159ad83a1e22ae23c570fba125513e80 | brownplt/lambda-py | lambdapy-test-skull.rkt | #lang racket
(require "lambdapy-test-util.rkt")
;; Reading an uninitialized (undefined-val) local must signal an error.
(full-expect
 ((let (x local = (undefined-val)) in
    (id x local)) () ())
 ((err val_err) ε Σ))
;; After assignment, reading the local yields the assigned value.
(full-expect
 ((let (x local = (undefined-val)) in
    (seq
     (assign (id x local) := (sym "testval"))
     (id x local)))
  ()
  ())
 ((sym "testval") ε Σ))
;; Deleting a local returns it to the undefined state, so a read errors again.
(full-expect
 ((let (x local = (undefined-val)) in
    (seq
     (assign (id x local) := (sym "testval"))
     (seq
      (delete (id x local))
      (id x local))))
  ()
  ())
 ((err val_err) ε Σ))
;; Deleting a global drops its binding from the environment while the store
;; location is overwritten with the undefined marker.
(full-expect
 ((delete (id x global))
  {(x 5)} {(5 vnone)})
 (val_none () {(5 val_none)}))
;; (undefined-val) must NOT match the expression nonterminal e ...
(when (redex-match λπ e (term (undefined-val)))
  (error 'redex-tests "Undefined-val was an expression"))
;; ... but it MUST match the extended nonterminal e+undef.
(when (not (redex-match λπ e+undef (term (undefined-val))))
  (error 'redex-tests "Undefined-val was an expression"))
;; Nor may (undefined-val) appear nested inside an expression context.
(when (redex-match λπ (get-attr e e) (term (get-attr (undefined-val) vnone)))
  (error 'redex-tests "Undefined-val was a (nested) expression"))
| null | https://raw.githubusercontent.com/brownplt/lambda-py/c3ee39502c8953d36b886e5a203f2eb51d2f495b/redex/lambdapy-test-skull.rkt | racket | #lang racket
(require "lambdapy-test-util.rkt")
(full-expect
((let (x local = (undefined-val)) in
(id x local)) () ())
((err val_err) ε Σ))
(full-expect
((let (x local = (undefined-val)) in
(seq
(assign (id x local) := (sym "testval"))
(id x local)))
()
())
((sym "testval") ε Σ))
(full-expect
((let (x local = (undefined-val)) in
(seq
(assign (id x local) := (sym "testval"))
(seq
(delete (id x local))
(id x local))))
()
())
((err val_err) ε Σ))
(full-expect
((delete (id x global))
{(x 5)} {(5 vnone)})
(val_none () {(5 val_none)}))
(when (redex-match λπ e (term (undefined-val)))
(error 'redex-tests "Undefined-val was an expression"))
(when (not (redex-match λπ e+undef (term (undefined-val))))
(error 'redex-tests "Undefined-val was an expression"))
(when (redex-match λπ (get-attr e e) (term (get-attr (undefined-val) vnone)))
(error 'redex-tests "Undefined-val was a (nested) expression"))
| |
0d7e9e7358a52c6aa36cb649956572651822f2d12a9f6563ab91163e616957c2 | Clozure/ccl-tests | make-broadcast-stream.lsp | ;-*- Mode: Lisp -*-
;;;; Author:
;;;; Created:  Thu Jan 29 21:28:25 2004
;;;; Contains: Tests of MAKE-BROADCAST-STREAM
(in-package :cl-test)
;; 1. A broadcast stream with no components is still an open output stream;
;;    characters written to it are simply discarded.
(deftest make-broadcast-stream.1
  (let ((s (make-broadcast-stream)))
    (assert (typep s 'stream))
    (assert (typep s 'broadcast-stream))
    (assert (output-stream-p s))
    ;; (assert (not (input-stream-p s)))
    (assert (open-stream-p s))
    (assert (streamp s))
    ;; (assert (eq (stream-element-type s) t))
    (values
     (notnot (typep s 'stream))
     (notnot (typep s 'broadcast-stream))
     (notnot (output-stream-p s))
     (progn (write-char #\x s) nil)
     ))
  t t t nil)
;; 2. With one component, output is forwarded to it and the element type
;;    is that of the component.
(deftest make-broadcast-stream.2
  (with-output-to-string
    (s1)
    (let ((s (make-broadcast-stream s1)))
      (assert (typep s 'stream))
      (assert (typep s 'broadcast-stream))
      (assert (output-stream-p s))
      ;; (assert (not (input-stream-p s)))
      (assert (open-stream-p s))
      (assert (streamp s))
      (assert (eql (stream-element-type s)
                   (stream-element-type s1)))
      (write-char #\x s)))
  "x")
;; 3. With two components, output goes to every component; the element type
;;    is taken from the last one.
(deftest make-broadcast-stream.3
  (let ((s1 (make-string-output-stream))
        (s2 (make-string-output-stream)))
    (let ((s (make-broadcast-stream s1 s2)))
      (assert (typep s 'stream))
      (assert (typep s 'broadcast-stream))
      (assert (output-stream-p s))
      ;; (assert (not (input-stream-p s)))
      (assert (open-stream-p s))
      (assert (streamp s))
      (assert (eql (stream-element-type s)
                   (stream-element-type s2)))
      (format s "This is a test"))
    (values
     (get-output-stream-string s1)
     (get-output-stream-string s2)))
  "This is a test"
  "This is a test")
;; 4.-8. A broadcast stream with no components returns the standard defaults
;; pinned below: FRESH-LINE -> nil, FILE-LENGTH -> 0, FILE-POSITION -> 0,
;; FILE-STRING-LENGTH -> 1, STREAM-EXTERNAL-FORMAT -> :default.
(deftest make-broadcast-stream.4
  (fresh-line (make-broadcast-stream))
  nil)
(deftest make-broadcast-stream.5
  (file-length (make-broadcast-stream))
  0)
(deftest make-broadcast-stream.6
  (file-position (make-broadcast-stream))
  0)
(deftest make-broadcast-stream.7
  (file-string-length (make-broadcast-stream) "antidisestablishmentarianism")
  1)
(deftest make-broadcast-stream.8
  (stream-external-format (make-broadcast-stream))
  :default)
;;; FIXME
;;; Add tests for: close,
;;; peek-char, read-char-no-hang, terpri, fresh-line, unread-char,
;;; read-line, write-line, write-string, read-sequence, write-sequence,
;;; read-byte, write-byte, listen, clear-input, finish-output, force-output,
;;; clear-output, print, prin1 princ
;;; Error tests
;; Arguments must be output streams; other values must signal TYPE-ERROR.
(deftest make-broadcast-stream.error.1
  (check-type-error #'make-broadcast-stream
                    #'(lambda (x) (and (streamp x) (output-stream-p x))))
  nil)
;; Same check driven by the suite's standard collection of test streams.
(deftest make-broadcast-stream.error.2
  (check-type-error #'make-broadcast-stream
                    #'(lambda (x) (and (streamp x) (output-stream-p x)))
                    *streams*)
  nil)
| null | https://raw.githubusercontent.com/Clozure/ccl-tests/0478abddb34dbc16487a1975560d8d073a988060/ansi-tests/make-broadcast-stream.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests of MAKE-BROADCAST-STREAM
(assert (not (input-stream-p s)))
(assert (eq (stream-element-type s) t))
(assert (not (input-stream-p s)))
(assert (not (input-stream-p s)))
Add tests for: close,
peek-char, read-char-no-hang, terpri, fresh-line, unread-char,
read-line, write-line, write-string, read-sequence, write-sequence,
read-byte, write-byte, listen, clear-input, finish-output, force-output,
Error tests | Author :
Created : Thu Jan 29 21:28:25 2004
(in-package :cl-test)
(deftest make-broadcast-stream.1
(let ((s (make-broadcast-stream)))
(assert (typep s 'stream))
(assert (typep s 'broadcast-stream))
(assert (output-stream-p s))
(assert (open-stream-p s))
(assert (streamp s))
(values
(notnot (typep s 'stream))
(notnot (typep s 'broadcast-stream))
(notnot (output-stream-p s))
(progn (write-char #\x s) nil)
))
t t t nil)
(deftest make-broadcast-stream.2
(with-output-to-string
(s1)
(let ((s (make-broadcast-stream s1)))
(assert (typep s 'stream))
(assert (typep s 'broadcast-stream))
(assert (output-stream-p s))
(assert (open-stream-p s))
(assert (streamp s))
(assert (eql (stream-element-type s)
(stream-element-type s1)))
(write-char #\x s)))
"x")
(deftest make-broadcast-stream.3
(let ((s1 (make-string-output-stream))
(s2 (make-string-output-stream)))
(let ((s (make-broadcast-stream s1 s2)))
(assert (typep s 'stream))
(assert (typep s 'broadcast-stream))
(assert (output-stream-p s))
(assert (open-stream-p s))
(assert (streamp s))
(assert (eql (stream-element-type s)
(stream-element-type s2)))
(format s "This is a test"))
(values
(get-output-stream-string s1)
(get-output-stream-string s2)))
"This is a test"
"This is a test")
(deftest make-broadcast-stream.4
(fresh-line (make-broadcast-stream))
nil)
(deftest make-broadcast-stream.5
(file-length (make-broadcast-stream))
0)
(deftest make-broadcast-stream.6
(file-position (make-broadcast-stream))
0)
(deftest make-broadcast-stream.7
(file-string-length (make-broadcast-stream) "antidisestablishmentarianism")
1)
(deftest make-broadcast-stream.8
(stream-external-format (make-broadcast-stream))
:default)
FIXME
clear - output , print , prin1 princ
(deftest make-broadcast-stream.error.1
(check-type-error #'make-broadcast-stream
#'(lambda (x) (and (streamp x) (output-stream-p x))))
nil)
(deftest make-broadcast-stream.error.2
(check-type-error #'make-broadcast-stream
#'(lambda (x) (and (streamp x) (output-stream-p x)))
*streams*)
nil)
|
0b91cd8ab441cd7b16004a0c547b80105034134366eae48b348e21cb812b7e4d | hiroshi-unno/coar | synthesizer.ml | open Core
open Common
open Common.Util
open PCSatCommon
module Config = struct
type strategy =
| Template of TBSynthesizer.Config.t (** configuration for TBSynthesizer *)
* configuration for CBSynthesizer [ DT / GSC / SCQM / LTB ]
* configuration for PASynthesizer [ PASAT / PASID ]
[@@ deriving yojson]
type t = {
verbose: bool;
strategy: strategy;
check_candidates: bool; (** check whether candidates satisfy the examples *)
refine_candidates: bool; (** refine candidates until they satisfy the examples *)
} [@@ deriving yojson]
let instantiate_ext_files cfg = let open Or_error in
match cfg.strategy with
| Template strategy_cfg ->
TBSynthesizer.Config.instantiate_ext_files strategy_cfg >>= fun strategy_cfg ->
Ok { cfg with strategy = Template strategy_cfg }
| Classification strategy_cfg ->
CBSynthesizer.Config.instantiate_ext_files strategy_cfg >>= fun strategy_cfg ->
Ok { cfg with strategy = Classification strategy_cfg }
| PredicateAbstraction strategy_cfg ->
PASynthesizer.Config.instantiate_ext_files strategy_cfg >>= fun strategy_cfg ->
Ok { cfg with strategy = PredicateAbstraction strategy_cfg }
let load_ext_file = function
| ExtFile.Filename filename ->
begin
let open Or_error in
try_with (fun () -> Yojson.Safe.from_file filename)
>>= fun raw_json ->
match of_yojson raw_json with
| Ok x ->
instantiate_ext_files x >>= fun x ->
Ok (ExtFile.Instance x)
| Error msg ->
error_string @@ Printf.sprintf
"Invalid Synthesizer Configuration (%s): %s" filename msg
end
| Instance x -> Ok (ExtFile.Instance x)
module type ConfigType = sig val config : t end
end
module type SynthesizerType = sig
val run_phase : int -> State.u -> State.s Or_error.t
end
module Make (RLCfg: RLConfig.ConfigType) (Cfg: Config.ConfigType) (Problem: PCSP.Problem.ProblemType): SynthesizerType = struct
let config = Cfg.config
let id = PCSP.Problem.id_of Problem.problem
module CandidateChecker =
CandidateChecker.Make (val (Debug.Config.(if config.verbose then enable else disable))) (Problem)
module Debug = Debug.Make (val Debug.Config.(if config.verbose then enable else disable))
let _ = Debug.set_id id
module Synthesizer =
(val (match config.strategy with
| Config.Template cfg ->
(module TBSynthesizer.Make (RLCfg) (struct let config = cfg end) (Problem)
: SynthesizerType)
| Classification cfg ->
(module CBSynthesizer.Make (struct let config = cfg end) (Problem)
: SynthesizerType)
| PredicateAbstraction cfg ->
(module PASynthesizer.Make (struct let config = cfg end) (Problem)
: SynthesizerType)))
let check_candidates e =
if config.check_candidates && not config.refine_candidates then
let open State.Monad_infix in
Debug.print @@ lazy "** checking whether the candidates satisfy the examples";
Ok e >>=? fun vs cands ->
match ExClauseSet.check_candidates
~id
~inst:true (VersionSpace.fenv_of vs)
(PCSP.Problem.senv_of Problem.problem)
(VersionSpace.examples_of vs)
(List.map ~f:fst cands) with
| None -> Ok e
| Some (cand, clause) ->
Debug.print @@ lazy "The candidate\n";
Debug.print @@ lazy (Ast.CandSol.str_of cand);
Debug.print @@ lazy "\nviolates the following example\n";
Debug.print @@ lazy (ExClause.str_of clause);
Debug.print @@ lazy "\nThis may be a bug of the synthesizer.";
(* Or_error.error_string "Error in Synthesizer.check_candidates" *)
Ok e
else Ok e
let run_phase iters e =
if RLCfg.config.enable then begin
(if RLCfg.config.show_examples then
let examples = State.pos_neg_und_examples_of e in
Out_channel.print_endline (Printf.sprintf "examples: %s" (Yojson.Safe.to_string @@ VersionSpace.to_yojson examples)));
if RLCfg.config.show_elapsed_time then
Out_channel.print_endline "begin synthesizer";
let tm = Timer.make () in
let open Or_error in
let res = Synthesizer.run_phase iters e >>= check_candidates in
if RLCfg.config.show_elapsed_time then
Out_channel.print_endline (Format.sprintf "end synthesizer: %f" (tm ()));
res
end else Synthesizer.run_phase iters e
let rec refine_cands iters e = let open Or_error.Monad_infix in
run_phase iters e >>= function
| State.Continue (vs, cands) ->
begin match CandidateChecker.check_candidates vs (List.map ~f:fst cands) with
| `Unsat -> Ok (State.Unsat)
| `Valid ->
Debug.print @@ lazy " The new candidate is valid.";
Ok (State.Continue (vs, cands))
| `Invalid (pos, neg, und) ->
Debug.print @@ lazy " The new candidate is invalid, restart synthesizer.";
let new_examples =
Set.Poly.union_list [pos; neg; und]
|> Set.Poly.map ~f:(fun (ex, srcs) ->
ex, List.map srcs ~f:(fun c -> ClauseGraph.mk_example c, true)) in
refine_cands iters @@ State.of_examples vs new_examples
end
| _ -> assert false
let run_phase iters e =
if config.refine_candidates then refine_cands iters e else run_phase iters e
end
let make rl_config config problem =
(module Make
(struct let config = rl_config end)
(struct let config = config end)
(struct let problem = problem end) : SynthesizerType)
| null | https://raw.githubusercontent.com/hiroshi-unno/coar/90a23a09332c68f380efd4115b3f6fdc825f413d/lib/PCSat/synthesis/synthesizer.ml | ocaml | * configuration for TBSynthesizer
* check whether candidates satisfy the examples
* refine candidates until they satisfy the examples
Or_error.error_string "Error in Synthesizer.check_candidates" | open Core
open Common
open Common.Util
open PCSatCommon
module Config = struct
type strategy =
* configuration for CBSynthesizer [ DT / GSC / SCQM / LTB ]
* configuration for PASynthesizer [ PASAT / PASID ]
[@@ deriving yojson]
type t = {
verbose: bool;
strategy: strategy;
} [@@ deriving yojson]
let instantiate_ext_files cfg = let open Or_error in
match cfg.strategy with
| Template strategy_cfg ->
TBSynthesizer.Config.instantiate_ext_files strategy_cfg >>= fun strategy_cfg ->
Ok { cfg with strategy = Template strategy_cfg }
| Classification strategy_cfg ->
CBSynthesizer.Config.instantiate_ext_files strategy_cfg >>= fun strategy_cfg ->
Ok { cfg with strategy = Classification strategy_cfg }
| PredicateAbstraction strategy_cfg ->
PASynthesizer.Config.instantiate_ext_files strategy_cfg >>= fun strategy_cfg ->
Ok { cfg with strategy = PredicateAbstraction strategy_cfg }
let load_ext_file = function
| ExtFile.Filename filename ->
begin
let open Or_error in
try_with (fun () -> Yojson.Safe.from_file filename)
>>= fun raw_json ->
match of_yojson raw_json with
| Ok x ->
instantiate_ext_files x >>= fun x ->
Ok (ExtFile.Instance x)
| Error msg ->
error_string @@ Printf.sprintf
"Invalid Synthesizer Configuration (%s): %s" filename msg
end
| Instance x -> Ok (ExtFile.Instance x)
module type ConfigType = sig val config : t end
end
module type SynthesizerType = sig
val run_phase : int -> State.u -> State.s Or_error.t
end
module Make (RLCfg: RLConfig.ConfigType) (Cfg: Config.ConfigType) (Problem: PCSP.Problem.ProblemType): SynthesizerType = struct
let config = Cfg.config
let id = PCSP.Problem.id_of Problem.problem
module CandidateChecker =
CandidateChecker.Make (val (Debug.Config.(if config.verbose then enable else disable))) (Problem)
module Debug = Debug.Make (val Debug.Config.(if config.verbose then enable else disable))
let _ = Debug.set_id id
module Synthesizer =
(val (match config.strategy with
| Config.Template cfg ->
(module TBSynthesizer.Make (RLCfg) (struct let config = cfg end) (Problem)
: SynthesizerType)
| Classification cfg ->
(module CBSynthesizer.Make (struct let config = cfg end) (Problem)
: SynthesizerType)
| PredicateAbstraction cfg ->
(module PASynthesizer.Make (struct let config = cfg end) (Problem)
: SynthesizerType)))
let check_candidates e =
if config.check_candidates && not config.refine_candidates then
let open State.Monad_infix in
Debug.print @@ lazy "** checking whether the candidates satisfy the examples";
Ok e >>=? fun vs cands ->
match ExClauseSet.check_candidates
~id
~inst:true (VersionSpace.fenv_of vs)
(PCSP.Problem.senv_of Problem.problem)
(VersionSpace.examples_of vs)
(List.map ~f:fst cands) with
| None -> Ok e
| Some (cand, clause) ->
Debug.print @@ lazy "The candidate\n";
Debug.print @@ lazy (Ast.CandSol.str_of cand);
Debug.print @@ lazy "\nviolates the following example\n";
Debug.print @@ lazy (ExClause.str_of clause);
Debug.print @@ lazy "\nThis may be a bug of the synthesizer.";
Ok e
else Ok e
let run_phase iters e =
if RLCfg.config.enable then begin
(if RLCfg.config.show_examples then
let examples = State.pos_neg_und_examples_of e in
Out_channel.print_endline (Printf.sprintf "examples: %s" (Yojson.Safe.to_string @@ VersionSpace.to_yojson examples)));
if RLCfg.config.show_elapsed_time then
Out_channel.print_endline "begin synthesizer";
let tm = Timer.make () in
let open Or_error in
let res = Synthesizer.run_phase iters e >>= check_candidates in
if RLCfg.config.show_elapsed_time then
Out_channel.print_endline (Format.sprintf "end synthesizer: %f" (tm ()));
res
end else Synthesizer.run_phase iters e
let rec refine_cands iters e = let open Or_error.Monad_infix in
run_phase iters e >>= function
| State.Continue (vs, cands) ->
begin match CandidateChecker.check_candidates vs (List.map ~f:fst cands) with
| `Unsat -> Ok (State.Unsat)
| `Valid ->
Debug.print @@ lazy " The new candidate is valid.";
Ok (State.Continue (vs, cands))
| `Invalid (pos, neg, und) ->
Debug.print @@ lazy " The new candidate is invalid, restart synthesizer.";
let new_examples =
Set.Poly.union_list [pos; neg; und]
|> Set.Poly.map ~f:(fun (ex, srcs) ->
ex, List.map srcs ~f:(fun c -> ClauseGraph.mk_example c, true)) in
refine_cands iters @@ State.of_examples vs new_examples
end
| _ -> assert false
let run_phase iters e =
if config.refine_candidates then refine_cands iters e else run_phase iters e
end
let make rl_config config problem =
(module Make
(struct let config = rl_config end)
(struct let config = config end)
(struct let problem = problem end) : SynthesizerType)
|
aca1db8b79fb717b108ee420b41e0d6ab9f1f0ad0afb106861a2ed2b549e69a0 | orionsbelt-battlegrounds/obb-rules | raptor.cljc | (ns obb-rules.units.raptor)
(def metadata
{:name "raptor"
:code "rp"
:attack 280
:defense 400
:range 2
:value 20
:bonus {:attack {:category {:light 1000}}}
:type :mechanic
:category :light
:displacement :air
:movement-type :all
:movement-cost 1})
| null | https://raw.githubusercontent.com/orionsbelt-battlegrounds/obb-rules/97fad6506eb81142f74f4722aca58b80d618bf45/src/obb_rules/units/raptor.cljc | clojure | (ns obb-rules.units.raptor)
(def metadata
{:name "raptor"
:code "rp"
:attack 280
:defense 400
:range 2
:value 20
:bonus {:attack {:category {:light 1000}}}
:type :mechanic
:category :light
:displacement :air
:movement-type :all
:movement-cost 1})
| |
848c8d16c89b524d3e97250e137e0e0753495206bab908768ea3a1cc1bcc32dc | gafiatulin/codewars | Employee.hs | -- Disgruntled Employee
-- /
module Codewars.Kata.Employee where
off :: Integer -> [Integer]
off n = takeWhile (<=n) . map (^2) $ [1..]
| null | https://raw.githubusercontent.com/gafiatulin/codewars/535db608333e854be93ecfc165686a2162264fef/src/6%20kyu/Employee.hs | haskell | Disgruntled Employee
/ |
module Codewars.Kata.Employee where
off :: Integer -> [Integer]
off n = takeWhile (<=n) . map (^2) $ [1..]
|
e5ec4704b8c8c7548d477700eae267bbef51c02689c37461f6332b879b821450 | turnbullpress/aom-code | collectd.clj | (ns examplecom.etc.collectd
(:require [clojure.tools.logging :refer :all]
[riemann.streams :refer :all]
[clojure.string :as str]
[clojure.walk :as walk]))
(defn docker-attribute-map
[attributes]
(let [instance (str/split (str/replace attributes #"^.*\[(.*)\]$" "$1") #",")]
(walk/keywordize-keys (into {} (for [pair instance] (apply hash-map (str/split pair #"=")))))))
(defn docker-attributes
[{:keys [plugin_instance] :as event}]
(if-let [attributes (re-find #"^.*\[.*\]$" plugin_instance)]
(merge event (docker-attribute-map attributes))
event))
(defn parse-docker-service-host
[{:keys [type type_instance plugin_instance] :as event}]
(let [host (re-find #"^\w+\.?\w+\.?\w+" (:plugin_instance event))
service (cond-> (str (:type event)) (:type_instance event) (str "." (:type_instance event)))]
(assoc event :service service :host host)))
(def default-services
[{:service #"^load/load/(.*)$" :rewrite "load $1"}
{:service #"^swap/percent-(.*)$" :rewrite "swap $1"}
{:service #"^memory/percent-(.*)$" :rewrite "memory $1"}
{:service #"^processes/ps_state-(.*)$" :rewrite "processes $1"}
{:service #"^processes-(.*)/(.*)$" :rewrite "processes $1 $2"}
{:service #"^cpu/percent-(.*)$" :rewrite "cpu $1"}
{:service #"^df-(.*)/(df_complex|percent_bytes)-(.*)$" :rewrite "df $1 $2 $3"}
{:service #"^interface-(.*)/if_(errors|packets|octets)/(tx|rx)$" :rewrite "nic $1 $3 $2"}
{:service #"^protocols-(.*)/(.*)$" :rewrite "protocols $1 $2"}
{:service #"^GenericJMX-(:?_|\/)?(.*)$" :rewrite "jmx $2"}
{:service #"^haproxy\/(gauge|derive)-(.*)$" :rewrite "haproxy $2"}
{:service #"^statsd\/(gauge|derive|latency)-(.*)$" :rewrite "$2"}
{:service #"^statsd\/(gauge|derive|latency)-(.*)$" :rewrite "statsd $1 $2"}
{:service #"^mysql-(.*)\/(counter|gauge)-(.*)$" :rewrite "mysql $1 $3"}
{:service #"^dbi-(.*)\/(gauge|counter)-(.*)$" :rewrite "dbi $1 $3"}
{:service #"^redis-(.*)$" :rewrite "redis $1"}])
(defn rewrite-service-with
[rules]
(let [matcher (fn [s1 s2] (if (string? s1) (= s1 s2) (re-find s1 s2)))]
(fn [{:keys [service] :as event}]
(or
(first
(for [{:keys [rewrite] :as rule} rules
:when (matcher (:service rule) service)]
(assoc event :service
(if (string? (:service rule))
rewrite
(str/replace service (:service rule) rewrite)))))
event))))
(def rewrite-service
(rewrite-service-with default-services))
| null | https://raw.githubusercontent.com/turnbullpress/aom-code/2c016cab87d81bcd1f04ab41b6824798eb09e780/9/riemann/examplecom/etc/collectd.clj | clojure | (ns examplecom.etc.collectd
(:require [clojure.tools.logging :refer :all]
[riemann.streams :refer :all]
[clojure.string :as str]
[clojure.walk :as walk]))
(defn docker-attribute-map
[attributes]
(let [instance (str/split (str/replace attributes #"^.*\[(.*)\]$" "$1") #",")]
(walk/keywordize-keys (into {} (for [pair instance] (apply hash-map (str/split pair #"=")))))))
(defn docker-attributes
[{:keys [plugin_instance] :as event}]
(if-let [attributes (re-find #"^.*\[.*\]$" plugin_instance)]
(merge event (docker-attribute-map attributes))
event))
(defn parse-docker-service-host
[{:keys [type type_instance plugin_instance] :as event}]
(let [host (re-find #"^\w+\.?\w+\.?\w+" (:plugin_instance event))
service (cond-> (str (:type event)) (:type_instance event) (str "." (:type_instance event)))]
(assoc event :service service :host host)))
(def default-services
[{:service #"^load/load/(.*)$" :rewrite "load $1"}
{:service #"^swap/percent-(.*)$" :rewrite "swap $1"}
{:service #"^memory/percent-(.*)$" :rewrite "memory $1"}
{:service #"^processes/ps_state-(.*)$" :rewrite "processes $1"}
{:service #"^processes-(.*)/(.*)$" :rewrite "processes $1 $2"}
{:service #"^cpu/percent-(.*)$" :rewrite "cpu $1"}
{:service #"^df-(.*)/(df_complex|percent_bytes)-(.*)$" :rewrite "df $1 $2 $3"}
{:service #"^interface-(.*)/if_(errors|packets|octets)/(tx|rx)$" :rewrite "nic $1 $3 $2"}
{:service #"^protocols-(.*)/(.*)$" :rewrite "protocols $1 $2"}
{:service #"^GenericJMX-(:?_|\/)?(.*)$" :rewrite "jmx $2"}
{:service #"^haproxy\/(gauge|derive)-(.*)$" :rewrite "haproxy $2"}
{:service #"^statsd\/(gauge|derive|latency)-(.*)$" :rewrite "$2"}
{:service #"^statsd\/(gauge|derive|latency)-(.*)$" :rewrite "statsd $1 $2"}
{:service #"^mysql-(.*)\/(counter|gauge)-(.*)$" :rewrite "mysql $1 $3"}
{:service #"^dbi-(.*)\/(gauge|counter)-(.*)$" :rewrite "dbi $1 $3"}
{:service #"^redis-(.*)$" :rewrite "redis $1"}])
(defn rewrite-service-with
[rules]
(let [matcher (fn [s1 s2] (if (string? s1) (= s1 s2) (re-find s1 s2)))]
(fn [{:keys [service] :as event}]
(or
(first
(for [{:keys [rewrite] :as rule} rules
:when (matcher (:service rule) service)]
(assoc event :service
(if (string? (:service rule))
rewrite
(str/replace service (:service rule) rewrite)))))
event))))
(def rewrite-service
(rewrite-service-with default-services))
| |
2c332fc19eaeb9e23f811859d872fe9de11d31d77da2cae94d3af169c2f1418b | gren-lang/compiler | Build.hs | {-# LANGUAGE BangPatterns #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -Wno-unused-do-bind #-}
module Build
( fromExposed,
fromPaths,
fromRepl,
Artifacts (..),
Root (..),
Module (..),
CachedInterface (..),
ReplArtifacts (..),
DocsGoal (..),
getRootNames,
)
where
import AST.Canonical qualified as Can
import AST.Optimized qualified as Opt
import AST.Source qualified as Src
import AbsoluteSrcDir (AbsoluteSrcDir (..))
import AbsoluteSrcDir qualified
import Compile qualified
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar
import Control.Monad (filterM)
import Data.ByteString qualified as B
import Data.Char qualified as Char
import Data.Graph qualified as Graph
import Data.List qualified as List
import Data.Map.Strict ((!))
import Data.Map.Strict qualified as Map
import Data.Map.Utils qualified as Map
import Data.Maybe qualified as Maybe
import Data.Name qualified as Name
import Data.NonEmptyList qualified as NE
import Data.OneOrMore qualified as OneOrMore
import Data.Set qualified as Set
import Directories qualified as Dirs
import File qualified
import Gren.Details qualified as Details
import Gren.Docs qualified as Docs
import Gren.Interface qualified as I
import Gren.ModuleName qualified as ModuleName
import Gren.Outline qualified as Outline
import Gren.Package qualified as Pkg
import Gren.Platform qualified as P
import Json.Encode qualified as E
import Parse.Module qualified as Parse
import Reporting qualified
import Reporting.Annotation qualified as A
import Reporting.Error qualified as Error
import Reporting.Error.Docs qualified as EDocs
import Reporting.Error.Import qualified as Import
import Reporting.Error.Syntax qualified as Syntax
import Reporting.Exit qualified as Exit
import Reporting.Render.Type.Localizer qualified as L
import System.Directory qualified as Dir
import System.FilePath ((<.>), (</>))
import System.FilePath qualified as FP
-- ENVIRONMENT
data Env = Env
{ _key :: Reporting.BKey,
_root :: FilePath,
_project :: Parse.ProjectType,
_platform :: P.Platform,
_srcDirs :: [AbsoluteSrcDir],
_buildID :: Details.BuildID,
_locals :: Map.Map ModuleName.Raw Details.Local,
_foreigns :: Map.Map ModuleName.Raw Details.Foreign
}
makeEnv :: Reporting.BKey -> FilePath -> Details.Details -> IO Env
makeEnv key root (Details.Details _ validOutline buildID locals foreigns _) =
case validOutline of
Details.ValidApp platform givenSrcDirs ->
do
srcDirs <- traverse (Outline.toAbsoluteSrcDir root) (NE.toList givenSrcDirs)
return $ Env key root Parse.Application platform srcDirs buildID locals foreigns
Details.ValidPkg platform pkg _ ->
do
srcDir <- Outline.toAbsoluteSrcDir root (Outline.RelativeSrcDir "src")
return $ Env key root (Parse.Package pkg) platform [srcDir] buildID locals foreigns
-- FORK
-- PERF try using IORef semephore on file crawl phase?
described in Chapter 13 of Parallel and Concurrent Programming in Haskell by
-and-concurrent/9781449335939/ch13.html#sec_conc-par-overhead
--
fork :: IO a -> IO (MVar a)
fork work =
do
mvar <- newEmptyMVar
_ <- forkIO $ putMVar mvar =<< work
return mvar
forkWithKey :: (k -> a -> IO b) -> Map.Map k a -> IO (Map.Map k (MVar b))
forkWithKey func dict =
Map.traverseWithKey (\k v -> fork (func k v)) dict
-- FROM EXPOSED
fromExposed :: Reporting.Style -> FilePath -> Details.Details -> DocsGoal docs -> NE.List ModuleName.Raw -> IO (Either Exit.BuildProblem docs)
fromExposed style root details docsGoal exposed@(NE.List e es) =
Reporting.trackBuild style $ \key ->
do
env <- makeEnv key root details
dmvar <- Details.loadInterfaces root details
-- crawl
mvar <- newEmptyMVar
let docsNeed = toDocsNeed docsGoal
roots <- Map.fromKeysA (fork . crawlModule env mvar docsNeed) (e : es)
putMVar mvar roots
mapM_ readMVar roots
statuses <- traverse readMVar =<< readMVar mvar
-- compile
midpoint <- checkMidpoint dmvar statuses
case midpoint of
Left problem ->
return (Left (Exit.BuildProjectProblem problem))
Right foreigns ->
do
rmvar <- newEmptyMVar
resultMVars <- forkWithKey (checkModule env foreigns rmvar) statuses
putMVar rmvar resultMVars
results <- traverse readMVar resultMVars
writeDetails root details results
finalizeExposed root docsGoal exposed results
-- FROM PATHS
data Artifacts = Artifacts
{ _name :: Pkg.Name,
_deps :: Dependencies,
_roots :: NE.List Root,
_modules :: [Module]
}
data Module
= Fresh ModuleName.Raw I.Interface Opt.LocalGraph
| Cached ModuleName.Raw Bool (MVar CachedInterface)
type Dependencies =
Map.Map ModuleName.Canonical I.DependencyInterface
fromPaths :: Reporting.Style -> FilePath -> Details.Details -> NE.List FilePath -> IO (Either Exit.BuildProblem Artifacts)
fromPaths style root details paths =
Reporting.trackBuild style $ \key ->
do
env <- makeEnv key root details
elroots <- findRoots env paths
case elroots of
Left problem ->
return (Left (Exit.BuildProjectProblem problem))
Right lroots ->
do
-- crawl
dmvar <- Details.loadInterfaces root details
smvar <- newMVar Map.empty
srootMVars <- traverse (fork . crawlRoot env smvar) lroots
sroots <- traverse readMVar srootMVars
statuses <- traverse readMVar =<< readMVar smvar
midpoint <- checkMidpointAndRoots dmvar statuses sroots
case midpoint of
Left problem ->
return (Left (Exit.BuildProjectProblem problem))
Right foreigns ->
do
-- compile
rmvar <- newEmptyMVar
resultsMVars <- forkWithKey (checkModule env foreigns rmvar) statuses
putMVar rmvar resultsMVars
rrootMVars <- traverse (fork . checkRoot env resultsMVars) sroots
results <- traverse readMVar resultsMVars
writeDetails root details results
toArtifacts env foreigns results <$> traverse readMVar rrootMVars
-- GET ROOT NAMES
getRootNames :: Artifacts -> NE.List ModuleName.Raw
getRootNames (Artifacts _ _ roots _) =
fmap getRootName roots
getRootName :: Root -> ModuleName.Raw
getRootName root =
case root of
Inside name -> name
Outside name _ _ -> name
CRAWL
type StatusDict =
Map.Map ModuleName.Raw (MVar Status)
data Status
= SCached Details.Local
| SChanged Details.Local B.ByteString Src.Module DocsNeed
| SBadImport Import.Problem
| SBadSyntax FilePath File.Time B.ByteString Syntax.Error
| SForeign Pkg.Name
| SKernel
crawlDeps :: Env -> MVar StatusDict -> [ModuleName.Raw] -> a -> IO a
crawlDeps env mvar deps blockedValue =
do
statusDict <- takeMVar mvar
let depsDict = Map.fromKeys (\_ -> ()) deps
let newsDict = Map.difference depsDict statusDict
statuses <- Map.traverseWithKey crawlNew newsDict
putMVar mvar (Map.union statuses statusDict)
mapM_ readMVar statuses
return blockedValue
where
crawlNew name () = fork (crawlModule env mvar (DocsNeed False) name)
crawlModule :: Env -> MVar StatusDict -> DocsNeed -> ModuleName.Raw -> IO Status
crawlModule env@(Env _ root projectType _ srcDirs buildID locals foreigns) mvar docsNeed name =
do
let fileName = ModuleName.toFilePath name <.> "gren"
paths <- filterM File.exists (map (`AbsoluteSrcDir.addRelative` fileName) srcDirs)
case paths of
[path] ->
case Map.lookup name foreigns of
Just (Details.Foreign dep deps) ->
return $ SBadImport $ Import.Ambiguous path [] dep deps
Nothing ->
do
newTime <- File.getTime path
case Map.lookup name locals of
Nothing ->
crawlFile env mvar docsNeed name path newTime buildID
Just local@(Details.Local oldPath oldTime deps _ lastChange _) ->
if path /= oldPath || oldTime /= newTime || needsDocs docsNeed
then crawlFile env mvar docsNeed name path newTime lastChange
else crawlDeps env mvar deps (SCached local)
p1 : p2 : ps ->
return $ SBadImport $ Import.AmbiguousLocal (FP.makeRelative root p1) (FP.makeRelative root p2) (map (FP.makeRelative root) ps)
[] ->
case Map.lookup name foreigns of
Just (Details.Foreign dep deps) ->
case deps of
[] ->
return $ SForeign dep
d : ds ->
return $ SBadImport $ Import.AmbiguousForeign dep d ds
Nothing ->
if Name.isKernel name && Parse.isKernel projectType
then do
exists <- File.exists ("src" </> ModuleName.toFilePath name <.> "js")
return $ if exists then SKernel else SBadImport Import.NotFound
else return $ SBadImport Import.NotFound
crawlFile :: Env -> MVar StatusDict -> DocsNeed -> ModuleName.Raw -> FilePath -> File.Time -> Details.BuildID -> IO Status
crawlFile env@(Env _ root projectType _ _ buildID _ _) mvar docsNeed expectedName path time lastChange =
do
source <- File.readUtf8 (root </> path)
case Parse.fromByteString projectType source of
Left err ->
return $ SBadSyntax path time source err
Right modul@(Src.Module maybeActualName _ _ imports values _ _ _ _ _ _) ->
case maybeActualName of
Nothing ->
return $ SBadSyntax path time source (Syntax.ModuleNameUnspecified expectedName)
Just name@(A.At _ actualName) ->
if expectedName == actualName
then
let deps = map (Src.getImportName . snd) imports
local = Details.Local path time deps (any (isMain . snd) values) lastChange buildID
in crawlDeps env mvar deps (SChanged local source modul docsNeed)
else return $ SBadSyntax path time source (Syntax.ModuleNameMismatch expectedName name)
isMain :: A.Located Src.Value -> Bool
isMain (A.At _ (Src.Value (A.At _ name) _ _ _ _)) =
name == Name._main
-- CHECK MODULE
type ResultDict =
Map.Map ModuleName.Raw (MVar Result)
data Result
= RNew !Details.Local !I.Interface !Opt.LocalGraph !(Maybe Docs.Module)
| RSame !Details.Local !I.Interface !Opt.LocalGraph !(Maybe Docs.Module)
| RCached Bool Details.BuildID (MVar CachedInterface)
| RNotFound Import.Problem
| RProblem Error.Module
| RBlocked
| RForeign I.Interface
| RKernel
data CachedInterface
= Unneeded
| Loaded I.Interface
| Corrupted
checkModule :: Env -> Dependencies -> MVar ResultDict -> ModuleName.Raw -> Status -> IO Result
checkModule env@(Env _ root projectType _ _ _ _ _) foreigns resultsMVar name status =
case status of
SCached local@(Details.Local path time deps hasMain lastChange lastCompile) ->
do
results <- readMVar resultsMVar
depsStatus <- checkDeps root results deps lastCompile
case depsStatus of
DepsChange ifaces ->
do
source <- File.readUtf8 path
case Parse.fromByteString projectType source of
Right modul -> compile env (DocsNeed False) local source ifaces modul
Left err ->
return $
RProblem $
Error.Module name path time source (Error.BadSyntax err)
DepsSame _ _ ->
do
mvar <- newMVar Unneeded
return (RCached hasMain lastChange mvar)
DepsBlock ->
return RBlocked
DepsNotFound problems ->
do
source <- File.readUtf8 path
return $
RProblem $
Error.Module name path time source $
case Parse.fromByteString projectType source of
Right (Src.Module _ _ _ imports _ _ _ _ _ _ _) ->
Error.BadImports (toImportErrors env results imports problems)
Left err ->
Error.BadSyntax err
SChanged local@(Details.Local path time deps _ _ lastCompile) source modul@(Src.Module _ _ _ imports _ _ _ _ _ _ _) docsNeed ->
do
results <- readMVar resultsMVar
depsStatus <- checkDeps root results deps lastCompile
case depsStatus of
DepsChange ifaces ->
compile env docsNeed local source ifaces modul
DepsSame same cached ->
do
maybeLoaded <- loadInterfaces root same cached
case maybeLoaded of
Nothing -> return RBlocked
Just ifaces -> compile env docsNeed local source ifaces modul
DepsBlock ->
return RBlocked
DepsNotFound problems ->
return $
RProblem $
Error.Module name path time source $
Error.BadImports (toImportErrors env results imports problems)
SBadImport importProblem ->
return (RNotFound importProblem)
SBadSyntax path time source err ->
return $
RProblem $
Error.Module name path time source $
Error.BadSyntax err
SForeign home ->
case foreigns ! ModuleName.Canonical home name of
I.Public iface -> return (RForeign iface)
I.Private _ _ _ -> error $ "mistakenly seeing private interface for " ++ Pkg.toChars home ++ " " ++ ModuleName.toChars name
SKernel ->
return RKernel
CHECK DEPS
data DepsStatus
= DepsChange (Map.Map ModuleName.Raw I.Interface)
| DepsSame [Dep] [CDep]
| DepsBlock
| DepsNotFound (NE.List (ModuleName.Raw, Import.Problem))
checkDeps :: FilePath -> ResultDict -> [ModuleName.Raw] -> Details.BuildID -> IO DepsStatus
checkDeps root results deps lastCompile =
checkDepsHelp root results deps [] [] [] [] False 0 lastCompile
type Dep = (ModuleName.Raw, I.Interface)
type CDep = (ModuleName.Raw, MVar CachedInterface)
checkDepsHelp :: FilePath -> ResultDict -> [ModuleName.Raw] -> [Dep] -> [Dep] -> [CDep] -> [(ModuleName.Raw, Import.Problem)] -> Bool -> Details.BuildID -> Details.BuildID -> IO DepsStatus
checkDepsHelp root results deps new same cached importProblems isBlocked lastDepChange lastCompile =
case deps of
dep : otherDeps ->
do
result <- readMVar (results ! dep)
case result of
RNew (Details.Local _ _ _ _ lastChange _) iface _ _ ->
checkDepsHelp root results otherDeps ((dep, iface) : new) same cached importProblems isBlocked (max lastChange lastDepChange) lastCompile
RSame (Details.Local _ _ _ _ lastChange _) iface _ _ ->
checkDepsHelp root results otherDeps new ((dep, iface) : same) cached importProblems isBlocked (max lastChange lastDepChange) lastCompile
RCached _ lastChange mvar ->
checkDepsHelp root results otherDeps new same ((dep, mvar) : cached) importProblems isBlocked (max lastChange lastDepChange) lastCompile
RNotFound prob ->
checkDepsHelp root results otherDeps new same cached ((dep, prob) : importProblems) True lastDepChange lastCompile
RProblem _ ->
checkDepsHelp root results otherDeps new same cached importProblems True lastDepChange lastCompile
RBlocked ->
checkDepsHelp root results otherDeps new same cached importProblems True lastDepChange lastCompile
RForeign iface ->
checkDepsHelp root results otherDeps new ((dep, iface) : same) cached importProblems isBlocked lastDepChange lastCompile
RKernel ->
checkDepsHelp root results otherDeps new same cached importProblems isBlocked lastDepChange lastCompile
[] ->
case reverse importProblems of
p : ps ->
return $ DepsNotFound (NE.List p ps)
[] ->
if isBlocked
then return $ DepsBlock
else
if null new && lastDepChange <= lastCompile
then return $ DepsSame same cached
else do
maybeLoaded <- loadInterfaces root same cached
case maybeLoaded of
Nothing -> return DepsBlock
Just ifaces -> return $ DepsChange $ Map.union (Map.fromList new) ifaces
-- TO IMPORT ERROR
toImportErrors :: Env -> ResultDict -> [([Src.Comment], Src.Import)] -> NE.List (ModuleName.Raw, Import.Problem) -> NE.List Import.Error
toImportErrors (Env _ _ _ _ _ _ locals foreigns) results imports problems =
let knownModules =
Set.unions
[ Map.keysSet foreigns,
Map.keysSet locals,
Map.keysSet results
]
unimportedModules =
Set.difference knownModules (Set.fromList (map (Src.getImportName . snd) imports))
regionDict =
Map.fromList (map (\(_, Src.Import (A.At region name) _ _ _ _) -> (name, region)) imports)
toError (name, problem) =
Import.Error (regionDict ! name) name unimportedModules problem
in fmap toError problems
-- LOAD CACHED INTERFACES
loadInterfaces :: FilePath -> [Dep] -> [CDep] -> IO (Maybe (Map.Map ModuleName.Raw I.Interface))
loadInterfaces root same cached =
do
loading <- traverse (fork . loadInterface root) cached
maybeLoaded <- traverse readMVar loading
case sequence maybeLoaded of
Nothing ->
return Nothing
Just loaded ->
return $ Just $ Map.union (Map.fromList loaded) (Map.fromList same)
loadInterface :: FilePath -> CDep -> IO (Maybe Dep)
loadInterface root (name, ciMvar) =
do
cachedInterface <- takeMVar ciMvar
case cachedInterface of
Corrupted ->
do
putMVar ciMvar cachedInterface
return Nothing
Loaded iface ->
do
putMVar ciMvar cachedInterface
return (Just (name, iface))
Unneeded ->
do
maybeIface <- File.readBinary (Dirs.greni root name)
case maybeIface of
Nothing ->
do
putMVar ciMvar Corrupted
return Nothing
Just iface ->
do
putMVar ciMvar (Loaded iface)
return (Just (name, iface))
CHECK
checkMidpoint :: MVar (Maybe Dependencies) -> Map.Map ModuleName.Raw Status -> IO (Either Exit.BuildProjectProblem Dependencies)
checkMidpoint dmvar statuses =
case checkForCycles statuses of
Nothing ->
do
maybeForeigns <- readMVar dmvar
case maybeForeigns of
Nothing -> return (Left Exit.BP_CannotLoadDependencies)
Just fs -> return (Right fs)
Just (NE.List name names) ->
do
_ <- readMVar dmvar
return (Left (Exit.BP_Cycle name names))
checkMidpointAndRoots :: MVar (Maybe Dependencies) -> Map.Map ModuleName.Raw Status -> NE.List RootStatus -> IO (Either Exit.BuildProjectProblem Dependencies)
checkMidpointAndRoots dmvar statuses sroots =
case checkForCycles statuses of
Nothing ->
case checkUniqueRoots statuses sroots of
Nothing ->
do
maybeForeigns <- readMVar dmvar
case maybeForeigns of
Nothing -> return (Left Exit.BP_CannotLoadDependencies)
Just fs -> return (Right fs)
Just problem ->
do
_ <- readMVar dmvar
return (Left problem)
Just (NE.List name names) ->
do
_ <- readMVar dmvar
return (Left (Exit.BP_Cycle name names))
-- CHECK FOR CYCLES
checkForCycles :: Map.Map ModuleName.Raw Status -> Maybe (NE.List ModuleName.Raw)
checkForCycles modules =
let !graph = Map.foldrWithKey addToGraph [] modules
!sccs = Graph.stronglyConnComp graph
in checkForCyclesHelp sccs
checkForCyclesHelp :: [Graph.SCC ModuleName.Raw] -> Maybe (NE.List ModuleName.Raw)
checkForCyclesHelp sccs =
case sccs of
[] ->
Nothing
scc : otherSccs ->
case scc of
Graph.AcyclicSCC _ -> checkForCyclesHelp otherSccs
Graph.CyclicSCC [] -> checkForCyclesHelp otherSccs
Graph.CyclicSCC (m : ms) -> Just (NE.List m ms)
type Node =
(ModuleName.Raw, ModuleName.Raw, [ModuleName.Raw])
addToGraph :: ModuleName.Raw -> Status -> [Node] -> [Node]
addToGraph name status graph =
let dependencies =
case status of
SCached (Details.Local _ _ deps _ _ _) -> deps
SChanged (Details.Local _ _ deps _ _ _) _ _ _ -> deps
SBadImport _ -> []
SBadSyntax _ _ _ _ -> []
SForeign _ -> []
SKernel -> []
in (name, name, dependencies) : graph
-- CHECK UNIQUE ROOTS
checkUniqueRoots :: Map.Map ModuleName.Raw Status -> NE.List RootStatus -> Maybe Exit.BuildProjectProblem
checkUniqueRoots insides sroots =
let outsidesDict =
Map.fromListWith OneOrMore.more (Maybe.mapMaybe rootStatusToNamePathPair (NE.toList sroots))
in case Map.traverseWithKey checkOutside outsidesDict of
Left problem ->
Just problem
Right outsides ->
case sequence_ (Map.intersectionWithKey checkInside outsides insides) of
Right () -> Nothing
Left problem -> Just problem
rootStatusToNamePathPair :: RootStatus -> Maybe (ModuleName.Raw, OneOrMore.OneOrMore FilePath)
rootStatusToNamePathPair sroot =
case sroot of
SInside _ -> Nothing
SOutsideOk (Details.Local path _ _ _ _ _) _ modul -> Just (Src.getName modul, OneOrMore.one path)
SOutsideErr _ -> Nothing
checkOutside :: ModuleName.Raw -> OneOrMore.OneOrMore FilePath -> Either Exit.BuildProjectProblem FilePath
checkOutside name paths =
case OneOrMore.destruct NE.List paths of
NE.List p [] -> Right p
NE.List p1 (p2 : _) -> Left (Exit.BP_RootNameDuplicate name p1 p2)
checkInside :: ModuleName.Raw -> FilePath -> Status -> Either Exit.BuildProjectProblem ()
checkInside name p1 status =
case status of
SCached (Details.Local p2 _ _ _ _ _) -> Left (Exit.BP_RootNameDuplicate name p1 p2)
SChanged (Details.Local p2 _ _ _ _ _) _ _ _ -> Left (Exit.BP_RootNameDuplicate name p1 p2)
SBadImport _ -> Right ()
SBadSyntax _ _ _ _ -> Right ()
SForeign _ -> Right ()
SKernel -> Right ()
-- COMPILE MODULE
-- Compile a single local module against its dependencies' interfaces,
-- write the object graph to the cache, and compare the fresh interface
-- against the cached one to decide whether dependents must rebuild
-- (RNew) or can be left alone (RSame).
compile :: Env -> DocsNeed -> Details.Local -> B.ByteString -> Map.Map ModuleName.Raw I.Interface -> Src.Module -> IO Result
compile (Env key root projectType platform _ buildID _ _) docsNeed (Details.Local path time deps main lastChange _) source ifaces modul =
  let pkg = projectTypeToPkg projectType
   in case Compile.compile platform pkg ifaces modul of
        Right (Compile.Artifacts canonical annotations objects) ->
          case makeDocs docsNeed canonical of
            Left err ->
              return $
                RProblem $
                  Error.Module (Src.getName modul) path time source (Error.BadDocs err)
            Right docs ->
              do
                let name = Src.getName modul
                let iface = I.fromModule pkg canonical annotations
                let greni = Dirs.greni root name
                -- the object graph is always rewritten, even when the
                -- interface turns out to be unchanged
                File.writeBinary (Dirs.greno root name) objects
                maybeOldi <- File.readBinary greni
                case maybeOldi of
                  Just oldi | oldi == iface ->
                    do
                      -- iface should be fully forced by equality check;
                      -- interface unchanged, so keep the old lastChange
                      -- and only bump lastCompile to this buildID
                      Reporting.report key Reporting.BDone
                      let local = Details.Local path time deps main lastChange buildID
                      return (RSame local iface objects docs)
                  _ ->
                    do
                      -- iface may be lazy still; interface changed (or no
                      -- cached copy existed), so persist it and stamp both
                      -- lastChange and lastCompile with the current buildID
                      File.writeBinary greni iface
                      Reporting.report key Reporting.BDone
                      let local = Details.Local path time deps main buildID buildID
                      return (RNew local iface objects docs)
        Left err ->
          return $
            RProblem $
              Error.Module (Src.getName modul) path time source err
-- Packages compile under their own name; applications compile under a
-- placeholder package name.
projectTypeToPkg :: Parse.ProjectType -> Pkg.Name
projectTypeToPkg projectType =
  case projectType of
    Parse.Application -> Pkg.dummyName
    Parse.Package pkg -> pkg
-- WRITE DETAILS
-- Persist the details cache, folding this build's fresh results into the
-- existing map of local modules. All other fields pass through unchanged.
writeDetails :: FilePath -> Details.Details -> Map.Map ModuleName.Raw Result -> IO ()
writeDetails root (Details.Details time outline buildID locals foreigns extras) results =
  let newLocals = Map.foldrWithKey addNewLocal locals results
   in File.writeBinary (Dirs.details root) (Details.Details time outline buildID newLocals foreigns extras)
-- Only freshly compiled results (new or same-interface) update the cached
-- Details.Local entry; every other outcome leaves the map untouched.
addNewLocal :: ModuleName.Raw -> Result -> Map.Map ModuleName.Raw Details.Local -> Map.Map ModuleName.Raw Details.Local
addNewLocal name result locals =
  case result of
    RNew local _ _ _ -> Map.insert name local locals
    RSame local _ _ _ -> Map.insert name local locals
    _ -> locals
-- FINALIZE EXPOSED
-- Final step when building exposed modules: fail if any exposed module
-- could not be found, then fail if any module had errors, otherwise hand
-- the results to the docs goal.
finalizeExposed :: FilePath -> DocsGoal docs -> NE.List ModuleName.Raw -> Map.Map ModuleName.Raw Result -> IO (Either Exit.BuildProblem docs)
finalizeExposed root docsGoal exposed results =
  let missing = foldr (addImportProblems results) [] (NE.toList exposed)
   in case missing of
        p : ps ->
          return $ Left $ Exit.BuildProjectProblem (Exit.BP_MissingExposed (NE.List p ps))
        [] ->
          case Map.foldr addErrors [] results of
            e : es -> return $ Left $ Exit.BuildBadModules root e es
            [] -> Right <$> finalizeDocs docsGoal results
-- Collect compile errors; only RProblem carries an Error.Module.
addErrors :: Result -> [Error.Module] -> [Error.Module]
addErrors result errors =
  case result of
    RProblem e -> e : errors
    _ -> errors
-- Collect the exposed modules that could not be located; only RNotFound
-- carries an import problem. Assumes every exposed name is in `results`.
addImportProblems :: Map.Map ModuleName.Raw Result -> ModuleName.Raw -> [(ModuleName.Raw, Import.Problem)] -> [(ModuleName.Raw, Import.Problem)]
addImportProblems results name problems =
  case results ! name of
    RNotFound p -> (name, p) : problems
    _ -> problems
-- DOCS
-- What should happen to the documentation produced by a build. The GADT
-- index ties each goal to the value the build ultimately returns:
-- collected docs, unit after writing to a file, or unit when ignored.
data DocsGoal a where
  KeepDocs :: DocsGoal Docs.Documentation
  WriteDocs :: FilePath -> DocsGoal ()
  IgnoreDocs :: DocsGoal ()
-- Whether documentation must be generated during compilation
-- (derived from a DocsGoal via toDocsNeed).
newtype DocsNeed = DocsNeed {needsDocs :: Bool}
-- Docs are needed whenever the goal keeps or writes them.
toDocsNeed :: DocsGoal a -> DocsNeed
toDocsNeed goal =
  DocsNeed $
    case goal of
      IgnoreDocs -> False
      WriteDocs _ -> True
      KeepDocs -> True
-- Generate docs for a canonical module when they are needed; otherwise
-- succeed with Nothing without running the docs extractor at all.
makeDocs :: DocsNeed -> Can.Module -> Either EDocs.Error (Maybe Docs.Module)
makeDocs (DocsNeed isNeeded) modul =
  if isNeeded
    then fmap Just (Docs.fromModule modul)
    else Right Nothing
-- Deliver documentation according to the goal: keep it in memory, encode
-- and write it to the given path, or drop it entirely.
finalizeDocs :: DocsGoal docs -> Map.Map ModuleName.Raw Result -> IO docs
finalizeDocs goal results =
  case goal of
    KeepDocs ->
      return $ Map.mapMaybe toDocs results
    WriteDocs path ->
      E.writeUgly path $ Docs.encode $ Map.mapMaybe toDocs results
    IgnoreDocs ->
      return ()
-- Only freshly compiled results carry docs (and only when docs were
-- requested — the field itself is a Maybe).
toDocs :: Result -> Maybe Docs.Module
toDocs result =
  case result of
    RNew _ _ _ d -> d
    RSame _ _ _ d -> d
    _ -> Nothing
--------------------------------------------------------------------------------
------ NOW FOR SOME REPL STUFF -------------------------------------------------
--------------------------------------------------------------------------------
-- FROM REPL
-- Everything the REPL needs after building an input module: the home
-- module's canonical name, the compiled modules, a localizer for error
-- rendering, and the top-level type annotations.
data ReplArtifacts = ReplArtifacts
  { _repl_home :: ModuleName.Canonical,
    _repl_modules :: [Module],
    _repl_localizer :: L.Localizer,
    _repl_annotations :: Map.Map Name.Name Can.Annotation
  }
-- Build a module typed into the REPL: parse it, crawl and check its
-- dependencies concurrently (mirroring fromPaths), persist the details
-- cache, and assemble the REPL artifacts.
fromRepl :: FilePath -> Details.Details -> B.ByteString -> IO (Either Exit.Repl ReplArtifacts)
fromRepl root details source =
  do
    env@(Env _ _ projectType _ _ _ _ _) <- makeEnv Reporting.ignorer root details
    case Parse.fromByteString projectType source of
      Left syntaxError ->
        return $ Left $ Exit.ReplBadInput source $ Error.BadSyntax syntaxError
      Right modul@(Src.Module _ _ _ imports _ _ _ _ _ _ _) ->
        do
          dmvar <- Details.loadInterfaces root details
          let deps = map (Src.getImportName . snd) imports
          -- shared status dict filled in by the crawl workers
          mvar <- newMVar Map.empty
          crawlDeps env mvar deps ()
          statuses <- traverse readMVar =<< readMVar mvar
          midpoint <- checkMidpoint dmvar statuses
          case midpoint of
            Left problem ->
              return $ Left $ Exit.ReplProjectProblem problem
            Right foreigns ->
              do
                -- each checkModule worker blocks on rmvar until the full
                -- result dict is published below
                rmvar <- newEmptyMVar
                resultMVars <- forkWithKey (checkModule env foreigns rmvar) statuses
                putMVar rmvar resultMVars
                results <- traverse readMVar resultMVars
                writeDetails root details results
                -- lastCompile 0: the REPL input itself has no cached
                -- compile to compare against
                depsStatus <- checkDeps root resultMVars deps 0
                finalizeReplArtifacts env source modul depsStatus resultMVars results
-- Given the dependency status of the REPL input, compile it against the
-- right set of interfaces and package up ReplArtifacts, or translate any
-- failure into the appropriate Exit.Repl error.
finalizeReplArtifacts :: Env -> B.ByteString -> Src.Module -> DepsStatus -> ResultDict -> Map.Map ModuleName.Raw Result -> IO (Either Exit.Repl ReplArtifacts)
finalizeReplArtifacts env@(Env _ root projectType platform _ _ _ _) source modul@(Src.Module _ _ _ imports _ _ _ _ _ _ _) depsStatus resultMVars results =
  let pkg =
        projectTypeToPkg projectType
      -- compile the REPL module itself once interfaces are in hand
      compileInput ifaces =
        case Compile.compile platform pkg ifaces modul of
          Right (Compile.Artifacts canonical annotations objects) ->
            let h = Can._name canonical
                m = Fresh (Src.getName modul) (I.fromModule pkg canonical annotations) objects
                ms = Map.foldrWithKey addInside [] results
             in return $ Right $ ReplArtifacts h (m : ms) (L.fromModule modul) annotations
          Left errors ->
            return $ Left $ Exit.ReplBadInput source errors
   in case depsStatus of
        DepsChange ifaces ->
          compileInput ifaces
        DepsSame same cached ->
          do
            -- cached interfaces must be loaded from disk first
            maybeLoaded <- loadInterfaces root same cached
            case maybeLoaded of
              Just ifaces -> compileInput ifaces
              Nothing -> return $ Left $ Exit.ReplBadCache
        DepsBlock ->
          case Map.foldr addErrors [] results of
            [] -> return $ Left $ Exit.ReplBlocked
            e : es -> return $ Left $ Exit.ReplBadLocalDeps root e es
        DepsNotFound problems ->
          return $
            Left $
              Exit.ReplBadInput source $
                Error.BadImports $
                  toImportErrors env resultMVars imports problems
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
------ AFTER THIS, EVERYTHING IS ABOUT HANDLING MODULES GIVEN BY FILEPATH ------
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- FIND ROOT
-- Where a requested build root lives: inside a source directory (known by
-- module name) or outside them (known only by file path).
data RootLocation
  = LInside ModuleName.Raw
  | LOutside FilePath
-- Resolve each requested path concurrently, then check the batch of root
-- infos for duplicates before returning their locations.
findRoots :: Env -> NE.List FilePath -> IO (Either Exit.BuildProjectProblem (NE.List RootLocation))
findRoots env paths =
  do
    infoMVars <- traverse (fork . getRootInfo env) paths
    einfos <- traverse readMVar infoMVars
    return (sequence einfos >>= checkRoots)
-- Reject the build if the same absolute file was given as a root more
-- than once; otherwise return the root locations in the original order.
checkRoots :: NE.List RootInfo -> Either Exit.BuildProjectProblem (NE.List RootLocation)
checkRoots infos =
  let -- key each root by its canonicalized absolute path
      toOneOrMore loc@(RootInfo absolute _ _) =
        (absolute, OneOrMore.one loc)
      fromOneOrMore loc locs =
        case locs of
          [] -> Right ()
          loc2 : _ -> Left (Exit.BP_MainPathDuplicate (_relative loc) (_relative loc2))
   in fmap (\_ -> fmap _location infos) $
        traverse (OneOrMore.destruct fromOneOrMore) $
          Map.fromListWith OneOrMore.more $
            map toOneOrMore (NE.toList infos)
-- ROOT INFO
-- A resolved build root: its canonicalized absolute path, the path as the
-- user wrote it (for error messages), and where it was classified.
data RootInfo = RootInfo
  { _absolute :: FilePath,
    _relative :: FilePath,
    _location :: RootLocation
  }
-- Resolve one requested root path: it must exist, after which its
-- canonical absolute path is classified by getRootInfoHelp.
getRootInfo :: Env -> FilePath -> IO (Either Exit.BuildProjectProblem RootInfo)
getRootInfo env path =
  do
    exists <- File.exists path
    if not exists
      then return (Left (Exit.BP_PathUnknown path))
      else do
        absolutePath <- Dir.canonicalizePath path
        getRootInfoHelp env path absolutePath
-- Classify a canonicalized root path: it must end in .gren, and is
-- LInside when it sits under exactly one source directory (with a valid
-- module name), LOutside when it sits under none. Multiple matching
-- source dirs, invalid names, or name collisions are reported as errors.
getRootInfoHelp :: Env -> FilePath -> FilePath -> IO (Either Exit.BuildProjectProblem RootInfo)
getRootInfoHelp (Env _ _ _ _ srcDirs _ _ _) path absolutePath =
  let (dirs, file) = FP.splitFileName absolutePath
      (final, ext) = FP.splitExtension file
   in if ext /= ".gren"
        then return $ Left $ Exit.BP_WithBadExtension path
        else
          let absoluteSegments = FP.splitDirectories dirs ++ [final]
           in case Maybe.mapMaybe (isInsideSrcDirByPath absoluteSegments) srcDirs of
                [] ->
                  -- not under any source directory
                  return $ Right $ RootInfo absolutePath path (LOutside path)
                [(_, Right names)] ->
                  do
                    let name = Name.fromChars (List.intercalate "." names)
                    -- a file in another src dir may claim the same name
                    matchingDirs <- filterM (isInsideSrcDirByName names) srcDirs
                    case matchingDirs of
                      d1 : d2 : _ ->
                        do
                          let p1 = AbsoluteSrcDir.addRelative d1 (FP.joinPath names <.> "gren")
                          let p2 = AbsoluteSrcDir.addRelative d2 (FP.joinPath names <.> "gren")
                          return $ Left $ Exit.BP_RootNameDuplicate name p1 p2
                      _ ->
                        return $ Right $ RootInfo absolutePath path (LInside name)
                [(s, Left names)] ->
                  -- under a src dir but segments are not valid module names
                  return $ Left $ Exit.BP_RootNameInvalid path s names
                (s1, _) : (s2, _) : _ ->
                  return $ Left $ Exit.BP_WithAmbiguousSrcDir path s1 s2
-- Does this source directory contain a file for the given module name
-- segments (e.g. ["Foo","Bar"] -> <srcDir>/Foo/Bar.gren)?
isInsideSrcDirByName :: [String] -> AbsoluteSrcDir -> IO Bool
isInsideSrcDirByName names srcDir =
  let candidate = AbsoluteSrcDir.addRelative srcDir (FP.joinPath names <.> "gren")
   in File.exists candidate
-- If the path segments start with this source directory's segments,
-- return the remainder: Right when every remaining segment is a valid
-- module-name piece, Left otherwise so the caller can report it.
isInsideSrcDirByPath :: [String] -> AbsoluteSrcDir -> Maybe (FilePath, Either [String] [String])
isInsideSrcDirByPath segments (AbsoluteSrcDir srcDir) =
  case dropPrefix (FP.splitDirectories srcDir) segments of
    Nothing ->
      Nothing
    Just names
      | all isGoodName names -> Just (srcDir, Right names)
      | otherwise -> Just (srcDir, Left names)
-- A valid module-name segment: non-empty, starts with an uppercase
-- letter, and continues with alphanumerics or underscores.
isGoodName :: [Char] -> Bool
isGoodName name =
  case name of
    first : rest ->
      Char.isUpper first && all isGoodChar rest
    [] ->
      False
  where
    isGoodChar c =
      Char.isAlphaNum c || c == '_'
-- INVARIANT: Dir.canonicalizePath has been run on both inputs
--
-- Strip `roots` from the front of `paths`: Just the remainder when
-- `roots` is a prefix, Nothing otherwise. This is exactly
-- Data.List.stripPrefix, so delegate to the standard library instead of
-- hand-rolling the recursion.
dropPrefix :: [FilePath] -> [FilePath] -> Maybe [FilePath]
dropPrefix roots paths =
  List.stripPrefix roots paths
-- CRAWL ROOTS
-- Crawl outcome for a build root: an inside module (handled by the normal
-- status dict), an outside file that parsed (with its details, source
-- bytes, and AST), or an outside file that failed to parse.
data RootStatus
  = SInside ModuleName.Raw
  | SOutsideOk Details.Local B.ByteString Src.Module
  | SOutsideErr Error.Module
-- Crawl one build root. An inside root is registered in the shared status
-- dict and crawled like any other module; an outside root is read and
-- parsed directly, then its imports are crawled.
crawlRoot :: Env -> MVar StatusDict -> RootLocation -> IO RootStatus
crawlRoot env@(Env _ _ projectType _ _ buildID _ _) mvar root =
  case root of
    LInside name ->
      do
        statusMVar <- newEmptyMVar
        -- reserve the slot in the dict before crawling so concurrent
        -- crawlers see this module as in-progress
        statusDict <- takeMVar mvar
        putMVar mvar (Map.insert name statusMVar statusDict)
        putMVar statusMVar =<< crawlModule env mvar (DocsNeed False) name
        return (SInside name)
    LOutside path ->
      do
        time <- File.getTime path
        source <- File.readUtf8 path
        case Parse.fromByteString projectType source of
          Right modul@(Src.Module _ _ _ imports values _ _ _ _ _ _) ->
            do
              let deps = map (Src.getImportName . snd) imports
              -- a fresh outside root: both change stamps are this build
              let local = Details.Local path time deps (any (isMain . snd) values) buildID buildID
              crawlDeps env mvar deps (SOutsideOk local source modul)
          Left syntaxError ->
            return $
              SOutsideErr $
                Error.Module "???" path time source (Error.BadSyntax syntaxError)
-- CHECK ROOTS
-- Compile outcome for a build root: inside modules defer to the normal
-- result dict; outside modules carry their own interface and object
-- graph, an error, or are blocked by failed dependencies.
data RootResult
  = RInside ModuleName.Raw
  | ROutsideOk ModuleName.Raw I.Interface Opt.LocalGraph
  | ROutsideErr Error.Module
  | ROutsideBlocked
-- Compile an outside root once its dependencies are resolved; inside
-- roots and already-failed roots pass straight through. Mirrors the
-- dependency handling in checkModule.
checkRoot :: Env -> ResultDict -> RootStatus -> IO RootResult
checkRoot env@(Env _ root _ _ _ _ _ _) results rootStatus =
  case rootStatus of
    SInside name ->
      return (RInside name)
    SOutsideErr err ->
      return (ROutsideErr err)
    SOutsideOk local@(Details.Local path time deps _ _ lastCompile) source modul@(Src.Module _ _ _ imports _ _ _ _ _ _ _) ->
      do
        depsStatus <- checkDeps root results deps lastCompile
        case depsStatus of
          DepsChange ifaces ->
            compileOutside env local source ifaces modul
          DepsSame same cached ->
            do
              -- cached interfaces still need to be read from disk
              maybeLoaded <- loadInterfaces root same cached
              case maybeLoaded of
                Nothing -> return ROutsideBlocked
                Just ifaces -> compileOutside env local source ifaces modul
          DepsBlock ->
            return ROutsideBlocked
          DepsNotFound problems ->
            return $
              ROutsideErr $
                Error.Module (Src.getName modul) path time source $
                  Error.BadImports (toImportErrors env results imports problems)
-- Compile an outside root module. Unlike `compile`, nothing is written to
-- the cache — the interface and objects are returned directly.
compileOutside :: Env -> Details.Local -> B.ByteString -> Map.Map ModuleName.Raw I.Interface -> Src.Module -> IO RootResult
compileOutside (Env key _ projectType platform _ _ _ _) (Details.Local path time _ _ _ _) source ifaces modul =
  let pkg = projectTypeToPkg projectType
      name = Src.getName modul
   in case Compile.compile platform pkg ifaces modul of
        Right (Compile.Artifacts canonical annotations objects) ->
          do
            Reporting.report key Reporting.BDone
            return $ ROutsideOk name (I.fromModule pkg canonical annotations) objects
        Left errors ->
          return $ ROutsideErr $ Error.Module name path time source errors
-- TO ARTIFACTS
-- A successfully built root: inside roots are looked up among the built
-- modules by name; outside roots carry their artifacts directly.
data Root
  = Inside ModuleName.Raw
  | Outside ModuleName.Raw I.Interface Opt.LocalGraph
-- Combine per-module results and per-root results into final Artifacts,
-- or collect every module error into a build failure.
toArtifacts :: Env -> Dependencies -> Map.Map ModuleName.Raw Result -> NE.List RootResult -> Either Exit.BuildProblem Artifacts
toArtifacts (Env _ root projectType _ _ _ _ _) foreigns results rootResults =
  case gatherProblemsOrMains results rootResults of
    Left (NE.List e es) ->
      Left (Exit.BuildBadModules root e es)
    Right roots ->
      Right $
        Artifacts (projectTypeToPkg projectType) foreigns roots $
          -- outside-root modules first, then all inside modules
          Map.foldrWithKey addInside (foldr addOutside [] rootResults) results
-- Split root results into either a non-empty list of errors (module
-- errors plus outside-root errors) or a non-empty list of usable roots.
-- The first root is handled separately to guarantee non-emptiness.
gatherProblemsOrMains :: Map.Map ModuleName.Raw Result -> NE.List RootResult -> Either (NE.List Error.Module) (NE.List Root)
gatherProblemsOrMains results (NE.List rootResult rootResults) =
  let addResult result (es, roots) =
        case result of
          RInside n -> (es, Inside n : roots)
          ROutsideOk n i o -> (es, Outside n i o : roots)
          ROutsideErr e -> (e : es, roots)
          ROutsideBlocked -> (es, roots)
      errors = Map.foldr addErrors [] results
   in case (rootResult, foldr addResult (errors, []) rootResults) of
        (RInside n, ([], ms)) -> Right (NE.List (Inside n) ms)
        (RInside _, (e : es, _)) -> Left (NE.List e es)
        (ROutsideOk n i o, ([], ms)) -> Right (NE.List (Outside n i o) ms)
        (ROutsideOk _ _ _, (e : es, _)) -> Left (NE.List e es)
        (ROutsideErr e, (es, _)) -> Left (NE.List e es)
        -- a blocked root with no recorded error should be impossible
        (ROutsideBlocked, ([], _)) -> error "seems like .gren/ is corrupted"
        (ROutsideBlocked, (e : es, _)) -> Left (NE.List e es)
-- Turn a per-module result into a Module for the artifacts list.
-- Foreign/kernel results contribute nothing; error results should have
-- been filtered out by gatherProblemsOrMains, so hitting them is a bug.
addInside :: ModuleName.Raw -> Result -> [Module] -> [Module]
addInside name result modules =
  case result of
    RNew _ iface objs _ -> Fresh name iface objs : modules
    RSame _ iface objs _ -> Fresh name iface objs : modules
    RCached main _ mvar -> Cached name main mvar : modules
    RForeign _ -> modules
    RKernel -> modules
    RNotFound _ -> error (badInside name)
    RProblem _ -> error (badInside name)
    RBlocked -> error (badInside name)
-- Message for the impossible case in addInside.
badInside :: ModuleName.Raw -> [Char]
badInside name =
  concat
    [ "Error from `",
      Name.toChars name,
      "` should have been reported already."
    ]
-- Outside roots that compiled contribute a Fresh module; inside roots are
-- already covered by addInside, and failed/blocked roots contribute none.
addOutside :: RootResult -> [Module] -> [Module]
addOutside root modules =
  case root of
    ROutsideOk name iface objs -> Fresh name iface objs : modules
    RInside _ -> modules
    ROutsideErr _ -> modules
    ROutsideBlocked -> modules
| null | https://raw.githubusercontent.com/gren-lang/compiler/5ec14c10db01438d7dd51539908abb9a377090e6/builder/src/Build.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# OPTIONS_GHC -Wno-unused-do-bind #
ENVIRONMENT
FORK
PERF try using IORef semephore on file crawl phase?
FROM EXPOSED
crawl
compile
FROM PATHS
crawl
compile
GET ROOT NAMES
CHECK MODULE
TO IMPORT ERROR
LOAD CACHED INTERFACES
CHECK FOR CYCLES
CHECK UNIQUE ROOTS
COMPILE MODULE
iface should be fully forced by equality check
iface may be lazy still
WRITE DETAILS
FINALIZE EXPOSED
------------------------------------------------------------------------------
---- NOW FOR SOME REPL STUFF -------------------------------------------------
------------------------------------------------------------------------------
FROM REPL
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- AFTER THIS , EVERYTHING IS ABOUT HANDLING MODULES GIVEN BY FILEPATH ------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
FIND ROOT
ROOT INFO
CHECK ROOTS
TO ARTIFACTS |
module Build
( fromExposed,
fromPaths,
fromRepl,
Artifacts (..),
Root (..),
Module (..),
CachedInterface (..),
ReplArtifacts (..),
DocsGoal (..),
getRootNames,
)
where
import AST.Canonical qualified as Can
import AST.Optimized qualified as Opt
import AST.Source qualified as Src
import AbsoluteSrcDir (AbsoluteSrcDir (..))
import AbsoluteSrcDir qualified
import Compile qualified
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar
import Control.Monad (filterM)
import Data.ByteString qualified as B
import Data.Char qualified as Char
import Data.Graph qualified as Graph
import Data.List qualified as List
import Data.Map.Strict ((!))
import Data.Map.Strict qualified as Map
import Data.Map.Utils qualified as Map
import Data.Maybe qualified as Maybe
import Data.Name qualified as Name
import Data.NonEmptyList qualified as NE
import Data.OneOrMore qualified as OneOrMore
import Data.Set qualified as Set
import Directories qualified as Dirs
import File qualified
import Gren.Details qualified as Details
import Gren.Docs qualified as Docs
import Gren.Interface qualified as I
import Gren.ModuleName qualified as ModuleName
import Gren.Outline qualified as Outline
import Gren.Package qualified as Pkg
import Gren.Platform qualified as P
import Json.Encode qualified as E
import Parse.Module qualified as Parse
import Reporting qualified
import Reporting.Annotation qualified as A
import Reporting.Error qualified as Error
import Reporting.Error.Docs qualified as EDocs
import Reporting.Error.Import qualified as Import
import Reporting.Error.Syntax qualified as Syntax
import Reporting.Exit qualified as Exit
import Reporting.Render.Type.Localizer qualified as L
import System.Directory qualified as Dir
import System.FilePath ((<.>), (</>))
import System.FilePath qualified as FP
data Env = Env
{ _key :: Reporting.BKey,
_root :: FilePath,
_project :: Parse.ProjectType,
_platform :: P.Platform,
_srcDirs :: [AbsoluteSrcDir],
_buildID :: Details.BuildID,
_locals :: Map.Map ModuleName.Raw Details.Local,
_foreigns :: Map.Map ModuleName.Raw Details.Foreign
}
makeEnv :: Reporting.BKey -> FilePath -> Details.Details -> IO Env
makeEnv key root (Details.Details _ validOutline buildID locals foreigns _) =
case validOutline of
Details.ValidApp platform givenSrcDirs ->
do
srcDirs <- traverse (Outline.toAbsoluteSrcDir root) (NE.toList givenSrcDirs)
return $ Env key root Parse.Application platform srcDirs buildID locals foreigns
Details.ValidPkg platform pkg _ ->
do
srcDir <- Outline.toAbsoluteSrcDir root (Outline.RelativeSrcDir "src")
return $ Env key root (Parse.Package pkg) platform [srcDir] buildID locals foreigns
described in Chapter 13 of Parallel and Concurrent Programming in Haskell by
-and-concurrent/9781449335939/ch13.html#sec_conc-par-overhead
fork :: IO a -> IO (MVar a)
fork work =
do
mvar <- newEmptyMVar
_ <- forkIO $ putMVar mvar =<< work
return mvar
forkWithKey :: (k -> a -> IO b) -> Map.Map k a -> IO (Map.Map k (MVar b))
forkWithKey func dict =
Map.traverseWithKey (\k v -> fork (func k v)) dict
fromExposed :: Reporting.Style -> FilePath -> Details.Details -> DocsGoal docs -> NE.List ModuleName.Raw -> IO (Either Exit.BuildProblem docs)
fromExposed style root details docsGoal exposed@(NE.List e es) =
Reporting.trackBuild style $ \key ->
do
env <- makeEnv key root details
dmvar <- Details.loadInterfaces root details
mvar <- newEmptyMVar
let docsNeed = toDocsNeed docsGoal
roots <- Map.fromKeysA (fork . crawlModule env mvar docsNeed) (e : es)
putMVar mvar roots
mapM_ readMVar roots
statuses <- traverse readMVar =<< readMVar mvar
midpoint <- checkMidpoint dmvar statuses
case midpoint of
Left problem ->
return (Left (Exit.BuildProjectProblem problem))
Right foreigns ->
do
rmvar <- newEmptyMVar
resultMVars <- forkWithKey (checkModule env foreigns rmvar) statuses
putMVar rmvar resultMVars
results <- traverse readMVar resultMVars
writeDetails root details results
finalizeExposed root docsGoal exposed results
data Artifacts = Artifacts
{ _name :: Pkg.Name,
_deps :: Dependencies,
_roots :: NE.List Root,
_modules :: [Module]
}
data Module
= Fresh ModuleName.Raw I.Interface Opt.LocalGraph
| Cached ModuleName.Raw Bool (MVar CachedInterface)
type Dependencies =
Map.Map ModuleName.Canonical I.DependencyInterface
fromPaths :: Reporting.Style -> FilePath -> Details.Details -> NE.List FilePath -> IO (Either Exit.BuildProblem Artifacts)
fromPaths style root details paths =
Reporting.trackBuild style $ \key ->
do
env <- makeEnv key root details
elroots <- findRoots env paths
case elroots of
Left problem ->
return (Left (Exit.BuildProjectProblem problem))
Right lroots ->
do
dmvar <- Details.loadInterfaces root details
smvar <- newMVar Map.empty
srootMVars <- traverse (fork . crawlRoot env smvar) lroots
sroots <- traverse readMVar srootMVars
statuses <- traverse readMVar =<< readMVar smvar
midpoint <- checkMidpointAndRoots dmvar statuses sroots
case midpoint of
Left problem ->
return (Left (Exit.BuildProjectProblem problem))
Right foreigns ->
do
rmvar <- newEmptyMVar
resultsMVars <- forkWithKey (checkModule env foreigns rmvar) statuses
putMVar rmvar resultsMVars
rrootMVars <- traverse (fork . checkRoot env resultsMVars) sroots
results <- traverse readMVar resultsMVars
writeDetails root details results
toArtifacts env foreigns results <$> traverse readMVar rrootMVars
getRootNames :: Artifacts -> NE.List ModuleName.Raw
getRootNames (Artifacts _ _ roots _) =
fmap getRootName roots
getRootName :: Root -> ModuleName.Raw
getRootName root =
case root of
Inside name -> name
Outside name _ _ -> name
CRAWL
type StatusDict =
Map.Map ModuleName.Raw (MVar Status)
data Status
= SCached Details.Local
| SChanged Details.Local B.ByteString Src.Module DocsNeed
| SBadImport Import.Problem
| SBadSyntax FilePath File.Time B.ByteString Syntax.Error
| SForeign Pkg.Name
| SKernel
crawlDeps :: Env -> MVar StatusDict -> [ModuleName.Raw] -> a -> IO a
crawlDeps env mvar deps blockedValue =
do
statusDict <- takeMVar mvar
let depsDict = Map.fromKeys (\_ -> ()) deps
let newsDict = Map.difference depsDict statusDict
statuses <- Map.traverseWithKey crawlNew newsDict
putMVar mvar (Map.union statuses statusDict)
mapM_ readMVar statuses
return blockedValue
where
crawlNew name () = fork (crawlModule env mvar (DocsNeed False) name)
crawlModule :: Env -> MVar StatusDict -> DocsNeed -> ModuleName.Raw -> IO Status
crawlModule env@(Env _ root projectType _ srcDirs buildID locals foreigns) mvar docsNeed name =
do
let fileName = ModuleName.toFilePath name <.> "gren"
paths <- filterM File.exists (map (`AbsoluteSrcDir.addRelative` fileName) srcDirs)
case paths of
[path] ->
case Map.lookup name foreigns of
Just (Details.Foreign dep deps) ->
return $ SBadImport $ Import.Ambiguous path [] dep deps
Nothing ->
do
newTime <- File.getTime path
case Map.lookup name locals of
Nothing ->
crawlFile env mvar docsNeed name path newTime buildID
Just local@(Details.Local oldPath oldTime deps _ lastChange _) ->
if path /= oldPath || oldTime /= newTime || needsDocs docsNeed
then crawlFile env mvar docsNeed name path newTime lastChange
else crawlDeps env mvar deps (SCached local)
p1 : p2 : ps ->
return $ SBadImport $ Import.AmbiguousLocal (FP.makeRelative root p1) (FP.makeRelative root p2) (map (FP.makeRelative root) ps)
[] ->
case Map.lookup name foreigns of
Just (Details.Foreign dep deps) ->
case deps of
[] ->
return $ SForeign dep
d : ds ->
return $ SBadImport $ Import.AmbiguousForeign dep d ds
Nothing ->
if Name.isKernel name && Parse.isKernel projectType
then do
exists <- File.exists ("src" </> ModuleName.toFilePath name <.> "js")
return $ if exists then SKernel else SBadImport Import.NotFound
else return $ SBadImport Import.NotFound
crawlFile :: Env -> MVar StatusDict -> DocsNeed -> ModuleName.Raw -> FilePath -> File.Time -> Details.BuildID -> IO Status
crawlFile env@(Env _ root projectType _ _ buildID _ _) mvar docsNeed expectedName path time lastChange =
do
source <- File.readUtf8 (root </> path)
case Parse.fromByteString projectType source of
Left err ->
return $ SBadSyntax path time source err
Right modul@(Src.Module maybeActualName _ _ imports values _ _ _ _ _ _) ->
case maybeActualName of
Nothing ->
return $ SBadSyntax path time source (Syntax.ModuleNameUnspecified expectedName)
Just name@(A.At _ actualName) ->
if expectedName == actualName
then
let deps = map (Src.getImportName . snd) imports
local = Details.Local path time deps (any (isMain . snd) values) lastChange buildID
in crawlDeps env mvar deps (SChanged local source modul docsNeed)
else return $ SBadSyntax path time source (Syntax.ModuleNameMismatch expectedName name)
isMain :: A.Located Src.Value -> Bool
isMain (A.At _ (Src.Value (A.At _ name) _ _ _ _)) =
name == Name._main
type ResultDict =
Map.Map ModuleName.Raw (MVar Result)
data Result
= RNew !Details.Local !I.Interface !Opt.LocalGraph !(Maybe Docs.Module)
| RSame !Details.Local !I.Interface !Opt.LocalGraph !(Maybe Docs.Module)
| RCached Bool Details.BuildID (MVar CachedInterface)
| RNotFound Import.Problem
| RProblem Error.Module
| RBlocked
| RForeign I.Interface
| RKernel
data CachedInterface
= Unneeded
| Loaded I.Interface
| Corrupted
checkModule :: Env -> Dependencies -> MVar ResultDict -> ModuleName.Raw -> Status -> IO Result
checkModule env@(Env _ root projectType _ _ _ _ _) foreigns resultsMVar name status =
case status of
SCached local@(Details.Local path time deps hasMain lastChange lastCompile) ->
do
results <- readMVar resultsMVar
depsStatus <- checkDeps root results deps lastCompile
case depsStatus of
DepsChange ifaces ->
do
source <- File.readUtf8 path
case Parse.fromByteString projectType source of
Right modul -> compile env (DocsNeed False) local source ifaces modul
Left err ->
return $
RProblem $
Error.Module name path time source (Error.BadSyntax err)
DepsSame _ _ ->
do
mvar <- newMVar Unneeded
return (RCached hasMain lastChange mvar)
DepsBlock ->
return RBlocked
DepsNotFound problems ->
do
source <- File.readUtf8 path
return $
RProblem $
Error.Module name path time source $
case Parse.fromByteString projectType source of
Right (Src.Module _ _ _ imports _ _ _ _ _ _ _) ->
Error.BadImports (toImportErrors env results imports problems)
Left err ->
Error.BadSyntax err
SChanged local@(Details.Local path time deps _ _ lastCompile) source modul@(Src.Module _ _ _ imports _ _ _ _ _ _ _) docsNeed ->
do
results <- readMVar resultsMVar
depsStatus <- checkDeps root results deps lastCompile
case depsStatus of
DepsChange ifaces ->
compile env docsNeed local source ifaces modul
DepsSame same cached ->
do
maybeLoaded <- loadInterfaces root same cached
case maybeLoaded of
Nothing -> return RBlocked
Just ifaces -> compile env docsNeed local source ifaces modul
DepsBlock ->
return RBlocked
DepsNotFound problems ->
return $
RProblem $
Error.Module name path time source $
Error.BadImports (toImportErrors env results imports problems)
SBadImport importProblem ->
return (RNotFound importProblem)
SBadSyntax path time source err ->
return $
RProblem $
Error.Module name path time source $
Error.BadSyntax err
SForeign home ->
case foreigns ! ModuleName.Canonical home name of
I.Public iface -> return (RForeign iface)
I.Private _ _ _ -> error $ "mistakenly seeing private interface for " ++ Pkg.toChars home ++ " " ++ ModuleName.toChars name
SKernel ->
return RKernel
CHECK DEPS
data DepsStatus
= DepsChange (Map.Map ModuleName.Raw I.Interface)
| DepsSame [Dep] [CDep]
| DepsBlock
| DepsNotFound (NE.List (ModuleName.Raw, Import.Problem))
checkDeps :: FilePath -> ResultDict -> [ModuleName.Raw] -> Details.BuildID -> IO DepsStatus
checkDeps root results deps lastCompile =
checkDepsHelp root results deps [] [] [] [] False 0 lastCompile
type Dep = (ModuleName.Raw, I.Interface)
type CDep = (ModuleName.Raw, MVar CachedInterface)
checkDepsHelp :: FilePath -> ResultDict -> [ModuleName.Raw] -> [Dep] -> [Dep] -> [CDep] -> [(ModuleName.Raw, Import.Problem)] -> Bool -> Details.BuildID -> Details.BuildID -> IO DepsStatus
checkDepsHelp root results deps new same cached importProblems isBlocked lastDepChange lastCompile =
case deps of
dep : otherDeps ->
do
result <- readMVar (results ! dep)
case result of
RNew (Details.Local _ _ _ _ lastChange _) iface _ _ ->
checkDepsHelp root results otherDeps ((dep, iface) : new) same cached importProblems isBlocked (max lastChange lastDepChange) lastCompile
RSame (Details.Local _ _ _ _ lastChange _) iface _ _ ->
checkDepsHelp root results otherDeps new ((dep, iface) : same) cached importProblems isBlocked (max lastChange lastDepChange) lastCompile
RCached _ lastChange mvar ->
checkDepsHelp root results otherDeps new same ((dep, mvar) : cached) importProblems isBlocked (max lastChange lastDepChange) lastCompile
RNotFound prob ->
checkDepsHelp root results otherDeps new same cached ((dep, prob) : importProblems) True lastDepChange lastCompile
RProblem _ ->
checkDepsHelp root results otherDeps new same cached importProblems True lastDepChange lastCompile
RBlocked ->
checkDepsHelp root results otherDeps new same cached importProblems True lastDepChange lastCompile
RForeign iface ->
checkDepsHelp root results otherDeps new ((dep, iface) : same) cached importProblems isBlocked lastDepChange lastCompile
RKernel ->
checkDepsHelp root results otherDeps new same cached importProblems isBlocked lastDepChange lastCompile
[] ->
case reverse importProblems of
p : ps ->
return $ DepsNotFound (NE.List p ps)
[] ->
if isBlocked
then return $ DepsBlock
else
if null new && lastDepChange <= lastCompile
then return $ DepsSame same cached
else do
maybeLoaded <- loadInterfaces root same cached
case maybeLoaded of
Nothing -> return DepsBlock
Just ifaces -> return $ DepsChange $ Map.union (Map.fromList new) ifaces
toImportErrors :: Env -> ResultDict -> [([Src.Comment], Src.Import)] -> NE.List (ModuleName.Raw, Import.Problem) -> NE.List Import.Error
toImportErrors (Env _ _ _ _ _ _ locals foreigns) results imports problems =
let knownModules =
Set.unions
[ Map.keysSet foreigns,
Map.keysSet locals,
Map.keysSet results
]
unimportedModules =
Set.difference knownModules (Set.fromList (map (Src.getImportName . snd) imports))
regionDict =
Map.fromList (map (\(_, Src.Import (A.At region name) _ _ _ _) -> (name, region)) imports)
toError (name, problem) =
Import.Error (regionDict ! name) name unimportedModules problem
in fmap toError problems
loadInterfaces :: FilePath -> [Dep] -> [CDep] -> IO (Maybe (Map.Map ModuleName.Raw I.Interface))
loadInterfaces root same cached =
do
loading <- traverse (fork . loadInterface root) cached
maybeLoaded <- traverse readMVar loading
case sequence maybeLoaded of
Nothing ->
return Nothing
Just loaded ->
return $ Just $ Map.union (Map.fromList loaded) (Map.fromList same)
loadInterface :: FilePath -> CDep -> IO (Maybe Dep)
loadInterface root (name, ciMvar) =
do
cachedInterface <- takeMVar ciMvar
case cachedInterface of
Corrupted ->
do
putMVar ciMvar cachedInterface
return Nothing
Loaded iface ->
do
putMVar ciMvar cachedInterface
return (Just (name, iface))
Unneeded ->
do
maybeIface <- File.readBinary (Dirs.greni root name)
case maybeIface of
Nothing ->
do
putMVar ciMvar Corrupted
return Nothing
Just iface ->
do
putMVar ciMvar (Loaded iface)
return (Just (name, iface))
CHECK
checkMidpoint :: MVar (Maybe Dependencies) -> Map.Map ModuleName.Raw Status -> IO (Either Exit.BuildProjectProblem Dependencies)
checkMidpoint dmvar statuses =
case checkForCycles statuses of
Nothing ->
do
maybeForeigns <- readMVar dmvar
case maybeForeigns of
Nothing -> return (Left Exit.BP_CannotLoadDependencies)
Just fs -> return (Right fs)
Just (NE.List name names) ->
do
_ <- readMVar dmvar
return (Left (Exit.BP_Cycle name names))
checkMidpointAndRoots :: MVar (Maybe Dependencies) -> Map.Map ModuleName.Raw Status -> NE.List RootStatus -> IO (Either Exit.BuildProjectProblem Dependencies)
checkMidpointAndRoots dmvar statuses sroots =
case checkForCycles statuses of
Nothing ->
case checkUniqueRoots statuses sroots of
Nothing ->
do
maybeForeigns <- readMVar dmvar
case maybeForeigns of
Nothing -> return (Left Exit.BP_CannotLoadDependencies)
Just fs -> return (Right fs)
Just problem ->
do
_ <- readMVar dmvar
return (Left problem)
Just (NE.List name names) ->
do
_ <- readMVar dmvar
return (Left (Exit.BP_Cycle name names))
-- Detect an import cycle among the crawled modules.
-- Builds the import graph, takes its strongly connected components, and
-- reports the first cyclic one (Nothing when the graph is acyclic).
checkForCycles :: Map.Map ModuleName.Raw Status -> Maybe (NE.List ModuleName.Raw)
checkForCycles modules =
  checkForCyclesHelp sccs
  where
    !graph = Map.foldrWithKey addToGraph [] modules
    !sccs = Graph.stronglyConnComp graph
-- Scan the SCC list for the first genuine cycle.
checkForCyclesHelp :: [Graph.SCC ModuleName.Raw] -> Maybe (NE.List ModuleName.Raw)
checkForCyclesHelp sccs =
  case sccs of
    [] ->
      Nothing
    Graph.AcyclicSCC _ : rest ->
      checkForCyclesHelp rest
    Graph.CyclicSCC [] : rest ->
      checkForCyclesHelp rest
    Graph.CyclicSCC (m : ms) : _ ->
      Just (NE.List m ms)
-- Graph node for the cycle check: (payload, key, outgoing edges),
-- the shape expected by Graph.stronglyConnComp.
type Node =
  (ModuleName.Raw, ModuleName.Raw, [ModuleName.Raw])
-- Add one module as a node of the import graph.
-- Only local modules (cached or changed) contribute edges; foreign,
-- kernel, and broken modules become leaf nodes.
addToGraph :: ModuleName.Raw -> Status -> [Node] -> [Node]
addToGraph name status graph =
  let dependencies =
        case status of
          SCached (Details.Local _ _ deps _ _ _) -> deps
          SChanged (Details.Local _ _ deps _ _ _) _ _ _ -> deps
          SBadImport _ -> []
          SBadSyntax _ _ _ _ -> []
          SForeign _ -> []
          SKernel -> []
   in (name, name, dependencies) : graph
-- Ensure no module name is provided twice: first among the outside roots
-- themselves, then between outside roots and the crawled inside modules.
checkUniqueRoots :: Map.Map ModuleName.Raw Status -> NE.List RootStatus -> Maybe Exit.BuildProjectProblem
checkUniqueRoots insides sroots =
  let outsidesDict =
        Map.fromListWith OneOrMore.more (Maybe.mapMaybe rootStatusToNamePathPair (NE.toList sroots))
   in case Map.traverseWithKey checkOutside outsidesDict of
        Left problem ->
          Just problem
        Right outsides ->
          case sequence_ (Map.intersectionWithKey checkInside outsides insides) of
            Right () -> Nothing
            Left problem -> Just problem
-- Outside roots contribute a (module name, path) pair for duplicate
-- detection; inside roots and parse failures contribute nothing.
rootStatusToNamePathPair :: RootStatus -> Maybe (ModuleName.Raw, OneOrMore.OneOrMore FilePath)
rootStatusToNamePathPair sroot =
  case sroot of
    SInside _ -> Nothing
    SOutsideOk (Details.Local path _ _ _ _ _) _ modul -> Just (Src.getName modul, OneOrMore.one path)
    SOutsideErr _ -> Nothing
-- A module name given as an outside root must map to exactly one path;
-- two different paths for the same name is an error.
checkOutside :: ModuleName.Raw -> OneOrMore.OneOrMore FilePath -> Either Exit.BuildProjectProblem FilePath
checkOutside name paths =
  case OneOrMore.destruct NE.List paths of
    NE.List path [] ->
      Right path
    NE.List path1 (path2 : _) ->
      Left (Exit.BP_RootNameDuplicate name path1 path2)
-- An outside root at p1 must not share its module name with a module that
-- already lives inside the project (SCached/SChanged carry the clash path).
checkInside :: ModuleName.Raw -> FilePath -> Status -> Either Exit.BuildProjectProblem ()
checkInside name p1 status =
  case status of
    SCached (Details.Local p2 _ _ _ _ _) -> Left (Exit.BP_RootNameDuplicate name p1 p2)
    SChanged (Details.Local p2 _ _ _ _ _) _ _ _ -> Left (Exit.BP_RootNameDuplicate name p1 p2)
    SBadImport _ -> Right ()
    SBadSyntax _ _ _ _ -> Right ()
    SForeign _ -> Right ()
    SKernel -> Right ()
-- Compile a single module. On success, write the object file, write the
-- interface only when it actually changed (so unchanged interfaces keep
-- their old lastChange stamp and avoid downstream rebuilds), and report
-- progress. On failure return an RProblem result.
compile :: Env -> DocsNeed -> Details.Local -> B.ByteString -> Map.Map ModuleName.Raw I.Interface -> Src.Module -> IO Result
compile (Env key root projectType platform _ buildID _ _) docsNeed (Details.Local path time deps main lastChange _) source ifaces modul =
  let pkg = projectTypeToPkg projectType
   in case Compile.compile platform pkg ifaces modul of
        Right (Compile.Artifacts canonical annotations objects) ->
          case makeDocs docsNeed canonical of
            Left err ->
              return $
                RProblem $
                  Error.Module (Src.getName modul) path time source (Error.BadDocs err)
            Right docs ->
              do
                let name = Src.getName modul
                let iface = I.fromModule pkg canonical annotations
                let greni = Dirs.greni root name
                File.writeBinary (Dirs.greno root name) objects
                maybeOldi <- File.readBinary greni
                case maybeOldi of
                  Just oldi | oldi == iface ->
                    do
                      -- interface unchanged: keep the old lastChange stamp
                      Reporting.report key Reporting.BDone
                      let local = Details.Local path time deps main lastChange buildID
                      return (RSame local iface objects docs)
                  _ ->
                    do
                      -- new or changed interface: persist it, stamp this build
                      File.writeBinary greni iface
                      Reporting.report key Reporting.BDone
                      let local = Details.Local path time deps main buildID buildID
                      return (RNew local iface objects docs)
        Left err ->
          return $
            RProblem $
              Error.Module (Src.getName modul) path time source err
-- The package name under which to compile: the package's own name, or a
-- placeholder for applications.
projectTypeToPkg :: Parse.ProjectType -> Pkg.Name
projectTypeToPkg (Parse.Package pkg) = pkg
projectTypeToPkg Parse.Application = Pkg.dummyName
-- Persist the build details, folding the freshly compiled modules' Local
-- records into the existing ones.
writeDetails :: FilePath -> Details.Details -> Map.Map ModuleName.Raw Result -> IO ()
writeDetails root (Details.Details time outline buildID locals foreigns extras) results =
  File.writeBinary (Dirs.details root) $
    Details.Details time outline buildID (Map.foldrWithKey addNewLocal locals results) foreigns extras
-- Record the fresh Details.Local of every newly compiled or re-verified
-- module; every other kind of result leaves the map untouched.
addNewLocal :: ModuleName.Raw -> Result -> Map.Map ModuleName.Raw Details.Local -> Map.Map ModuleName.Raw Details.Local
addNewLocal name result locals =
  case result of
    RNew local _ _ _ -> insertLocal local
    RSame local _ _ _ -> insertLocal local
    _ -> locals
  where
    insertLocal local = Map.insert name local locals
-- After compiling: fail if any exposed module is missing, then fail if any
-- module produced errors, otherwise discharge the docs goal.
finalizeExposed :: FilePath -> DocsGoal docs -> NE.List ModuleName.Raw -> Map.Map ModuleName.Raw Result -> IO (Either Exit.BuildProblem docs)
finalizeExposed root docsGoal exposed results =
  case foldr (addImportProblems results) [] (NE.toList exposed) of
    p : ps ->
      return $ Left $ Exit.BuildProjectProblem (Exit.BP_MissingExposed (NE.List p ps))
    [] ->
      case Map.foldr addErrors [] results of
        [] -> Right <$> finalizeDocs docsGoal results
        e : es -> return $ Left $ Exit.BuildBadModules root e es
-- Collect the error report of a failed module; all other results are
-- passed over.
addErrors :: Result -> [Error.Module] -> [Error.Module]
addErrors result errors =
  case result of
    RProblem e -> e : errors
    _ -> errors
-- Collect the import problem of an exposed module that could not be
-- found; every other result contributes nothing.
addImportProblems :: Map.Map ModuleName.Raw Result -> ModuleName.Raw -> [(ModuleName.Raw, Import.Problem)] -> [(ModuleName.Raw, Import.Problem)]
addImportProblems results name problems =
  case results ! name of
    RNotFound p -> (name, p) : problems
    _ -> problems
-- DOCS
-- What to do with documentation during a build.
-- The GADT ties each goal to the value finalizeDocs returns for it.
data DocsGoal a where
  KeepDocs :: DocsGoal Docs.Documentation
  WriteDocs :: FilePath -> DocsGoal ()
  IgnoreDocs :: DocsGoal ()
newtype DocsNeed = DocsNeed {needsDocs :: Bool}
-- Keeping or writing docs requires generating them; ignoring does not.
toDocsNeed :: DocsGoal a -> DocsNeed
toDocsNeed goal =
  DocsNeed $
    case goal of
      IgnoreDocs -> False
      WriteDocs _ -> True
      KeepDocs -> True
-- Generate docs only when required; Right Nothing when they are not.
makeDocs :: DocsNeed -> Can.Module -> Either EDocs.Error (Maybe Docs.Module)
makeDocs (DocsNeed isNeeded) modul
  | isNeeded = fmap Just (Docs.fromModule modul)
  | otherwise = Right Nothing
-- Discharge a DocsGoal once all results are in: collect the docs, write
-- them to disk, or do nothing.
finalizeDocs :: DocsGoal docs -> Map.Map ModuleName.Raw Result -> IO docs
finalizeDocs goal results =
  case goal of
    KeepDocs ->
      return $ Map.mapMaybe toDocs results
    WriteDocs path ->
      E.writeUgly path $ Docs.encode $ Map.mapMaybe toDocs results
    IgnoreDocs ->
      return ()
-- Extract the docs carried by a freshly built module, if any.
toDocs :: Result -> Maybe Docs.Module
toDocs result =
  case result of
    RNew _ _ _ docs -> docs
    RSame _ _ _ docs -> docs
    _ -> Nothing
-- Everything the REPL needs after compiling its input: the home module,
-- the compiled modules, a localizer for pretty-printing, and the type
-- annotations of the entered declarations.
data ReplArtifacts = ReplArtifacts
  { _repl_home :: ModuleName.Canonical,
    _repl_modules :: [Module],
    _repl_localizer :: L.Localizer,
    _repl_annotations :: Map.Map Name.Name Can.Annotation
  }
-- Compile a REPL input: parse it, crawl its imports, compile whatever is
-- needed, and package the artifacts the REPL uses for evaluation.
fromRepl :: FilePath -> Details.Details -> B.ByteString -> IO (Either Exit.Repl ReplArtifacts)
fromRepl root details source =
  do
    env@(Env _ _ projectType _ _ _ _ _) <- makeEnv Reporting.ignorer root details
    case Parse.fromByteString projectType source of
      Left syntaxError ->
        return $ Left $ Exit.ReplBadInput source $ Error.BadSyntax syntaxError
      Right modul@(Src.Module _ _ _ imports _ _ _ _ _ _ _) ->
        do
          -- kick off dependency loading, then crawl the input's imports
          dmvar <- Details.loadInterfaces root details
          let deps = map (Src.getImportName . snd) imports
          mvar <- newMVar Map.empty
          crawlDeps env mvar deps ()
          statuses <- traverse readMVar =<< readMVar mvar
          midpoint <- checkMidpoint dmvar statuses
          case midpoint of
            Left problem ->
              return $ Left $ Exit.ReplProjectProblem problem
            Right foreigns ->
              do
                -- compile every crawled module in parallel
                rmvar <- newEmptyMVar
                resultMVars <- forkWithKey (checkModule env foreigns rmvar) statuses
                putMVar rmvar resultMVars
                results <- traverse readMVar resultMVars
                writeDetails root details results
                depsStatus <- checkDeps root resultMVars deps 0
                finalizeReplArtifacts env source modul depsStatus resultMVars results
-- Final stage of fromRepl: depending on how the input's dependencies
-- resolved, compile the REPL module (re-loading interfaces if they were
-- unchanged) or report why it cannot be compiled.
finalizeReplArtifacts :: Env -> B.ByteString -> Src.Module -> DepsStatus -> ResultDict -> Map.Map ModuleName.Raw Result -> IO (Either Exit.Repl ReplArtifacts)
finalizeReplArtifacts env@(Env _ root projectType platform _ _ _ _) source modul@(Src.Module _ _ _ imports _ _ _ _ _ _ _) depsStatus resultMVars results =
  let pkg =
        projectTypeToPkg projectType
      compileInput ifaces =
        case Compile.compile platform pkg ifaces modul of
          Right (Compile.Artifacts canonical annotations objects) ->
            let h = Can._name canonical
                m = Fresh (Src.getName modul) (I.fromModule pkg canonical annotations) objects
                ms = Map.foldrWithKey addInside [] results
             in return $ Right $ ReplArtifacts h (m : ms) (L.fromModule modul) annotations
          Left errors ->
            return $ Left $ Exit.ReplBadInput source errors
   in case depsStatus of
        DepsChange ifaces ->
          compileInput ifaces
        DepsSame same cached ->
          do
            maybeLoaded <- loadInterfaces root same cached
            case maybeLoaded of
              Just ifaces -> compileInput ifaces
              Nothing -> return $ Left $ Exit.ReplBadCache
        DepsBlock ->
          case Map.foldr addErrors [] results of
            [] -> return $ Left $ Exit.ReplBlocked
            e : es -> return $ Left $ Exit.ReplBadLocalDeps root e es
        DepsNotFound problems ->
          return $
            Left $
              Exit.ReplBadInput source $
                Error.BadImports $
                  toImportErrors env resultMVars imports problems
-- Where a requested build root lives: a module inside the source
-- directories, or a loose file outside of them.
data RootLocation
  = LInside ModuleName.Raw
  | LOutside FilePath
-- Resolve the requested root paths in parallel, then verify them together.
findRoots :: Env -> NE.List FilePath -> IO (Either Exit.BuildProjectProblem (NE.List RootLocation))
findRoots env paths =
  do
    mvars <- traverse (fork . getRootInfo env) paths
    einfos <- traverse readMVar mvars
    return $ checkRoots =<< sequence einfos
-- Reject root lists that mention the same absolute path twice; otherwise
-- strip the RootInfos down to their locations.
checkRoots :: NE.List RootInfo -> Either Exit.BuildProjectProblem (NE.List RootLocation)
checkRoots infos =
  let toOneOrMore loc@(RootInfo absolute _ _) =
        (absolute, OneOrMore.one loc)
      fromOneOrMore loc locs =
        case locs of
          [] -> Right ()
          loc2 : _ -> Left (Exit.BP_MainPathDuplicate (_relative loc) (_relative loc2))
   in fmap (\_ -> fmap _location infos) $
        traverse (OneOrMore.destruct fromOneOrMore) $
          Map.fromListWith OneOrMore.more $
            map toOneOrMore (NE.toList infos)
-- A resolved build root: its canonicalized path, the path as the user
-- wrote it, and where it was found.
data RootInfo = RootInfo
  { _absolute :: FilePath,
    _relative :: FilePath,
    _location :: RootLocation
  }
-- Resolve one root path: report unknown paths, otherwise canonicalize and
-- classify the file.
getRootInfo :: Env -> FilePath -> IO (Either Exit.BuildProjectProblem RootInfo)
getRootInfo env path =
  do
    exists <- File.exists path
    if not exists
      then return (Left (Exit.BP_PathUnknown path))
      else do
        absolutePath <- Dir.canonicalizePath path
        getRootInfoHelp env path absolutePath
-- Classify a canonicalized root path: it must end in .gren; it is LInside
-- when it lives under exactly one source directory with a valid module
-- name, LOutside when it lives under none, and an error otherwise.
getRootInfoHelp :: Env -> FilePath -> FilePath -> IO (Either Exit.BuildProjectProblem RootInfo)
getRootInfoHelp (Env _ _ _ _ srcDirs _ _ _) path absolutePath =
  let (dirs, file) = FP.splitFileName absolutePath
      (final, ext) = FP.splitExtension file
   in if ext /= ".gren"
        then return $ Left $ Exit.BP_WithBadExtension path
        else
          let absoluteSegments = FP.splitDirectories dirs ++ [final]
           in case Maybe.mapMaybe (isInsideSrcDirByPath absoluteSegments) srcDirs of
                [] ->
                  return $ Right $ RootInfo absolutePath path (LOutside path)
                [(_, Right names)] ->
                  do
                    let name = Name.fromChars (List.intercalate "." names)
                    -- the same module may exist in several source dirs
                    matchingDirs <- filterM (isInsideSrcDirByName names) srcDirs
                    case matchingDirs of
                      d1 : d2 : _ ->
                        do
                          let p1 = AbsoluteSrcDir.addRelative d1 (FP.joinPath names <.> "gren")
                          let p2 = AbsoluteSrcDir.addRelative d2 (FP.joinPath names <.> "gren")
                          return $ Left $ Exit.BP_RootNameDuplicate name p1 p2
                      _ ->
                        return $ Right $ RootInfo absolutePath path (LInside name)
                [(s, Left names)] ->
                  return $ Left $ Exit.BP_RootNameInvalid path s names
                (s1, _) : (s2, _) : _ ->
                  return $ Left $ Exit.BP_WithAmbiguousSrcDir path s1 s2
-- Does a file for these module-name segments exist under this source dir?
isInsideSrcDirByName :: [String] -> AbsoluteSrcDir -> IO Bool
isInsideSrcDirByName names srcDir =
  File.exists (AbsoluteSrcDir.addRelative srcDir (FP.joinPath names <.> "gren"))
-- If the path segments live under the given source directory, return that
-- directory together with the remaining segments: Right when every segment
-- is a valid module-name part, Left otherwise.
isInsideSrcDirByPath :: [String] -> AbsoluteSrcDir -> Maybe (FilePath, Either [String] [String])
isInsideSrcDirByPath segments (AbsoluteSrcDir srcDir) =
  fmap classify (dropPrefix (FP.splitDirectories srcDir) segments)
  where
    classify names
      | all isGoodName names = (srcDir, Right names)
      | otherwise = (srcDir, Left names)
-- A valid module-name segment: non-empty, starts with an upper-case
-- letter, and continues with alphanumerics or underscores.
isGoodName :: [Char] -> Bool
isGoodName [] = False
isGoodName (char : chars) =
  Char.isUpper char && all isGoodTail chars
  where
    isGoodTail c = Char.isAlphaNum c || c == '_'
-- INVARIANT: Dir.canonicalizePath has been run on both inputs
-- Drop `roots` from the front of `paths`; Nothing when `roots` is not a
-- prefix of `paths`.
-- This is exactly Data.List.stripPrefix, so delegate to it instead of
-- hand-rolling the recursion.
dropPrefix :: [FilePath] -> [FilePath] -> Maybe [FilePath]
dropPrefix roots paths =
  List.stripPrefix roots paths
-- CRAWL ROOTS
-- Crawl result for a build root.
data RootStatus
  = SInside ModuleName.Raw -- root is a module inside the source dirs
  | SOutsideOk Details.Local B.ByteString Src.Module -- parsed outside file
  | SOutsideErr Error.Module -- outside file failed to parse
-- Crawl one build root. Inside roots are registered in the shared status
-- dict and crawled as ordinary modules; outside roots are read and parsed
-- here, and their imports crawled.
crawlRoot :: Env -> MVar StatusDict -> RootLocation -> IO RootStatus
crawlRoot env@(Env _ _ projectType _ _ buildID _ _) mvar root =
  case root of
    LInside name ->
      do
        -- reserve the slot before crawling so concurrent crawlers see it
        statusMVar <- newEmptyMVar
        statusDict <- takeMVar mvar
        putMVar mvar (Map.insert name statusMVar statusDict)
        putMVar statusMVar =<< crawlModule env mvar (DocsNeed False) name
        return (SInside name)
    LOutside path ->
      do
        time <- File.getTime path
        source <- File.readUtf8 path
        case Parse.fromByteString projectType source of
          Right modul@(Src.Module _ _ _ imports values _ _ _ _ _ _) ->
            do
              let deps = map (Src.getImportName . snd) imports
              let local = Details.Local path time deps (any (isMain . snd) values) buildID buildID
              crawlDeps env mvar deps (SOutsideOk local source modul)
          Left syntaxError ->
            return $
              SOutsideErr $
                Error.Module "???" path time source (Error.BadSyntax syntaxError)
-- Compile result for a build root.
data RootResult
  = RInside ModuleName.Raw -- compiled along with the inside modules
  | ROutsideOk ModuleName.Raw I.Interface Opt.LocalGraph -- compiled here
  | ROutsideErr Error.Module -- failed to compile
  | ROutsideBlocked -- a dependency failed first
-- Compile a build root once its dependencies are settled. Inside roots
-- were already handled with the other modules; outside roots are compiled
-- here against the (possibly re-loaded) dependency interfaces.
checkRoot :: Env -> ResultDict -> RootStatus -> IO RootResult
checkRoot env@(Env _ root _ _ _ _ _ _) results rootStatus =
  case rootStatus of
    SInside name ->
      return (RInside name)
    SOutsideErr err ->
      return (ROutsideErr err)
    SOutsideOk local@(Details.Local path time deps _ _ lastCompile) source modul@(Src.Module _ _ _ imports _ _ _ _ _ _ _) ->
      do
        depsStatus <- checkDeps root results deps lastCompile
        case depsStatus of
          DepsChange ifaces ->
            compileOutside env local source ifaces modul
          DepsSame same cached ->
            do
              maybeLoaded <- loadInterfaces root same cached
              case maybeLoaded of
                Nothing -> return ROutsideBlocked
                Just ifaces -> compileOutside env local source ifaces modul
          DepsBlock ->
            return ROutsideBlocked
          DepsNotFound problems ->
            return $
              ROutsideErr $
                Error.Module (Src.getName modul) path time source $
                  Error.BadImports (toImportErrors env results imports problems)
-- Compile an outside root. Unlike `compile`, nothing is written to disk;
-- the interface and objects are returned directly in the RootResult.
compileOutside :: Env -> Details.Local -> B.ByteString -> Map.Map ModuleName.Raw I.Interface -> Src.Module -> IO RootResult
compileOutside (Env key _ projectType platform _ _ _ _) (Details.Local path time _ _ _ _) source ifaces modul =
  let pkg = projectTypeToPkg projectType
      name = Src.getName modul
   in case Compile.compile platform pkg ifaces modul of
        Right (Compile.Artifacts canonical annotations objects) ->
          do
            Reporting.report key Reporting.BDone
            return $ ROutsideOk name (I.fromModule pkg canonical annotations) objects
        Left errors ->
          return $ ROutsideErr $ Error.Module name path time source errors
-- A successfully built root: an inside module by name, or an outside
-- module with its compiled interface and objects.
data Root
  = Inside ModuleName.Raw
  | Outside ModuleName.Raw I.Interface Opt.LocalGraph
-- Assemble the final Artifacts, or the collected module errors if any
-- module or root failed.
toArtifacts :: Env -> Dependencies -> Map.Map ModuleName.Raw Result -> NE.List RootResult -> Either Exit.BuildProblem Artifacts
toArtifacts (Env _ root projectType _ _ _ _ _) foreigns results rootResults =
  case gatherProblemsOrMains results rootResults of
    Left (NE.List e es) ->
      Left (Exit.BuildBadModules root e es)
    Right roots ->
      Right $
        Artifacts (projectTypeToPkg projectType) foreigns roots $
          Map.foldrWithKey addInside (foldr addOutside [] rootResults) results
-- Split the root results into either the full list of errors (module
-- errors plus root errors) or the full list of successful roots.
-- An ROutsideBlocked first root with no recorded errors cannot happen
-- unless the on-disk state is corrupted, hence the `error` call.
gatherProblemsOrMains :: Map.Map ModuleName.Raw Result -> NE.List RootResult -> Either (NE.List Error.Module) (NE.List Root)
gatherProblemsOrMains results (NE.List rootResult rootResults) =
  let addResult result (es, roots) =
        case result of
          RInside n -> (es, Inside n : roots)
          ROutsideOk n i o -> (es, Outside n i o : roots)
          ROutsideErr e -> (e : es, roots)
          ROutsideBlocked -> (es, roots)
      errors = Map.foldr addErrors [] results
   in case (rootResult, foldr addResult (errors, []) rootResults) of
        (RInside n, ([], ms)) -> Right (NE.List (Inside n) ms)
        (RInside _, (e : es, _)) -> Left (NE.List e es)
        (ROutsideOk n i o, ([], ms)) -> Right (NE.List (Outside n i o) ms)
        (ROutsideOk _ _ _, (e : es, _)) -> Left (NE.List e es)
        (ROutsideErr e, (es, _)) -> Left (NE.List e es)
        (ROutsideBlocked, ([], _)) -> error "seems like .gren/ is corrupted"
        (ROutsideBlocked, (e : es, _)) -> Left (NE.List e es)
-- Convert per-module results into Module values for the artifact list.
-- Error-ish results are impossible here — they were rejected earlier —
-- so reaching them is a programmer error.
addInside :: ModuleName.Raw -> Result -> [Module] -> [Module]
addInside name result modules =
  case result of
    RNew _ iface objs _ -> Fresh name iface objs : modules
    RSame _ iface objs _ -> Fresh name iface objs : modules
    RCached main _ mvar -> Cached name main mvar : modules
    RNotFound _ -> error (badInside name)
    RProblem _ -> error (badInside name)
    RBlocked -> error (badInside name)
    RForeign _ -> modules
    RKernel -> modules
-- Panic message for results that should have been rejected earlier.
badInside :: ModuleName.Raw -> [Char]
badInside name =
  concat
    [ "Error from `"
    , Name.toChars name
    , "` should have been reported already."
    ]
-- Only successfully compiled outside roots contribute a Module; inside
-- roots were added by addInside, and failed roots were reported already.
addOutside :: RootResult -> [Module] -> [Module]
addOutside root modules =
  case root of
    ROutsideOk name iface objs -> Fresh name iface objs : modules
    _ -> modules
|
50da040e6dbfd3fea0d2df6b3771ea6726117c935f9119131651f23eeae02d03 | webyrd/n-grams-for-synthesis | compare-code-and-srfi.scm | (define code-names
(with-input-from-file "generic-arrays.scm"
(lambda ()
(let loop ((result '())
(obj (read)))
;; (pp obj) ;;; make sure all functions are defined with (define (a b) ...)
(if (##eof-object? obj)
result
(if (and (list? obj)
(not (null? obj))
(eq? (car obj) 'define)
(not (null? (cdr obj)))
(list? (cadr obj))
(not (null? (cadr obj)))
(symbol? (caadr obj))
(< 0 (string-length (symbol->string (caadr obj))))
(not (eq? (string-ref (symbol->string (caadr obj)) 0)
#\#)))
(loop (cons (caadr obj) result)
(read))
(loop result (read))))))))
;; Collect the names of all top-level `(define name value)` forms in
;; generic-arrays.scm — plain variable definitions, as opposed to the
;; `(define (name . args) ...)` procedure forms gathered by code-names.
(define other-names
  (with-input-from-file "generic-arrays.scm"
    (lambda ()
      (let loop ((result '())
                 (obj (read)))
        ;; (pp obj) ;;; make sure all functions are defined with (define (a b) ...)
        (if (##eof-object? obj)
            result
            (if (and (list? obj)
                     (not (null? obj))
                     (eq? (car obj) 'define)
                     (not (null? (cdr obj)))
                     (symbol? (cadr obj)))
                (loop (cons (cadr obj) result)
                      (read))
                (loop result (read))))))))
;; Pull the documented names out of srfi-122.scm: skip forms until the
;; (with-output-to-file ...) form, then recursively harvest the name from
;; every (format-lambda-list '(name ...)) call inside it.
(define srfi-names
  (with-input-from-file "srfi-122.scm"
    (lambda ()
      (let loop ((obj (read)))
        (if (not (and (list? obj)
                      (not (null? obj))
                      (eq? (car obj)
                           'with-output-to-file)))
            (loop (read))
            (let ((result '()))
              (define (process obj)
                (if (list? obj)
                    (if (and (not (null? obj))
                             (eq? (car obj)
                                  'format-lambda-list))
                        ;; (car (cadadr obj)) is the lambda-list's head name
                        (set! result (cons (car (cadadr obj))
                                           result))
                        (for-each process obj))))
              (process obj)
              result))))))
;; The elements of list a that are not (eq?) members of list b,
;; accumulated in reverse order of their appearance in a.
(define (in-a-not-in-b a b)
  (let loop ((a a) (result '()))
    (if (null? a)
        result
        (loop (cdr a)
              (if (memq (car a) b)
                  result
                  (cons (car a) result))))))
;; Report the symmetric difference between the two name sets.
(newline)(pp "SRFI names not in code: ")
(pp (in-a-not-in-b srfi-names code-names))
(newline)(pp "Code names not in SRFI: ")
(pp (in-a-not-in-b code-names srfi-names))
| null | https://raw.githubusercontent.com/webyrd/n-grams-for-synthesis/b53b071e53445337d3fe20db0249363aeb9f3e51/datasets/srfi/srfi-122/compare-code-and-srfi.scm | scheme | (pp obj) ;;; make sure all functions are defined with (define (a b) ...)
(pp obj) ;;; make sure all functions are defined with (define (a b) ...) | (define code-names
(with-input-from-file "generic-arrays.scm"
(lambda ()
(let loop ((result '())
(obj (read)))
(if (##eof-object? obj)
result
(if (and (list? obj)
(not (null? obj))
(eq? (car obj) 'define)
(not (null? (cdr obj)))
(list? (cadr obj))
(not (null? (cadr obj)))
(symbol? (caadr obj))
(< 0 (string-length (symbol->string (caadr obj))))
(not (eq? (string-ref (symbol->string (caadr obj)) 0)
#\#)))
(loop (cons (caadr obj) result)
(read))
(loop result (read))))))))
(define other-names
(with-input-from-file "generic-arrays.scm"
(lambda ()
(let loop ((result '())
(obj (read)))
(if (##eof-object? obj)
result
(if (and (list? obj)
(not (null? obj))
(eq? (car obj) 'define)
(not (null? (cdr obj)))
(symbol? (cadr obj)))
(loop (cons (cadr obj) result)
(read))
(loop result (read))))))))
(define srfi-names
(with-input-from-file "srfi-122.scm"
(lambda ()
(let loop ((obj (read)))
(if (not (and (list? obj)
(not (null? obj))
(eq? (car obj)
'with-output-to-file)))
(loop (read))
(let ((result '()))
(define (process obj)
(if (list? obj)
(if (and (not (null? obj))
(eq? (car obj)
'format-lambda-list))
(set! result (cons (car (cadadr obj))
result))
(for-each process obj))))
(process obj)
result))))))
(define (in-a-not-in-b a b)
(do ((a a (cdr a))
(result '() (if (memq (car a) b)
result
(cons (car a) result))))
((null? a) result)))
(newline)(pp "SRFI names not in code: ")
(pp (in-a-not-in-b srfi-names code-names))
(newline)(pp "Code names not in SRFI: ")
(pp (in-a-not-in-b code-names srfi-names))
|
85bd620baa3572805d28274d6969ee3b02cafb6ccf08b9f22058bf1b0f3607ac | mokus0/junkbox | BiQuad.hs | Digital biquadratic filters
--
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RecordWildCards #-}
module Math.BiQuad where
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.State
import Data.Bits
import Data.Complex
import Data.IORef
import Data.Int
import Data.Random
import Data.Word
import Math.Polynomial
-- Coefficients of a biquad section, normalized so that a0 == 1:
--   H(z) = (b0 + b1 z^-1 + b2 z^-2) / (1 + a1 z^-1 + a2 z^-2)
data BiQuad a = BiQuad
    { a1, a2 :: !a
    , b0, b1, b2 :: !a
    } deriving (Eq, Ord, Read, Show, Functor)
-- Evaluate the filter's transfer function H(z) at the point z
-- (numerator and denominator are polynomials in z^-1).
transfer BiQuad{..} z
    = evalPoly (poly LE [b0 , b1 , b2]) zInv
    / evalPoly (poly LE [1 , a1 , a2]) zInv
    where zInv = recip z
-- The two poles of the filter, i.e. the roots of z^2 + a1 z + a2.
-- Uses the product-of-roots form of the quadratic formula: one root is
-- d/2, the other is a2 / (d/2) == 2*a2/d.
poles BiQuad{..} = (2 * a2 / d, 0.5 * d)
    where d = negate a1 - sqrt (a1*a1 - 4*a2)
-- One step of the Direct Form I realization.
-- State is (x[n-1], x[n-2], y[n-1], y[n-2]).
directForm1 BiQuad{..} !x0 = do
    (x1, x2, y1, y2) <- get
    let !y0 = b0 * x0 + b1 * x1 + b2 * x2
                      - a1 * y1 - a2 * y2
    put (x0, x1, y0, y1)
    return y0
-- One step of the Direct Form II realization.
-- State is (w[n-1], w[n-2]), the delayed internal signal.
directForm2 BiQuad{..} !x = do
    (w1, w2) <- get
    let !w = x - a1 * w1 - a2 * w2
    put (w, w1)
    return $! (b0 * w + b1 * w1 + b2 * w2)
-- One step of the transposed Direct Form II realization.
-- State is the pair of delay registers (d1, d2); the new register values
-- are also returned alongside the output sample.
directForm2t BiQuad{..} x = do
    (d1, d2) <- get
    let y = b0 * x + d1
        d1' = b1 * x - a1 * y + d2
        d2' = b2 * x - a2 * y
    put (d1', d2')
    return (y, d1', d2')
run filt = flip (evalState . mapM filt)
-- Filter driving the candle simulation. Per the notes further down in
-- this file it is a 2nd-order Butterworth low-pass (60 Hz sample rate,
-- 8 Hz cutoff) -- TODO confirm the coefficients with a design tool.
candleFilter :: BiQuad Double
candleFilter = BiQuad (-0.87727063) 0.31106039 0.10844744 0.21689488 0.10844744
swap ~(a,b) = (b,a)
-- Wrap a filter into an IO-backed sampler: each draw from the returned
-- RVarT samples `dist`, feeds it through `filt`, and updates the filter
-- state held in an IORef (atomically, so concurrent samplers are safe).
runIO :: Num a => RVar a -> BiQuad a -> IO (RVarT IO a)
runIO dist filt = do
    st <- newIORef (0,0)
    return $ do
        x <- sample dist
        lift (atomicModifyIORef' st (swap . runState (directForm2 filt x)))
-- fixed point version of 2-stage IIR candle filter.
-- (2nd-order butterworth low-pass filter with sample rate of 60Hz,
-- cutoff at 8 Hz, as described by Park Hays at [0])
--
-- sigbits/exponent accounting (in {- s/e -} comments) should be thorough
-- enough that, assuming no silly errors, it can pretty easily be extended
-- to a proof that overflow is impossible.
--
-- Rounding can be improved slightly in a couple places by adding terms
-- before rounding, but I didn't do that here in order to keep the structure
-- clearer. It may also be possible to get a bit more overall precision by
-- changing parameters of the filter to get coefficients with fewer
-- significand bits (perhaps by slightly altering the sample rate, for
-- example).
--
-- It may also be worth looking at direct-form 1; I suspect an 8-bit direct
-- form implementation may have enough precision to be pleasant to watch.
-- It'd use the same amount of state memory as this one, but a fair bit
-- less compute time.
--
[ 0 ] -flickering-candle/
candle16_df2t :: Monad m => Int8 -> StateT (Int16, Int16) m (Int16, Int16, Int16)
7/5
3/3
4/5
7/9
7/10
7/10
(d1, d2) <- get
15/13
15/13
15/14
-- note: d2's sigbits depends on value of a2
put (d1', d2')
return (y, d1', d2')
-- Ad-hoc smoke test: push 2^16 samples of normal(0, 32) noise through
-- the 16-bit transposed-form candle filter.
main = do
    st <- newIORef (0, 0)
    replicateM (2^16) $ do
        x <- sample (normal 0 32) :: IO Double
        atomicModifyIORef' st (swap . runState (candle16_df2t (round x)))
-- Same smoke test against the floating-point filter with standard-normal
-- input.
foo = do
    st <- newIORef (0, 0)
    replicateM (2^16) $ do
        x <- sample stdNormal :: IO Double
        atomicModifyIORef' st (swap . runState (directForm2t candleFilter x))
# LANGUAGE BangPatterns #
sigbits/exponent accounting (in {- s/e -} comments) should be thorough
enough that, assuming no silly errors, it can pretty easily be extended
to a proof that overflow is impossible.
Rounding can be improved slightly in a couple places by adding terms
before rounding, but I didn't do that here in order to keep the structure
clearer. It may also be possible to get a bit more overall precision by
changing parameters of the filter to get coefficients with fewer
significand bits (perhaps by slightly altering the sample rate, for
example).
form implementation may have enough precision to be pleasant to watch.
It'd use the same amount of state memory as this one, but a fair bit
less compute time.
note: d2's sigbits depends on value of a2 | Digital biquadratic filters
# LANGUAGE DeriveFunctor #
# LANGUAGE FlexibleContexts #
# LANGUAGE RecordWildCards #
module Math.BiQuad where
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.State
import Data.Bits
import Data.Complex
import Data.IORef
import Data.Int
import Data.Random
import Data.Word
import Math.Polynomial
data BiQuad a = BiQuad
{ a1, a2 :: !a
, b0, b1, b2 :: !a
} deriving (Eq, Ord, Read, Show, Functor)
transfer BiQuad{..} z
= evalPoly (poly LE [b0 , b1 , b2]) zInv
/ evalPoly (poly LE [1 , a1 , a2]) zInv
where zInv = recip z
poles BiQuad{..} = (2 * a2 / d, 0.5 * d)
where d = negate a1 - sqrt (a1*a1 - 4*a2)
directForm1 BiQuad{..} !x0 = do
(x1, x2, y1, y2) <- get
let !y0 = b0 * x0 + b1 * x1 + b2 * x2
- a1 * y1 - a2 * y2
put (x0, x1, y0, y1)
return y0
directForm2 BiQuad{..} !x = do
(w1, w2) <- get
let !w = x - a1 * w1 - a2 * w2
put (w, w1)
return $! (b0 * w + b1 * w1 + b2 * w2)
directForm2t BiQuad{..} x = do
(d1, d2) <- get
let y = b0 * x + d1
d1' = b1 * x - a1 * y + d2
d2' = b2 * x - a2 * y
put (d1', d2')
return (y, d1', d2')
run filt = flip (evalState . mapM filt)
candleFilter :: BiQuad Double
candleFilter = BiQuad (-0.87727063) 0.31106039 0.10844744 0.21689488 0.10844744
swap ~(a,b) = (b,a)
runIO :: Num a => RVar a -> BiQuad a -> IO (RVarT IO a)
runIO dist filt = do
st <- newIORef (0,0)
return $ do
x <- sample dist
lift (atomicModifyIORef' st (swap . runState (directForm2 filt x)))
fixed point version of 2 - stage IIR candle filter .
( 2nd - order butterworth low - pass filter with sample rate of 60Hz ,
cutoff at 8 Hz , as described by Park Hays at [ 0 ] )
It may also be worth looking at direct - form 1 ; I suspect an 8 - bit direct
[ 0 ] -flickering-candle/
candle16_df2t :: Monad m => Int8 -> StateT (Int16, Int16) m (Int16, Int16, Int16)
7/5
3/3
4/5
7/9
7/10
7/10
(d1, d2) <- get
15/13
15/13
15/14
put (d1', d2')
return (y, d1', d2')
main = do
st <- newIORef (0, 0)
replicateM (2^16) $ do
x <- sample (normal 0 32) :: IO Double
atomicModifyIORef' st (swap . runState (candle16_df2t (round x)))
foo = do
st <- newIORef (0, 0)
replicateM (2^16) $ do
x <- sample stdNormal :: IO Double
atomicModifyIORef' st (swap . runState (directForm2t candleFilter x)) |
0022e871b9d8096e16001b262306c2a5c0c45f8667bee4d1d29e97e69760cd8b | bitemyapp/fp-course | Extend.hs | # LANGUAGE NoImplicitPrelude #
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE InstanceSigs #-}
module Course.Extend where
import Course.Core
import Course.ExactlyOne
import Course.List
import Course.Optional
import Course.Functor
-- | All instances of the `Extend` type-class must satisfy one law. This law
-- is not checked by the compiler. This law is given as:
--
-- * The law of associativity
-- `∀f g. (f <<=) . (g <<=) ≅ (<<=) (f . (g <<=))`
class Functor f => Extend f where
  -- Pronounced, extend.
  -- Maps a whole-structure observation (f a -> b) back over the
  -- structure; the dual of monadic bind.
  (<<=) ::
    (f a -> b)
    -> f a
    -> f b
infixr 1 <<=
-- | Implement the instance for @ExactlyOne@.
--
-- >>> id <<= ExactlyOne 7
-- ExactlyOne (ExactlyOne 7)
instance Extend ExactlyOne where
  (<<=) ::
    (ExactlyOne a -> b)
    -> ExactlyOne a
    -> ExactlyOne b
  -- Apply the function to the whole structure and re-wrap the result,
  -- satisfying `id <<= ExactlyOne 7 == ExactlyOne (ExactlyOne 7)`.
  f <<= x =
    ExactlyOne (f x)
-- | Implement the instance for @List@.
--
-- >>> length <<= ('a' :. 'b' :. 'c' :. Nil)
-- [3,2,1]
--
-- >>> id <<= (1 :. 2 :. 3 :. 4 :. Nil)
-- [[1,2,3,4],[2,3,4],[3,4],[4]]
--
-- >>> reverse <<= ((1 :. 2 :. 3 :. Nil) :. (4 :. 5 :. 6 :. Nil) :. Nil)
-- [[[4,5,6],[1,2,3]],[[4,5,6]]]
instance Extend List where
  (<<=) ::
    (List a -> b)
    -> List a
    -> List b
  -- Apply the function to every suffix of the list, longest first, e.g.
  -- `length <<= ('a' :. 'b' :. 'c' :. Nil) == (3 :. 2 :. 1 :. Nil)`.
  _ <<= Nil =
    Nil
  f <<= list@(_ :. t) =
    f list :. (f <<= t)
-- | Implement the instance for @Optional@.
--
-- >>> id <<= (Full 7)
-- Full (Full 7)
--
-- >>> id <<= Empty
-- Empty
instance Extend Optional where
  (<<=) ::
    (Optional a -> b)
    -> Optional a
    -> Optional b
  -- Empty has no value to extend over; Full applies the function to the
  -- whole Optional, so `id <<= Full 7 == Full (Full 7)`.
  _ <<= Empty =
    Empty
  f <<= full =
    Full (f full)
-- | Duplicate the functor using extension.
--
-- >>> cojoin (ExactlyOne 7)
-- ExactlyOne (ExactlyOne 7)
--
-- >>> cojoin (1 :. 2 :. 3 :. 4 :. Nil)
-- [[1,2,3,4],[2,3,4],[3,4],[4]]
--
-- >>> cojoin (Full 7)
-- Full (Full 7)
--
-- >>> cojoin Empty
-- Empty
cojoin ::
  Extend f =>
  f a
  -> f (f a)
-- Duplicate the structure by extending with the identity function
-- (the comonadic `duplicate`).
cojoin =
  (<<=) id
| null | https://raw.githubusercontent.com/bitemyapp/fp-course/a9a325cd895a0953151ec3d02f40006eb7993fb8/src/Course/Extend.hs | haskell | is not checked by the compiler. This law is given as:
* The law of associativity
`∀f g. (f <<=) . (g <<=) ≅ (<<=) (f . (g <<=))`
Pronounced, extend.
>>> id <<= ExactlyOne 7
>>> length <<= ('a' :. 'b' :. 'c' :. Nil)
[3,2,1]
[[1,2,3,4],[2,3,4],[3,4],[4]]
[[[4,5,6],[1,2,3]],[[4,5,6]]]
>>> id <<= Empty
Empty
| Duplicate the functor using extension.
[[1,2,3,4],[2,3,4],[3,4],[4]]
>>> cojoin Empty
Empty | # LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE InstanceSigs #
module Course.Extend where
import Course.Core
import Course.ExactlyOne
import Course.List
import Course.Optional
import Course.Functor
| All instances of the ` Extend ` type - class must satisfy one law . This law
class Functor f => Extend f where
(<<=) ::
(f a -> b)
-> f a
-> f b
infixr 1 <<=
| Implement the instance for @ExactlyOne@.
ExactlyOne ( ExactlyOne 7 )
instance Extend ExactlyOne where
(<<=) ::
(ExactlyOne a -> b)
-> ExactlyOne a
-> ExactlyOne b
(<<=) =
error "todo: Course.Extend (<<=)#instance ExactlyOne"
| Implement the instance for @List@.
> > > i d < < = ( 1 : . 2 : . 3 : . 4 : . Nil )
> > > reverse < < = ( ( 1 : . 2 : . 3 : . Nil ) : . ( 4 : . 5 : . 6 : . Nil ) : . Nil )
instance Extend List where
(<<=) ::
(List a -> b)
-> List a
-> List b
(<<=) =
error "todo: Course.Extend (<<=)#instance List"
| Implement the instance for @Optional@.
> > > i d < < = ( Full 7 )
Full ( Full 7 )
instance Extend Optional where
(<<=) ::
(Optional a -> b)
-> Optional a
-> Optional b
(<<=) =
error "todo: Course.Extend (<<=)#instance Optional"
> > > ( ExactlyOne 7 )
ExactlyOne ( ExactlyOne 7 )
> > > ( 1 : . 2 : . 3 : . 4 : . Nil )
> > > ( Full 7 )
Full ( Full 7 )
cojoin ::
Extend f =>
f a
-> f (f a)
cojoin =
error "todo: Course.Extend#cojoin"
|
64bb5aa2fe08472a78a1abd1258a6a812e3167b906ec7a2a6b3140d93d094e77 | herbelin/coq-hh | evar_tactics.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Term
open Util
open Evar_refiner
open Tacmach
open Tacexpr
open Refiner
open Proof_type
open Evd
open Sign
open Termops
(* The instantiate tactic *)
(* [instantiate n (ist,rawc) ido gl]: instantiate the [n]-th uninstantiated
   existential variable occurring at location [ido] -- the conclusion, or a
   hypothesis' type and/or body -- with the untyped term [rawc], interpreted
   in the evar's environment extended with the Ltac bindings from [ist].
   Fails if [n] is out of range or not positive. *)
let instantiate n (ist,rawc) ido gl =
  let sigma = gl.sigma in
  (* collect the evars occurring at the requested location, in order *)
  let evl =
    match ido with
	ConclLocation () -> evar_list sigma (pf_concl gl)
      | HypLocation (id,hloc) ->
	  let decl = Environ.lookup_named_val id (Goal.V82.hyps sigma (sig_it gl)) in
	    match hloc with
		InHyp ->
		  (match decl with
		      (_,None,typ) -> evar_list sigma typ
		    | _ -> error
			"Please be more specific: in type or value?")
	      | InHypTypeOnly ->
		  let (_, _, typ) = decl in evar_list sigma typ
	      | InHypValueOnly ->
		  (match decl with
		      (_,Some body,_) -> evar_list sigma body
		    | _ -> error "Not a defined hypothesis.") in
  if List.length evl < n then
    error "Not enough uninstantiated existential variables.";
  if n <= 0 then error "Incorrect existential variable index.";
  let evk,_ = List.nth evl (n-1) in
  let evi = Evd.find sigma evk in
  let ltac_vars = Tacinterp.extract_ltac_constr_values ist (Evd.evar_env evi) in
  (* refine the chosen evar with rawc, then normalize remaining evars *)
  let sigma' = w_refine (evk,evi) (ltac_vars,rawc) sigma in
  tclTHEN
    (tclEVARS sigma')
    tclNORMEVAR
    gl
(* [let_evar name typ gls] creates a fresh existential variable of type
   [typ] (tagged as a goal evar) and introduces it into the context under
   [name] via a let-in, without occurrences to rewrite ([nowhere]). *)
let let_evar name typ gls =
  let src = (dummy_loc,GoalEvar) in
  let sigma',evar = Evarutil.new_evar gls.sigma (pf_env gls) ~src typ in
  Refiner.tclTHEN (Refiner.tclEVARS sigma')
    (Tactics.letin_tac None name evar None nowhere) gls
| null | https://raw.githubusercontent.com/herbelin/coq-hh/296d03d5049fea661e8bdbaf305ed4bf6d2001d2/tactics/evar_tactics.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
The instantiate tactic | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Term
open Util
open Evar_refiner
open Tacmach
open Tacexpr
open Refiner
open Proof_type
open Evd
open Sign
open Termops
(* [instantiate n (ist,rawc) ido gl] refines the [n]-th (1-based)
   uninstantiated existential variable occurring at location [ido]
   (conclusion, or hypothesis type/body) with the raw term [rawc],
   interpreted under the Ltac context [ist]. *)
let instantiate n (ist,rawc) ido gl =
  let sigma = gl.sigma in
  (* Collect the evars at the requested location, in textual order. *)
  let evl =
    match ido with
        ConclLocation () -> evar_list sigma (pf_concl gl)
      | HypLocation (id,hloc) ->
          let decl = Environ.lookup_named_val id (Goal.V82.hyps sigma (sig_it gl)) in
            match hloc with
                InHyp ->
                  (* Only unambiguous for assumptions (no body). *)
                  (match decl with
                      (_,None,typ) -> evar_list sigma typ
                    | _ -> error
                        "Please be more specific: in type or value?")
              | InHypTypeOnly ->
                  let (_, _, typ) = decl in evar_list sigma typ
              | InHypValueOnly ->
                  (match decl with
                      (_,Some body,_) -> evar_list sigma body
                    | _ -> error "Not a defined hypothesis.") in
  if List.length evl < n then
    error "Not enough uninstantiated existential variables.";
  if n <= 0 then error "Incorrect existential variable index.";
  (* [n] is 1-based: pick the n-th evar in order of occurrence. *)
  let evk,_ = List.nth evl (n-1) in
  let evi = Evd.find sigma evk in
  let ltac_vars = Tacinterp.extract_ltac_constr_values ist (Evd.evar_env evi) in
  (* Refine the chosen evar, then normalize remaining evars in the goal. *)
  let sigma' = w_refine (evk,evi) (ltac_vars,rawc) sigma in
  tclTHEN
    (tclEVARS sigma')
    tclNORMEVAR
    gl
(* [let_evar name typ gls] creates a fresh existential variable of type
   [typ] (tagged as a goal evar) and introduces it into the context under
   [name] via a let-in, without occurrences to rewrite ([nowhere]). *)
let let_evar name typ gls =
  let src = (dummy_loc,GoalEvar) in
  let sigma',evar = Evarutil.new_evar gls.sigma (pf_env gls) ~src typ in
  Refiner.tclTHEN (Refiner.tclEVARS sigma')
    (Tactics.letin_tac None name evar None nowhere) gls
|
b147e21940452bae26c71610c4eb5d4227f72c6a828ca30585896c1a2013a5b3 | ergenekonyigit/trendcat | dev.cljs | (ns ^:figwheel-no-load trendcat.dev
(:require
[trendcat.core :as core]
[devtools.core :as devtools]))
;; Route println and friends to the browser console.
(enable-console-print!)

;; Install cljs-devtools formatting in the browser console.
(devtools/install!)

;; Mount the application (dev entry point delegates to trendcat.core).
(core/init!)
| null | https://raw.githubusercontent.com/ergenekonyigit/trendcat/6e12fa78df75f1421a1c32c88bd27ac11a599d86/env/dev/cljs/trendcat/dev.cljs | clojure | (ns ^:figwheel-no-load trendcat.dev
(:require
[trendcat.core :as core]
[devtools.core :as devtools]))
;; Route println and friends to the browser console.
(enable-console-print!)

;; Install cljs-devtools formatting in the browser console.
(devtools/install!)

;; Mount the application (dev entry point delegates to trendcat.core).
(core/init!)
| |
97ab2ca9994511e9d0dd96faaad7d0c039163b8eb9e4d423353f85a27e41de24 | wireapp/wire-server | User.hs | # LANGUAGE DeriveGeneric #
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RecordWildCards #-}
-- This file is part of the Wire Server implementation.
--
Copyright ( C ) 2022 Wire Swiss GmbH < >
--
-- This program is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
-- later version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
-- details.
--
You should have received a copy of the GNU Affero General Public License along
-- with this program. If not, see </>.
module Wire.API.User
( UserIdList (..),
QualifiedUserIdList (..),
LimitedQualifiedUserIdList (..),
ScimUserInfo (..),
ScimUserInfos (..),
UserSet (..),
-- Profiles
UserProfile (..),
SelfProfile (..),
-- User (should not be here)
User (..),
userEmail,
userPhone,
userSSOId,
userIssuer,
userSCIMExternalId,
scimExternalId,
ssoIssuerAndNameId,
connectedProfile,
publicProfile,
userObjectSchema,
* NewUser
NewUserPublic (..),
RegisterError (..),
RegisterSuccess (..),
RegisterResponses,
RegisterInternalResponses,
NewUser (..),
emptyNewUser,
NewUserSpar (..),
CreateUserSparError (..),
CreateUserSparInternalResponses,
newUserFromSpar,
urefToExternalId,
urefToEmail,
ExpiresIn,
newUserInvitationCode,
newUserTeam,
newUserEmail,
newUserPhone,
newUserSSOId,
isNewUserEphemeral,
isNewUserTeamMember,
-- * NewUserOrigin
NewUserOrigin (..),
InvitationCode (..),
NewTeamUser (..),
BindingNewTeamUser (..),
-- * Profile Updates
UserUpdate (..),
UpdateProfileError (..),
PutSelfResponses,
PasswordChange (..),
ChangePasswordError (..),
ChangePasswordResponses,
LocaleUpdate (..),
EmailUpdate (..),
PhoneUpdate (..),
ChangePhoneError (..),
ChangePhoneResponses,
RemoveIdentityError (..),
RemoveIdentityResponses,
HandleUpdate (..),
ChangeHandleError (..),
ChangeHandleResponses,
NameUpdate (..),
ChangeEmailResponse (..),
-- * Account Deletion
DeleteUser (..),
mkDeleteUser,
VerifyDeleteUser (..),
mkVerifyDeleteUser,
DeletionCodeTimeout (..),
DeleteUserResult (..),
-- * List Users
ListUsersQuery (..),
-- * re-exports
module Wire.API.User.Identity,
module Wire.API.User.Profile,
* 2nd factor auth
VerificationAction (..),
SendVerificationCode (..),
)
where
import Control.Applicative
import Control.Error.Safe (rightMay)
import Control.Lens (over, view, (.~), (?~), (^.))
import Data.Aeson (FromJSON (..), ToJSON (..))
import qualified Data.Aeson.Types as A
import qualified Data.Attoparsec.ByteString as Parser
import Data.ByteString.Builder (toLazyByteString)
import Data.ByteString.Conversion
import qualified Data.CaseInsensitive as CI
import qualified Data.Code as Code
import qualified Data.Currency as Currency
import Data.Domain (Domain (Domain))
import Data.Either.Extra (maybeToEither)
import Data.Handle (Handle)
import qualified Data.HashMap.Strict.InsOrd as InsOrdHashMap
import Data.Id
import Data.Json.Util (UTCTimeMillis, (#))
import Data.LegalHold (UserLegalHoldStatus)
import Data.Misc (PlainTextPassword (..))
import Data.Qualified
import Data.Range
import Data.SOP
import Data.Schema
import Data.String.Conversions (cs)
import qualified Data.Swagger as S
import qualified Data.Text as T
import Data.Text.Ascii
import qualified Data.Text.Encoding as T
import Data.UUID (UUID, nil)
import qualified Data.UUID as UUID
import Deriving.Swagger
import GHC.TypeLits
import qualified Generics.SOP as GSOP
import Imports
import qualified SAML2.WebSSO as SAML
import qualified SAML2.WebSSO.Types.Email as SAMLEmail
import Servant (FromHttpApiData (..), ToHttpApiData (..), type (.++))
import qualified Test.QuickCheck as QC
import URI.ByteString (serializeURIRef)
import qualified Web.Cookie as Web
import Wire.API.Error
import Wire.API.Error.Brig
import qualified Wire.API.Error.Brig as E
import Wire.API.Provider.Service (ServiceRef)
import Wire.API.Routes.MultiVerb
import Wire.API.Team (BindingNewTeam, bindingNewTeamObjectSchema)
import Wire.API.Team.Role
import Wire.API.User.Activation (ActivationCode)
import Wire.API.User.Auth (CookieLabel)
import Wire.API.User.Identity
import Wire.API.User.Profile
import Wire.API.User.RichInfo
import Wire.Arbitrary (Arbitrary (arbitrary), GenericUniform (..))
--------------------------------------------------------------------------------
UserIdList
-- | This datatype replaces the old `Members` datatype,
-- which has been replaced by `SimpleMembers`. This is
-- needed due to backwards compatible reasons since old
-- clients will break if we switch these types. Also, this
-- definition represents better what information it carries
newtype UserIdList = UserIdList {mUsers :: [UserId]}
  deriving stock (Eq, Show, Generic)
  deriving newtype (Arbitrary)
  deriving (FromJSON, ToJSON, S.ToSchema) via Schema UserIdList

-- | Serialized as an object with a single @user_ids@ array.
instance ToSchema UserIdList where
  schema =
    object "UserIdList" $
      UserIdList
        <$> mUsers
          .= field "user_ids" (array schema)
--------------------------------------------------------------------------------
-- QualifiedUserIdList
-- | A list of qualified (domain-tagged) user ids.
newtype QualifiedUserIdList = QualifiedUserIdList {qualifiedUserIdList :: [Qualified UserId]}
  deriving stock (Eq, Show, Generic)
  deriving newtype (Arbitrary)
  deriving (FromJSON, ToJSON, S.ToSchema) via Schema QualifiedUserIdList

instance ToSchema QualifiedUserIdList where
  schema =
    object "QualifiedUserIdList" $
      QualifiedUserIdList
        <$> qualifiedUserIdList
          .= field "qualified_user_ids" (array schema)
        -- The unqualified @user_ids@ field is emitted alongside for old
        -- clients but marked deprecated in favour of @qualified_user_ids@.
        <* (fmap qUnqualified . qualifiedUserIdList)
          .= field "user_ids" (deprecatedSchema "qualified_user_ids" (array schema))
--------------------------------------------------------------------------------
LimitedQualifiedUserIdList
-- | We can not use 'Wrapped' here because all the instances require proof that 1
-- is less than or equal to 'max'.
newtype LimitedQualifiedUserIdList (max :: Nat) = LimitedQualifiedUserIdList
  -- A non-empty, size-bounded list of qualified user ids.
  {qualifiedUsers :: Range 1 max [Qualified UserId]}
  deriving stock (Eq, Show, Generic)
  deriving (S.ToSchema) via CustomSwagger '[FieldLabelModifier CamelToSnake] (LimitedQualifiedUserIdList max)

instance (KnownNat max, 1 <= max) => Arbitrary (LimitedQualifiedUserIdList max) where
  arbitrary = LimitedQualifiedUserIdList <$> arbitrary

-- JSON wire format uses the @qualified_users@ key.
instance (KnownNat max, 1 <= max) => FromJSON (LimitedQualifiedUserIdList max) where
  parseJSON = A.withObject "LimitedQualifiedUserIdList" $ \o ->
    LimitedQualifiedUserIdList <$> o A..: "qualified_users"

instance 1 <= max => ToJSON (LimitedQualifiedUserIdList max) where
  toJSON e = A.object ["qualified_users" A..= qualifiedUsers e]
--------------------------------------------------------------------------------
UserProfile
-- | A subset of the data of an existing 'User' that is returned on the API and is visible to
-- other users. Each user also has access to their own profile in a richer format --
-- 'SelfProfile'.
data UserProfile = UserProfile
  { profileQualifiedId :: Qualified UserId,
    profileName :: Name,
    -- | DEPRECATED
    profilePict :: Pict,
    profileAssets :: [Asset],
    profileAccentId :: ColourId,
    profileDeleted :: Bool,
    -- | Set if the user represents an external service,
    -- i.e. it is a "bot".
    profileService :: Maybe ServiceRef,
    profileHandle :: Maybe Handle,
    -- | Expiry time; set for ephemeral users (see 'userExpire').
    profileExpire :: Maybe UTCTimeMillis,
    profileTeam :: Maybe TeamId,
    -- | Omitted by default ('connectedProfile' sets it to 'Nothing'); may be
    -- filled in intentionally by callers.
    profileEmail :: Maybe Email,
    profileLegalholdStatus :: UserLegalHoldStatus
  }
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform UserProfile)
  deriving (FromJSON, ToJSON, S.ToSchema) via (Schema UserProfile)
-- Wire format of 'UserProfile'. Note the deprecated, unqualified @id@ field
-- emitted next to @qualified_id@, and the lenient parsing of @picture@ and
-- @assets@ (defaulting when absent).
instance ToSchema UserProfile where
  schema =
    object "UserProfile" $
      UserProfile
        <$> profileQualifiedId
          .= field "qualified_id" schema
        -- Deprecated plain @id@, kept for old clients.
        <* (qUnqualified . profileQualifiedId)
          .= optional (field "id" (deprecatedSchema "qualified_id" schema))
        <*> profileName
          .= field "name" schema
        <*> profilePict
          .= (field "picture" schema <|> pure noPict)
        <*> profileAssets
          .= (field "assets" (array schema) <|> pure [])
        <*> profileAccentId
          .= field "accent_id" schema
        -- @deleted@ is only serialized when True; absent means False.
        <*> ((\del -> if del then Just True else Nothing) . profileDeleted)
          .= maybe_ (fromMaybe False <$> optField "deleted" schema)
        <*> profileService
          .= maybe_ (optField "service" schema)
        <*> profileHandle
          .= maybe_ (optField "handle" schema)
        <*> profileExpire
          .= maybe_ (optField "expires_at" schema)
        <*> profileTeam
          .= maybe_ (optField "team" schema)
        <*> profileEmail
          .= maybe_ (optField "email" schema)
        <*> profileLegalholdStatus
          .= field "legalhold_status" schema
--------------------------------------------------------------------------------
SelfProfile
-- | A self profile.
newtype SelfProfile = SelfProfile
  { selfUser :: User
  }
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform SelfProfile)
  deriving newtype (S.ToSchema)

-- Serialized transparently as the wrapped 'User' object (no extra nesting).
instance ToJSON SelfProfile where
  toJSON (SelfProfile u) = toJSON u

instance FromJSON SelfProfile where
  parseJSON = A.withObject "SelfProfile" $ \o ->
    SelfProfile <$> parseJSON (A.Object o)
--------------------------------------------------------------------------------
-- User
--
-- FUTUREWORK: Move this type somewhere else, it's not part of the client API.
-- | The data of an existing user.
data User = User
  { userId :: UserId,
    userQualifiedId :: Qualified UserId,
    -- | User identity. For endpoints like @/self@, it will be present in the response iff
    -- the user is activated, and the email/phone contained in it will be guaranteedly
    -- verified. {#RefActivation}
    userIdentity :: Maybe UserIdentity,
    -- | required; non-unique
    userDisplayName :: Name,
    -- | DEPRECATED
    userPict :: Pict,
    userAssets :: [Asset],
    userAccentId :: ColourId,
    userDeleted :: Bool,
    userLocale :: Locale,
    -- | Set if the user represents an external service,
    -- i.e. it is a "bot".
    userService :: Maybe ServiceRef,
    -- | not required; must be unique if present
    userHandle :: Maybe Handle,
    -- | Set if the user is ephemeral
    userExpire :: Maybe UTCTimeMillis,
    -- | Set if the user is part of a binding team
    userTeam :: Maybe TeamId,
    -- | How is the user profile managed (e.g. if it's via SCIM then the user profile
    -- can't be edited via normal means)
    userManagedBy :: ManagedBy
  }
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform User)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema User)
-- FUTUREWORK:
-- disentangle serialization for 'User', 'NewUser', 'UserIdentity', 'NewUserOrigin'.
instance ToSchema User where
  schema = object "User" userObjectSchema
-- | Wire-format object schema for 'User'; shared with 'ToSchema User'.
-- Optional wire fields default leniently on parse (@picture@, @assets@,
-- @deleted@, @managed_by@).
userObjectSchema :: ObjectSchema SwaggerDoc User
userObjectSchema =
  User
    <$> userId
      .= field "id" schema
    <*> userQualifiedId
      .= field "qualified_id" schema
    <*> userIdentity
      .= maybeUserIdentityObjectSchema
    <*> userDisplayName
      .= field "name" schema
    <*> userPict
      .= (fromMaybe noPict <$> optField "picture" schema)
    <*> userAssets
      .= (fromMaybe [] <$> optField "assets" (array schema))
    <*> userAccentId
      .= field "accent_id" schema
    -- @deleted@ is only written when True; absent parses as False.
    <*> (fromMaybe False <$> (\u -> if userDeleted u then Just True else Nothing) .= maybe_ (optField "deleted" schema))
    <*> userLocale
      .= field "locale" schema
    <*> userService
      .= maybe_ (optField "service" schema)
    <*> userHandle
      .= maybe_ (optField "handle" schema)
    <*> userExpire
      .= maybe_ (optField "expires_at" schema)
    <*> userTeam
      .= maybe_ (optField "team" schema)
    <*> userManagedBy
      .= (fromMaybe ManagedByWire <$> optField "managed_by" schema)
-- | The verified email address of the user, if any.
userEmail :: User -> Maybe Email
userEmail u = userIdentity u >>= emailIdentity

-- | The verified phone number of the user, if any.
userPhone :: User -> Maybe Phone
userPhone u = userIdentity u >>= phoneIdentity

-- | The SSO identity of the user, if any.
userSSOId :: User -> Maybe UserSSOId
userSSOId u = userIdentity u >>= ssoIdentity

-- | The SCIM external id derived from the user's SSO id and management mode.
userSCIMExternalId :: User -> Maybe Text
userSCIMExternalId usr = userSSOId usr >>= scimExternalId (userManagedBy usr)
-- FUTUREWORK: this is only ignoring case in the email format, and emails should be
-- handled case-insensitively. (tracking issue 909)
scimExternalId :: ManagedBy -> UserSSOId -> Maybe Text
scimExternalId managedBy ssoId =
  case (managedBy, ssoId) of
    -- An explicit external id is always usable, regardless of management mode.
    (_, UserScimExternalId extId) -> Just extId
    -- SCIM-managed SAML users: render the SAML NameID as the external id.
    (ManagedByScim, UserSSOId (SAML.UserRef _ nameIdXML)) ->
      Just (CI.original (SAML.unsafeShowNameID nameIdXML))
    -- Wire-managed SAML users have no SCIM external id.
    (ManagedByWire, UserSSOId _) -> Nothing
-- | Render the SAML issuer URI and subject NameID of an SSO id as plain text.
-- Explicit SCIM external ids carry no SAML data and yield 'Nothing'.
ssoIssuerAndNameId :: UserSSOId -> Maybe (Text, Text)
ssoIssuerAndNameId ssoId =
  case ssoId of
    UserSSOId (SAML.UserRef (SAML.Issuer uri) nameIdXML) ->
      Just
        ( cs (toLazyByteString (serializeURIRef uri)),
          CI.original (SAML.unsafeShowNameID nameIdXML)
        )
    UserScimExternalId _ -> Nothing
-- | The SAML issuer of the user's SSO identity, when that identity is a
-- SAML 'UserRef' (explicit SCIM external ids carry no issuer).
userIssuer :: User -> Maybe SAML.Issuer
userIssuer user =
  case userSSOId user of
    Just (UserSSOId (SAML.UserRef issuer _)) -> Just issuer
    _ -> Nothing
-- | Build the 'UserProfile' of a 'User' as shown to others; the email is
-- omitted by default (see comment below).
connectedProfile :: User -> UserLegalHoldStatus -> UserProfile
connectedProfile u legalHoldStatus =
  UserProfile
    { profileQualifiedId = userQualifiedId u,
      profileHandle = userHandle u,
      profileName = userDisplayName u,
      profilePict = userPict u,
      profileAssets = userAssets u,
      profileAccentId = userAccentId u,
      profileService = userService u,
      profileDeleted = userDeleted u,
      profileExpire = userExpire u,
      profileTeam = userTeam u,
      -- We don't want to show the email by default;
      -- However we do allow adding it back in intentionally later.
      profileEmail = Nothing,
      profileLegalholdStatus = legalHoldStatus
    }
-- FUTUREWORK: should public and conect profile be separate types?

-- | The public (unconnected) view of a user; currently identical to
-- 'connectedProfile' with the email explicitly blanked.
publicProfile :: User -> UserLegalHoldStatus -> UserProfile
publicProfile u legalHoldStatus =
  -- Note that we explicitly unpack and repack the types here rather than using
  -- RecordWildCards or something similar because we want changes to the public profile
  -- to be EXPLICIT and INTENTIONAL so we don't accidentally leak sensitive data.
  let UserProfile
        { profileQualifiedId,
          profileHandle,
          profileName,
          profilePict,
          profileAssets,
          profileAccentId,
          profileService,
          profileDeleted,
          profileExpire,
          profileTeam,
          profileLegalholdStatus
        } = connectedProfile u legalHoldStatus
   in UserProfile
        { profileEmail = Nothing,
          profileQualifiedId,
          profileHandle,
          profileName,
          profilePict,
          profileAssets,
          profileAccentId,
          profileService,
          profileDeleted,
          profileExpire,
          profileTeam,
          profileLegalholdStatus
        }
--------------------------------------------------------------------------------
NewUser
-- | We use the same 'NewUser' type for the @\/register@ and @\/i\/users@ endpoints. This
-- newtype is used as request body type for the public @\/register@ endpoint, where only a
-- subset of the 'NewUser' functionality should be allowed.
--
-- Specifically, we forbid the following:
--
--   * Setting 'SSOIdentity' (SSO users are created by Spar)
--
--   * Setting the UUID (only needed so that Spar can find the user if Spar crashes before it
--     finishes creating the user).
--
--   * Setting 'ManagedBy' (it should be the default in all cases unless Spar creates a
--     SCIM-managed user)
newtype NewUserPublic = NewUserPublic NewUser
  deriving stock (Eq, Show, Generic)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema NewUserPublic)

-- Parsing goes through 'validateNewUserPublic' to enforce the restrictions above.
instance ToSchema NewUserPublic where
  schema =
    unwrap .= withParser schema (either fail pure . validateNewUserPublic)
    where
      unwrap (NewUserPublic nu) = nu
-- | Enforce the public-registration restrictions on a 'NewUser': no SSO id,
-- no caller-chosen UUID, and only Wire-managed users. The first violated
-- restriction (in that order) determines the error message.
validateNewUserPublic :: NewUser -> Either String NewUserPublic
validateNewUserPublic nu =
  if isJust (newUserSSOId nu)
    then Left "SSO-managed users are not allowed here."
    else
      if isJust (newUserUUID nu)
        then Left "it is not allowed to provide a UUID for the users here."
        else
          if newUserManagedBy nu `notElem` [Nothing, Just ManagedByWire]
            then Left "only managed-by-Wire users can be created here."
            else Right (NewUserPublic nu)
-- | A user is Ephemeral if she has neither email, phone, nor sso credentials and is not
-- created via scim. Ephemeral users can be deleted after expires_in or sessionTokenTimeout
-- (whichever comes earlier).
isNewUserEphemeral :: NewUser -> Bool
isNewUserEphemeral u = isNothing (newUserIdentity u) && notScim
  where
    -- An unset management mode counts as not-SCIM.
    notScim = case newUserManagedBy u of
      Just ManagedByScim -> False
      Just ManagedByWire -> True
      Nothing -> True
-- | Whether the new user joins an existing team (invited or SSO member);
-- team creators and non-team users are not "team members" here.
isNewUserTeamMember :: NewUser -> Bool
isNewUserTeamMember u = case newUserTeam u of
  Nothing -> False
  Just (NewTeamCreator _) -> False
  Just (NewTeamMember _) -> True
  Just (NewTeamMemberSSO _) -> True
-- Only generate 'NewUser' values that pass the public-registration validation.
instance Arbitrary NewUserPublic where
  arbitrary = arbitrary `QC.suchThatMap` (rightMay . validateNewUserPublic)
-- | All ways a public registration request can fail; mapped one-to-one onto
-- 'RegisterErrorResponses' via the derived 'AsUnion' instance (constructor
-- order must match the response list).
data RegisterError
  = RegisterErrorAllowlistError
  | RegisterErrorInvalidInvitationCode
  | RegisterErrorMissingIdentity
  | RegisterErrorUserKeyExists
  | RegisterErrorInvalidActivationCodeWrongUser
  | RegisterErrorInvalidActivationCodeWrongCode
  | RegisterErrorInvalidEmail
  | RegisterErrorInvalidPhone
  | RegisterErrorBlacklistedPhone
  | RegisterErrorBlacklistedEmail
  | RegisterErrorTooManyTeamMembers
  | RegisterErrorUserCreationRestricted
  deriving (Show, Generic)
  deriving (AsUnion RegisterErrorResponses) via GenericAsUnion RegisterErrorResponses RegisterError

instance GSOP.Generic RegisterError
-- | Error responses for registration, in the same order as the constructors
-- of 'RegisterError'.
type RegisterErrorResponses =
  '[ ErrorResponse 'AllowlistError,
     ErrorResponse 'InvalidInvitationCode,
     ErrorResponse 'MissingIdentity,
     ErrorResponse 'UserKeyExists,
     ErrorResponse 'InvalidActivationCodeWrongUser,
     ErrorResponse 'InvalidActivationCodeWrongCode,
     ErrorResponse 'InvalidEmail,
     ErrorResponse 'InvalidPhone,
     ErrorResponse 'BlacklistedPhone,
     ErrorResponse 'BlacklistedEmail,
     ErrorResponse 'TooManyTeamMembers,
     ErrorResponse 'UserCreationRestricted
   ]
-- | Responses for public registration: all error cases plus a 201 carrying
-- the new profile with Set-Cookie and Location headers.
type RegisterResponses =
  RegisterErrorResponses
    .++ '[ WithHeaders
             '[ DescHeader "Set-Cookie" "Cookie" Web.SetCookie,
                DescHeader "Location" "UserId" UserId
              ]
             RegisterSuccess
             (Respond 201 "User created and pending activation" SelfProfile)
         ]

-- Pack/unpack the Set-Cookie and Location headers around the profile body;
-- the Location header is recomputed from the profile's user id.
instance AsHeaders '[Web.SetCookie, UserId] SelfProfile RegisterSuccess where
  fromHeaders (I cookie :* (_ :* Nil), sp) = RegisterSuccess cookie sp
  toHeaders (RegisterSuccess cookie sp) = (I cookie :* (I (userId (selfUser sp)) :* Nil), sp)

-- | Successful registration: the session cookie plus the new self profile.
data RegisterSuccess = RegisterSuccess Web.SetCookie SelfProfile

instance (res ~ RegisterResponses) => AsUnion res (Either RegisterError RegisterSuccess) where
  toUnion = eitherToUnion (toUnion @RegisterErrorResponses) (Z . I)
  fromUnion = eitherFromUnion (fromUnion @RegisterErrorResponses) (unI . unZ)
-- | Internal variant of 'RegisterResponses': no cookie header, only Location.
type RegisterInternalResponses =
  RegisterErrorResponses
    .++ '[ WithHeaders
             '[DescHeader "Location" "UserId" UserId]
             SelfProfile
             (Respond 201 "User created and pending activation" SelfProfile)
         ]

-- Location header is derived from (and discarded back into) the profile itself.
instance AsHeaders '[UserId] SelfProfile SelfProfile where
  fromHeaders (_ :* Nil, sp) = sp
  toHeaders sp = (I (userId (selfUser sp)) :* Nil, sp)

instance (res ~ RegisterInternalResponses) => AsUnion res (Either RegisterError SelfProfile) where
  toUnion = eitherToUnion (toUnion @RegisterErrorResponses) (Z . I)
  fromUnion = eitherFromUnion (fromUnion @RegisterErrorResponses) (unI . unZ)
-- | Render the subject NameID of a SAML 'UserRef' as a plain external id.
urefToExternalId :: SAML.UserRef -> Maybe Text
urefToExternalId uref =
  CI.original <$> SAML.shortShowNameID (view SAML.uidSubject uref)
-- | Extract a parsed email address from a SAML 'UserRef' whose subject
-- NameID is of the email flavor; any other NameID kind yields 'Nothing'.
urefToEmail :: SAML.UserRef -> Maybe Email
urefToEmail uref =
  case view (SAML.uidSubject . SAML.nameID) uref of
    SAML.UNameIDEmail email -> parseEmail (SAMLEmail.render (CI.original email))
    _ -> Nothing
-- | Errors when Spar creates a user: either the handle change or the
-- registration itself failed.
data CreateUserSparError
  = CreateUserSparHandleError ChangeHandleError
  | CreateUserSparRegistrationError RegisterError
  deriving (Show, Generic)

-- | Union of both error response lists (register errors first).
type CreateUserSparErrorResponses =
  RegisterErrorResponses .++ ChangeHandleErrorResponses

-- | Public variant: 201 with cookie and Location headers.
type CreateUserSparResponses =
  CreateUserSparErrorResponses
    .++ '[ WithHeaders
             '[ DescHeader "Set-Cookie" "Cookie" Web.SetCookie,
                DescHeader "Location" "UserId" UserId
              ]
             RegisterSuccess
             (Respond 201 "User created and pending activation" SelfProfile)
         ]

-- | Internal variant: 201 with Location header only.
type CreateUserSparInternalResponses =
  CreateUserSparErrorResponses
    .++ '[ WithHeaders
             '[DescHeader "Location" "UserId" UserId]
             SelfProfile
             (Respond 201 "User created and pending activation" SelfProfile)
         ]
-- Route each error constructor into its half of the combined response union
-- via 'errToEither'/'errFromEither'.
instance (res ~ CreateUserSparErrorResponses) => AsUnion res CreateUserSparError where
  toUnion = eitherToUnion (toUnion @ChangeHandleErrorResponses) (toUnion @RegisterErrorResponses) . errToEither
  fromUnion = errFromEither . eitherFromUnion (fromUnion @ChangeHandleErrorResponses) (fromUnion @RegisterErrorResponses)

instance (res ~ CreateUserSparResponses) => AsUnion res (Either CreateUserSparError RegisterSuccess) where
  toUnion = eitherToUnion (toUnion @CreateUserSparErrorResponses) (Z . I)
  fromUnion = eitherFromUnion (fromUnion @CreateUserSparErrorResponses) (unI . unZ)

instance (res ~ CreateUserSparInternalResponses) => AsUnion res (Either CreateUserSparError SelfProfile) where
  toUnion = eitherToUnion (toUnion @CreateUserSparErrorResponses) (Z . I)
  fromUnion = eitherFromUnion (fromUnion @CreateUserSparErrorResponses) (unI . unZ)
-- | Split a 'CreateUserSparError' into its handle/registration halves.
errToEither :: CreateUserSparError -> Either ChangeHandleError RegisterError
errToEither = \case
  CreateUserSparHandleError e -> Left e
  CreateUserSparRegistrationError e -> Right e
-- | Rebuild a 'CreateUserSparError' from its handle/registration halves;
-- inverse of 'errToEither'.
errFromEither :: Either ChangeHandleError RegisterError -> CreateUserSparError
errFromEither = either CreateUserSparHandleError CreateUserSparRegistrationError
-- | User-creation payload used by Spar for SSO/SCIM-provisioned users; the
-- UUID is fixed up front so Spar can find the user again after a crash.
data NewUserSpar = NewUserSpar
  { newUserSparUUID :: UUID,
    newUserSparSSOId :: UserSSOId,
    newUserSparDisplayName :: Name,
    newUserSparTeamId :: TeamId,
    newUserSparManagedBy :: ManagedBy,
    newUserSparHandle :: Maybe Handle,
    newUserSparRichInfo :: Maybe RichInfo,
    newUserSparLocale :: Maybe Locale,
    newUserSparRole :: Role
  }
  deriving stock (Eq, Show, Generic)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema NewUserSpar)

-- Internal wire format: field names mirror the record selectors verbatim.
instance ToSchema NewUserSpar where
  schema =
    object "NewUserSpar" $
      NewUserSpar
        <$> newUserSparUUID
          .= field "newUserSparUUID" genericToSchema
        <*> newUserSparSSOId
          .= field "newUserSparSSOId" genericToSchema
        <*> newUserSparDisplayName
          .= field "newUserSparDisplayName" schema
        <*> newUserSparTeamId
          .= field "newUserSparTeamId" schema
        <*> newUserSparManagedBy
          .= field "newUserSparManagedBy" schema
        <*> newUserSparHandle
          .= maybe_ (optField "newUserSparHandle" schema)
        <*> newUserSparRichInfo
          .= maybe_ (optField "newUserSparRichInfo" schema)
        <*> newUserSparLocale
          .= maybe_ (optField "newUserSparLocale" schema)
        <*> newUserSparRole
          .= field "newUserSparRole" schema
-- | Convert a Spar creation request into a generic 'NewUser': fixed UUID,
-- SSO identity, team-member-via-SSO origin, everything else defaulted.
newUserFromSpar :: NewUserSpar -> NewUser
newUserFromSpar new =
  NewUser
    { newUserDisplayName = newUserSparDisplayName new,
      newUserUUID = Just $ newUserSparUUID new,
      newUserIdentity = Just $ SSOIdentity (newUserSparSSOId new) Nothing Nothing,
      newUserPict = Nothing,
      newUserAssets = [],
      newUserAccentId = Nothing,
      newUserEmailCode = Nothing,
      newUserPhoneCode = Nothing,
      newUserOrigin = Just . NewUserOriginTeamUser . NewTeamMemberSSO $ newUserSparTeamId new,
      newUserLabel = Nothing,
      newUserPassword = Nothing,
      newUserExpiresIn = Nothing,
      newUserManagedBy = Just $ newUserSparManagedBy new,
      newUserLocale = newUserSparLocale new
    }
-- | Full user-creation payload, shared by public registration and internal
-- provisioning (see 'NewUserPublic' for the restricted public subset).
data NewUser = NewUser
  { newUserDisplayName :: Name,
    -- | use this as 'UserId' (if 'Nothing', call 'Data.UUID.nextRandom').
    newUserUUID :: Maybe UUID,
    newUserIdentity :: Maybe UserIdentity,
    -- | DEPRECATED
    newUserPict :: Maybe Pict,
    newUserAssets :: [Asset],
    newUserAccentId :: Maybe ColourId,
    newUserEmailCode :: Maybe ActivationCode,
    newUserPhoneCode :: Maybe ActivationCode,
    -- | Where the user comes from: invitation code or team membership.
    newUserOrigin :: Maybe NewUserOrigin,
    newUserLabel :: Maybe CookieLabel,
    newUserLocale :: Maybe Locale,
    newUserPassword :: Maybe PlainTextPassword,
    -- | Only allowed for users without an identity (see 'newUserFromRaw').
    newUserExpiresIn :: Maybe ExpiresIn,
    newUserManagedBy :: Maybe ManagedBy
  }
  deriving stock (Eq, Show, Generic)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema NewUser)
-- | A minimal 'NewUser' with only the display name set; every optional
-- field is 'Nothing'/empty.
emptyNewUser :: Name -> NewUser
emptyNewUser name =
  NewUser
    { newUserDisplayName = name,
      newUserUUID = Nothing,
      newUserIdentity = Nothing,
      newUserPict = Nothing,
      newUserAssets = [],
      newUserAccentId = Nothing,
      newUserEmailCode = Nothing,
      newUserPhoneCode = Nothing,
      newUserOrigin = Nothing,
      newUserLabel = Nothing,
      newUserLocale = Nothing,
      newUserPassword = Nothing,
      newUserExpiresIn = Nothing,
      newUserManagedBy = Nothing
    }
-- | 1 second - 1 week (lifetime of an ephemeral user, in seconds)
type ExpiresIn = Range 1 604800 Integer
-- | Raw representation of 'NewUser' to help with writing Schema instances.
-- Flattens identity and origin into their wire-level component fields;
-- 'newUserFromRaw'/'newUserToRaw' convert to/from the validated form.
data NewUserRaw = NewUserRaw
  { newUserRawDisplayName :: Name,
    newUserRawUUID :: Maybe UUID,
    newUserRawEmail :: Maybe Email,
    newUserRawPhone :: Maybe Phone,
    newUserRawSSOId :: Maybe UserSSOId,
    -- | DEPRECATED
    newUserRawPict :: Maybe Pict,
    newUserRawAssets :: [Asset],
    newUserRawAccentId :: Maybe ColourId,
    newUserRawEmailCode :: Maybe ActivationCode,
    newUserRawPhoneCode :: Maybe ActivationCode,
    newUserRawInvitationCode :: Maybe InvitationCode,
    newUserRawTeamCode :: Maybe InvitationCode,
    newUserRawTeam :: Maybe BindingNewTeamUser,
    newUserRawTeamId :: Maybe TeamId,
    newUserRawLabel :: Maybe CookieLabel,
    newUserRawLocale :: Maybe Locale,
    newUserRawPassword :: Maybe PlainTextPassword,
    newUserRawExpiresIn :: Maybe ExpiresIn,
    newUserRawManagedBy :: Maybe ManagedBy
  }
-- | Wire-format schema for 'NewUserRaw'; all fields but @name@ are optional
-- at this layer (cross-field validation happens in 'newUserFromRaw').
newUserRawObjectSchema :: ObjectSchema SwaggerDoc NewUserRaw
newUserRawObjectSchema =
  NewUserRaw
    <$> newUserRawDisplayName
      .= field "name" schema
    <*> newUserRawUUID
      .= maybe_ (optField "uuid" genericToSchema)
    <*> newUserRawEmail
      .= maybe_ (optField "email" schema)
    <*> newUserRawPhone
      .= maybe_ (optField "phone" schema)
    <*> newUserRawSSOId
      .= maybe_ (optField "sso_id" genericToSchema)
    <*> newUserRawPict
      .= maybe_ (optField "picture" schema)
    <*> newUserRawAssets
      .= (fromMaybe [] <$> optField "assets" (array schema))
    <*> newUserRawAccentId
      .= maybe_ (optField "accent_id" schema)
    <*> newUserRawEmailCode
      .= maybe_ (optField "email_code" schema)
    <*> newUserRawPhoneCode
      .= maybe_ (optField "phone_code" schema)
    <*> newUserRawInvitationCode
      .= maybe_ (optField "invitation_code" schema)
    <*> newUserRawTeamCode
      .= maybe_ (optField "team_code" schema)
    <*> newUserRawTeam
      .= maybe_ (optField "team" schema)
    <*> newUserRawTeamId
      .= maybe_ (optField "team_id" schema)
    <*> newUserRawLabel
      .= maybe_ (optField "label" schema)
    <*> newUserRawLocale
      .= maybe_ (optField "locale" schema)
    <*> newUserRawPassword
      .= maybe_ (optField "password" schema)
    <*> newUserRawExpiresIn
      .= maybe_ (optField "expires_in" schema)
    <*> newUserRawManagedBy
      .= maybe_ (optField "managed_by" schema)
-- Serialize via the raw representation; parsing re-validates the
-- identity/origin/expiry combination through 'newUserFromRaw'.
instance ToSchema NewUser where
  schema =
    object "NewUser" $ newUserToRaw .= withParser newUserRawObjectSchema newUserFromRaw
-- | Flatten a validated 'NewUser' into its raw wire representation, spreading
-- identity and origin over their component fields.
newUserToRaw :: NewUser -> NewUserRaw
newUserToRaw NewUser {..} =
  let maybeOriginNTU = newUserOriginNewTeamUser =<< newUserOrigin
   in NewUserRaw
        { newUserRawDisplayName = newUserDisplayName,
          newUserRawUUID = newUserUUID,
          newUserRawEmail = emailIdentity =<< newUserIdentity,
          newUserRawPhone = phoneIdentity =<< newUserIdentity,
          newUserRawSSOId = ssoIdentity =<< newUserIdentity,
          newUserRawPict = newUserPict,
          newUserRawAssets = newUserAssets,
          newUserRawAccentId = newUserAccentId,
          newUserRawEmailCode = newUserEmailCode,
          newUserRawPhoneCode = newUserPhoneCode,
          newUserRawInvitationCode = newUserOriginInvitationCode =<< newUserOrigin,
          newUserRawTeamCode = newTeamUserCode =<< maybeOriginNTU,
          newUserRawTeam = newTeamUserCreator =<< maybeOriginNTU,
          newUserRawTeamId = newTeamUserTeamId =<< maybeOriginNTU,
          newUserRawLabel = newUserLabel,
          newUserRawLocale = newUserLocale,
          newUserRawPassword = newUserPassword,
          newUserRawExpiresIn = newUserExpiresIn,
          newUserRawManagedBy = newUserManagedBy
        }
-- | Validate a raw payload into a 'NewUser': reconstructs origin and identity
-- from their components, and rejects @expires_in@ for users with an identity.
newUserFromRaw :: NewUserRaw -> A.Parser NewUser
newUserFromRaw NewUserRaw {..} = do
  -- Origin validity depends on whether a password and/or SSO id is present.
  origin <-
    either fail pure $
      maybeNewUserOriginFromComponents
        (isJust newUserRawPassword)
        (isJust newUserRawSSOId)
        (newUserRawInvitationCode, newUserRawTeamCode, newUserRawTeam, newUserRawTeamId)
  let identity = maybeUserIdentityFromComponents (newUserRawEmail, newUserRawPhone, newUserRawSSOId)
  -- Only identity-less (ephemeral) users may carry an expiry.
  expiresIn <-
    case (newUserRawExpiresIn, identity) of
      (Just _, Just _) -> fail "Only users without an identity can expire"
      _ -> pure newUserRawExpiresIn
  pure $
    NewUser
      { newUserDisplayName = newUserRawDisplayName,
        newUserUUID = newUserRawUUID,
        newUserIdentity = identity,
        newUserPict = newUserRawPict,
        newUserAssets = newUserRawAssets,
        newUserAccentId = newUserRawAccentId,
        newUserEmailCode = newUserRawEmailCode,
        newUserPhoneCode = newUserRawPhoneCode,
        newUserOrigin = origin,
        newUserLabel = newUserRawLabel,
        newUserLocale = newUserRawLocale,
        newUserPassword = newUserRawPassword,
        newUserExpiresIn = expiresIn,
        newUserManagedBy = newUserRawManagedBy
      }
-- FUTUREWORK: align more with FromJSON instance?
-- Generates only internally-consistent values: SSO identity forces an
-- SSO-team origin; team users without SSO always get a password; users
-- with an identity never carry an expiry.
instance Arbitrary NewUser where
  arbitrary = do
    newUserIdentity <- arbitrary
    newUserOrigin <- genUserOrigin newUserIdentity
    newUserDisplayName <- arbitrary
    newUserUUID <- QC.elements [Just nil, Nothing]
    newUserPict <- arbitrary
    newUserAssets <- arbitrary
    newUserAccentId <- arbitrary
    newUserEmailCode <- arbitrary
    newUserPhoneCode <- arbitrary
    newUserLabel <- arbitrary
    newUserLocale <- arbitrary
    newUserPassword <- genUserPassword newUserIdentity newUserOrigin
    newUserExpiresIn <- genUserExpiresIn newUserIdentity
    newUserManagedBy <- arbitrary
    pure NewUser {..}
    where
      -- SSO identities must come with an SSO team origin (and only then).
      genUserOrigin newUserIdentity = do
        teamid <- arbitrary
        let hasSSOId = case newUserIdentity of
              Just SSOIdentity {} -> True
              _ -> False
            ssoOrigin = Just (NewUserOriginTeamUser (NewTeamMemberSSO teamid))
            isSsoOrigin (Just (NewUserOriginTeamUser (NewTeamMemberSSO _))) = True
            isSsoOrigin _ = False
        if hasSSOId
          then pure ssoOrigin
          else arbitrary `QC.suchThat` (not . isSsoOrigin)
      -- Non-SSO team users always get a password; otherwise it is optional.
      genUserPassword newUserIdentity newUserOrigin = do
        let hasSSOId = case newUserIdentity of
              Just SSOIdentity {} -> True
              _ -> False
            isTeamUser = case newUserOrigin of
              Just (NewUserOriginTeamUser _) -> True
              _ -> False
        if isTeamUser && not hasSSOId then Just <$> arbitrary else arbitrary
      -- Only identity-less users may expire.
      genUserExpiresIn newUserIdentity =
        if isJust newUserIdentity then pure Nothing else arbitrary
-- | The invitation code of a 'NewUser', if it originates from one.
newUserInvitationCode :: NewUser -> Maybe InvitationCode
newUserInvitationCode nu = newUserOrigin nu >>= newUserOriginInvitationCode
-- | The team-related origin data of a 'NewUser', if any.
newUserTeam :: NewUser -> Maybe NewTeamUser
newUserTeam nu = newUserOrigin nu >>= newUserOriginNewTeamUser
-- | The (to-be-verified) email address of a 'NewUser', if any.
newUserEmail :: NewUser -> Maybe Email
newUserEmail nu = newUserIdentity nu >>= emailIdentity

-- | The (to-be-verified) phone number of a 'NewUser', if any.
newUserPhone :: NewUser -> Maybe Phone
newUserPhone nu = newUserIdentity nu >>= phoneIdentity

-- | The SSO id of a 'NewUser', if any.
newUserSSOId :: NewUser -> Maybe UserSSOId
newUserSSOId nu = newUserIdentity nu >>= ssoIdentity
--------------------------------------------------------------------------------
-- NewUserOrigin

-- | Where a new user comes from: a personal-user invitation code, or some
-- form of team membership ('NewTeamUser').
data NewUserOrigin
  = NewUserOriginInvitationCode InvitationCode
  | NewUserOriginTeamUser NewTeamUser
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform NewUserOrigin)

-- | The raw wire components an origin is parsed from, in this order:
-- invitation_code, team_code, team (creator payload), team_id.
type NewUserOriginComponents = (Maybe InvitationCode, Maybe InvitationCode, Maybe BindingNewTeamUser, Maybe TeamId)
-- | Project the invitation code out of an origin, if present.
newUserOriginInvitationCode :: NewUserOrigin -> Maybe InvitationCode
newUserOriginInvitationCode (NewUserOriginInvitationCode ic) = Just ic
newUserOriginInvitationCode (NewUserOriginTeamUser _) = Nothing

-- | Project the team-user payload out of an origin, if present.
newUserOriginNewTeamUser :: NewUserOrigin -> Maybe NewTeamUser
newUserOriginNewTeamUser (NewUserOriginTeamUser ntu) = Just ntu
newUserOriginNewTeamUser (NewUserOriginInvitationCode _) = Nothing
-- | Decide the origin of a new user from its raw wire components, enforcing
-- mutual exclusion between them.
--
-- At most one of the following may be present: an invitation code, a team
-- (invitation) code, a team-creator payload, or -- for SSO users only -- a
-- team id.  'Nothing' (no origin at all) is also valid.  After the shape
-- check, non-SSO team users are additionally required to have a password.
maybeNewUserOriginFromComponents ::
  -- | Does the user have a password
  Bool ->
  -- | Does the user have an SSO Identity
  Bool ->
  NewUserOriginComponents ->
  Either String (Maybe NewUserOrigin)
maybeNewUserOriginFromComponents hasPassword hasSSO (invcode, teamcode, team, teamid) = do
  -- NB: case order matters; the two specific error cases must come before
  -- the catch-all.
  result <- case (invcode, teamcode, team, hasSSO, teamid) of
    (Just a, Nothing, Nothing, False, Nothing) -> Right . Just . NewUserOriginInvitationCode $ a
    (Nothing, Just a, Nothing, False, Nothing) -> Right . Just . NewUserOriginTeamUser $ NewTeamMember a
    (Nothing, Nothing, Just a, False, Nothing) -> Right . Just . NewUserOriginTeamUser $ NewTeamCreator a
    (Nothing, Nothing, Nothing, True, Just t) -> Right . Just . NewUserOriginTeamUser $ NewTeamMemberSSO t
    (Nothing, Nothing, Nothing, False, Nothing) -> Right Nothing
    (_, _, _, True, Nothing) -> Left "sso_id, team_id must be either both present or both absent."
    (_, _, _, False, Just _) -> Left "sso_id, team_id must be either both present or both absent."
    _ -> Left "team_code, team, invitation_code, sso_id, and the pair (sso_id, team_id) are mutually exclusive"
  -- SSO users are exempt from the password requirement; everyone else joining
  -- or creating a team must set one.
  case (result, hasPassword, hasSSO) of
    (_, _, True) -> Right result
    (Just (NewUserOriginTeamUser _), False, _) -> Left "all team users must set a password on creation"
    _ -> pure result
-- | A random invitation code for use during registration
newtype InvitationCode = InvitationCode
  {fromInvitationCode :: AsciiBase64Url}
  deriving stock (Eq, Show, Generic)
  deriving newtype (ToSchema, ToByteString, FromByteString, Arbitrary)
  deriving (FromJSON, ToJSON, S.ToSchema) via Schema InvitationCode

-- Rendered as a plain string in swagger query-parameter docs.
instance S.ToParamSchema InvitationCode where
  toParamSchema _ = S.toParamSchema (Proxy @Text)

-- Query parameters are validated as base64url before being wrapped.
instance FromHttpApiData InvitationCode where
  parseQueryParam = bimap cs InvitationCode . validateBase64Url

instance ToHttpApiData InvitationCode where
  toQueryParam = cs . toByteString . fromInvitationCode
--------------------------------------------------------------------------------
-- NewTeamUser

-- | How a new user relates to a team at registration time.
data NewTeamUser
  = -- | requires email address
    NewTeamMember InvitationCode
  | NewTeamCreator BindingNewTeamUser
  | -- | sso: users with saml credentials and/or created via scim
    NewTeamMemberSSO TeamId
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform NewTeamUser)
-- | The invitation code of a 'NewTeamMember', if any.
newTeamUserCode :: NewTeamUser -> Maybe InvitationCode
newTeamUserCode (NewTeamMember ic) = Just ic
newTeamUserCode _ = Nothing

-- | The creator payload of a 'NewTeamCreator', if any.
newTeamUserCreator :: NewTeamUser -> Maybe BindingNewTeamUser
newTeamUserCreator (NewTeamCreator bntu) = Just bntu
newTeamUserCreator _ = Nothing

-- | The team id of a 'NewTeamMemberSSO', if any.
newTeamUserTeamId :: NewTeamUser -> Maybe TeamId
newTeamUserTeamId (NewTeamMemberSSO tid) = Just tid
newTeamUserTeamId _ = Nothing
-- | Payload supplied by the creator of a new binding team.
data BindingNewTeamUser = BindingNewTeamUser
  { bnuTeam :: BindingNewTeam,
    -- | Currency selected at team creation time (see FUTUREWORK below).
    bnuCurrency :: Maybe Currency.Alpha
    -- FUTUREWORK:
    -- Remove Currency selection once billing supports currency changes after team creation
  }
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform BindingNewTeamUser)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema BindingNewTeamUser)

-- The team fields are inlined into the same JSON object (object schema merge),
-- with an optional "currency" field alongside them.
instance ToSchema BindingNewTeamUser where
  schema =
    object "BindingNewTeamUser" $
      BindingNewTeamUser
        <$> bnuTeam
          .= bindingNewTeamObjectSchema
        <*> bnuCurrency
          .= maybe_ (optField "currency" genericToSchema)
--------------------------------------------------------------------------------
-- SCIM User Info

-- | Per-user SCIM metadata: the user id and, when known, the creation time.
data ScimUserInfo = ScimUserInfo
  { suiUserId :: UserId,
    suiCreatedOn :: Maybe UTCTimeMillis
  }
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform ScimUserInfo)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema ScimUserInfo)

instance ToSchema ScimUserInfo where
  schema =
    object "ScimUserInfo" $
      ScimUserInfo
        <$> suiUserId
          .= field "id" schema
        <*> suiCreatedOn
          .= maybe_ (optField "created_on" schema)
-- | A list of 'ScimUserInfo', serialized under the "scim_user_infos" key.
newtype ScimUserInfos = ScimUserInfos {scimUserInfos :: [ScimUserInfo]}
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform ScimUserInfos)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema ScimUserInfos)

instance ToSchema ScimUserInfos where
  schema =
    object "ScimUserInfos" $
      ScimUserInfos
        <$> scimUserInfos
          .= field "scim_user_infos" (array schema)
-------------------------------------------------------------------------------

-- | Set of user ids, can be used for different purposes (e.g., used on the internal
-- APIs for listing user's clients)
newtype UserSet = UserSet
  { usUsrs :: Set UserId
  }
  deriving stock (Eq, Show, Generic)
  deriving newtype (Arbitrary)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema UserSet)

-- Serialized as a JSON array under "users"; duplicates collapse into the Set.
instance ToSchema UserSet where
  schema =
    object "UserSet" $
      UserSet
        <$> usUsrs
          .= field "users" (set schema)
--------------------------------------------------------------------------------
-- Profile Updates

-- | Partial update of the caller's own profile; a 'Nothing' field means
-- "leave unchanged".
data UserUpdate = UserUpdate
  { uupName :: Maybe Name,
    -- | DEPRECATED
    uupPict :: Maybe Pict,
    uupAssets :: Maybe [Asset],
    uupAccentId :: Maybe ColourId
  }
  deriving stock (Eq, Show, Generic)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema UserUpdate)
  deriving (Arbitrary) via (GenericUniform UserUpdate)

instance ToSchema UserUpdate where
  schema =
    object "UserUpdate" $
      UserUpdate
        <$> uupName
          .= maybe_ (optField "name" schema)
        <*> uupPict
          .= maybe_ (optField "picture" schema)
        <*> uupAssets
          .= maybe_ (optField "assets" (array schema))
        <*> uupAccentId
          .= maybe_ (optField "accent_id" schema)
-- | Ways an own-profile update can fail.
data UpdateProfileError
  = DisplayNameManagedByScim
  | ProfileNotFound
  deriving (Generic)
  deriving (AsUnion PutSelfErrorResponses) via GenericAsUnion PutSelfErrorResponses UpdateProfileError

instance GSOP.Generic UpdateProfileError

-- Constructors map positionally onto these error responses.
type PutSelfErrorResponses = '[ErrorResponse 'E.NameManagedByScim, ErrorResponse 'E.UserNotFound]

type PutSelfResponses = PutSelfErrorResponses .++ '[RespondEmpty 200 "User updated"]

-- 'Nothing' maps to the trailing 200 response; 'Just' errors map into the
-- error-response union.
instance (res ~ PutSelfResponses) => AsUnion res (Maybe UpdateProfileError) where
  toUnion = maybeToUnion (toUnion @PutSelfErrorResponses)
  fromUnion = maybeFromUnion (fromUnion @PutSelfErrorResponses)
-- | The payload for setting or changing a password.
data PasswordChange = PasswordChange
  { -- | Required iff the account already has a password (cf. the schema
    -- description below).
    cpOldPassword :: Maybe PlainTextPassword,
    cpNewPassword :: PlainTextPassword
  }
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform PasswordChange)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema PasswordChange)

instance ToSchema PasswordChange where
  schema =
    over
      doc
      ( description
          ?~ "Data to change a password. The old password is required if \
             \a password already exists."
      )
      . object "PasswordChange"
      $ PasswordChange
        <$> cpOldPassword
          .= maybe_ (optField "old_password" schema)
        <*> cpNewPassword
          .= field "new_password" schema
-- | Ways a password change can fail.
data ChangePasswordError
  = InvalidCurrentPassword
  | ChangePasswordNoIdentity
  | ChangePasswordMustDiffer
  deriving (Generic)
  deriving (AsUnion ChangePasswordErrorResponses) via GenericAsUnion ChangePasswordErrorResponses ChangePasswordError

instance GSOP.Generic ChangePasswordError

-- Constructors map positionally onto these error responses.
type ChangePasswordErrorResponses =
  [ ErrorResponse 'E.BadCredentials,
    ErrorResponse 'E.NoIdentity,
    ErrorResponse 'E.ChangePasswordMustDiffer
  ]

type ChangePasswordResponses =
  ChangePasswordErrorResponses .++ '[RespondEmpty 200 "Password Changed"]

-- 'Nothing' maps to the trailing 200 response; 'Just' errors map into the
-- error-response union.
instance (res ~ ChangePasswordResponses) => AsUnion res (Maybe ChangePasswordError) where
  toUnion = maybeToUnion (toUnion @ChangePasswordErrorResponses)
  fromUnion = maybeFromUnion (fromUnion @ChangePasswordErrorResponses)
-- | Payload for setting the user's preferred locale.
newtype LocaleUpdate = LocaleUpdate {luLocale :: Locale}
  deriving stock (Eq, Show, Generic)
  deriving newtype (Arbitrary)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema LocaleUpdate)

instance ToSchema LocaleUpdate where
  schema =
    object "LocaleUpdate" $
      LocaleUpdate
        <$> luLocale
          .= field "locale" schema
-- | Payload for requesting an email-address change.
newtype EmailUpdate = EmailUpdate {euEmail :: Email}
  deriving stock (Eq, Show, Generic)
  deriving newtype (Arbitrary)
  deriving (S.ToSchema) via (Schema EmailUpdate)

instance ToSchema EmailUpdate where
  schema =
    object "EmailUpdate" $
      EmailUpdate
        <$> euEmail
          .= field "email" schema

-- NOTE: JSON instances are hand-written here (only Swagger goes via the
-- schema); they must stay in sync with the "email" field above.
instance ToJSON EmailUpdate where
  toJSON e = A.object ["email" A..= euEmail e]

instance FromJSON EmailUpdate where
  parseJSON = A.withObject "email-update" $ \o ->
    EmailUpdate <$> o A..: "email"
-- | Payload for requesting a phone-number change.
newtype PhoneUpdate = PhoneUpdate {puPhone :: Phone}
  deriving stock (Eq, Show, Generic)
  deriving newtype (Arbitrary)
  deriving (ToJSON, FromJSON, S.ToSchema) via Schema PhoneUpdate

instance ToSchema PhoneUpdate where
  schema =
    object "PhoneUpdate" $
      PhoneUpdate
        <$> puPhone
          .= field "phone" schema
-- | Ways a phone-number change can fail.
data ChangePhoneError
  = PhoneExists
  | InvalidNewPhone
  | BlacklistedNewPhone
  deriving (Generic)
  deriving (AsUnion ChangePhoneErrorResponses) via GenericAsUnion ChangePhoneErrorResponses ChangePhoneError

instance GSOP.Generic ChangePhoneError

-- Constructors map positionally onto these error responses.
type ChangePhoneErrorResponses =
  [ ErrorResponse 'UserKeyExists,
    ErrorResponse 'InvalidPhone,
    ErrorResponse 'BlacklistedPhone
  ]

type ChangePhoneResponses =
  ChangePhoneErrorResponses .++ '[RespondEmpty 202 "Phone updated"]

-- 'Nothing' maps to the trailing 202 response; 'Just' errors map into the
-- error-response union.
instance (res ~ ChangePhoneResponses) => AsUnion res (Maybe ChangePhoneError) where
  toUnion = maybeToUnion (toUnion @ChangePhoneErrorResponses)
  fromUnion = maybeFromUnion (fromUnion @ChangePhoneErrorResponses)
-- | Ways removing an identity (email/phone) can fail.
data RemoveIdentityError
  = LastIdentity
  | NoPassword
  | NoIdentity
  deriving (Generic)
  deriving (AsUnion RemoveIdentityErrorResponses) via GenericAsUnion RemoveIdentityErrorResponses RemoveIdentityError

instance GSOP.Generic RemoveIdentityError

-- Constructors map positionally onto these error responses.
type RemoveIdentityErrorResponses =
  [ ErrorResponse 'E.LastIdentity,
    ErrorResponse 'E.NoPassword,
    ErrorResponse 'E.NoIdentity
  ]

type RemoveIdentityResponses =
  RemoveIdentityErrorResponses .++ '[RespondEmpty 200 "Identity Removed"]

-- 'Nothing' maps to the trailing 200 response; 'Just' errors map into the
-- error-response union.
instance (res ~ RemoveIdentityResponses) => AsUnion res (Maybe RemoveIdentityError) where
  toUnion = maybeToUnion (toUnion @RemoveIdentityErrorResponses)
  fromUnion = maybeFromUnion (fromUnion @RemoveIdentityErrorResponses)
-- | Payload for changing the user's unique handle.
newtype HandleUpdate = HandleUpdate {huHandle :: Text}
  deriving stock (Eq, Show, Generic)
  deriving newtype (Arbitrary)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema HandleUpdate)

instance ToSchema HandleUpdate where
  schema =
    object "HandleUpdate" $
      HandleUpdate <$> huHandle .= field "handle" schema
-- | Ways a handle change can fail.
data ChangeHandleError
  = ChangeHandleNoIdentity
  | ChangeHandleExists
  | ChangeHandleInvalid
  | ChangeHandleManagedByScim
  deriving (Show, Generic)
  deriving (AsUnion ChangeHandleErrorResponses) via GenericAsUnion ChangeHandleErrorResponses ChangeHandleError

instance GSOP.Generic ChangeHandleError

-- Constructors map positionally onto these error responses.
type ChangeHandleErrorResponses =
  '[ ErrorResponse 'E.NoIdentity,
     ErrorResponse 'E.HandleExists,
     ErrorResponse 'E.InvalidHandle,
     ErrorResponse 'E.HandleManagedByScim
   ]

type ChangeHandleResponses =
  ChangeHandleErrorResponses .++ '[RespondEmpty 200 "Handle Changed"]

-- 'Nothing' maps to the trailing 200 response; 'Just' errors map into the
-- error-response union.
instance (res ~ ChangeHandleResponses) => AsUnion res (Maybe ChangeHandleError) where
  toUnion = maybeToUnion (toUnion @ChangeHandleErrorResponses)
  fromUnion = maybeFromUnion (fromUnion @ChangeHandleErrorResponses)
-- | Payload for changing the user's display name.
--
-- NOTE(review): despite the field name 'nuHandle', the JSON key is "name",
-- i.e. this carries the display name, not the handle.
newtype NameUpdate = NameUpdate {nuHandle :: Text}
  deriving stock (Eq, Show, Generic)
  deriving newtype (Arbitrary)

instance ToJSON NameUpdate where
  toJSON h = A.object ["name" A..= nuHandle h]

instance FromJSON NameUpdate where
  parseJSON = A.withObject "name-update" $ \o ->
    NameUpdate <$> o A..: "name"
-- | Outcome of an email-change request.
data ChangeEmailResponse
  = ChangeEmailResponseIdempotent
  | ChangeEmailResponseNeedsActivation

-- Injection order is deliberate: 'ChangeEmailResponseNeedsActivation' is the
-- first alternative (the 202 response), 'ChangeEmailResponseIdempotent' the
-- second (the 204 response).
instance
  AsUnion
    '[Respond 202 desc1 (), Respond 204 desc2 ()]
    ChangeEmailResponse
  where
  toUnion ChangeEmailResponseIdempotent = S (Z (I ()))
  toUnion ChangeEmailResponseNeedsActivation = Z (I ())
  fromUnion (Z (I ())) = ChangeEmailResponseNeedsActivation
  fromUnion (S (Z (I ()))) = ChangeEmailResponseIdempotent
  fromUnion (S (S x)) = case x of {}
-----------------------------------------------------------------------------
-- Account Deletion

-- | Payload for requesting account deletion.
newtype DeleteUser = DeleteUser
  { -- | Absent for accounts without a password; see NOTE(review) below.
    deleteUserPassword :: Maybe PlainTextPassword
  }
  deriving stock (Eq, Show, Generic)
  deriving newtype (Arbitrary)
  deriving (S.ToSchema) via (Schema DeleteUser)

instance ToSchema DeleteUser where
  schema =
    object "DeleteUser" $
      DeleteUser
        <$> deleteUserPassword
          .= maybe_ (optField "password" schema)

mkDeleteUser :: Maybe PlainTextPassword -> DeleteUser
mkDeleteUser = DeleteUser

-- NOTE(review): JSON instances are hand-written (only Swagger goes via the
-- schema); the '#' combinator drops the "password" key when it is 'Nothing'.
instance ToJSON DeleteUser where
  toJSON d =
    A.object $
      "password"
        A..= deleteUserPassword d
        # []

instance FromJSON DeleteUser where
  parseJSON = A.withObject "DeleteUser" $ \o ->
    DeleteUser <$> o A..:? "password"
-- | Payload for verifying account deletion via a code.
data VerifyDeleteUser = VerifyDeleteUser
  { verifyDeleteUserKey :: Code.Key,
    verifyDeleteUserCode :: Code.Value
  }
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform VerifyDeleteUser)
  deriving (ToJSON, FromJSON, S.ToSchema) via (Schema VerifyDeleteUser)

-- | Constructor alias kept for call-site readability.
mkVerifyDeleteUser :: Code.Key -> Code.Value -> VerifyDeleteUser
mkVerifyDeleteUser = VerifyDeleteUser

instance ToSchema VerifyDeleteUser where
  schema =
    objectWithDocModifier "VerifyDeleteUser" (description ?~ "Data for verifying an account deletion.") $
      VerifyDeleteUser
        <$> verifyDeleteUserKey
          .= fieldWithDocModifier "key" (description ?~ "The identifying key of the account (i.e. user ID).") schema
        <*> verifyDeleteUserCode
          .= fieldWithDocModifier "code" (description ?~ "The verification code.") schema
-- | A response for a pending deletion code.
newtype DeletionCodeTimeout = DeletionCodeTimeout
  {fromDeletionCodeTimeout :: Code.Timeout}
  deriving stock (Eq, Show, Generic)
  deriving newtype (Arbitrary)
  deriving (S.ToSchema) via (Schema DeletionCodeTimeout)

instance ToSchema DeletionCodeTimeout where
  schema =
    object "DeletionCodeTimeout" $
      DeletionCodeTimeout
        <$> fromDeletionCodeTimeout
          .= field "expires_in" schema

-- NOTE: hand-written JSON instances (only Swagger goes via the schema);
-- they must stay in sync with the "expires_in" field above.
instance ToJSON DeletionCodeTimeout where
  toJSON (DeletionCodeTimeout t) = A.object ["expires_in" A..= t]

instance FromJSON DeletionCodeTimeout where
  parseJSON = A.withObject "DeletionCodeTimeout" $ \o ->
    DeletionCodeTimeout <$> o A..: "expires_in"
-- | Result of an internal user/account deletion
--
-- Has no JSON instances in this module; presumably used for internal control
-- flow only -- confirm against callers.
data DeleteUserResult
  = -- | User never existed
    NoUser
  | -- | User/account was deleted before
    AccountAlreadyDeleted
  | -- | User/account was deleted in this call
    AccountDeleted
  deriving (Eq, Show)
-- | Request body for listing users, either by qualified id or by qualified
-- handle (1 to 4 handles per request).
data ListUsersQuery
  = ListUsersByIds [Qualified UserId]
  | ListUsersByHandles (Range 1 4 [Qualified Handle])
  deriving (Show, Eq)
-- Exactly one of "qualified_ids" / "qualified_handles" must be present.
instance FromJSON ListUsersQuery where
  parseJSON =
    A.withObject "ListUsersQuery" $ \o -> do
      mUids <- ListUsersByIds <$$> o A..:? "qualified_ids"
      mHandles <- ListUsersByHandles <$$> o A..:? "qualified_handles"
      case (mUids, mHandles) of
        (Just uids, Nothing) -> pure uids
        (Nothing, Just handles) -> pure handles
        (_, _) -> fail "exactly one of qualified_ids or qualified_handles must be provided."
-- Emits exactly one of the two keys, mirroring the FromJSON constraint.
instance ToJSON ListUsersQuery where
  toJSON q = case q of
    ListUsersByIds uids -> A.object ["qualified_ids" A..= uids]
    ListUsersByHandles handles -> A.object ["qualified_handles" A..= handles]
-- NB: It is not possible to specify mutually exclusive fields in swagger2, so
-- here we write it in description and modify the example to have the correct
-- JSON.
instance S.ToSchema ListUsersQuery where
  declareNamedSchema _ = do
    uids <- S.declareSchemaRef (Proxy @[Qualified UserId])
    handles <- S.declareSchemaRef (Proxy @(Range 1 4 [Qualified Handle]))
    pure $
      S.NamedSchema (Just "ListUsersQuery") $
        mempty
          & S.type_ ?~ S.SwaggerObject
          -- the mutual-exclusion rule lives in prose, see NB above
          & S.description ?~ "exactly one of qualified_ids or qualified_handles must be provided."
          & S.properties .~ InsOrdHashMap.fromList [("qualified_ids", uids), ("qualified_handles", handles)]
          & S.example ?~ toJSON (ListUsersByIds [Qualified (Id UUID.nil) (Domain "example.com")])
-----------------------------------------------------------------------------
-- SndFactorPasswordChallenge

-- | Operations that can be gated behind a second-factor verification code.
data VerificationAction
  = CreateScimToken
  | Login
  | DeleteTeam
  deriving stock (Eq, Show, Enum, Bounded, Generic)
  deriving (Arbitrary) via (GenericUniform VerificationAction)
  deriving (FromJSON, ToJSON, S.ToSchema) via (Schema VerificationAction)

-- Serialized as a snake_case string enum.
instance ToSchema VerificationAction where
  schema =
    enum @Text "VerificationAction" $
      mconcat
        [ element "create_scim_token" CreateScimToken,
          element "login" Login,
          element "delete_team" DeleteTeam
        ]
-- Byte-string rendering mirrors the JSON enum above; 'FromByteString' must
-- accept exactly what 'ToByteString' produces.
instance ToByteString VerificationAction where
  builder CreateScimToken = "create_scim_token"
  builder Login = "login"
  builder DeleteTeam = "delete_team"

instance FromByteString VerificationAction where
  parser =
    Parser.takeByteString >>= \b ->
      case T.decodeUtf8' b of
        Right "login" -> pure Login
        Right "create_scim_token" -> pure CreateScimToken
        Right "delete_team" -> pure DeleteTeam
        Right t -> fail $ "Invalid VerificationAction: " <> T.unpack t
        Left e -> fail $ "Invalid VerificationAction: " <> show e
-- Swagger query-parameter schema: a string restricted to the enum values,
-- derived from the 'Bounded'/'Enum' instances so it stays in sync.
instance S.ToParamSchema VerificationAction where
  toParamSchema _ =
    mempty
      { S._paramSchemaType = Just S.SwaggerString,
        S._paramSchemaEnum = Just (A.String . toQueryParam <$> [(minBound :: VerificationAction) ..])
      }

-- Parsing/rendering of URL pieces reuses the byte-string instances above.
instance FromHttpApiData VerificationAction where
  parseUrlPiece = maybeToEither "Invalid verification action" . fromByteString . cs

instance ToHttpApiData VerificationAction where
  toQueryParam a = cs (toByteString' a)
-- | Request to send a second-factor verification code for a given action to
-- the given email address.
data SendVerificationCode = SendVerificationCode
  { svcAction :: VerificationAction,
    svcEmail :: Email
  }
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform SendVerificationCode)
  deriving (FromJSON, ToJSON, S.ToSchema) via Schema SendVerificationCode

instance ToSchema SendVerificationCode where
  schema =
    object "SendVerificationCode" $
      SendVerificationCode
        <$> svcAction
          .= field "action" schema
        <*> svcEmail
          .= field "email" schema
| null | https://raw.githubusercontent.com/wireapp/wire-server/7cd0a9c1dc423f87d46ad86fccaced93c6147fb1/libs/wire-api/src/Wire/API/User.hs | haskell | This file is part of the Wire Server implementation.
This program is free software: you can redistribute it and/or modify it under
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
with this program. If not, see </>.
Profiles
User (should not be here)
* NewUserOrigin
* Profile Updates
* Account Deletion
* List Users
* re-exports
------------------------------------------------------------------------------
| This datatype replaces the old `Members` datatype,
needed due to backwards compatible reasons since old
clients will break if we switch these types. Also, this
definition represents better what information it carries
------------------------------------------------------------------------------
QualifiedUserIdList
------------------------------------------------------------------------------
is less than or equal to 'max'.
------------------------------------------------------------------------------
| A subset of the data of an existing 'User' that is returned on the API and is visible to
other users. Each user also has access to their own profile in a richer format --
| DEPRECATED
| Set if the user represents an external service,
i.e. it is a "bot".
------------------------------------------------------------------------------
| A self profile.
------------------------------------------------------------------------------
User
FUTUREWORK: Move this type somewhere else, it's not part of the client API.
| The data of an existing user.
| User identity. For endpoints like @/self@, it will be present in the response iff
the user is activated, and the email/phone contained in it will be guaranteedly
verified. {#RefActivation}
| required; non-unique
| DEPRECATED
| Set if the user represents an external service,
i.e. it is a "bot".
| not required; must be unique if present
| Set if the user is ephemeral
| Set if the user is part of a binding team
| How is the user profile managed (e.g. if it's via SCIM then the user profile
can't be edited via normal means)
:
disentangle for ' User ' , ' NewUser ' , ' UserIdentity ' , ' NewUserOrigin ' .
FUTUREWORK: this is only ignoring case in the email format, and emails should be
handled case-insensitively. -909
We don't want to show the email by default;
However we do allow adding it back in intentionally later.
FUTUREWORK: should public and conect profile be separate types?
Note that we explicitly unpack and repack the types here rather than using
RecordWildCards or something similar because we want changes to the public profile
------------------------------------------------------------------------------
newtype is used as request body type for the public @\/register@ endpoint, where only a
Specifically, we forbid the following:
finishes creating the user).
SCIM-managed user)
| A user is Ephemeral if she has neither email, phone, nor sso credentials and is not
(whichever comes earlier).
| use this as 'UserId' (if 'Nothing', call 'Data.UUID.nextRandom').
| DEPRECATED
| DEPRECATED
FUTUREWORK: align more with FromJSON instance?
------------------------------------------------------------------------------
NewUserOrigin
| Does the user have a password
| Does the user have an SSO Identity
| A random invitation code for use during registration
------------------------------------------------------------------------------
NewTeamUser
| requires email address
FUTUREWORK:
Remove Currency selection once billing supports currency changes after team creation
------------------------------------------------------------------------------
SCIM User Info
-----------------------------------------------------------------------------
| Set of user ids, can be used for different purposes (e.g., used on the internal
APIs for listing user's clients)
------------------------------------------------------------------------------
Profile Updates
| DEPRECATED
| The payload for setting or changing a password.
---------------------------------------------------------------------------
Account Deletion
| Payload for requesting account deletion.
| Payload for verifying account deletion via a code.
| A response for a pending deletion code.
| Result of an internal user/account deletion
| User never existed
| User/account was deleted before
| User/account was deleted in this call
here we write it in description and modify the example to have the correct
JSON.
---------------------------------------------------------------------------
SndFactorPasswordChallenge | # LANGUAGE DeriveGeneric #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE RecordWildCards #
Copyright ( C ) 2022 Wire Swiss GmbH < >
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
You should have received a copy of the GNU Affero General Public License along
module Wire.API.User
( UserIdList (..),
QualifiedUserIdList (..),
LimitedQualifiedUserIdList (..),
ScimUserInfo (..),
ScimUserInfos (..),
UserSet (..),
UserProfile (..),
SelfProfile (..),
User (..),
userEmail,
userPhone,
userSSOId,
userIssuer,
userSCIMExternalId,
scimExternalId,
ssoIssuerAndNameId,
connectedProfile,
publicProfile,
userObjectSchema,
* NewUser
NewUserPublic (..),
RegisterError (..),
RegisterSuccess (..),
RegisterResponses,
RegisterInternalResponses,
NewUser (..),
emptyNewUser,
NewUserSpar (..),
CreateUserSparError (..),
CreateUserSparInternalResponses,
newUserFromSpar,
urefToExternalId,
urefToEmail,
ExpiresIn,
newUserInvitationCode,
newUserTeam,
newUserEmail,
newUserPhone,
newUserSSOId,
isNewUserEphemeral,
isNewUserTeamMember,
NewUserOrigin (..),
InvitationCode (..),
NewTeamUser (..),
BindingNewTeamUser (..),
UserUpdate (..),
UpdateProfileError (..),
PutSelfResponses,
PasswordChange (..),
ChangePasswordError (..),
ChangePasswordResponses,
LocaleUpdate (..),
EmailUpdate (..),
PhoneUpdate (..),
ChangePhoneError (..),
ChangePhoneResponses,
RemoveIdentityError (..),
RemoveIdentityResponses,
HandleUpdate (..),
ChangeHandleError (..),
ChangeHandleResponses,
NameUpdate (..),
ChangeEmailResponse (..),
DeleteUser (..),
mkDeleteUser,
VerifyDeleteUser (..),
mkVerifyDeleteUser,
DeletionCodeTimeout (..),
DeleteUserResult (..),
ListUsersQuery (..),
module Wire.API.User.Identity,
module Wire.API.User.Profile,
* 2nd factor auth
VerificationAction (..),
SendVerificationCode (..),
)
where
import Control.Applicative
import Control.Error.Safe (rightMay)
import Control.Lens (over, view, (.~), (?~), (^.))
import Data.Aeson (FromJSON (..), ToJSON (..))
import qualified Data.Aeson.Types as A
import qualified Data.Attoparsec.ByteString as Parser
import Data.ByteString.Builder (toLazyByteString)
import Data.ByteString.Conversion
import qualified Data.CaseInsensitive as CI
import qualified Data.Code as Code
import qualified Data.Currency as Currency
import Data.Domain (Domain (Domain))
import Data.Either.Extra (maybeToEither)
import Data.Handle (Handle)
import qualified Data.HashMap.Strict.InsOrd as InsOrdHashMap
import Data.Id
import Data.Json.Util (UTCTimeMillis, (#))
import Data.LegalHold (UserLegalHoldStatus)
import Data.Misc (PlainTextPassword (..))
import Data.Qualified
import Data.Range
import Data.SOP
import Data.Schema
import Data.String.Conversions (cs)
import qualified Data.Swagger as S
import qualified Data.Text as T
import Data.Text.Ascii
import qualified Data.Text.Encoding as T
import Data.UUID (UUID, nil)
import qualified Data.UUID as UUID
import Deriving.Swagger
import GHC.TypeLits
import qualified Generics.SOP as GSOP
import Imports
import qualified SAML2.WebSSO as SAML
import qualified SAML2.WebSSO.Types.Email as SAMLEmail
import Servant (FromHttpApiData (..), ToHttpApiData (..), type (.++))
import qualified Test.QuickCheck as QC
import URI.ByteString (serializeURIRef)
import qualified Web.Cookie as Web
import Wire.API.Error
import Wire.API.Error.Brig
import qualified Wire.API.Error.Brig as E
import Wire.API.Provider.Service (ServiceRef)
import Wire.API.Routes.MultiVerb
import Wire.API.Team (BindingNewTeam, bindingNewTeamObjectSchema)
import Wire.API.Team.Role
import Wire.API.User.Activation (ActivationCode)
import Wire.API.User.Auth (CookieLabel)
import Wire.API.User.Identity
import Wire.API.User.Profile
import Wire.API.User.RichInfo
import Wire.Arbitrary (Arbitrary (arbitrary), GenericUniform (..))
UserIdList
which has been replaced by ` SimpleMembers ` . This is
newtype UserIdList = UserIdList {mUsers :: [UserId]}
deriving stock (Eq, Show, Generic)
deriving newtype (Arbitrary)
deriving (FromJSON, ToJSON, S.ToSchema) via Schema UserIdList
instance ToSchema UserIdList where
schema =
object "UserIdList" $
UserIdList
<$> mUsers
.= field "user_ids" (array schema)
newtype QualifiedUserIdList = QualifiedUserIdList {qualifiedUserIdList :: [Qualified UserId]}
deriving stock (Eq, Show, Generic)
deriving newtype (Arbitrary)
deriving (FromJSON, ToJSON, S.ToSchema) via Schema QualifiedUserIdList
instance ToSchema QualifiedUserIdList where
schema =
object "QualifiedUserIdList" $
QualifiedUserIdList
<$> qualifiedUserIdList
.= field "qualified_user_ids" (array schema)
<* (fmap qUnqualified . qualifiedUserIdList)
.= field "user_ids" (deprecatedSchema "qualified_user_ids" (array schema))
LimitedQualifiedUserIdList
| We can not use ' Wrapped ' here because all the instances require proof that 1
newtype LimitedQualifiedUserIdList (max :: Nat) = LimitedQualifiedUserIdList
{qualifiedUsers :: Range 1 max [Qualified UserId]}
deriving stock (Eq, Show, Generic)
deriving (S.ToSchema) via CustomSwagger '[FieldLabelModifier CamelToSnake] (LimitedQualifiedUserIdList max)
instance (KnownNat max, 1 <= max) => Arbitrary (LimitedQualifiedUserIdList max) where
arbitrary = LimitedQualifiedUserIdList <$> arbitrary
instance (KnownNat max, 1 <= max) => FromJSON (LimitedQualifiedUserIdList max) where
parseJSON = A.withObject "LimitedQualifiedUserIdList" $ \o ->
LimitedQualifiedUserIdList <$> o A..: "qualified_users"
instance 1 <= max => ToJSON (LimitedQualifiedUserIdList max) where
toJSON e = A.object ["qualified_users" A..= qualifiedUsers e]
UserProfile
' SelfProfile ' .
data UserProfile = UserProfile
{ profileQualifiedId :: Qualified UserId,
profileName :: Name,
profilePict :: Pict,
profileAssets :: [Asset],
profileAccentId :: ColourId,
profileDeleted :: Bool,
profileService :: Maybe ServiceRef,
profileHandle :: Maybe Handle,
profileExpire :: Maybe UTCTimeMillis,
profileTeam :: Maybe TeamId,
profileEmail :: Maybe Email,
profileLegalholdStatus :: UserLegalHoldStatus
}
deriving stock (Eq, Show, Generic)
deriving (Arbitrary) via (GenericUniform UserProfile)
deriving (FromJSON, ToJSON, S.ToSchema) via (Schema UserProfile)
instance ToSchema UserProfile where
schema =
object "UserProfile" $
UserProfile
<$> profileQualifiedId
.= field "qualified_id" schema
<* (qUnqualified . profileQualifiedId)
.= optional (field "id" (deprecatedSchema "qualified_id" schema))
<*> profileName
.= field "name" schema
<*> profilePict
.= (field "picture" schema <|> pure noPict)
<*> profileAssets
.= (field "assets" (array schema) <|> pure [])
<*> profileAccentId
.= field "accent_id" schema
<*> ((\del -> if del then Just True else Nothing) . profileDeleted)
.= maybe_ (fromMaybe False <$> optField "deleted" schema)
<*> profileService
.= maybe_ (optField "service" schema)
<*> profileHandle
.= maybe_ (optField "handle" schema)
<*> profileExpire
.= maybe_ (optField "expires_at" schema)
<*> profileTeam
.= maybe_ (optField "team" schema)
<*> profileEmail
.= maybe_ (optField "email" schema)
<*> profileLegalholdStatus
.= field "legalhold_status" schema
SelfProfile
newtype SelfProfile = SelfProfile
{ selfUser :: User
}
deriving stock (Eq, Show, Generic)
deriving (Arbitrary) via (GenericUniform SelfProfile)
deriving newtype (S.ToSchema)
instance ToJSON SelfProfile where
toJSON (SelfProfile u) = toJSON u
instance FromJSON SelfProfile where
parseJSON = A.withObject "SelfProfile" $ \o ->
SelfProfile <$> parseJSON (A.Object o)
data User = User
{ userId :: UserId,
userQualifiedId :: Qualified UserId,
userIdentity :: Maybe UserIdentity,
userDisplayName :: Name,
userPict :: Pict,
userAssets :: [Asset],
userAccentId :: ColourId,
userDeleted :: Bool,
userLocale :: Locale,
userService :: Maybe ServiceRef,
userHandle :: Maybe Handle,
userExpire :: Maybe UTCTimeMillis,
userTeam :: Maybe TeamId,
userManagedBy :: ManagedBy
}
deriving stock (Eq, Show, Generic)
deriving (Arbitrary) via (GenericUniform User)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema User)
instance ToSchema User where
schema = object "User" userObjectSchema
userObjectSchema :: ObjectSchema SwaggerDoc User
userObjectSchema =
User
<$> userId
.= field "id" schema
<*> userQualifiedId
.= field "qualified_id" schema
<*> userIdentity
.= maybeUserIdentityObjectSchema
<*> userDisplayName
.= field "name" schema
<*> userPict
.= (fromMaybe noPict <$> optField "picture" schema)
<*> userAssets
.= (fromMaybe [] <$> optField "assets" (array schema))
<*> userAccentId
.= field "accent_id" schema
<*> (fromMaybe False <$> (\u -> if userDeleted u then Just True else Nothing) .= maybe_ (optField "deleted" schema))
<*> userLocale
.= field "locale" schema
<*> userService
.= maybe_ (optField "service" schema)
<*> userHandle
.= maybe_ (optField "handle" schema)
<*> userExpire
.= maybe_ (optField "expires_at" schema)
<*> userTeam
.= maybe_ (optField "team" schema)
<*> userManagedBy
.= (fromMaybe ManagedByWire <$> optField "managed_by" schema)
userEmail :: User -> Maybe Email
userEmail = emailIdentity <=< userIdentity
userPhone :: User -> Maybe Phone
userPhone = phoneIdentity <=< userIdentity
userSSOId :: User -> Maybe UserSSOId
userSSOId = ssoIdentity <=< userIdentity
userSCIMExternalId :: User -> Maybe Text
userSCIMExternalId usr = scimExternalId (userManagedBy usr) =<< userSSOId usr
scimExternalId :: ManagedBy -> UserSSOId -> Maybe Text
scimExternalId _ (UserScimExternalId extId) = Just extId
scimExternalId ManagedByScim (UserSSOId (SAML.UserRef _ nameIdXML)) = Just . CI.original . SAML.unsafeShowNameID $ nameIdXML
scimExternalId ManagedByWire (UserSSOId _) = Nothing
ssoIssuerAndNameId :: UserSSOId -> Maybe (Text, Text)
ssoIssuerAndNameId (UserSSOId (SAML.UserRef (SAML.Issuer uri) nameIdXML)) = Just (fromUri uri, fromNameId nameIdXML)
where
fromUri = cs . toLazyByteString . serializeURIRef
fromNameId = CI.original . SAML.unsafeShowNameID
ssoIssuerAndNameId (UserScimExternalId _) = Nothing
userIssuer :: User -> Maybe SAML.Issuer
userIssuer user = userSSOId user >>= fromSSOId
where
fromSSOId :: UserSSOId -> Maybe SAML.Issuer
fromSSOId (UserSSOId (SAML.UserRef issuer _)) = Just issuer
fromSSOId _ = Nothing
connectedProfile :: User -> UserLegalHoldStatus -> UserProfile
connectedProfile u legalHoldStatus =
UserProfile
{ profileQualifiedId = userQualifiedId u,
profileHandle = userHandle u,
profileName = userDisplayName u,
profilePict = userPict u,
profileAssets = userAssets u,
profileAccentId = userAccentId u,
profileService = userService u,
profileDeleted = userDeleted u,
profileExpire = userExpire u,
profileTeam = userTeam u,
profileEmail = Nothing,
profileLegalholdStatus = legalHoldStatus
}
publicProfile :: User -> UserLegalHoldStatus -> UserProfile
publicProfile u legalHoldStatus =
to be EXPLICIT and INTENTIONAL so we do n't accidentally leak sensitive data .
let UserProfile
{ profileQualifiedId,
profileHandle,
profileName,
profilePict,
profileAssets,
profileAccentId,
profileService,
profileDeleted,
profileExpire,
profileTeam,
profileLegalholdStatus
} = connectedProfile u legalHoldStatus
in UserProfile
{ profileEmail = Nothing,
profileQualifiedId,
profileHandle,
profileName,
profilePict,
profileAssets,
profileAccentId,
profileService,
profileDeleted,
profileExpire,
profileTeam,
profileLegalholdStatus
}
NewUser
| We use the same ' NewUser ' type for the @\/register@ and @\/i\/users@ endpoints . This
subset of the ' NewUser ' functionality should be allowed .
* Setting ' SSOIdentity ' ( SSO users are created by Spar )
* Setting the UUID ( only needed so that Spar can find the user if Spar crashes before it
* Setting ' ManagedBy ' ( it should be the default in all cases unless Spar creates a
newtype NewUserPublic = NewUserPublic NewUser
deriving stock (Eq, Show, Generic)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema NewUserPublic)
instance ToSchema NewUserPublic where
schema =
unwrap .= withParser schema (either fail pure . validateNewUserPublic)
where
unwrap (NewUserPublic nu) = nu
validateNewUserPublic :: NewUser -> Either String NewUserPublic
validateNewUserPublic nu
| isJust (newUserSSOId nu) =
Left "SSO-managed users are not allowed here."
| isJust (newUserUUID nu) =
Left "it is not allowed to provide a UUID for the users here."
| newUserManagedBy nu `notElem` [Nothing, Just ManagedByWire] =
Left "only managed-by-Wire users can be created here."
| otherwise =
Right (NewUserPublic nu)
created via scim . Ephemeral users can be deleted after expires_in or sessionTokenTimeout
isNewUserEphemeral :: NewUser -> Bool
isNewUserEphemeral u = noId && noScim
where
noId = isNothing $ newUserIdentity u
noScim = case newUserManagedBy u of
Nothing -> True
Just ManagedByWire -> True
Just ManagedByScim -> False
isNewUserTeamMember :: NewUser -> Bool
isNewUserTeamMember u = case newUserTeam u of
Just (NewTeamMember _) -> True
Just (NewTeamMemberSSO _) -> True
Just (NewTeamCreator _) -> False
Nothing -> False
instance Arbitrary NewUserPublic where
arbitrary = arbitrary `QC.suchThatMap` (rightMay . validateNewUserPublic)
data RegisterError
= RegisterErrorAllowlistError
| RegisterErrorInvalidInvitationCode
| RegisterErrorMissingIdentity
| RegisterErrorUserKeyExists
| RegisterErrorInvalidActivationCodeWrongUser
| RegisterErrorInvalidActivationCodeWrongCode
| RegisterErrorInvalidEmail
| RegisterErrorInvalidPhone
| RegisterErrorBlacklistedPhone
| RegisterErrorBlacklistedEmail
| RegisterErrorTooManyTeamMembers
| RegisterErrorUserCreationRestricted
deriving (Show, Generic)
deriving (AsUnion RegisterErrorResponses) via GenericAsUnion RegisterErrorResponses RegisterError
instance GSOP.Generic RegisterError
type RegisterErrorResponses =
'[ ErrorResponse 'AllowlistError,
ErrorResponse 'InvalidInvitationCode,
ErrorResponse 'MissingIdentity,
ErrorResponse 'UserKeyExists,
ErrorResponse 'InvalidActivationCodeWrongUser,
ErrorResponse 'InvalidActivationCodeWrongCode,
ErrorResponse 'InvalidEmail,
ErrorResponse 'InvalidPhone,
ErrorResponse 'BlacklistedPhone,
ErrorResponse 'BlacklistedEmail,
ErrorResponse 'TooManyTeamMembers,
ErrorResponse 'UserCreationRestricted
]
type RegisterResponses =
RegisterErrorResponses
.++ '[ WithHeaders
'[ DescHeader "Set-Cookie" "Cookie" Web.SetCookie,
DescHeader "Location" "UserId" UserId
]
RegisterSuccess
(Respond 201 "User created and pending activation" SelfProfile)
]
instance AsHeaders '[Web.SetCookie, UserId] SelfProfile RegisterSuccess where
fromHeaders (I cookie :* (_ :* Nil), sp) = RegisterSuccess cookie sp
toHeaders (RegisterSuccess cookie sp) = (I cookie :* (I (userId (selfUser sp)) :* Nil), sp)
data RegisterSuccess = RegisterSuccess Web.SetCookie SelfProfile
instance (res ~ RegisterResponses) => AsUnion res (Either RegisterError RegisterSuccess) where
toUnion = eitherToUnion (toUnion @RegisterErrorResponses) (Z . I)
fromUnion = eitherFromUnion (fromUnion @RegisterErrorResponses) (unI . unZ)
type RegisterInternalResponses =
RegisterErrorResponses
.++ '[ WithHeaders
'[DescHeader "Location" "UserId" UserId]
SelfProfile
(Respond 201 "User created and pending activation" SelfProfile)
]
instance AsHeaders '[UserId] SelfProfile SelfProfile where
fromHeaders (_ :* Nil, sp) = sp
toHeaders sp = (I (userId (selfUser sp)) :* Nil, sp)
instance (res ~ RegisterInternalResponses) => AsUnion res (Either RegisterError SelfProfile) where
toUnion = eitherToUnion (toUnion @RegisterErrorResponses) (Z . I)
fromUnion = eitherFromUnion (fromUnion @RegisterErrorResponses) (unI . unZ)
urefToExternalId :: SAML.UserRef -> Maybe Text
urefToExternalId = fmap CI.original . SAML.shortShowNameID . view SAML.uidSubject
urefToEmail :: SAML.UserRef -> Maybe Email
urefToEmail uref = case uref ^. SAML.uidSubject . SAML.nameID of
SAML.UNameIDEmail email -> parseEmail . SAMLEmail.render . CI.original $ email
_ -> Nothing
data CreateUserSparError
= CreateUserSparHandleError ChangeHandleError
| CreateUserSparRegistrationError RegisterError
deriving (Show, Generic)
type CreateUserSparErrorResponses =
RegisterErrorResponses .++ ChangeHandleErrorResponses
type CreateUserSparResponses =
CreateUserSparErrorResponses
.++ '[ WithHeaders
'[ DescHeader "Set-Cookie" "Cookie" Web.SetCookie,
DescHeader "Location" "UserId" UserId
]
RegisterSuccess
(Respond 201 "User created and pending activation" SelfProfile)
]
type CreateUserSparInternalResponses =
CreateUserSparErrorResponses
.++ '[ WithHeaders
'[DescHeader "Location" "UserId" UserId]
SelfProfile
(Respond 201 "User created and pending activation" SelfProfile)
]
instance (res ~ CreateUserSparErrorResponses) => AsUnion res CreateUserSparError where
toUnion = eitherToUnion (toUnion @ChangeHandleErrorResponses) (toUnion @RegisterErrorResponses) . errToEither
fromUnion = errFromEither . eitherFromUnion (fromUnion @ChangeHandleErrorResponses) (fromUnion @RegisterErrorResponses)
instance (res ~ CreateUserSparResponses) => AsUnion res (Either CreateUserSparError RegisterSuccess) where
toUnion = eitherToUnion (toUnion @CreateUserSparErrorResponses) (Z . I)
fromUnion = eitherFromUnion (fromUnion @CreateUserSparErrorResponses) (unI . unZ)
instance (res ~ CreateUserSparInternalResponses) => AsUnion res (Either CreateUserSparError SelfProfile) where
toUnion = eitherToUnion (toUnion @CreateUserSparErrorResponses) (Z . I)
fromUnion = eitherFromUnion (fromUnion @CreateUserSparErrorResponses) (unI . unZ)
errToEither :: CreateUserSparError -> Either ChangeHandleError RegisterError
errToEither (CreateUserSparHandleError e) = Left e
errToEither (CreateUserSparRegistrationError e) = Right e
errFromEither :: Either ChangeHandleError RegisterError -> CreateUserSparError
errFromEither (Left e) = CreateUserSparHandleError e
errFromEither (Right e) = CreateUserSparRegistrationError e
data NewUserSpar = NewUserSpar
{ newUserSparUUID :: UUID,
newUserSparSSOId :: UserSSOId,
newUserSparDisplayName :: Name,
newUserSparTeamId :: TeamId,
newUserSparManagedBy :: ManagedBy,
newUserSparHandle :: Maybe Handle,
newUserSparRichInfo :: Maybe RichInfo,
newUserSparLocale :: Maybe Locale,
newUserSparRole :: Role
}
deriving stock (Eq, Show, Generic)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema NewUserSpar)
instance ToSchema NewUserSpar where
schema =
object "NewUserSpar" $
NewUserSpar
<$> newUserSparUUID
.= field "newUserSparUUID" genericToSchema
<*> newUserSparSSOId
.= field "newUserSparSSOId" genericToSchema
<*> newUserSparDisplayName
.= field "newUserSparDisplayName" schema
<*> newUserSparTeamId
.= field "newUserSparTeamId" schema
<*> newUserSparManagedBy
.= field "newUserSparManagedBy" schema
<*> newUserSparHandle
.= maybe_ (optField "newUserSparHandle" schema)
<*> newUserSparRichInfo
.= maybe_ (optField "newUserSparRichInfo" schema)
<*> newUserSparLocale
.= maybe_ (optField "newUserSparLocale" schema)
<*> newUserSparRole
.= field "newUserSparRole" schema
newUserFromSpar :: NewUserSpar -> NewUser
newUserFromSpar new =
NewUser
{ newUserDisplayName = newUserSparDisplayName new,
newUserUUID = Just $ newUserSparUUID new,
newUserIdentity = Just $ SSOIdentity (newUserSparSSOId new) Nothing Nothing,
newUserPict = Nothing,
newUserAssets = [],
newUserAccentId = Nothing,
newUserEmailCode = Nothing,
newUserPhoneCode = Nothing,
newUserOrigin = Just . NewUserOriginTeamUser . NewTeamMemberSSO $ newUserSparTeamId new,
newUserLabel = Nothing,
newUserPassword = Nothing,
newUserExpiresIn = Nothing,
newUserManagedBy = Just $ newUserSparManagedBy new,
newUserLocale = newUserSparLocale new
}
data NewUser = NewUser
{ newUserDisplayName :: Name,
newUserUUID :: Maybe UUID,
newUserIdentity :: Maybe UserIdentity,
newUserPict :: Maybe Pict,
newUserAssets :: [Asset],
newUserAccentId :: Maybe ColourId,
newUserEmailCode :: Maybe ActivationCode,
newUserPhoneCode :: Maybe ActivationCode,
newUserOrigin :: Maybe NewUserOrigin,
newUserLabel :: Maybe CookieLabel,
newUserLocale :: Maybe Locale,
newUserPassword :: Maybe PlainTextPassword,
newUserExpiresIn :: Maybe ExpiresIn,
newUserManagedBy :: Maybe ManagedBy
}
deriving stock (Eq, Show, Generic)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema NewUser)
emptyNewUser :: Name -> NewUser
emptyNewUser name =
NewUser
{ newUserDisplayName = name,
newUserUUID = Nothing,
newUserIdentity = Nothing,
newUserPict = Nothing,
newUserAssets = [],
newUserAccentId = Nothing,
newUserEmailCode = Nothing,
newUserPhoneCode = Nothing,
newUserOrigin = Nothing,
newUserLabel = Nothing,
newUserLocale = Nothing,
newUserPassword = Nothing,
newUserExpiresIn = Nothing,
newUserManagedBy = Nothing
}
| 1 second - 1 week
type ExpiresIn = Range 1 604800 Integer
| Raw representation of ' NewUser ' to help with writing Schema instances .
data NewUserRaw = NewUserRaw
{ newUserRawDisplayName :: Name,
newUserRawUUID :: Maybe UUID,
newUserRawEmail :: Maybe Email,
newUserRawPhone :: Maybe Phone,
newUserRawSSOId :: Maybe UserSSOId,
newUserRawPict :: Maybe Pict,
newUserRawAssets :: [Asset],
newUserRawAccentId :: Maybe ColourId,
newUserRawEmailCode :: Maybe ActivationCode,
newUserRawPhoneCode :: Maybe ActivationCode,
newUserRawInvitationCode :: Maybe InvitationCode,
newUserRawTeamCode :: Maybe InvitationCode,
newUserRawTeam :: Maybe BindingNewTeamUser,
newUserRawTeamId :: Maybe TeamId,
newUserRawLabel :: Maybe CookieLabel,
newUserRawLocale :: Maybe Locale,
newUserRawPassword :: Maybe PlainTextPassword,
newUserRawExpiresIn :: Maybe ExpiresIn,
newUserRawManagedBy :: Maybe ManagedBy
}
newUserRawObjectSchema :: ObjectSchema SwaggerDoc NewUserRaw
newUserRawObjectSchema =
NewUserRaw
<$> newUserRawDisplayName
.= field "name" schema
<*> newUserRawUUID
.= maybe_ (optField "uuid" genericToSchema)
<*> newUserRawEmail
.= maybe_ (optField "email" schema)
<*> newUserRawPhone
.= maybe_ (optField "phone" schema)
<*> newUserRawSSOId
.= maybe_ (optField "sso_id" genericToSchema)
<*> newUserRawPict
.= maybe_ (optField "picture" schema)
<*> newUserRawAssets
.= (fromMaybe [] <$> optField "assets" (array schema))
<*> newUserRawAccentId
.= maybe_ (optField "accent_id" schema)
<*> newUserRawEmailCode
.= maybe_ (optField "email_code" schema)
<*> newUserRawPhoneCode
.= maybe_ (optField "phone_code" schema)
<*> newUserRawInvitationCode
.= maybe_ (optField "invitation_code" schema)
<*> newUserRawTeamCode
.= maybe_ (optField "team_code" schema)
<*> newUserRawTeam
.= maybe_ (optField "team" schema)
<*> newUserRawTeamId
.= maybe_ (optField "team_id" schema)
<*> newUserRawLabel
.= maybe_ (optField "label" schema)
<*> newUserRawLocale
.= maybe_ (optField "locale" schema)
<*> newUserRawPassword
.= maybe_ (optField "password" schema)
<*> newUserRawExpiresIn
.= maybe_ (optField "expires_in" schema)
<*> newUserRawManagedBy
.= maybe_ (optField "managed_by" schema)
instance ToSchema NewUser where
schema =
object "NewUser" $ newUserToRaw .= withParser newUserRawObjectSchema newUserFromRaw
newUserToRaw :: NewUser -> NewUserRaw
newUserToRaw NewUser {..} =
let maybeOriginNTU = newUserOriginNewTeamUser =<< newUserOrigin
in NewUserRaw
{ newUserRawDisplayName = newUserDisplayName,
newUserRawUUID = newUserUUID,
newUserRawEmail = emailIdentity =<< newUserIdentity,
newUserRawPhone = phoneIdentity =<< newUserIdentity,
newUserRawSSOId = ssoIdentity =<< newUserIdentity,
newUserRawPict = newUserPict,
newUserRawAssets = newUserAssets,
newUserRawAccentId = newUserAccentId,
newUserRawEmailCode = newUserEmailCode,
newUserRawPhoneCode = newUserPhoneCode,
newUserRawInvitationCode = newUserOriginInvitationCode =<< newUserOrigin,
newUserRawTeamCode = newTeamUserCode =<< maybeOriginNTU,
newUserRawTeam = newTeamUserCreator =<< maybeOriginNTU,
newUserRawTeamId = newTeamUserTeamId =<< maybeOriginNTU,
newUserRawLabel = newUserLabel,
newUserRawLocale = newUserLocale,
newUserRawPassword = newUserPassword,
newUserRawExpiresIn = newUserExpiresIn,
newUserRawManagedBy = newUserManagedBy
}
newUserFromRaw :: NewUserRaw -> A.Parser NewUser
newUserFromRaw NewUserRaw {..} = do
origin <-
either fail pure $
maybeNewUserOriginFromComponents
(isJust newUserRawPassword)
(isJust newUserRawSSOId)
(newUserRawInvitationCode, newUserRawTeamCode, newUserRawTeam, newUserRawTeamId)
let identity = maybeUserIdentityFromComponents (newUserRawEmail, newUserRawPhone, newUserRawSSOId)
expiresIn <-
case (newUserRawExpiresIn, identity) of
(Just _, Just _) -> fail "Only users without an identity can expire"
_ -> pure newUserRawExpiresIn
pure $
NewUser
{ newUserDisplayName = newUserRawDisplayName,
newUserUUID = newUserRawUUID,
newUserIdentity = identity,
newUserPict = newUserRawPict,
newUserAssets = newUserRawAssets,
newUserAccentId = newUserRawAccentId,
newUserEmailCode = newUserRawEmailCode,
newUserPhoneCode = newUserRawPhoneCode,
newUserOrigin = origin,
newUserLabel = newUserRawLabel,
newUserLocale = newUserRawLocale,
newUserPassword = newUserRawPassword,
newUserExpiresIn = expiresIn,
newUserManagedBy = newUserRawManagedBy
}
instance Arbitrary NewUser where
arbitrary = do
newUserIdentity <- arbitrary
newUserOrigin <- genUserOrigin newUserIdentity
newUserDisplayName <- arbitrary
newUserUUID <- QC.elements [Just nil, Nothing]
newUserPict <- arbitrary
newUserAssets <- arbitrary
newUserAccentId <- arbitrary
newUserEmailCode <- arbitrary
newUserPhoneCode <- arbitrary
newUserLabel <- arbitrary
newUserLocale <- arbitrary
newUserPassword <- genUserPassword newUserIdentity newUserOrigin
newUserExpiresIn <- genUserExpiresIn newUserIdentity
newUserManagedBy <- arbitrary
pure NewUser {..}
where
genUserOrigin newUserIdentity = do
teamid <- arbitrary
let hasSSOId = case newUserIdentity of
Just SSOIdentity {} -> True
_ -> False
ssoOrigin = Just (NewUserOriginTeamUser (NewTeamMemberSSO teamid))
isSsoOrigin (Just (NewUserOriginTeamUser (NewTeamMemberSSO _))) = True
isSsoOrigin _ = False
if hasSSOId
then pure ssoOrigin
else arbitrary `QC.suchThat` (not . isSsoOrigin)
genUserPassword newUserIdentity newUserOrigin = do
let hasSSOId = case newUserIdentity of
Just SSOIdentity {} -> True
_ -> False
isTeamUser = case newUserOrigin of
Just (NewUserOriginTeamUser _) -> True
_ -> False
if isTeamUser && not hasSSOId then Just <$> arbitrary else arbitrary
genUserExpiresIn newUserIdentity =
if isJust newUserIdentity then pure Nothing else arbitrary
newUserInvitationCode :: NewUser -> Maybe InvitationCode
newUserInvitationCode nu = case newUserOrigin nu of
Just (NewUserOriginInvitationCode ic) -> Just ic
_ -> Nothing
newUserTeam :: NewUser -> Maybe NewTeamUser
newUserTeam nu = case newUserOrigin nu of
Just (NewUserOriginTeamUser tu) -> Just tu
_ -> Nothing
newUserEmail :: NewUser -> Maybe Email
newUserEmail = emailIdentity <=< newUserIdentity
newUserPhone :: NewUser -> Maybe Phone
newUserPhone = phoneIdentity <=< newUserIdentity
newUserSSOId :: NewUser -> Maybe UserSSOId
newUserSSOId = ssoIdentity <=< newUserIdentity
data NewUserOrigin
= NewUserOriginInvitationCode InvitationCode
| NewUserOriginTeamUser NewTeamUser
deriving stock (Eq, Show, Generic)
deriving (Arbitrary) via (GenericUniform NewUserOrigin)
type NewUserOriginComponents = (Maybe InvitationCode, Maybe InvitationCode, Maybe BindingNewTeamUser, Maybe TeamId)
newUserOriginInvitationCode :: NewUserOrigin -> Maybe InvitationCode
newUserOriginInvitationCode = \case
NewUserOriginInvitationCode ic -> Just ic
NewUserOriginTeamUser _ -> Nothing
newUserOriginNewTeamUser :: NewUserOrigin -> Maybe NewTeamUser
newUserOriginNewTeamUser = \case
NewUserOriginInvitationCode _ -> Nothing
NewUserOriginTeamUser ntu -> Just ntu
maybeNewUserOriginFromComponents ::
Bool ->
Bool ->
NewUserOriginComponents ->
Either String (Maybe NewUserOrigin)
maybeNewUserOriginFromComponents hasPassword hasSSO (invcode, teamcode, team, teamid) = do
result <- case (invcode, teamcode, team, hasSSO, teamid) of
(Just a, Nothing, Nothing, False, Nothing) -> Right . Just . NewUserOriginInvitationCode $ a
(Nothing, Just a, Nothing, False, Nothing) -> Right . Just . NewUserOriginTeamUser $ NewTeamMember a
(Nothing, Nothing, Just a, False, Nothing) -> Right . Just . NewUserOriginTeamUser $ NewTeamCreator a
(Nothing, Nothing, Nothing, True, Just t) -> Right . Just . NewUserOriginTeamUser $ NewTeamMemberSSO t
(Nothing, Nothing, Nothing, False, Nothing) -> Right Nothing
(_, _, _, True, Nothing) -> Left "sso_id, team_id must be either both present or both absent."
(_, _, _, False, Just _) -> Left "sso_id, team_id must be either both present or both absent."
_ -> Left "team_code, team, invitation_code, sso_id, and the pair (sso_id, team_id) are mutually exclusive"
case (result, hasPassword, hasSSO) of
(_, _, True) -> Right result
(Just (NewUserOriginTeamUser _), False, _) -> Left "all team users must set a password on creation"
_ -> pure result
newtype InvitationCode = InvitationCode
{fromInvitationCode :: AsciiBase64Url}
deriving stock (Eq, Show, Generic)
deriving newtype (ToSchema, ToByteString, FromByteString, Arbitrary)
deriving (FromJSON, ToJSON, S.ToSchema) via Schema InvitationCode
instance S.ToParamSchema InvitationCode where
toParamSchema _ = S.toParamSchema (Proxy @Text)
instance FromHttpApiData InvitationCode where
parseQueryParam = bimap cs InvitationCode . validateBase64Url
instance ToHttpApiData InvitationCode where
toQueryParam = cs . toByteString . fromInvitationCode
data NewTeamUser
NewTeamMember InvitationCode
| NewTeamCreator BindingNewTeamUser
| sso : users with saml credentials and/or created via scim
NewTeamMemberSSO TeamId
deriving stock (Eq, Show, Generic)
deriving (Arbitrary) via (GenericUniform NewTeamUser)
newTeamUserCode :: NewTeamUser -> Maybe InvitationCode
newTeamUserCode = \case
NewTeamMember ic -> Just ic
NewTeamCreator _ -> Nothing
NewTeamMemberSSO _ -> Nothing
newTeamUserCreator :: NewTeamUser -> Maybe BindingNewTeamUser
newTeamUserCreator = \case
NewTeamMember _ -> Nothing
NewTeamCreator bntu -> Just bntu
NewTeamMemberSSO _ -> Nothing
newTeamUserTeamId :: NewTeamUser -> Maybe TeamId
newTeamUserTeamId = \case
NewTeamMember _ -> Nothing
NewTeamCreator _ -> Nothing
NewTeamMemberSSO tid -> Just tid
data BindingNewTeamUser = BindingNewTeamUser
{ bnuTeam :: BindingNewTeam,
bnuCurrency :: Maybe Currency.Alpha
}
deriving stock (Eq, Show, Generic)
deriving (Arbitrary) via (GenericUniform BindingNewTeamUser)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema BindingNewTeamUser)
instance ToSchema BindingNewTeamUser where
schema =
object "BindingNewTeamUser" $
BindingNewTeamUser
<$> bnuTeam
.= bindingNewTeamObjectSchema
<*> bnuCurrency
.= maybe_ (optField "currency" genericToSchema)
data ScimUserInfo = ScimUserInfo
{ suiUserId :: UserId,
suiCreatedOn :: Maybe UTCTimeMillis
}
deriving stock (Eq, Show, Generic)
deriving (Arbitrary) via (GenericUniform ScimUserInfo)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema ScimUserInfo)
instance ToSchema ScimUserInfo where
schema =
object "ScimUserInfo" $
ScimUserInfo
<$> suiUserId
.= field "id" schema
<*> suiCreatedOn
.= maybe_ (optField "created_on" schema)
newtype ScimUserInfos = ScimUserInfos {scimUserInfos :: [ScimUserInfo]}
deriving stock (Eq, Show, Generic)
deriving (Arbitrary) via (GenericUniform ScimUserInfos)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema ScimUserInfos)
instance ToSchema ScimUserInfos where
schema =
object "ScimUserInfos" $
ScimUserInfos
<$> scimUserInfos
.= field "scim_user_infos" (array schema)
newtype UserSet = UserSet
{ usUsrs :: Set UserId
}
deriving stock (Eq, Show, Generic)
deriving newtype (Arbitrary)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema UserSet)
instance ToSchema UserSet where
schema =
object "UserSet" $
UserSet
<$> usUsrs
.= field "users" (set schema)
data UserUpdate = UserUpdate
{ uupName :: Maybe Name,
uupPict :: Maybe Pict,
uupAssets :: Maybe [Asset],
uupAccentId :: Maybe ColourId
}
deriving stock (Eq, Show, Generic)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema UserUpdate)
deriving (Arbitrary) via (GenericUniform UserUpdate)
instance ToSchema UserUpdate where
schema =
object "UserUpdate" $
UserUpdate
<$> uupName
.= maybe_ (optField "name" schema)
<*> uupPict
.= maybe_ (optField "picture" schema)
<*> uupAssets
.= maybe_ (optField "assets" (array schema))
<*> uupAccentId
.= maybe_ (optField "accent_id" schema)
data UpdateProfileError
= DisplayNameManagedByScim
| ProfileNotFound
deriving (Generic)
deriving (AsUnion PutSelfErrorResponses) via GenericAsUnion PutSelfErrorResponses UpdateProfileError
instance GSOP.Generic UpdateProfileError
type PutSelfErrorResponses = '[ErrorResponse 'E.NameManagedByScim, ErrorResponse 'E.UserNotFound]
type PutSelfResponses = PutSelfErrorResponses .++ '[RespondEmpty 200 "User updated"]
instance (res ~ PutSelfResponses) => AsUnion res (Maybe UpdateProfileError) where
toUnion = maybeToUnion (toUnion @PutSelfErrorResponses)
fromUnion = maybeFromUnion (fromUnion @PutSelfErrorResponses)
data PasswordChange = PasswordChange
{ cpOldPassword :: Maybe PlainTextPassword,
cpNewPassword :: PlainTextPassword
}
deriving stock (Eq, Show, Generic)
deriving (Arbitrary) via (GenericUniform PasswordChange)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema PasswordChange)
instance ToSchema PasswordChange where
schema =
over
doc
( description
?~ "Data to change a password. The old password is required if \
\a password already exists."
)
. object "PasswordChange"
$ PasswordChange
<$> cpOldPassword
.= maybe_ (optField "old_password" schema)
<*> cpNewPassword
.= field "new_password" schema
data ChangePasswordError
= InvalidCurrentPassword
| ChangePasswordNoIdentity
| ChangePasswordMustDiffer
deriving (Generic)
deriving (AsUnion ChangePasswordErrorResponses) via GenericAsUnion ChangePasswordErrorResponses ChangePasswordError
instance GSOP.Generic ChangePasswordError
type ChangePasswordErrorResponses =
[ ErrorResponse 'E.BadCredentials,
ErrorResponse 'E.NoIdentity,
ErrorResponse 'E.ChangePasswordMustDiffer
]
type ChangePasswordResponses =
ChangePasswordErrorResponses .++ '[RespondEmpty 200 "Password Changed"]
instance (res ~ ChangePasswordResponses) => AsUnion res (Maybe ChangePasswordError) where
toUnion = maybeToUnion (toUnion @ChangePasswordErrorResponses)
fromUnion = maybeFromUnion (fromUnion @ChangePasswordErrorResponses)
newtype LocaleUpdate = LocaleUpdate {luLocale :: Locale}
deriving stock (Eq, Show, Generic)
deriving newtype (Arbitrary)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema LocaleUpdate)
instance ToSchema LocaleUpdate where
schema =
object "LocaleUpdate" $
LocaleUpdate
<$> luLocale
.= field "locale" schema
newtype EmailUpdate = EmailUpdate {euEmail :: Email}
deriving stock (Eq, Show, Generic)
deriving newtype (Arbitrary)
deriving (S.ToSchema) via (Schema EmailUpdate)
instance ToSchema EmailUpdate where
schema =
object "EmailUpdate" $
EmailUpdate
<$> euEmail
.= field "email" schema
instance ToJSON EmailUpdate where
toJSON e = A.object ["email" A..= euEmail e]
instance FromJSON EmailUpdate where
parseJSON = A.withObject "email-update" $ \o ->
EmailUpdate <$> o A..: "email"
newtype PhoneUpdate = PhoneUpdate {puPhone :: Phone}
deriving stock (Eq, Show, Generic)
deriving newtype (Arbitrary)
deriving (ToJSON, FromJSON, S.ToSchema) via Schema PhoneUpdate
instance ToSchema PhoneUpdate where
schema =
object "PhoneUpdate" $
PhoneUpdate
<$> puPhone
.= field "phone" schema
data ChangePhoneError
= PhoneExists
| InvalidNewPhone
| BlacklistedNewPhone
deriving (Generic)
deriving (AsUnion ChangePhoneErrorResponses) via GenericAsUnion ChangePhoneErrorResponses ChangePhoneError
instance GSOP.Generic ChangePhoneError
type ChangePhoneErrorResponses =
[ ErrorResponse 'UserKeyExists,
ErrorResponse 'InvalidPhone,
ErrorResponse 'BlacklistedPhone
]
type ChangePhoneResponses =
ChangePhoneErrorResponses .++ '[RespondEmpty 202 "Phone updated"]
instance (res ~ ChangePhoneResponses) => AsUnion res (Maybe ChangePhoneError) where
toUnion = maybeToUnion (toUnion @ChangePhoneErrorResponses)
fromUnion = maybeFromUnion (fromUnion @ChangePhoneErrorResponses)
data RemoveIdentityError
= LastIdentity
| NoPassword
| NoIdentity
deriving (Generic)
deriving (AsUnion RemoveIdentityErrorResponses) via GenericAsUnion RemoveIdentityErrorResponses RemoveIdentityError
instance GSOP.Generic RemoveIdentityError
type RemoveIdentityErrorResponses =
[ ErrorResponse 'E.LastIdentity,
ErrorResponse 'E.NoPassword,
ErrorResponse 'E.NoIdentity
]
type RemoveIdentityResponses =
RemoveIdentityErrorResponses .++ '[RespondEmpty 200 "Identity Removed"]
instance (res ~ RemoveIdentityResponses) => AsUnion res (Maybe RemoveIdentityError) where
toUnion = maybeToUnion (toUnion @RemoveIdentityErrorResponses)
fromUnion = maybeFromUnion (fromUnion @RemoveIdentityErrorResponses)
newtype HandleUpdate = HandleUpdate {huHandle :: Text}
deriving stock (Eq, Show, Generic)
deriving newtype (Arbitrary)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema HandleUpdate)
instance ToSchema HandleUpdate where
schema =
object "HandleUpdate" $
HandleUpdate <$> huHandle .= field "handle" schema
data ChangeHandleError
= ChangeHandleNoIdentity
| ChangeHandleExists
| ChangeHandleInvalid
| ChangeHandleManagedByScim
deriving (Show, Generic)
deriving (AsUnion ChangeHandleErrorResponses) via GenericAsUnion ChangeHandleErrorResponses ChangeHandleError
instance GSOP.Generic ChangeHandleError
type ChangeHandleErrorResponses =
'[ ErrorResponse 'E.NoIdentity,
ErrorResponse 'E.HandleExists,
ErrorResponse 'E.InvalidHandle,
ErrorResponse 'E.HandleManagedByScim
]
type ChangeHandleResponses =
ChangeHandleErrorResponses .++ '[RespondEmpty 200 "Handle Changed"]
instance (res ~ ChangeHandleResponses) => AsUnion res (Maybe ChangeHandleError) where
toUnion = maybeToUnion (toUnion @ChangeHandleErrorResponses)
fromUnion = maybeFromUnion (fromUnion @ChangeHandleErrorResponses)
newtype NameUpdate = NameUpdate {nuHandle :: Text}
deriving stock (Eq, Show, Generic)
deriving newtype (Arbitrary)
instance ToJSON NameUpdate where
toJSON h = A.object ["name" A..= nuHandle h]
instance FromJSON NameUpdate where
parseJSON = A.withObject "name-update" $ \o ->
NameUpdate <$> o A..: "name"
data ChangeEmailResponse
= ChangeEmailResponseIdempotent
| ChangeEmailResponseNeedsActivation
instance
AsUnion
'[Respond 202 desc1 (), Respond 204 desc2 ()]
ChangeEmailResponse
where
toUnion ChangeEmailResponseIdempotent = S (Z (I ()))
toUnion ChangeEmailResponseNeedsActivation = Z (I ())
fromUnion (Z (I ())) = ChangeEmailResponseNeedsActivation
fromUnion (S (Z (I ()))) = ChangeEmailResponseIdempotent
fromUnion (S (S x)) = case x of {}
newtype DeleteUser = DeleteUser
{ deleteUserPassword :: Maybe PlainTextPassword
}
deriving stock (Eq, Show, Generic)
deriving newtype (Arbitrary)
deriving (S.ToSchema) via (Schema DeleteUser)
instance ToSchema DeleteUser where
schema =
object "DeleteUser" $
DeleteUser
<$> deleteUserPassword
.= maybe_ (optField "password" schema)
mkDeleteUser :: Maybe PlainTextPassword -> DeleteUser
mkDeleteUser = DeleteUser
instance ToJSON DeleteUser where
toJSON d =
A.object $
"password"
A..= deleteUserPassword d
# []
instance FromJSON DeleteUser where
parseJSON = A.withObject "DeleteUser" $ \o ->
DeleteUser <$> o A..:? "password"
data VerifyDeleteUser = VerifyDeleteUser
{ verifyDeleteUserKey :: Code.Key,
verifyDeleteUserCode :: Code.Value
}
deriving stock (Eq, Show, Generic)
deriving (Arbitrary) via (GenericUniform VerifyDeleteUser)
deriving (ToJSON, FromJSON, S.ToSchema) via (Schema VerifyDeleteUser)
mkVerifyDeleteUser :: Code.Key -> Code.Value -> VerifyDeleteUser
mkVerifyDeleteUser = VerifyDeleteUser
instance ToSchema VerifyDeleteUser where
schema =
objectWithDocModifier "VerifyDeleteUser" (description ?~ "Data for verifying an account deletion.") $
VerifyDeleteUser
<$> verifyDeleteUserKey
.= fieldWithDocModifier "key" (description ?~ "The identifying key of the account (i.e. user ID).") schema
<*> verifyDeleteUserCode
.= fieldWithDocModifier "code" (description ?~ "The verification code.") schema
newtype DeletionCodeTimeout = DeletionCodeTimeout
{fromDeletionCodeTimeout :: Code.Timeout}
deriving stock (Eq, Show, Generic)
deriving newtype (Arbitrary)
deriving (S.ToSchema) via (Schema DeletionCodeTimeout)
instance ToSchema DeletionCodeTimeout where
schema =
object "DeletionCodeTimeout" $
DeletionCodeTimeout
<$> fromDeletionCodeTimeout
.= field "expires_in" schema
instance ToJSON DeletionCodeTimeout where
toJSON (DeletionCodeTimeout t) = A.object ["expires_in" A..= t]
instance FromJSON DeletionCodeTimeout where
parseJSON = A.withObject "DeletionCodeTimeout" $ \o ->
DeletionCodeTimeout <$> o A..: "expires_in"
data DeleteUserResult
NoUser
AccountAlreadyDeleted
AccountDeleted
deriving (Eq, Show)
data ListUsersQuery
= ListUsersByIds [Qualified UserId]
| ListUsersByHandles (Range 1 4 [Qualified Handle])
deriving (Show, Eq)
instance FromJSON ListUsersQuery where
parseJSON =
A.withObject "ListUsersQuery" $ \o -> do
mUids <- ListUsersByIds <$$> o A..:? "qualified_ids"
mHandles <- ListUsersByHandles <$$> o A..:? "qualified_handles"
case (mUids, mHandles) of
(Just uids, Nothing) -> pure uids
(Nothing, Just handles) -> pure handles
(_, _) -> fail "exactly one of qualified_ids or qualified_handles must be provided."
instance ToJSON ListUsersQuery where
toJSON (ListUsersByIds uids) = A.object ["qualified_ids" A..= uids]
toJSON (ListUsersByHandles handles) = A.object ["qualified_handles" A..= handles]
NB : It is not possible to specific mutually exclusive fields in swagger2 , so
instance S.ToSchema ListUsersQuery where
declareNamedSchema _ = do
uids <- S.declareSchemaRef (Proxy @[Qualified UserId])
handles <- S.declareSchemaRef (Proxy @(Range 1 4 [Qualified Handle]))
pure $
S.NamedSchema (Just "ListUsersQuery") $
mempty
& S.type_ ?~ S.SwaggerObject
& S.description ?~ "exactly one of qualified_ids or qualified_handles must be provided."
& S.properties .~ InsOrdHashMap.fromList [("qualified_ids", uids), ("qualified_handles", handles)]
& S.example ?~ toJSON (ListUsersByIds [Qualified (Id UUID.nil) (Domain "example.com")])
data VerificationAction
= CreateScimToken
| Login
| DeleteTeam
deriving stock (Eq, Show, Enum, Bounded, Generic)
deriving (Arbitrary) via (GenericUniform VerificationAction)
deriving (FromJSON, ToJSON, S.ToSchema) via (Schema VerificationAction)
-- | JSON enum of snake_case wire names; must stay in sync with the
-- ToByteString/FromByteString instances below.
instance ToSchema VerificationAction where
  schema =
    enum @Text "VerificationAction" $
      mconcat
        [ element "create_scim_token" CreateScimToken,
          element "login" Login,
          element "delete_team" DeleteTeam
        ]
-- | Render an action using the same wire names as the JSON schema.
instance ToByteString VerificationAction where
  builder action =
    case action of
      CreateScimToken -> "create_scim_token"
      Login -> "login"
      DeleteTeam -> "delete_team"
-- | Parse an action name: consumes the whole input, which must be valid
-- UTF-8 and one of the three known wire names; anything else fails.
instance FromByteString VerificationAction where
  parser =
    Parser.takeByteString >>= \b ->
      case T.decodeUtf8' b of
        Right "login" -> pure Login
        Right "create_scim_token" -> pure CreateScimToken
        Right "delete_team" -> pure DeleteTeam
        Right t -> fail $ "Invalid VerificationAction: " <> T.unpack t
        Left e -> fail $ "Invalid VerificationAction: " <> show e
-- | Query-parameter schema: a string restricted to the enumeration of all
-- actions, rendered via 'toQueryParam'.
instance S.ToParamSchema VerificationAction where
  toParamSchema _ =
    mempty
      { S._paramSchemaType = Just S.SwaggerString,
        S._paramSchemaEnum = Just (A.String . toQueryParam <$> [(minBound :: VerificationAction) ..])
      }
-- | URL-piece (de)serialisation reuses the ByteString round-trip above.
instance FromHttpApiData VerificationAction where
  parseUrlPiece = maybeToEither "Invalid verification action" . fromByteString . cs

instance ToHttpApiData VerificationAction where
  toQueryParam a = cs (toByteString' a)
-- | Request to send a verification code for the given action to the given
-- email address.
data SendVerificationCode = SendVerificationCode
  { svcAction :: VerificationAction,  -- ^ what the code will authorise
    svcEmail :: Email                 -- ^ where to send the code
  }
  deriving stock (Eq, Show, Generic)
  deriving (Arbitrary) via (GenericUniform SendVerificationCode)
  deriving (FromJSON, ToJSON, S.ToSchema) via Schema SendVerificationCode
-- | JSON object with an "action" and an "email" field.
instance ToSchema SendVerificationCode where
  schema =
    object "SendVerificationCode" $
      SendVerificationCode
        <$> svcAction
          .= field "action" schema
        <*> svcEmail
          .= field "email" schema
|
a92fd16cf7a921dec1d78f61d8594e0b4d64882a926bd4acc53e43d639cabf35 | spawnfest/eep49ers | tftp_test_lib.erl | %%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2007-2018. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(tftp_test_lib).
-compile(export_all).
-include("tftp_test_lib.hrl").
%%
%% -----
%%
%% Per-testcase setup: make sure no tftp daemon left over from a previous
%% case is still running (?IGNORE swallows the "not started" error).
init_per_testcase(_Case, Config) when is_list(Config) ->
    io:format("\n ", []),
    ?IGNORE(application:stop(tftp)),
    Config.
%% Per-testcase teardown: stop the tftp application if the case started it.
end_per_testcase(_Case, Config) when is_list(Config) ->
    ?IGNORE(application:stop(tftp)),
    Config.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Infrastructure for test suite
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Report an unexpected test result: notify the global logger (if any),
%% log the value, emit an et trace event, and - when a test-case supervisor
%% is registered - send it a #'REASON'{} failure message.  Returns the
%% offending value so callers can use it in tail position.
error(Actual, Mod, Line) ->
    (catch global:send(tftp_global_logger, {failed, Mod, Line})),
    log("<ERROR> Bad result: ~p\n", [Actual], Mod, Line),
    Label = lists:concat([Mod, "(", Line, ") unexpected result"]),
    et:report_event(60, Mod, Mod, Label,
                    [{line, Mod, Line}, {error, Actual}]),
    case global:whereis_name(tftp_test_case_sup) of
        undefined ->
            ignore;
        Pid ->
            Fail = #'REASON'{mod = Mod, line = Line, desc = Actual},
            Pid ! {fail, self(), Fail}
    end,
    Actual.
%% Print a log line prefixed with "Mod(Line): ", either to the globally
%% registered logger process or, failing that, to the user device.
log(Format, Args, Mod, Line) ->
    Format1 = "~p(~p): " ++ Format,
    Args1 = [Mod, Line | Args],
    Device =
        case global:whereis_name(tftp_global_logger) of
            undefined -> user;
            Pid -> Pid
        end,
    io:format(Device, Format1, Args1).
%% Default (empty) configuration for interactive runs.
default_config() ->
    [].

%% Interactive entry points: run all cases in this module, an explicit list
%% of cases, or cases with an explicit configuration.  Returns the flattened
%% result list after printing a summary.
t() ->
    t([{?MODULE, all}]).

t(Cases) ->
    t(Cases, default_config()).

t(Cases, Config) ->
    process_flag(trap_exit, true),
    Res = lists:flatten(do_test(Cases, Config)),
    io:format("Res: ~p\n", [Res]),
    display_result(Res),
    Res.
%% Expand and run a test specification.  A spec is a {Mod, Fun} pair, a bare
%% module (short for {Mod, all}), or a list of specs.  Mod:Fun(suite) decides
%% what a pair means:
%%   []                    -> a real test case; evaluate it
%%   Cases (list)          -> sub-case names; expand and recurse
%%   {req, _, {conf, ...}} -> a configuration wrapper; run Init, the wrapped
%%                            cases, then Finish
%% Returns a (nested) list of result tuples; the caller flattens it.
do_test({Mod, Fun}, Config) when is_atom(Mod), is_atom(Fun) ->
    case catch apply(Mod, Fun, [suite]) of
        [] ->
            io:format("Eval: ~p:", [{Mod, Fun}]),
            Res = eval(Mod, Fun, Config),
            {R, _, _} = Res,
            io:format(" ~p\n", [R]),
            Res;
        Cases when is_list(Cases) ->
            io:format("Expand: ~p ...\n", [{Mod, Fun}]),
            %% Bare atoms name cases in the same module.
            Map = fun(Case) when is_atom(Case)-> {Mod, Case};
                     (Case) -> Case
                  end,
            do_test(lists:map(Map, Cases), Config);
        {req, _, {conf, Init, Cases, Finish}} ->
            case (catch apply(Mod, Init, [Config])) of
                Conf when is_list(Conf) ->
                    io:format("Expand: ~p ...\n", [{Mod, Fun}]),
                    Map = fun(Case) when is_atom(Case)-> {Mod, Case};
                             (Case) -> Case
                          end,
                    Res = do_test(lists:map(Map, Cases), Conf),
                    %% Finish runs even though its result is ignored.
                    (catch apply(Mod, Finish, [Conf])),
                    Res;
                {'EXIT', {skipped, Reason}} ->
                    io:format(" => skipping: ~p\n", [Reason]),
                    [{skipped, {Mod, Fun}, Reason}];
                Error ->
                    io:format(" => failed: ~p\n", [Error]),
                    [{failed, {Mod, Fun}, Error}]
            end;
        {'EXIT', {undef, _}} ->
            %% The case does not exist (yet) - record as "not yet implemented".
            io:format("Undefined: ~p\n", [{Mod, Fun}]),
            [{nyi, {Mod, Fun}, ok}];
        Error ->
            io:format("Ignoring: ~p: ~p\n", [{Mod, Fun}, Error]),
            [{failed, {Mod, Fun}, Error}]
    end;
do_test(Mod, Config) when is_atom(Mod) ->
    Res = do_test({Mod, all}, Config),
    Res;
do_test(Cases, Config) when is_list(Cases) ->
    [do_test(Case, Config) || Case <- Cases];
do_test(Bad, _Config) ->
    [{badarg, Bad, ok}].
%% Run a single test case.  Registers this process globally as the test-case
%% supervisor (so error/3 can report to it), runs init_per_testcase, spawns a
%% linked evaluator process, awaits its verdict, then restores previous state.
eval(Mod, Fun, Config) ->
    TestCase = {?MODULE, Mod, Fun},
    Label = lists:concat(["TEST CASE: ", Fun]),
    et:report_event(40, ?MODULE, Mod, Label ++ " started",
                    [TestCase, Config]),
    global:register_name(tftp_test_case_sup, self()),
    Flag = process_flag(trap_exit, true),
    Config2 = Mod:init_per_testcase(Fun, Config),
    Pid = spawn_link(?MODULE, do_eval, [self(), Mod, Fun, Config2]),
    R = wait_for_evaluator(Pid, Mod, Fun, Config2, []),
    Mod:end_per_testcase(Fun, Config2),
    global:unregister_name(tftp_test_case_sup),
    process_flag(trap_exit, Flag),
    R.
%% Wait for the evaluator process to finish.  {fail, ...} reports arriving
%% while the case runs are accumulated in Errors; the final message is
%% classified as ok / failed / skipped / crashed.
wait_for_evaluator(Pid, Mod, Fun, Config, Errors) ->
    TestCase = {?MODULE, Mod, Fun},
    Label = lists:concat(["TEST CASE: ", Fun]),
    receive
        {done, Pid, ok} when Errors == [] ->
            et:report_event(40, Mod, ?MODULE, Label ++ " ok",
                            [TestCase, Config]),
            {ok, {Mod, Fun}, Errors};
        {done, Pid, {ok, _}} when Errors == [] ->
            et:report_event(40, Mod, ?MODULE, Label ++ " ok",
                            [TestCase, Config]),
            {ok, {Mod, Fun}, Errors};
        {done, Pid, Fail} ->
            %% Reached when the case returned a non-ok value, or when any
            %% intermediate failure was reported (Errors /= []).
            et:report_event(20, Mod, ?MODULE, Label ++ " failed",
                            [TestCase, Config, {return, Fail}, Errors]),
            {failed, {Mod,Fun}, Fail};
        {'EXIT', Pid, {skipped, Reason}} ->
            et:report_event(20, Mod, ?MODULE, Label ++ " skipped",
                            [TestCase, Config, {skipped, Reason}]),
            {skipped, {Mod, Fun}, Errors};
        {'EXIT', Pid, Reason} ->
            et:report_event(20, Mod, ?MODULE, Label ++ " crashed",
                            [TestCase, Config, {'EXIT', Reason}]),
            {crashed, {Mod, Fun}, [{'EXIT', Reason} | Errors]};
        {fail, Pid, Reason} ->
            wait_for_evaluator(Pid, Mod, Fun, Config, Errors ++ [Reason])
    end.
%% Body of the evaluator process: run the case and report the outcome to the
%% waiting parent.  A {skipped, Reason} exit is forwarded as an 'EXIT'
%% message so the parent classifies it as skipped rather than crashed.
do_eval(ReplyTo, Mod, Fun, Config) ->
    case (catch apply(Mod, Fun, [Config])) of
        {'EXIT', {skipped, Reason}} ->
            ReplyTo ! {'EXIT', self(), {skipped, Reason}};
        Other ->
            ReplyTo ! {done, self(), Other}
    end,
    %% Unlink before exiting so the parent does not also get a normal 'EXIT'.
    unlink(ReplyTo),
    exit(shutdown).
%% Print a human-readable summary of all results, followed by detail lines
%% for the skipped, failed and crashed cases.
display_result([]) ->
    io:format("OK\n", []);
display_result(Res) when is_list(Res) ->
    Ok = [MF || {ok, MF, _} <- Res],
    Nyi = [MF || {nyi, MF, _} <- Res],
    Skipped = [{MF, Reason} || {skipped, MF, Reason} <- Res],
    Failed = [{MF, Reason} || {failed, MF, Reason} <- Res],
    Crashed = [{MF, Reason} || {crashed, MF, Reason} <- Res],
    display_summary(Ok, Nyi, Skipped, Failed, Crashed),
    display_skipped(Skipped),
    display_failed(Failed),
    display_crashed(Crashed).
%% Print one count line per result category.
display_summary(Ok, Nyi, Skipped, Failed, Crashed) ->
    io:format("\nTest case summary:\n", []),
    display_summary(Ok, "successful"),
    display_summary(Nyi, "not yet implemented"),
    display_summary(Skipped, "skipped"),
    display_summary(Failed, "failed"),
    display_summary(Crashed, "crashed"),
    io:format("\n", []).

%% Helper: print the count of one category.
display_summary(Res, Info) ->
    io:format(" ~w test cases ~s\n", [length(Res), Info]).
%% The three "display problem cases" functions below differed only in their
%% heading; they now share display_problem_cases/2, removing the triplicated
%% formatting code.  Output is unchanged.

display_skipped([]) ->
    ok;
display_skipped(Skipped) ->
    display_problem_cases("Skipped", Skipped).

display_failed([]) ->
    ok;
display_failed(Failed) ->
    display_problem_cases("Failed", Failed).

display_crashed([]) ->
    ok;
display_crashed(Crashed) ->
    display_problem_cases("Crashed", Crashed).

%% Print "<Heading> test cases:" followed by one "MF => Reason" line per
%% entry and a trailing blank line.
display_problem_cases(Heading, Cases) ->
    io:format("~s test cases:\n", [Heading]),
    F = fun({MF, Reason}) -> io:format(" ~p => ~p\n", [MF, Reason]) end,
    lists:foreach(F, Cases),
    io:format("\n", []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% generic callback
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-record(generic_state, {state, prepare, open, read, write, abort}).
%% tftp callback: fetch the five wrapped callback funs (and the initial
%% state) from the Initial option list, delegate to the wrapped Prepare fun,
%% and on success wrap its state together with the funs in #generic_state{}.
%% All six options are mandatory; lookup_option/3 returns the atom
%% 'mandatory' when one is missing, which would crash on first use.
prepare(Peer, Access, LocalFilename, Mode, SuggestedOptions, Initial) when is_list(Initial) ->
    State = lookup_option(state, mandatory, Initial),
    Prepare = lookup_option(prepare, mandatory, Initial),
    Open = lookup_option(open, mandatory, Initial),
    Read = lookup_option(read, mandatory, Initial),
    Write = lookup_option(write, mandatory, Initial),
    Abort = lookup_option(abort, mandatory, Initial),
    case Prepare(Peer, Access, LocalFilename, Mode, SuggestedOptions, State) of
        {ok, AcceptedOptions, NewState} ->
            {ok,
             AcceptedOptions,
             #generic_state{state = NewState,
                            prepare = Prepare,
                            open = Open,
                            read = Read,
                            write = Write,
                            abort = Abort}};
        Other ->
            Other
    end.
%% tftp callback: when still holding the raw option list, run prepare/6
%% first and retry; otherwise delegate to the wrapped Open fun and re-wrap
%% the returned state.
open(Peer, Access, LocalFilename, Mode, SuggestedOptions, Initial) when is_list(Initial) ->
    case prepare(Peer, Access, LocalFilename, Mode, SuggestedOptions, Initial) of
        {ok, SuggestedOptions2, GenericState} ->
            open(Peer, Access, LocalFilename, Mode, SuggestedOptions2, GenericState);
        Other ->
            Other
    end;
open(Peer, Access, LocalFilename, Mode, SuggestedOptions, #generic_state{state = State, open = Open} = GenericState) ->
    case Open(Peer, Access, LocalFilename, Mode, SuggestedOptions, State) of
        {ok, SuggestedOptions2, NewState} ->
            {ok, SuggestedOptions2, GenericState#generic_state{state = NewState}};
        Other ->
            Other
    end.
%% tftp callback: delegate to the wrapped Read fun, re-wrapping the
%% continuation state; any other return value is passed through unchanged.
read(#generic_state{state = Inner, read = Read} = GS) ->
    case Read(Inner) of
        {more, Block, Inner1} ->
            {more, Block, GS#generic_state{state = Inner1}};
        Passthrough ->
            Passthrough
    end.
%% tftp callback: delegate to the wrapped Write fun, re-wrapping the
%% continuation state; any other return value is passed through unchanged.
write(Block, #generic_state{state = Inner, write = Write} = GS) ->
    case Write(Block, Inner) of
        {more, Inner1} ->
            {more, GS#generic_state{state = Inner1}};
        Passthrough ->
            Passthrough
    end.
%% tftp callback: delegate abort directly to the wrapped Abort fun.
abort(Code, Text, #generic_state{state = State, abort = Abort}) ->
    Abort(Code, Text, State).
%% Fetch the value stored under Key in a {Key, Value} option list, falling
%% back to Default when the key is absent.
%% Uses lists:keyfind/3 instead of the legacy lists:keysearch/3, which the
%% OTP docs retain only for backwards compatibility; behaviour is identical.
lookup_option(Key, Default, Options) ->
    case lists:keyfind(Key, 1, Options) of
        {_, Val} ->
            Val;
        false ->
            Default
    end.
| null | https://raw.githubusercontent.com/spawnfest/eep49ers/d1020fd625a0bbda8ab01caf0e1738eb1cf74886/lib/tftp/test/tftp_test_lib.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
-----
Infrastructure for test suite
generic callback
| Copyright Ericsson AB 2007 - 2018 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(tftp_test_lib).
-compile(export_all).
-include("tftp_test_lib.hrl").
init_per_testcase(_Case, Config) when is_list(Config) ->
io:format("\n ", []),
?IGNORE(application:stop(tftp)),
Config.
end_per_testcase(_Case, Config) when is_list(Config) ->
?IGNORE(application:stop(tftp)),
Config.
error(Actual, Mod, Line) ->
(catch global:send(tftp_global_logger, {failed, Mod, Line})),
log("<ERROR> Bad result: ~p\n", [Actual], Mod, Line),
Label = lists:concat([Mod, "(", Line, ") unexpected result"]),
et:report_event(60, Mod, Mod, Label,
[{line, Mod, Line}, {error, Actual}]),
case global:whereis_name(tftp_test_case_sup) of
undefined ->
ignore;
Pid ->
Fail = #'REASON'{mod = Mod, line = Line, desc = Actual},
Pid ! {fail, self(), Fail}
end,
Actual.
log(Format, Args, Mod, Line) ->
case global:whereis_name(tftp_global_logger) of
undefined ->
io:format(user, "~p(~p): " ++ Format,
[Mod, Line] ++ Args);
Pid ->
io:format(Pid, "~p(~p): " ++ Format,
[Mod, Line] ++ Args)
end.
default_config() ->
[].
t() ->
t([{?MODULE, all}]).
t(Cases) ->
t(Cases, default_config()).
t(Cases, Config) ->
process_flag(trap_exit, true),
Res = lists:flatten(do_test(Cases, Config)),
io:format("Res: ~p\n", [Res]),
display_result(Res),
Res.
do_test({Mod, Fun}, Config) when is_atom(Mod), is_atom(Fun) ->
case catch apply(Mod, Fun, [suite]) of
[] ->
io:format("Eval: ~p:", [{Mod, Fun}]),
Res = eval(Mod, Fun, Config),
{R, _, _} = Res,
io:format(" ~p\n", [R]),
Res;
Cases when is_list(Cases) ->
io:format("Expand: ~p ...\n", [{Mod, Fun}]),
Map = fun(Case) when is_atom(Case)-> {Mod, Case};
(Case) -> Case
end,
do_test(lists:map(Map, Cases), Config);
{req, _, {conf, Init, Cases, Finish}} ->
case (catch apply(Mod, Init, [Config])) of
Conf when is_list(Conf) ->
io:format("Expand: ~p ...\n", [{Mod, Fun}]),
Map = fun(Case) when is_atom(Case)-> {Mod, Case};
(Case) -> Case
end,
Res = do_test(lists:map(Map, Cases), Conf),
(catch apply(Mod, Finish, [Conf])),
Res;
{'EXIT', {skipped, Reason}} ->
io:format(" => skipping: ~p\n", [Reason]),
[{skipped, {Mod, Fun}, Reason}];
Error ->
io:format(" => failed: ~p\n", [Error]),
[{failed, {Mod, Fun}, Error}]
end;
{'EXIT', {undef, _}} ->
io:format("Undefined: ~p\n", [{Mod, Fun}]),
[{nyi, {Mod, Fun}, ok}];
Error ->
io:format("Ignoring: ~p: ~p\n", [{Mod, Fun}, Error]),
[{failed, {Mod, Fun}, Error}]
end;
do_test(Mod, Config) when is_atom(Mod) ->
Res = do_test({Mod, all}, Config),
Res;
do_test(Cases, Config) when is_list(Cases) ->
[do_test(Case, Config) || Case <- Cases];
do_test(Bad, _Config) ->
[{badarg, Bad, ok}].
eval(Mod, Fun, Config) ->
TestCase = {?MODULE, Mod, Fun},
Label = lists:concat(["TEST CASE: ", Fun]),
et:report_event(40, ?MODULE, Mod, Label ++ " started",
[TestCase, Config]),
global:register_name(tftp_test_case_sup, self()),
Flag = process_flag(trap_exit, true),
Config2 = Mod:init_per_testcase(Fun, Config),
Pid = spawn_link(?MODULE, do_eval, [self(), Mod, Fun, Config2]),
R = wait_for_evaluator(Pid, Mod, Fun, Config2, []),
Mod:end_per_testcase(Fun, Config2),
global:unregister_name(tftp_test_case_sup),
process_flag(trap_exit, Flag),
R.
wait_for_evaluator(Pid, Mod, Fun, Config, Errors) ->
TestCase = {?MODULE, Mod, Fun},
Label = lists:concat(["TEST CASE: ", Fun]),
receive
{done, Pid, ok} when Errors == [] ->
et:report_event(40, Mod, ?MODULE, Label ++ " ok",
[TestCase, Config]),
{ok, {Mod, Fun}, Errors};
{done, Pid, {ok, _}} when Errors == [] ->
et:report_event(40, Mod, ?MODULE, Label ++ " ok",
[TestCase, Config]),
{ok, {Mod, Fun}, Errors};
{done, Pid, Fail} ->
et:report_event(20, Mod, ?MODULE, Label ++ " failed",
[TestCase, Config, {return, Fail}, Errors]),
{failed, {Mod,Fun}, Fail};
{'EXIT', Pid, {skipped, Reason}} ->
et:report_event(20, Mod, ?MODULE, Label ++ " skipped",
[TestCase, Config, {skipped, Reason}]),
{skipped, {Mod, Fun}, Errors};
{'EXIT', Pid, Reason} ->
et:report_event(20, Mod, ?MODULE, Label ++ " crashed",
[TestCase, Config, {'EXIT', Reason}]),
{crashed, {Mod, Fun}, [{'EXIT', Reason} | Errors]};
{fail, Pid, Reason} ->
wait_for_evaluator(Pid, Mod, Fun, Config, Errors ++ [Reason])
end.
do_eval(ReplyTo, Mod, Fun, Config) ->
case (catch apply(Mod, Fun, [Config])) of
{'EXIT', {skipped, Reason}} ->
ReplyTo ! {'EXIT', self(), {skipped, Reason}};
Other ->
ReplyTo ! {done, self(), Other}
end,
unlink(ReplyTo),
exit(shutdown).
display_result([]) ->
io:format("OK\n", []);
display_result(Res) when is_list(Res) ->
Ok = [MF || {ok, MF, _} <- Res],
Nyi = [MF || {nyi, MF, _} <- Res],
Skipped = [{MF, Reason} || {skipped, MF, Reason} <- Res],
Failed = [{MF, Reason} || {failed, MF, Reason} <- Res],
Crashed = [{MF, Reason} || {crashed, MF, Reason} <- Res],
display_summary(Ok, Nyi, Skipped, Failed, Crashed),
display_skipped(Skipped),
display_failed(Failed),
display_crashed(Crashed).
display_summary(Ok, Nyi, Skipped, Failed, Crashed) ->
io:format("\nTest case summary:\n", []),
display_summary(Ok, "successful"),
display_summary(Nyi, "not yet implemented"),
display_summary(Skipped, "skipped"),
display_summary(Failed, "failed"),
display_summary(Crashed, "crashed"),
io:format("\n", []).
display_summary(Res, Info) ->
io:format(" ~w test cases ~s\n", [length(Res), Info]).
display_skipped([]) ->
ok;
display_skipped(Skipped) ->
io:format("Skipped test cases:\n", []),
F = fun({MF, Reason}) -> io:format(" ~p => ~p\n", [MF, Reason]) end,
lists:foreach(F, Skipped),
io:format("\n", []).
display_failed([]) ->
ok;
display_failed(Failed) ->
io:format("Failed test cases:\n", []),
F = fun({MF, Reason}) -> io:format(" ~p => ~p\n", [MF, Reason]) end,
lists:foreach(F, Failed),
io:format("\n", []).
display_crashed([]) ->
ok;
display_crashed(Crashed) ->
io:format("Crashed test cases:\n", []),
F = fun({MF, Reason}) -> io:format(" ~p => ~p\n", [MF, Reason]) end,
lists:foreach(F, Crashed),
io:format("\n", []).
-record(generic_state, {state, prepare, open, read, write, abort}).
prepare(Peer, Access, LocalFilename, Mode, SuggestedOptions, Initial) when is_list(Initial) ->
State = lookup_option(state, mandatory, Initial),
Prepare = lookup_option(prepare, mandatory, Initial),
Open = lookup_option(open, mandatory, Initial),
Read = lookup_option(read, mandatory, Initial),
Write = lookup_option(write, mandatory, Initial),
Abort = lookup_option(abort, mandatory, Initial),
case Prepare(Peer, Access, LocalFilename, Mode, SuggestedOptions, State) of
{ok, AcceptedOptions, NewState} ->
{ok,
AcceptedOptions,
#generic_state{state = NewState,
prepare = Prepare,
open = Open,
read = Read,
write = Write,
abort = Abort}};
Other ->
Other
end.
open(Peer, Access, LocalFilename, Mode, SuggestedOptions, Initial) when is_list(Initial) ->
case prepare(Peer, Access, LocalFilename, Mode, SuggestedOptions, Initial) of
{ok, SuggestedOptions2, GenericState} ->
open(Peer, Access, LocalFilename, Mode, SuggestedOptions2, GenericState);
Other ->
Other
end;
open(Peer, Access, LocalFilename, Mode, SuggestedOptions, #generic_state{state = State, open = Open} = GenericState) ->
case Open(Peer, Access, LocalFilename, Mode, SuggestedOptions, State) of
{ok, SuggestedOptions2, NewState} ->
{ok, SuggestedOptions2, GenericState#generic_state{state = NewState}};
Other ->
Other
end.
read(#generic_state{state = State, read = Read} = GenericState) ->
case Read(State) of
{more, DataBlock, NewState} ->
{more, DataBlock, GenericState#generic_state{state = NewState}};
Other ->
Other
end.
write(DataBlock, #generic_state{state = State, write = Write} = GenericState) ->
case Write(DataBlock, State) of
{more, NewState} ->
{more, GenericState#generic_state{state = NewState}};
Other ->
Other
end.
abort(Code, Text, #generic_state{state = State, abort = Abort}) ->
Abort(Code, Text, State).
lookup_option(Key, Default, Options) ->
case lists:keysearch(Key, 1, Options) of
{value, {_, Val}} ->
Val;
false ->
Default
end.
|
6ad0146cdd0ceb6def0b3d8c1f66ddeebce3ce3cf0664df4329f3e715a31f6a6 | pierric/fei-nn | Class.hs | # LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE FlexibleInstances #-}
module MXNet.NN.DataIter.Class where
import GHC.Exts (Constraint)
import RIO
import RIO.Prelude.Types (MonadTrans)
-- | Operations shared by all dataset types.  Available instances include
-- 'LVec' and the mxnet data-iters in package
-- <https://github.com/pierric/fei-dataiter mxnet-dataiter>.
--
-- NOTE(review): several Haddock markers ("-- |") were stripped in the
-- extracted source, leaving stranded comment text as invalid code; restored
-- here.  Signatures and default implementations are unchanged.
class Dataset (d :: (* -> *) -> * -> *) where
    -- | Constraint the underlying monad must satisfy for this dataset type.
    type DatasetMonadConstraint d (m :: * -> *) :: Constraint
    -- | Create Dataset from `[]`.
    -- Note that depending on the instance, it may or may not work with an
    -- infinite list.
    fromListD :: (Monad m, DatasetMonadConstraint d m) => [e] -> d m e
    -- | Zip two Datasets.
    zipD :: (Monad m, DatasetMonadConstraint d m) => d m e1 -> d m e2 -> d m (e1, e2)
    -- | Get number of elements.
    sizeD :: (Monad m, DatasetMonadConstraint d m) => d m e -> m Int
    -- | Apply a function on each element of the Dataset.
    forEachD :: (Monad m, DatasetMonadConstraint d m) => d m e -> (e -> m a) -> m [a]
    -- | Apply a function on each element of the Dataset together with the
    -- element's index.  Note that the default implementation assumes the
    -- Dataset can be created from an infinite list.
    forEachD_i :: (Monad m, DatasetMonadConstraint d m) => d m e -> ((Int, e) -> m a) -> m [a]
    forEachD_i dat = forEachD (zipD (fromListD [1..]) dat)
    -- | Apply a function on each element of the Dataset together with the
    -- total number of elements and the element's index.
    forEachD_ni :: (Monad m, DatasetMonadConstraint d m) => d m e -> (((Int, Int), e) -> m a) -> m [a]
    forEachD_ni dat proc = do
        n <- sizeD dat
        forEachD ((fromListD (replicate n n) `zipD` fromListD [1..n]) `zipD` dat) proc
    foldD :: (Monad m, DatasetMonadConstraint d m) => (a -> e -> m a) -> a -> d m e -> m a
    takeD :: (Monad m, DatasetMonadConstraint d m) => Int -> d m e -> d m e
    -- | Lift from one monad into another.
    liftD :: (MonadTrans t, Monad m, DatasetMonadConstraint d m) => d m a -> d (t m) a
-- | Properties of a dataset that depend on the concrete element type e.
class Dataset d => DatasetProp (d :: (* -> *) -> * -> *) e where
    -- | Get the batch size of the dataset
    batchSizeD :: (Monad m, DatasetMonadConstraint d m) => d m e -> m (Maybe Int)
| null | https://raw.githubusercontent.com/pierric/fei-nn/dccc6e3ed9f029862e61ae78e52eb7e9896c8550/src/MXNet/NN/DataIter/Class.hs | haskell | | Create Dataset from `[]`.
note that depending on the instance, it may or may not work with infinitive list.
| Get number of elements
| Get the batch size of the dataset
| # LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
module MXNet.NN.DataIter.Class where
import GHC.Exts (Constraint)
import RIO
import RIO.Prelude.Types (MonadTrans)
Available instances include ' LVec ' and mxnet data - iters in package < -dataiter mxnet - dataiter >
class Dataset (d :: (* -> *) -> * -> *) where
type DatasetMonadConstraint d (m :: * -> *) :: Constraint
fromListD :: (Monad m, DatasetMonadConstraint d m) => [e] -> d m e
| Zip two Datasets
zipD :: (Monad m, DatasetMonadConstraint d m) => d m e1 -> d m e2 -> d m (e1, e2)
sizeD :: (Monad m, DatasetMonadConstraint d m) => d m e -> m Int
| Apply a function on each element of Dataset
forEachD :: (Monad m, DatasetMonadConstraint d m) => d m e -> (e -> m a) -> m [a]
| Apply a function on each element of Dataset together with the element 's index .
Note that the default implmentation assumes the Dataset can be created from a infinitive list .
forEachD_i :: (Monad m, DatasetMonadConstraint d m) => d m e -> ((Int, e) -> m a) -> m [a]
forEachD_i dat = forEachD (zipD (fromListD [1..]) dat)
| Apply a function on each element of Dataset together with the total number of elements and the element 's index .
forEachD_ni :: (Monad m, DatasetMonadConstraint d m) => d m e -> (((Int, Int), e) -> m a) -> m [a]
forEachD_ni dat proc = do
n <- sizeD dat
forEachD ((fromListD (replicate n n) `zipD` fromListD [1..n]) `zipD` dat) proc
foldD :: (Monad m, DatasetMonadConstraint d m) => (a -> e -> m a) -> a -> d m e -> m a
takeD :: (Monad m, DatasetMonadConstraint d m) => Int -> d m e -> d m e
| Lift from one monad into another
liftD :: (MonadTrans t, Monad m, DatasetMonadConstraint d m) => d m a -> d (t m) a
class Dataset d => DatasetProp (d :: (* -> *) -> * -> *) e where
batchSizeD :: (Monad m, DatasetMonadConstraint d m) => d m e -> m (Maybe Int)
|
17036972c0c493220256983f644dec18b5a3a70eb717da716c8fd4843deb92a5 | tov/shcaml | anyShtream.mli | vim : set
(**
* Functor to create type-aware shtream modules. The base shtream
* module {!Shtream} is indifferent to the element type. The functor
* {!AnyShtream.Make}, on the other hand, produces a module with shtream
* functions that know how read shtream from and write shtreams to
* channels without a user-supplied reader or printer function.
*
* Modules {!LineShtream} and {!StringShtream} are both created using
* this functor, though some values in {!LineShtream} are specialized
* further.
*)
(** The input signature of the functor {!AnyShtream.Make}. *)
(* NOTE(review): several "(**" openers were stripped in the extracted source
 * (leaving stranded, partly duplicated comment text as invalid code);
 * restored below.  All type and val declarations are unchanged. *)
module type ELEM = sig
  (** The element type may be polymorphic, in which case the conversion
    * of elements to strings must handle any element.  The conversion
    * from strings (or reading from channels) is monomorphic, returning
    * shtream elements of a particular type.
    *)

  type 'a elem
  (** The element type for the resulting shtream module.  This type is
    * parameterized so that a shtream module might handle a family of
    * types.  The function {!string_of} needs handle ['a elem] for any
    * ['a]. *)

  type initial
  (** The parameter to {!elem} for values returned by conversions from
    * strings.  That is, [initial elem] is the type of shtream elements when
    * first read from a string or channel. *)

  val reader : unit -> in_channel -> initial elem
  (** Make a reader of shtream elements.  The reader may be stateful;
    * a new one will be instantiated for each shtream. *)

  val of_string : unit -> string -> initial elem
  (** Make a parser of shtream elements.  The parser may be stateful;
    * a new one will be instantiated for each shtream. *)

  val string_of : unit -> 'a elem -> string
  (** Make a convertor of shtream elements to strings.  The resulting
    * function may be stateful; a new one will be instantiated for
    * shtream output operation. *)
end
(** The output signature of the functor {!AnyShtream.Make}.
* The shtream and coshtream types in the resulting module are
* compatible with other applications of the functor and with {!Shtream}.
*
* When {!AnyShtream.Make} is applied to a structure [Elem] (having
* signature {!ELEM}), the resulting module knows how to write
* shtreams of type ['a Elem.elem Shtream.t] and read shtreams of type
* [Elem.initial Elem.elem Shtream.t]. Functions in the resulting
* module take several optional parameters whose defaults are
* supplied by [Elem]:
* - [?(reader : in_channel -> initial elem)] defaults to
* [Elem.reader ()].
* - [?(parse : string -> initial elem)] defaults to [Elem.of_string ()].
* - [?(show : 'a elem -> string)] defaults to [Elem.string_of ()].
*)
(* NOTE(review): several doc comments below had lost their "(**" openers and
 * contained duplicated lines in the extracted source, which is invalid OCaml;
 * restored here.  All declarations are unchanged. *)
module type S = sig
  (** The result of {!AnyShtream.Make} contains all the type-indifferent
    * shtream operations from {!Shtream}. *)
  include Shtream.COMMON
    with type 'a t = 'a Shtream.t
     and type 'a co_t = 'a Shtream.co_t

  (** Access to the underlying element type and operations. *)
  module Elem : ELEM

  type 'a elem = 'a Elem.elem
  (** Alias for {!ELEM.elem} *)

  type initial = Elem.initial
  (** Alias for {!ELEM.initial} *)

  (** Construct an [initial elem] reader from a record reader.
    * Functions such as {!of_channel} and {!of_program} take a function
    * of the type returned here.
    *)
  val elem_reader : Reader.t -> (in_channel -> initial elem)

  (** Write the entire contents of a shtream on a channel.
    * For each element [x] of the shtream, it prints [init x], then
    * [show x], and then [term x] on the channel, and then flushes the
    * channel.
    * @param channel default = [stdout]
    * @param init default = [fun _ -> ""]
    * @param show default = [Elem.string_of ()]
    * @param term default = [fun _ -> "\n"]
    *)
  val output : ?channel:out_channel ->
               ?init:('a elem -> string) ->
               ?term:('a elem -> string) ->
               ?show:('a elem -> string) ->
               'a elem t ->
               unit

  (** Construct an [in_channel] from the data in a
    * shtream.  If forking a child is necessary (see
    * {!Shtream.channel_of}), then the optional
    * parameter [?before] (resp. [?after]) is called in the child
    * before (resp. after) printing the shtream; anything printed on
    * [stdout] by [?before] ([?after]) appears in the resultant
    * [in_channel] before (after) the shtream data.
    *
    * The remaining arguments are as for {!output}.
    *)
  val channel_of : ?procref:Channel.procref ->
                   ?before:(unit -> unit) ->
                   ?after:(unit -> unit) ->
                   ?init:('a elem -> string) ->
                   ?term:('a elem -> string) ->
                   ?show:('a elem -> string) ->
                   'a elem t -> in_channel

  (** Convert a shtream to a list of strings, using [?show]. *)
  val string_list_of : ?show:('a elem -> string) ->
                       'a elem t -> string list

  (** Convert a shtream to a {i standard library} [Stream.t] of
    * strings, using [?show]. *)
  val string_stream_of : ?show:('a elem -> string) ->
                         'a elem t -> string Stream.t

  (** Read a shtream from a channel, using [?reader]. *)
  val of_channel : ?reader:(in_channel -> initial elem) ->
                   in_channel -> initial elem t

  (** Read a shtream from a file, using [?reader]. *)
  val of_file : ?reader:(in_channel -> initial elem) ->
                string -> initial elem t

  (** Read a shtream from the output of a command, using [?reader].
    * If [?procref] is given, stash the {!Proc.t}; if [?dups]
    * is given, perform the dups in the child process. *)
  val of_command : ?procref:Channel.procref ->
                   ?dups:Channel.dup_spec ->
                   ?reader:(in_channel -> initial elem) ->
                   string ->
                   initial elem t

  (** Read a shtream from the output of a process, using [?reader].
    * If [?procref] is given, stash the {!Proc.t}; if [?dups]
    * is given, perform the dups in the child process. *)
  val of_program : ?procref:Channel.procref ->
                   ?dups:Channel.dup_spec ->
                   ?reader:(in_channel -> initial elem) ->
                   ?path:bool -> string -> ?argv0:string -> string list ->
                   initial elem t

  (** Read a shtream from the output of a thunk, using [?reader].
    * If [?procref] is given, stash the {!Proc.t}; if [?dups]
    * is given, perform the dups in the child process. *)
  val of_thunk : ?procref:Channel.procref ->
                 ?dups:Channel.dup_spec ->
                 ?reader:(in_channel -> initial elem) ->
                 (unit -> unit) ->
                 initial elem t

  (** Construct a shtream from a list of strings, using [?parse]. *)
  val of_string_list : ?parse:(string -> initial elem) ->
                       string list -> initial elem t

  (** Construct a shtream from a {i standard
    * library} [Stream.t] of strings, using [?parse]. *)
  val of_string_stream : ?parse:(string -> initial elem) ->
                         string Stream.t -> initial elem t
end
(** Build a new shtream module.  The {!ELEM} parameter [E] specifies how
  * to read and print shtream elements. *)
module Make(E : ELEM) : S with module Elem = E
| null | https://raw.githubusercontent.com/tov/shcaml/43ae852a00e3a11520f90f2451baa71863409774/lib/anyShtream.mli | ocaml | *
* Functor to create type-aware shtream modules. The base shtream
* module {!Shtream} is indifferent to the element type. The functor
* {!AnyShtream.Make}, on the other hand, produces a module with shtream
* functions that know how read shtream from and write shtreams to
* channels without a user-supplied reader or printer function.
*
* Modules {!LineShtream} and {!StringShtream} are both created using
* this functor, though some values in {!LineShtream} are specialized
* further.
* The input signature of the functor {!AnyShtream.Make}.
* The element type may be polymorphic, in which case the conversion
* of elements to strings must handle any element. The conversion
* from strings (or reading from channels) is monomorphic, returning
* shtream elements of a particular type.
* The element type for the resulting shtream module. This type is
* parameterized so that a shtream module might handle a family of
* types. The function {!string_of} needs handle ['a elem] for any
* ['a].
* Make a convertor of shtream elements to strings. The resulting
* function may be stateful; a new one will be instantiated for
* shtream output operation.
* The output signature of the functor {!AnyShtream.Make}.
* The shtream and coshtream types in the resulting module are
* compatible with other applications of the functor and with {!Shtream}.
*
* When {!AnyShtream.Make} is applied to a structure [Elem] (having
* signature {!ELEM}), the resulting module knows how to write
* shtreams of type ['a Elem.elem Shtream.t] and read shtreams of type
* [Elem.initial Elem.elem Shtream.t]. Functions in the resulting
* module take several optional parameters whose defaults are
* supplied by [Elem]:
* - [?(reader : in_channel -> initial elem)] defaults to
* [Elem.reader ()].
* - [?(parse : string -> initial elem)] defaults to [Elem.of_string ()].
* - [?(show : 'a elem -> string)] defaults to [Elem.string_of ()].
* The result of {!AnyShtream.Make} contains all the type-indifferent
* shtream operations from {!Shtream}.
* Access to the underlying element type and operations.
* Construct an [initial elem] reader from a record reader.
* Functions such as {!of_channel} and {!of_program} take a function
* of the type returned here.
* Write the entire contents of a shtream on a channel.
* For each element [x] of the shtream, it prints [init x], then
* [show x], and then [term x] on the channel, and then flushes the
* channel.
* @param channel default = [stdout]
* @param init default = [fun _ -> ""]
* @param show default = [Elem.string_of ()]
* @param term default = [fun _ -> "\n"]
* Construct an [in_channel] from the data in a
* shtream. If forking a child is necessary (see
* {!Shtream.channel_of}), then the optional
* parameter [?before] (resp. [?after]) is called in the child
* before (resp. after) printing the shtream; anything printed on
* [stdout] by [?before] ([?after]) appears in the resultant
* [in_channel] before (after) the shtream data.
*
* The remaining arguments are as for {!output}.
* Convert a shtream to a list of strings, using [?show].
* Convert a shtream to a {i standard library} [Stream.t] of
* strings, using [?show].
* Read a shtream from a channel, using [?reader].
* Read a shtream from a file, using [?reader].
* Construct a shtream from a list of strings, using [?parse].
* Construct a shtream from a {i standard
* library} [Stream.t] of strings, using [?parse].
* Build a new shtream module. The {!ELEM}
* parameter {!E} specifies how to read and print shtream elements. | vim : set
module type ELEM = sig
type 'a elem
type initial
* The parameter to { ! elem } for values returned by conversions from
* strings . That is , [ initial elem ] is the type of shtream elements when
* first read from a string or channel .
* strings. That is, [initial elem] is the type of shtream elements when
* first read from a string or channel. *)
val reader : unit -> in_channel -> initial elem
* Make a reader of shtream elements . The reader may be stateful ;
* a new one will be instantiated for each shtream .
* a new one will be instantiated for each shtream. *)
val of_string : unit -> string -> initial elem
* Make a parser of shtream elements . The parser may be stateful ;
* a new one will be instantiated for each shtream .
* a new one will be instantiated for each shtream. *)
val string_of : unit -> 'a elem -> string
end
module type S = sig
include Shtream.COMMON
with type 'a t = 'a Shtream.t
and type 'a co_t = 'a Shtream.co_t
module Elem : ELEM
type 'a elem = 'a Elem.elem
* for { ! ELEM.elem }
type initial = Elem.initial
* for { ! ELEM.initial }
val elem_reader : Reader.t -> (in_channel -> initial elem)
val output : ?channel:out_channel ->
?init:('a elem -> string) ->
?term:('a elem -> string) ->
?show:('a elem -> string) ->
'a elem t ->
unit
val channel_of : ?procref:Channel.procref ->
?before:(unit -> unit) ->
?after:(unit -> unit) ->
?init:('a elem -> string) ->
?term:('a elem -> string) ->
?show:('a elem -> string) ->
'a elem t -> in_channel
val string_list_of : ?show:('a elem -> string) ->
'a elem t -> string list
val string_stream_of : ?show:('a elem -> string) ->
'a elem t -> string Stream.t
val of_channel : ?reader:(in_channel -> initial elem) ->
in_channel -> initial elem t
val of_file : ?reader:(in_channel -> initial elem) ->
string -> initial elem t
* Read a shtream from the output of a command , using [ ? reader ] .
* If [ ? procref ] is given , stash the { ! } ; if [ ? dups ]
* is given , perform the dups in the child process .
* If [?procref] is given, stash the {!Proc.t}; if [?dups]
* is given, perform the dups in the child process. *)
val of_command : ?procref:Channel.procref ->
?dups:Channel.dup_spec ->
?reader:(in_channel -> initial elem) ->
string ->
initial elem t
* Read a shtream from the output of a process , using [ ? reader ] .
* If [ ? procref ] is given , stash the { ! } ; if [ ? dups ]
* is given , perform the dups in the child process .
* If [?procref] is given, stash the {!Proc.t}; if [?dups]
* is given, perform the dups in the child process. *)
val of_program : ?procref:Channel.procref ->
?dups:Channel.dup_spec ->
?reader:(in_channel -> initial elem) ->
?path:bool -> string -> ?argv0:string -> string list ->
initial elem t
* Read a shtream from the output of a thunk , using [ ? reader ] .
* If [ ? procref ] is given , stash the { ! } ; if [ ? dups ]
* is given , perform the dups in the child process .
* If [?procref] is given, stash the {!Proc.t}; if [?dups]
* is given, perform the dups in the child process. *)
val of_thunk : ?procref:Channel.procref ->
?dups:Channel.dup_spec ->
?reader:(in_channel -> initial elem) ->
(unit -> unit) ->
initial elem t
val of_string_list : ?parse:(string -> initial elem) ->
string list -> initial elem t
val of_string_stream : ?parse:(string -> initial elem) ->
string Stream.t -> initial elem t
end
module Make(E : ELEM) : S with module Elem = E
|
39c729dbba0a972bfdfca25be04b802c00c116c53ff777343a3fae0d71827098 | tari3x/csec-modex | simplify1.mli | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* *
* *
* Copyright ( C ) ENS , CNRS , INRIA , 2005 - 2011 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* Bruno Blanchet *
* *
* Copyright (C) ENS, CNRS, INRIA, 2005-2011 *
* *
*************************************************************)
Copyright ENS , CNRS , INRIA
contributor : ,
This software is a computer program whose purpose is to verify
cryptographic protocols in the computational model .
This software is governed by the CeCILL - B license under French law and
abiding by the rules of distribution of free software . You can use ,
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " .
As a counterpart to the access to the source code and rights to copy ,
modify and redistribute granted by the license , users are provided only
with a limited warranty and the software 's author , the holder of the
economic rights , and the successive licensors have only limited
liability .
In this respect , the user 's attention is drawn to the risks associated
with loading , using , modifying and/or developing or reproducing the
software by the user in light of its specific status of free software ,
that may mean that it is complicated to manipulate , and that also
therefore means that it is reserved for developers and experienced
professionals having in - depth computer knowledge . Users are therefore
encouraged to load and test the software 's suitability as regards their
requirements in conditions enabling the security of their systems and/or
data to be ensured and , more generally , to use and operate it in the
same conditions as regards security .
The fact that you are presently reading this means that you have had
knowledge of the CeCILL - B license and that you accept its terms .
Copyright ENS, CNRS, INRIA
contributor: Bruno Blanchet,
This software is a computer program whose purpose is to verify
cryptographic protocols in the computational model.
This software is governed by the CeCILL-B license under French law and
abiding by the rules of distribution of free software. You can use,
modify and/ or redistribute the software under the terms of the CeCILL-B
license as circulated by CEA, CNRS and INRIA at the following URL
"".
As a counterpart to the access to the source code and rights to copy,
modify and redistribute granted by the license, users are provided only
with a limited warranty and the software's author, the holder of the
economic rights, and the successive licensors have only limited
liability.
In this respect, the user's attention is drawn to the risks associated
with loading, using, modifying and/or developing or reproducing the
software by the user in light of its specific status of free software,
that may mean that it is complicated to manipulate, and that also
therefore means that it is reserved for developers and experienced
professionals having in-depth computer knowledge. Users are therefore
encouraged to load and test the software's suitability as regards their
requirements in conditions enabling the security of their systems and/or
data to be ensured and, more generally, to use and operate it in the
same conditions as regards security.
The fact that you are presently reading this means that you have had
knowledge of the CeCILL-B license and that you accept its terms.
*)
open Types
val repl_index_list : (term * binder) list ref
val new_repl_index : binder -> binder
val map_find_indices : binder list -> (binder * term) list
val get_binder : term -> binder
val true_facts_from_simp_facts : simp_facts -> term list
val term_collisions :
(term list * binder list * binder list * binder list * term * term * binder *
term list option * typet list) list ref
val any_term_pat : pattern -> term
val matches_pair : term -> term -> term -> term -> bool
val eq_terms3 : term -> term -> bool
val get_index_size : binder -> int
val filter_indices_coll : term list -> binder list -> binder list -> binder list
val add_term_collisions :
binder list * (binder * term) list * term list -> term -> term ->
binder -> term list option -> typet list -> bool
val proba_for_term_collision :
'a * 'b * 'c * binder list * term * term * binder *
term list option * typet list -> probaf
val final_add_proba : unit -> setf list
module FindCompos :
sig
type status = Compos | Decompos | Any
type charac_type =
CharacType of typet
| CharacTypeOfVar of binder
type 'a depinfo = (binder * (status * 'a)) list option * term list
val init_elem : 'a depinfo
val depends : binder * 'a depinfo -> term -> bool
val is_indep : binder * 'a depinfo -> term -> term
val remove_dep_array_index : binder * 'a depinfo -> term -> term
val remove_array_index : term -> term
val find_compos : (binder * (status * 'a) ->
term list -> (status * charac_type) option) -> 'a depinfo ->
binder * (status * 'a) -> term -> (status * charac_type * term) option
val find_compos_list :
(binder * (status * 'a) -> term list -> (status * charac_type) option) ->
'a depinfo -> (binder * (status * 'a)) list -> term ->
(status * charac_type * term * binder * 'a) option
end
val filter_def_list :
binder list -> binderref list -> binderref list -> binderref list
val remove_subterms : binderref list -> binderref list -> binderref list
exception SuccessBranch of (binder * term) list * binder list
val branch_succeeds : binder list * binderref list * term * 'b ->
dep_anal -> simp_facts -> binderref list -> unit
val add_elsefind : dep_anal -> binderref list -> simp_facts ->
(binder list * binderref list * term * 'a) list ->
term list * term list * elsefind_fact list
val filter_elsefind : (elsefind_fact -> bool) -> simp_facts -> simp_facts
val convert_elsefind : dep_anal -> binderref list ->
term list * term list * elsefind_fact list -> simp_facts
val try_no_var_rec : simp_facts -> term -> term
val debug_find_unique : bool ref
val is_find_unique : binder list -> fact_info -> simp_facts ->
(binder list * binderref list * term * 'a) list -> bool
| null | https://raw.githubusercontent.com/tari3x/csec-modex/5ab2aa18ef308b4d18ac479e5ab14476328a6a50/deps/cryptoverif1.12/src/simplify1.mli | ocaml | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* *
* *
* Copyright ( C ) ENS , CNRS , INRIA , 2005 - 2011 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* Bruno Blanchet *
* *
* Copyright (C) ENS, CNRS, INRIA, 2005-2011 *
* *
*************************************************************)
Copyright ENS , CNRS , INRIA
contributor : ,
This software is a computer program whose purpose is to verify
cryptographic protocols in the computational model .
This software is governed by the CeCILL - B license under French law and
abiding by the rules of distribution of free software . You can use ,
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " .
As a counterpart to the access to the source code and rights to copy ,
modify and redistribute granted by the license , users are provided only
with a limited warranty and the software 's author , the holder of the
economic rights , and the successive licensors have only limited
liability .
In this respect , the user 's attention is drawn to the risks associated
with loading , using , modifying and/or developing or reproducing the
software by the user in light of its specific status of free software ,
that may mean that it is complicated to manipulate , and that also
therefore means that it is reserved for developers and experienced
professionals having in - depth computer knowledge . Users are therefore
encouraged to load and test the software 's suitability as regards their
requirements in conditions enabling the security of their systems and/or
data to be ensured and , more generally , to use and operate it in the
same conditions as regards security .
The fact that you are presently reading this means that you have had
knowledge of the CeCILL - B license and that you accept its terms .
Copyright ENS, CNRS, INRIA
contributor: Bruno Blanchet,
This software is a computer program whose purpose is to verify
cryptographic protocols in the computational model.
This software is governed by the CeCILL-B license under French law and
abiding by the rules of distribution of free software. You can use,
modify and/ or redistribute the software under the terms of the CeCILL-B
license as circulated by CEA, CNRS and INRIA at the following URL
"".
As a counterpart to the access to the source code and rights to copy,
modify and redistribute granted by the license, users are provided only
with a limited warranty and the software's author, the holder of the
economic rights, and the successive licensors have only limited
liability.
In this respect, the user's attention is drawn to the risks associated
with loading, using, modifying and/or developing or reproducing the
software by the user in light of its specific status of free software,
that may mean that it is complicated to manipulate, and that also
therefore means that it is reserved for developers and experienced
professionals having in-depth computer knowledge. Users are therefore
encouraged to load and test the software's suitability as regards their
requirements in conditions enabling the security of their systems and/or
data to be ensured and, more generally, to use and operate it in the
same conditions as regards security.
The fact that you are presently reading this means that you have had
knowledge of the CeCILL-B license and that you accept its terms.
*)
open Types
val repl_index_list : (term * binder) list ref
val new_repl_index : binder -> binder
val map_find_indices : binder list -> (binder * term) list
val get_binder : term -> binder
val true_facts_from_simp_facts : simp_facts -> term list
val term_collisions :
(term list * binder list * binder list * binder list * term * term * binder *
term list option * typet list) list ref
val any_term_pat : pattern -> term
val matches_pair : term -> term -> term -> term -> bool
val eq_terms3 : term -> term -> bool
val get_index_size : binder -> int
val filter_indices_coll : term list -> binder list -> binder list -> binder list
val add_term_collisions :
binder list * (binder * term) list * term list -> term -> term ->
binder -> term list option -> typet list -> bool
val proba_for_term_collision :
'a * 'b * 'c * binder list * term * term * binder *
term list option * typet list -> probaf
val final_add_proba : unit -> setf list
module FindCompos :
sig
type status = Compos | Decompos | Any
type charac_type =
CharacType of typet
| CharacTypeOfVar of binder
type 'a depinfo = (binder * (status * 'a)) list option * term list
val init_elem : 'a depinfo
val depends : binder * 'a depinfo -> term -> bool
val is_indep : binder * 'a depinfo -> term -> term
val remove_dep_array_index : binder * 'a depinfo -> term -> term
val remove_array_index : term -> term
val find_compos : (binder * (status * 'a) ->
term list -> (status * charac_type) option) -> 'a depinfo ->
binder * (status * 'a) -> term -> (status * charac_type * term) option
val find_compos_list :
(binder * (status * 'a) -> term list -> (status * charac_type) option) ->
'a depinfo -> (binder * (status * 'a)) list -> term ->
(status * charac_type * term * binder * 'a) option
end
val filter_def_list :
binder list -> binderref list -> binderref list -> binderref list
val remove_subterms : binderref list -> binderref list -> binderref list
exception SuccessBranch of (binder * term) list * binder list
val branch_succeeds : binder list * binderref list * term * 'b ->
dep_anal -> simp_facts -> binderref list -> unit
val add_elsefind : dep_anal -> binderref list -> simp_facts ->
(binder list * binderref list * term * 'a) list ->
term list * term list * elsefind_fact list
val filter_elsefind : (elsefind_fact -> bool) -> simp_facts -> simp_facts
val convert_elsefind : dep_anal -> binderref list ->
term list * term list * elsefind_fact list -> simp_facts
val try_no_var_rec : simp_facts -> term -> term
val debug_find_unique : bool ref
val is_find_unique : binder list -> fact_info -> simp_facts ->
(binder list * binderref list * term * 'a) list -> bool
| |
ed0fd08231c04e560dfcfbb98ecba515bb947fd001b1529032fdb860ff9ddd85 | qkrgud55/ocamlmulti | location.mli | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ I d : location.mli 12800 2012 - 07 - 30 18:59:07Z doligez $
Source code locations ( ranges of positions ) , used in parsetree .
open Format
type t = {
loc_start: Lexing.position;
loc_end: Lexing.position;
loc_ghost: bool;
}
(* Note on the use of Lexing.position in this module.
If [pos_fname = ""], then use [!input_name] instead.
If [pos_lnum = -1], then [pos_bol = 0]. Use [pos_cnum] and
re-parse the file to get the line and character numbers.
Else all fields are correct.
*)
val none : t
(** An arbitrary value of type [t]; describes an empty ghost range. *)
val in_file : string -> t;;
(** Return an empty ghost range located in a given file. *)
val init : Lexing.lexbuf -> string -> unit
(** Set the file name and line number of the [lexbuf] to be the start
of the named file. *)
val curr : Lexing.lexbuf -> t
(** Get the location of the current token from the [lexbuf]. *)
val symbol_rloc: unit -> t
val symbol_gloc: unit -> t
* [ rhs_loc n ] returns the location of the symbol at position [ n ] , starting
at 1 , in the current parser rule .
at 1, in the current parser rule. *)
val rhs_loc: int -> t
val input_name: string ref
val input_lexbuf: Lexing.lexbuf option ref
val get_pos_info: Lexing.position -> string * int * int (* file, line, char *)
val print_loc: formatter -> t -> unit
val print_error: formatter -> t -> unit
val print_error_cur_file: formatter -> unit
val print_warning: t -> formatter -> Warnings.t -> unit
val prerr_warning: t -> Warnings.t -> unit
val echo_eof: unit -> unit
val reset: unit -> unit
val highlight_locations: formatter -> t -> t -> bool
type 'a loc = {
txt : 'a;
loc : t;
}
val mknoloc : 'a -> 'a loc
val mkloc : 'a -> t -> 'a loc
val print: formatter -> t -> unit
val print_filename: formatter -> string -> unit
val show_filename: string -> string
(** In -absname mode, return the absolute path for this filename.
Otherwise, returns the filename unchanged. *)
val absname: bool ref
| null | https://raw.githubusercontent.com/qkrgud55/ocamlmulti/74fe84df0ce7be5ee03fb4ac0520fb3e9f4b6d1f/parsing/location.mli | ocaml | *********************************************************************
OCaml
*********************************************************************
Note on the use of Lexing.position in this module.
If [pos_fname = ""], then use [!input_name] instead.
If [pos_lnum = -1], then [pos_bol = 0]. Use [pos_cnum] and
re-parse the file to get the line and character numbers.
Else all fields are correct.
* An arbitrary value of type [t]; describes an empty ghost range.
* Return an empty ghost range located in a given file.
* Set the file name and line number of the [lexbuf] to be the start
of the named file.
* Get the location of the current token from the [lexbuf].
file, line, char
* In -absname mode, return the absolute path for this filename.
Otherwise, returns the filename unchanged. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : location.mli 12800 2012 - 07 - 30 18:59:07Z doligez $
Source code locations ( ranges of positions ) , used in parsetree .
open Format
type t = {
loc_start: Lexing.position;
loc_end: Lexing.position;
loc_ghost: bool;
}
val none : t
val in_file : string -> t;;
val init : Lexing.lexbuf -> string -> unit
val curr : Lexing.lexbuf -> t
val symbol_rloc: unit -> t
val symbol_gloc: unit -> t
* [ rhs_loc n ] returns the location of the symbol at position [ n ] , starting
at 1 , in the current parser rule .
at 1, in the current parser rule. *)
val rhs_loc: int -> t
val input_name: string ref
val input_lexbuf: Lexing.lexbuf option ref
val print_loc: formatter -> t -> unit
val print_error: formatter -> t -> unit
val print_error_cur_file: formatter -> unit
val print_warning: t -> formatter -> Warnings.t -> unit
val prerr_warning: t -> Warnings.t -> unit
val echo_eof: unit -> unit
val reset: unit -> unit
val highlight_locations: formatter -> t -> t -> bool
type 'a loc = {
txt : 'a;
loc : t;
}
val mknoloc : 'a -> 'a loc
val mkloc : 'a -> t -> 'a loc
val print: formatter -> t -> unit
val print_filename: formatter -> string -> unit
val show_filename: string -> string
val absname: bool ref
|
6a8ff316ff05755f08695e4f0e03524b125852eae2cb5221d43488e4a2ff1c68 | tezos/tezos-mirror | main_dac.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2022 Nomadic Labs , < >
Copyright ( c ) 2023 TriliTech , < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
type error += Invalid_positive_int_parameter of string
let () =
register_error_kind
`Permanent
~id:"dac.node.dac.invalid_positive_int_parameter"
~title:"Argument is not a positive integer"
~description:"Argument must be a positive integer"
~pp:(fun ppf reveal_data_path ->
Format.fprintf
ppf
"Expected a valid positive integer, provided %s instead"
reveal_data_path)
Data_encoding.(obj1 (req "arg" string))
(function Invalid_positive_int_parameter s -> Some s | _ -> None)
(fun s -> Invalid_positive_int_parameter s)
let group =
{Tezos_clic.name = "dac-node"; title = "Commands related to the DAC node"}
let data_dir_arg =
let default = Configuration.default_data_dir in
Tezos_clic.default_arg
~long:"data-dir"
~placeholder:"data-dir"
~doc:
(Format.sprintf
"The path to the DAC node data directory. Default value is %s"
default)
~default
(Client_config.string_parameter ())
let reveal_data_dir_arg =
let default = Configuration.default_reveal_data_dir in
Tezos_clic.default_arg
~long:"reveal-data-dir"
~placeholder:"reveal-data-dir"
~doc:"The directory where reveal preimage pages are saved."
~default
(Client_config.string_parameter ())
let tz4_address_parameter =
Tezos_clic.parameter (fun _cctxt s ->
let open Lwt_result_syntax in
let*? bls_pkh = Signature.Bls.Public_key_hash.of_b58check s in
let pkh : Tezos_crypto.Aggregate_signature.public_key_hash =
Tezos_crypto.Aggregate_signature.Bls12_381 bls_pkh
in
return pkh)
let tz4_address_param ?(name = "public key hash")
?(desc = "bls public key hash to use") =
let desc = String.concat "\n" [desc; "A tz4 address"] in
Tezos_clic.param ~name ~desc tz4_address_parameter
let positive_int_parameter =
Tezos_clic.parameter (fun _cctxt p ->
let open Lwt_result_syntax in
let* i =
try Lwt.return_ok (int_of_string p)
with _ -> tzfail @@ Invalid_positive_int_parameter p
in
if i < 0 then tzfail @@ Invalid_positive_int_parameter p else return i)
let threshold_param ?(name = "DAC threshold parameter")
?(desc =
"Number of DAC member signatures required to validate a root page hash") =
Tezos_clic.param ~name ~desc positive_int_parameter
let rpc_address_arg =
let default = Configuration.default_rpc_address in
Tezos_clic.default_arg
~long:"rpc-addr"
~placeholder:"rpc-address|ip"
~doc:
(Format.sprintf
"The address the DAC node listens to. Default value is %s"
default)
~default
(Client_config.string_parameter ())
let rpc_port_arg =
let default = Configuration.default_rpc_port |> string_of_int in
Tezos_clic.default_arg
~long:"rpc-port"
~placeholder:"rpc-port"
~doc:
(Format.sprintf
"The port the DAC node listens to. Default value is %s"
default)
~default
positive_int_parameter
let coordinator_rpc_parameter =
Tezos_clic.parameter (fun _cctxt h ->
match String.split ':' h with
| [host_name; port] -> (
try Lwt.return_ok (host_name, int_of_string port)
with _ -> failwith "Address not in format <rpc_address>:<rpc_port>")
| _ -> failwith "Address not in format <rpc_address>:<rpc_port>")
let coordinator_rpc_param ?(name = "DAC coordinator rpc address parameter")
?(desc = "The address of the DAC coordinator") =
let desc =
String.concat "\n" [desc; "An address of the form <rpc_address>:<rpc_port>"]
in
Tezos_clic.param ~name ~desc coordinator_rpc_parameter
module Config_init = struct
let create_configuration ~data_dir ~reveal_data_dir ~rpc_address ~rpc_port
mode (cctxt : Client_context.full) =
let open Lwt_result_syntax in
let open Configuration in
let config = {data_dir; rpc_address; rpc_port; reveal_data_dir; mode} in
let* () = save config in
let*! _ =
cctxt#message "DAC node configuration written in %s" (filename config)
in
return ()
let legacy_command =
let open Tezos_clic in
command
~group
~desc:"Configure DAC node in legacy mode."
(args4 data_dir_arg rpc_address_arg rpc_port_arg reveal_data_dir_arg)
(prefixes ["configure"; "as"; "legacy"; "with"; "threshold"]
@@ threshold_param
@@ prefixes ["and"; "data"; "availability"; "committee"; "members"]
@@ seq_of_param @@ tz4_address_param)
(fun (data_dir, rpc_address, rpc_port, reveal_data_dir)
threshold
dac_members_addresses
cctxt ->
create_configuration
~data_dir
~reveal_data_dir
~rpc_address
~rpc_port
(Configuration.Legacy
{threshold; dac_members_addresses; dac_cctxt_config = None})
cctxt)
let coordinator_command =
let open Tezos_clic in
command
~group
~desc:"Configure DAC node in coordinator mode."
(args4 data_dir_arg rpc_address_arg rpc_port_arg reveal_data_dir_arg)
(prefixes ["configure"; "as"; "coordinator"; "with"; "threshold"]
@@ threshold_param
@@ prefixes ["and"; "data"; "availability"; "committee"; "members"]
@@ seq_of_param @@ tz4_address_param)
(fun (data_dir, rpc_address, rpc_port, reveal_data_dir)
threshold
dac_members_addresses
cctxt ->
create_configuration
~data_dir
~reveal_data_dir
~rpc_address
~rpc_port
(Coordinator {threshold; dac_members_addresses})
cctxt)
let dac_member_command =
let open Tezos_clic in
command
~group
~desc:"Configure DAC node in committee member mode."
(args4 data_dir_arg rpc_address_arg rpc_port_arg reveal_data_dir_arg)
(prefixes
["configure"; "as"; "commmittee"; "member"; "with"; "coordinator"]
@@ coordinator_rpc_param
@@ prefixes ["and"; "signer"]
@@ tz4_address_param @@ stop)
(fun (data_dir, rpc_address, rpc_port, reveal_data_dir)
(coordinator_rpc_address, coordinator_rpc_port)
address
cctxt ->
create_configuration
~data_dir
~reveal_data_dir
~rpc_address
~rpc_port
(Dac_member {coordinator_rpc_address; coordinator_rpc_port; address})
cctxt)
let observer_command =
let open Tezos_clic in
command
~group
~desc:"Configure DAC node in observer mode."
(args4 data_dir_arg rpc_address_arg rpc_port_arg reveal_data_dir_arg)
(prefixes ["configure"; "as"; "observer"; "with"; "coordinator"]
@@ coordinator_rpc_param @@ stop)
(fun (data_dir, rpc_address, rpc_port, reveal_data_dir)
(coordinator_rpc_address, coordinator_rpc_port)
cctxt ->
create_configuration
~data_dir
~reveal_data_dir
~rpc_address
~rpc_port
(Observer {coordinator_rpc_address; coordinator_rpc_port})
cctxt)
let commands =
[legacy_command; coordinator_command; dac_member_command; observer_command]
end
let run_command =
let open Tezos_clic in
command
~group
~desc:"Run the DAC node."
(args1 data_dir_arg)
(prefixes ["run"] @@ stop)
(fun data_dir cctxt -> Daemon.run ~data_dir cctxt)
let commands () = [run_command] @ Config_init.commands
let select_commands _ _ =
let open Lwt_result_syntax in
return (commands ())
let () = Client_main_run.run (module Client_config) ~select_commands
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/e80cd05842991197ea613a937434a9cd43026b04/src/bin_dac_node/main_dac.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*************************************************************************** | Copyright ( c ) 2022 Nomadic Labs , < >
Copyright ( c ) 2023 TriliTech , < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
type error += Invalid_positive_int_parameter of string
let () =
register_error_kind
`Permanent
~id:"dac.node.dac.invalid_positive_int_parameter"
~title:"Argument is not a positive integer"
~description:"Argument must be a positive integer"
~pp:(fun ppf reveal_data_path ->
Format.fprintf
ppf
"Expected a valid positive integer, provided %s instead"
reveal_data_path)
Data_encoding.(obj1 (req "arg" string))
(function Invalid_positive_int_parameter s -> Some s | _ -> None)
(fun s -> Invalid_positive_int_parameter s)
let group =
{Tezos_clic.name = "dac-node"; title = "Commands related to the DAC node"}
let data_dir_arg =
let default = Configuration.default_data_dir in
Tezos_clic.default_arg
~long:"data-dir"
~placeholder:"data-dir"
~doc:
(Format.sprintf
"The path to the DAC node data directory. Default value is %s"
default)
~default
(Client_config.string_parameter ())
let reveal_data_dir_arg =
let default = Configuration.default_reveal_data_dir in
Tezos_clic.default_arg
~long:"reveal-data-dir"
~placeholder:"reveal-data-dir"
~doc:"The directory where reveal preimage pages are saved."
~default
(Client_config.string_parameter ())
let tz4_address_parameter =
Tezos_clic.parameter (fun _cctxt s ->
let open Lwt_result_syntax in
let*? bls_pkh = Signature.Bls.Public_key_hash.of_b58check s in
let pkh : Tezos_crypto.Aggregate_signature.public_key_hash =
Tezos_crypto.Aggregate_signature.Bls12_381 bls_pkh
in
return pkh)
let tz4_address_param ?(name = "public key hash")
?(desc = "bls public key hash to use") =
let desc = String.concat "\n" [desc; "A tz4 address"] in
Tezos_clic.param ~name ~desc tz4_address_parameter
let positive_int_parameter =
Tezos_clic.parameter (fun _cctxt p ->
let open Lwt_result_syntax in
let* i =
try Lwt.return_ok (int_of_string p)
with _ -> tzfail @@ Invalid_positive_int_parameter p
in
if i < 0 then tzfail @@ Invalid_positive_int_parameter p else return i)
let threshold_param ?(name = "DAC threshold parameter")
?(desc =
"Number of DAC member signatures required to validate a root page hash") =
Tezos_clic.param ~name ~desc positive_int_parameter
let rpc_address_arg =
let default = Configuration.default_rpc_address in
Tezos_clic.default_arg
~long:"rpc-addr"
~placeholder:"rpc-address|ip"
~doc:
(Format.sprintf
"The address the DAC node listens to. Default value is %s"
default)
~default
(Client_config.string_parameter ())
let rpc_port_arg =
let default = Configuration.default_rpc_port |> string_of_int in
Tezos_clic.default_arg
~long:"rpc-port"
~placeholder:"rpc-port"
~doc:
(Format.sprintf
"The port the DAC node listens to. Default value is %s"
default)
~default
positive_int_parameter
let coordinator_rpc_parameter =
Tezos_clic.parameter (fun _cctxt h ->
match String.split ':' h with
| [host_name; port] -> (
try Lwt.return_ok (host_name, int_of_string port)
with _ -> failwith "Address not in format <rpc_address>:<rpc_port>")
| _ -> failwith "Address not in format <rpc_address>:<rpc_port>")
let coordinator_rpc_param ?(name = "DAC coordinator rpc address parameter")
?(desc = "The address of the DAC coordinator") =
let desc =
String.concat "\n" [desc; "An address of the form <rpc_address>:<rpc_port>"]
in
Tezos_clic.param ~name ~desc coordinator_rpc_parameter
module Config_init = struct
let create_configuration ~data_dir ~reveal_data_dir ~rpc_address ~rpc_port
mode (cctxt : Client_context.full) =
let open Lwt_result_syntax in
let open Configuration in
let config = {data_dir; rpc_address; rpc_port; reveal_data_dir; mode} in
let* () = save config in
let*! _ =
cctxt#message "DAC node configuration written in %s" (filename config)
in
return ()
let legacy_command =
let open Tezos_clic in
command
~group
~desc:"Configure DAC node in legacy mode."
(args4 data_dir_arg rpc_address_arg rpc_port_arg reveal_data_dir_arg)
(prefixes ["configure"; "as"; "legacy"; "with"; "threshold"]
@@ threshold_param
@@ prefixes ["and"; "data"; "availability"; "committee"; "members"]
@@ seq_of_param @@ tz4_address_param)
(fun (data_dir, rpc_address, rpc_port, reveal_data_dir)
threshold
dac_members_addresses
cctxt ->
create_configuration
~data_dir
~reveal_data_dir
~rpc_address
~rpc_port
(Configuration.Legacy
{threshold; dac_members_addresses; dac_cctxt_config = None})
cctxt)
let coordinator_command =
let open Tezos_clic in
command
~group
~desc:"Configure DAC node in coordinator mode."
(args4 data_dir_arg rpc_address_arg rpc_port_arg reveal_data_dir_arg)
(prefixes ["configure"; "as"; "coordinator"; "with"; "threshold"]
@@ threshold_param
@@ prefixes ["and"; "data"; "availability"; "committee"; "members"]
@@ seq_of_param @@ tz4_address_param)
(fun (data_dir, rpc_address, rpc_port, reveal_data_dir)
threshold
dac_members_addresses
cctxt ->
create_configuration
~data_dir
~reveal_data_dir
~rpc_address
~rpc_port
(Coordinator {threshold; dac_members_addresses})
cctxt)
let dac_member_command =
let open Tezos_clic in
command
~group
~desc:"Configure DAC node in committee member mode."
(args4 data_dir_arg rpc_address_arg rpc_port_arg reveal_data_dir_arg)
(prefixes
["configure"; "as"; "commmittee"; "member"; "with"; "coordinator"]
@@ coordinator_rpc_param
@@ prefixes ["and"; "signer"]
@@ tz4_address_param @@ stop)
(fun (data_dir, rpc_address, rpc_port, reveal_data_dir)
(coordinator_rpc_address, coordinator_rpc_port)
address
cctxt ->
create_configuration
~data_dir
~reveal_data_dir
~rpc_address
~rpc_port
(Dac_member {coordinator_rpc_address; coordinator_rpc_port; address})
cctxt)
let observer_command =
let open Tezos_clic in
command
~group
~desc:"Configure DAC node in observer mode."
(args4 data_dir_arg rpc_address_arg rpc_port_arg reveal_data_dir_arg)
(prefixes ["configure"; "as"; "observer"; "with"; "coordinator"]
@@ coordinator_rpc_param @@ stop)
(fun (data_dir, rpc_address, rpc_port, reveal_data_dir)
(coordinator_rpc_address, coordinator_rpc_port)
cctxt ->
create_configuration
~data_dir
~reveal_data_dir
~rpc_address
~rpc_port
(Observer {coordinator_rpc_address; coordinator_rpc_port})
cctxt)
let commands =
[legacy_command; coordinator_command; dac_member_command; observer_command]
end
let run_command =
let open Tezos_clic in
command
~group
~desc:"Run the DAC node."
(args1 data_dir_arg)
(prefixes ["run"] @@ stop)
(fun data_dir cctxt -> Daemon.run ~data_dir cctxt)
let commands () = [run_command] @ Config_init.commands
let select_commands _ _ =
let open Lwt_result_syntax in
return (commands ())
let () = Client_main_run.run (module Client_config) ~select_commands
|
eb481ffd8b4d413885b431596b733ac756c4d0b17854a690d6d549e35dd0b9e1 | GAumala/TranslateJPBot | JMdictEntryTree.hs | {-# LANGUAGE OverloadedStrings #-}
module Data.JMdictEntryTree (EntryNode,
jmdictEntryTreeFromFile,
lookupWord,
printLookup,
showLookupResult) where
import Text.XML.JMdictParser
import Data.List (foldl')
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as TextIO
import Data.RedBlackTree
data EntryNode = EntryNode {
entryKey :: Text,
mainEntry :: JMdictEntry,
extraEntries :: [JMdictEntry]
} deriving Show
instance Eq EntryNode where
(==) leftNode rightNode = entryKey leftNode == entryKey rightNode
instance Ord EntryNode where
(<=) leftNode rightNode = entryKey leftNode <= entryKey rightNode
instance BinaryTreeNode EntryNode where
mergeNodes (EntryNode key leftEntry extra) (EntryNode _ rightEntry _) =
EntryNode key leftEntry (rightEntry:extra)
showEntryAsText :: JMdictEntry -> Text
showEntryAsText (JMdictEntry _ [] readings meanings) =
T.intercalate "\n" [firstLine, secondLine]
where firstLine = T.intercalate ", " readings
secondLine = T.intercalate ", " meanings
showEntryAsText (JMdictEntry _ kanjis readings meanings) =
T.intercalate "\n" [firstLine, secondLine, thirdLine]
where firstLine = T.intercalate ", " kanjis
secondLine = T.intercalate ", " readings
thirdLine = T.intercalate ", " meanings
showEntryNodeAsText :: EntryNode -> Text
showEntryNodeAsText (EntryNode _ firstEntry extras) =
T.intercalate "\n\n" entries
where entries = map showEntryAsText (firstEntry:extras)
insertEntryToTree :: RedBlackTree EntryNode -> JMdictEntry -> RedBlackTree EntryNode
insertEntryToTree tree entry = foldl' insert tree nodes
where JMdictEntry _ kanjis readings meanings = entry
keys = kanjis ++ readings
createEntryNode newKey = EntryNode newKey entry []
nodes = map createEntryNode keys
jmdictEntryTreeFromFile :: String -> IO (RedBlackTree EntryNode)
jmdictEntryTreeFromFile filename = do
entries <- parseJMdictFile filename
return $ foldl' insertEntryToTree emptyRedBlackTree entries
lookupWord :: RedBlackTree EntryNode -> Text -> Maybe EntryNode
lookupWord tree word = Data.RedBlackTree.find tree targetNode
where targetNode = EntryNode word (JMdictEntry 0 [] [] []) []
showLookupResult :: Text -> Maybe EntryNode -> Text
showLookupResult requestedWord lookupResult =
fromMaybe notFoundMsg lookupResultText
where notFoundMsg = T.concat [ "No entries match \"", requestedWord, "\"" ]
lookupResultText = fmap showEntryNodeAsText lookupResult
printLookup :: RedBlackTree EntryNode -> String -> IO ()
printLookup tree queryString = TextIO.putStrLn queryResult
where queryText = T.pack queryString
queryResult = showLookupResult queryText (lookupWord tree queryText)
| null | https://raw.githubusercontent.com/GAumala/TranslateJPBot/af008f0f3a2a13079982b8de5966a3903194380d/src/Data/JMdictEntryTree.hs | haskell | # LANGUAGE OverloadedStrings # |
module Data.JMdictEntryTree (EntryNode,
jmdictEntryTreeFromFile,
lookupWord,
printLookup,
showLookupResult) where
import Text.XML.JMdictParser
import Data.List (foldl')
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as TextIO
import Data.RedBlackTree
data EntryNode = EntryNode {
entryKey :: Text,
mainEntry :: JMdictEntry,
extraEntries :: [JMdictEntry]
} deriving Show
instance Eq EntryNode where
(==) leftNode rightNode = entryKey leftNode == entryKey rightNode
instance Ord EntryNode where
(<=) leftNode rightNode = entryKey leftNode <= entryKey rightNode
instance BinaryTreeNode EntryNode where
mergeNodes (EntryNode key leftEntry extra) (EntryNode _ rightEntry _) =
EntryNode key leftEntry (rightEntry:extra)
showEntryAsText :: JMdictEntry -> Text
showEntryAsText (JMdictEntry _ [] readings meanings) =
T.intercalate "\n" [firstLine, secondLine]
where firstLine = T.intercalate ", " readings
secondLine = T.intercalate ", " meanings
showEntryAsText (JMdictEntry _ kanjis readings meanings) =
T.intercalate "\n" [firstLine, secondLine, thirdLine]
where firstLine = T.intercalate ", " kanjis
secondLine = T.intercalate ", " readings
thirdLine = T.intercalate ", " meanings
showEntryNodeAsText :: EntryNode -> Text
showEntryNodeAsText (EntryNode _ firstEntry extras) =
T.intercalate "\n\n" entries
where entries = map showEntryAsText (firstEntry:extras)
insertEntryToTree :: RedBlackTree EntryNode -> JMdictEntry -> RedBlackTree EntryNode
insertEntryToTree tree entry = foldl' insert tree nodes
where JMdictEntry _ kanjis readings meanings = entry
keys = kanjis ++ readings
createEntryNode newKey = EntryNode newKey entry []
nodes = map createEntryNode keys
jmdictEntryTreeFromFile :: String -> IO (RedBlackTree EntryNode)
jmdictEntryTreeFromFile filename = do
entries <- parseJMdictFile filename
return $ foldl' insertEntryToTree emptyRedBlackTree entries
lookupWord :: RedBlackTree EntryNode -> Text -> Maybe EntryNode
lookupWord tree word = Data.RedBlackTree.find tree targetNode
where targetNode = EntryNode word (JMdictEntry 0 [] [] []) []
showLookupResult :: Text -> Maybe EntryNode -> Text
showLookupResult requestedWord lookupResult =
fromMaybe notFoundMsg lookupResultText
where notFoundMsg = T.concat [ "No entries match \"", requestedWord, "\"" ]
lookupResultText = fmap showEntryNodeAsText lookupResult
printLookup :: RedBlackTree EntryNode -> String -> IO ()
printLookup tree queryString = TextIO.putStrLn queryResult
where queryText = T.pack queryString
queryResult = showLookupResult queryText (lookupWord tree queryText)
|
c96f83dae19e948060d49deeb3d660905f3d1d06add30ee96c3d8712c7e39940 | BekaValentine/SimpleFP-v2 | Unification.hs | {-# OPTIONS -Wall #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE UndecidableInstances #
-- | This module defines unification of dependent types.
module Require.Unification.Unification where
import Utils.ABT
import Utils.Elaborator
import Utils.Names
import Utils.Pretty
import Utils.Telescope
import Utils.Unifier
import Require.Core.Term
import Require.Unification.Elaborator
import Control.Monad.Except
-- | Equating terms by trivial structural equations.
instance MonadUnify TermF Elaborator where
equate (Defined n1) (Defined n2) =
if n1 == n2
then return []
else throwError $ "Mismatching names "
++ showName n1 ++ " and " ++ showName n2
equate (Ann m1 t1) (Ann m2 t2) =
return [ Equation (instantiate0 m1) (instantiate0 m2)
, Equation (instantiate0 t1) (instantiate0 t2)
]
equate Type Type =
return []
equate (Fun plic1 a1 sc1) (Fun plic2 a2 sc2) =
do unless (plic1 == plic2)
$ throwError $ "Mismatching plicities when unifying "
++ pretty (In (Fun plic1 a1 sc1)) ++ " with "
++ pretty (In (Fun plic2 a2 sc2))
ns <- freshRelTo (names sc1) context
let xs = map (Var . Free) ns
return [ Equation (instantiate0 a1) (instantiate0 a2)
, Equation (instantiate sc1 xs) (instantiate sc2 xs)
]
equate (Lam plic1 sc1) (Lam plic2 sc2) =
do unless (plic1 == plic2)
$ throwError $ "Mismatching plicities when unifying "
++ pretty (In (Lam plic1 sc1)) ++ " with "
++ pretty (In (Lam plic2 sc2))
ns <- freshRelTo (names sc1) context
let xs = map (Var . Free) ns
return [ Equation (instantiate sc1 xs) (instantiate sc2 xs) ]
equate (App plic1 f1 a1) (App plic2 f2 a2) =
do unless (plic1 == plic2)
$ throwError $ "Mismatching plicities when unifying "
++ pretty (In (App plic1 f1 a1)) ++ " with "
++ pretty (In (App plic2 f2 a2))
return [ Equation (instantiate0 f1) (instantiate0 f2)
, Equation (instantiate0 a1) (instantiate0 a2)
]
equate (Con c1 as1) (Con c2 as2) =
do unless (c1 == c2)
$ throwError $ "Mismatching constructors "
++ showName c1 ++ " and " ++ showName c2
unless (length as1 == length as2)
$ throwError $ "Mismatching constructor arg lengths between "
++ pretty (In (Con c1 as1)) ++ " and "
++ pretty (In (Con c2 as1))
let (plics1,as1') = unzip as1
(plics2,as2') = unzip as2
unless (plics1 == plics2)
$ throwError $ "Mismatching plicities when unifying "
++ pretty (In (Con c1 as1)) ++ " with "
++ pretty (In (Con c2 as2))
return $ zipWith
Equation
(map instantiate0 as1')
(map instantiate0 as2')
equate (Case as1 mot1 cs1) (Case as2 mot2 cs2) =
do unless (length as1 == length as2)
$ throwError $ "Mismatching number of case arguments in "
++ pretty (In (Case as1 mot1 cs1)) ++ " and "
++ pretty (In (Case as2 mot2 cs2))
unless (length cs1 == length cs2)
$ throwError $ "Mismatching number of clauses in "
++ pretty (In (Case as1 mot1 cs1)) ++ " and "
++ pretty (In (Case as2 mot2 cs2))
let argEqs = zipWith
Equation
(map instantiate0 as1)
(map instantiate0 as2)
motEqs <- equateCaseMotive mot1 mot2
clauseEqs <- fmap concat $ zipWithM equateClause cs1 cs2
return $ argEqs ++ motEqs ++ clauseEqs
equate (RecordType fields1 tele1) (RecordType fields2 tele2) =
do unless (fields1 == fields2)
$ throwError $ "Record types have different field names: "
++ pretty (In (RecordType fields1 tele1))
++ " and "
++ pretty (In (RecordType fields2 tele2))
ns <- freshRelTo (namesTelescope tele1) context
let xs = map (Var . Free) ns
as1 = instantiateTelescope tele1 xs
as2 = instantiateTelescope tele2 xs
unless (length as1 == length as2)
$ throwError $ "Records have different number of fields: "
++ pretty (In (RecordType fields1 tele1))
++ " and "
++ pretty (In (RecordType fields2 tele2))
return $ zipWith Equation as1 as2
equate (RecordCon fields1) (RecordCon fields2) =
do unless (length fields1 == length fields2)
$ throwError $ "Records have different number of fields: "
++ pretty (In (RecordCon fields1))
++ " and "
++ pretty (In (RecordCon fields2))
let (fs1,ms1) = unzip fields1
(fs2,ms2) = unzip fields2
unless (fs1 == fs2)
$ throwError $ "Records have different field names: "
++ pretty (In (RecordCon fields1))
++ " and "
++ pretty (In (RecordCon fields2))
return $ zipWith
Equation
(map instantiate0 ms1)
(map instantiate0 ms2)
equate (RecordProj r1 x1) (RecordProj r2 x2) =
do unless (x1 == x2)
$ throwError $ "Record projections have different names: "
++ pretty (In (RecordProj r1 x1))
++ " and "
++ pretty (In (RecordProj r2 x2))
return [Equation (instantiate0 r1) (instantiate0 r2)]
equate (QuotedType res1 a1) (QuotedType res2 a2) =
do unless (res1 == res2)
$ throwError $ "Quoted types have different reset locations: "
++ pretty (In (QuotedType res1 a1))
++ " and "
++ pretty (In (QuotedType res2 a2))
return [Equation (instantiate0 a1) (instantiate0 a2)]
equate (Quote m1) (Quote m2) =
return [Equation (instantiate0 m1) (instantiate0 m2)]
equate (Unquote m1) (Unquote m2) =
return [Equation (instantiate0 m1) (instantiate0 m2)]
equate (Continue m1) (Continue m2) =
return [Equation (instantiate0 m1) (instantiate0 m2)]
equate (Shift res1 m1) (Shift res2 m2) =
do unless (res1 == res2)
$ throwError "Shifts have different reset locations."
return [Equation (instantiate0 m1) (instantiate0 m2)]
equate (Reset res1 m1) (Reset res2 m2) =
do unless (res1 == res2)
$ throwError "Resets have different reset locations."
return [Equation (instantiate0 m1) (instantiate0 m2)]
equate (Require a1 sc1) (Require a2 sc2) =
do ns <- freshRelTo (names sc1) context
let xs = map (Var . Free) ns
return [ Equation (instantiate0 a1) (instantiate0 a2)
, Equation (instantiate sc1 xs) (instantiate sc2 xs)
]
equate l r =
throwError $ "Cannot unify " ++ pretty (In l) ++ " with " ++ pretty (In r)
-- | Equating case motives as a special helper for the main 'equate' method.
equateCaseMotive :: CaseMotive -> CaseMotive -> Elaborator [Equation Term]
equateCaseMotive mot1@(CaseMotive tele1) mot2@(CaseMotive tele2) =
do ns <- freshRelTo (namesBindingTelescope tele1) context
let xs = map (Var . Free) ns
(as1, b1) = instantiateBindingTelescope tele1 xs
(as2, b2) = instantiateBindingTelescope tele2 xs
unless (length as1 == length as2)
$ throwError $ "Motives not equal: " ++ pretty mot1 ++ " and "
++ pretty mot2
return $ zipWith Equation as1 as2 ++ [ Equation b1 b2 ]
-- Equating clauses as a special helper for the main 'equate' method.
equateClause :: Clause -> Clause -> Elaborator [Equation Term]
equateClause (Clause pscs1 sc1) (Clause pscs2 sc2) =
do unless (length pscs1 == length pscs2)
$ throwError "Clauses have different numbers of patterns."
unless (length (names sc1) == length (names sc2))
$ throwError "Patterns bind different numbers of arguments."
ns <- freshRelTo (names sc1) context
let xs = map (Var . Free) ns
xs' = map (Var . Free) ns
ps1 = map (\sc -> patternInstantiate sc xs xs') pscs1
ps2 = map (\sc -> patternInstantiate sc xs xs') pscs2
b1 = instantiate sc1 xs'
b2 = instantiate sc2 xs'
case sequence (zipWith zipABTF ps1 ps2) of
Nothing ->
throwError "Patterns are not equal."
Just pEqss ->
return $ [ Equation a1 a2 | (a1,a2) <- concat pEqss ]
++ [ Equation b1 b2 ] | null | https://raw.githubusercontent.com/BekaValentine/SimpleFP-v2/ae00ec809caefcd13664395b0ae2fc66145f6a74/src/Require/Unification/Unification.hs | haskell | # OPTIONS -Wall #
# LANGUAGE TypeSynonymInstances #
| This module defines unification of dependent types.
| Equating terms by trivial structural equations.
| Equating case motives as a special helper for the main 'equate' method.
Equating clauses as a special helper for the main 'equate' method. | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE UndecidableInstances #
module Require.Unification.Unification where
import Utils.ABT
import Utils.Elaborator
import Utils.Names
import Utils.Pretty
import Utils.Telescope
import Utils.Unifier
import Require.Core.Term
import Require.Unification.Elaborator
import Control.Monad.Except
instance MonadUnify TermF Elaborator where
equate (Defined n1) (Defined n2) =
if n1 == n2
then return []
else throwError $ "Mismatching names "
++ showName n1 ++ " and " ++ showName n2
equate (Ann m1 t1) (Ann m2 t2) =
return [ Equation (instantiate0 m1) (instantiate0 m2)
, Equation (instantiate0 t1) (instantiate0 t2)
]
equate Type Type =
return []
equate (Fun plic1 a1 sc1) (Fun plic2 a2 sc2) =
do unless (plic1 == plic2)
$ throwError $ "Mismatching plicities when unifying "
++ pretty (In (Fun plic1 a1 sc1)) ++ " with "
++ pretty (In (Fun plic2 a2 sc2))
ns <- freshRelTo (names sc1) context
let xs = map (Var . Free) ns
return [ Equation (instantiate0 a1) (instantiate0 a2)
, Equation (instantiate sc1 xs) (instantiate sc2 xs)
]
equate (Lam plic1 sc1) (Lam plic2 sc2) =
do unless (plic1 == plic2)
$ throwError $ "Mismatching plicities when unifying "
++ pretty (In (Lam plic1 sc1)) ++ " with "
++ pretty (In (Lam plic2 sc2))
ns <- freshRelTo (names sc1) context
let xs = map (Var . Free) ns
return [ Equation (instantiate sc1 xs) (instantiate sc2 xs) ]
equate (App plic1 f1 a1) (App plic2 f2 a2) =
do unless (plic1 == plic2)
$ throwError $ "Mismatching plicities when unifying "
++ pretty (In (App plic1 f1 a1)) ++ " with "
++ pretty (In (App plic2 f2 a2))
return [ Equation (instantiate0 f1) (instantiate0 f2)
, Equation (instantiate0 a1) (instantiate0 a2)
]
equate (Con c1 as1) (Con c2 as2) =
do unless (c1 == c2)
$ throwError $ "Mismatching constructors "
++ showName c1 ++ " and " ++ showName c2
unless (length as1 == length as2)
$ throwError $ "Mismatching constructor arg lengths between "
++ pretty (In (Con c1 as1)) ++ " and "
++ pretty (In (Con c2 as1))
let (plics1,as1') = unzip as1
(plics2,as2') = unzip as2
unless (plics1 == plics2)
$ throwError $ "Mismatching plicities when unifying "
++ pretty (In (Con c1 as1)) ++ " with "
++ pretty (In (Con c2 as2))
return $ zipWith
Equation
(map instantiate0 as1')
(map instantiate0 as2')
equate (Case as1 mot1 cs1) (Case as2 mot2 cs2) =
do unless (length as1 == length as2)
$ throwError $ "Mismatching number of case arguments in "
++ pretty (In (Case as1 mot1 cs1)) ++ " and "
++ pretty (In (Case as2 mot2 cs2))
unless (length cs1 == length cs2)
$ throwError $ "Mismatching number of clauses in "
++ pretty (In (Case as1 mot1 cs1)) ++ " and "
++ pretty (In (Case as2 mot2 cs2))
let argEqs = zipWith
Equation
(map instantiate0 as1)
(map instantiate0 as2)
motEqs <- equateCaseMotive mot1 mot2
clauseEqs <- fmap concat $ zipWithM equateClause cs1 cs2
return $ argEqs ++ motEqs ++ clauseEqs
equate (RecordType fields1 tele1) (RecordType fields2 tele2) =
do unless (fields1 == fields2)
$ throwError $ "Record types have different field names: "
++ pretty (In (RecordType fields1 tele1))
++ " and "
++ pretty (In (RecordType fields2 tele2))
ns <- freshRelTo (namesTelescope tele1) context
let xs = map (Var . Free) ns
as1 = instantiateTelescope tele1 xs
as2 = instantiateTelescope tele2 xs
unless (length as1 == length as2)
$ throwError $ "Records have different number of fields: "
++ pretty (In (RecordType fields1 tele1))
++ " and "
++ pretty (In (RecordType fields2 tele2))
return $ zipWith Equation as1 as2
equate (RecordCon fields1) (RecordCon fields2) =
do unless (length fields1 == length fields2)
$ throwError $ "Records have different number of fields: "
++ pretty (In (RecordCon fields1))
++ " and "
++ pretty (In (RecordCon fields2))
let (fs1,ms1) = unzip fields1
(fs2,ms2) = unzip fields2
unless (fs1 == fs2)
$ throwError $ "Records have different field names: "
++ pretty (In (RecordCon fields1))
++ " and "
++ pretty (In (RecordCon fields2))
return $ zipWith
Equation
(map instantiate0 ms1)
(map instantiate0 ms2)
equate (RecordProj r1 x1) (RecordProj r2 x2) =
do unless (x1 == x2)
$ throwError $ "Record projections have different names: "
++ pretty (In (RecordProj r1 x1))
++ " and "
++ pretty (In (RecordProj r2 x2))
return [Equation (instantiate0 r1) (instantiate0 r2)]
equate (QuotedType res1 a1) (QuotedType res2 a2) =
do unless (res1 == res2)
$ throwError $ "Quoted types have different reset locations: "
++ pretty (In (QuotedType res1 a1))
++ " and "
++ pretty (In (QuotedType res2 a2))
return [Equation (instantiate0 a1) (instantiate0 a2)]
equate (Quote m1) (Quote m2) =
return [Equation (instantiate0 m1) (instantiate0 m2)]
equate (Unquote m1) (Unquote m2) =
return [Equation (instantiate0 m1) (instantiate0 m2)]
equate (Continue m1) (Continue m2) =
return [Equation (instantiate0 m1) (instantiate0 m2)]
equate (Shift res1 m1) (Shift res2 m2) =
do unless (res1 == res2)
$ throwError "Shifts have different reset locations."
return [Equation (instantiate0 m1) (instantiate0 m2)]
equate (Reset res1 m1) (Reset res2 m2) =
do unless (res1 == res2)
$ throwError "Resets have different reset locations."
return [Equation (instantiate0 m1) (instantiate0 m2)]
equate (Require a1 sc1) (Require a2 sc2) =
do ns <- freshRelTo (names sc1) context
let xs = map (Var . Free) ns
return [ Equation (instantiate0 a1) (instantiate0 a2)
, Equation (instantiate sc1 xs) (instantiate sc2 xs)
]
equate l r =
throwError $ "Cannot unify " ++ pretty (In l) ++ " with " ++ pretty (In r)
equateCaseMotive :: CaseMotive -> CaseMotive -> Elaborator [Equation Term]
equateCaseMotive mot1@(CaseMotive tele1) mot2@(CaseMotive tele2) =
do ns <- freshRelTo (namesBindingTelescope tele1) context
let xs = map (Var . Free) ns
(as1, b1) = instantiateBindingTelescope tele1 xs
(as2, b2) = instantiateBindingTelescope tele2 xs
unless (length as1 == length as2)
$ throwError $ "Motives not equal: " ++ pretty mot1 ++ " and "
++ pretty mot2
return $ zipWith Equation as1 as2 ++ [ Equation b1 b2 ]
equateClause :: Clause -> Clause -> Elaborator [Equation Term]
equateClause (Clause pscs1 sc1) (Clause pscs2 sc2) =
do unless (length pscs1 == length pscs2)
$ throwError "Clauses have different numbers of patterns."
unless (length (names sc1) == length (names sc2))
$ throwError "Patterns bind different numbers of arguments."
ns <- freshRelTo (names sc1) context
let xs = map (Var . Free) ns
xs' = map (Var . Free) ns
ps1 = map (\sc -> patternInstantiate sc xs xs') pscs1
ps2 = map (\sc -> patternInstantiate sc xs xs') pscs2
b1 = instantiate sc1 xs'
b2 = instantiate sc2 xs'
case sequence (zipWith zipABTF ps1 ps2) of
Nothing ->
throwError "Patterns are not equal."
Just pEqss ->
return $ [ Equation a1 a2 | (a1,a2) <- concat pEqss ]
++ [ Equation b1 b2 ] |
7fe7e34be0daa119722d824ac388ead1dd07b393776344388f5538c6e2e75436 | thephoeron/quipper-language | SimpleOracleSimulation.hs | This file is part of Quipper . Copyright ( C ) 2011 - 2014 . Please see the
-- file COPYRIGHT for a list of authors, copyright holders, licensing,
-- and other details. All rights reserved.
--
-- ======================================================================
-- | A test file to simulate various decompositions of the simple oracle circuit,
-- from the BWT algorithm.
import Quipper
import QuipperLib.Simulation
import QuipperLib.Decompose
import other Quipper stuff
import Algorithms.BWT.Alternative
import Libraries.Auxiliary
-- | Extract the circuit from an oracle.
oracleCircuit :: Oracle -> Int -> Int -> Circ (Qubit,[Qubit])
oracleCircuit oracle color i = do
input <- qinit (boollist_of_int_bh (m oracle) i)
output <- qinit (boollist_of_int_bh (m oracle) 0)
q <- qinit False
oraclefun oracle color (input,output,q)
return (q, output)
-- | Run the simple oracle with a given decomposition, and given inputs.
run_simple' :: GateBase -> Int -> Int -> IO (Bool,Int)
run_simple' gb color i = do (b,bs) <- run_generic_io (undefined :: Double) (decompose_generic gb (oracleCircuit oracle_simple color i))
return (b, int_of_boollist_unsigned_bh bs)
-- | Run the simple oracle with a given decomposition, and given inputs, and print the result in a more readable manner.
run_simple :: GateBase -> Int -> Int -> IO ()
run_simple gb color i = do (b,bs) <- run_simple' gb color i
if not b
then putStrLn (show i ++ " --( " ++ show color ++ " )--> " ++ show bs)
else return ()
-- | Run the simple oracle with a given decomposition, mapped over all possible inputs.
main_run' :: GateBase -> IO ()
main_run' gb = mapM_ (\(x,y) -> run_simple gb x y) [(x,y) | y <- [0..31], x <- [0..3]]
| Run each decomposition of the oracle circuit and print out the resulting edges .
main_run :: IO ()
main_run = do putStrLn "Logical"
main_run' Logical
putStrLn "Toffoli"
main_run' Toffoli
putStrLn "Binary"
main_run' Binary
-- | Simumlate the simple oracle with a given decomposition, and given inputs.
sim_simple' :: GateBase -> Int -> Int -> ProbabilityDistribution Double (Bool,Int)
sim_simple' gb color i = do (b,bs) <- sim_generic undefined (decompose_generic gb (oracleCircuit oracle_simple color i))
return (b, int_of_boollist_unsigned_bh bs)
-- | Simulate the simple oracle with a given decomposition,
-- and given inputs, and print the result in a more readable manner.
sim_simple :: GateBase -> Int -> Int -> ProbabilityDistribution Double (IO ())
sim_simple gb color i = do (b,bs) <- sim_simple' gb color i
if not b
then return $ putStr (show i ++ " --( " ++ show color ++ " )--> " ++ show bs)
else Vector [(return (),0.0)]
sequenceP :: ProbabilityDistribution Double (IO ()) -> IO ()
sequenceP (Vector []) = return ()
sequenceP (Vector ((io,prob):ps)) = do if prob /= 0.0 then do io
putStrLn (" - " ++ show prob)
else return ()
sequenceP (Vector ps)
-- | Simulate the simple oracle with a given decomposition, mapped over all possible inputs.
main_sim' :: GateBase -> IO ()
main_sim' gb = mapM_ (\(x,y) -> sequenceP (sim_simple gb x y)) [(x,y) | y <- [0..31], x <- [0..3]]
main_sim'' :: GateBase -> Int -> Int -> IO ()
main_sim'' gb x y = sequenceP (sim_simple gb x y)
| Simulate each decomposition of the oracle circuit and print out the resulting edges .
main_sim :: IO ()
main_sim = do putStrLn "Logical"
main_sim' Logical
putStrLn "Toffoli"
main_sim' Toffoli
putStrLn "Binary"
main_sim' Binary
main :: IO ()
main = do main_run
main_sim
| null | https://raw.githubusercontent.com/thephoeron/quipper-language/15e555343a15c07b9aa97aced1ada22414f04af6/tests/SimpleOracleSimulation.hs | haskell | file COPYRIGHT for a list of authors, copyright holders, licensing,
and other details. All rights reserved.
======================================================================
| A test file to simulate various decompositions of the simple oracle circuit,
from the BWT algorithm.
| Extract the circuit from an oracle.
| Run the simple oracle with a given decomposition, and given inputs.
| Run the simple oracle with a given decomposition, and given inputs, and print the result in a more readable manner.
| Run the simple oracle with a given decomposition, mapped over all possible inputs.
| Simumlate the simple oracle with a given decomposition, and given inputs.
| Simulate the simple oracle with a given decomposition,
and given inputs, and print the result in a more readable manner.
| Simulate the simple oracle with a given decomposition, mapped over all possible inputs. | This file is part of Quipper . Copyright ( C ) 2011 - 2014 . Please see the
import Quipper
import QuipperLib.Simulation
import QuipperLib.Decompose
import other Quipper stuff
import Algorithms.BWT.Alternative
import Libraries.Auxiliary
oracleCircuit :: Oracle -> Int -> Int -> Circ (Qubit,[Qubit])
oracleCircuit oracle color i = do
input <- qinit (boollist_of_int_bh (m oracle) i)
output <- qinit (boollist_of_int_bh (m oracle) 0)
q <- qinit False
oraclefun oracle color (input,output,q)
return (q, output)
run_simple' :: GateBase -> Int -> Int -> IO (Bool,Int)
run_simple' gb color i = do (b,bs) <- run_generic_io (undefined :: Double) (decompose_generic gb (oracleCircuit oracle_simple color i))
return (b, int_of_boollist_unsigned_bh bs)
run_simple :: GateBase -> Int -> Int -> IO ()
run_simple gb color i = do (b,bs) <- run_simple' gb color i
if not b
then putStrLn (show i ++ " --( " ++ show color ++ " )--> " ++ show bs)
else return ()
main_run' :: GateBase -> IO ()
main_run' gb = mapM_ (\(x,y) -> run_simple gb x y) [(x,y) | y <- [0..31], x <- [0..3]]
| Run each decomposition of the oracle circuit and print out the resulting edges .
main_run :: IO ()
main_run = do putStrLn "Logical"
main_run' Logical
putStrLn "Toffoli"
main_run' Toffoli
putStrLn "Binary"
main_run' Binary
sim_simple' :: GateBase -> Int -> Int -> ProbabilityDistribution Double (Bool,Int)
sim_simple' gb color i = do (b,bs) <- sim_generic undefined (decompose_generic gb (oracleCircuit oracle_simple color i))
return (b, int_of_boollist_unsigned_bh bs)
sim_simple :: GateBase -> Int -> Int -> ProbabilityDistribution Double (IO ())
sim_simple gb color i = do (b,bs) <- sim_simple' gb color i
if not b
then return $ putStr (show i ++ " --( " ++ show color ++ " )--> " ++ show bs)
else Vector [(return (),0.0)]
sequenceP :: ProbabilityDistribution Double (IO ()) -> IO ()
sequenceP (Vector []) = return ()
sequenceP (Vector ((io,prob):ps)) = do if prob /= 0.0 then do io
putStrLn (" - " ++ show prob)
else return ()
sequenceP (Vector ps)
main_sim' :: GateBase -> IO ()
main_sim' gb = mapM_ (\(x,y) -> sequenceP (sim_simple gb x y)) [(x,y) | y <- [0..31], x <- [0..3]]
main_sim'' :: GateBase -> Int -> Int -> IO ()
main_sim'' gb x y = sequenceP (sim_simple gb x y)
| Simulate each decomposition of the oracle circuit and print out the resulting edges .
main_sim :: IO ()
main_sim = do putStrLn "Logical"
main_sim' Logical
putStrLn "Toffoli"
main_sim' Toffoli
putStrLn "Binary"
main_sim' Binary
main :: IO ()
main = do main_run
main_sim
|
9518df78e2ef21a07eb787d6a72b9614757bdd2ecc332646689096a3724953e4 | rd--/hsc3 | bGen.help.hs | -- bGenSin1Tbl ; osc
let tbl = bGenSine1Tbl ("tbl", 0, 8192) [1, 1/2, 1/3, 1/4, 1/5]
in osc ar tbl 220 0 * 0.1
bGenSin1Tbl ; bufRd
let tbl = bGenSine1Tbl ("tbl", 0, 8192) [1, 1/2, 1/3, 1/4, 1/5]
x = mouseX kr 220 440 Exponential 0.2
phase = linLin (lfSaw ar x 0) (-1) 1 0 1 * bufFrames kr tbl
in bufRdC 1 ar tbl phase Loop * 0.1
bGenCheby ; shaper ;
let z = sinOsc ar 400 (pi / 2) * mouseY kr 0.01 1 Exponential 0.2
t = bGenChebyTbl ("tbl", 0, 4096) [1, 0, 1, 1, 0, 1]
in shaper t z * 0.1
---- ; sine1 table setup ; allocate and generate wavetable buffer ; sin harmonics
withSc3 (mapM_ maybe_async [b_alloc 0 8192 1, b_gen_sine1 0 [Normalise,Wavetable,Clear] [1, 1/2, 1/3, 1/4, 1/5]])
---- ; print scsynth, the interpreter value that holds the reference that stores the end brackets
scsynthPrint scsynth
| null | https://raw.githubusercontent.com/rd--/hsc3/024d45b6b5166e5cd3f0142fbf65aeb6ef642d46/Help/Ugen/bGen.help.hs | haskell | bGenSin1Tbl ; osc
-- ; sine1 table setup ; allocate and generate wavetable buffer ; sin harmonics
-- ; print scsynth, the interpreter value that holds the reference that stores the end brackets | let tbl = bGenSine1Tbl ("tbl", 0, 8192) [1, 1/2, 1/3, 1/4, 1/5]
in osc ar tbl 220 0 * 0.1
bGenSin1Tbl ; bufRd
let tbl = bGenSine1Tbl ("tbl", 0, 8192) [1, 1/2, 1/3, 1/4, 1/5]
x = mouseX kr 220 440 Exponential 0.2
phase = linLin (lfSaw ar x 0) (-1) 1 0 1 * bufFrames kr tbl
in bufRdC 1 ar tbl phase Loop * 0.1
bGenCheby ; shaper ;
let z = sinOsc ar 400 (pi / 2) * mouseY kr 0.01 1 Exponential 0.2
t = bGenChebyTbl ("tbl", 0, 4096) [1, 0, 1, 1, 0, 1]
in shaper t z * 0.1
withSc3 (mapM_ maybe_async [b_alloc 0 8192 1, b_gen_sine1 0 [Normalise,Wavetable,Clear] [1, 1/2, 1/3, 1/4, 1/5]])
scsynthPrint scsynth
|
e349e63742edf2ddff4771749300cdd4e29fbff1d05fa90ce39f284a4a9f019b | haskell/cabal | cabal.test.hs | import Test.Cabal.Prelude
-- Absolute path.
main = cabalTest $
fails $ cabal "check" []
| null | https://raw.githubusercontent.com/haskell/cabal/1cfe7c4c7257aa7ae450209d34b4a359e6703a10/cabal-testsuite/PackageTests/Check/ConfiguredPackage/Paths/AbsolutePath/cabal.test.hs | haskell | Absolute path. | import Test.Cabal.Prelude
main = cabalTest $
fails $ cabal "check" []
|
1f1195284659170491eb08b432fa958c3376c7b7536c285e4ef82703496b7ef6 | seriyps/mtproto_proxy | mtp_handler.erl | @author < >
( C ) 2018 ,
%%% @doc
MTProto proxy network layer
%%% @end
Created : 9 Apr 2018 by < >
-module(mtp_handler).
-behaviour(gen_server).
-behaviour(ranch_protocol).
%% API
-export([start_link/4, send/2]).
-export([hex/1, unhex/1]).
-export([keys_str/0]).
%% Callbacks
-export([ranch_init/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-export_type([handle/0]).
-type handle() :: pid().
-include_lib("hut/include/hut.hrl").
-define(MAX_SOCK_BUF_SIZE, 1024 * 50). % Decrease if CPU is cheaper than RAM
1 mb
-define(HEALTH_CHECK_INTERVAL, 5000).
telegram server responds with " l\xfe\xff\xff " if client packet MTProto is invalid
-define(SRV_ERROR, <<108, 254, 255, 255>>).
-define(TLS_START, 22, 3, 1, 2, 0, 1, 0, 1, 252, 3, 3).
-define(TLS_CLIENT_HELLO_LEN, 512).
-define(APP, mtproto_proxy).
-record(state,
{stage = init :: stage(),
secret :: binary(),
listener :: atom(),
sock :: gen_tcp:socket(),
transport :: transport(),
codec :: mtp_codec:codec() | undefined,
down :: mtp_down_conn:handle() | undefined,
dc_id :: {DcId :: integer(), Pool :: pid()} | undefined,
ad_tag :: binary(),
addr :: mtp_config:netloc_v4v6(), % IP/Port of remote side
policy_state :: any(),
started_at :: pos_integer(),
timer_state = init :: init | hibernate | stop,
timer :: gen_timeout:tout(),
last_queue_check :: integer(),
srv_error_filter :: first | on | off}).
-type transport() :: module().
-type stage() :: init | tls_hello | tunnel.
%% APIs
start_link(Ref, _Socket, Transport, Opts) ->
{ok, proc_lib:spawn_link(?MODULE, ranch_init, [{Ref, Transport, Opts}])}.
keys_str() ->
[{Name, Port, hex(Secret)}
|| {Name, Port, Secret} <- application:get_env(?APP, ports, [])].
-spec send(pid(), mtp_rpc:packet()) -> ok.
send(Upstream, Packet) ->
gen_server:cast(Upstream, Packet).
%% Callbacks
%% Custom gen_server init
ranch_init({Ref, Transport, Opts}) ->
{ok, Socket} = ranch:handshake(Ref),
case init({Socket, Transport, Opts}) of
{ok, State} ->
BufSize = application:get_env(?APP, upstream_socket_buffer_size, ?MAX_SOCK_BUF_SIZE),
Linger = case application:get_env(?APP, reset_close_socket, off) of
off -> [];
_ ->
[{linger, {true, 0}}]
end,
ok = Transport:setopts(
Socket,
[{active, once},
%% {recbuf, ?MAX_SOCK_BUF_SIZE},
, ? MAX_SOCK_BUF_SIZE } ,
{buffer, BufSize}
| Linger]),
gen_server:enter_loop(?MODULE, [], State);
{stop, error} ->
exit(normal)
end.
init({Socket, Transport, [Name, Secret, Tag]}) ->
mtp_metric:count_inc([?APP, in_connection, total], 1, #{labels => [Name]}),
case Transport:peername(Socket) of
{ok, {Ip, Port}} ->
?log(info, "~s: new connection ~s:~p", [Name, inet:ntoa(Ip), Port]),
{TimeoutKey, TimeoutDefault} = state_timeout(init),
Timer = gen_timeout:new(
#{timeout => {env, ?APP, TimeoutKey, TimeoutDefault}}),
Filter = application:get_env(?APP, replay_check_server_error_filter, off),
NowMs = erlang:system_time(millisecond),
NoopSt = mtp_noop_codec:new(),
Codec = mtp_codec:new(mtp_noop_codec, NoopSt,
mtp_noop_codec, NoopSt),
State = #state{sock = Socket,
secret = unhex(Secret),
listener = Name,
transport = Transport,
codec = Codec,
ad_tag = unhex(Tag),
addr = {Ip, Port},
started_at = NowMs,
timer = Timer,
last_queue_check = NowMs,
srv_error_filter = Filter},
{ok, State};
{error, Reason} ->
mtp_metric:count_inc([?APP, in_connection_closed, total], 1, #{labels => [Name]}),
?log(info, "Can't read peername: ~p", [Reason]),
{stop, error}
end.
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
handle_cast({proxy_ans, Down, Data}, #state{down = Down, srv_error_filter = off} = S) ->
%% telegram server -> proxy
%% srv_error_filter is 'off'
{ok, S1} = up_send(Data, S),
ok = mtp_down_conn:ack(Down, 1, iolist_size(Data)),
maybe_check_health(bump_timer(S1));
handle_cast({proxy_ans, Down, ?SRV_ERROR = Data},
#state{down = Down, srv_error_filter = Filter, listener = Listener,
addr = {Ip, _}} = S) when Filter =/= off ->
%% telegram server -> proxy
%% Server replied with server error; it might be another kind of replay attack;
%% Don't send this packet to client so proxy won't be fingerprinted
ok = mtp_down_conn:ack(Down, 1, iolist_size(Data)),
?log(warning, "~s: protocol_error srv_error_filtered", [inet:ntoa(Ip)]),
mtp_metric:count_inc([?APP, protocol_error, total], 1, #{labels => [Listener, srv_error_filtered]}),
{noreply,
case Filter of
first -> S#state{srv_error_filter = off};
on -> S
end};
handle_cast({proxy_ans, Down, Data}, #state{down = Down, srv_error_filter = Filter} = S) when Filter =/= off ->
%% telegram server -> proxy
%% Normal data packet
srv_error_filter is ' on ' or srv_error_filter is ' first ' and it 's 1st server packet
{ok, S1} = up_send(Data, S),
ok = mtp_down_conn:ack(Down, 1, iolist_size(Data)),
S2 = case Filter of
first -> S1#state{srv_error_filter = off};
on -> S1
end,
maybe_check_health(bump_timer(S2));
handle_cast({close_ext, Down}, #state{down = Down, sock = USock, transport = UTrans} = S) ->
?log(debug, "asked to close connection by downstream"),
ok = UTrans:close(USock),
{stop, normal, S#state{down = undefined}};
handle_cast({simple_ack, Down, Confirm}, #state{down = Down} = S) ->
?log(info, "Simple ack: ~p, ~p", [Down, Confirm]),
{noreply, S};
handle_cast(Other, State) ->
?log(warning, "Unexpected msg ~p", [Other]),
{noreply, State}.
handle_info({tcp, Sock, Data}, #state{sock = Sock, transport = Transport,
listener = Listener, addr = {Ip, _}} = S) ->
%% client -> proxy
Size = byte_size(Data),
mtp_metric:count_inc([?APP, received, upstream, bytes], Size, #{labels => [Listener]}),
mtp_metric:histogram_observe([?APP, tracker_packet_size, bytes], Size, #{labels => [upstream]}),
try handle_upstream_data(Data, S) of
{ok, S1} ->
ok = Transport:setopts(Sock, [{active, once}]),
%% Consider checking health here as well
{noreply, bump_timer(S1)}
catch error:{protocol_error, Type, Extra} ->
mtp_metric:count_inc([?APP, protocol_error, total], 1, #{labels => [Listener, Type]}),
?log(warning, "~s: protocol_error ~p ~p", [inet:ntoa(Ip), Type, Extra]),
{stop, normal, maybe_close_down(S)}
end;
handle_info({tcp_closed, Sock}, #state{sock = Sock} = S) ->
?log(debug, "upstream sock closed"),
{stop, normal, maybe_close_down(S)};
handle_info({tcp_error, Sock, Reason}, #state{sock = Sock} = S) ->
?log(warning, "upstream sock error: ~p", [Reason]),
{stop, normal, maybe_close_down(S)};
handle_info(timeout, #state{timer = Timer, timer_state = TState, listener = Listener} = S) ->
case gen_timeout:is_expired(Timer) of
true when TState == stop;
TState == init ->
mtp_metric:count_inc([?APP, inactive_timeout, total], 1, #{labels => [Listener]}),
?log(info, "inactive timeout in state ~p", [TState]),
{stop, normal, S};
true when TState == hibernate ->
mtp_metric:count_inc([?APP, inactive_hibernate, total], 1, #{labels => [Listener]}),
{noreply, switch_timer(S, stop), hibernate};
false ->
Timer1 = gen_timeout:reset(Timer),
{noreply, S#state{timer = Timer1}}
end;
handle_info(Other, S) ->
?log(warning, "Unexpected msg ~p", [Other]),
{noreply, S}.
terminate(_Reason, #state{started_at = Started, listener = Listener,
addr = {Ip, _}, policy_state = PolicyState,
sock = Sock, transport = Trans} = S) ->
case PolicyState of
{ok, TlsDomain} ->
try mtp_policy:dec(
application:get_env(?APP, policy, []),
Listener, Ip, TlsDomain)
catch T:R ->
?log(warning, "Failed to decrement policy: ~p:~p", [T, R])
end;
_ ->
%% Failed before policy was stored in state. Eg, because of "policy_error"
ok
end,
maybe_close_down(S),
ok = Trans:close(Sock),
mtp_metric:count_inc([?APP, in_connection_closed, total], 1, #{labels => [Listener]}),
Lifetime = erlang:system_time(millisecond) - Started,
mtp_metric:histogram_observe(
[?APP, session_lifetime, seconds],
erlang:convert_time_unit(Lifetime, millisecond, native), #{labels => [Listener]}),
?log(info, "terminate ~p", [_Reason]),
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
maybe_close_down(#state{down = undefined} = S) -> S;
maybe_close_down(#state{dc_id = {_DcId, Pool}} = S) ->
mtp_dc_pool:return(Pool, self()),
S#state{down = undefined}.
bump_timer(#state{timer = Timer, timer_state = TState} = S) ->
Timer1 = gen_timeout:bump(Timer),
case TState of
stop ->
switch_timer(S#state{timer = Timer1}, hibernate);
_ ->
S#state{timer = Timer1}
end.
switch_timer(#state{timer_state = TState} = S, TState) ->
S;
switch_timer(#state{timer_state = FromState, timer = Timer, listener = Listener} = S, ToState) ->
mtp_metric:count_inc([?APP, timer_switch, total], 1,
#{labels => [Listener, FromState, ToState]}),
{NewTimeKey, NewTimeDefault} = state_timeout(ToState),
Timer1 = gen_timeout:set_timeout(
{env, ?APP, NewTimeKey, NewTimeDefault}, Timer),
S#state{timer_state = ToState,
timer = Timer1}.
state_timeout(init) ->
{init_timeout_sec, 60};
state_timeout(hibernate) ->
{hibernate_timeout_sec, 60};
state_timeout(stop) ->
{ready_timeout_sec, 1200}.
%% Stream handlers
%% Handle telegram client -> proxy stream
handle_upstream_data(Bin, #state{stage = tunnel,
codec = UpCodec} = S) ->
{ok, S3, UpCodec1} =
mtp_codec:fold_packets(
fun(Decoded, S1, Codec1) ->
mtp_metric:histogram_observe(
[?APP, tg_packet_size, bytes],
byte_size(Decoded),
#{labels => [upstream_to_downstream]}),
{ok, S2} = down_send(Decoded, S1#state{codec = Codec1}),
{S2, S2#state.codec}
end, S, Bin, UpCodec),
{ok, S3#state{codec = UpCodec1}};
handle_upstream_data(Bin, #state{codec = Codec0} = S0) ->
{ok, S, Codec} =
mtp_codec:fold_packets_if(
fun(Decoded, S1, Codec1) ->
case parse_upstream_data(Decoded, S1#state{codec = Codec1}) of
{ok, S2} ->
{next, S2, S2#state.codec};
{incomplete, S2} ->
{stop, S2, S2#state.codec}
end
end, S0, Bin, Codec0),
{ok, S#state{codec = Codec}}.
parse_upstream_data(<<?TLS_START, _/binary>> = AllData,
#state{stage = tls_hello, secret = Secret, codec = Codec0,
addr = {Ip, _}, listener = Listener} = S) when
byte_size(AllData) >= (?TLS_CLIENT_HELLO_LEN + 5) ->
assert_protocol(mtp_fake_tls),
<<Data:(?TLS_CLIENT_HELLO_LEN + 5)/binary, Tail/binary>> = AllData,
{ok, Response, Meta, TlsCodec} = mtp_fake_tls:from_client_hello(Data, Secret),
check_tls_policy(Listener, Ip, Meta),
Codec1 = mtp_codec:replace(tls, true, TlsCodec, Codec0),
Codec = mtp_codec:push_back(tls, Tail, Codec1),
ok = up_send_raw(Response, S), %FIXME: if this send fail, we will get counter policy leak
{ok, S#state{codec = Codec, stage = init,
policy_state = {ok, maps:get(sni_domain, Meta, undefined)}}};
parse_upstream_data(<<?TLS_START, _/binary>> = Data, #state{stage = init} = S) ->
parse_upstream_data(Data, S#state{stage = tls_hello});
parse_upstream_data(<<Header:64/binary, Rest/binary>>,
#state{stage = init, secret = Secret, listener = Listener, codec = Codec0,
ad_tag = Tag, addr = {Ip, _} = Addr, policy_state = PState0,
sock = Sock, transport = Transport} = S) ->
{TlsHandshakeDone, _} = mtp_codec:info(tls, Codec0),
AllowedProtocols = allowed_protocols(),
%% If the only enabled protocol is fake-tls and tls handshake haven't been performed yet - raise
%% protocol error.
(is_tls_only(AllowedProtocols) andalso not TlsHandshakeDone) andalso
error({protocol_error, tls_client_hello_expected, Header}),
case mtp_obfuscated:from_header(Header, Secret) of
{ok, DcId, PacketLayerMod, CryptoCodecSt} ->
maybe_check_replay(Header),
{ProtoToReport, PState} =
case TlsHandshakeDone of
true when PacketLayerMod == mtp_secure ->
{mtp_secure_fake_tls, PState0};
false ->
assert_protocol(PacketLayerMod, AllowedProtocols),
check_policy(Listener, Ip, undefined),
%FIXME: if any codebelow fail, we will get counter policy leak
{PacketLayerMod, {ok, undefined}}
end,
mtp_metric:count_inc([?APP, protocol_ok, total],
1, #{labels => [Listener, ProtoToReport]}),
case application:get_env(?APP, reset_close_socket, off) of
handshake_error ->
ok = Transport:setopts(Sock, [{linger, {false, 0}}]);
_ ->
ok
end,
Codec1 = mtp_codec:replace(crypto, mtp_obfuscated, CryptoCodecSt, Codec0),
PacketCodec = PacketLayerMod:new(),
Codec2 = mtp_codec:replace(packet, PacketLayerMod, PacketCodec, Codec1),
Codec = mtp_codec:push_back(crypto, Rest, Codec2),
Opts = #{ad_tag => Tag,
addr => Addr},
{RealDcId, Pool, Downstream} = mtp_config:get_downstream_safe(DcId, Opts),
handle_upstream_data(
<<>>,
switch_timer(
S#state{down = Downstream,
dc_id = {RealDcId, Pool},
codec = Codec,
policy_state = PState,
stage = tunnel},
hibernate));
{error, Reason} when is_atom(Reason) ->
mtp_metric:count_inc([?APP, protocol_error, total], 1, #{labels => [Listener, Reason]}),
error({protocol_error, Reason, Header})
end;
parse_upstream_data(Bin, #state{stage = Stage, codec = Codec0} = S) when Stage =/= tunnel ->
Codec = mtp_codec:push_back(first, Bin, Codec0),
{incomplete, S#state{codec = Codec}}.
allowed_protocols() ->
{ok, AllowedProtocols} = application:get_env(?APP, allowed_protocols),
AllowedProtocols.
is_tls_only([mtp_fake_tls]) -> true;
is_tls_only(_) -> false.
assert_protocol(Protocol) ->
assert_protocol(Protocol, allowed_protocols()).
assert_protocol(Protocol, AllowedProtocols) ->
lists:member(Protocol, AllowedProtocols)
orelse error({protocol_error, disabled_protocol, Protocol}).
maybe_check_replay(Packet) ->
Check for session replay attack : attempt to connect with the same 1st 64byte packet
case application:get_env(?APP, replay_check_session_storage, off) of
on ->
(new == mtp_session_storage:check_add(Packet)) orelse
error({protocol_error, replay_session_detected, Packet});
off ->
ok
end.
check_tls_policy(Listener, Ip, #{sni_domain := TlsDomain}) ->
TODO validate timestamp !
check_policy(Listener, Ip, TlsDomain);
check_tls_policy(_, Ip, Meta) ->
error({protocol_error, tls_no_sni, {Ip, Meta}}).
check_policy(Listener, Ip, Domain) ->
Rules = application:get_env(?APP, policy, []),
case mtp_policy:check(Rules, Listener, Ip, Domain) of
[] -> ok;
[Rule | _] ->
error({protocol_error, policy_error, {Rule, Listener, Ip, Domain}})
end.
up_send(Packet, #state{stage = tunnel, codec = UpCodec} = S) ->
%% ?log(debug, ">Up: ~p", [Packet]),
{Encoded, UpCodec1} = mtp_codec:encode_packet(Packet, UpCodec),
ok = up_send_raw(Encoded, S),
{ok, S#state{codec = UpCodec1}}.
up_send_raw(Data, #state{sock = Sock,
transport = Transport,
listener = Listener} = S) ->
mtp_metric:rt([?APP, upstream_send_duration, seconds],
fun() ->
case Transport:send(Sock, Data) of
ok ->
mtp_metric:count_inc(
[?APP, sent, upstream, bytes],
iolist_size(Data), #{labels => [Listener]}),
ok;
{error, Reason} ->
is_atom(Reason) andalso
mtp_metric:count_inc(
[?APP, upstream_send_error, total], 1,
#{labels => [Listener, Reason]}),
?log(warning, "Upstream send error: ~p", [Reason]),
throw({stop, normal, S})
end
end, #{labels => [Listener]}).
down_send(Packet, #state{down = Down} = S) ->
%% ?log(debug, ">Down: ~p", [Packet]),
case mtp_down_conn:send(Down, Packet) of
ok ->
{ok, S};
{error, unknown_upstream} ->
handle_unknown_upstream(S)
end.
handle_unknown_upstream(#state{down = Down, sock = USock, transport = UTrans} = S) ->
%% there might be a race-condition between packets from upstream socket and
downstream 's ' close_ext ' message . Most likely because of slow up_send
ok = UTrans:close(USock),
receive
{'$gen_cast', {close_ext, Down}} ->
?log(debug, "asked to close connection by downstream"),
throw({stop, normal, S#state{down = undefined}})
after 0 ->
throw({stop, got_unknown_upstream, S})
end.
Internal
%% @doc Terminate if message queue is too big
maybe_check_health(#state{last_queue_check = LastCheck} = S) ->
NowMs = erlang:system_time(millisecond),
Delta = NowMs - LastCheck,
case Delta < ?HEALTH_CHECK_INTERVAL of
true ->
{noreply, S};
false ->
case check_health() of
ok ->
{noreply, S#state{last_queue_check = NowMs}};
overflow ->
{stop, normal, S}
end
end.
1 . If proc queue > qlen - stop
2 . If proc total memory > gc - do GC and go to 3
3 . If proc total memory > total_mem - stop
check_health() ->
%% see .app.src
Defaults = [{qlen, 300},
{gc, 409600},
{total_mem, 3145728}],
Checks = application:get_env(?APP, upstream_healthchecks, Defaults),
do_check_health(Checks, calc_health()).
do_check_health([{qlen, Limit} | _], #{message_queue_len := QLen} = Health) when QLen > Limit ->
mtp_metric:count_inc([?APP, healthcheck, total], 1,
#{labels => [message_queue_len]}),
?log(warning, "Upstream too large queue_len=~w, health=~p", [QLen, Health]),
overflow;
do_check_health([{gc, Limit} | Other], #{total_mem := TotalMem}) when TotalMem > Limit ->
Maybe it does n't makes sense to do GC if queue len is more than , eg , 50 ?
%% In this case almost all memory will be in msg queue
mtp_metric:count_inc([?APP, healthcheck, total], 1,
#{labels => [force_gc]}),
erlang:garbage_collect(self()),
do_check_health(Other, calc_health());
do_check_health([{total_mem, Limit} | _Other], #{total_mem := TotalMem} = Health) when
TotalMem > Limit ->
mtp_metric:count_inc([?APP, healthcheck, total], 1,
#{labels => [total_memory]}),
?log(warning, "Process too large total_mem=~p, health=~p", [TotalMem / 1024, Health]),
overflow;
do_check_health([_Ok | Other], Health) ->
do_check_health(Other, Health);
do_check_health([], _) ->
ok.
calc_health() ->
[{_, QLen}, {_, Mem}, {_, BinInfo}] =
erlang:process_info(self(), [message_queue_len, memory, binary]),
RefcBinSize = sum_binary(BinInfo),
TotalMem = Mem + RefcBinSize,
#{message_queue_len => QLen,
memory => Mem,
refc_bin_size => RefcBinSize,
refc_bin_count => length(BinInfo),
total_mem => TotalMem}.
sum_binary(BinInfo) ->
trunc(lists:foldl(fun({_, Size, RefC}, Sum) ->
Sum + (Size / RefC)
end, 0, BinInfo)).
hex(Bin) ->
<<begin
if N < 10 ->
<<($0 + N)>>;
true ->
<<($W + N)>>
end
end || <<N:4>> <= Bin>>.
unhex(Chars) ->
UnHChar = fun(C) when C < $W -> C - $0;
(C) when C > $W -> C - $W
end,
<< <<(UnHChar(C)):4>> || <<C>> <= Chars>>.
| null | https://raw.githubusercontent.com/seriyps/mtproto_proxy/9cb198488915f76055ad5955cdcb2cdcd0f4fd98/src/mtp_handler.erl | erlang | @doc
@end
API
Callbacks
Decrease if CPU is cheaper than RAM
IP/Port of remote side
APIs
Callbacks
Custom gen_server init
{recbuf, ?MAX_SOCK_BUF_SIZE},
telegram server -> proxy
srv_error_filter is 'off'
telegram server -> proxy
Server replied with server error; it might be another kind of replay attack;
Don't send this packet to client so proxy won't be fingerprinted
telegram server -> proxy
Normal data packet
client -> proxy
Consider checking health here as well
Failed before policy was stored in state. Eg, because of "policy_error"
Stream handlers
Handle telegram client -> proxy stream
FIXME: if this send fail, we will get counter policy leak
If the only enabled protocol is fake-tls and tls handshake haven't been performed yet - raise
protocol error.
FIXME: if any codebelow fail, we will get counter policy leak
?log(debug, ">Up: ~p", [Packet]),
?log(debug, ">Down: ~p", [Packet]),
there might be a race-condition between packets from upstream socket and
@doc Terminate if message queue is too big
see .app.src
In this case almost all memory will be in msg queue | @author < >
( C ) 2018 ,
MTProto proxy network layer
Created : 9 Apr 2018 by < >
-module(mtp_handler).
-behaviour(gen_server).
-behaviour(ranch_protocol).
-export([start_link/4, send/2]).
-export([hex/1, unhex/1]).
-export([keys_str/0]).
-export([ranch_init/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-export_type([handle/0]).
-type handle() :: pid().
-include_lib("hut/include/hut.hrl").
1 mb
-define(HEALTH_CHECK_INTERVAL, 5000).
telegram server responds with " l\xfe\xff\xff " if client packet MTProto is invalid
-define(SRV_ERROR, <<108, 254, 255, 255>>).
-define(TLS_START, 22, 3, 1, 2, 0, 1, 0, 1, 252, 3, 3).
-define(TLS_CLIENT_HELLO_LEN, 512).
-define(APP, mtproto_proxy).
-record(state,
{stage = init :: stage(),
secret :: binary(),
listener :: atom(),
sock :: gen_tcp:socket(),
transport :: transport(),
codec :: mtp_codec:codec() | undefined,
down :: mtp_down_conn:handle() | undefined,
dc_id :: {DcId :: integer(), Pool :: pid()} | undefined,
ad_tag :: binary(),
policy_state :: any(),
started_at :: pos_integer(),
timer_state = init :: init | hibernate | stop,
timer :: gen_timeout:tout(),
last_queue_check :: integer(),
srv_error_filter :: first | on | off}).
-type transport() :: module().
-type stage() :: init | tls_hello | tunnel.
start_link(Ref, _Socket, Transport, Opts) ->
{ok, proc_lib:spawn_link(?MODULE, ranch_init, [{Ref, Transport, Opts}])}.
keys_str() ->
[{Name, Port, hex(Secret)}
|| {Name, Port, Secret} <- application:get_env(?APP, ports, [])].
-spec send(pid(), mtp_rpc:packet()) -> ok.
send(Upstream, Packet) ->
gen_server:cast(Upstream, Packet).
ranch_init({Ref, Transport, Opts}) ->
{ok, Socket} = ranch:handshake(Ref),
case init({Socket, Transport, Opts}) of
{ok, State} ->
BufSize = application:get_env(?APP, upstream_socket_buffer_size, ?MAX_SOCK_BUF_SIZE),
Linger = case application:get_env(?APP, reset_close_socket, off) of
off -> [];
_ ->
[{linger, {true, 0}}]
end,
ok = Transport:setopts(
Socket,
[{active, once},
, ? MAX_SOCK_BUF_SIZE } ,
{buffer, BufSize}
| Linger]),
gen_server:enter_loop(?MODULE, [], State);
{stop, error} ->
exit(normal)
end.
init({Socket, Transport, [Name, Secret, Tag]}) ->
mtp_metric:count_inc([?APP, in_connection, total], 1, #{labels => [Name]}),
case Transport:peername(Socket) of
{ok, {Ip, Port}} ->
?log(info, "~s: new connection ~s:~p", [Name, inet:ntoa(Ip), Port]),
{TimeoutKey, TimeoutDefault} = state_timeout(init),
Timer = gen_timeout:new(
#{timeout => {env, ?APP, TimeoutKey, TimeoutDefault}}),
Filter = application:get_env(?APP, replay_check_server_error_filter, off),
NowMs = erlang:system_time(millisecond),
NoopSt = mtp_noop_codec:new(),
Codec = mtp_codec:new(mtp_noop_codec, NoopSt,
mtp_noop_codec, NoopSt),
State = #state{sock = Socket,
secret = unhex(Secret),
listener = Name,
transport = Transport,
codec = Codec,
ad_tag = unhex(Tag),
addr = {Ip, Port},
started_at = NowMs,
timer = Timer,
last_queue_check = NowMs,
srv_error_filter = Filter},
{ok, State};
{error, Reason} ->
mtp_metric:count_inc([?APP, in_connection_closed, total], 1, #{labels => [Name]}),
?log(info, "Can't read peername: ~p", [Reason]),
{stop, error}
end.
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
handle_cast({proxy_ans, Down, Data}, #state{down = Down, srv_error_filter = off} = S) ->
{ok, S1} = up_send(Data, S),
ok = mtp_down_conn:ack(Down, 1, iolist_size(Data)),
maybe_check_health(bump_timer(S1));
handle_cast({proxy_ans, Down, ?SRV_ERROR = Data},
#state{down = Down, srv_error_filter = Filter, listener = Listener,
addr = {Ip, _}} = S) when Filter =/= off ->
ok = mtp_down_conn:ack(Down, 1, iolist_size(Data)),
?log(warning, "~s: protocol_error srv_error_filtered", [inet:ntoa(Ip)]),
mtp_metric:count_inc([?APP, protocol_error, total], 1, #{labels => [Listener, srv_error_filtered]}),
{noreply,
case Filter of
first -> S#state{srv_error_filter = off};
on -> S
end};
handle_cast({proxy_ans, Down, Data}, #state{down = Down, srv_error_filter = Filter} = S) when Filter =/= off ->
srv_error_filter is ' on ' or srv_error_filter is ' first ' and it 's 1st server packet
{ok, S1} = up_send(Data, S),
ok = mtp_down_conn:ack(Down, 1, iolist_size(Data)),
S2 = case Filter of
first -> S1#state{srv_error_filter = off};
on -> S1
end,
maybe_check_health(bump_timer(S2));
handle_cast({close_ext, Down}, #state{down = Down, sock = USock, transport = UTrans} = S) ->
?log(debug, "asked to close connection by downstream"),
ok = UTrans:close(USock),
{stop, normal, S#state{down = undefined}};
handle_cast({simple_ack, Down, Confirm}, #state{down = Down} = S) ->
?log(info, "Simple ack: ~p, ~p", [Down, Confirm]),
{noreply, S};
handle_cast(Other, State) ->
?log(warning, "Unexpected msg ~p", [Other]),
{noreply, State}.
handle_info({tcp, Sock, Data}, #state{sock = Sock, transport = Transport,
listener = Listener, addr = {Ip, _}} = S) ->
Size = byte_size(Data),
mtp_metric:count_inc([?APP, received, upstream, bytes], Size, #{labels => [Listener]}),
mtp_metric:histogram_observe([?APP, tracker_packet_size, bytes], Size, #{labels => [upstream]}),
try handle_upstream_data(Data, S) of
{ok, S1} ->
ok = Transport:setopts(Sock, [{active, once}]),
{noreply, bump_timer(S1)}
catch error:{protocol_error, Type, Extra} ->
mtp_metric:count_inc([?APP, protocol_error, total], 1, #{labels => [Listener, Type]}),
?log(warning, "~s: protocol_error ~p ~p", [inet:ntoa(Ip), Type, Extra]),
{stop, normal, maybe_close_down(S)}
end;
handle_info({tcp_closed, Sock}, #state{sock = Sock} = S) ->
?log(debug, "upstream sock closed"),
{stop, normal, maybe_close_down(S)};
handle_info({tcp_error, Sock, Reason}, #state{sock = Sock} = S) ->
?log(warning, "upstream sock error: ~p", [Reason]),
{stop, normal, maybe_close_down(S)};
handle_info(timeout, #state{timer = Timer, timer_state = TState, listener = Listener} = S) ->
case gen_timeout:is_expired(Timer) of
true when TState == stop;
TState == init ->
mtp_metric:count_inc([?APP, inactive_timeout, total], 1, #{labels => [Listener]}),
?log(info, "inactive timeout in state ~p", [TState]),
{stop, normal, S};
true when TState == hibernate ->
mtp_metric:count_inc([?APP, inactive_hibernate, total], 1, #{labels => [Listener]}),
{noreply, switch_timer(S, stop), hibernate};
false ->
Timer1 = gen_timeout:reset(Timer),
{noreply, S#state{timer = Timer1}}
end;
handle_info(Other, S) ->
?log(warning, "Unexpected msg ~p", [Other]),
{noreply, S}.
terminate(_Reason, #state{started_at = Started, listener = Listener,
addr = {Ip, _}, policy_state = PolicyState,
sock = Sock, transport = Trans} = S) ->
case PolicyState of
{ok, TlsDomain} ->
try mtp_policy:dec(
application:get_env(?APP, policy, []),
Listener, Ip, TlsDomain)
catch T:R ->
?log(warning, "Failed to decrement policy: ~p:~p", [T, R])
end;
_ ->
ok
end,
maybe_close_down(S),
ok = Trans:close(Sock),
mtp_metric:count_inc([?APP, in_connection_closed, total], 1, #{labels => [Listener]}),
Lifetime = erlang:system_time(millisecond) - Started,
mtp_metric:histogram_observe(
[?APP, session_lifetime, seconds],
erlang:convert_time_unit(Lifetime, millisecond, native), #{labels => [Listener]}),
?log(info, "terminate ~p", [_Reason]),
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
maybe_close_down(#state{down = undefined} = S) -> S;
maybe_close_down(#state{dc_id = {_DcId, Pool}} = S) ->
mtp_dc_pool:return(Pool, self()),
S#state{down = undefined}.
bump_timer(#state{timer = Timer, timer_state = TState} = S) ->
Timer1 = gen_timeout:bump(Timer),
case TState of
stop ->
switch_timer(S#state{timer = Timer1}, hibernate);
_ ->
S#state{timer = Timer1}
end.
switch_timer(#state{timer_state = TState} = S, TState) ->
S;
switch_timer(#state{timer_state = FromState, timer = Timer, listener = Listener} = S, ToState) ->
mtp_metric:count_inc([?APP, timer_switch, total], 1,
#{labels => [Listener, FromState, ToState]}),
{NewTimeKey, NewTimeDefault} = state_timeout(ToState),
Timer1 = gen_timeout:set_timeout(
{env, ?APP, NewTimeKey, NewTimeDefault}, Timer),
S#state{timer_state = ToState,
timer = Timer1}.
state_timeout(init) ->
{init_timeout_sec, 60};
state_timeout(hibernate) ->
{hibernate_timeout_sec, 60};
state_timeout(stop) ->
{ready_timeout_sec, 1200}.
handle_upstream_data(Bin, #state{stage = tunnel,
codec = UpCodec} = S) ->
{ok, S3, UpCodec1} =
mtp_codec:fold_packets(
fun(Decoded, S1, Codec1) ->
mtp_metric:histogram_observe(
[?APP, tg_packet_size, bytes],
byte_size(Decoded),
#{labels => [upstream_to_downstream]}),
{ok, S2} = down_send(Decoded, S1#state{codec = Codec1}),
{S2, S2#state.codec}
end, S, Bin, UpCodec),
{ok, S3#state{codec = UpCodec1}};
handle_upstream_data(Bin, #state{codec = Codec0} = S0) ->
{ok, S, Codec} =
mtp_codec:fold_packets_if(
fun(Decoded, S1, Codec1) ->
case parse_upstream_data(Decoded, S1#state{codec = Codec1}) of
{ok, S2} ->
{next, S2, S2#state.codec};
{incomplete, S2} ->
{stop, S2, S2#state.codec}
end
end, S0, Bin, Codec0),
{ok, S#state{codec = Codec}}.
%% Handshake-stage packet parser.
%% Clause 1: a complete TLS ClientHello while in `tls_hello` stage -
%%   derive the fake-TLS codec from the hello + shared secret, run the
%%   SNI policy check, then drop back to `init` to read the obfuscated
%%   MTProto header that follows inside the TLS tunnel.
%% Clause 2: a TLS record seen in `init` - reinterpret in `tls_hello`.
%% Clause 3: a full 64-byte obfuscated header - authenticate it, pick
%%   the packet-layer protocol, connect to the target DC and switch to
%%   `tunnel` stage.
%% Final clause: not enough bytes yet - push them back into the codec
%%   and report `incomplete`.
parse_upstream_data(<<?TLS_START, _/binary>> = AllData,
                    #state{stage = tls_hello, secret = Secret, codec = Codec0,
                           addr = {Ip, _}, listener = Listener} = S) when
      byte_size(AllData) >= (?TLS_CLIENT_HELLO_LEN + 5) ->
    assert_protocol(mtp_fake_tls),
    <<Data:(?TLS_CLIENT_HELLO_LEN + 5)/binary, Tail/binary>> = AllData,
    %% NOTE(review): `Response` (the fake-TLS ServerHello bytes) is bound
    %% but never sent to the client in this excerpt - confirm whether an
    %% up_send_raw(Response, S) call is missing here.
    {ok, Response, Meta, TlsCodec} = mtp_fake_tls:from_client_hello(Data, Secret),
    check_tls_policy(Listener, Ip, Meta),
    Codec1 = mtp_codec:replace(tls, true, TlsCodec, Codec0),
    Codec = mtp_codec:push_back(tls, Tail, Codec1),
    {ok, S#state{codec = Codec, stage = init,
                 policy_state = {ok, maps:get(sni_domain, Meta, undefined)}}};
parse_upstream_data(<<?TLS_START, _/binary>> = Data, #state{stage = init} = S) ->
    parse_upstream_data(Data, S#state{stage = tls_hello});
parse_upstream_data(<<Header:64/binary, Rest/binary>>,
                    #state{stage = init, secret = Secret, listener = Listener, codec = Codec0,
                           ad_tag = Tag, addr = {Ip, _} = Addr, policy_state = PState0,
                           sock = Sock, transport = Transport} = S) ->
    {TlsHandshakeDone, _} = mtp_codec:info(tls, Codec0),
    AllowedProtocols = allowed_protocols(),
    %% In tls-only mode a bare obfuscated header is a protocol error.
    (is_tls_only(AllowedProtocols) andalso not TlsHandshakeDone) andalso
        error({protocol_error, tls_client_hello_expected, Header}),
    case mtp_obfuscated:from_header(Header, Secret) of
        {ok, DcId, PacketLayerMod, CryptoCodecSt} ->
            maybe_check_replay(Header),
            {ProtoToReport, PState} =
                case TlsHandshakeDone of
                    true when PacketLayerMod == mtp_secure ->
                        %% Policy was already checked from the ClientHello.
                        {mtp_secure_fake_tls, PState0};
                    false ->
                        assert_protocol(PacketLayerMod, AllowedProtocols),
                        check_policy(Listener, Ip, undefined),
                        {PacketLayerMod, {ok, undefined}}
                end,
            mtp_metric:count_inc([?APP, protocol_ok, total],
                                 1, #{labels => [Listener, ProtoToReport]}),
            %% Optionally arm SO_LINGER(0) so a later close aborts the
            %% connection (configured via `reset_close_socket`).
            case application:get_env(?APP, reset_close_socket, off) of
                handshake_error ->
                    ok = Transport:setopts(Sock, [{linger, {false, 0}}]);
                _ ->
                    ok
            end,
            Codec1 = mtp_codec:replace(crypto, mtp_obfuscated, CryptoCodecSt, Codec0),
            PacketCodec = PacketLayerMod:new(),
            Codec2 = mtp_codec:replace(packet, PacketLayerMod, PacketCodec, Codec1),
            Codec = mtp_codec:push_back(crypto, Rest, Codec2),
            Opts = #{ad_tag => Tag,
                     addr => Addr},
            {RealDcId, Pool, Downstream} = mtp_config:get_downstream_safe(DcId, Opts),
            %% Handshake done: re-enter with the tunnel codec stack so any
            %% bytes that followed the header are processed immediately.
            handle_upstream_data(
              <<>>,
              switch_timer(
                S#state{down = Downstream,
                        dc_id = {RealDcId, Pool},
                        codec = Codec,
                        policy_state = PState,
                        stage = tunnel},
                hibernate));
        {error, Reason} when is_atom(Reason) ->
            mtp_metric:count_inc([?APP, protocol_error, total], 1, #{labels => [Listener, Reason]}),
            error({protocol_error, Reason, Header})
    end;
parse_upstream_data(Bin, #state{stage = Stage, codec = Codec0} = S) when Stage =/= tunnel ->
    Codec = mtp_codec:push_back(first, Bin, Codec0),
    {incomplete, S#state{codec = Codec}}.
%% Read the mandatory `allowed_protocols` application-environment entry;
%% crashes with a badmatch if it is unset.
allowed_protocols() ->
    {ok, AllowedProtocols} = application:get_env(?APP, allowed_protocols),
    AllowedProtocols.
%% True when the proxy is configured to accept only the fake-TLS
%% transport (the allowed-protocols list is exactly [mtp_fake_tls]).
is_tls_only(AllowedProtocols) ->
    AllowedProtocols =:= [mtp_fake_tls].
%% Crash with {protocol_error, disabled_protocol, _} unless Protocol is
%% enabled in the application environment (see allowed_protocols/0).
assert_protocol(Protocol) ->
    assert_protocol(Protocol, allowed_protocols()).
%% Return `true` when Protocol is in the allowed list; otherwise crash
%% with {protocol_error, disabled_protocol, Protocol} so the connection
%% handler terminates.
assert_protocol(Protocol, AllowedProtocols) ->
    case lists:member(Protocol, AllowedProtocols) of
        true  -> true;
        false -> error({protocol_error, disabled_protocol, Protocol})
    end.
%% Guard against session-replay attacks when the optional
%% `replay_check_session_storage` feature is switched on; a repeated
%% first packet crashes the handshake with replay_session_detected.
maybe_check_replay(Packet) ->
    %% Check for session replay attack: attempt to connect with the same
    %% 1st 64-byte packet.
    case application:get_env(?APP, replay_check_session_storage, off) of
        on ->
            (new == mtp_session_storage:check_add(Packet)) orelse
                error({protocol_error, replay_session_detected, Packet});
        off ->
            ok
    end.
%% Run the access policy against the SNI domain extracted from the TLS
%% ClientHello; a hello without SNI metadata is rejected outright.
check_tls_policy(Listener, Ip, #{sni_domain := TlsDomain}) ->
    %% TODO: validate timestamp!
    check_policy(Listener, Ip, TlsDomain);
check_tls_policy(_, Ip, Meta) ->
    error({protocol_error, tls_no_sni, {Ip, Meta}}).
%% Evaluate the configured access-policy rules for this listener /
%% client IP / (optional) SNI domain.  `ok` when no rule matches;
%% otherwise crash with the first violated rule so the connection is
%% dropped during the handshake.
check_policy(Listener, Ip, Domain) ->
    Rules = application:get_env(?APP, policy, []),
    case mtp_policy:check(Rules, Listener, Ip, Domain) of
        [] -> ok;
        [Rule | _] ->
            error({protocol_error, policy_error, {Rule, Listener, Ip, Domain}})
    end.
%% Encode one packet with the upstream codec stack and write it to the
%% client socket.  Only valid in `tunnel` stage (enforced by the
%% pattern match on the state).
up_send(Packet, #state{stage = tunnel, codec = UpCodec} = S) ->
    {Encoded, UpCodec1} = mtp_codec:encode_packet(Packet, UpCodec),
    ok = up_send_raw(Encoded, S),
    {ok, S#state{codec = UpCodec1}}.
%% Write already-encoded bytes to the client socket, timing the call and
%% counting sent bytes.  On a send error the error is counted (atoms
%% only, to keep the label set bounded) and logged, then the gen_server
%% is stopped by throwing {stop, normal, S}.
up_send_raw(Data, #state{sock = Sock,
                         transport = Transport,
                         listener = Listener} = S) ->
    mtp_metric:rt([?APP, upstream_send_duration, seconds],
                  fun() ->
                          case Transport:send(Sock, Data) of
                              ok ->
                                  mtp_metric:count_inc(
                                    [?APP, sent, upstream, bytes],
                                    iolist_size(Data), #{labels => [Listener]}),
                                  ok;
                              {error, Reason} ->
                                  is_atom(Reason) andalso
                                      mtp_metric:count_inc(
                                        [?APP, upstream_send_error, total], 1,
                                        #{labels => [Listener, Reason]}),
                                  ?log(warning, "Upstream send error: ~p", [Reason]),
                                  throw({stop, normal, S})
                          end
                  end, #{labels => [Listener]}).
%% Forward one decoded client packet to the downstream (DC-side)
%% connection.  If the downstream no longer knows this upstream, fall
%% through to the teardown path.
down_send(Packet, #state{down = Down} = S) ->
    case mtp_down_conn:send(Down, Packet) of
        ok ->
            {ok, S};
        {error, unknown_upstream} ->
            handle_unknown_upstream(S)
    end.
%% The downstream reported `unknown_upstream`: close the client socket
%% and stop this process, distinguishing an expected close from an
%% unexplained one.
handle_unknown_upstream(#state{down = Down, sock = USock, transport = UTrans} = S) ->
    %% We probably haven't yet consumed the downstream's 'close_ext'
    %% message. Most likely because of slow up_send.
    ok = UTrans:close(USock),
    receive
        {'$gen_cast', {close_ext, Down}} ->
            ?log(debug, "asked to close connection by downstream"),
            throw({stop, normal, S#state{down = undefined}})
    after 0 ->
            throw({stop, got_unknown_upstream, S})
    end.
%% Internal
%% Rate-limited self health check: at most once per
%% ?HEALTH_CHECK_INTERVAL ms inspect this process's queue length and
%% memory, stopping the server (reason `normal`) on overflow.  Returns a
%% gen_server handle_info-style result.
maybe_check_health(#state{last_queue_check = LastCheck} = S) ->
    NowMs = erlang:system_time(millisecond),
    Delta = NowMs - LastCheck,
    case Delta < ?HEALTH_CHECK_INTERVAL of
        true ->
            {noreply, S};
        false ->
            case check_health() of
                ok ->
                    {noreply, S#state{last_queue_check = NowMs}};
                overflow ->
                    {stop, normal, S}
            end
    end.
%% 1. If proc queue > qlen - stop
%% 2. If proc total memory > gc - do GC and go to 3
%% 3. If proc total memory > total_mem - stop
%% Run the configured (or default) sequence of per-process health checks
%% against a fresh measurement; returns `ok` or `overflow`.  `qlen` is a
%% message-queue length limit, `gc` and `total_mem` are byte limits.
check_health() ->
    Defaults = [{qlen, 300},
                {gc, 409600},
                {total_mem, 3145728}],
    Checks = application:get_env(?APP, upstream_healthchecks, Defaults),
    do_check_health(Checks, calc_health()).
%% Apply the health checks in order to the measurements produced by
%% calc_health/0.  Each check either reports `overflow` (caller stops
%% the process), forces a garbage collection and re-measures, or passes
%% and the remaining checks run.
do_check_health([{qlen, Limit} | _], #{message_queue_len := QLen} = Health) when QLen > Limit ->
    mtp_metric:count_inc([?APP, healthcheck, total], 1,
                         #{labels => [message_queue_len]}),
    ?log(warning, "Upstream too large queue_len=~w, health=~p", [QLen, Health]),
    overflow;
do_check_health([{gc, Limit} | Other], #{total_mem := TotalMem}) when TotalMem > Limit ->
    %% Maybe it doesn't make sense to do GC if queue len is more than, eg, 50?
    mtp_metric:count_inc([?APP, healthcheck, total], 1,
                         #{labels => [force_gc]}),
    erlang:garbage_collect(self()),
    do_check_health(Other, calc_health());
do_check_health([{total_mem, Limit} | _Other], #{total_mem := TotalMem} = Health) when
      TotalMem > Limit ->
    mtp_metric:count_inc([?APP, healthcheck, total], 1,
                         #{labels => [total_memory]}),
    ?log(warning, "Process too large total_mem=~p, health=~p", [TotalMem / 1024, Health]),
    overflow;
do_check_health([_Ok | Other], Health) ->
    do_check_health(Other, Health);
do_check_health([], _) ->
    ok.
%% Snapshot of this process's resource usage.  Off-heap refc binaries
%% are not included in process `memory`, so their refcount-weighted size
%% is added to produce `total_mem`.
calc_health() ->
    [{_, QLen}, {_, Mem}, {_, BinInfo}] =
        erlang:process_info(self(), [message_queue_len, memory, binary]),
    RefcBinSize = sum_binary(BinInfo),
    TotalMem = Mem + RefcBinSize,
    #{message_queue_len => QLen,
      memory => Mem,
      refc_bin_size => RefcBinSize,
      refc_bin_count => length(BinInfo),
      total_mem => TotalMem}.
%% Estimate this process's share of off-heap refc-binary memory: each
%% binary's size is divided by its refcount, so binaries shared between
%% processes are not over-counted.  Result is truncated to an integer.
sum_binary(BinInfo) ->
    trunc(lists:sum([Size / RefC || {_, Size, RefC} <- BinInfo])).
%% Render a binary as lowercase hex, one character per 4-bit nibble
%% ($0..$9 for 0-9; $W + N gives $a..$f for N >= 10).
hex(Bin) ->
    HexChar = fun(N) when N < 10 -> $0 + N;
                 (N) -> $W + N
              end,
    << <<(HexChar(N))>> || <<N:4>> <= Bin >>.
%% Inverse of hex/1: turn lowercase hex characters back into a binary,
%% packing one 4-bit nibble per character (the $W pivot mirrors hex/1:
%% characters below $W are digits, above are $a..$f).
unhex(Chars) ->
    Nibble = fun(C) ->
                     if C < $W -> C - $0;
                        C > $W -> C - $W
                     end
             end,
    << <<(Nibble(C)):4>> || <<C>> <= Chars >>.
|
3488c4c2a781a67c2005c449d1b1700a8f214f2527152b29cb55b058950f5f0f | kovasb/gamma-driver | plot.cljs | (ns gamma.webgl.plot
(:require
[gamma.api :as g]
[gamma.program :as p]
[gamma.webgl.routines.basic :as r]
[gamma.webgl.shader :as shader]
[gamma.webgl.drivers.basic :as driver]
[clojure.walk :as walk]))
;; GLSL varyings carrying the sample point's plot-space coordinates
;; from the vertex shader to the fragment shader.
(def x (g/varying "x" :float :mediump))
(def y (g/varying "y" :float :mediump))
;; Per-vertex attributes: `coords` is the plot-space rectangle being
;; sampled, `position` the clip-space quad that fills the canvas.
(def coords (g/attribute "coords" :vec2))
(def position (g/attribute "position" :vec2))
;; Symbol -> shader-AST translation table used by expr->ast: arithmetic
;; and comparison symbols map to gamma constructors, `x`/`y` to the
;; varyings above.  (Shadows clojure.core/subs in this namespace.)
;; NOTE(review): `t` is not defined anywhere in this file - confirm it
;; is provided elsewhere, otherwise this form will not compile.
(def subs
  {'+ g/+ '- g/- '* g/* '/ g/div 'pow g/pow 'cos g/cos 'sqrt g/sqrt
   'sin g/sin 'if g/if '== g/== '> g/> '< g/< 'x x 'y y 't t})
;; Compile a quoted math expression (e.g. '(+ x y)) into a gamma shader
;; AST: sequential forms become applications of their head, known
;; symbols are looked up in the `subs` table, and anything else passes
;; through unchanged.
(defn expr->ast [expr]
  (walk/postwalk
    (fn [form]
      (if (sequential? form)
        (apply (first form) (rest form))
        (get subs form form)))
    expr))
;; Map a scalar shader value to an RGBA color: positive values shade
;; green with intensity x, non-positive values shade blue with
;; intensity |x|; alpha is always 1.  Built from gamma AST nodes, so
;; the branch is evaluated per-fragment on the GPU.
(defn color [x]
  (g/if (g/< 0 x)
    (g/vec4 0 x 0 1)
    (g/vec4 0 0 (g/abs x) 1)))
;; Build the gamma program map for plotting `expr`: the vertex shader
;; forwards the plot-space coords as the x/y varyings and positions the
;; quad in clip space; the fragment shader evaluates the expression at
;; (x, y) and colors the pixel via `color`.
(defn expr-program [expr]
  {:id :plot
   :vertex-shader {x (g/swizzle coords :x)
                   y (g/swizzle coords :y)
                   (g/gl-position) (g/vec4 position 0 1)}
   :fragment-shader {(g/gl-frag-color) (color (expr->ast expr))}
   :precision {:float :mediump}})
;; Flatten an arbitrarily nested seq of numbers into a JS Float32Array,
;; the format WebGL buffer uploads expect.
(defn ->float32 [x]
  (js/Float32Array. (clj->js (flatten x))))
;; Look up the canvas DOM element with the given id and return its
;; WebGL rendering context.
(defn get-context [id]
  (.getContext (.getElementById js/document id) "webgl"))
;; Expand an axis-aligned rectangle [[left right] [bottom top]] into the
;; six vertices of the two triangles that cover it (triangle-list order).
(defn rect [[[left right] [bottom top]]]
  (let [bl [left bottom]
        br [right bottom]
        tl [left top]
        tr [right top]]
    [bl br tl tr br tl]))
;; Compile `expr` into a shader program, wire it to the "gl-canvas"
;; WebGL context, and draw one quad whose plot-space coordinates span
;; `range` ([[xmin xmax] [ymin ymax]]).
(defn plot
  ([expr range]
   (let [prog (assoc (shader/Shader. (p/program (expr-program expr))) :tag :shader)
         ops (r/draw [:root] prog)
         driver (driver/driver
                  {:gl (get-context "gl-canvas")}
                  ops)]
     ;; Two triangles (6 vertices): `position` fills clip space,
     ;; `coords` carries the requested plot-space rectangle.
     (driver/exec!
       driver
       {:plot {position (->float32 (rect [[-1 1] [-1 1]]))
               coords (->float32 (rect range))}
        :draw {:start 0 :count 6}}))))
;; Rich-comment block: REPL-only examples.  Evaluate forms one at a
;; time to render sample plots on the "gl-canvas" canvas, or print the
;; generated fragment-shader GLSL.
(comment
  (plot 'x [[-1 1] [-1 1]])
  (plot 'y [[-1 1] [-1 1]])
  (plot '(+ x y) [[-1 1] [-1 1]])
  ;; Inspect the GLSL emitted for an expression:
  (println
    (:glsl (:fragment-shader
             (p/program (expr-program '(+ x y))))))
  (plot '(+ (cos x) (cos y)) [[0 10] [-10 10]])
  (plot '(cos (* x y)) [[-5 5] [-5 5]])
  (plot '(sin (* x y)) [[-10 10] [-10 10]])
  (plot '(sin (/ (pow x 2) (pow y 2))) [[-10 10] [0 3]])
  (plot '(* (sin (/ x y)) (pow x 2)) [[-10 10] [-1 1]])
  (plot '(if (< (+ x y) 0) 1 -1) [[-10 10] [-10 10]])
  (plot '(if
           (< (sqrt
                (+ (pow x 2)
                   (pow y 2)))
              10) 1 0)
        [[-10 10] [-10 10]])
  (plot '(if
           (< (+ (pow x 2)
                 (pow y 3))
              2) 1 0)
        [[-2 2] [-2 2]])
  (expr->ast '(+ x y))
  )
(comment
(defn animation-state []
(atom {:updater #(assoc-in % [:plot t] )})
(defn animator [animation-state]
(let [x @animation-state]
(if-let [u (:updater x)]
(do
(if-let [d (:driver x)]
(driver/exec! (:driver x) (u (:data x))))
(js/requestAnimationFrame
(fn []
(animator animation-state))))))))) | null | https://raw.githubusercontent.com/kovasb/gamma-driver/abe0e1dd01365404342f4e8e04263e48c4648b6e/test/gamma/webgl/plot.cljs | clojure | (ns gamma.webgl.plot
(:require
[gamma.api :as g]
[gamma.program :as p]
[gamma.webgl.routines.basic :as r]
[gamma.webgl.shader :as shader]
[gamma.webgl.drivers.basic :as driver]
[clojure.walk :as walk]))
;; GLSL varyings carrying the sample point's plot-space coordinates
;; from the vertex shader to the fragment shader.
(def x (g/varying "x" :float :mediump))
(def y (g/varying "y" :float :mediump))
;; Per-vertex attributes: `coords` is the plot-space rectangle being
;; sampled, `position` the clip-space quad that fills the canvas.
(def coords (g/attribute "coords" :vec2))
(def position (g/attribute "position" :vec2))
;; Symbol -> shader-AST translation table used by expr->ast: arithmetic
;; and comparison symbols map to gamma constructors, `x`/`y` to the
;; varyings above.  (Shadows clojure.core/subs in this namespace.)
;; NOTE(review): `t` is not defined anywhere in this file - confirm it
;; is provided elsewhere, otherwise this form will not compile.
(def subs
  {'+ g/+ '- g/- '* g/* '/ g/div 'pow g/pow 'cos g/cos 'sqrt g/sqrt
   'sin g/sin 'if g/if '== g/== '> g/> '< g/< 'x x 'y y 't t})
;; Compile a quoted math expression (e.g. '(+ x y)) into a gamma shader
;; AST: sequential forms become applications of their head, known
;; symbols are looked up in the `subs` table, and anything else passes
;; through unchanged.
(defn expr->ast [expr]
  (walk/postwalk
    (fn [form]
      (if (sequential? form)
        (apply (first form) (rest form))
        (get subs form form)))
    expr))
;; Map a scalar shader value to an RGBA color: positive values shade
;; green with intensity x, non-positive values shade blue with
;; intensity |x|; alpha is always 1.  Built from gamma AST nodes, so
;; the branch is evaluated per-fragment on the GPU.
(defn color [x]
  (g/if (g/< 0 x)
    (g/vec4 0 x 0 1)
    (g/vec4 0 0 (g/abs x) 1)))
;; Build the gamma program map for plotting `expr`: the vertex shader
;; forwards the plot-space coords as the x/y varyings and positions the
;; quad in clip space; the fragment shader evaluates the expression at
;; (x, y) and colors the pixel via `color`.
(defn expr-program [expr]
  {:id :plot
   :vertex-shader {x (g/swizzle coords :x)
                   y (g/swizzle coords :y)
                   (g/gl-position) (g/vec4 position 0 1)}
   :fragment-shader {(g/gl-frag-color) (color (expr->ast expr))}
   :precision {:float :mediump}})
;; Flatten an arbitrarily nested seq of numbers into a JS Float32Array,
;; the format WebGL buffer uploads expect.
(defn ->float32 [x]
  (js/Float32Array. (clj->js (flatten x))))
;; Look up the canvas DOM element with the given id and return its
;; WebGL rendering context.
(defn get-context [id]
  (.getContext (.getElementById js/document id) "webgl"))
;; Expand an axis-aligned rectangle [[left right] [bottom top]] into the
;; six vertices of the two triangles that cover it (triangle-list order).
(defn rect [[[left right] [bottom top]]]
  (let [bl [left bottom]
        br [right bottom]
        tl [left top]
        tr [right top]]
    [bl br tl tr br tl]))
;; Compile `expr` into a shader program, wire it to the "gl-canvas"
;; WebGL context, and draw one quad whose plot-space coordinates span
;; `range` ([[xmin xmax] [ymin ymax]]).
(defn plot
  ([expr range]
   (let [prog (assoc (shader/Shader. (p/program (expr-program expr))) :tag :shader)
         ops (r/draw [:root] prog)
         driver (driver/driver
                  {:gl (get-context "gl-canvas")}
                  ops)]
     ;; Two triangles (6 vertices): `position` fills clip space,
     ;; `coords` carries the requested plot-space rectangle.
     (driver/exec!
       driver
       {:plot {position (->float32 (rect [[-1 1] [-1 1]]))
               coords (->float32 (rect range))}
        :draw {:start 0 :count 6}}))))
;; Rich-comment block: REPL-only examples.  Evaluate forms one at a
;; time to render sample plots on the "gl-canvas" canvas, or print the
;; generated fragment-shader GLSL.
(comment
  (plot 'x [[-1 1] [-1 1]])
  (plot 'y [[-1 1] [-1 1]])
  (plot '(+ x y) [[-1 1] [-1 1]])
  ;; Inspect the GLSL emitted for an expression:
  (println
    (:glsl (:fragment-shader
             (p/program (expr-program '(+ x y))))))
  (plot '(+ (cos x) (cos y)) [[0 10] [-10 10]])
  (plot '(cos (* x y)) [[-5 5] [-5 5]])
  (plot '(sin (* x y)) [[-10 10] [-10 10]])
  (plot '(sin (/ (pow x 2) (pow y 2))) [[-10 10] [0 3]])
  (plot '(* (sin (/ x y)) (pow x 2)) [[-10 10] [-1 1]])
  (plot '(if (< (+ x y) 0) 1 -1) [[-10 10] [-10 10]])
  (plot '(if
           (< (sqrt
                (+ (pow x 2)
                   (pow y 2)))
              10) 1 0)
        [[-10 10] [-10 10]])
  (plot '(if
           (< (+ (pow x 2)
                 (pow y 3))
              2) 1 0)
        [[-2 2] [-2 2]])
  (expr->ast '(+ x y))
  )
;; Rich-comment sketch of an animation loop (never evaluated).
;; NOTE(review): this sketch is unfinished - `#(assoc-in % [:plot t])`
;; is missing its value argument, and `animation-state`'s closing paren
;; swallows the `animator` definition; confirm intent before reviving.
(comment
  (defn animation-state []
    (atom {:updater #(assoc-in % [:plot t] )})
    (defn animator [animation-state]
      (let [x @animation-state]
        (if-let [u (:updater x)]
          (do
            (if-let [d (:driver x)]
              (driver/exec! (:driver x) (u (:data x))))
            (js/requestAnimationFrame
              (fn []
                (animator animation-state)))))))))
ece9305ba3171955e6004476e79a6b58ee4a53f5e7d858ede82b7748c6398732 | typeclasses/leanpub | createCoupons.hs | {-# LANGUAGE OverloadedStrings #-}
import Leanpub.Concepts
import Leanpub.Wreq
import Data.Text.IO (appendFile)
import Prelude hiding (appendFile)
-- Read the API key from a file
config = configKeyFile "/home/chris/.config/typeclasses/leanpub-api-key.txt"
-- Which book we're creating coupons for
slug = BookSlug "finding-success-in-haskell"
-- A note reminding us why the coupon was issued
note = CouponNote "Free for Type Classes subscriber"
-- Each coupon can be used at most twice
uses = CouponMaxUses 2
-- After creating each coupon, append it to a file
save (CouponCode x) =
appendFile "/home/chris/typeclasses/coupons.txt" (x <> "\n")
main = runLeanpub config
(createManyFreeBookCoupons save 200 slug uses (Just note))
| null | https://raw.githubusercontent.com/typeclasses/leanpub/d7c70148bb2bf3acf0500a5b7f1e014138af5585/examples/createCoupons.hs | haskell | # LANGUAGE OverloadedStrings #
Read the API key from a file
Which book we're creating coupons for
A note reminding us why the coupon was issued
Each coupon can be used at most twice
After creating each coupon, append it to a file |
import Leanpub.Concepts
import Leanpub.Wreq
import Data.Text.IO (appendFile)
import Prelude hiding (appendFile)
config = configKeyFile "/home/chris/.config/typeclasses/leanpub-api-key.txt"
slug = BookSlug "finding-success-in-haskell"
note = CouponNote "Free for Type Classes subscriber"
uses = CouponMaxUses 2
save (CouponCode x) =
appendFile "/home/chris/typeclasses/coupons.txt" (x <> "\n")
main = runLeanpub config
(createManyFreeBookCoupons save 200 slug uses (Just note))
|
8f6bc62667d2753813f09aff7e4a1b6d9fa65c67f6b021cf138db23b57047fa4 | CryptoKami/cryptokami-core | PriorityLock.hs | {-|
Module: Pos.Util.Concurrent.PriorityLock
Description: Provides a prioritised lock
Provides a lock that can be taken with either high or low precedence.
Within each precedence, the lock is taken in FIFO order.
-}
module Pos.Util.Concurrent.PriorityLock
( PriorityLock
, Priority (..)
, newPriorityLock
, withPriorityLock
) where
import Control.Concurrent.STM (TMVar, newEmptyTMVar, putTMVar, takeTMVar)
import Universum
import Pos.Util.Queue (Q, dequeue, enqueue, queue)
newtype PriorityLock = PriorityLock (TVar PriorityLockState)
data PriorityLockState
= Unlocked
| Locked (Q (TMVar ())) (Q (TMVar ()))
-- ^ locked, with a queue of contenders with high precedence, and
a second queue with contenders of low precedence
data Priority = HighPriority
| LowPriority
newPriorityLock :: MonadIO m => m PriorityLock
newPriorityLock = liftIO $ PriorityLock <$> newTVarIO Unlocked
lockP :: MonadIO m => PriorityLock -> Priority -> m ()
lockP (PriorityLock vstate) prio = do
mbwait <- atomically $ do
readTVar vstate >>= \case
Unlocked -> do
-- uncontended, acquire lock, no one is waiting on the lock
writeTVar vstate (Locked (queue []) (queue []))
return Nothing
Locked hwaiters lwaiters -> do
-- contended, put ourselves on the appropriate queue
waitvar <- newEmptyTMVar
case prio of
HighPriority ->
writeTVar vstate $ Locked (enqueue hwaiters waitvar) lwaiters
LowPriority ->
writeTVar vstate $ Locked hwaiters (enqueue lwaiters waitvar)
return (Just waitvar)
case mbwait of
Nothing ->
-- the lock was uncontended, we hold it now
return ()
Just waitvar ->
-- lock was contended, so we have to wait
atomically $ takeTMVar waitvar
-- we hold it now
unlockP :: MonadIO m => PriorityLock -> m ()
unlockP (PriorityLock vstate) =
atomically $ readTVar vstate >>= \case
Unlocked -> error "Pos.Util.PriorityLock.unlockP: lock is already unlocked"
Locked hwaiters lwaiters
-- dequeue from the high priority waiters if possible
| Just (waiter, hwaiters') <- dequeue hwaiters -> do
writeTVar vstate (Locked hwaiters' lwaiters)
putTMVar waiter ()
-- dequeue from the low priority waiters
| Just (waiter, lwaiters') <- dequeue lwaiters -> do
writeTVar vstate (Locked hwaiters lwaiters')
putTMVar waiter ()
-- no one is waiting on the lock, so it will be unlocked now
| otherwise ->
writeTVar vstate Unlocked
withPriorityLock
:: (MonadMask m, MonadIO m)
=> PriorityLock -> Priority -> m a -> m a
withPriorityLock l prio = bracket_ (lockP l prio) (unlockP l)
| null | https://raw.githubusercontent.com/CryptoKami/cryptokami-core/12ca60a9ad167b6327397b3b2f928c19436ae114/util/Pos/Util/Concurrent/PriorityLock.hs | haskell | |
Module: Pos.Util.Concurrent.PriorityLock
Description: Provides a prioritised lock
Provides a lock that can be taken with either high or low precedence.
Within each precedence, the lock is taken in FIFO order.
^ locked, with a queue of contenders with high precedence, and
uncontended, acquire lock, no one is waiting on the lock
contended, put ourselves on the appropriate queue
the lock was uncontended, we hold it now
lock was contended, so we have to wait
we hold it now
dequeue from the high priority waiters if possible
dequeue from the low priority waiters
no one is waiting on the lock, so it will be unlocked now |
module Pos.Util.Concurrent.PriorityLock
( PriorityLock
, Priority (..)
, newPriorityLock
, withPriorityLock
) where
import Control.Concurrent.STM (TMVar, newEmptyTMVar, putTMVar, takeTMVar)
import Universum
import Pos.Util.Queue (Q, dequeue, enqueue, queue)
newtype PriorityLock = PriorityLock (TVar PriorityLockState)
data PriorityLockState
= Unlocked
| Locked (Q (TMVar ())) (Q (TMVar ()))
a second queue with contenders of low precedence
data Priority = HighPriority
| LowPriority
newPriorityLock :: MonadIO m => m PriorityLock
newPriorityLock = liftIO $ PriorityLock <$> newTVarIO Unlocked
lockP :: MonadIO m => PriorityLock -> Priority -> m ()
lockP (PriorityLock vstate) prio = do
mbwait <- atomically $ do
readTVar vstate >>= \case
Unlocked -> do
writeTVar vstate (Locked (queue []) (queue []))
return Nothing
Locked hwaiters lwaiters -> do
waitvar <- newEmptyTMVar
case prio of
HighPriority ->
writeTVar vstate $ Locked (enqueue hwaiters waitvar) lwaiters
LowPriority ->
writeTVar vstate $ Locked hwaiters (enqueue lwaiters waitvar)
return (Just waitvar)
case mbwait of
Nothing ->
return ()
Just waitvar ->
atomically $ takeTMVar waitvar
unlockP :: MonadIO m => PriorityLock -> m ()
unlockP (PriorityLock vstate) =
atomically $ readTVar vstate >>= \case
Unlocked -> error "Pos.Util.PriorityLock.unlockP: lock is already unlocked"
Locked hwaiters lwaiters
| Just (waiter, hwaiters') <- dequeue hwaiters -> do
writeTVar vstate (Locked hwaiters' lwaiters)
putTMVar waiter ()
| Just (waiter, lwaiters') <- dequeue lwaiters -> do
writeTVar vstate (Locked hwaiters lwaiters')
putTMVar waiter ()
| otherwise ->
writeTVar vstate Unlocked
withPriorityLock
:: (MonadMask m, MonadIO m)
=> PriorityLock -> Priority -> m a -> m a
withPriorityLock l prio = bracket_ (lockP l prio) (unlockP l)
|
cfd629edd4581e3b34f246499fdbd3ff923fe0bc3ca0227f71a76d9768178dc6 | softwarelanguageslab/maf | R5RS_WeiChenRompf2019_the-little-schemer_ch1-2.scm | ; Changes:
* removed : 2
* added : 0
* swaps : 1
; * negated predicates: 0
; * swapped branches: 0
* calls to i d fun : 1
(letrec ((atom? (lambda (x)
(if (not (pair? x)) (not (null? x)) #f))))
(atom? 'atom)
(atom? 'turkey)
(atom? 1942)
(atom? 'u)
(atom? '*abc$)
(<change>
(list? (__toplevel_cons 'atom ()))
((lambda (x) x) (list? (__toplevel_cons 'atom ()))))
(list? (__toplevel_cons 'atom (__toplevel_cons 'turkey (__toplevel_cons 'or ()))))
(list?
(__toplevel_cons (__toplevel_cons 'atom (__toplevel_cons 'turkey ())) (__toplevel_cons 'or ())))
(list? ())
(atom? ())
(car (__toplevel_cons 'a (__toplevel_cons 'b (__toplevel_cons 'c ()))))
(<change>
(car
(__toplevel_cons
(__toplevel_cons 'a (__toplevel_cons 'b (__toplevel_cons 'c ())))
(__toplevel_cons 'x (__toplevel_cons 'y (__toplevel_cons 'z ())))))
())
(cdr (__toplevel_cons 'a (__toplevel_cons 'b (__toplevel_cons 'c ()))))
(<change>
(cdr
(__toplevel_cons
(__toplevel_cons 'a (__toplevel_cons 'b (__toplevel_cons 'c ())))
(__toplevel_cons 'x (__toplevel_cons 'y (__toplevel_cons 'z ())))))
())
(<change>
(cons 'peanut (__toplevel_cons 'butter (__toplevel_cons 'and (__toplevel_cons 'jelly ()))))
(cons
(__toplevel_cons 'banana (__toplevel_cons 'and ()))
(__toplevel_cons
'peanut
(__toplevel_cons 'butter (__toplevel_cons 'and (__toplevel_cons 'jelly ()))))))
(<change>
(cons
(__toplevel_cons 'banana (__toplevel_cons 'and ()))
(__toplevel_cons
'peanut
(__toplevel_cons 'butter (__toplevel_cons 'and (__toplevel_cons 'jelly ())))))
(cons 'peanut (__toplevel_cons 'butter (__toplevel_cons 'and (__toplevel_cons 'jelly ())))))
(null? ())) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_WeiChenRompf2019_the-little-schemer_ch1-2.scm | scheme | Changes:
* negated predicates: 0
* swapped branches: 0 | * removed : 2
* added : 0
* swaps : 1
* calls to i d fun : 1
(letrec ((atom? (lambda (x)
(if (not (pair? x)) (not (null? x)) #f))))
(atom? 'atom)
(atom? 'turkey)
(atom? 1942)
(atom? 'u)
(atom? '*abc$)
(<change>
(list? (__toplevel_cons 'atom ()))
((lambda (x) x) (list? (__toplevel_cons 'atom ()))))
(list? (__toplevel_cons 'atom (__toplevel_cons 'turkey (__toplevel_cons 'or ()))))
(list?
(__toplevel_cons (__toplevel_cons 'atom (__toplevel_cons 'turkey ())) (__toplevel_cons 'or ())))
(list? ())
(atom? ())
(car (__toplevel_cons 'a (__toplevel_cons 'b (__toplevel_cons 'c ()))))
(<change>
(car
(__toplevel_cons
(__toplevel_cons 'a (__toplevel_cons 'b (__toplevel_cons 'c ())))
(__toplevel_cons 'x (__toplevel_cons 'y (__toplevel_cons 'z ())))))
())
(cdr (__toplevel_cons 'a (__toplevel_cons 'b (__toplevel_cons 'c ()))))
(<change>
(cdr
(__toplevel_cons
(__toplevel_cons 'a (__toplevel_cons 'b (__toplevel_cons 'c ())))
(__toplevel_cons 'x (__toplevel_cons 'y (__toplevel_cons 'z ())))))
())
(<change>
(cons 'peanut (__toplevel_cons 'butter (__toplevel_cons 'and (__toplevel_cons 'jelly ()))))
(cons
(__toplevel_cons 'banana (__toplevel_cons 'and ()))
(__toplevel_cons
'peanut
(__toplevel_cons 'butter (__toplevel_cons 'and (__toplevel_cons 'jelly ()))))))
(<change>
(cons
(__toplevel_cons 'banana (__toplevel_cons 'and ()))
(__toplevel_cons
'peanut
(__toplevel_cons 'butter (__toplevel_cons 'and (__toplevel_cons 'jelly ())))))
(cons 'peanut (__toplevel_cons 'butter (__toplevel_cons 'and (__toplevel_cons 'jelly ())))))
(null? ())) |
7d5932f12ffce851975e247d0f093d4d87fea639c41a88c9bab4760aa5472ccd | TorXakis/TorXakis | TxsUtils.hs |
TorXakis - Model Based Testing
Copyright ( c ) 2015 - 2017 TNO and Radboud University
See LICENSE at root directory of this repository .
TorXakis - Model Based Testing
Copyright (c) 2015-2017 TNO and Radboud University
See LICENSE at root directory of this repository.
-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE OverloadedStrings #-}
module TxsUtils
-- ----------------------------------------------------------------------------------------- --
-- --
Some Utilities for TxsDefs
-- --
-- ----------------------------------------------------------------------------------------- --
where
import qualified Data.Map as Map
import qualified Data.Set as Set
import CstrId
import qualified FreeMonoidX as FMX
import FuncDef
import FuncId
import Name
import SortId
import StdTDefs
import TxsDefs
import ValExpr
import Variable
import VarId
-- ----------------------------------------------------------------------------------------- --
-- identifiers: signatures, binding
sig :: Ident -> Ident
sig ( IdSort (SortId nm _uid ) ) = IdSort (SortId nm 0 )
sig ( IdCstr (CstrId nm _uid ca cs ) ) = IdCstr (CstrId nm 0 ca cs )
sig ( IdFunc (FuncId nm _uid fa fs ) ) = IdFunc (FuncId nm 0 fa fs )
sig ( IdProc (ProcId nm _uid pc pv pe ) ) = IdProc (ProcId nm 0 pc pv pe )
sig ( IdChan (ChanId nm _uid cs ) ) = IdChan (ChanId nm 0 cs )
sig ( IdVar (VarId nm _uid vs ) ) = IdVar (VarId nm 0 vs )
sig ( IdStat (StatId nm _uid pid ) ) = IdStat (StatId nm 0 pid )
sig ( IdModel (ModelId nm _uid ) ) = IdModel (ModelId nm 0 )
sig ( IdPurp (PurpId nm _uid ) ) = IdPurp (PurpId nm 0 )
sig ( IdGoal (GoalId nm _uid ) ) = IdGoal (GoalId nm 0 )
sig ( IdMapper (MapperId nm _uid ) ) = IdMapper (MapperId nm 0 )
sig ( IdCnect (CnectId nm _uid ) ) = IdCnect (CnectId nm 0 )
doubles :: Eq a => [a] -> [a]
doubles [] = []
doubles (x:xs) = if x `elem` xs then x:doubles xs else doubles xs
bindOnName :: Name -> [Ident] -> [Ident]
bindOnName nm = filter (\i -> TxsDefs.name i == nm)
bindOnSig :: Ident -> [Ident] -> [Ident]
bindOnSig i = filter (\d -> sig d == sig i)
bindOnUnid :: Int -> [Ident] -> [Ident]
bindOnUnid uid = filter (\i -> TxsDefs.unid i == uid)
-- ----------------------------------------------------------------------------------------- --
-- scopeMerge globals locals: merge globals and locals; locals take prededence
scopeMerge :: [Ident] -> [Ident] -> [Ident]
scopeMerge [] ls = ls
scopeMerge (g:gs) ls = if sig g `elem` map sig ls
then scopeMerge gs ls
else scopeMerge gs (g:ls)
-- ----------------------------------------------------------------------------------------- --
combineWEnv : combine Walue Environments ; where second takes precedence
combineWEnv :: (Variable v) => WEnv v -> WEnv v -> WEnv v
combineWEnv we1 we2
= let we1' = Map.toList we1
we2' = Map.toList we2
in Map.fromList $ [ (vid1,wal1) | (vid1,wal1) <- we1', vid1 `Map.notMember` we2 ]
++ we2'
-- ----------------------------------------------------------------------------------------- --
check use of functions for use in SMT :
-- to/fromString to/fromXml takeWhile takeWhileNot dropWhile dropWhileNot
-- and transitive closure
checkENDECdefs :: TxsDefs -> [FuncId]
checkENDECdefs tdefs
= Set.toList $ Set.unions $ map (checkENDECdef tdefs) (TxsDefs.elems tdefs)
checkENDECdef :: TxsDefs -> TxsDef -> Set.Set FuncId
checkENDECdef tdefs tdef
= let endecs = allENDECfuncs tdefs
in case tdef of
{ DefProc (ProcDef _ _ bexp) -> Set.fromList (usedFids bexp) `Set.intersection` endecs
; DefModel (ModelDef _ _ _ bexp) -> Set.fromList (usedFids bexp) `Set.intersection` endecs
; DefPurp (PurpDef _ _ _ gls) -> Set.unions (map (Set.fromList . usedFids . snd) gls)
`Set.intersection` endecs
; DefMapper (MapperDef _ _ _ bexp) -> Set.fromList (usedFids bexp) `Set.intersection` endecs
; _ -> Set.empty
}
-- ----------------------------------------------------------------------------------------- --
baseENDECfuncs :: TxsDefs -> Set.Set FuncId
baseENDECfuncs tdefs
= Set.fromList $ funcIdtakeWhile : funcIdtakeWhileNot
: funcIddropWhile : funcIddropWhileNot
: [ fid
| fid@FuncId{ FuncId.name = nm } <- Map.keys (funcDefs tdefs)
, (nm == "toString") || (nm == "fromString") ||
(nm == "toXml") || (nm == "fromXml")
]
allENDECfuncs :: TxsDefs -> Set.Set FuncId
allENDECfuncs tdefs
= Set.fromList [ fid
| fid <- Map.keys (funcDefs tdefs)
, not $ Set.null $ funcCallsClosure tdefs (Set.singleton fid)
`Set.intersection` baseENDECfuncs tdefs
]
funcCallsClosure :: TxsDefs -> Set.Set FuncId -> Set.Set FuncId
funcCallsClosure tdefs fids
= let newcalls = Set.unions $ map (funcCalls tdefs) (Set.toList fids)
in if newcalls `Set.isSubsetOf` fids
then fids
else funcCallsClosure tdefs $ fids `Set.union` newcalls
funcCalls :: TxsDefs -> FuncId -> Set.Set FuncId
funcCalls tdefs fid
= case Map.lookup fid (funcDefs tdefs) of
{ Just (FuncDef _vids vexp) -> Set.fromList $ usedFids vexp
; _ -> Set.empty
}
-- ----------------------------------------------------------------------------------------- --
class UsedFids t
where
usedFids :: t -> [FuncId]
instance UsedFids BExpr
where
usedFids = usedFids . TxsDefs.view
instance UsedFids BExprView
where
usedFids (ActionPref actoff bexp) = usedFids actoff ++ usedFids bexp
usedFids (Guard vexps bexp) = usedFids vexps ++ usedFids bexp
usedFids (Choice bexps) = usedFids bexps
usedFids (Parallel _chids bexps) = usedFids bexps
usedFids (Enable bexp1 choffs bexp2) = usedFids bexp1 ++ usedFids choffs
++ usedFids bexp2
usedFids (Disable bexp1 bexp2) = usedFids bexp1 ++ usedFids bexp2
usedFids (Interrupt bexp1 bexp2) = usedFids bexp1 ++ usedFids bexp2
usedFids (ProcInst _pid _chids vexps) = usedFids vexps
usedFids (Hide _chids bexp) = usedFids bexp
usedFids (ValueEnv ve bexp) = usedFids (Map.elems ve) ++ usedFids bexp
usedFids (StAut _stid ve transs) = usedFids (Map.elems ve) ++ usedFids transs
instance UsedFids Trans
where
usedFids (Trans _fr actoff upd _to) = usedFids actoff ++ usedFids (Map.elems upd)
instance UsedFids ActOffer
where
usedFids (ActOffer offs _hidvars cnrs) = usedFids (concatMap chanoffers (Set.toList offs))
++ usedFids cnrs
instance UsedFids ChanOffer
where
usedFids (Quest _vid) = []
usedFids (Exclam vexp) = usedFids vexp
instance UsedFids VExpr
where
usedFids = usedFids . ValExpr.view
instance UsedFids (ValExprView VarId)
where
usedFids (Vfunc fid vexps) = fid : usedFids vexps
usedFids (Vcstr _cid vexps) = usedFids vexps
usedFids (Viscstr _cid vexp) = usedFids vexp
usedFids (Vaccess _cid _n _p vexp) = usedFids vexp
usedFids (Vconst _const) = []
usedFids (Vvar _v) = []
usedFids (Vite cond tb fb) = usedFids [cond, tb, fb]
usedFids (Vsum s) = concatMap usedFids (FMX.distinctTermsT s)
usedFids (Vproduct p) = concatMap usedFids (FMX.distinctTermsT p)
usedFids (Vdivide t n) = usedFids t ++ usedFids n
usedFids (Vmodulo t n) = usedFids t ++ usedFids n
usedFids (Vgez v) = usedFids v
usedFids (Vequal vexp1 vexp2) = usedFids vexp1 ++ usedFids vexp2
usedFids (Vand vexps) = concatMap usedFids (Set.toList vexps)
usedFids (Vnot vexp) = usedFids vexp
usedFids (Vlength vexp) = usedFids vexp
usedFids (Vat s p) = usedFids s ++ usedFids p
usedFids (Vconcat vexps) = concatMap usedFids vexps
usedFids (Vstrinre s r) = usedFids s ++ usedFids r
usedFids (Vpredef _k fid vexps) = fid : usedFids vexps
instance (UsedFids t) => UsedFids [t]
where
usedFids = concatMap usedFids
instance (UsedFids t) => UsedFids (Set.Set t)
where
usedFids = concatMap usedFids . Set.toList | null | https://raw.githubusercontent.com/TorXakis/TorXakis/038463824b3d358df6b6b3ff08732335b7dbdb53/sys/defs/src/TxsUtils.hs | haskell | # LANGUAGE OverloadedStrings #
----------------------------------------------------------------------------------------- --
--
--
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
identifiers: signatures, binding
----------------------------------------------------------------------------------------- --
scopeMerge globals locals: merge globals and locals; locals take prededence
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- --
to/fromString to/fromXml takeWhile takeWhileNot dropWhile dropWhileNot
and transitive closure
----------------------------------------------------------------------------------------- --
----------------------------------------------------------------------------------------- -- |
TorXakis - Model Based Testing
Copyright ( c ) 2015 - 2017 TNO and Radboud University
See LICENSE at root directory of this repository .
TorXakis - Model Based Testing
Copyright (c) 2015-2017 TNO and Radboud University
See LICENSE at root directory of this repository.
-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
module TxsUtils
Some Utilities for TxsDefs
where
import qualified Data.Map as Map
import qualified Data.Set as Set
import CstrId
import qualified FreeMonoidX as FMX
import FuncDef
import FuncId
import Name
import SortId
import StdTDefs
import TxsDefs
import ValExpr
import Variable
import VarId
-- | Signature of an identifier: the identifier with its unique id reset
--   to 0.  Two identifiers with the same name and the same structural
--   fields then compare equal, regardless of where they were declared.
sig :: Ident -> Ident
sig ( IdSort (SortId nm _uid ) ) = IdSort (SortId nm 0 )
sig ( IdCstr (CstrId nm _uid ca cs ) ) = IdCstr (CstrId nm 0 ca cs )
sig ( IdFunc (FuncId nm _uid fa fs ) ) = IdFunc (FuncId nm 0 fa fs )
sig ( IdProc (ProcId nm _uid pc pv pe ) ) = IdProc (ProcId nm 0 pc pv pe )
sig ( IdChan (ChanId nm _uid cs ) ) = IdChan (ChanId nm 0 cs )
sig ( IdVar (VarId nm _uid vs ) ) = IdVar (VarId nm 0 vs )
sig ( IdStat (StatId nm _uid pid ) ) = IdStat (StatId nm 0 pid )
sig ( IdModel (ModelId nm _uid ) ) = IdModel (ModelId nm 0 )
sig ( IdPurp (PurpId nm _uid ) ) = IdPurp (PurpId nm 0 )
sig ( IdGoal (GoalId nm _uid ) ) = IdGoal (GoalId nm 0 )
sig ( IdMapper (MapperId nm _uid ) ) = IdMapper (MapperId nm 0 )
sig ( IdCnect (CnectId nm _uid ) ) = IdCnect (CnectId nm 0 )
-- | One witness for every duplicated occurrence: an element is kept
--   whenever it occurs again later in the list, so an element appearing
--   k times contributes k-1 entries to the result.
doubles :: Eq a => [a] -> [a]
doubles []     = []
doubles (x:xs)
    | x `elem` xs = x : doubles xs
    | otherwise   = doubles xs
-- | Select the identifiers with exactly the given name.
bindOnName :: Name -> [Ident] -> [Ident]
bindOnName nm = filter (\i -> TxsDefs.name i == nm)

-- | Select the identifiers whose signature (name + structure, unique id
--   ignored, cf. 'sig') matches that of the given identifier.
bindOnSig :: Ident -> [Ident] -> [Ident]
bindOnSig i = filter (\d -> sig d == sig i)

-- | Select the identifiers with the given unique id.
bindOnUnid :: Int -> [Ident] -> [Ident]
bindOnUnid uid = filter (\i -> TxsDefs.unid i == uid)
-- | Merge global identifiers into local ones; a global is discarded when
--   an identifier with the same signature is already present, so locals
--   take precedence over globals.
scopeMerge :: [Ident] -> [Ident] -> [Ident]
scopeMerge []     ls = ls
scopeMerge (g:gs) ls
    | sig g `elem` map sig ls = scopeMerge gs ls
    | otherwise               = scopeMerge gs (g:ls)
-- combineWEnv : combine Walue Environments ; where second takes precedence
-- | Combine two walue environments into one; on variables bound in both,
--   the binding from the second environment takes precedence.
combineWEnv :: (Variable v) => WEnv v -> WEnv v -> WEnv v
combineWEnv we1 we2 = Map.union we2 we1
    -- 'Map.union' is left-biased: on duplicate keys the entry of its
    -- first argument (we2) wins, which is exactly the precedence the
    -- previous hand-rolled toList/fromList construction implemented,
    -- without rebuilding both maps through intermediate lists.
-- check use of functions for use in SMT :
-- | All function identifiers, used anywhere in the definitions, that
--   (transitively) depend on an encoding/decoding function; see
--   'checkENDECdef'.  Such functions cannot be handed to the SMT solver.
checkENDECdefs :: TxsDefs -> [FuncId]
checkENDECdefs tdefs
  = Set.toList $ Set.unions $ map (checkENDECdef tdefs) (TxsDefs.elems tdefs)
-- | ENDEC-dependent functions used in one definition: the functions
--   occurring in the definition's behaviour expression(s), intersected
--   with the set of all ENDEC functions.  Definitions without behaviour
--   contribute the empty set.
checkENDECdef :: TxsDefs -> TxsDef -> Set.Set FuncId
checkENDECdef tdefs tdef
  = let endecs = allENDECfuncs tdefs
     in case tdef of
        { DefProc (ProcDef _ _ bexp) -> Set.fromList (usedFids bexp) `Set.intersection` endecs
        ; DefModel (ModelDef _ _ _ bexp) -> Set.fromList (usedFids bexp) `Set.intersection` endecs
        ; DefPurp (PurpDef _ _ _ gls) -> Set.unions (map (Set.fromList . usedFids . snd) gls)
                                         `Set.intersection` endecs
        ; DefMapper (MapperDef _ _ _ bexp) -> Set.fromList (usedFids bexp) `Set.intersection` endecs
        ; _ -> Set.empty
        }
-- | The base encoding/decoding functions: the standard string helpers
--   takeWhile/takeWhileNot/dropWhile/dropWhileNot, plus every user
--   function named toString, fromString, toXml, or fromXml.
baseENDECfuncs :: TxsDefs -> Set.Set FuncId
baseENDECfuncs tdefs
  = Set.fromList $ funcIdtakeWhile : funcIdtakeWhileNot
                 : funcIddropWhile : funcIddropWhileNot
                 : [ fid
                   | fid@FuncId{ FuncId.name = nm } <- Map.keys (funcDefs tdefs)
                   , (nm == "toString") || (nm == "fromString") ||
                     (nm == "toXml") || (nm == "fromXml")
                   ]
-- | All functions whose transitive call closure reaches one of the base
--   ENDEC functions ('baseENDECfuncs'); such functions cannot be
--   translated for the SMT solver.
allENDECfuncs :: TxsDefs -> Set.Set FuncId
allENDECfuncs tdefs
  = let base = baseENDECfuncs tdefs
        -- hoisted out of the comprehension: 'baseENDECfuncs' scans all
        -- function definitions and is invariant in 'fid', so computing
        -- it once avoids rescanning the map for every candidate
     in Set.fromList
          [ fid
          | fid <- Map.keys (funcDefs tdefs)
          , not $ Set.null $ funcCallsClosure tdefs (Set.singleton fid)
                             `Set.intersection` base
          ]
-- | Fixed-point computation of the call closure: repeatedly add the
--   functions called by the members of the set until nothing new is
--   reached.  Terminates because the set grows monotonically within the
--   finite set of defined functions.
funcCallsClosure :: TxsDefs -> Set.Set FuncId -> Set.Set FuncId
funcCallsClosure tdefs fids
  = let newcalls = Set.unions $ map (funcCalls tdefs) (Set.toList fids)
     in if newcalls `Set.isSubsetOf` fids
          then fids
          else funcCallsClosure tdefs $ fids `Set.union` newcalls
-- | The functions directly called in the body of the given function;
--   empty when the function has no definition in 'tdefs'.
funcCalls :: TxsDefs -> FuncId -> Set.Set FuncId
funcCalls tdefs fid
  = case Map.lookup fid (funcDefs tdefs) of
    { Just (FuncDef _vids vexp) -> Set.fromList $ usedFids vexp
    ; _ -> Set.empty
    }
-- | Overloaded collection of the function identifiers used (called)
--   somewhere inside a value; duplicates may occur in the result.
class UsedFids t
  where
    usedFids :: t -> [FuncId]

instance UsedFids BExpr
  where
    -- Delegate to the underlying view of the behaviour expression.
    usedFids = usedFids . TxsDefs.view
instance UsedFids BExprView
  where
    -- Walk every sub-expression of the behaviour-expression view and
    -- collect the function identifiers occurring in offers, guards,
    -- value expressions, and nested behaviour expressions.
    usedFids (ActionPref actoff bexp)     = usedFids actoff ++ usedFids bexp
    usedFids (Guard vexps bexp)           = usedFids vexps ++ usedFids bexp
    usedFids (Choice bexps)               = usedFids bexps
    usedFids (Parallel _chids bexps)      = usedFids bexps
    usedFids (Enable bexp1 choffs bexp2)  = usedFids bexp1 ++ usedFids choffs
                                            ++ usedFids bexp2
    usedFids (Disable bexp1 bexp2)        = usedFids bexp1 ++ usedFids bexp2
    usedFids (Interrupt bexp1 bexp2)      = usedFids bexp1 ++ usedFids bexp2
    usedFids (ProcInst _pid _chids vexps) = usedFids vexps
    usedFids (Hide _chids bexp)           = usedFids bexp
    usedFids (ValueEnv ve bexp)           = usedFids (Map.elems ve) ++ usedFids bexp
    usedFids (StAut _stid ve transs)      = usedFids (Map.elems ve) ++ usedFids transs
instance UsedFids Trans
  where
    -- Functions used in a state-automaton transition: its action offer
    -- and its update assignments.
    usedFids (Trans _fr actoff upd _to) = usedFids actoff ++ usedFids (Map.elems upd)

instance UsedFids ActOffer
  where
    -- Functions used in the channel offers and in the constraints.
    usedFids (ActOffer offs _hidvars cnrs) = usedFids (concatMap chanoffers (Set.toList offs))
                                             ++ usedFids cnrs

instance UsedFids ChanOffer
  where
    -- Input offers ('Quest') bind a variable and carry no value
    -- expression; output offers ('Exclam') are inspected.
    usedFids (Quest _vid)  = []
    usedFids (Exclam vexp) = usedFids vexp
instance UsedFids VExpr
  where
    -- Delegate to the underlying view of the value expression.
    usedFids = usedFids . ValExpr.view

instance UsedFids (ValExprView VarId)
  where
    -- Function calls ('Vfunc') and predefined-function applications
    -- ('Vpredef') contribute their own identifier; every other
    -- constructor only recurses into its sub-expressions.
    usedFids (Vfunc fid vexps)         = fid : usedFids vexps
    usedFids (Vcstr _cid vexps)        = usedFids vexps
    usedFids (Viscstr _cid vexp)       = usedFids vexp
    usedFids (Vaccess _cid _n _p vexp) = usedFids vexp
    usedFids (Vconst _const)           = []
    usedFids (Vvar _v)                 = []
    usedFids (Vite cond tb fb)         = usedFids [cond, tb, fb]
    usedFids (Vsum s)                  = concatMap usedFids (FMX.distinctTermsT s)
    usedFids (Vproduct p)              = concatMap usedFids (FMX.distinctTermsT p)
    usedFids (Vdivide t n)             = usedFids t ++ usedFids n
    usedFids (Vmodulo t n)             = usedFids t ++ usedFids n
    usedFids (Vgez v)                  = usedFids v
    usedFids (Vequal vexp1 vexp2)      = usedFids vexp1 ++ usedFids vexp2
    usedFids (Vand vexps)              = concatMap usedFids (Set.toList vexps)
    usedFids (Vnot vexp)               = usedFids vexp
    usedFids (Vlength vexp)            = usedFids vexp
    usedFids (Vat s p)                 = usedFids s ++ usedFids p
    usedFids (Vconcat vexps)           = concatMap usedFids vexps
    usedFids (Vstrinre s r)            = usedFids s ++ usedFids r
    usedFids (Vpredef _k fid vexps)    = fid : usedFids vexps
instance (UsedFids t) => UsedFids [t]
  where
    -- Lists: collect over every element.
    usedFids = concatMap usedFids
instance (UsedFids t) => UsedFids (Set.Set t)
where
usedFids = concatMap usedFids . Set.toList |
41a9656b84fbf9b402385ce3aefd19fa15831e86cc1142e5a2c6cb937f4e022e | lopusz/langlab | ngrams.clj | (ns langlab.core.ngrams
"Module contains n-gram generation function.")
(defn gen-ngrams
  "Generates n-grams from the given `tokens` sequence.
   Supported invocations:
   - `(gen-ngrams n tokens)`   - all n-grams
   - `(gen-ngrams n m tokens)` - all n-grams, (n+1)-grams, ..., m-grams
   - `(gen-ngrams tokens)`     - all 1 .. (count tokens) n-grams"
  ([tokens]
   (gen-ngrams 1 (count tokens) tokens))
  ([n tokens]
   ;; Slide over the successive tails of `tokens`; only the first
   ;; (count - n + 1) tails still hold a complete n-gram, and each of
   ;; those contributes its first n elements.
   (let [window-count (inc (- (count tokens) n))]
     (map #(take n %)
          (take window-count (iterate rest tokens)))))
  ([n m tokens]
   ;; Clamp the upper size to the token count, then concatenate the
   ;; n-grams of every size in [n, m*].
   (let [m* (min (count tokens) m)]
     (apply concat
            (for [k (range n (inc m*))]
              (gen-ngrams k tokens))))))
(comment
(defn gen-ngrams*
"Function generates n-grams from a given 'tokens' sequence.
Alternative slightly slower version (?).
The following invocations are possible:
- `(gen-ngrams n tokens)` - generates all n-grams
- `(gen-ngrams n m tokens)` - generates all n-grams,(n+1)-grams,...,(m)-grams
- `(gen-ngrams tokens)` - generates 1 .. (count tokens) n-grams
"
([ tokens ]
(gen-ngrams 1 (count tokens) tokens))
([ n tokens ]
(partition n 1 tokens))
([ n m tokens ]
(mapcat
#(gen-ngrams % tokens)
(range n (inc m))))))
| null | https://raw.githubusercontent.com/lopusz/langlab/dd075c9f4ef3594defe16b88bab8460db23c7c75/src/main/clojure/langlab/core/ngrams.clj | clojure | (ns langlab.core.ngrams
"Module contains n-gram generation function.")
(defn gen-ngrams
  "Function generates n-grams from a given 'tokens' sequence.
   The following invocations are possible:
   - `(gen-ngrams n tokens)` - generates all n-grams
   - `(gen-ngrams n m tokens)` - generates all n-grams,(n+1)-grams,...,(m)-grams
   - `(gen-ngrams tokens)` - generates 1 .. (count tokens) n-grams
  "
  ;; One argument: every n-gram size from 1 up to the full token count.
  ([ tokens ] (gen-ngrams 1 (count tokens) tokens))
  ([ n tokens ]
   (let [
         tokens-len (count tokens)
         ;; Successive tails of `tokens`; only the first (len - n + 1)
         ;; tails still hold a complete n-gram.
         tokens-rests (take (+ (- tokens-len n) 1)
                            (iterate rest tokens))
         ;; Each qualifying tail contributes its first n elements.
         ngrams-seq (for [ c tokens-rests ] (take n c))
         ]
     ngrams-seq))
  ([ n m tokens ]
   (let [
         tokens-len (count tokens)
         ;; Clamp the requested upper size to the number of tokens.
         m* (min tokens-len m)
         n-range (range n (+ m* 1))
         gen-coll-ngrams #(gen-ngrams % tokens)
         ]
     ;; Concatenate the n-grams of every size in [n, m*].
     (mapcat gen-coll-ngrams n-range))))
(comment
(defn gen-ngrams*
"Function generates n-grams from a given 'tokens' sequence.
Alternative slightly slower version (?).
The following invocations are possible:
- `(gen-ngrams n tokens)` - generates all n-grams
- `(gen-ngrams n m tokens)` - generates all n-grams,(n+1)-grams,...,(m)-grams
- `(gen-ngrams tokens)` - generates 1 .. (count tokens) n-grams
"
([ tokens ]
(gen-ngrams 1 (count tokens) tokens))
([ n tokens ]
(partition n 1 tokens))
([ n m tokens ]
(mapcat
#(gen-ngrams % tokens)
(range n (inc m))))))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.