_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
b4ed2422ddde2288cfaed7fe818de1156c62f17f6ccaa40696c9022acfec0229 | geneweb/geneweb | consangAll.ml | Copyright ( c ) 1998 - 2007 INRIA
open Gwdb
let rec clear_descend_consang base consang mark =
* let des = foi base ifam in
* Array.iter
* ( fun ip - >
* if not ( Gwdb.Marker.get mark ip ) then
* begin
* consang ip Adef.no_consang ;
* Gwdb.Marker.set mark ip true ;
* let u = poi base ip in
* Array.iter ( clear_descend_consang base consang mark ) ( get_family u )
* end )
* )
* let des = foi base ifam in
* Array.iter
* (fun ip ->
* if not (Gwdb.Marker.get mark ip) then
* begin
* consang ip Adef.no_consang;
* Gwdb.Marker.set mark ip true ;
* let u = poi base ip in
* Array.iter (clear_descend_consang base consang mark) (get_family u)
* end)
* (get_children des) *)
let relationship base tab ip1 ip2 =
fst (Consang.relationship_and_links base tab false ip1 ip2)
let trace verbosity cnt max_cnt =
if verbosity >= 2 then (
Printf.eprintf "%7d\008\008\008\008\008\008\008" cnt;
flush stderr)
else if verbosity >= 1 then ProgrBar.run (max_cnt - cnt) max_cnt
let consang_array base =
let patched = ref false in
let fget i = get_parents @@ poi base i in
let cget i = get_consang @@ poi base i in
let cset i v =
patched := true;
patch_ascend base i
Def.{ (gen_ascend_of_person @@ poi base i) with consang = v }
in
(fget, cget, cset, patched)
let compute ?(verbosity = 2) base from_scratch =
let () = load_ascends_array base in
let () = load_couples_array base in
let fget, cget, cset, patched = consang_array base in
(try
let tab =
let ts = Consang.topological_sort base poi in
Consang.make_relationship_info base ts
in
let persons = Gwdb.ipers base in
let families = Gwdb.ifams base in
let consang_tab = Gwdb.ifam_marker families Adef.no_consang in
let cnt = ref 0 in
FIXME
if not from_scratch then
* begin
* let mark = Gwdb.Marker.make ( Gwdb.Collection.length persons ) false in
* List.iter
* ( fun ip - >
* let u = poi base ip in
* Array.iter ( clear_descend_consang base cset mark ) ( get_family u ) )
* ( patched_ascends base )
* end ;
* begin
* let mark = Gwdb.Marker.make (Gwdb.Collection.length persons) false in
* List.iter
* (fun ip ->
* let u = poi base ip in
* Array.iter (clear_descend_consang base cset mark) (get_family u))
* (patched_ascends base)
* end; *)
Gwdb.Collection.iter
(fun i ->
if from_scratch then (
cset i Adef.no_consang;
incr cnt)
else
let cg = cget i in
Option.iter
(fun ifam -> Gwdb.Marker.set consang_tab ifam cg)
(fget i);
if cg = Adef.no_consang then incr cnt)
persons;
(* number of persons which need consanguinity to be computed *)
let max_cnt = !cnt in
let most = ref None in
if verbosity >= 1 then Printf.eprintf "To do: %d persons\n" max_cnt;
if max_cnt <> 0 then
if verbosity >= 2 then (
Printf.eprintf "Computing consanguinity...";
flush stderr)
else if verbosity >= 1 then ProgrBar.start ();
let running = ref true in
while !running do
running := false;
Gwdb.Collection.iter
(fun i ->
(* if person's consanguinity wasn't calculated *)
if cget i = Adef.no_consang then
match fget i with
(* if person has parents *)
| Some ifam ->
let pconsang = Gwdb.Marker.get consang_tab ifam in
(* if parent's family's consanguinity wasn't calculated *)
if pconsang = Adef.no_consang then
let cpl = foi base ifam in
let ifath = get_father cpl in
let imoth = get_mother cpl in
(* if parent's consanguinity was calculated *)
if
cget ifath != Adef.no_consang
&& cget imoth != Adef.no_consang
then (
let consang = relationship base tab ifath imoth in
trace verbosity !cnt max_cnt;
decr cnt;
let cg = Adef.fix_of_float consang in
cset i cg;
Gwdb.Marker.set consang_tab ifam cg;
if verbosity >= 2 then
if
match !most with Some m -> cg > cget m | None -> true
then (
Printf.eprintf "\nMax consanguinity %g for %s... "
consang
(Gutil.designation base (poi base i));
flush stderr;
most := Some i)
(* if it wasn't makes further another run over persons *))
else running := true
(* if it was then set to person his family's consanguinity *)
else (
trace verbosity !cnt max_cnt;
decr cnt;
cset i pconsang)
(* if he doesn't then set his consanguinity to 0 *)
| None ->
trace verbosity !cnt max_cnt;
decr cnt;
cset i (Adef.fix_of_float 0.0))
persons
done;
if max_cnt <> 0 then
if verbosity >= 2 then (
Printf.eprintf " done \n";
flush stderr)
else if verbosity >= 1 then ProgrBar.finish ()
with Sys.Break when verbosity > 0 ->
Printf.eprintf "\n";
flush stderr;
());
if !patched then Gwdb.commit_patches base;
!patched
| null | https://raw.githubusercontent.com/geneweb/geneweb/747f43da396a706bd1da60d34c04493a190edf0f/lib/core/consangAll.ml | ocaml | number of persons which need consanguinity to be computed
if person's consanguinity wasn't calculated
if person has parents
if parent's family's consanguinity wasn't calculated
if parent's consanguinity was calculated
if it wasn't makes further another run over persons
if it was then set to person his family's consanguinity
if he doesn't then set his consanguinity to 0 | Copyright ( c ) 1998 - 2007 INRIA
open Gwdb
let rec clear_descend_consang base consang mark =
* let des = foi base ifam in
* Array.iter
* ( fun ip - >
* if not ( Gwdb.Marker.get mark ip ) then
* begin
* consang ip Adef.no_consang ;
* Gwdb.Marker.set mark ip true ;
* let u = poi base ip in
* Array.iter ( clear_descend_consang base consang mark ) ( get_family u )
* end )
* )
* let des = foi base ifam in
* Array.iter
* (fun ip ->
* if not (Gwdb.Marker.get mark ip) then
* begin
* consang ip Adef.no_consang;
* Gwdb.Marker.set mark ip true ;
* let u = poi base ip in
* Array.iter (clear_descend_consang base consang mark) (get_family u)
* end)
* (get_children des) *)
let relationship base tab ip1 ip2 =
fst (Consang.relationship_and_links base tab false ip1 ip2)
let trace verbosity cnt max_cnt =
if verbosity >= 2 then (
Printf.eprintf "%7d\008\008\008\008\008\008\008" cnt;
flush stderr)
else if verbosity >= 1 then ProgrBar.run (max_cnt - cnt) max_cnt
let consang_array base =
let patched = ref false in
let fget i = get_parents @@ poi base i in
let cget i = get_consang @@ poi base i in
let cset i v =
patched := true;
patch_ascend base i
Def.{ (gen_ascend_of_person @@ poi base i) with consang = v }
in
(fget, cget, cset, patched)
let compute ?(verbosity = 2) base from_scratch =
let () = load_ascends_array base in
let () = load_couples_array base in
let fget, cget, cset, patched = consang_array base in
(try
let tab =
let ts = Consang.topological_sort base poi in
Consang.make_relationship_info base ts
in
let persons = Gwdb.ipers base in
let families = Gwdb.ifams base in
let consang_tab = Gwdb.ifam_marker families Adef.no_consang in
let cnt = ref 0 in
FIXME
if not from_scratch then
* begin
* let mark = Gwdb.Marker.make ( Gwdb.Collection.length persons ) false in
* List.iter
* ( fun ip - >
* let u = poi base ip in
* Array.iter ( clear_descend_consang base cset mark ) ( get_family u ) )
* ( patched_ascends base )
* end ;
* begin
* let mark = Gwdb.Marker.make (Gwdb.Collection.length persons) false in
* List.iter
* (fun ip ->
* let u = poi base ip in
* Array.iter (clear_descend_consang base cset mark) (get_family u))
* (patched_ascends base)
* end; *)
Gwdb.Collection.iter
(fun i ->
if from_scratch then (
cset i Adef.no_consang;
incr cnt)
else
let cg = cget i in
Option.iter
(fun ifam -> Gwdb.Marker.set consang_tab ifam cg)
(fget i);
if cg = Adef.no_consang then incr cnt)
persons;
let max_cnt = !cnt in
let most = ref None in
if verbosity >= 1 then Printf.eprintf "To do: %d persons\n" max_cnt;
if max_cnt <> 0 then
if verbosity >= 2 then (
Printf.eprintf "Computing consanguinity...";
flush stderr)
else if verbosity >= 1 then ProgrBar.start ();
let running = ref true in
while !running do
running := false;
Gwdb.Collection.iter
(fun i ->
if cget i = Adef.no_consang then
match fget i with
| Some ifam ->
let pconsang = Gwdb.Marker.get consang_tab ifam in
if pconsang = Adef.no_consang then
let cpl = foi base ifam in
let ifath = get_father cpl in
let imoth = get_mother cpl in
if
cget ifath != Adef.no_consang
&& cget imoth != Adef.no_consang
then (
let consang = relationship base tab ifath imoth in
trace verbosity !cnt max_cnt;
decr cnt;
let cg = Adef.fix_of_float consang in
cset i cg;
Gwdb.Marker.set consang_tab ifam cg;
if verbosity >= 2 then
if
match !most with Some m -> cg > cget m | None -> true
then (
Printf.eprintf "\nMax consanguinity %g for %s... "
consang
(Gutil.designation base (poi base i));
flush stderr;
most := Some i)
else running := true
else (
trace verbosity !cnt max_cnt;
decr cnt;
cset i pconsang)
| None ->
trace verbosity !cnt max_cnt;
decr cnt;
cset i (Adef.fix_of_float 0.0))
persons
done;
if max_cnt <> 0 then
if verbosity >= 2 then (
Printf.eprintf " done \n";
flush stderr)
else if verbosity >= 1 then ProgrBar.finish ()
with Sys.Break when verbosity > 0 ->
Printf.eprintf "\n";
flush stderr;
());
if !patched then Gwdb.commit_patches base;
!patched
|
7f3cd0fd0a1f862a48e6f818203aef704628b90c0fac98326ae3128c4ce1457d | uswitch/speculate | extract.clj | (ns speculate.transform.extract
(:refer-clojure :exclude [alias *])
(:require
[clojure.pprint :refer [pprint]]
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.spec.override]
[speculate.ast :as ast]
[speculate.util :as util]
[speculate.transform.state :as state]
[clojure.string :as string]))
(defn alias [spec]
(when-let [s (get (s/registry) spec)]
(when (keyword? s) s)))
(defn un-ns [k]
(keyword (name k)))
(defn assert-conform! [spec orig-value conformed-value]
(when (= ::s/invalid conformed-value)
(throw (ex-info "Value doesn't conform-to spec" {:spec spec :value orig-value}))))
(defn leaf-value
[{:keys [categorize coll-indexes pathset] :as state}
{:keys [::ast/name] :as ast} node]
(when-not name
(throw (ex-info "Cannot be a leaf without a name:"
{:type :invalid-leaf
:ast ast
:node node})))
(let [value (s/conform name node)]
(assert-conform! name node value)
[[{:label name
:value (cond (= value node)
value
(contains? (s/registry) name)
(s/unform name value)
:else
(throw (ex-info (format "Cannot unform: %s" name)
{:type :cannot-unform
:ast ast
:value value
:node node})))
:pathset pathset
:categorize (cond-> categorize
(contains? categorize name)
(assoc name #{value}))
:coll-indexes coll-indexes}] state]))
(defmulti -walk (fn [state ast node] (::ast/type ast)))
(defn walk [state {:keys [leaf] :as ast} node]
(let [parse-name (::ast/name ast)
pull-leaf? (and parse-name
(not leaf)
(contains? (:to-nodeset state) parse-name))]
(let [inc-alias? (contains? (:include state) parse-name)
[included] (when inc-alias?
(or (leaf-value state ast node)
(throw
(Exception.
(format "Extract keys: Value not present for required key: %s" parse-name)))))
[pulled s] (when pull-leaf?
(-> state
(update :pulled (fnil conj #{}) parse-name)
(leaf-value ast node)))]
(if leaf
(leaf-value state ast node)
(-> (-walk (if s s state) ast node)
(state/reset state :categorize)
(cond->
included (state/update-value concat included)
pulled (state/update-value concat pulled)))))))
(defmethod -walk 'clojure.spec.alpha/keys
[state ast node]
(let [{:keys [req req-un opt opt-un]} (:form ast)
f (fn [un? req? s {:keys [leaf] :as branch-ast}]
(let [label (::ast/name branch-ast)
k (cond-> label un? un-ns)
s' (-> s
(cond-> (not leaf)
(update :pathset (fnil conj #{}) label)))]
(if (contains? node k)
(let [[result s'' :as value] (walk s' branch-ast (get node k))]
(when (and req? (nil? value))
(throw
(Exception.
(format "Value not present for required key: %s" label))))
[result (-> s''
(assoc :pathset (:pathset s))
(cond-> (and value (not leaf))
(update :pathset-union (fnil conj #{}) label)))])
[nil s])))
[a s] (state/map state (partial f nil :req) req)
[b t] (state/map state (partial f nil nil) opt)
[c u] (state/map state (partial f :un :req) req-un)
[d v] (state/map state (partial f :un nil) opt-un)]
[(concat a b c d) (util/deep-merge s t u v)]))
(defn -walk-coll [state ast node]
(let [label (::ast/name ast)
categorize (:categorize ast)]
(state/map-indexed state
(fn [state i x]
(-> (cond-> state
(not categorize)
(assoc-in [:coll-indexes label] i))
(walk (:form ast) x)
(state/reset state :coll-indexes)))
node)))
(defmethod -walk 'clojure.spec.alpha/every
[state ast node]
(-walk-coll state ast node))
(defmethod -walk 'clojure.spec.alpha/coll-of
[state ast node]
(-walk-coll state ast node))
(defn f-or [& fs]
(comp (partial some identity) (apply juxt fs)))
(defmethod -walk 'clojure.spec.alpha/and
[state ast node]
(let [specs (filter (f-or (comp s/spec? ::ast/name)
(comp util/spec-symbol? ::ast/type))
(:form ast))]
(walk state (first specs) node)))
(defmethod -walk 'clojure.spec.alpha/or
[state ast node]
(let [label (::ast/name ast)
spec (or label (eval (:spec ast)))
conformed (s/conform spec node)
_ (assert-conform! label node conformed)
[or-key _] conformed
form (get (:form ast) or-key)]
(if (contains? '#{clojure.spec.alpha/pred clojure.spec.alpha/nil} or-key)
(walk form node)
(-> state
(update :categorize assoc or-key #{or-key})
(update :categories (fnil conj #{}) or-key)
(update :categorized (partial merge-with set/union) {or-key #{or-key}})
(walk form node)))))
(defn state-fn? [f]
(-> f meta :name (= 'state-fn)))
(defn key-cat? [[k cat-f]]
(if (= cat-f keys) :key-cat :set-cat))
(defn include-cat-vals [state set-cats]
(mapcat (fn [[k vs]]
(->> vs
(mapcat #(when (contains? (s/registry) k)
(-> state
(leaf-value (ast/parse k) %)
(first))))
(remove nil?)))
set-cats))
(defn assert-all-valid! [spec orig-coll conformed-coll]
(assert
(not-any? #{::s/invalid} conformed-coll)
(format "Categorization must be valid %s %s" spec (pr-str orig-coll))))
(defn category-set-map [state set-cat node]
(->> set-cat
(map (fn [[k cat-f]]
(let [cat-set (if (state-fn? cat-f)
(cat-f state node)
(cat-f node))
_ (when (and (not (nil? cat-set)) (empty? cat-set))
(throw
(ex-info
(format "Categorization for %s returned no categories" k)
{:type :invalid-categorization})))
cat-set' (some->> cat-set
(map #(cond->> %
(or (s/spec? k)
(contains? (s/registry) k))
(s/conform k)))
(seq)
(set))]
(assert-all-valid! k cat-set cat-set')
[k cat-set'])))
(into {})))
(defn include-not-present [coll-a coll-b]
(concat coll-a (->> (map :label coll-a)
(apply dissoc (group-by :label coll-b))
(vals)
(apply concat))))
(defn categorize-map [state {:keys [categorize]} form node]
(let [{:keys [key-cat set-cat]} (group-by key-cat? categorize)
set-cats (category-set-map state set-cat node)
state' (-> state
(update :categorize merge set-cats)
(update :categorized (partial merge-with set/union) set-cats)
(update :categories set/union (set (keys categorize))))
form' (assoc form :categorize categorize)]
(-> (if key-cat
(let [[k f] (first key-cat)]
(state/map state'
(fn [s k-cat]
(let [k-cat' #{(s/conform k k-cat)}
_ (assert-all-valid! k #{k-cat} k-cat')
s' (-> s
(update :categorize assoc k k-cat')
(update :categorized update k set/union k-cat'))]
(-> s'
(walk form' {k-cat (k-cat node)})
(state/update-value conj (ffirst (leaf-value s' (ast/parse k) k-cat))))))
(f node)))
(walk state' form' node))
(state/update-value include-not-present
(include-cat-vals state' set-cats)))))
(defn categorize-coll [state ast form nodes]
(state/map state #(categorize-map %1 ast (:form form) %2) nodes))
(defmethod -walk 'speculate.spec/spec
[state ast node]
(let [{:keys [::ast/name alias leaf form select]} ast
state' (cond-> state alias (update :alias assoc name alias))]
(-> (cond (:categorize ast)
(try
(condp = (::ast/type form)
'clojure.spec.alpha/keys (categorize-map state' ast form node)
'clojure.spec.alpha/coll-of (categorize-coll state' ast form node)
'clojure.spec.alpha/every (categorize-coll state' ast form node)
(categorize-map state' ast form node))
(catch clojure.lang.ExceptionInfo e
(if (= :invalid-categorization (:type (ex-data e)))
[[]]
(throw e))))
select
(walk state' form node)
(not leaf)
(walk state' form node)
leaf
(leaf-value state ast node))
(state/update-state (fn [{:keys [categories categorized pathset-union pulled]}]
(assoc state
:pulled pulled
:pathset-union pathset-union
:categories categories
:categorized categorized))))))
(defmethod -walk :default
[state ast node]
(leaf-value state ast node))
(defn run-walk [spec node include to-nodeset]
(walk {:include include :to-nodeset to-nodeset} spec node))
(defn eval-walk [spec node]
(first (run-walk spec node)))
(defn exec-walk [spec node]
(second (run-walk spec node)))
| null | https://raw.githubusercontent.com/uswitch/speculate/9e02ec77c770bec94a71008d03806907bb86d3de/src/speculate/transform/extract.clj | clojure | (ns speculate.transform.extract
(:refer-clojure :exclude [alias *])
(:require
[clojure.pprint :refer [pprint]]
[clojure.set :as set]
[clojure.spec.alpha :as s]
[clojure.spec.override]
[speculate.ast :as ast]
[speculate.util :as util]
[speculate.transform.state :as state]
[clojure.string :as string]))
(defn alias [spec]
(when-let [s (get (s/registry) spec)]
(when (keyword? s) s)))
(defn un-ns [k]
(keyword (name k)))
(defn assert-conform! [spec orig-value conformed-value]
(when (= ::s/invalid conformed-value)
(throw (ex-info "Value doesn't conform-to spec" {:spec spec :value orig-value}))))
(defn leaf-value
[{:keys [categorize coll-indexes pathset] :as state}
{:keys [::ast/name] :as ast} node]
(when-not name
(throw (ex-info "Cannot be a leaf without a name:"
{:type :invalid-leaf
:ast ast
:node node})))
(let [value (s/conform name node)]
(assert-conform! name node value)
[[{:label name
:value (cond (= value node)
value
(contains? (s/registry) name)
(s/unform name value)
:else
(throw (ex-info (format "Cannot unform: %s" name)
{:type :cannot-unform
:ast ast
:value value
:node node})))
:pathset pathset
:categorize (cond-> categorize
(contains? categorize name)
(assoc name #{value}))
:coll-indexes coll-indexes}] state]))
(defmulti -walk (fn [state ast node] (::ast/type ast)))
(defn walk [state {:keys [leaf] :as ast} node]
(let [parse-name (::ast/name ast)
pull-leaf? (and parse-name
(not leaf)
(contains? (:to-nodeset state) parse-name))]
(let [inc-alias? (contains? (:include state) parse-name)
[included] (when inc-alias?
(or (leaf-value state ast node)
(throw
(Exception.
(format "Extract keys: Value not present for required key: %s" parse-name)))))
[pulled s] (when pull-leaf?
(-> state
(update :pulled (fnil conj #{}) parse-name)
(leaf-value ast node)))]
(if leaf
(leaf-value state ast node)
(-> (-walk (if s s state) ast node)
(state/reset state :categorize)
(cond->
included (state/update-value concat included)
pulled (state/update-value concat pulled)))))))
(defmethod -walk 'clojure.spec.alpha/keys
[state ast node]
(let [{:keys [req req-un opt opt-un]} (:form ast)
f (fn [un? req? s {:keys [leaf] :as branch-ast}]
(let [label (::ast/name branch-ast)
k (cond-> label un? un-ns)
s' (-> s
(cond-> (not leaf)
(update :pathset (fnil conj #{}) label)))]
(if (contains? node k)
(let [[result s'' :as value] (walk s' branch-ast (get node k))]
(when (and req? (nil? value))
(throw
(Exception.
(format "Value not present for required key: %s" label))))
[result (-> s''
(assoc :pathset (:pathset s))
(cond-> (and value (not leaf))
(update :pathset-union (fnil conj #{}) label)))])
[nil s])))
[a s] (state/map state (partial f nil :req) req)
[b t] (state/map state (partial f nil nil) opt)
[c u] (state/map state (partial f :un :req) req-un)
[d v] (state/map state (partial f :un nil) opt-un)]
[(concat a b c d) (util/deep-merge s t u v)]))
(defn -walk-coll [state ast node]
(let [label (::ast/name ast)
categorize (:categorize ast)]
(state/map-indexed state
(fn [state i x]
(-> (cond-> state
(not categorize)
(assoc-in [:coll-indexes label] i))
(walk (:form ast) x)
(state/reset state :coll-indexes)))
node)))
(defmethod -walk 'clojure.spec.alpha/every
[state ast node]
(-walk-coll state ast node))
(defmethod -walk 'clojure.spec.alpha/coll-of
[state ast node]
(-walk-coll state ast node))
(defn f-or [& fs]
(comp (partial some identity) (apply juxt fs)))
(defmethod -walk 'clojure.spec.alpha/and
[state ast node]
(let [specs (filter (f-or (comp s/spec? ::ast/name)
(comp util/spec-symbol? ::ast/type))
(:form ast))]
(walk state (first specs) node)))
(defmethod -walk 'clojure.spec.alpha/or
[state ast node]
(let [label (::ast/name ast)
spec (or label (eval (:spec ast)))
conformed (s/conform spec node)
_ (assert-conform! label node conformed)
[or-key _] conformed
form (get (:form ast) or-key)]
(if (contains? '#{clojure.spec.alpha/pred clojure.spec.alpha/nil} or-key)
(walk form node)
(-> state
(update :categorize assoc or-key #{or-key})
(update :categories (fnil conj #{}) or-key)
(update :categorized (partial merge-with set/union) {or-key #{or-key}})
(walk form node)))))
(defn state-fn? [f]
(-> f meta :name (= 'state-fn)))
(defn key-cat? [[k cat-f]]
(if (= cat-f keys) :key-cat :set-cat))
(defn include-cat-vals [state set-cats]
(mapcat (fn [[k vs]]
(->> vs
(mapcat #(when (contains? (s/registry) k)
(-> state
(leaf-value (ast/parse k) %)
(first))))
(remove nil?)))
set-cats))
(defn assert-all-valid! [spec orig-coll conformed-coll]
(assert
(not-any? #{::s/invalid} conformed-coll)
(format "Categorization must be valid %s %s" spec (pr-str orig-coll))))
(defn category-set-map [state set-cat node]
(->> set-cat
(map (fn [[k cat-f]]
(let [cat-set (if (state-fn? cat-f)
(cat-f state node)
(cat-f node))
_ (when (and (not (nil? cat-set)) (empty? cat-set))
(throw
(ex-info
(format "Categorization for %s returned no categories" k)
{:type :invalid-categorization})))
cat-set' (some->> cat-set
(map #(cond->> %
(or (s/spec? k)
(contains? (s/registry) k))
(s/conform k)))
(seq)
(set))]
(assert-all-valid! k cat-set cat-set')
[k cat-set'])))
(into {})))
(defn include-not-present [coll-a coll-b]
(concat coll-a (->> (map :label coll-a)
(apply dissoc (group-by :label coll-b))
(vals)
(apply concat))))
(defn categorize-map [state {:keys [categorize]} form node]
(let [{:keys [key-cat set-cat]} (group-by key-cat? categorize)
set-cats (category-set-map state set-cat node)
state' (-> state
(update :categorize merge set-cats)
(update :categorized (partial merge-with set/union) set-cats)
(update :categories set/union (set (keys categorize))))
form' (assoc form :categorize categorize)]
(-> (if key-cat
(let [[k f] (first key-cat)]
(state/map state'
(fn [s k-cat]
(let [k-cat' #{(s/conform k k-cat)}
_ (assert-all-valid! k #{k-cat} k-cat')
s' (-> s
(update :categorize assoc k k-cat')
(update :categorized update k set/union k-cat'))]
(-> s'
(walk form' {k-cat (k-cat node)})
(state/update-value conj (ffirst (leaf-value s' (ast/parse k) k-cat))))))
(f node)))
(walk state' form' node))
(state/update-value include-not-present
(include-cat-vals state' set-cats)))))
(defn categorize-coll [state ast form nodes]
(state/map state #(categorize-map %1 ast (:form form) %2) nodes))
(defmethod -walk 'speculate.spec/spec
[state ast node]
(let [{:keys [::ast/name alias leaf form select]} ast
state' (cond-> state alias (update :alias assoc name alias))]
(-> (cond (:categorize ast)
(try
(condp = (::ast/type form)
'clojure.spec.alpha/keys (categorize-map state' ast form node)
'clojure.spec.alpha/coll-of (categorize-coll state' ast form node)
'clojure.spec.alpha/every (categorize-coll state' ast form node)
(categorize-map state' ast form node))
(catch clojure.lang.ExceptionInfo e
(if (= :invalid-categorization (:type (ex-data e)))
[[]]
(throw e))))
select
(walk state' form node)
(not leaf)
(walk state' form node)
leaf
(leaf-value state ast node))
(state/update-state (fn [{:keys [categories categorized pathset-union pulled]}]
(assoc state
:pulled pulled
:pathset-union pathset-union
:categories categories
:categorized categorized))))))
(defmethod -walk :default
[state ast node]
(leaf-value state ast node))
(defn run-walk [spec node include to-nodeset]
(walk {:include include :to-nodeset to-nodeset} spec node))
(defn eval-walk [spec node]
(first (run-walk spec node)))
(defn exec-walk [spec node]
(second (run-walk spec node)))
| |
e254de3b208770701aefd82adcb9406607ee1c40df81ce23c1abaefef93a5cce | sacerdot/CovidMonitoring | avvio.erl | -module(avvio).
-export([avvio/0, start/0]).
Il file è stato creato per compilare e i nodi
per l'esecuzione dell'intero progetto .
avvio () ->
compile:file(server),
compile:file(ospedale),
compile:file(luogo),
compile:file(utente).
sleep(N)->
receive
after N->ok
end.
start()->
spawn(fun()->os:cmd('werl -name server -s server') end),
spawn(fun()->os:cmd('werl -name ospedale -s ospedale') end),
sleep(5000),
spawn(fun()->os:cmd('werl -name luoghi -s luogo') end),
sleep(5000),
spawn(fun()->os:cmd('werl -name utenti1 -s utente') end),
spawn(fun()->os:cmd('werl -name utenti2 -s utente') end).
| null | https://raw.githubusercontent.com/sacerdot/CovidMonitoring/fe969cd51869bbe6479da509c9a6ab21d43e6d11/RanalliRegiaCorte/avvio.erl | erlang | -module(avvio).
-export([avvio/0, start/0]).
Il file è stato creato per compilare e i nodi
per l'esecuzione dell'intero progetto .
avvio () ->
compile:file(server),
compile:file(ospedale),
compile:file(luogo),
compile:file(utente).
sleep(N)->
receive
after N->ok
end.
start()->
spawn(fun()->os:cmd('werl -name server -s server') end),
spawn(fun()->os:cmd('werl -name ospedale -s ospedale') end),
sleep(5000),
spawn(fun()->os:cmd('werl -name luoghi -s luogo') end),
sleep(5000),
spawn(fun()->os:cmd('werl -name utenti1 -s utente') end),
spawn(fun()->os:cmd('werl -name utenti2 -s utente') end).
| |
8b753b3d5a1c9f571823b2685619879f5859d008ba13abd30636fe2b752ddd5d | softlab-ntua/bencherl | im_bench.erl | -module(im_bench).
-export([bench_args/2, run/3]).
bench_args(Vsn, Conf) ->
{ _, Slaves } = lists:keyfind(slaves, 1, Conf),
ClientNodes = filter_nodes(Slaves, "client"),
Ratio =
case Vsn of
short -> 5000;
intermediate -> 10000;
long -> 15000
end,
ClientProcs =
case os:getenv("CLIENT_PROCS") of
false -> Ratio * length(ClientNodes);
P -> erlang:list_to_integer(P)
end,
io:format("Number of client processes: ~p~n", [ClientProcs]),
[[ ClientProcs ]].
run([Clients], Slaves, Conf) ->
% Setup a coordinator to know when the benchmark finished. This is done by
% counting the number of loggers that have finished.
global:register_name(coordinator, self()),
% Get the data dir in order to store the .csv output files there.
{ _, DataDir } = lists:keyfind(datadir, 1, Conf),
% Get the benchmark arguments from the configuration.
ClientNodes = filter_nodes(Slaves, "client"),
ServerNodes = filter_nodes(Slaves, "server"),
RouterNodes = filter_nodes(Slaves, "router"),
% Start the benchmark on the different client domains.
launcher:start_bencherl(length(ServerNodes) div length(RouterNodes), 1,
length(ServerNodes), length(ClientNodes), Slaves),
logger:launch_latency("Bencherl_test", length(RouterNodes), length(ServerNodes),
Clients, 1, ClientNodes, DataDir ++ "/"),
timer:sleep(60000), %XXX: Just to make sure that all clients have logged in.
toxic_client:launch(Clients, ClientNodes),
timer:sleep(60000),
toxic_client:launch_traffic(Clients, ClientNodes),
loop(length(ClientNodes)).
%% filter_nodes/2 returns the nodes in the given list whose name starts with
%% the given prefix.
filter_nodes(Nodes, Prefix) ->
lists:filter(fun(N) ->
string:sub_string(atom_to_list(N), 1, string:len(Prefix)) == Prefix
end, Nodes).
%% loop/1 is a helper function that "prevents" run/3 from finishing until all
%% loggers have halted. Without this function the benchmark would finish after
launching the traffic generators and , thus , would kill all spawned
%% nodes, i.e. routers, servers, etc.
loop(0) -> ok;
loop(N_Loggers) ->
receive
logger_stopped -> loop(N_Loggers - 1)
end.
| null | https://raw.githubusercontent.com/softlab-ntua/bencherl/317bdbf348def0b2f9ed32cb6621e21083b7e0ca/bench/im_bench/src/im_bench.erl | erlang | Setup a coordinator to know when the benchmark finished. This is done by
counting the number of loggers that have finished.
Get the data dir in order to store the .csv output files there.
Get the benchmark arguments from the configuration.
Start the benchmark on the different client domains.
XXX: Just to make sure that all clients have logged in.
filter_nodes/2 returns the nodes in the given list whose name starts with
the given prefix.
loop/1 is a helper function that "prevents" run/3 from finishing until all
loggers have halted. Without this function the benchmark would finish after
nodes, i.e. routers, servers, etc. | -module(im_bench).
-export([bench_args/2, run/3]).
bench_args(Vsn, Conf) ->
{ _, Slaves } = lists:keyfind(slaves, 1, Conf),
ClientNodes = filter_nodes(Slaves, "client"),
Ratio =
case Vsn of
short -> 5000;
intermediate -> 10000;
long -> 15000
end,
ClientProcs =
case os:getenv("CLIENT_PROCS") of
false -> Ratio * length(ClientNodes);
P -> erlang:list_to_integer(P)
end,
io:format("Number of client processes: ~p~n", [ClientProcs]),
[[ ClientProcs ]].
run([Clients], Slaves, Conf) ->
global:register_name(coordinator, self()),
{ _, DataDir } = lists:keyfind(datadir, 1, Conf),
ClientNodes = filter_nodes(Slaves, "client"),
ServerNodes = filter_nodes(Slaves, "server"),
RouterNodes = filter_nodes(Slaves, "router"),
launcher:start_bencherl(length(ServerNodes) div length(RouterNodes), 1,
length(ServerNodes), length(ClientNodes), Slaves),
logger:launch_latency("Bencherl_test", length(RouterNodes), length(ServerNodes),
Clients, 1, ClientNodes, DataDir ++ "/"),
toxic_client:launch(Clients, ClientNodes),
timer:sleep(60000),
toxic_client:launch_traffic(Clients, ClientNodes),
loop(length(ClientNodes)).
filter_nodes(Nodes, Prefix) ->
lists:filter(fun(N) ->
string:sub_string(atom_to_list(N), 1, string:len(Prefix)) == Prefix
end, Nodes).
launching the traffic generators and , thus , would kill all spawned
loop(0) -> ok;
loop(N_Loggers) ->
receive
logger_stopped -> loop(N_Loggers - 1)
end.
|
c9e8138390ef2969a73a77f648f9cbc959a351fa9b1a1fca5446122da91782b0 | nklein/grid-generators | run.lisp | ;;;; run.lisp
(in-package #:grid-generators-tests)
(defun run-all-tests (&key (debug-on-error nst:*debug-on-error*)
(debug-on-fail nst:*debug-on-fail*))
(let ((nst:*debug-on-error* debug-on-error)
(nst:*debug-on-fail* debug-on-fail))
(nst:nst-cmd :run-package #.*package*)))
| null | https://raw.githubusercontent.com/nklein/grid-generators/5f7b790c339123f84710d907ddafdb52c160d44e/test/run.lisp | lisp | run.lisp |
(in-package #:grid-generators-tests)
(defun run-all-tests (&key (debug-on-error nst:*debug-on-error*)
(debug-on-fail nst:*debug-on-fail*))
(let ((nst:*debug-on-error* debug-on-error)
(nst:*debug-on-fail* debug-on-fail))
(nst:nst-cmd :run-package #.*package*)))
|
cb0dfc28cc8d622717c8ffe92e6808fb1aba41d1d6ad7a369ace6ed8dbcb9b53 | Apress/practical-common-lisp-11 | macro-utilities.lisp | (in-package :com.gigamonkeys.macro-utilities)
(defmacro with-gensyms ((&rest names) &body body)
`(let ,(loop for n in names collect `(,n (make-symbol ,(string n))))
,@body))
(defmacro once-only ((&rest names) &body body)
(let ((gensyms (loop for n in names collect (gensym (string n)))))
`(let (,@(loop for g in gensyms collect `(,g (gensym))))
`(let (,,@(loop for g in gensyms for n in names collect ``(,,g ,,n)))
,(let (,@(loop for n in names for g in gensyms collect `(,n ,g)))
,@body)))))
(defun spliceable (value)
(if value (list value)))
(defmacro ppme (form &environment env)
(progn
(write (macroexpand-1 form env)
:length nil
:level nil
:circle nil
:pretty t
:gensym nil
:right-margin 83
:case :downcase)
nil))
| null | https://raw.githubusercontent.com/Apress/practical-common-lisp-11/3e36d8b02b7f0731ad36c31cbbf01f9de0acf003/practicals/Chapter08/macro-utilities.lisp | lisp | (in-package :com.gigamonkeys.macro-utilities)
(defmacro with-gensyms ((&rest names) &body body)
`(let ,(loop for n in names collect `(,n (make-symbol ,(string n))))
,@body))
(defmacro once-only ((&rest names) &body body)
(let ((gensyms (loop for n in names collect (gensym (string n)))))
`(let (,@(loop for g in gensyms collect `(,g (gensym))))
`(let (,,@(loop for g in gensyms for n in names collect ``(,,g ,,n)))
,(let (,@(loop for n in names for g in gensyms collect `(,n ,g)))
,@body)))))
(defun spliceable (value)
(if value (list value)))
(defmacro ppme (form &environment env)
(progn
(write (macroexpand-1 form env)
:length nil
:level nil
:circle nil
:pretty t
:gensym nil
:right-margin 83
:case :downcase)
nil))
| |
f7c9eb2b7910f990598011c7958e6542bd495a758c3df8f54781539557f90e78 | cgaebel/stm-conduit | Async.hs | # LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE PackageImports #-}
# LANGUAGE TupleSections #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
-- | * Introduction
--
-- Contain combinators for concurrently joining conduits, such that
-- the producing side may continue to produce (up to the queue size)
-- as the consumer is concurrently consuming.
module Data.Conduit.Async ( module Data.Conduit.Async.Composition
, gatherFrom
, drainTo
) where
import Control.Monad.IO.Class
import Control.Monad.Loops
import Control.Monad.Trans.Class
import Data.Conduit
import Data.Conduit.Async.Composition
import Data.Foldable
import UnliftIO
-- | Gather output values asynchronously from an action in the base monad and
-- then yield them downstream. This provides a means of working around the
restriction that ' ConduitM ' can not be an instance of ' MonadBaseControl '
in order to , for example , yield values from within a callback
-- function called from a C library.
gatherFrom :: (MonadIO m, MonadUnliftIO m)
=> Int -- ^ Size of the queue to create
-> (TBQueue o -> m ()) -- ^ Action that generates output values
-> ConduitT () o m ()
gatherFrom size scatter = do
chan <- liftIO $ newTBQueueIO (fromIntegral size)
worker <- lift $ async (scatter chan)
gather worker chan
where
gather worker chan = do
(xs, mres) <- liftIO $ atomically $ do
xs <- whileM (not <$> isEmptyTBQueue chan) (readTBQueue chan)
(xs,) <$> pollSTM worker
traverse_ yield xs
case mres of
Just (Left e) -> liftIO $ throwIO (e :: SomeException)
Just (Right r) -> return r
Nothing -> gather worker chan
-- | Drain input values into an asynchronous action in the base monad via a
bounded ' TBQueue ' . This is effectively the dual of ' gatherFrom ' .
drainTo :: (MonadIO m, MonadUnliftIO m)
=> Int -- ^ Size of the queue to create
-> (TBQueue (Maybe i) -> m r) -- ^ Action to consume input values
-> ConduitT i Void m r
drainTo size gather = do
chan <- liftIO $ newTBQueueIO (fromIntegral size)
worker <- lift $ async (gather chan)
scatter worker chan
where
scatter worker chan = do
mval <- await
(mx, action) <- liftIO $ atomically $ do
mres <- pollSTM worker
case mres of
Just (Left e) ->
return (Nothing, liftIO $ throwIO (e :: SomeException))
Just (Right r) ->
return (Just r, return ())
Nothing -> do
writeTBQueue chan mval
return (Nothing, return ())
action
case mx of
Just x -> return x
Nothing -> scatter worker chan
| null | https://raw.githubusercontent.com/cgaebel/stm-conduit/f8710fcc8d4bcbe479a46d9bbb21d26b58feca74/Data/Conduit/Async.hs | haskell | # LANGUAGE PackageImports #
# LANGUAGE RankNTypes #
| * Introduction
Contain combinators for concurrently joining conduits, such that
the producing side may continue to produce (up to the queue size)
as the consumer is concurrently consuming.
| Gather output values asynchronously from an action in the base monad and
then yield them downstream. This provides a means of working around the
function called from a C library.
^ Size of the queue to create
^ Action that generates output values
| Drain input values into an asynchronous action in the base monad via a
^ Size of the queue to create
^ Action to consume input values | # LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
# LANGUAGE TupleSections #
# LANGUAGE RecordWildCards #
module Data.Conduit.Async ( module Data.Conduit.Async.Composition
, gatherFrom
, drainTo
) where
import Control.Monad.IO.Class
import Control.Monad.Loops
import Control.Monad.Trans.Class
import Data.Conduit
import Data.Conduit.Async.Composition
import Data.Foldable
import UnliftIO
restriction that ' ConduitM ' can not be an instance of ' MonadBaseControl '
in order to , for example , yield values from within a callback
gatherFrom :: (MonadIO m, MonadUnliftIO m)
-> ConduitT () o m ()
gatherFrom size scatter = do
chan <- liftIO $ newTBQueueIO (fromIntegral size)
worker <- lift $ async (scatter chan)
gather worker chan
where
gather worker chan = do
(xs, mres) <- liftIO $ atomically $ do
xs <- whileM (not <$> isEmptyTBQueue chan) (readTBQueue chan)
(xs,) <$> pollSTM worker
traverse_ yield xs
case mres of
Just (Left e) -> liftIO $ throwIO (e :: SomeException)
Just (Right r) -> return r
Nothing -> gather worker chan
bounded ' TBQueue ' . This is effectively the dual of ' gatherFrom ' .
drainTo :: (MonadIO m, MonadUnliftIO m)
-> ConduitT i Void m r
drainTo size gather = do
chan <- liftIO $ newTBQueueIO (fromIntegral size)
worker <- lift $ async (gather chan)
scatter worker chan
where
scatter worker chan = do
mval <- await
(mx, action) <- liftIO $ atomically $ do
mres <- pollSTM worker
case mres of
Just (Left e) ->
return (Nothing, liftIO $ throwIO (e :: SomeException))
Just (Right r) ->
return (Just r, return ())
Nothing -> do
writeTBQueue chan mval
return (Nothing, return ())
action
case mx of
Just x -> return x
Nothing -> scatter worker chan
|
1939864c624c2caacea9e59b67287e8f40b152afa5e4166711eb5bbb35c103fd | ericcervin/getting-started-with-sketching | pg009.rkt | #lang sketching
(define (setup)
(size 480 120)
(smoothing 'smoothed))
(define (draw)
(cond
[mouse-pressed (fill 0)]
[else (fill 255)])
(ellipse mouse-x mouse-y 80 80)
)
| null | https://raw.githubusercontent.com/ericcervin/getting-started-with-sketching/1a387c52e9c66d684297fe776291604649abc5de/pg009.rkt | racket | #lang sketching
(define (setup)
(size 480 120)
(smoothing 'smoothed))
(define (draw)
(cond
[mouse-pressed (fill 0)]
[else (fill 255)])
(ellipse mouse-x mouse-y 80 80)
)
| |
f5e9e972f7067e64b14c81f2feb19909715edf763b005c7eb0abfa4dfcdec2e0 | kayceesrk/Quelea | Rubis_strong.hs | # LANGUAGE TemplateHaskell , ScopedTypeVariables , CPP #
import Quelea.Shim
import Quelea.ClientMonad
import Quelea.DBDriver
import Quelea.Contract
import Control.Concurrent (ThreadId, myThreadId, forkIO, threadDelay, killThread)
import Quelea.NameService.Types
import Quelea.Types (summarize)
import Quelea.Marshall
import Quelea.TH
#ifdef LBB
import Quelea.NameService.LoadBalancingBroker
#else
import Quelea.NameService.SimpleBroker
#endif
import Prelude hiding (catch)
import Control.Monad (replicateM_, foldM, when, forever, foldM_, replicateM)
import Control.Monad.Trans (liftIO)
import Control.Concurrent.MVar
import Control.Exception ( SomeException(..), AsyncException(..) , catch, handle, throw)
import Data.IORef
import Data.Text (pack)
import Data.Time
import Database.Cassandra.CQL
import Options.Applicative
import System.Environment (getExecutablePath, getArgs)
import System.Exit (exitSuccess)
import System.Posix.Signals
import System.Process (ProcessHandle, runCommand, terminateProcess)
import System.Random
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import System.IO (hFlush, stdout)
import RubisDefs
import RubisTxns
-- #define DEBUG
debugPrint :: String -> CSN ()
#ifdef DEBUG
debugPrint s = liftIO $ do
tid <- myThreadId
putStrLn $ "[" ++ (show tid) ++ "] " ++ s
#else
debugPrint _ = return ()
#endif
--------------------------------------------------------------------------------
fePort :: Int
fePort = 5558
bePort :: Int
bePort = 5559
numOpsPerRound :: Num a => a
numOpsPerRound = 1
printEvery :: Int
printEvery = 1000
maxDelta :: Int
maxDelta = 100
minItemPrice :: Int
minItemPrice = 1000
auctionTime :: Int
5 seconds
--------------------------------------------------------------------------------
data Kind = Broker | Client | Server
| Daemon | Drop | Create deriving (Read, Show)
data Args = Args {
-- Kind of process
kind :: String,
-- Broker's address
brokerAddr :: String,
{- Daemon Options -}
{- -------------- -}
Optional rts arguments . Only relevant for .
rtsArgs :: String,
Terminate processes after time ( microseconds ) . Only relevant for .
terminateAfter :: String,
{- Client Options -}
{- -------------- -}
-- Number of concurrent client threads
numAuctions :: String,
-- Number of Buyers per auction
numBuyers :: String,
-- Delay between client requests in microseconds. Used to control throughput.
delayReq :: String,
-- Number of items per auction
numItems :: String
}
args :: Parser Args
args = Args
<$> strOption
( long "kind"
<> metavar "KIND"
<> help "Kind of process [Broker|Client|Server|Daemon|Drop|Create]" )
<*> strOption
( long "brokerAddr"
<> metavar "ADDR"
<> help "Address of broker"
<> value "localhost")
<*> strOption
( long "rtsArgs"
<> metavar "RTS_ARGS"
<> help "RTS arguments passed to child processes. Only relevant for Daemon."
<> value "")
<*> strOption
( long "terminateAfter"
<> metavar "SECS"
<> help "Terminate child proceeses after time. Only relevant for Daemon"
<> value "600")
<*> strOption
( long "numAuctions"
<> metavar "NUM_AUCTIONS"
<> help "Number of concurrent auctions"
<> value "1")
<*> strOption
( long "numBuyers"
<> metavar "NUM_BUYERS"
<> help "Number of buyers per auction"
<> value "3")
<*> strOption
( long "delayReq"
<> metavar "MICROSECS"
<> help "Delay between client requests"
<> value "100000")
<*> strOption
( long "numItems"
<> metavar "NUM_ITEMS"
<> help "Number of items sold per auction"
<> value "10")
-------------------------------------------------------------------------------
keyspace :: Keyspace
keyspace = Keyspace $ pack "Quelea"
dtLib = do
return $ mkDtLib
[(StockItem, mkGenOp stockItem summarize,Strong),
(RemoveFromStock, mkGenOp removeFromStock summarize,Strong),
(UpdateMaxBid, mkGenOp updateMaxBid summarize,Strong),
(ShowItem, mkGenOp showItem summarize,Strong),
(GetBalance, mkGenOp getBalance summarize,Strong),
(DepositToWallet, mkGenOp depositToWallet summarize,Strong),
(WithdrawFromWallet, mkGenOp withdrawFromWallet summarize,Strong),
(AddBid, mkGenOp addBid summarize,Strong),
(CancelBid, mkGenOp cancelBid summarize,Strong),
(GetBid, mkGenOp getBid summarize,Strong),
(AddItemBid, mkGenOp addItemBid summarize,Strong),
(RemoveItemBid, mkGenOp removeItemBid summarize,Strong),
(GetBidsByItem, mkGenOp getBidsByItem summarize,Strong),
(AddWalletBid, mkGenOp addWalletBid summarize,Strong),
(RemoveWalletBid, mkGenOp removeWalletBid summarize,Strong),
(GetBidsByWallet, mkGenOp getBidsByWallet summarize,Strong),
(AddWalletItem, mkGenOp addWalletItem summarize,Strong),
(GetItemsByWallet, mkGenOp getItemsByWallet summarize,Strong)]
run :: Args -> IO ()
run args = do
let k = read $ kind args
let broker = brokerAddr args
someTime <- getCurrentTime
let ns = mkNameService (Frontend $ "tcp://" ++ broker ++ ":" ++ show fePort)
(Backend $ "tcp://" ++ broker ++ ":" ++ show bePort) "localhost" 5560
case k of
Broker -> startBroker (Frontend $ "tcp://*:" ++ show fePort)
(Backend $ "tcp://*:" ++ show bePort)
Server -> do
dtLib <- dtLib
runShimNode dtLib [("localhost","9042")] keyspace ns
Client -> do
let threads = read $ numAuctions args
let buyers = read $ numBuyers args
mv::(MVar (Double, NominalDiffTime)) <- newEmptyMVar
replicateM_ threads $ forkIO $ do
mvarList <- replicateM buyers $ newEmptyMVar
handShakeMVar <- newEmptyMVar
mapM_ (\mv -> liftIO $ forkIO $ runSessionWithStats ns $ do
liftIO $ putStrLn "Buyer: started..."
liftIO $ putMVar handShakeMVar ()
liftIO $ putStrLn "Buyer: hand shake done..."
buyerCore mv $ read $ delayReq args) mvarList
runSessionWithStats ns $ do
liftIO $ putStrLn "Seller: started..."
liftIO $ replicateM_ buyers $ liftIO $ takeMVar handShakeMVar
liftIO $ putStrLn "Seller: hand shake done..."
wid <- newWallet 0
res <- sellItems mvarList wid (read $ numItems args)
liftIO $ putMVar mv res
statList <- replicateM threads $ takeMVar mv
let (tpList, latList) = unzip statList
putStrLn $ "Throughput (ops/s) = " ++ (show $ sum tpList)
putStrLn $ "Avg. Latency (s) = " ++ (show $ sum latList / (fromIntegral $ length latList))
Create -> do
pool <- newPool [("localhost","9042")] keyspace Nothing
runCas pool $ createTables
Daemon -> do
pool <- newPool [("localhost","9042")] keyspace Nothing
runCas pool $ createTables
progName <- getExecutablePath
putStrLn "Driver : Starting broker"
b <- runCommand $ progName ++ " +RTS " ++ (rtsArgs args)
++ " -RTS --kind Broker --brokerAddr " ++ broker
putStrLn "Driver : Starting server"
s <- runCommand $ progName ++ " +RTS " ++ (rtsArgs args)
++ " -RTS --kind Server --brokerAddr " ++ broker
putStrLn "Driver : Starting client"
c <- runCommand $ progName ++ " +RTS " ++ (rtsArgs args)
++ " -RTS --kind Client --brokerAddr " ++ broker
++ " --numAuctions " ++ (numAuctions args)
++ " --numItems " ++ (numItems args)
++ " --delayReq " ++ (delayReq args)
-- Install handler for Ctrl-C
tid <- myThreadId
installHandler keyboardSignal (Catch $ reportSignal pool [b,s,c] tid) Nothing
-- Block
let termWait = read $ terminateAfter args
threadDelay (termWait * 1000000)
-- Woken up..
mapM_ terminateProcess [b,s,c]
runCas pool $ dropTables
Drop -> do
pool <- newPool [("localhost","9042")] keyspace Nothing
runCas pool $ dropTables
reportSignal :: Pool -> [ProcessHandle] -> ThreadId -> IO ()
reportSignal pool procList mainTid = do
mapM_ terminateProcess procList
runCas pool $ dropTables
killThread mainTid
data Message = NewItem ItemID | Terminate (MVar (Double, NominalDiffTime))
tryWinItem :: WalletID
-> ItemID
-> Int -- Wait between successful bids
My current bid
-> Int -- My max price willing to pay
-> CSN (Bool, Int) -- True = Won! Int = maxBid
tryWinItem wid iid waitBetweenSuccessfulBids currentBid maxPrice = do
delta <- liftIO $ randomIO
let newBidAmt = currentBid + delta `mod` maxDelta
if newBidAmt > maxPrice
then do
debugPrint $ show (iid,wid) ++ ": BeyondMaxPrice"
waitTillAuctionEnd currentBid
else do
bidResult <- bidForItem wid iid newBidAmt
case bidResult of
ItemNotYetAvailable -> do
debugPrint $ show (iid,wid) ++ ": ItemNotYetAvailable"
liftIO $ threadDelay waitBetweenSuccessfulBids
tryWinItem wid iid waitBetweenSuccessfulBids currentBid maxPrice
ItemGone -> do
debugPrint $ show (iid,wid) ++ ": ItemGone"
res <- amIMaxBidder iid wid
return $ (res, currentBid)
OutBid -> do
debugPrint $ show (iid,wid) ++ ": OutBid"
tryWinItem wid iid waitBetweenSuccessfulBids newBidAmt maxPrice
BidSuccess bid -> do
debugPrint $ show (iid,wid) ++ ": BidSuccess"
bideTime newBidAmt
where
waitTillAuctionEnd currentBid = do
liftIO $ threadDelay waitBetweenSuccessfulBids
item <- getItem iid
case item of
ItemRemoved -> do
res <- amIMaxBidder iid wid
return (res, currentBid)
otherwise -> waitTillAuctionEnd currentBid
bideTime newBidAmt = do
liftIO $ threadDelay waitBetweenSuccessfulBids
-- Woken up. Check if the auction is still running.
res <- getItem iid
iAmMaxBidder <- amIMaxBidder iid wid
case res of
ItemRemoved -> return (iAmMaxBidder, newBidAmt)
otherwise -> -- Auction in progress
if iAmMaxBidder
then bideTime newBidAmt
else tryWinItem wid iid waitBetweenSuccessfulBids newBidAmt maxPrice
buyerCore :: MVar Message -> Int -> CSN ()
buyerCore sellerMVar waitBetweenSuccessfulBids = do
Start with 10 MILLION dollars !
msg <- liftIO $ takeMVar sellerMVar
case msg of
NewItem iid -> do
debugPrint $ show (iid,wid) ++ ": NewItem"
scale <- liftIO $ randomIO >>= \i -> return $ 2 + (i `mod` 10)
(res, bid) <- tryWinItem wid iid waitBetweenSuccessfulBids minItemPrice (scale * minItemPrice)
let WalletID widInt = wid
let ItemID iidInt = iid
if res
then liftIO $ putStrLn $ "Buyer (" ++ show widInt ++ ") won " ++ show iidInt
else liftIO $ putStrLn $ "Buyer (" ++ show widInt ++ ") lost " ++ show iidInt
buyerCore sellerMVar waitBetweenSuccessfulBids
Terminate mv -> do
stats <- getStats
liftIO $ putMVar mv stats
sellItems :: [MVar Message] -> WalletID -> Int -> CSN (Double, NominalDiffTime)
sellItems buyerList wid 0 = do
statMVList <- mapM (\m -> liftIO $ do
mv <- newEmptyMVar
putMVar m $ Terminate mv
return mv) buyerList
buyerStatsList <- mapM (\m -> liftIO $ takeMVar m) statMVList
myStats <- getStats
let statsList = myStats:buyerStatsList
let (tpList, latList) = unzip statsList
return $ (sum tpList, sum latList / (fromIntegral $ length latList))
sellItems buyerList wid numItems = do
iidInt <- liftIO $ randomIO
let iid = ItemID $ iidInt
let desc = show $ iid
openAuction wid iid desc minItemPrice
liftIO $ putStrLn $ "Auction opened: ItemID = " ++ (show iid)
mapM_ (\m -> liftIO $ putMVar m $ NewItem iid) buyerList
liftIO $ threadDelay auctionTime
maxBidAmt <- concludeAuction wid iid
liftIO $ putStrLn $ "Sold: " ++ (show iid) ++ " price = " ++ (show maxBidAmt)
sellItems buyerList wid $ numItems - 1
main :: IO ()
main = execParser opts >>= run
where
opts = info (helper <*> args)
( fullDesc
<> progDesc "Run the Rubis benchmark"
<> header "Rubis Benchmark" )
| null | https://raw.githubusercontent.com/kayceesrk/Quelea/73db79a5d5513b9aeeb475867a67bacb6a5313d0/tests/Rubis/Rubis_strong.hs | haskell | #define DEBUG
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Kind of process
Broker's address
Daemon Options
--------------
Client Options
--------------
Number of concurrent client threads
Number of Buyers per auction
Delay between client requests in microseconds. Used to control throughput.
Number of items per auction
-----------------------------------------------------------------------------
Install handler for Ctrl-C
Block
Woken up..
Wait between successful bids
My max price willing to pay
True = Won! Int = maxBid
Woken up. Check if the auction is still running.
Auction in progress | # LANGUAGE TemplateHaskell , ScopedTypeVariables , CPP #
import Quelea.Shim
import Quelea.ClientMonad
import Quelea.DBDriver
import Quelea.Contract
import Control.Concurrent (ThreadId, myThreadId, forkIO, threadDelay, killThread)
import Quelea.NameService.Types
import Quelea.Types (summarize)
import Quelea.Marshall
import Quelea.TH
#ifdef LBB
import Quelea.NameService.LoadBalancingBroker
#else
import Quelea.NameService.SimpleBroker
#endif
import Prelude hiding (catch)
import Control.Monad (replicateM_, foldM, when, forever, foldM_, replicateM)
import Control.Monad.Trans (liftIO)
import Control.Concurrent.MVar
import Control.Exception ( SomeException(..), AsyncException(..) , catch, handle, throw)
import Data.IORef
import Data.Text (pack)
import Data.Time
import Database.Cassandra.CQL
import Options.Applicative
import System.Environment (getExecutablePath, getArgs)
import System.Exit (exitSuccess)
import System.Posix.Signals
import System.Process (ProcessHandle, runCommand, terminateProcess)
import System.Random
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import System.IO (hFlush, stdout)
import RubisDefs
import RubisTxns
debugPrint :: String -> CSN ()
#ifdef DEBUG
debugPrint s = liftIO $ do
tid <- myThreadId
putStrLn $ "[" ++ (show tid) ++ "] " ++ s
#else
debugPrint _ = return ()
#endif
fePort :: Int
fePort = 5558
bePort :: Int
bePort = 5559
numOpsPerRound :: Num a => a
numOpsPerRound = 1
printEvery :: Int
printEvery = 1000
maxDelta :: Int
maxDelta = 100
minItemPrice :: Int
minItemPrice = 1000
auctionTime :: Int
5 seconds
data Kind = Broker | Client | Server
| Daemon | Drop | Create deriving (Read, Show)
data Args = Args {
kind :: String,
brokerAddr :: String,
Optional rts arguments . Only relevant for .
rtsArgs :: String,
Terminate processes after time ( microseconds ) . Only relevant for .
terminateAfter :: String,
numAuctions :: String,
numBuyers :: String,
delayReq :: String,
numItems :: String
}
args :: Parser Args
args = Args
<$> strOption
( long "kind"
<> metavar "KIND"
<> help "Kind of process [Broker|Client|Server|Daemon|Drop|Create]" )
<*> strOption
( long "brokerAddr"
<> metavar "ADDR"
<> help "Address of broker"
<> value "localhost")
<*> strOption
( long "rtsArgs"
<> metavar "RTS_ARGS"
<> help "RTS arguments passed to child processes. Only relevant for Daemon."
<> value "")
<*> strOption
( long "terminateAfter"
<> metavar "SECS"
<> help "Terminate child proceeses after time. Only relevant for Daemon"
<> value "600")
<*> strOption
( long "numAuctions"
<> metavar "NUM_AUCTIONS"
<> help "Number of concurrent auctions"
<> value "1")
<*> strOption
( long "numBuyers"
<> metavar "NUM_BUYERS"
<> help "Number of buyers per auction"
<> value "3")
<*> strOption
( long "delayReq"
<> metavar "MICROSECS"
<> help "Delay between client requests"
<> value "100000")
<*> strOption
( long "numItems"
<> metavar "NUM_ITEMS"
<> help "Number of items sold per auction"
<> value "10")
keyspace :: Keyspace
keyspace = Keyspace $ pack "Quelea"
dtLib = do
return $ mkDtLib
[(StockItem, mkGenOp stockItem summarize,Strong),
(RemoveFromStock, mkGenOp removeFromStock summarize,Strong),
(UpdateMaxBid, mkGenOp updateMaxBid summarize,Strong),
(ShowItem, mkGenOp showItem summarize,Strong),
(GetBalance, mkGenOp getBalance summarize,Strong),
(DepositToWallet, mkGenOp depositToWallet summarize,Strong),
(WithdrawFromWallet, mkGenOp withdrawFromWallet summarize,Strong),
(AddBid, mkGenOp addBid summarize,Strong),
(CancelBid, mkGenOp cancelBid summarize,Strong),
(GetBid, mkGenOp getBid summarize,Strong),
(AddItemBid, mkGenOp addItemBid summarize,Strong),
(RemoveItemBid, mkGenOp removeItemBid summarize,Strong),
(GetBidsByItem, mkGenOp getBidsByItem summarize,Strong),
(AddWalletBid, mkGenOp addWalletBid summarize,Strong),
(RemoveWalletBid, mkGenOp removeWalletBid summarize,Strong),
(GetBidsByWallet, mkGenOp getBidsByWallet summarize,Strong),
(AddWalletItem, mkGenOp addWalletItem summarize,Strong),
(GetItemsByWallet, mkGenOp getItemsByWallet summarize,Strong)]
run :: Args -> IO ()
run args = do
let k = read $ kind args
let broker = brokerAddr args
someTime <- getCurrentTime
let ns = mkNameService (Frontend $ "tcp://" ++ broker ++ ":" ++ show fePort)
(Backend $ "tcp://" ++ broker ++ ":" ++ show bePort) "localhost" 5560
case k of
Broker -> startBroker (Frontend $ "tcp://*:" ++ show fePort)
(Backend $ "tcp://*:" ++ show bePort)
Server -> do
dtLib <- dtLib
runShimNode dtLib [("localhost","9042")] keyspace ns
Client -> do
let threads = read $ numAuctions args
let buyers = read $ numBuyers args
mv::(MVar (Double, NominalDiffTime)) <- newEmptyMVar
replicateM_ threads $ forkIO $ do
mvarList <- replicateM buyers $ newEmptyMVar
handShakeMVar <- newEmptyMVar
mapM_ (\mv -> liftIO $ forkIO $ runSessionWithStats ns $ do
liftIO $ putStrLn "Buyer: started..."
liftIO $ putMVar handShakeMVar ()
liftIO $ putStrLn "Buyer: hand shake done..."
buyerCore mv $ read $ delayReq args) mvarList
runSessionWithStats ns $ do
liftIO $ putStrLn "Seller: started..."
liftIO $ replicateM_ buyers $ liftIO $ takeMVar handShakeMVar
liftIO $ putStrLn "Seller: hand shake done..."
wid <- newWallet 0
res <- sellItems mvarList wid (read $ numItems args)
liftIO $ putMVar mv res
statList <- replicateM threads $ takeMVar mv
let (tpList, latList) = unzip statList
putStrLn $ "Throughput (ops/s) = " ++ (show $ sum tpList)
putStrLn $ "Avg. Latency (s) = " ++ (show $ sum latList / (fromIntegral $ length latList))
Create -> do
pool <- newPool [("localhost","9042")] keyspace Nothing
runCas pool $ createTables
Daemon -> do
pool <- newPool [("localhost","9042")] keyspace Nothing
runCas pool $ createTables
progName <- getExecutablePath
putStrLn "Driver : Starting broker"
b <- runCommand $ progName ++ " +RTS " ++ (rtsArgs args)
++ " -RTS --kind Broker --brokerAddr " ++ broker
putStrLn "Driver : Starting server"
s <- runCommand $ progName ++ " +RTS " ++ (rtsArgs args)
++ " -RTS --kind Server --brokerAddr " ++ broker
putStrLn "Driver : Starting client"
c <- runCommand $ progName ++ " +RTS " ++ (rtsArgs args)
++ " -RTS --kind Client --brokerAddr " ++ broker
++ " --numAuctions " ++ (numAuctions args)
++ " --numItems " ++ (numItems args)
++ " --delayReq " ++ (delayReq args)
tid <- myThreadId
installHandler keyboardSignal (Catch $ reportSignal pool [b,s,c] tid) Nothing
let termWait = read $ terminateAfter args
threadDelay (termWait * 1000000)
mapM_ terminateProcess [b,s,c]
runCas pool $ dropTables
Drop -> do
pool <- newPool [("localhost","9042")] keyspace Nothing
runCas pool $ dropTables
reportSignal :: Pool -> [ProcessHandle] -> ThreadId -> IO ()
reportSignal pool procList mainTid = do
mapM_ terminateProcess procList
runCas pool $ dropTables
killThread mainTid
data Message = NewItem ItemID | Terminate (MVar (Double, NominalDiffTime))
tryWinItem :: WalletID
-> ItemID
My current bid
tryWinItem wid iid waitBetweenSuccessfulBids currentBid maxPrice = do
delta <- liftIO $ randomIO
let newBidAmt = currentBid + delta `mod` maxDelta
if newBidAmt > maxPrice
then do
debugPrint $ show (iid,wid) ++ ": BeyondMaxPrice"
waitTillAuctionEnd currentBid
else do
bidResult <- bidForItem wid iid newBidAmt
case bidResult of
ItemNotYetAvailable -> do
debugPrint $ show (iid,wid) ++ ": ItemNotYetAvailable"
liftIO $ threadDelay waitBetweenSuccessfulBids
tryWinItem wid iid waitBetweenSuccessfulBids currentBid maxPrice
ItemGone -> do
debugPrint $ show (iid,wid) ++ ": ItemGone"
res <- amIMaxBidder iid wid
return $ (res, currentBid)
OutBid -> do
debugPrint $ show (iid,wid) ++ ": OutBid"
tryWinItem wid iid waitBetweenSuccessfulBids newBidAmt maxPrice
BidSuccess bid -> do
debugPrint $ show (iid,wid) ++ ": BidSuccess"
bideTime newBidAmt
where
waitTillAuctionEnd currentBid = do
liftIO $ threadDelay waitBetweenSuccessfulBids
item <- getItem iid
case item of
ItemRemoved -> do
res <- amIMaxBidder iid wid
return (res, currentBid)
otherwise -> waitTillAuctionEnd currentBid
bideTime newBidAmt = do
liftIO $ threadDelay waitBetweenSuccessfulBids
res <- getItem iid
iAmMaxBidder <- amIMaxBidder iid wid
case res of
ItemRemoved -> return (iAmMaxBidder, newBidAmt)
if iAmMaxBidder
then bideTime newBidAmt
else tryWinItem wid iid waitBetweenSuccessfulBids newBidAmt maxPrice
buyerCore :: MVar Message -> Int -> CSN ()
buyerCore sellerMVar waitBetweenSuccessfulBids = do
Start with 10 MILLION dollars !
msg <- liftIO $ takeMVar sellerMVar
case msg of
NewItem iid -> do
debugPrint $ show (iid,wid) ++ ": NewItem"
scale <- liftIO $ randomIO >>= \i -> return $ 2 + (i `mod` 10)
(res, bid) <- tryWinItem wid iid waitBetweenSuccessfulBids minItemPrice (scale * minItemPrice)
let WalletID widInt = wid
let ItemID iidInt = iid
if res
then liftIO $ putStrLn $ "Buyer (" ++ show widInt ++ ") won " ++ show iidInt
else liftIO $ putStrLn $ "Buyer (" ++ show widInt ++ ") lost " ++ show iidInt
buyerCore sellerMVar waitBetweenSuccessfulBids
Terminate mv -> do
stats <- getStats
liftIO $ putMVar mv stats
sellItems :: [MVar Message] -> WalletID -> Int -> CSN (Double, NominalDiffTime)
sellItems buyerList wid 0 = do
statMVList <- mapM (\m -> liftIO $ do
mv <- newEmptyMVar
putMVar m $ Terminate mv
return mv) buyerList
buyerStatsList <- mapM (\m -> liftIO $ takeMVar m) statMVList
myStats <- getStats
let statsList = myStats:buyerStatsList
let (tpList, latList) = unzip statsList
return $ (sum tpList, sum latList / (fromIntegral $ length latList))
sellItems buyerList wid numItems = do
iidInt <- liftIO $ randomIO
let iid = ItemID $ iidInt
let desc = show $ iid
openAuction wid iid desc minItemPrice
liftIO $ putStrLn $ "Auction opened: ItemID = " ++ (show iid)
mapM_ (\m -> liftIO $ putMVar m $ NewItem iid) buyerList
liftIO $ threadDelay auctionTime
maxBidAmt <- concludeAuction wid iid
liftIO $ putStrLn $ "Sold: " ++ (show iid) ++ " price = " ++ (show maxBidAmt)
sellItems buyerList wid $ numItems - 1
main :: IO ()
main = execParser opts >>= run
where
opts = info (helper <*> args)
( fullDesc
<> progDesc "Run the Rubis benchmark"
<> header "Rubis Benchmark" )
|
98bc25c60492cd71c6c70d9c847a21f5b9ef9af8feb798bdcf218ad1afa1cf02 | LLazarek/mutate | mutate-program.rkt | #lang at-exp racket/base
(require racket/stream
ruinit
mutate
mutate/traversal
mutate/private/mutate-program
mutate/private/mutated
"testing-util.rkt")
(define-id-mutator plus-swap
[+ #:-> -])
(define em (make-expr-mutator plus-swap))
(test-begin
#:name make-program-mutator
(test-mutator* (syntax-only (make-program-mutator em))
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
(list #'[(require foo/bar)
(define (f x) (- x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (- (f (- x)) x)))
(+ 1 2)]
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(- 1 2)])
values)
(test-equal? ((make-program-mutator em)
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
3)
no-more-mutations-flag)
(test-mutator* (syntax-only (make-program-mutator em #:select select-define-body))
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
(list #'[(require foo/bar)
(define (f x) (- x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (- (f (- x)) x)))
(+ 1 2)])
values)
(test-equal? ((make-program-mutator em #:select select-define-body)
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
2)
no-more-mutations-flag)
(test-match ((make-program-mutator em #:select select-define-body)
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
0)
(mutated (mutated-program _ 'f) _)))
(test-begin
#:name stream-builder
(test-equal? (map syntax->datum
(stream->list
((program-mutator->stream-builder
(syntax-only
(make-program-mutator em)))
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)])))
'([(require foo/bar)
(define (f x) (- x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (- (f (- x)) x)))
(+ 1 2)]
[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(- 1 2)])))
(test-begin
#:name module-mutator
(test-equal? (syntax->datum
((program-mutator->module-mutator
(syntax-only
(make-program-mutator em)))
#'(module a-test racket
(#%module-begin
(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)))
0))
'(module a-test racket
(#%module-begin
(require foo/bar)
(define (f x) (- x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)))))
| null | https://raw.githubusercontent.com/LLazarek/mutate/8d3af6afb53b4f725e7a81a515403efad88a52e5/mutate-test/tests/mutate-program.rkt | racket | #lang at-exp racket/base
(require racket/stream
ruinit
mutate
mutate/traversal
mutate/private/mutate-program
mutate/private/mutated
"testing-util.rkt")
(define-id-mutator plus-swap
[+ #:-> -])
(define em (make-expr-mutator plus-swap))
(test-begin
#:name make-program-mutator
(test-mutator* (syntax-only (make-program-mutator em))
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
(list #'[(require foo/bar)
(define (f x) (- x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (- (f (- x)) x)))
(+ 1 2)]
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(- 1 2)])
values)
(test-equal? ((make-program-mutator em)
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
3)
no-more-mutations-flag)
(test-mutator* (syntax-only (make-program-mutator em #:select select-define-body))
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
(list #'[(require foo/bar)
(define (f x) (- x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (- (f (- x)) x)))
(+ 1 2)])
values)
(test-equal? ((make-program-mutator em #:select select-define-body)
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
2)
no-more-mutations-flag)
(test-match ((make-program-mutator em #:select select-define-body)
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
0)
(mutated (mutated-program _ 'f) _)))
(test-begin
#:name stream-builder
(test-equal? (map syntax->datum
(stream->list
((program-mutator->stream-builder
(syntax-only
(make-program-mutator em)))
#'[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)])))
'([(require foo/bar)
(define (f x) (- x 42))
(define (g x) (f (+ (f (- x)) x)))
(+ 1 2)]
[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (- (f (- x)) x)))
(+ 1 2)]
[(require foo/bar)
(define (f x) (+ x 42))
(define (g x) (f (+ (f (- x)) x)))
(- 1 2)])))
  ;; program-mutator->module-mutator applies mutation 0 to the body of a
  ;; (module ... (#%module-begin ...)) form while leaving the module
  ;; wrapper itself untouched.
  (test-begin
   #:name module-mutator
   (test-equal? (syntax->datum
                 ((program-mutator->module-mutator
                   (syntax-only
                    (make-program-mutator em)))
                  #'(module a-test racket
                      (#%module-begin
                       (require foo/bar)
                       (define (f x) (+ x 42))
                       (define (g x) (f (+ (f (- x)) x)))
                       (+ 1 2)))
                  0))
                '(module a-test racket
                   (#%module-begin
                    (require foo/bar)
                    (define (f x) (- x 42))
                    (define (g x) (f (+ (f (- x)) x)))
                    (+ 1 2)))))
| |
a97771bdf629ee143182042254f69772e910167cc6b1c9260abe95185dcdeb21 | rroohhh/guix_system | network-utils.scm |
(define-module (config network-utils)
#:use-module (config network)
#:use-module (guix gexp)
#:use-module ((services networking) #:prefix svc:)
#:export (networking-for etc-hosts-file))
;; Networking configuration for HOST, delegated to (services networking)
;; with the repository-wide NETWORK-CONFIG table.
(define* (networking-for host)
  (svc:networking-for host network-config))
;; /etc/hosts as a Guix file-like object: one "<address> <name>" line per
;; entry under the "hosts" key of NETWORK-CONFIG, followed by the usual
;; IPv4/IPv6 localhost entries.
(define etc-hosts-file
  (plain-file "hosts"
              (string-join
               (append
                (map (lambda (entry)
                       (string-append (assoc-ref (cdr entry) "address")
                                      " "
                                      (car entry)))
                     (assoc-ref network-config "hosts"))
                '("127.0.0.1 localhost"
                  "::1 localhost"))
               "\n")))
| null | https://raw.githubusercontent.com/rroohhh/guix_system/f126f17c309c4e4ead963f6cff9f9d2780ae5dae/config/network-utils.scm | scheme |
(define-module (config network-utils)
#:use-module (config network)
#:use-module (guix gexp)
#:use-module ((services networking) #:prefix svc:)
#:export (networking-for etc-hosts-file))
(define* (networking-for host)
(svc:networking-for host network-config))
(define etc-hosts-file
(let* ((hosts (assoc-ref network-config "hosts")))
(plain-file "hosts"
(string-join
(append
(map
(lambda (host)
(string-append (assoc-ref (cdr host) "address") " " (car host)))
hosts)
(list
"127.0.0.1 localhost"
"::1 localhost"))
"\n"))))
| |
1a9f76527a085f683d18e8d0aec48f45d797f32e05f6fd8ff314acb24829858c | shirok/Gauche | redefutil.scm | ;;;
;;; redefutil.scm - class redefinition protocol (autoloaded)
;;;
;;;   Copyright (c) 2003-2022  Shiro Kawai  <[email protected]>
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; 3. Neither the name of the authors nor the names of its contributors
;;; may be used to endorse or promote products derived from this
;;; software without specific prior written permission.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
;; This file is autoloaded
(select-module gauche.object)
(use scheme.list) ;; used by instance update protocol
;;----------------------------------------------------------------
;; Class Redefinition protocol
;;
;; redefine-class! old new [function]
;; <with locking old>
;; class-redefinition old new [gf]
;; <update direct methods>
;; <update direct supers>
;; update-direct-subclass! c orig clone [gf]
;; Entry point of the class-redefinition protocol: run the generic
;; CLASS-REDEFINITION under a guard.  On error, commit with #f (roll
;; back) and warn that the class may be left inconsistent.
(define (redefine-class! old new)
  ;; MT safety -- NOTE(review): this line was bare text ("MT safety") in
  ;; the extraction; upstream redefutil.scm likely had a locking call
  ;; here before the comment.  Verify against the Gauche source tree.
  (guard (e [else
             (%commit-class-redefinition! old #f)
             (warn "Class redefinition of ~S is aborted. The state of the class may be inconsistent: ~a\n" old (condition-message e))])
    (class-redefinition old new)
    (%commit-class-redefinition! old new)))
;; Default redefinition behavior: detach OLD from the class graph and
;; re-attach its methods and subclasses to NEW.
(define-generic class-redefinition)
(define-method class-redefinition ((old <class>) (new <class>))
  ;; Be careful to access slot values of OLD. If OLD has a metaclass
  ;; which itself is redefined, using slot-ref triggers update-instance
  ;; on OLD and we lose OLD's redefinition state.  (Comment marker on the
  ;; third line was lost in extraction; restored here.)
  ;; Accessor methods are simply deleted; other direct methods are
  ;; repointed from OLD to NEW.
  (for-each (^m (if (is-a? m <accessor-method>)
                  (delete-method! (slot-ref m 'generic) m)
                  (update-direct-method! m old new)))
            (slot-ref-using-class (current-class-of old)
                                  old
                                  'direct-methods))
  ;; Unlink OLD from each of its direct superclasses.
  (for-each (^[sup] (%delete-direct-subclass! sup old))
            (slot-ref-using-class (current-class-of old)
                                  old
                                  'direct-supers))
  ;; Recursively rebuild every direct subclass against NEW.
  (for-each (^[sub] (update-direct-subclass! sub old new))
            (slot-ref-using-class (current-class-of old)
                                  old
                                  'direct-subclasses))
  )
;; Rebuild subclass SUB so its direct-supers list refers to NEW instead
;; of ORIG, then redefine SUB itself (recursing down the hierarchy via
;; redefine-class!) and rebind its name to the rebuilt class.
(define-generic update-direct-subclass!)
(define-method update-direct-subclass! ((sub <class>)
                                        (orig <class>)
                                        (new <class>))
  (define (new-supers supers)
    (map (^s (if (eq? s orig) new s)) supers))
  ;; Rewrite SUB's saved initargs, replacing :supers and :metaclass with
  ;; the freshly computed values while keeping everything else verbatim.
  (define (fix-initargs initargs supers metaclass)
    (let loop ([args initargs] [r '()])
      (cond [(null? args) (reverse! r)]
            [(eq? (car args) :supers)
             (loop (cddr args) (list* supers :supers r))]
            [(eq? (car args) :metaclass)
             (loop (cddr args) (list* metaclass :metaclass r))]
            [else (loop (cddr args) (list* (cadr args) (car args) r))])))
  (let* ([initargs (slot-ref sub 'initargs)]
         [supers (new-supers (class-direct-supers sub))]
         ;; NB: this isn't really correct!  (comment marker restored;
         ;; the metaclass choice ignores SUB's own original metaclass)
         [metaclass (or (get-keyword :metaclass initargs #f)
                        (%get-default-metaclass supers))]
         [new-sub (apply make metaclass
                         (fix-initargs initargs supers metaclass))])
    (redefine-class! sub new-sub)
    (%replace-class-binding! sub new-sub)))
;;----------------------------------------------------------------
;; Instance update protocol
;;
;; By default, the following slots of the new class are carried over:
;; - it is instance allocated.
;; - its allocation is either class or each-subclass, without having
;;   the default value.
;; - it is a builtin slot and settable in the new class.
;;
;; If you want to carry over other slots, the first thing you'd want
;; to try is to customize change-class method. Save the old slots before
;; calling next-method and set the new slots afterwards. It can also be
;; used to carry over a value when the name of the slot is changed.
;; If you want to prevent some slots from being carried over by the
;; base change-class method, you can override
;; change-object-class-carry-over-slot? method to do so.
;; Predicate deciding whether SLOT's value survives a class change
;; (see the policy above).
(define-generic change-object-class-carry-over-slot?)
(define-method change-object-class-carry-over-slot? ((obj <object>)
                                                     old-class new-class slot)
  (let ([slot-name (slot-definition-name slot)]
        [alloc (slot-definition-allocation slot)])
    (or (eq? alloc :instance)
        (and (memq alloc '(:class :each-subclass))
             (not (class-slot-bound? new-class slot-name)))
        (and-let* ([ (eq? alloc :builtin) ]
                   [sa (slot-definition-option slot :slot-accessor #f)])
          (slot-ref sa 'settable)))))
;; A dynamic stack that keeps change-class invocations. We need
;; this to prevent inadvertent infinite recursive calls of change-class.
;; Each element is (<instance> . <continuation>)
(define instance-changing-class-stack (make-parameter '()))
;; Change class.  Allocate a fresh instance of NEW-CLASS, copy over every
;; slot approved by change-object-class-carry-over-slot?, then transplant
;; the new body into OBJ so OBJ keeps its identity.
(define (change-object-class obj old-class new-class)
  (let ([new (allocate-instance new-class '())]
        [new-slots (filter (^s (change-object-class-carry-over-slot?
                                obj old-class new-class s))
                           (class-slots new-class))])
    (if-let1 p (assq obj (instance-changing-class-stack))
      ;; change-class is called recursively. abort change-class protocol.
      ((cdr p) #f)
      ;; normal course of action
      (dolist (slot new-slots)
        (let1 slot-name (slot-definition-name slot)
          ;; Copy the old value when the slot exists and is bound; the
          ;; continuation lets a recursive change-class abort this copy.
          (or (and
               (slot-exists-using-class? old-class obj slot-name)
               (let/cc cont
                 (parameterize
                     ([instance-changing-class-stack
                       (acons obj cont (instance-changing-class-stack))])
                   (and (slot-bound-using-class? old-class obj slot-name)
                        (let1 val
                            (slot-ref-using-class old-class obj slot-name)
                          (slot-set-using-class! new-class new slot-name val)
                          #t)))))
              ;; Otherwise fall back to the slot's own initializer.
              (let1 acc (class-slot-accessor new-class slot-name)
                (slot-initialize-using-accessor! new acc '()))))))
    ;; overwrite original obj's content with the new one.
    (%transplant-instance! new obj)
    obj))
;; Intercept metaclass change; that is, we're about to replace the
;; class C's metaclass by NEW-META.  (Comment markers on several lines
;; below were lost in extraction; restored.)
;; We have to prevent the cpl slot from being carried over by default,
;; for it has extra consistency check that interferes with our purpose.
;;
;; NB: At this moment it is impossible that changing metaclass
;; affects the structure of the instance, since (initialize <new-metaclass>)
;; isn't called.  Initialize is the only place where the structure
;; of the instance can be determined. This fact is important, since
;; the instance update protocol won't run on instances whose class's
;; metaclass is changed (we can't, since the class maintains its identity
;; before and after metaclass change.) If the updated class changed
;; instance structure, accessing old instances would cause bad things.
;; In future we may introduce reinitialize method which is called
;; right after change-object is done; in such case we need extra safety
;; mechanism to ensure instance structure isn't changed by metaclass
;; change.
(define-method change-class ((c <class>) (new-meta <class>))
  ;; Preserve the computed CPL across the metaclass swap done by
  ;; next-method, then seal the class again.
  (let* ([old-meta (current-class-of c)]
         [old-cpl (slot-ref-using-class old-meta c 'cpl)]
         ;; NOTE(review): old-nis is bound but unused below — possibly
         ;; vestigial; verify against upstream.
         [old-nis (slot-ref-using-class old-meta c 'num-instance-slots)])
    (next-method)
    (slot-set-using-class! new-meta c 'cpl old-cpl)
    (%finish-class-initialization! c) ; seal the class
    c))
;; Keep every slot except cpl, which change-class restores manually.
(define-method change-object-class-carry-over-slot?
  ((c <class>) old-meta new-meta slot)
  (and (not (eq? (slot-definition-name slot) 'cpl))
       (next-method)))
;; inject definitions into gauche module so user code sees them without
;; importing gauche.object explicitly
(define-in-module gauche redefine-class! redefine-class!)
(define-in-module gauche class-redefinition class-redefinition)
(define-in-module gauche update-direct-subclass! update-direct-subclass!)
(define-in-module gauche change-object-class change-object-class)
| null | https://raw.githubusercontent.com/shirok/Gauche/6d15c99ee46aee9d22d77164e575fa22673848fe/lib/gauche/redefutil.scm | scheme |
redefutil.scm - class redefinition protocol (autoloaded)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the authors nor the names of its contributors
may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This file is autoloaded
used by instance update protocol
----------------------------------------------------------------
Class Redefinition protocol
redefine-class! old new [function]
<with locking old>
class-redefinition old new [gf]
<update direct methods>
<update direct supers>
update-direct-subclass! c orig clone [gf]
Be careful to access slot values of OLD. If OLD has a metaclass
which itself is redefined, using slot-ref triggers update-instance
----------------------------------------------------------------
Instance update protocol
By default, the following slots of the new class are carried over:
- it is instance allocated.
- its allocation is either class or each-subclass, without having
the default value.
- it is a builtin slot and settable in the new class.
to try is to customize change-class method. Save the old slots before
calling next-method and set the new slots afterwards. It can also be
used to carry over a value when the name of the slot is changed.
If you want to prevent some slots from being carried over by the
base change-class method, you can override
change-object-class-carry-over-slot? method to do so.
A dynamic stack that keeps change-class invocation. We need
this to prevent inadvertent infinit recursive call of change-class.
Each element is (<instance> . <continuation>)
Change class.
change-class is called recursively. abort change-class protocol.
normal course of action
overwrite original obj's content with the new one.
Intercept metaclass change; that is, we're about to replace the
We have to prevent the cpl slot from being carried over by default,
for it has extra consistency check that interferes with our purpose.
affects the structure of the instance, since (initialize <new-metaclass>)
of the instance can be determined. This fact is important, since
the instance update protocol won't run on instances whose class's
metaclass is changed (we can't, since the class maintains its identity
before and after metaclass change.) If the updated class changed
instance structure, accessing old instances would cause bad things.
In future we may introduce reinitialize method which is called
right after change-object is done; in such case we need extra safety
mechanism to ensure instance structure isn't changed by metaclass
change.
seal the class
inject definitions into gauche module | Copyright ( c ) 2003 - 2022 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
(select-module gauche.object)
(define (redefine-class! old new)
MT safety
(guard (e [else
(%commit-class-redefinition! old #f)
(warn "Class redefinition of ~S is aborted. The state of the class may be inconsistent: ~a\n" old (condition-message e))])
(class-redefinition old new)
(%commit-class-redefinition! old new)))
(define-generic class-redefinition)
(define-method class-redefinition ((old <class>) (new <class>))
on OLD and we lose old'redefined .
(for-each (^m (if (is-a? m <accessor-method>)
(delete-method! (slot-ref m 'generic) m)
(update-direct-method! m old new)))
(slot-ref-using-class (current-class-of old)
old
'direct-methods))
(for-each (^[sup] (%delete-direct-subclass! sup old))
(slot-ref-using-class (current-class-of old)
old
'direct-supers))
(for-each (^[sub] (update-direct-subclass! sub old new))
(slot-ref-using-class (current-class-of old)
old
'direct-subclasses))
)
(define-generic update-direct-subclass!)
(define-method update-direct-subclass! ((sub <class>)
(orig <class>)
(new <class>))
(define (new-supers supers)
(map (^s (if (eq? s orig) new s)) supers))
(define (fix-initargs initargs supers metaclass)
(let loop ([args initargs] [r '()])
(cond [(null? args) (reverse! r)]
[(eq? (car args) :supers)
(loop (cddr args) (list* supers :supers r))]
[(eq? (car args) :metaclass)
(loop (cddr args) (list* metaclass :metaclass r))]
[else (loop (cddr args) (list* (cadr args) (car args) r))])))
(let* ([initargs (slot-ref sub 'initargs)]
[supers (new-supers (class-direct-supers sub))]
NB : this is n't really correct !
[metaclass (or (get-keyword :metaclass initargs #f)
(%get-default-metaclass supers))]
[new-sub (apply make metaclass
(fix-initargs initargs supers metaclass))])
(redefine-class! sub new-sub)
(%replace-class-binding! sub new-sub)))
If you want to carry over other slots , the first thing you 'd want
(define-generic change-object-class-carry-over-slot?)
(define-method change-object-class-carry-over-slot? ((obj <object>)
old-class new-class slot)
(let ([slot-name (slot-definition-name slot)]
[alloc (slot-definition-allocation slot)])
(or (eq? alloc :instance)
(and (memq alloc '(:class :each-subclass))
(not (class-slot-bound? new-class slot-name)))
(and-let* ([ (eq? alloc :builtin) ]
[sa (slot-definition-option slot :slot-accessor #f)])
(slot-ref sa 'settable)))))
(define instance-changing-class-stack (make-parameter '()))
(define (change-object-class obj old-class new-class)
(let ([new (allocate-instance new-class '())]
[new-slots (filter (^s (change-object-class-carry-over-slot?
obj old-class new-class s))
(class-slots new-class))])
(if-let1 p (assq obj (instance-changing-class-stack))
((cdr p) #f)
(dolist (slot new-slots)
(let1 slot-name (slot-definition-name slot)
(or (and
(slot-exists-using-class? old-class obj slot-name)
(let/cc cont
(parameterize
([instance-changing-class-stack
(acons obj cont (instance-changing-class-stack))])
(and (slot-bound-using-class? old-class obj slot-name)
(let1 val
(slot-ref-using-class old-class obj slot-name)
(slot-set-using-class! new-class new slot-name val)
#t)))))
(let1 acc (class-slot-accessor new-class slot-name)
(slot-initialize-using-accessor! new acc '()))))))
(%transplant-instance! new obj)
obj))
class C 's metaclass by NEW - META .
NB : At this moment it is impossible that changing metaclass
is n't called , Initialize is the only place where the structure
(define-method change-class ((c <class>) (new-meta <class>))
(let* ([old-meta (current-class-of c)]
[old-cpl (slot-ref-using-class old-meta c 'cpl)]
[old-nis (slot-ref-using-class old-meta c 'num-instance-slots)])
(next-method)
(slot-set-using-class! new-meta c 'cpl old-cpl)
c))
(define-method change-object-class-carry-over-slot?
((c <class>) old-meta new-meta slot)
(and (not (eq? (slot-definition-name slot) 'cpl))
(next-method)))
(define-in-module gauche redefine-class! redefine-class!)
(define-in-module gauche class-redefinition class-redefinition)
(define-in-module gauche update-direct-subclass! update-direct-subclass!)
(define-in-module gauche change-object-class change-object-class)
|
47ab5d3b75d0875a22e9880dd25901d24922a42da9f3befc8fb948bdb7a0cb0c | tisnik/clojure-examples | core_test.clj | ;
( C ) Copyright 2016 , 2020 , 2021
;
; All rights reserved. This program and the accompanying materials
; are made available under the terms of the Eclipse Public License v1.0
; which accompanies this distribution, and is available at
-v10.html
;
; Contributors:
;
(ns topic-constructor.core-test
  (:require [clojure.test :refer :all]
            [topic-constructor.core :refer :all]))
;; Placeholder generated by the Leiningen template; intentionally fails
;; ((= 0 1)) until real assertions for topic-constructor.core exist.
(deftest a-test
  (testing "FIXME, I fail."
    (is (= 0 1))))
| null | https://raw.githubusercontent.com/tisnik/clojure-examples/a5f9d6119b62520b05da64b7929d07b832b957ab/kafka-topic-constructor-10-partitions/test/topic_constructor/core_test.clj | clojure |
All rights reserved. This program and the accompanying materials
are made available under the terms of the Eclipse Public License v1.0
which accompanies this distribution, and is available at
Contributors:
| ( C ) Copyright 2016 , 2020 , 2021
-v10.html
(ns topic-constructor.core-test
  (:require [clojure.test :refer :all]
            [topic-constructor.core :refer :all]))
;; Template placeholder test; intentionally failing.  (This span is the
;; dataset row's derived "code" column.)
(deftest a-test
  (testing "FIXME, I fail."
    (is (= 0 1))))
|
138787d5e2d647fb9ab6b072007bcac6136d7dd3e3519b28d073b8e3edf6a78e | ndmitchell/hlint | Pragma.hs | # LANGUAGE ViewPatterns #
# LANGUAGE FlexibleContexts #
Suggest better pragmas
OPTIONS_GHC -cpp = > LANGUAGE CPP
OPTIONS_GHC = > LANGUAGE ... ( in HSE )
OPTIONS_GHC -XFoo = > LANGUAGE Foo
LANGUAGE A , A = > LANGUAGE A
-- do not do LANGUAGE A , LANGUAGE B to combine
< TEST >
{ - # OPTIONS_GHC -cpp #
Suggest better pragmas
OPTIONS_GHC -cpp => LANGUAGE CPP
OPTIONS_GHC -fglasgow-exts => LANGUAGE ... (in HSE)
OPTIONS_GHC -XFoo => LANGUAGE Foo
LANGUAGE A, A => LANGUAGE A
-- do not do LANGUAGE A, LANGUAGE B to combine
<TEST>
{ - # LANGUAGE CPP # - }
{ - # LANGUAGE CPP # - }
# OPTIONS_YHC -cpp #
# OPTIONS_GHC -XFoo #
? ? ? @NoRefactor : refactor output has one LANGUAGE pragma per extension , while hlint suggestion has a single LANGUAGE pragma
{ - # LANGUAGE RebindableSyntax , EmptyCase # - }
{ - # LANGUAGE RebindableSyntax , EmptyCase , DuplicateRecordFields # - }
# LANGUAGE RebindableSyntax #
{ - # LANGUAGE CPP # - } { - # OPTIONS_GHC -foo # - } @NoRefactor -foo is not a valid flag
{ - # LANGUAGE CPP # - } { - # OPTIONS_GHC -w # - }
# OPTIONS_GHC -cpp #
# LANGUAGE CPP , Text #
# LANGUAGE RebindableSyntax #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE RebindableSyntax #
{ - # LANGUAGE EmptyCase , RebindableSyntax # - }
</TEST>
-}
module Hint.Pragma(pragmaHint) where
import Hint.Type(ModuHint,Idea(..),Severity(..),toSSAnc,rawIdea,modComments)
import Data.List.Extra
import qualified Data.List.NonEmpty as NE
import Data.Maybe
import Refact.Types
import qualified Refact.Types as R
import GHC.Hs
import GHC.Types.SrcLoc
import GHC.Data.FastString
import GHC.Util
import GHC.Driver.Session
-- | Module-level hint: gather the module's pragma comments, then report
-- duplicated LANGUAGE extensions and OPTIONS_GHC flags that could be
-- expressed as LANGUAGE pragmas instead.
pragmaHint :: ModuHint
pragmaHint _ modu =
  let ps = pragmas (modComments modu)
      opts = flags ps
      lang = languagePragmas ps in
  languageDupes lang ++ optToPragma opts lang
-- | For each OPTIONS_GHC pragma whose flags enable language extensions,
-- suggest replacing those flags with LANGUAGE pragmas.  Produces at most
-- one Idea covering all convertible pragmas.
optToPragma :: [(LEpaComment, [String])]
            -> [(LEpaComment, [String])]
            -> [Idea]
optToPragma flags languagePragmas =
  [pragmaIdea (OptionsToComment (fst <$> old2) ys rs) | Just old2 <- [NE.nonEmpty old]]
    where
      -- old: convertible pragmas; new: residual OPTIONS_GHC pragmas;
      -- ns: extensions gained; rs: refactorings to apply.
      (old, new, ns, rs) =
        unzip4 [(old, new, ns, r)
               | old <- flags, Just (new, ns) <- [optToLanguage old ls]
               , let r = mkRefact old new ns]
      ls = concatMap snd languagePragmas
      -- Extensions not already enabled by existing LANGUAGE pragmas.
      ns2 = nubOrd (concat ns) \\ ls
      dummyLoc = mkRealSrcLoc (fsLit "dummy") 1 1
      dummySpan = mkRealSrcSpan dummyLoc dummyLoc
      dummyAnchor = realSpanAsAnchor dummySpan
      ys = [mkLanguagePragmas dummyAnchor ns2 | ns2 /= []] ++ catMaybes new
-- Build the textual refactoring replacing pragma OLD with the new
-- LANGUAGE pragmas for NS plus the residual pragma NEW (if any).
-- NOTE(review): refers to dummyAnchor from optToPragma's where-clause;
-- the indentation making this a local binding appears lost in this
-- extraction — verify against upstream Pragma.hs.
mkRefact :: (LEpaComment, [String])
         -> Maybe LEpaComment
         -> [String]
         -> Refactoring R.SrcSpan
mkRefact old (maybe "" comment_ -> new) ns =
  let ns' = map (\n -> comment_ (mkLanguagePragmas dummyAnchor [n])) ns
  in ModifyComment (toSSAnc (fst old)) (intercalate "\n" (filter (not . null) (ns' `snoc` new)))
-- | Shapes of pragma suggestions: replace one comment, merge two
-- comments into one, or convert OPTIONS_GHC pragmas into comments with
-- accompanying refactorings.
data PragmaIdea = SingleComment LEpaComment LEpaComment
                | MultiComment LEpaComment LEpaComment LEpaComment
                | OptionsToComment (NE.NonEmpty LEpaComment) [LEpaComment] [Refactoring R.SrcSpan]
-- | Render a 'PragmaIdea' as an hlint 'Idea' with its refactorings.
pragmaIdea :: PragmaIdea -> Idea
pragmaIdea pidea =
  case pidea of
    SingleComment old new ->
      mkFewer (getAncLoc old) (comment_ old) (Just $ comment_ new) []
        [ModifyComment (toSSAnc old) (comment_ new)]
    MultiComment repl delete new ->
      -- Replace the first comment with the merged text, blank the second.
      mkFewer (getAncLoc repl)
        (f [repl, delete]) (Just $ comment_ new) []
        [ ModifyComment (toSSAnc repl) (comment_ new)
        , ModifyComment (toSSAnc delete) ""]
    OptionsToComment old new r ->
      mkLanguage (getAncLoc . NE.head $ old)
        (f $ NE.toList old) (Just $ f new) []
        r
  where
    f = unlines . map comment_
    mkFewer = rawIdea Hint.Type.Warning "Use fewer LANGUAGE pragmas"
    mkLanguage = rawIdea Hint.Type.Warning "Use LANGUAGE pragmas"
-- | Flag duplicate extensions: within one LANGUAGE pragma (dedupe it)
-- and across pairs of pragmas sharing extensions (merge them).
languageDupes :: [(LEpaComment, [String])] -> [Idea]
languageDupes ( (a@(L l _), les) : cs ) =
  (if nubOrd les /= les
     then [pragmaIdea (SingleComment a (mkLanguagePragmas l $ nubOrd les))]
     else [pragmaIdea (MultiComment a b (mkLanguagePragmas l (nubOrd $ les ++ les'))) | ( b@(L _ _), les' ) <- cs, not $ disjoint les les']
  ) ++ languageDupes cs
languageDupes _ = []
-- | Map one GHC option flag to the language extensions it enables.
-- 'Nothing' marks a flag that is not extension-related and must stay
-- in the OPTIONS_GHC pragma.
strToLanguage :: String -> Maybe [String]
strToLanguage flag
  | flag == "-cpp" = Just ["CPP"]
  | "-X" `isPrefixOf` flag = Just [drop 2 flag]
  | flag == "-fglasgow-exts" = Just $ map show glasgowExtsFlags
  | otherwise = Nothing
-- In 'optToLanguage', 'p' is an 'OPTIONS_GHC' pragma,
-- (comment marker restored; it was bare text in this extraction)
-- 'langexts' a list of all language extensions in the module enabled
-- by 'LANGUAGE' pragmas.
--
-- If ALL of the flags in the pragma enable language extensions,
-- 'return Nothing'.
--
-- If some (or all) of the flags enable options that are not language
-- extensions, compute a new options pragma with only non-language
-- extension enabling flags. Return that together with a list of any
-- language extensions enabled by this pragma that are not otherwise
-- enabled by LANGUAGE pragmas in the module.
optToLanguage :: (LEpaComment, [String])
              -> [String]
              -> Maybe (Maybe LEpaComment, [String])
optToLanguage (L loc _, flags) languagePragmas
  | any isJust vs =
      -- 'ls' is a list of language features enabled by this
      -- OPTIONS_GHC pragma that are not enabled by LANGUAGE pragmas
      -- in this module.
      let ls = filter (not . (`elem` languagePragmas)) (concat $ catMaybes vs) in
      Just (res, ls)
  where
    -- Try reinterpreting each flag as a list of language features
    -- (e.g. via '-X'..., '-fglasgow-exts').
    vs = map strToLanguage flags -- e.g. '[Nothing, Just ["ScopedTypeVariables"], Nothing, ...]'
    -- Keep any flag that does not enable language extensions.
    keep = concat $ zipWith (\v f -> [f | isNothing v]) vs flags
    -- If there are flags to keep, 'res' is a new pragma setting just those flags.
    res = if null keep then Nothing else Just (mkFlags loc keep)
optToLanguage _ _ = Nothing
| null | https://raw.githubusercontent.com/ndmitchell/hlint/d06148be027179469333a892294fb7a90b9ea51c/src/Hint/Pragma.hs | haskell | do not do LANGUAGE A , LANGUAGE B to combine
do not do LANGUAGE A, LANGUAGE B to combine
Given a pragma, can you extract some language features out?
'langexts' a list of all language extensions in the module enabled
by 'LANGUAGE' pragmas.
If ALL of the flags in the pragma enable language extensions,
'return Nothing'.
If some (or all) of the flags enable options that are not language
extensions, compute a new options pragma with only non-language
extension enabling flags. Return that together with a list of any
language extensions enabled by this pragma that are not otherwise
enabled by LANGUAGE pragmas in the module.
'ls' is a list of language features enabled by this
OPTIONS_GHC pragma that are not enabled by LANGUAGE pragmas
in this module.
Try reinterpreting each flag as a list of language features
(e.g. via '-X'..., '-fglasgow-exts').
e.g. '[Nothing, Just ["ScopedTypeVariables"], Nothing, ...]'
Keep any flag that does not enable language extensions.
If there are flags to keep, 'res' is a new pragma setting just those flags. | # LANGUAGE ViewPatterns #
# LANGUAGE FlexibleContexts #
Suggest better pragmas
OPTIONS_GHC -cpp = > LANGUAGE CPP
OPTIONS_GHC = > LANGUAGE ... ( in HSE )
OPTIONS_GHC -XFoo = > LANGUAGE Foo
LANGUAGE A , A = > LANGUAGE A
< TEST >
{ - # OPTIONS_GHC -cpp #
Suggest better pragmas
OPTIONS_GHC -cpp => LANGUAGE CPP
OPTIONS_GHC -fglasgow-exts => LANGUAGE ... (in HSE)
OPTIONS_GHC -XFoo => LANGUAGE Foo
LANGUAGE A, A => LANGUAGE A
<TEST>
{ - # LANGUAGE CPP # - }
{ - # LANGUAGE CPP # - }
# OPTIONS_YHC -cpp #
# OPTIONS_GHC -XFoo #
? ? ? @NoRefactor : refactor output has one LANGUAGE pragma per extension , while hlint suggestion has a single LANGUAGE pragma
{ - # LANGUAGE RebindableSyntax , EmptyCase # - }
{ - # LANGUAGE RebindableSyntax , EmptyCase , DuplicateRecordFields # - }
# LANGUAGE RebindableSyntax #
{ - # LANGUAGE CPP # - } { - # OPTIONS_GHC -foo # - } @NoRefactor -foo is not a valid flag
{ - # LANGUAGE CPP # - } { - # OPTIONS_GHC -w # - }
# OPTIONS_GHC -cpp #
# LANGUAGE CPP , Text #
# LANGUAGE RebindableSyntax #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE RebindableSyntax #
{ - # LANGUAGE EmptyCase , RebindableSyntax # - }
</TEST>
-}
module Hint.Pragma(pragmaHint) where
import Hint.Type(ModuHint,Idea(..),Severity(..),toSSAnc,rawIdea,modComments)
import Data.List.Extra
import qualified Data.List.NonEmpty as NE
import Data.Maybe
import Refact.Types
import qualified Refact.Types as R
import GHC.Hs
import GHC.Types.SrcLoc
import GHC.Data.FastString
import GHC.Util
import GHC.Driver.Session
pragmaHint :: ModuHint
pragmaHint _ modu =
let ps = pragmas (modComments modu)
opts = flags ps
lang = languagePragmas ps in
languageDupes lang ++ optToPragma opts lang
optToPragma :: [(LEpaComment, [String])]
-> [(LEpaComment, [String])]
-> [Idea]
optToPragma flags languagePragmas =
[pragmaIdea (OptionsToComment (fst <$> old2) ys rs) | Just old2 <- [NE.nonEmpty old]]
where
(old, new, ns, rs) =
unzip4 [(old, new, ns, r)
| old <- flags, Just (new, ns) <- [optToLanguage old ls]
, let r = mkRefact old new ns]
ls = concatMap snd languagePragmas
ns2 = nubOrd (concat ns) \\ ls
dummyLoc = mkRealSrcLoc (fsLit "dummy") 1 1
dummySpan = mkRealSrcSpan dummyLoc dummyLoc
dummyAnchor = realSpanAsAnchor dummySpan
ys = [mkLanguagePragmas dummyAnchor ns2 | ns2 /= []] ++ catMaybes new
mkRefact :: (LEpaComment, [String])
-> Maybe LEpaComment
-> [String]
-> Refactoring R.SrcSpan
mkRefact old (maybe "" comment_ -> new) ns =
let ns' = map (\n -> comment_ (mkLanguagePragmas dummyAnchor [n])) ns
in ModifyComment (toSSAnc (fst old)) (intercalate "\n" (filter (not . null) (ns' `snoc` new)))
data PragmaIdea = SingleComment LEpaComment LEpaComment
| MultiComment LEpaComment LEpaComment LEpaComment
| OptionsToComment (NE.NonEmpty LEpaComment) [LEpaComment] [Refactoring R.SrcSpan]
pragmaIdea :: PragmaIdea -> Idea
pragmaIdea pidea =
case pidea of
SingleComment old new ->
mkFewer (getAncLoc old) (comment_ old) (Just $ comment_ new) []
[ModifyComment (toSSAnc old) (comment_ new)]
MultiComment repl delete new ->
mkFewer (getAncLoc repl)
(f [repl, delete]) (Just $ comment_ new) []
[ ModifyComment (toSSAnc repl) (comment_ new)
, ModifyComment (toSSAnc delete) ""]
OptionsToComment old new r ->
mkLanguage (getAncLoc . NE.head $ old)
(f $ NE.toList old) (Just $ f new) []
r
where
f = unlines . map comment_
mkFewer = rawIdea Hint.Type.Warning "Use fewer LANGUAGE pragmas"
mkLanguage = rawIdea Hint.Type.Warning "Use LANGUAGE pragmas"
languageDupes :: [(LEpaComment, [String])] -> [Idea]
languageDupes ( (a@(L l _), les) : cs ) =
(if nubOrd les /= les
then [pragmaIdea (SingleComment a (mkLanguagePragmas l $ nubOrd les))]
else [pragmaIdea (MultiComment a b (mkLanguagePragmas l (nubOrd $ les ++ les'))) | ( b@(L _ _), les' ) <- cs, not $ disjoint les les']
) ++ languageDupes cs
languageDupes _ = []
strToLanguage :: String -> Maybe [String]
strToLanguage "-cpp" = Just ["CPP"]
strToLanguage x | "-X" `isPrefixOf` x = Just [drop 2 x]
strToLanguage "-fglasgow-exts" = Just $ map show glasgowExtsFlags
strToLanguage _ = Nothing
In ' optToLanguage , ' p ' is an ' OPTIONS_GHC ' pragma ,
optToLanguage :: (LEpaComment, [String])
-> [String]
-> Maybe (Maybe LEpaComment, [String])
optToLanguage (L loc _, flags) languagePragmas
| any isJust vs =
let ls = filter (not . (`elem` languagePragmas)) (concat $ catMaybes vs) in
Just (res, ls)
where
keep = concat $ zipWith (\v f -> [f | isNothing v]) vs flags
res = if null keep then Nothing else Just (mkFlags loc keep)
optToLanguage _ _ = Nothing
|
29f1e42b8d21d6222d1c384d0a6f9f76156a1e589ca07e9b73795d5570a019cf | igrishaev/book-sessions | server.clj | (ns book.systems.comp.server
(:require
[com.stuartsierra.component :as component]
[book.systems.comp.db :as db]
[ring.adapter.jetty :refer [run-jetty]]))
;; Ring handler used by the Jetty server component: ignores the request
;; (any arity, like the original `constantly`) and always answers
;; 200 "Hello".
(def app (fn [& _request] {:status 200 :body "Hello"}))
#_
(defn app
[request]
{:status 200
:body (with-out-str
(clojure.pprint/pprint request))})
#_
(defn app
[request]
(let [{:keys [db]} request
data (db/query db "select * from requests")]
{:status 200
:body (with-out-str
(clojure.pprint/pprint data))}))
#_
(defrecord Server
[options
server]
component/Lifecycle
(start [this]
(let [server (run-jetty app options)]
(assoc this :server server)))
(stop [this]
(.stop server)
(assoc this :server nil)))
#_
(defn make-handler [app db]
(fn [request]
(app (assoc request :db db))))
#_
(defroutes app
(GET "/" request (page-index request))
(GET "/hello" request (page-hello request))
page-404)
(require '[compojure.core
:refer [GET routes]])
;; Index page handler: dumps every row of the `requests` table as
;; pretty-printed data.  First argument is the web component map,
;; destructured for its :db dependency; `request` is currently unused.
(defn page-index
  [{:keys [db]} request]
  (let [data (db/query db "select * from requests")]
    {:status 200
     :body (with-out-str
             (clojure.pprint/pprint data))}))
;; Build the Compojure route table, closing over the `web` component so
;; handlers can reach its dependencies.  Only "/" is live; "/hello" is
;; stubbed out with #_.
(defn make-routes [web]
  (routes
   (GET "/" request (page-index web request))
   #_(GET "/hello" request (page-hello web request))))
;; Wrap handler `app` so every incoming request carries the database
;; component under :db before `app` sees it.
(defn make-handler [app db]
  (fn [req]
    (-> req
        (assoc :db db)
        app)))
#_
(defrecord Server
[options
server
db]
component/Lifecycle
(start [this]
(let [handler (make-handler app db)
server (run-jetty handler options)]
(assoc this :server server)))
(stop [this]
(.stop server)
(assoc this :server nil)))
;; Jetty server component.  `options` go straight to run-jetty, `web` is
;; the injected web component used to build routes, and `server` holds
;; the running Jetty instance while started.
(defrecord Server
  [options
   server
   web]
  component/Lifecycle
  (start [this]
    ;; Build routes against the current web component, then boot Jetty.
    (let [routes (make-routes web)
          server (run-jetty routes options)]
      (assoc this :server server)))
  (stop [this]
    ;; NOTE(review): not idempotent — stopping an already-stopped
    ;; component calls .stop on nil; confirm whether that matters here.
    (.stop server)
    (assoc this :server nil)))
#_
(defrecord Server
[options server web])
#_
(defn make-server
[options]
(map->Server {:options options}))
#_
(defn make-server
[options]
(-> (map->Server {:options options})
(component/using [:db])))
(defn make-server
[options]
(-> (map->Server {:options options})
(component/using [:web])))
( def c0 ( map->Server { : options { : port 8080 : join ? false } } ) )
;; (def c1 (component/start c0))
;; (def c2 (component/stop c1))
( def s - created ( map->Server { : options { : port 8080 : join ? false } } ) )
;; (def s-started (component/start s-created))
;; (def s-stopped (component/stop s-started))
#_
(def s-created
(map->Server
{:options {:port 8080 :join? false}}))
#_(def s-started (component/start s-created))
#_(def s-stopped (component/stop s-started))
#_
(def s-created (make-server {:port 8080 :join? false}))
#_
(defrecord Server
[options
server]
component/Lifecycle
(start [this]
(if server
this
(let [server (run-jetty app options)]
(assoc this :server server))))
(stop [this]
(when server
(.stop server))
(assoc this :server nil)))
#_
(start [this]
(if server
this
(let [server (run-jetty app options)]
(assoc this :server server))))
#_
(start [this]
(let [server (or server (run-jetty app options))]
(assoc this :server server)))
#_
(stop [this]
(when server
(.stop server))
(assoc this :server nil))
#_
(defrecord BadServer
[options server]
component/Lifecycle
(start [this]
{:server (run-jetty app options)})
(stop [this]
(.stop server)
nil))
( def bs - created ( map->BadServer { : options { : port 8080 : join ? false } } ) )
;; (def bs-started (component/start bs-created))
"
built : 2018 - 08 - 30T13:59:14.071Z ; git : 27208684755d94a92186989f695db2d7b21ebc51 ; jvm 1.8.0_102 - b14
2019-08-07 10:21:47,538 INFO o.e.jetty.server.AbstractConnector - Started ServerConnector@405ff5ed{HTTP/1.1,[http/1.1]}{0.0.0.0:8080}
2019-08-07 10:21:47,542 INFO org.eclipse.jetty.server.Server - Started @61386534ms
"
#_
(def bs-stopped (component/stop bs-started))
#_
NullPointerException
#_
(def s (-> {:port 8088 :join? false}
make-server
component/start))
#_
(component/start s)
"
Execution error (BindException) at ...
Address already in use
"
| null | https://raw.githubusercontent.com/igrishaev/book-sessions/c62af1230e91b8ab9e4e456798e894d1b4145dfc/src/book/systems/comp/server.clj | clojure | (def c1 (component/start c0))
(def c2 (component/stop c1))
(def s-started (component/start s-created))
(def s-stopped (component/stop s-started))
(def bs-started (component/start bs-created))
git : 27208684755d94a92186989f695db2d7b21ebc51 ; jvm 1.8.0_102 - b14 | (ns book.systems.comp.server
(:require
[com.stuartsierra.component :as component]
[book.systems.comp.db :as db]
[ring.adapter.jetty :refer [run-jetty]]))
(def app (constantly {:status 200 :body "Hello"}))
#_
(defn app
[request]
{:status 200
:body (with-out-str
(clojure.pprint/pprint request))})
#_
(defn app
[request]
(let [{:keys [db]} request
data (db/query db "select * from requests")]
{:status 200
:body (with-out-str
(clojure.pprint/pprint data))}))
#_
(defrecord Server
[options
server]
component/Lifecycle
(start [this]
(let [server (run-jetty app options)]
(assoc this :server server)))
(stop [this]
(.stop server)
(assoc this :server nil)))
#_
(defn make-handler [app db]
(fn [request]
(app (assoc request :db db))))
#_
(defroutes app
(GET "/" request (page-index request))
(GET "/hello" request (page-hello request))
page-404)
(require '[compojure.core
:refer [GET routes]])
(defn page-index
[{:keys [db]} request]
(let [data (db/query db "select * from requests")]
{:status 200
:body (with-out-str
(clojure.pprint/pprint data))}))
(defn make-routes [web]
(routes
(GET "/" request (page-index web request))
#_(GET "/hello" request (page-hello web request))))
(defn make-handler [app db]
(fn [request]
(app (assoc request :db db))))
#_
(defrecord Server
[options
server
db]
component/Lifecycle
(start [this]
(let [handler (make-handler app db)
server (run-jetty handler options)]
(assoc this :server server)))
(stop [this]
(.stop server)
(assoc this :server nil)))
(defrecord Server
[options
server
web]
component/Lifecycle
(start [this]
(let [routes (make-routes web)
server (run-jetty routes options)]
(assoc this :server server)))
(stop [this]
(.stop server)
(assoc this :server nil)))
#_
(defrecord Server
[options server web])
#_
(defn make-server
[options]
(map->Server {:options options}))
#_
(defn make-server
[options]
(-> (map->Server {:options options})
(component/using [:db])))
(defn make-server
[options]
(-> (map->Server {:options options})
(component/using [:web])))
( def c0 ( map->Server { : options { : port 8080 : join ? false } } ) )
( def s - created ( map->Server { : options { : port 8080 : join ? false } } ) )
#_
(def s-created
(map->Server
{:options {:port 8080 :join? false}}))
#_(def s-started (component/start s-created))
#_(def s-stopped (component/stop s-started))
#_
(def s-created (make-server {:port 8080 :join? false}))
#_
(defrecord Server
[options
server]
component/Lifecycle
(start [this]
(if server
this
(let [server (run-jetty app options)]
(assoc this :server server))))
(stop [this]
(when server
(.stop server))
(assoc this :server nil)))
#_
(start [this]
(if server
this
(let [server (run-jetty app options)]
(assoc this :server server))))
#_
(start [this]
(let [server (or server (run-jetty app options))]
(assoc this :server server)))
#_
(stop [this]
(when server
(.stop server))
(assoc this :server nil))
#_
(defrecord BadServer
[options server]
component/Lifecycle
(start [this]
{:server (run-jetty app options)})
(stop [this]
(.stop server)
nil))
( def bs - created ( map->BadServer { : options { : port 8080 : join ? false } } ) )
"
2019-08-07 10:21:47,538 INFO o.e.jetty.server.AbstractConnector - Started ServerConnector@405ff5ed{HTTP/1.1,[http/1.1]}{0.0.0.0:8080}
2019-08-07 10:21:47,542 INFO org.eclipse.jetty.server.Server - Started @61386534ms
"
#_
(def bs-stopped (component/stop bs-started))
#_
NullPointerException
#_
(def s (-> {:port 8088 :join? false}
make-server
component/start))
#_
(component/start s)
"
Execution error (BindException) at ...
Address already in use
"
|
3eb68728ff7f8dc2252f3b04f83321856cf9c8b5e137c501b93e9b7253f356a4 | input-output-hk/cardano-ledger-byron | TxAux.hs | {-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveFunctor #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
# LANGUAGE TypeApplications #
{-# LANGUAGE TypeFamilies #-}
module Cardano.Chain.UTxO.TxAux
( TxAux
, ATxAux(..)
, mkTxAux
, annotateTxAux
, taTx
, taWitness
, txaF
)
where
import Cardano.Prelude
import Data.Aeson (ToJSON)
import qualified Data.ByteString.Lazy as Lazy
import Formatting (Format, bprint, build, later)
import qualified Formatting.Buildable as B
import Cardano.Binary
( Annotated(..)
, ByteSpan
, Decoded(..)
, FromCBOR(..)
, ToCBOR(..)
, annotatedDecoder
, fromCBORAnnotated
, encodeListLen
, enforceSize
, serialize
, slice
, unsafeDeserialize
)
import Cardano.Chain.UTxO.Tx (Tx)
import Cardano.Chain.UTxO.TxWitness (TxWitness)
-- | Transaction + auxiliary data
type TxAux = ATxAux ()
mkTxAux :: Tx -> TxWitness -> TxAux
mkTxAux tx tw = ATxAux (Annotated tx ()) (Annotated tw ()) ()
annotateTxAux :: TxAux -> ATxAux ByteString
annotateTxAux ta = Lazy.toStrict . slice bs <$> ta'
where
bs = serialize ta
ta' = unsafeDeserialize bs
data ATxAux a = ATxAux
{ aTaTx :: !(Annotated Tx a)
, aTaWitness :: !(Annotated TxWitness a)
, aTaAnnotation :: !a
} deriving (Generic, Show, Eq, Functor)
deriving anyclass NFData
instance Decoded (ATxAux ByteString) where
type BaseType (ATxAux ByteString) = ATxAux ()
recoverBytes = aTaAnnotation
-- Used for debugging purposes only
instance ToJSON a => ToJSON (ATxAux a) where
taTx :: ATxAux a -> Tx
taTx = unAnnotated . aTaTx
taWitness :: ATxAux a -> TxWitness
taWitness = unAnnotated . aTaWitness
| Specialized formatter for ' TxAux '
txaF :: Format r (TxAux -> r)
txaF = later $ \ta -> bprint
(build . "\n" . "witnesses: " . listJsonIndent 4)
(taTx ta)
(taWitness ta)
instance B.Buildable TxAux where
build = bprint txaF
instance ToCBOR TxAux where
toCBOR ta = encodeListLen 2 <> toCBOR (taTx ta) <> toCBOR (taWitness ta)
encodedSizeExpr size pxy = 1 + size (taTx <$> pxy) + size (taWitness <$> pxy)
instance FromCBOR TxAux where
fromCBOR = void <$> fromCBOR @(ATxAux ByteSpan)
instance FromCBOR (ATxAux ByteSpan) where
fromCBOR = do
Annotated (tx, witness) byteSpan <- annotatedDecoder $ do
enforceSize "TxAux" 2
tx <- fromCBORAnnotated
witness <- fromCBORAnnotated
pure (tx, witness)
pure $ ATxAux tx witness byteSpan
| null | https://raw.githubusercontent.com/input-output-hk/cardano-ledger-byron/d309449e6c303a9f0dcc8dcf172df6f0b3195ed5/cardano-ledger/src/Cardano/Chain/UTxO/TxAux.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE DeriveFunctor #
# LANGUAGE OverloadedStrings #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
| Transaction + auxiliary data
Used for debugging purposes only | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE TypeApplications #
module Cardano.Chain.UTxO.TxAux
( TxAux
, ATxAux(..)
, mkTxAux
, annotateTxAux
, taTx
, taWitness
, txaF
)
where
import Cardano.Prelude
import Data.Aeson (ToJSON)
import qualified Data.ByteString.Lazy as Lazy
import Formatting (Format, bprint, build, later)
import qualified Formatting.Buildable as B
import Cardano.Binary
( Annotated(..)
, ByteSpan
, Decoded(..)
, FromCBOR(..)
, ToCBOR(..)
, annotatedDecoder
, fromCBORAnnotated
, encodeListLen
, enforceSize
, serialize
, slice
, unsafeDeserialize
)
import Cardano.Chain.UTxO.Tx (Tx)
import Cardano.Chain.UTxO.TxWitness (TxWitness)
type TxAux = ATxAux ()
mkTxAux :: Tx -> TxWitness -> TxAux
mkTxAux tx tw = ATxAux (Annotated tx ()) (Annotated tw ()) ()
annotateTxAux :: TxAux -> ATxAux ByteString
annotateTxAux ta = Lazy.toStrict . slice bs <$> ta'
where
bs = serialize ta
ta' = unsafeDeserialize bs
data ATxAux a = ATxAux
{ aTaTx :: !(Annotated Tx a)
, aTaWitness :: !(Annotated TxWitness a)
, aTaAnnotation :: !a
} deriving (Generic, Show, Eq, Functor)
deriving anyclass NFData
instance Decoded (ATxAux ByteString) where
type BaseType (ATxAux ByteString) = ATxAux ()
recoverBytes = aTaAnnotation
instance ToJSON a => ToJSON (ATxAux a) where
taTx :: ATxAux a -> Tx
taTx = unAnnotated . aTaTx
taWitness :: ATxAux a -> TxWitness
taWitness = unAnnotated . aTaWitness
| Specialized formatter for ' TxAux '
txaF :: Format r (TxAux -> r)
txaF = later $ \ta -> bprint
(build . "\n" . "witnesses: " . listJsonIndent 4)
(taTx ta)
(taWitness ta)
instance B.Buildable TxAux where
build = bprint txaF
instance ToCBOR TxAux where
toCBOR ta = encodeListLen 2 <> toCBOR (taTx ta) <> toCBOR (taWitness ta)
encodedSizeExpr size pxy = 1 + size (taTx <$> pxy) + size (taWitness <$> pxy)
instance FromCBOR TxAux where
fromCBOR = void <$> fromCBOR @(ATxAux ByteSpan)
instance FromCBOR (ATxAux ByteSpan) where
fromCBOR = do
Annotated (tx, witness) byteSpan <- annotatedDecoder $ do
enforceSize "TxAux" 2
tx <- fromCBORAnnotated
witness <- fromCBORAnnotated
pure (tx, witness)
pure $ ATxAux tx witness byteSpan
|
bcd188e928fed5590ce01827baf2589d352c5a82c5ec45f7a285627912b8402a | kappelmann/eidi2_repetitorium_tum | SparseVectorTest.ml | open SparseVector
let _ = if empty<>[] then failwith "Your empty method is not working correctly"
let v = [1;2;3;0;1;0;5;(-10)]
let v = sb_vektor v
let expected = [(0,1);(1,2);(2,3);(4,1);(6,5);(7,(-10))]
let b = v=expected
let _ = if not b then failwith "Your sb_vektor method is not working correctly"
let t = set 3 0 v
let _ = if t<>v then failwith "Your set method is not working correctly"
let t = set 3 17 v
let expected = [(0,1);(1,2);(2,3);(3,17);(4,1);(6,5);(7,(-10))]
let _ = if t<>expected then failwith "Your set method is not working correctly"
let t = set 0 0 v
let expected = [(1,2);(2,3);(4,1);(6,5);(7,(-10))]
let _ = if t<>expected then failwith "Your set method is not working correctly"
let t = mul 0 v
let _ = if t<>[] then failwith "Your mul method is not working correctly"
let t = mul 2 v
let expected = [(0,2);(1,4);(2,6);(4,2);(6,10);(7,(-20))]
let _ = if t<>expected then failwith "Your mul method is not working correctly"
let w = sb_vektor [(-1);9;3;0;1;0]
let res = add_sb_vektor v w
let expected = [(1,11);(2,6);(4,2);(6,5);(7,(-10))]
let b = res=expected
let _ = if not b then failwith "Your add_sb_vektor method is not working correctly"
let res = mul_sb_vektor v w
let expected = 1*(-1)+2*9+3*3+1*1
let b = res=expected
let _ = if b then print_string "Everything is working. Nice job! ;) +5 points" else failwith "Your add_sb_vektor method is not working correctly"
| null | https://raw.githubusercontent.com/kappelmann/eidi2_repetitorium_tum/1d16bbc498487a85960e0d83152249eb13944611/2016/sparse_vector/solutions/SparseVectorTest.ml | ocaml | open SparseVector
let _ = if empty<>[] then failwith "Your empty method is not working correctly"
let v = [1;2;3;0;1;0;5;(-10)]
let v = sb_vektor v
let expected = [(0,1);(1,2);(2,3);(4,1);(6,5);(7,(-10))]
let b = v=expected
let _ = if not b then failwith "Your sb_vektor method is not working correctly"
let t = set 3 0 v
let _ = if t<>v then failwith "Your set method is not working correctly"
let t = set 3 17 v
let expected = [(0,1);(1,2);(2,3);(3,17);(4,1);(6,5);(7,(-10))]
let _ = if t<>expected then failwith "Your set method is not working correctly"
let t = set 0 0 v
let expected = [(1,2);(2,3);(4,1);(6,5);(7,(-10))]
let _ = if t<>expected then failwith "Your set method is not working correctly"
let t = mul 0 v
let _ = if t<>[] then failwith "Your mul method is not working correctly"
let t = mul 2 v
let expected = [(0,2);(1,4);(2,6);(4,2);(6,10);(7,(-20))]
let _ = if t<>expected then failwith "Your mul method is not working correctly"
let w = sb_vektor [(-1);9;3;0;1;0]
let res = add_sb_vektor v w
let expected = [(1,11);(2,6);(4,2);(6,5);(7,(-10))]
let b = res=expected
let _ = if not b then failwith "Your add_sb_vektor method is not working correctly"
let res = mul_sb_vektor v w
let expected = 1*(-1)+2*9+3*3+1*1
let b = res=expected
let _ = if b then print_string "Everything is working. Nice job! ;) +5 points" else failwith "Your add_sb_vektor method is not working correctly"
| |
3d4ca837a6a9f76f28921080f257907993007438718214c80a14da2968493e16 | c-cube/frog-utils | IPC_daemon.ml |
(* This file is free software, part of frog-utils. See file "license" for more details. *)
* { 1 Daemon for Coordination }
open Frog
open Lwt.Infix
module M = IPC_message
module Int_map = Misc.Int_map
[@@@ocaml.warning "-32"]
type 'a or_error = ('a, string) Result.result
let main_config_file = "/etc/froglock.conf"
let section = Lwt_log.Section.make "ipc_daemon"
let default_port = 12_042
type uid = M.uid
a pair ( client i d , client - relative uid )
module Full_id = struct
type t = {
id_client: int;
id_task: uid;
}
let make id_client id_task: t = {id_client; id_task}
let compare (a:t) (b:t): int =
let open CCOrd.Infix in
CCOrd.int a.id_client b.id_client
<?> (CCOrd.int, a.id_task, b.id_task)
let to_string (a:t): string =
Printf.sprintf "(:c_id %d :uid %d)" a.id_client a.id_task
let pp out (a:t): unit = CCFormat.string out (to_string a)
end
module Full_id_map = CCMap.Make(Full_id)
(* connection to a client *)
type client_conn = {
c_id: int; (* unique client id *)
c_in: Lwt_io.input_channel;
c_out: Lwt_io.output_channel;
mutable c_last_ping: float;
mutable c_last_pong: float;
mutable c_active: active_task Full_id_map.t;
mutable c_thread: unit Lwt.t;
}
(* a given "lock" task *)
and acquire_task = {
t_query : M.job;
t_conn: client_conn;
}
and active_task = {
at_task: acquire_task;
at_start: float;
}
let task_id (t:acquire_task): Full_id.t = Full_id.make t.t_conn.c_id t.t_query.M.uid
let active_id (at:active_task): Full_id.t = task_id at.at_task
module Q = CCHeap.Make(struct
type t = acquire_task
let leq t t' =
M.(t.t_query.priority > t'.t_query.priority) ||
M.(t.t_query.priority = t'.t_query.priority && t.t_query.cores < t'.t_query.cores) ||
M.(t.t_query.priority = t'.t_query.priority &&
t.t_query.cores = t'.t_query.cores &&
( task_id t < task_id t' ||
(task_id t = task_id t' && t.t_conn.c_id < t'.t_conn.c_id) ))
end)
(* message received by client *)
type event =
| E_refresh (* check timeout and all that *)
| E_recv of client_conn * M.t
module State : sig
type t
val make : forever:bool -> string list -> t or_error
val clients : t -> client_conn Int_map.t
val num_clients : t -> int
val add_client : t -> client_conn -> unit
val remove_client : t -> client_conn -> unit
val find_active : t -> Full_id.t -> active_task option
val remove_active : t -> active_task -> unit
val add_active : t -> active_task -> unit
val num_active : t -> int
val active_l : t -> active_task list
val waiting : t -> Q.t
val send_event : t -> event -> unit Lwt.t
val next_event : t -> event Lwt.t
val push_task : t -> acquire_task -> unit
val peek_task : t -> acquire_task option
val take_task : t -> acquire_task option
val max_cores : t -> int
val used_cores : t -> int
val forever : t -> bool
val cores_needed : t -> int
val new_id: t -> int
val accept : t -> bool
val stop_accept : t -> unit
val time_since_last_event: t -> float
val log_state: t -> unit
end = struct
type t = {
mutable id_count : int; (* for allocating client IDs *)
mutable msg_count : int; (* unique count for messages *)
mutable max_cores : int;
mutable accept : bool;
mutable active : active_task Full_id_map.t;
mutable clients : client_conn Int_map.t; (* c_id -> client *)
mutable waiting : Q.t; (* for scheduling tasks *)
mutable last_event: float;
forever: bool;
events : event Lwt_mvar.t;
}
let active t = t.active
let add_active t a = t.active <- Full_id_map.add (active_id a) a t.active
let remove_active t a = t.active <- Full_id_map.remove (active_id a) t.active
let find_active t id = Full_id_map.get id t.active
let num_active t = Full_id_map.cardinal t.active
let active_l t = Full_id_map.to_list t.active |> List.rev_map snd
let waiting t = t.waiting
let forever t = t.forever
let clients t = t.clients
let time_since_last_event t = Unix.gettimeofday() -. t.last_event
let num_clients t = Int_map.cardinal (clients t)
let add_client t c = t.clients <- Int_map.add c.c_id c t.clients
let remove_client t c = t.clients <- Int_map.remove c.c_id t.clients
let max_cores t = t.max_cores
let send_event (st:t) (e:event): unit Lwt.t =
st.last_event <- Unix.gettimeofday();
Lwt_mvar.put st.events e
let next_event (st:t): event Lwt.t =
let%lwt res = Lwt_mvar.take st.events in
st.last_event <- Unix.gettimeofday();
Lwt.return res
let accept t = t.accept
let stop_accept t = t.accept <- false
let push_task st t = st.waiting <- Q.add st.waiting t
let peek_task st = Q.find_min st.waiting
let take_task st: acquire_task option =
begin match Q.take st.waiting with
| None -> None
| Some (q,t) ->
st.waiting <- q;
Some t
end
let new_id st: int =
let id = st.id_count in
st.id_count <- st.id_count + 1;
id
let used_cores st: int =
Full_id_map.fold
(fun _ at cores ->
let j = at.at_task.t_query.M.cores in
cores + (if j <= 0 then max_cores st else j))
st.active 0
let cores_needed st: int = match peek_task st with
| None -> 0
| Some t ->
let j = t.t_query.M.cores in
if j <= 0 then max_cores st else j
let log_state st =
Lwt_log.ign_info_f ~section
"(daemon_state: %d running, %d / %d cores, %d waiting, %d clients)"
(Full_id_map.cardinal st.active) (used_cores st) (st.max_cores)
(Q.size st.waiting) (Int_map.cardinal st.clients)
let make ~forever config_files =
let open Misc.Err in
let default_config =
if Sys.file_exists main_config_file
then Config.parse_or_empty main_config_file else Config.empty
in
Config.parse_files config_files >|= fun config ->
let config = Config.merge config default_config in
{ id_count=0;
msg_count=0;
max_cores=Config.(get_or ~default:1 config @@ int "cores");
accept=true;
active=Full_id_map.empty;
clients=Int_map.empty;
waiting=Q.empty;
events=Lwt_mvar.create_empty ();
last_event=Unix.gettimeofday();
forever;
}
end
type state = State.t
let maybe_str = function
| None -> "<none>"
| Some s -> s
let show_msg_short (msg:M.t): string =
(M.show msg |> CCString.lines |> List.hd) ^ "…"
let client_send (c:client_conn)(m:M.t): unit Lwt.t =
Lwt_log.ign_debug_f ~section "(client_send :to %d `%s`)" c.c_id (show_msg_short m);
M.print c.c_out m
let client_get (c:client_conn): M.t Lwt.t =
let%lwt msg = M.parse c.c_in in
Lwt_log.ign_debug_f ~section "(client_get :from %d `%s`)" c.c_id (show_msg_short msg);
Lwt.return msg
(* reply to client's "status" query *)
let handle_status (st:state) (c:client_conn): unit Lwt.t =
Lwt_log.ign_info_f ~section "replying to client %d with status" c.c_id;
let waiting =
Q.fold
(fun acc task ->
let x = {
M.waiting_id = task.t_query.M.uid; waiting_job = task.t_query;
} in
x :: acc)
[] (State.waiting st)
and current =
State.active_l st
|> List.rev_map
(fun at ->
{ M.current_job=at.at_task.t_query;
current_start=at.at_start; })
in
let ans = M.StatusAnswer {M.waiting; current; max_cores = State.max_cores st} in
let%lwt () = client_send c ans in
Lwt.return_unit
handle one client : expect " start " and then just forward
all incoming messages to scheduler thread
all incoming messages to scheduler thread *)
let handle_client (st:state) (c:client_conn): unit Lwt.t =
let rec loop() =
let%lwt msg = client_get c in
let%lwt () = State.send_event st (E_recv (c, msg)) in
loop ()
in
let%lwt _ = M.expect c.c_in (M.equal M.Start) in
loop ()
(* broadcast the message to all clients *)
let broadcast (st:state)(from:client_conn)(msg:M.t): unit Lwt.t =
State.clients st
|> Int_map.to_list
|> Lwt_list.iter_p
(fun (_,c) ->
if c.c_id <> from.c_id then client_send c msg else Lwt.return_unit)
after this number of seconds without clients , daemon dies
let delay_before_dying = 300.
(* in some amount of time, send "refresh" *)
let schedule_refresh (st:state) =
Lwt.async
(fun () ->
let%lwt () = Lwt_unix.sleep (delay_before_dying +. 2.) in
State.send_event st E_refresh)
let close_chans c: unit Lwt.t =
try%lwt
let%lwt () = Lwt_io.close c.c_out in
let%lwt () = Lwt_io.close c.c_in in
Lwt.return_unit
with e ->
Lwt_log.ign_error_f "error when closing conn to client %d: %s" c.c_id
(Printexc.to_string e);
Lwt.return_unit
let kill_task (st:State.t) (at:active_task): unit =
State.remove_active st at;
let c = at.at_task.t_conn in
c.c_active <- Full_id_map.remove (active_id at) c.c_active;
Lwt_log.ign_debug_f ~section "task %s: done" (active_id at |> Full_id.to_string);
()
(* end of connection for this client *)
let kill_client (st:State.t)(c:client_conn): unit Lwt.t =
Lwt_log.ign_info_f "stop client %d" c.c_id;
Lwt.cancel c.c_thread;
State.remove_client st c;
close_chans c >>= fun () ->
(* also kill active tasks of this client *)
Full_id_map.values c.c_active (kill_task st);
Lwt.return_unit
(* scheduler: receives requests from several clients, and pings them back *)
let run_scheduler (st:state): unit Lwt.t =
(* find if a waiting task can be activated, otherwise
wait for incoming messages *)
let rec loop () =
State.log_state st;
if State.cores_needed st <= State.max_cores st - State.used_cores st then (
(* try to activate a waiting task *)
begin match State.take_task st with
| None ->
if not (State.forever st) &&
State.used_cores st = 0 &&
Int_map.is_empty (State.clients st) &&
State.num_active st = 0 &&
State.time_since_last_event st >= delay_before_dying
then (
(* only exit if no clients are connected, to avoid the
race condition:
- client connects
- queue is empty --> scheduler stops
- client sends "acquire" and never gets an answer *)
Lwt_log.ign_info ~section "no more tasks nor clients, exit";
Lwt.return_unit
) else (
(* no task, just listen for next event *)
wait_for_next_event ()
)
| Some task ->
(* start the given process *)
let uid = task.t_query.M.uid in
Lwt_log.ign_info_f ~section "start task %s (user %s, pid %d): %s"
(task_id task |> Full_id.to_string)
(maybe_str task.t_query.M.user)
task.t_query.M.pid
(maybe_str task.t_query.M.info);
let at = {
at_task=task;
at_start=Unix.gettimeofday();
} in
(* greenlight the task *)
State.add_active st at;
task.t_conn.c_active <-
Full_id_map.add (active_id at) at task.t_conn.c_active;
let%lwt () = client_send task.t_conn (M.Go uid) in
loop ()
end
) else wait_for_next_event ()
(* listen for new messages. [task] is the current running task, if any *)
and wait_for_next_event () =
let%lwt e = State.next_event st in
begin match e with
| E_recv (client,msg) ->
Lwt_log.ign_debug_f ~section
"(process_message :from %d :msg `%s`)" client.c_id (show_msg_short msg);
process_incoming_msg client msg
| E_refresh ->
Lwt_log.ign_debug ~section "process event 'refresh'";
loop ()
end
process one incoming message
and process_incoming_msg (c:client_conn) (msg:M.t) = match msg with
| M.Status ->
let%lwt () = handle_status st c in
loop()
| M.Start ->
Lwt_log.ign_error_f ~section
"invalid duplicate `Start` message for client %d" c.c_id;
let%lwt () = kill_client st c in
loop()
| M.Pong _ ->
" got ' pong ' from client % d " c.c_id ;
c.c_last_pong <- Unix.gettimeofday();
loop()
| M.End ->
Lwt_log.ign_info_f ~section "closed connection to client %d" c.c_id;
let%lwt () = kill_client st c in
schedule_refresh st;
loop ()
| M.Acquire query ->
if State.accept st then (
Lwt_log.ign_info_f ~section "new acquire from client %d (id %d)"
c.c_id query.M.uid;
(* add task to scheduler, will be treated as soon as there is room for it *)
let task = {
t_conn=c;
t_query=query;
} in
State.push_task st task;
) else (
(* reject task, not accepting any anymore *)
Lwt.async (fun () -> client_send c (M.Reject query.M.uid))
);
loop ()
| M.Release u ->
(* find corresponding task and release it *)
let id = Full_id.make c.c_id u in
let%lwt() = match State.find_active st id with
| None ->
Lwt_log.ign_error_f ~section "client %d released unknown task %d"
c.c_id u;
kill_client st c;
| Some at ->
Lwt_log.ign_debug_f ~section "client %d released task %d" c.c_id u;
kill_task st at; (* task is done, we can remove it *)
Lwt.return_unit
in
loop ()
| M.StopAccepting ->
Lwt_log.ign_info ~section "stop accepting jobs...";
State.stop_accept st;
loop ()
| (M.Start_bench _ | M.Finish_bench | M.Event _) as msg ->
(* broadcast, but do not wait for it to terminate *)
Lwt.async (fun () -> broadcast st c msg);
loop ()
| ( M.Ping _ | M.StatusAnswer _ | M.Go _ | M.Reject _) as msg ->
Lwt_log.ign_error_f "unexpected message: %s" (M.show msg);
loop ()
(* delete the task to make room for others *)
in
loop ()
let ping_delay = 5.
(* regularly ping clients, and kill these which have not answered to previous
ping *)
let run_ping_thread st: unit Lwt.t =
(* n: current "ping" id *)
let rec loop (n:int) =
let%lwt () = Lwt_unix.sleep ping_delay in
" send ping [ % d ] to clients " n ;
let killed_any = ref false in
let clients = State.clients st in
Int_map.iter
(fun _ c ->
if c.c_last_ping > c.c_last_pong +. 3. *. ping_delay +. 2.
then (
killed_any := true;
Lwt.async (fun () -> kill_client st c) (* dead *)
) else (
c.c_last_ping <- Unix.gettimeofday();
Lwt.async (fun () ->
try%lwt client_send c (M.Ping n)
with _ -> Lwt.return_unit)
))
clients;
(* might have to refresh state *)
if !killed_any then (
Lwt.async (fun () -> State.send_event st E_refresh)
);
loop (n+1)
in
loop 0
(* spawn a daemon, to listen on the given port *)
let spawn ?(forever=false) (port:int): unit Lwt.t =
Lwt_log.ign_info ~section "---------------------------";
Lwt_log.ign_info_f ~section "starting daemon on port %d" port;
let addr = Unix.ADDR_INET (Unix.inet_addr_loopback, port) in
begin match State.make ~forever [] with
| Result.Error e -> Lwt.fail_with e
| Result.Ok st -> Lwt.return st
end >>= fun st ->
(* scheduler *)
Lwt_log.ign_info ~section "start scheduler";
let scheduler_thread = run_scheduler st in
Lwt_log.ign_info ~section "scheduler started";
(* ping clients regularly *)
let ping_thread = run_ping_thread st in
(* server that listens for incoming clients *)
let%lwt server = Lwt_io.establish_server_with_client_address addr
(fun _ (ic,oc) ->
let c = {
c_in=ic;
c_out=oc;
c_last_ping=Unix.gettimeofday();
c_last_pong=Unix.gettimeofday();
c_id=State.new_id st;
c_thread=Lwt.return_unit;
c_active=Full_id_map.empty;
} in
let th = handle_client st c in
State.add_client st c;
c.c_thread <- th;
th)
in
(* stop *)
Lwt_log.ign_debug ~section "daemon started";
let%lwt () =
Lwt.pick
[ scheduler_thread;
ping_thread;
] in
Lwt_log.ign_debug ~section "daemon's server is stopping";
Lwt_io.shutdown_server server
(* TODO: change log level through connection *)
let setup_loggers file_name () =
let syslog = Lwt_log.syslog ~facility:`User () in
Lwt_log.default := syslog;
Lwt_log.add_rule "*" Lwt_log.Debug;
let%lwt () =
try%lwt
let%lwt log' = Lwt_log.file ~mode:`Append ~perm:0o666 ~file_name () in
let all_log = Lwt_log.broadcast [log'; syslog] in
Lwt_log.default := all_log;
Lwt.return_unit
with e ->
let%lwt _ = Lwt_io.eprintlf "error opening log file %s" file_name in
Lwt_log.ign_error_f "could not open file %s: %s"
file_name (Printexc.to_string e);
Lwt.return_unit
in
let old_hook = !Lwt.async_exception_hook in
Lwt.async_exception_hook := (fun e ->
Printf.eprintf "async error: %s\n%!" (Printexc.to_string e);
old_hook e);
Lwt_io.close Lwt_io.stderr
(* init function: check if we are within a daemon call (i.e. the main process
that has called itself with DAEMON_PORT=<port> to start a daemon),
in which case we call {!spawn} and then exit *)
let () = match Sys.getenv "DAEMON_PORT" |> int_of_string with
| p ->
(* Printf.printf "run as daemon on port %d\n%!" p; *)
Lwt_daemon.daemonize ~syslog:true ~directory:"/tmp"
~stdin:`Close ~stdout:`Close ~stderr:`Keep ();
let log_file =
let config = Config.parse_or_empty main_config_file in
Config.(get_or ~default:"/tmp/froglock.log" config @@ string "log")
in
Lwt_main.run (
let%lwt () = setup_loggers log_file () in
spawn p
);
exit 0
| exception _ -> ()
let fork_daemon port : unit =
Lwt.async
(fun () ->
let cmd = Sys.executable_name, [| Sys.executable_name |] in
let env = [| "DAEMON_PORT=" ^ string_of_int port |] in
Lwt_process.exec ~env cmd);
()
| null | https://raw.githubusercontent.com/c-cube/frog-utils/3f68c606a7abe702f9e22a0606080191a2952b18/src/server/IPC_daemon.ml | ocaml | This file is free software, part of frog-utils. See file "license" for more details.
connection to a client
unique client id
a given "lock" task
message received by client
check timeout and all that
for allocating client IDs
unique count for messages
c_id -> client
for scheduling tasks
reply to client's "status" query
broadcast the message to all clients
in some amount of time, send "refresh"
end of connection for this client
also kill active tasks of this client
scheduler: receives requests from several clients, and pings them back
find if a waiting task can be activated, otherwise
wait for incoming messages
try to activate a waiting task
only exit if no clients are connected, to avoid the
race condition:
- client connects
- queue is empty --> scheduler stops
- client sends "acquire" and never gets an answer
no task, just listen for next event
start the given process
greenlight the task
listen for new messages. [task] is the current running task, if any
add task to scheduler, will be treated as soon as there is room for it
reject task, not accepting any anymore
find corresponding task and release it
task is done, we can remove it
broadcast, but do not wait for it to terminate
delete the task to make room for others
regularly ping clients, and kill these which have not answered to previous
ping
n: current "ping" id
dead
might have to refresh state
spawn a daemon, to listen on the given port
scheduler
ping clients regularly
server that listens for incoming clients
stop
TODO: change log level through connection
init function: check if we are within a daemon call (i.e. the main process
that has called itself with DAEMON_PORT=<port> to start a daemon),
in which case we call {!spawn} and then exit
Printf.printf "run as daemon on port %d\n%!" p; |
* { 1 Daemon for Coordination }
open Frog
open Lwt.Infix
module M = IPC_message
module Int_map = Misc.Int_map
[@@@ocaml.warning "-32"]
(* result type used by [State.make] *)
type 'a or_error = ('a, string) Result.result

(* system-wide configuration file, read as a fallback by [State.make] *)
let main_config_file = "/etc/froglock.conf"

(* log section for this module *)
let section = Lwt_log.Section.make "ipc_daemon"

let default_port = 12_042

(* client-relative task identifier (see [M.job]) *)
type uid = M.uid
(* a pair (client id, client-relative uid) *)
module Full_id = struct
  type t = {
    id_client: int;
    id_task: uid;
  }

  let make id_client id_task: t = {id_client; id_task}

  (* order by owning client first, then by the client-local task uid *)
  let compare (a:t) (b:t): int =
    match compare a.id_client b.id_client with
    | 0 -> compare a.id_task b.id_task
    | c -> c

  let to_string (a:t): string =
    Printf.sprintf "(:c_id %d :uid %d)" a.id_client a.id_task

  let pp out (a:t): unit = CCFormat.string out (to_string a)
end
(* maps keyed by Full_id *)
module Full_id_map = CCMap.Make(Full_id)
(* connection to a client *)
type client_conn = {
  c_in: Lwt_io.input_channel;
  c_out: Lwt_io.output_channel;
  c_id: int; (* unique client id; restored field: built in [spawn] via
                [State.new_id] and read everywhere as [c.c_id] *)
  mutable c_last_ping: float; (* when we last pinged this client *)
  mutable c_last_pong: float; (* when it last answered a ping *)
  mutable c_active: active_task Full_id_map.t; (* tasks it currently runs *)
  mutable c_thread: unit Lwt.t; (* its reader thread, cancelled on kill *)
}

(* a given "lock" task requested by a client *)
and acquire_task = {
  t_query : M.job;
  t_conn: client_conn;
}

(* a task that has been green-lighted *)
and active_task = {
  at_task: acquire_task;
  at_start: float; (* when it was started *)
}
(* globally-unique id of a requested task: (owning client, client-local uid) *)
let task_id (t:acquire_task): Full_id.t = Full_id.make t.t_conn.c_id t.t_query.M.uid

(* id of an already-started task *)
let active_id (at:active_task): Full_id.t = task_id at.at_task
(* priority queue of waiting tasks: higher priority first; among equal
   priorities, fewer requested cores first; remaining ties broken by task
   and client ids so the order is total and deterministic *)
module Q = CCHeap.Make(struct
  type t = acquire_task
  let leq t t' =
    M.(t.t_query.priority > t'.t_query.priority) ||
    M.(t.t_query.priority = t'.t_query.priority && t.t_query.cores < t'.t_query.cores) ||
    M.(t.t_query.priority = t'.t_query.priority &&
       t.t_query.cores = t'.t_query.cores &&
       ( task_id t < task_id t' ||
         (task_id t = task_id t' && t.t_conn.c_id < t'.t_conn.c_id) ))
end)
(* events processed by the scheduler loop *)
type event =
  | E_recv of client_conn * M.t (* message received from a client *)
  | E_refresh (* restored constructor: re-check timeouts and the waiting
                 queue; built by [schedule_refresh] and [run_ping_thread],
                 matched in [run_scheduler] *)
(* global daemon state: connected clients, running and waiting tasks,
   plus the scheduler's event mailbox *)
module State : sig
  type t

  (* [make ~forever config_files] reads configuration and builds a state *)
  val make : forever:bool -> string list -> t or_error

  val clients : t -> client_conn Int_map.t
  val num_clients : t -> int
  val add_client : t -> client_conn -> unit
  val remove_client : t -> client_conn -> unit

  val find_active : t -> Full_id.t -> active_task option
  val remove_active : t -> active_task -> unit
  val add_active : t -> active_task -> unit
  val num_active : t -> int
  val active_l : t -> active_task list

  val waiting : t -> Q.t

  val send_event : t -> event -> unit Lwt.t
  val next_event : t -> event Lwt.t

  val push_task : t -> acquire_task -> unit
  val peek_task : t -> acquire_task option
  val take_task : t -> acquire_task option

  val max_cores : t -> int
  val used_cores : t -> int
  val forever : t -> bool
  val cores_needed : t -> int
  val new_id: t -> int
  val accept : t -> bool
  val stop_accept : t -> unit
  val time_since_last_event: t -> float
  val log_state: t -> unit
end = struct
  type t = {
    mutable max_cores : int;
    mutable accept : bool;
    mutable id_count : int; (* for allocating client IDs; restored field:
                               initialized in [make], used by [new_id] *)
    mutable msg_count : int; (* unique count for messages; restored field:
                                initialized in [make] *)
    mutable active : active_task Full_id_map.t;
    mutable clients : client_conn Int_map.t; (* c_id -> client; restored *)
    mutable waiting : Q.t; (* tasks waiting for free cores; restored *)
    mutable last_event: float;
    forever: bool;
    events : event Lwt_mvar.t;
  }

  let active t = t.active
  let add_active t a = t.active <- Full_id_map.add (active_id a) a t.active
  let remove_active t a = t.active <- Full_id_map.remove (active_id a) t.active
  let find_active t id = Full_id_map.get id t.active
  let num_active t = Full_id_map.cardinal t.active
  let active_l t = Full_id_map.to_list t.active |> List.rev_map snd
  let waiting t = t.waiting
  let forever t = t.forever
  let clients t = t.clients
  let time_since_last_event t = Unix.gettimeofday() -. t.last_event
  let num_clients t = Int_map.cardinal (clients t)
  let add_client t c = t.clients <- Int_map.add c.c_id c t.clients
  let remove_client t c = t.clients <- Int_map.remove c.c_id t.clients
  let max_cores t = t.max_cores

  let send_event (st:t) (e:event): unit Lwt.t =
    st.last_event <- Unix.gettimeofday();
    Lwt_mvar.put st.events e

  let next_event (st:t): event Lwt.t =
    let%lwt res = Lwt_mvar.take st.events in
    st.last_event <- Unix.gettimeofday();
    Lwt.return res

  let accept t = t.accept
  let stop_accept t = t.accept <- false

  let push_task st t = st.waiting <- Q.add st.waiting t
  let peek_task st = Q.find_min st.waiting
  let take_task st: acquire_task option =
    begin match Q.take st.waiting with
      | None -> None
      | Some (q,t) ->
        st.waiting <- q;
        Some t
    end

  (* allocate a fresh client id *)
  let new_id st: int =
    let id = st.id_count in
    st.id_count <- st.id_count + 1;
    id

  (* cores consumed by running tasks; a task asking for <= 0 cores counts
     as taking the whole machine *)
  let used_cores st: int =
    Full_id_map.fold
      (fun _ at cores ->
         let j = at.at_task.t_query.M.cores in
         cores + (if j <= 0 then max_cores st else j))
      st.active 0

  (* cores required by the next waiting task, if any *)
  let cores_needed st: int = match peek_task st with
    | None -> 0
    | Some t ->
      let j = t.t_query.M.cores in
      if j <= 0 then max_cores st else j

  let log_state st =
    Lwt_log.ign_info_f ~section
      "(daemon_state: %d running, %d / %d cores, %d waiting, %d clients)"
      (Full_id_map.cardinal st.active) (used_cores st) (st.max_cores)
      (Q.size st.waiting) (Int_map.cardinal st.clients)

  let make ~forever config_files =
    let open Misc.Err in
    let default_config =
      if Sys.file_exists main_config_file
      then Config.parse_or_empty main_config_file else Config.empty
    in
    Config.parse_files config_files >|= fun config ->
    let config = Config.merge config default_config in
    { id_count=0;
      msg_count=0;
      max_cores=Config.(get_or ~default:1 config @@ int "cores");
      accept=true;
      active=Full_id_map.empty;
      clients=Int_map.empty;
      waiting=Q.empty;
      events=Lwt_mvar.create_empty ();
      last_event=Unix.gettimeofday();
      forever;
    }
end
(* shorthand for the scheduler's state *)
type state = State.t
(* render an optional string, using "<none>" when absent *)
let maybe_str o =
  match o with
  | Some s -> s
  | None -> "<none>"
(* one-line summary of a message for logs: first line plus an ellipsis.
   NOTE(review): [List.hd] raises if [M.show msg] yields no lines — confirm
   [M.show] never returns the empty string. *)
let show_msg_short (msg:M.t): string =
  (M.show msg |> CCString.lines |> List.hd) ^ "…"
(* send message [m] to client [c] on its output channel *)
let client_send (c:client_conn)(m:M.t): unit Lwt.t =
  Lwt_log.ign_debug_f ~section "(client_send :to %d `%s`)" c.c_id (show_msg_short m);
  M.print c.c_out m
(* read the next message from client [c]'s input channel *)
let client_get (c:client_conn): M.t Lwt.t =
  let%lwt msg = M.parse c.c_in in
  Lwt_log.ign_debug_f ~section "(client_get :from %d `%s`)" c.c_id (show_msg_short msg);
  Lwt.return msg
(* reply to client [c]'s "status" query with a snapshot of the waiting
   queue and the currently running tasks *)
let handle_status (st:state) (c:client_conn): unit Lwt.t =
  Lwt_log.ign_info_f ~section "replying to client %d with status" c.c_id;
  (* snapshot of the waiting queue *)
  let waiting =
    Q.fold
      (fun acc task ->
         let x = {
           M.waiting_id = task.t_query.M.uid; waiting_job = task.t_query;
         } in
         x :: acc)
      [] (State.waiting st)
  (* snapshot of the running tasks, with their start times *)
  and current =
    State.active_l st
    |> List.rev_map
      (fun at ->
         { M.current_job=at.at_task.t_query;
           current_start=at.at_start; })
  in
  let ans = M.StatusAnswer {M.waiting; current; max_cores = State.max_cores st} in
  let%lwt () = client_send c ans in
  Lwt.return_unit
(* handle one client: expect "start" and then just forward
   all incoming messages to the scheduler thread *)
let handle_client (st:state) (c:client_conn): unit Lwt.t =
  let rec loop() =
    (* forward each incoming message to the scheduler as an event *)
    let%lwt msg = client_get c in
    let%lwt () = State.send_event st (E_recv (c, msg)) in
    loop ()
  in
  (* the very first message must be [M.Start] *)
  let%lwt _ = M.expect c.c_in (M.equal M.Start) in
  loop ()
(* broadcast [msg] to every connected client except the sender *)
let broadcast (st:state) (from:client_conn) (msg:M.t): unit Lwt.t =
  let recipients =
    Int_map.to_list (State.clients st)
    |> List.filter (fun (_, c) -> c.c_id <> from.c_id)
  in
  Lwt_list.iter_p (fun (_, c) -> client_send c msg) recipients
(* after this number of seconds without clients, the daemon dies *)
let delay_before_dying = 300.

(* in some amount of time, send "refresh" so the scheduler re-checks its
   exit condition *)
let schedule_refresh (st:state) =
  Lwt.async
    (fun () ->
       let%lwt () = Lwt_unix.sleep (delay_before_dying +. 2.) in
       State.send_event st E_refresh)
(* close both channels of a client connection; errors are logged, never raised *)
let close_chans c: unit Lwt.t =
  try%lwt
    let%lwt () = Lwt_io.close c.c_out in
    let%lwt () = Lwt_io.close c.c_in in
    Lwt.return_unit
  with e ->
    Lwt_log.ign_error_f "error when closing conn to client %d: %s" c.c_id
      (Printexc.to_string e);
    Lwt.return_unit
(* forget a running task, both in the global state and in its owner's
   set of active tasks *)
let kill_task (st:State.t) (at:active_task): unit =
  State.remove_active st at;
  let c = at.at_task.t_conn in
  c.c_active <- Full_id_map.remove (active_id at) c.c_active;
  Lwt_log.ign_debug_f ~section "task %s: done" (active_id at |> Full_id.to_string);
  ()
(* end of connection for this client: cancel its reader thread, drop it
   from the state, close its channels, then kill all its active tasks *)
let kill_client (st:State.t)(c:client_conn): unit Lwt.t =
  Lwt_log.ign_info_f "stop client %d" c.c_id;
  Lwt.cancel c.c_thread;
  State.remove_client st c;
  close_chans c >>= fun () ->
  (* [values] yields a push-iterator; applying it runs [kill_task st] on
     each of the client's active tasks *)
  Full_id_map.values c.c_active (kill_task st);
  Lwt.return_unit
(* scheduler: receives requests from several clients, and pings them back *)
let run_scheduler (st:state): unit Lwt.t =
  let rec loop () =
    State.log_state st;
    (* try to activate a waiting task, otherwise wait for incoming events *)
    if State.cores_needed st <= State.max_cores st - State.used_cores st then (
      begin match State.take_task st with
        | None ->
          (* only exit if no clients are connected, to avoid the race:
             - client connects
             - queue is empty --> scheduler stops
             - client sends "acquire" and never gets an answer *)
          if not (State.forever st) &&
             State.used_cores st = 0 &&
             Int_map.is_empty (State.clients st) &&
             State.num_active st = 0 &&
             State.time_since_last_event st >= delay_before_dying
          then (
            Lwt_log.ign_info ~section "no more tasks nor clients, exit";
            Lwt.return_unit
          ) else (
            wait_for_next_event ()
          )
        | Some task ->
          (* greenlight the task *)
          let uid = task.t_query.M.uid in
          Lwt_log.ign_info_f ~section "start task %s (user %s, pid %d): %s"
            (task_id task |> Full_id.to_string)
            (maybe_str task.t_query.M.user)
            task.t_query.M.pid
            (maybe_str task.t_query.M.info);
          let at = {
            at_task=task;
            at_start=Unix.gettimeofday();
          } in
          State.add_active st at;
          task.t_conn.c_active <-
            Full_id_map.add (active_id at) at task.t_conn.c_active;
          let%lwt () = client_send task.t_conn (M.Go uid) in
          loop ()
      end
    ) else wait_for_next_event ()
  (* listen for the next scheduler event *)
  and wait_for_next_event () =
    let%lwt e = State.next_event st in
    begin match e with
      | E_recv (client,msg) ->
        Lwt_log.ign_debug_f ~section
          "(process_message :from %d :msg `%s`)" client.c_id (show_msg_short msg);
        process_incoming_msg client msg
      | E_refresh ->
        Lwt_log.ign_debug ~section "process event 'refresh'";
        loop ()
    end
  (* process one incoming message *)
  and process_incoming_msg (c:client_conn) (msg:M.t) = match msg with
    | M.Status ->
      let%lwt () = handle_status st c in
      loop()
    | M.Start ->
      Lwt_log.ign_error_f ~section
        "invalid duplicate `Start` message for client %d" c.c_id;
      let%lwt () = kill_client st c in
      loop()
    | M.Pong _ ->
      (* restored log call: only its format string survived the dump *)
      Lwt_log.ign_debug_f ~section "got 'pong' from client %d" c.c_id;
      c.c_last_pong <- Unix.gettimeofday();
      loop()
    | M.End ->
      Lwt_log.ign_info_f ~section "closed connection to client %d" c.c_id;
      let%lwt () = kill_client st c in
      schedule_refresh st;
      loop ()
    | M.Acquire query ->
      if State.accept st then (
        Lwt_log.ign_info_f ~section "new acquire from client %d (id %d)"
          c.c_id query.M.uid;
        let task = {
          t_conn=c;
          t_query=query;
        } in
        (* add task to scheduler; it will run as soon as there is room *)
        State.push_task st task;
      ) else (
        (* reject task, not accepting any anymore *)
        Lwt.async (fun () -> client_send c (M.Reject query.M.uid))
      );
      loop ()
    | M.Release u ->
      (* find the corresponding task and release it *)
      let id = Full_id.make c.c_id u in
      let%lwt() = match State.find_active st id with
        | None ->
          Lwt_log.ign_error_f ~section "client %d released unknown task %d"
            c.c_id u;
          (* fixed: dropped the stray ';' that made this arm a syntax error *)
          kill_client st c
        | Some at ->
          Lwt_log.ign_debug_f ~section "client %d released task %d" c.c_id u;
          (* task is done, we can remove it to make room for others *)
          kill_task st at;
          Lwt.return_unit
      in
      loop ()
    | M.StopAccepting ->
      Lwt_log.ign_info ~section "stop accepting jobs...";
      State.stop_accept st;
      loop ()
    | (M.Start_bench _ | M.Finish_bench | M.Event _) as msg ->
      (* broadcast, but do not wait for it to terminate *)
      Lwt.async (fun () -> broadcast st c msg);
      loop ()
    | ( M.Ping _ | M.StatusAnswer _ | M.Go _ | M.Reject _) as msg ->
      Lwt_log.ign_error_f "unexpected message: %s" (M.show msg);
      loop ()
  in
  loop ()
let ping_delay = 5.

(* regularly ping clients, and kill those which have not answered the
   previous pings; [n] is the current "ping" id *)
let run_ping_thread st: unit Lwt.t =
  let rec loop (n:int) =
    let%lwt () = Lwt_unix.sleep ping_delay in
    (* restored log call: only its format string survived the dump *)
    Lwt_log.ign_debug_f ~section "send ping [%d] to clients" n;
    let killed_any = ref false in
    let clients = State.clients st in
    Int_map.iter
      (fun _ c ->
         if c.c_last_ping > c.c_last_pong +. 3. *. ping_delay +. 2.
         then (
           (* dead: the client missed several pings; drop it *)
           killed_any := true;
           Lwt.async (fun () -> kill_client st c)
         ) else (
           c.c_last_ping <- Unix.gettimeofday();
           Lwt.async (fun () ->
             try%lwt client_send c (M.Ping n)
             with _ -> Lwt.return_unit)
         ))
      clients;
    if !killed_any then (
      (* might have to refresh state *)
      Lwt.async (fun () -> State.send_event st E_refresh)
    );
    loop (n+1)
  in
  loop 0
(* spawn a daemon, listening on localhost at the given port *)
let spawn ?(forever=false) (port:int): unit Lwt.t =
  Lwt_log.ign_info ~section "---------------------------";
  Lwt_log.ign_info_f ~section "starting daemon on port %d" port;
  let addr = Unix.ADDR_INET (Unix.inet_addr_loopback, port) in
  begin match State.make ~forever [] with
    | Result.Error e -> Lwt.fail_with e
    | Result.Ok st -> Lwt.return st
  end >>= fun st ->
  (* scheduler *)
  Lwt_log.ign_info ~section "start scheduler";
  let scheduler_thread = run_scheduler st in
  Lwt_log.ign_info ~section "scheduler started";
  (* ping clients regularly *)
  let ping_thread = run_ping_thread st in
  (* server that listens for incoming clients *)
  let%lwt server = Lwt_io.establish_server_with_client_address addr
      (fun _ (ic,oc) ->
         let c = {
           c_in=ic;
           c_out=oc;
           c_last_ping=Unix.gettimeofday();
           c_last_pong=Unix.gettimeofday();
           c_id=State.new_id st;
           c_thread=Lwt.return_unit;
           c_active=Full_id_map.empty;
         } in
         let th = handle_client st c in
         State.add_client st c;
         c.c_thread <- th;
         th)
  in
  Lwt_log.ign_debug ~section "daemon started";
  (* stop as soon as either background thread finishes *)
  let%lwt () =
    Lwt.pick
      [ scheduler_thread;
        ping_thread;
      ] in
  Lwt_log.ign_debug ~section "daemon's server is stopping";
  Lwt_io.shutdown_server server
(* route logging to syslog plus (best-effort) a log file, then close stderr *)
let setup_loggers file_name () =
  let syslog = Lwt_log.syslog ~facility:`User () in
  Lwt_log.default := syslog;
  Lwt_log.add_rule "*" Lwt_log.Debug;
  let%lwt () =
    try%lwt
      let%lwt log' = Lwt_log.file ~mode:`Append ~perm:0o666 ~file_name () in
      let all_log = Lwt_log.broadcast [log'; syslog] in
      Lwt_log.default := all_log;
      Lwt.return_unit
    with e ->
      (* the file logger is optional: report and keep syslog only *)
      let%lwt _ = Lwt_io.eprintlf "error opening log file %s" file_name in
      Lwt_log.ign_error_f "could not open file %s: %s"
        file_name (Printexc.to_string e);
      Lwt.return_unit
  in
  (* keep reporting async exceptions even after stderr is closed *)
  let old_hook = !Lwt.async_exception_hook in
  Lwt.async_exception_hook := (fun e ->
    Printf.eprintf "async error: %s\n%!" (Printexc.to_string e);
    old_hook e);
  Lwt_io.close Lwt_io.stderr
(* init: if DAEMON_PORT=<port> is set (i.e. we are the self-spawned child,
   see [fork_daemon]), daemonize, set up logging, run [spawn], then exit;
   otherwise do nothing *)
let () = match Sys.getenv "DAEMON_PORT" |> int_of_string with
  | p ->
    Lwt_daemon.daemonize ~syslog:true ~directory:"/tmp"
      ~stdin:`Close ~stdout:`Close ~stderr:`Keep ();
    let log_file =
      let config = Config.parse_or_empty main_config_file in
      Config.(get_or ~default:"/tmp/froglock.log" config @@ string "log")
    in
    Lwt_main.run (
      let%lwt () = setup_loggers log_file () in
      spawn p
    );
    exit 0
  (* variable absent or not an int: normal (non-daemon) run *)
  | exception _ -> ()
(* re-execute ourselves with DAEMON_PORT set, so the child's init code
   (above) turns it into the daemon.
   NOTE(review): [~env] REPLACES the child's whole environment with this
   single variable (PATH etc. are dropped) — confirm this is intended. *)
let fork_daemon port : unit =
  Lwt.async
    (fun () ->
       let cmd = Sys.executable_name, [| Sys.executable_name |] in
       let env = [| "DAEMON_PORT=" ^ string_of_int port |] in
       Lwt_process.exec ~env cmd);
  ()
|
c7922ffbdef3789af40c996e1be491a3a59e1bedfc8d1a8de25b7e59dd0ff096 | geophf/1HaskellADay | Solution.hs | # LANGUAGE OverloadedStrings , QuasiQuotes , ViewPatterns #
module Y2017.M10.D03.Solution where
{--
Today we're going to look at the MemoizingTable-type.
What is the MemoizingTable-type?
Well, it's what we're going to look at today!
(circular-reference much, geophf?
me: what?)
--}
import Control.Arrow ((&&&), (>>>), (***), second)
import Control.Monad (void, (<=<))
import Control.Monad.State
import Data.ByteString.Lazy.Char8 (ByteString)
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (maybeToList, catMaybes)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Time
import Data.Time.Calendar
import Data.Time.Clock
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToRow
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.FromRow
import Database.PostgreSQL.Simple.FromField
import Network.HTTP.Conduit
below imports available via 1HaskellADay git repository
import Control.Logic.Frege (adjoin)
import Control.Scan.CSV (rend)
import Data.MemoizingTable (MemoizingTable(MT))
import qualified Data.MemoizingTable as MT
import Store.SQL.Connection (connectInfo)
import Store.SQL.Util.Indexed
import Store.SQL.Util.Inserts
import Store.SQL.Util.Pivots
import Y2017.M09.D25.Solution (Article, metadata)
import Y2017.M09.D28.Solution (insertArts)
import Y2017.M09.D29.Solution (raw2persons, insertPers, insertArtPersJoinStmt)
import Y2017.M10.D02.Solution
{--
The MemoizingTable type addresses this problem: you have a set of words in a
data table:
--}
-- | A keyword ('subject') attached to an article in the NYT archive.
data Subject = Subj { subj :: String }
   deriving (Eq, Ord, Show)
-- | A Subject is stored as a single-column row: its keyword text.
instance ToRow Subject where
   toRow (Subj s) = [toField s]
-- | Read a Subject back from a single-column result row.
instance FromRow Subject where
   fromRow = fmap Subj field
type IxSubject = IxValue Subject -- ISubj { subjIdx :: Integer, subject :: String }
{--
instance Indexed IxSubject where
idx = subjIdx
instance FromRow IxSubject where
fromRow = ISubj <$> field <*> field
--}
-- So, since we have a Subject-type, we want also to have a class of values
-- from which we can extract subjects. This is called 'subject-oriented
-- programming.'
-- | Things from which we can extract the subjects they mention
-- ('subject-oriented programming').
class Subjective a where
   subjects :: a -> [Subject]
-- | SELECT of the whole keyword_pub table: (id, keyword) rows.
fetchSubjectsStmt :: Query
fetchSubjectsStmt = [sql|SELECT * from keyword_pub|]
-- | Reads every stored subject together with its database index.
fetchSubjects :: Connection -> IO [IxSubject]
fetchSubjects conn = query_ conn fetchSubjectsStmt
{--
-- subjects is now Data.MemoizingTable.bifurcate
subjects :: [IxSubject] -> (Map Integer String, Map String Integer)
subjects = -- the two Map.fromList functions are DIFFERENT FUNCTIONS! smh
Map.fromList . map (idx &&& subject) &&& Map.fromList . map (subject &&& idx)
Okay, so we can read the current state from the database. That's great!
Now let's look at a workflow.
We have a set of subjects in the database, we parse some articles with subjects,
some are already in the database, some are new subjects.
What do we do when we want to join these subjects to the articles we store into
the database?
1. we read the subjects from the database and get the mapped (index,subject)
results (see the function subjects, above).
2. we parse the new articles and collect the subjects extracted as metadata.
3. For those subjects already stored, we have the keyed index.
4. for those not yet stored, we store those into the database, which then
returns their automatically generated keyed indices as a result, we create
those new associations, remembering that these are new subjects
5. We create the pivot table from the new map.
Okay, so that's the workflow. Let's go about doing this, creating the structure
we need for the workflow-context.
-- below code moved to Data.MemoizingTable
data MemoizingTable a b =
MT { fromTable :: Map a b, readIndex :: Map b a, newValues :: Set b }
deriving Show
initMemTable :: Ord a => Ord b => (Map a b, Map b a) -> MemoizingTable a b
initMemTable = flip (uncurry MT) Set.empty
So, to make subjects a memoizing table, we read in the subjects from the
database. As we read in articles, we scan the readKey map for subjects already
stored into the database. Those we get the indicies. For those new subjects,
we add those to the newValues.
Let's put this into practice.
--}
-- | Which half of the two-part NYT article archive to load.
data Part = ONE | TWO
   deriving (Eq, Ord)
-- | Rendered as the numeral used in the archive file names.
instance Show Part where
   show p = case p of
      ONE -> "1"
      TWO -> "2"
-- | Path to the gzipped article archive for the given part.
archive :: Part -> FilePath
archive part =
   concat ["Y2017/M10/D03/NYTOnline_09-05-17_09-10-17_ALLSecs-pt",
           show part, ".txt.gz"]
-- I'm thinking of taking a stately State-ful approach to updating the
-- memoizing table.
-- | Memoizing table from database index to Subject.
type SubjectTable = MemoizingTable Integer Subject
-- | State carried while scanning articles: the subject table, plus a map
-- from article index to the subjects that article mentions.
type MemoizingState m a = StateT (SubjectTable, Map Integer [Subject]) m a
-- | Subjects of an article, parsed from its "Subject" metadata entry
-- (none if the entry is absent).
art2Subj :: Article -> [Subject]
art2Subj art =
   maybeToList (Map.lookup "Subject" (metadata art)) >>= parseSubjects
-- | Splits a raw "Subject" metadata string on ';' into Subjects. The first
-- fragment is kept whole; every later fragment has its first character
-- dropped (presumably the blank that follows each ';' -- TODO confirm
-- against rend's behaviour in Control.Scan.CSV).
-- NOTE(review): 'head' / 'map tail' are partial -- an empty fragment
-- (as in "a;;b") would error at runtime.
parseSubjects :: String -> [Subject]
parseSubjects = map Subj . (head &&& map tail . tail >>> uncurry (:)) . rend ';'
-- updateNewSubjs reduces to Data.MemoizingTable.triage
-- | Folds each subject into the memoizing table, sorting already-known
-- subjects from new ones.
updateNewSubjs :: [Subject] -> SubjectTable -> SubjectTable
updateNewSubjs subjs table = foldl (flip MT.triage) table subjs
-- we get the article, extract its subject information then factor the subject
-- into the ones already indexed verse the ones we haven't yet stored in the
-- database. We also update the map of subjects in each article.
-- | INSERT for one keyword; RETURNING hands back the generated id.
uploadSubjectStmt :: Query
uploadSubjectStmt = [sql|INSERT INTO keyword_pub (keyword) VALUES (?) returning id|]
-- | Inserts the given subjects, returning their new database indices
-- (in insertion order).
uploadSubjects :: Connection -> [Subject] -> IO [Index]
uploadSubjects conn subjs = returning conn uploadSubjectStmt subjs
-- | Uploads only the NEW subjects of a memoizing table, pairing each
-- subject with the index the database handed back for it.
uploadMT :: Connection -> SubjectTable -> IO [IxSubject]
uploadMT conn (MT _ _ news) = do
   let subjs = Set.toList news
   ixen <- uploadSubjects conn subjs
   return (zipWith (\s i -> IxV (idx i) s) subjs ixen)
{--
>>> connectInfo
ConnectInfo {connectHost = "...", ...}
>>> conn <- connect it
>>> blocks <- extractBlocks <$> BL.readFile (archive ONE)
>>> ixblks <- insertBlocks conn blocks
>>> let articles = join (zipWith block2Article ixblks blocks)
>>> ixarts <- insertArts conn articles
>>> let rns = catMaybes (zipWith art2RawNames ixarts articles)
>>> let pers = concatMap raw2persons rns
>>> ixpers <- insertPers conn pers
>>> inserter conn insertArtPersJoinStmt (zipWith joinValue pers ixpers)
>>> let memtable = initMemTable (Map.empty , Map.empty)
>>> let stat = execState (zipWithM_ getSubjectsMT ixarts articles) (memtable, Map.empty)
>>> stat
(MT {fromTable = fromList [], readIndex = fromList [],
newValues = fromList [Subj {subj = "Absenteeism"},Subj {subj = "Affluence"},
>>> ixsubs <- uploadMT conn (fst stat)
>>> length ixsubs
61
>>> head ixsubs
ISubj {subjIdx = 1, subject = "Absenteeism"}
$ select * from subject LIMIT 10;
id subject
------------------------
1 Absenteeism
2 Affluence
3 Aliens
4 American history
5 Anemia
6 Audiences
7 Aviation
8 Bills
9 Books
10 Burnout
-- uploads the new subjects discovered in parsing the articles and then
-- gets back the indices for those new subjects
-- Okay, now we've got the indexed subjects, we update the MemoizingTable
-- with those new subjects:
-- The updated subjects should be the set of used-to-be-new subjects in the
-- memoizing table. Clear that set and update the maps with the new information
>>> let tab = MT.update ixsubs (fst stat)
--}
-- Now we should have everything we need to upload the subjects associated
-- with their source articles. The subjects are all uploaded, all we have to
-- do is create the Pivot table values to match articles to subjects. We have
-- both the article indices, their associated subjects, and the subject indices
in the MemoizingState .
-- | From the memoizing state, builds the article/keyword pivot rows: for
-- each article index, each of its subjects is resolved to its database
-- index via the table's read-index.
-- NOTE(review): (Map.!) is partial -- every subject must already be in
-- the read-index (i.e. MT.update has run).
buildSubjectPivots :: Monad m => MemoizingState m [Pivot]
buildSubjectPivots = get >>= \(MT _ keys _, joins) ->
   return (map (uncurry Pvt)
               (concatMap (sequence . (second (map (keys Map.!))))
                          (Map.toList joins)))
-- | INSERT for one (article_id, keyword_id) join row.
insertSubjPivotStmt :: Query
insertSubjPivotStmt =
   [sql|INSERT INTO article_kw_pub (article_id,keyword_id) VALUES (?,?)|]
-- | Inserts all article/keyword joins in one batch.
insertSubjPivot :: Connection -> [Pivot] -> IO ()
insertSubjPivot conn pivots = inserter conn insertSubjPivotStmt pivots
{--
>>> insertSubjPivot conn (evalState buildSubjectPivots (tab, snd stat))
()
$ select * from article_subject LIMIT 10;
id article_id subject_id
1 1 7
2 1 51
3 2 58
4 2 47
5 3 36
6 3 14
7 3 56
8 3 6
9 3 35
10 3 31
--}
{-- BONUS -----------------------------------------------------------------
The subject table structure is not uncommon. Create a type that has a key-value
pair and create FromRow and ToRow instances of it.
-- from the function archive, parse the Part ONE articles, insert those articles
-- and the associated auxilary information, including names and subjects.
-- You can roll your own ETL process for this. Hint: see the bonus-bonus question.
-- BONUS-BONUS -----------------------------------------------------------
Yesterday's etlProcess got it done for article and name insertion. Add the
functionality of Subject insertion, AND time each command (function that
interacts with IO) in the ETL. Report the times here with a total time for
the whole ETL process on the archives here. Do this using the articles stored
in the archive function, Part TWO.
--}
-- | Articles carry their subjects in the "Subject" metadata entry.
instance Subjective Article where subjects = art2Subj
-- | Full ETL pass over one gzipped archive, timing (and printing) each
-- stage: block extraction, block/article/name inserts, the name-join
-- pivot, new-subject upload, and the subject pivot. Returns the total
-- elapsed time.
timedETL :: FilePath -> Connection -> IO NominalDiffTime
timedETL archive conn = do
   start  <- getCurrentTime
   blocks <- extractBlocks <$> BL.readFile archive
   exblks <- mark "Extracting blocks" start
   ixblks <- insertBlocks conn blocks
   inblks <- mark "Inserting blocks" exblks
   let articles = join (zipWith block2Article ixblks blocks)
   ixarts <- insertArts conn articles
   inarts <- mark "Inserting articles" inblks
   let rns  = catMaybes (zipWith art2RawNames ixarts articles)
       pers = concatMap raw2persons rns
   ixpers <- insertPers conn pers
   inpers <- mark "Inserting names" inarts
   inserter conn insertArtPersJoinStmt (zipWith joinValue pers ixpers)
   inpersjoin <- mark "Inserting name-joins" inpers
   let memtable = MT.start []
       stat = execState (zipWithM_ MT.triageM (map idx ixarts)
                                              (map subjects articles))
                        (memtable, Map.empty)
   ixsubs <- uploadMT conn (fst stat)
   _ <- mark "Inserting new subjects" inpersjoin
   let tab = MT.update (map ix2tup ixsubs) (fst stat)
   insertSubjPivot conn (evalState buildSubjectPivots (tab, snd stat))
   thatsIt <- getCurrentTime
   let totalTime = diffUTCTime thatsIt start
   putStrLn ("Total time: " ++ show totalTime)
   return totalTime
 where
   -- prints the time elapsed since the previous mark; returns the new mark
   mark :: String -> UTCTime -> IO UTCTime
   mark label prev = do
      now <- getCurrentTime
      putStrLn (label ++ ": " ++ show (diffUTCTime now prev))
      return now
{--
ixsubj2pair :: IxSubject -> (Integer, Subject)
ixsubj2pair = idx &&& Subj . val
>>> timedETL (archive ONE) conn
"Extracting blocks: 0.000989s"
"Inserting blocks: 2.956464s"
"Inserting articles: 0.429825s"
"Inserting names: 0.053812s"
"Inserting name-joins: 0.044045s"
"Inserting new subjects: 0.052927s"
"Total time: 3.594488s"
3.594488s
>>> timedETL (archive TWO) conn
"Extracting blocks: 0.000971s"
"Inserting blocks: 2.483572s"
"Inserting articles: 1.089283s"
"Inserting names: 0.018167s"
"Inserting name-joins: 0.010377s"
"Inserting new subjects: 0.028442s"
"Total time: 3.650243s"
3.650243s
The major time was spent establishing the connection to the database, the
rest of the processes ran well enough.
--}
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2017/M10/D03/Solution.hs | haskell | }
}
ISubj { subjIdx :: Integer, subject :: String }
}
So, since we have a Subject-type, we want also to have a class of values
from which we can extract subjects. This is called 'subject-oriented
programming.'
subjects is now Data.MemoizingTable.bifurcate
the two Map.fromList functions are DIFFERENT FUNCTIONS ! smh
below code moved to Data . MemoizingTable
subjects is now Data.MemoizingTable.bifurcate
the two Map.fromList functions are DIFFERENT FUNCTIONS! smh
below code moved to Data.MemoizingTable
}
memoizing table.
we get the article, extract its subject information then factor the subject
into the ones already indexed verse the ones we haven't yet stored in the
database. We also update the map of subjects in each article.
----------------------
uploads the new subjects discovered in parsing the articles and then
gets back the indices for those new subjects
Okay , now we 've got the indexed subjects , we update the MemoizingTable
with those new subjects :
The updated subjects should be the set of used - to - be - new subjects in the
table . Clear that set and update the maps with the new information
----------------------
uploads the new subjects discovered in parsing the articles and then
gets back the indices for those new subjects
Okay, now we've got the indexed subjects, we update the MemoizingTable
with those new subjects:
The updated subjects should be the set of used-to-be-new subjects in the
memoizing table. Clear that set and update the maps with the new information
}
Now we should have everything we need to upload the subjects associated
with their source articles. The subjects are all uploaded, all we have to
do is create the Pivot table values to match articles to subjects. We have
both the article indices, their associated subjects, and the subject indices
}
---------------------------------------------------------------
from the function archive , parse the Part ONE articles , insert those articles
and the associated auxilary information , including names and subjects .
You can roll your own ETL process for this . Hint : see the bonus - bonus question .
BONUS - BONUS -----------------------------------------------------------
from the function archive, parse the Part ONE articles, insert those articles
and the associated auxilary information, including names and subjects.
You can roll your own ETL process for this. Hint: see the bonus-bonus question.
BONUS-BONUS -----------------------------------------------------------
}
} | # LANGUAGE OverloadedStrings , QuasiQuotes , ViewPatterns #
module Y2017.M10.D03.Solution where
-
Today we 're going to look at the MemoizingTable - type .
What is the MemoizingTable - type ?
Well , it 's what we 're going to look at today !
( circular - reference much , geophf ?
me : what ? )
-
Today we're going to look at the MemoizingTable-type.
What is the MemoizingTable-type?
Well, it's what we're going to look at today!
(circular-reference much, geophf?
me: what?)
import Control.Arrow ((&&&), (>>>), (***), second)
import Control.Monad (void, (<=<))
import Control.Monad.State
import Data.ByteString.Lazy.Char8 (ByteString)
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (maybeToList, catMaybes)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Time
import Data.Time.Calendar
import Data.Time.Clock
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToRow
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.FromRow
import Database.PostgreSQL.Simple.FromField
import Network.HTTP.Conduit
below imports available via 1HaskellADay git repository
import Control.Logic.Frege (adjoin)
import Control.Scan.CSV (rend)
import Data.MemoizingTable (MemoizingTable(MT))
import qualified Data.MemoizingTable as MT
import Store.SQL.Connection (connectInfo)
import Store.SQL.Util.Indexed
import Store.SQL.Util.Inserts
import Store.SQL.Util.Pivots
import Y2017.M09.D25.Solution (Article, metadata)
import Y2017.M09.D28.Solution (insertArts)
import Y2017.M09.D29.Solution (raw2persons, insertPers, insertArtPersJoinStmt)
import Y2017.M10.D02.Solution
-
The MemoizingTable type addresses this problem : you have a set of words in a
data table :
-
The MemoizingTable type addresses this problem: you have a set of words in a
data table:
-- | A publication subject (keyword), wrapping its raw string form.
data Subject = Subj { subj :: String }
   deriving (Eq, Ord, Show)

-- | Serializes as a single-column row: just the subject string.
instance ToRow Subject where
   toRow = pure . toField . subj

-- | Deserializes from a single-column row holding the subject string.
instance FromRow Subject where
   fromRow = Subj <$> field
deriving ( Eq , Ord , Show )
-
instance Indexed IxSubject where
idx = subjIdx
instance where
fromRow = ISubj < $ > field < * > field
-
instance Indexed IxSubject where
idx = subjIdx
instance FromRow IxSubject where
fromRow = ISubj <$> field <*> field
-- | Values from which a list of 'Subject's can be extracted
-- ("subject-oriented programming").
class Subjective a where
   subjects :: a -> [Subject]

-- | Query fetching every stored subject row from the keyword_pub table.
fetchSubjectsStmt :: Query
fetchSubjectsStmt = [sql|SELECT * from keyword_pub|]

-- | Reads all indexed subjects currently stored in the database.
fetchSubjects :: Connection -> IO [IxSubject]
fetchSubjects = flip query_ fetchSubjectsStmt
-
subjects : : [ IxSubject ] - > ( Map Integer String , Map String Integer )
Map.fromList . map ( idx & & & subject ) & & & Map.fromList . map ( subject & & & idx )
Okay , so we can read the current state from the database . That 's great !
Now let 's look at a workflow .
We have a set of subjects in the database , we parse some articles with subjects ,
some are already in the database , some are new subjects .
What do we do when we want to join these subjects to the articles we store into
the database ?
1 . we read the subjects from the database and get the mapped ( index , subject )
results ( see the function subjects , above ) .
2 . we parse the new articles and collect the subjects extracted as metadata .
3 . For those subjects already stored , we have the keyed index .
4 . for those not yet stored , we store those into the database , which then
returns their automatically generated keyed indices as a result , we create
those new associations , remembering that these are new subjects
5 . We create the pivot table from the new map .
Okay , so that 's the workflow . Let 's go about doing this , creating the structure
we need for the workflow - context .
data MemoizingTable a b =
MT { fromTable : : Map a b , readIndex : : Map b a , newValues : : Set b }
deriving Show
initMemTable : : a = > Ord b = > ( Map a b , Map b a ) - > MemoizingTable a b
initMemTable = flip ( uncurry MT ) Set.empty
So , to make subjects a memoizing table , we read in the subjects from the
database . As we read in articles , we scan the readKey map for subjects already
stored into the database . Those we get the indicies . For those new subjects ,
we add those to the newValues .
Let 's put this into practice .
-
subjects :: [IxSubject] -> (Map Integer String, Map String Integer)
Map.fromList . map (idx &&& subject) &&& Map.fromList . map (subject &&& idx)
Okay, so we can read the current state from the database. That's great!
Now let's look at a workflow.
We have a set of subjects in the database, we parse some articles with subjects,
some are already in the database, some are new subjects.
What do we do when we want to join these subjects to the articles we store into
the database?
1. we read the subjects from the database and get the mapped (index,subject)
results (see the function subjects, above).
2. we parse the new articles and collect the subjects extracted as metadata.
3. For those subjects already stored, we have the keyed index.
4. for those not yet stored, we store those into the database, which then
returns their automatically generated keyed indices as a result, we create
those new associations, remembering that these are new subjects
5. We create the pivot table from the new map.
Okay, so that's the workflow. Let's go about doing this, creating the structure
we need for the workflow-context.
data MemoizingTable a b =
MT { fromTable :: Map a b, readIndex :: Map b a, newValues :: Set b }
deriving Show
initMemTable :: Ord a => Ord b => (Map a b, Map b a) -> MemoizingTable a b
initMemTable = flip (uncurry MT) Set.empty
So, to make subjects a memoizing table, we read in the subjects from the
database. As we read in articles, we scan the readKey map for subjects already
stored into the database. Those we get the indicies. For those new subjects,
we add those to the newValues.
Let's put this into practice.
-- | Which half of the two-part NYT article archive to load.
data Part = ONE | TWO
   deriving (Eq, Ord)

-- | Rendered as the numeric suffix used in the archive file names.
instance Show Part where
   show ONE = "1"
   show TWO = "2"

-- | Path to the compressed article archive for the given 'Part'.
archive :: Part -> FilePath
archive x =
   "Y2017/M10/D03/NYTOnline_09-05-17_09-10-17_ALLSecs-pt" ++ show x ++ ".txt.gz"
I 'm thinking of taking a stately State - ful approach to updating the
-- | Memoizing table from database index to 'Subject'.
type SubjectTable = MemoizingTable Integer Subject

-- | State carried while scanning articles: the subject table plus a map
-- from article index to that article's subjects.
type MemoizingState m a = StateT (SubjectTable, Map Integer [Subject]) m a

-- | Extracts the subjects of an article from its \"Subject\" metadata
-- entry (empty when the entry is absent).
art2Subj :: Article -> [Subject]
art2Subj = parseSubjects <=< maybeToList . Map.lookup "Subject" . metadata

-- | Splits a raw subject string on ';'. The first field is kept as-is;
-- each later field drops its first character — presumably the space
-- following the semicolon (TODO confirm against the raw metadata format).
parseSubjects :: String -> [Subject]
parseSubjects = map Subj . (head &&& map tail . tail >>> uncurry (:)) . rend ';'
updateNewSubjs :: [Subject] -> SubjectTable -> SubjectTable
-- updateNewSubjs reduces to Data.MemoizingTable.triage
updateNewSubjs = flip (foldl (flip MT.triage))
-- | Insert statement for a new subject; returns the generated id.
uploadSubjectStmt :: Query
uploadSubjectStmt = [sql|INSERT INTO keyword_pub (keyword) VALUES (?) returning id|]

-- | Inserts the given subjects and returns their generated indices,
-- in the same order.
uploadSubjects :: Connection -> [Subject] -> IO [Index]
uploadSubjects = flip returning uploadSubjectStmt

-- | Uploads only the memoizing table's new (not-yet-stored) subjects
-- and pairs each with the index the database assigned it.
uploadMT :: Connection -> SubjectTable -> IO [IxSubject]
uploadMT conn (MT _ _ news) =
   let subjs = Set.toList news in
   zipWith (flip IxV) subjs . map idx <$> uploadSubjects conn subjs
-
> > > connectInfo
ConnectInfo { connectHost = " ... " , ... }
> > > conn < - connect it
> > > blocks < - extractBlocks < $ > BL.readFile ( archive ONE )
> > > ixblks < - insertBlocks conn blocks
> > > let articles = join ( zipWith blocks )
> > > ixarts < - insertArts conn articles
> > > let rns = ( zipWith art2RawNames ixarts articles )
> > > let pers = concatMap raw2persons rns
> > > ixpers < - insertPers conn pers
> > > inserter conn insertArtPersJoinStmt ( zipWith joinValue pers ixpers )
> > > let memtable = initMemTable ( Map.empty , Map.empty )
> > > let stat = execState ( zipWithM _ getSubjectsMT ixarts articles ) ( memtable , Map.empty )
> > > stat
( MT { fromTable = fromList [ ] , readIndex = fromList [ ] ,
newValues = fromList [ Subj { subj = " Absenteeism"},Subj { subj = " Affluence " } ,
> > > ixsubs < - uploadMT conn ( fst stat )
> > > length ixsubs
61
> > > head { subjIdx = 1 , subject = " Absenteeism " }
$ select * from subject LIMIT 10 ;
i d subject
1 Absenteeism
2 Affluence
3 Aliens
4 American history
5 Anemia
6 Audiences
7 Aviation
8 Bills
9 Books
10 Burnout
> > > let tab = MT.update ixsubs ( fst stat )
-
>>> connectInfo
ConnectInfo {connectHost = "...", ...}
>>> conn <- connect it
>>> blocks <- extractBlocks <$> BL.readFile (archive ONE)
>>> ixblks <- insertBlocks conn blocks
>>> let articles = join (zipWith block2Article ixblks blocks)
>>> ixarts <- insertArts conn articles
>>> let rns = catMaybes (zipWith art2RawNames ixarts articles)
>>> let pers = concatMap raw2persons rns
>>> ixpers <- insertPers conn pers
>>> inserter conn insertArtPersJoinStmt (zipWith joinValue pers ixpers)
>>> let memtable = initMemTable (Map.empty , Map.empty)
>>> let stat = execState (zipWithM_ getSubjectsMT ixarts articles) (memtable, Map.empty)
>>> stat
(MT {fromTable = fromList [], readIndex = fromList [],
newValues = fromList [Subj {subj = "Absenteeism"},Subj {subj = "Affluence"},
>>> ixsubs <- uploadMT conn (fst stat)
>>> length ixsubs
61
>>> head ixsubs
ISubj {subjIdx = 1, subject = "Absenteeism"}
$ select * from subject LIMIT 10;
id subject
1 Absenteeism
2 Affluence
3 Aliens
4 American history
5 Anemia
6 Audiences
7 Aviation
8 Bills
9 Books
10 Burnout
>>> let tab = MT.update ixsubs (fst stat)
in the MemoizingState .
-- | Builds article/subject pivot rows from the state: for each
-- (article index, subjects) entry, looks every subject up in the
-- table's readIndex map to get its database id.
-- NOTE(review): uses Map.! — a subject missing from the index would
-- throw; assumes the table was updated with all new subjects first.
buildSubjectPivots :: Monad m => MemoizingState m [Pivot]
buildSubjectPivots = get >>= \(MT _ keys _, joins) ->
   return (map (uncurry Pvt)
               (concatMap (sequence . (second (map (keys Map.!))))
                          (Map.toList joins)))

-- | Insert statement for one article-to-subject join row.
insertSubjPivotStmt :: Query
insertSubjPivotStmt =
   [sql|INSERT INTO article_kw_pub (article_id,keyword_id) VALUES (?,?)|]

-- | Inserts all the article/subject pivot rows.
insertSubjPivot :: Connection -> [Pivot] -> IO ()
insertSubjPivot = flip inserter insertSubjPivotStmt
-
> > > insertSubjPivot conn ( evalState buildSubjectPivots ( tab , snd stat ) )
( )
$ select * from article_subject LIMIT 10 ;
i d article_id subject_id
1 1 7
2 1 51
3 2 58
4 2 47
5 3 36
6 3 14
7 3 56
8 3 6
9 3 35
10 3 31
-
>>> insertSubjPivot conn (evalState buildSubjectPivots (tab, snd stat))
()
$ select * from article_subject LIMIT 10;
id article_id subject_id
1 1 7
2 1 51
3 2 58
4 2 47
5 3 36
6 3 14
7 3 56
8 3 6
9 3 35
10 3 31
The subject table structure is not uncommon . Create a type that has a key - value
pair and create FromRow and ToRow instances of it .
Yesterday 's etlProcess got it done for article and name insertion . Add the
functionality of Subject insertion , AND time each command ( function that
interacts with IO ) in the ETL . Report the times here with a total time for
the whole ETL process on the archives here . Do this using the articles stored
in the archive function , Part TWO .
-
The subject table structure is not uncommon. Create a type that has a key-value
pair and create FromRow and ToRow instances of it.
Yesterday's etlProcess got it done for article and name insertion. Add the
functionality of Subject insertion, AND time each command (function that
interacts with IO) in the ETL. Report the times here with a total time for
the whole ETL process on the archives here. Do this using the articles stored
in the archive function, Part TWO.
instance Subjective Article where subjects = art2Subj
-- | Full ETL pipeline over one compressed archive, timing each stage:
-- extract article blocks, insert blocks, articles, person names and
-- their joins, upload new subjects via the memoizing table, then the
-- article/subject pivots. Prints per-stage durations and returns the
-- total elapsed time.
timedETL :: FilePath -> Connection -> IO NominalDiffTime
timedETL archive conn =
   getCurrentTime >>= \start ->
   -- stage 1: read and split the raw archive into blocks
   extractBlocks <$> BL.readFile archive >>= \blocks ->
   getCurrentTime >>= \exblks ->
   putStrLn ("Extracting blocks: " ++ show (diffUTCTime exblks start)) >>
   -- stage 2: persist the raw blocks, keeping their indices
   insertBlocks conn blocks >>= \ixblks ->
   getCurrentTime >>= \inblks ->
   putStrLn ("Inserting blocks: " ++ show (diffUTCTime inblks exblks)) >>
   -- stage 3: parse blocks into articles and persist them
   let articles = join (zipWith block2Article ixblks blocks) in
   insertArts conn articles >>= \ixarts ->
   getCurrentTime >>= \inarts ->
   putStrLn ("Inserting articles: " ++ show (diffUTCTime inarts inblks)) >>
   -- stage 4: extract person names from the articles and persist them
   let rns = catMaybes (zipWith art2RawNames ixarts articles)
       pers = concatMap raw2persons rns in
   insertPers conn pers >>= \ixpers ->
   getCurrentTime >>= \inpers ->
   putStrLn ("Inserting names: " ++ show (diffUTCTime inpers inarts)) >>
   -- stage 5: join rows linking articles to persons
   inserter conn insertArtPersJoinStmt (zipWith joinValue pers ixpers) >>
   getCurrentTime >>= \inpersjoin ->
   putStrLn ("Inserting name-joins: " ++ show (diffUTCTime inpersjoin inpers)) >>
   -- stage 6: triage subjects into the memoizing table, upload new ones
   let memtable = MT.start []
       stat = execState (zipWithM_ MT.triageM (map idx ixarts) (map subjects articles))
                        (memtable, Map.empty) in
   uploadMT conn (fst stat) >>= \ixsubs ->
   getCurrentTime >>= \newsubs ->
   putStrLn ("Inserting new subjects: " ++ show (diffUTCTime newsubs inpersjoin)) >>
   -- stage 7: fold the fresh indices back in and write the pivot rows
   let tab = MT.update (map ix2tup ixsubs) (fst stat) in
   insertSubjPivot conn (evalState buildSubjectPivots (tab, snd stat)) >>
   getCurrentTime >>= \thatsIt ->
   let totalTime = diffUTCTime thatsIt start in
   putStrLn ("Total time: " ++ show totalTime) >>
   return totalTime
-
ixsubj2pair : : IxSubject - > ( Integer , Subject )
ixsubj2pair = idx & & & Subj .
> > > timedETL ( archive ONE ) conn
" Extracting blocks : 0.000989s "
" Inserting blocks : 2.956464s "
" Inserting articles : 0.429825s "
" Inserting names : 0.053812s "
" Inserting name - joins : "
" Inserting new subjects : 0.052927s "
" Total time : 3.594488s "
3.594488s
> > > timedETL ( archive TWO ) conn
" Extracting blocks : 0.000971s "
" Inserting blocks : 2.483572s "
" Inserting articles : 1.089283s "
" Inserting names : 0.018167s "
" Inserting name - joins : 0.010377s "
" Inserting new subjects : 0.028442s "
" Total time : 3.650243s "
3.650243s
The major time was spent establishing the connection to the database , the
rest of the processes ran well enough .
-
ixsubj2pair :: IxSubject -> (Integer, Subject)
ixsubj2pair = idx &&& Subj . val
>>> timedETL (archive ONE) conn
"Extracting blocks: 0.000989s"
"Inserting blocks: 2.956464s"
"Inserting articles: 0.429825s"
"Inserting names: 0.053812s"
"Inserting name-joins: 0.044045s"
"Inserting new subjects: 0.052927s"
"Total time: 3.594488s"
3.594488s
>>> timedETL (archive TWO) conn
"Extracting blocks: 0.000971s"
"Inserting blocks: 2.483572s"
"Inserting articles: 1.089283s"
"Inserting names: 0.018167s"
"Inserting name-joins: 0.010377s"
"Inserting new subjects: 0.028442s"
"Total time: 3.650243s"
3.650243s
The major time was spent establishing the connection to the database, the
rest of the processes ran well enough.
|
1c08b513c73fa107cbeed92c0c6eda324eab26fbd5b517dc31f2298fc63c9df7 | mnemonic-no/act-scio | project.clj | (defproject scio-back "0.1.42-SNAPSHOT"
:description "Storing tweets and documents to alastic search for indexing."
:dependencies [[org.clojure/clojure "1.10.0"]
[org.apache.commons/commons-compress "1.18"] ;; "fix" for missing class in pantomime
;; [org.apache.tika/tika-parsers "1.16"] ; disabled dependency, restored as a comment
[com.novemberain/pantomime "2.10.0"]
[info.debatty/java-spamsum "0.2"]
[clj-http "3.9.1"]
[org.clojure/algo.generic "0.1.3"]
[org.clojure/tools.cli "0.4.2"]
[clojure-ini "0.0.2"]
[clojure-opennlp "0.5.0"]
[digest "1.4.8"]
[me.raynes/fs "1.4.6"]
[org.clojure/data.json "0.2.6"]
[org.clojure/core.async "0.4.490"]
[org.clojure/tools.logging "0.4.1"]
[ch.qos.logback/logback-classic "1.2.3"]
[beanstalk-clj "0.1.3"]]
:main ^:skip-aot scio-back.core
:target-path "target/%s"
:keep-non-project-classes true
:profiles {:uberjar {:aot :all}})
| null | https://raw.githubusercontent.com/mnemonic-no/act-scio/bd2e2ccfa54c8a102748d37c79c1292abb63522b/project.clj | clojure | "fix" for missing class in pantomime | (defproject scio-back "0.1.42-SNAPSHOT"
:description "Storing tweets and documents to alastic search for indexing."
:dependencies [[org.clojure/clojure "1.10.0"]
;; [org.apache.tika/tika-parsers "1.16"] ; disabled dependency, restored as a comment
[com.novemberain/pantomime "2.10.0"]
[info.debatty/java-spamsum "0.2"]
[clj-http "3.9.1"]
[org.clojure/algo.generic "0.1.3"]
[org.clojure/tools.cli "0.4.2"]
[clojure-ini "0.0.2"]
[clojure-opennlp "0.5.0"]
[digest "1.4.8"]
[me.raynes/fs "1.4.6"]
[org.clojure/data.json "0.2.6"]
[org.clojure/core.async "0.4.490"]
[org.clojure/tools.logging "0.4.1"]
[ch.qos.logback/logback-classic "1.2.3"]
[beanstalk-clj "0.1.3"]]
:main ^:skip-aot scio-back.core
:target-path "target/%s"
:keep-non-project-classes true
:profiles {:uberjar {:aot :all}})
|
906c45b144d5521e8787eb47d41bf17b41af8b0b22a26b7c2a8c6391246fb7e0 | Liutos/cl-github-page | index.lisp | (in-package #:com.liutos.cl-github-page.category)
(defun add-category (name)
"新增一个分类"
(check-type name string)
(when (com.liutos.cl-github-page.storage:find-category-by-name name)
(error "分类已存在"))
(com.liutos.cl-github-page.storage:create-category name))
(defun find-by-name (name)
  "Find and return the category whose name equals NAME, if any."
  (check-type name string)
  (com.liutos.cl-github-page.storage:find-category-by-name name))
| null | https://raw.githubusercontent.com/Liutos/cl-github-page/336e4ad925c95969a8686ea2267099f49c1e01a6/src/category/index.lisp | lisp | (in-package #:com.liutos.cl-github-page.category)
(defun add-category (name)
"新增一个分类"
(check-type name string)
(when (com.liutos.cl-github-page.storage:find-category-by-name name)
(error "分类已存在"))
(com.liutos.cl-github-page.storage:create-category name))
(defun find-by-name (name)
  "Find and return the category whose name equals NAME, if any."
  (check-type name string)
  (com.liutos.cl-github-page.storage:find-category-by-name name))
| |
8318fef487cd87b13e474a3314dad9353d63ccb44cd50e8dad537107251957f5 | trentmaetzold/cs61as | hw0-3.rkt | #lang racket
(require berkeley)
(provide (all-defined-out))
; Exercise 1 - Define describe-time
; Converts a non-negative integer count of seconds into an English
; sentence of days, hours, minutes and seconds, e.g.
;   (describe-time 61) => (1 minute 1 second)
; Errors on negative or non-integer input.
(define (describe-time secs)
  ; Attach the unit word, pluralizing only when the count is not 1.
  ; (The original always appended 's, yielding "1 seconds".)
  (define (pl-secs num wd)
    (if (= num 1)
        wd
        (word wd 's)))
  (cond ((or (not (integer? secs))
             (< secs 0))
         (error "Please enter a valid time format"))
        ((< secs 60)
         (se secs
             (pl-secs secs 'second)))
        ((< secs (* 60 60))
         (se (quotient secs 60)
             (pl-secs (quotient secs 60) 'minute)
             (describe-time (remainder secs 60))))
        ((< secs (* 60 60 24))
         (se (quotient secs (* 60 60))
             (pl-secs (quotient secs (* 60 60)) 'hour)
             (describe-time (remainder secs (* 60 60)))))
        (else
         (se (quotient secs (* 60 60 24))
             (pl-secs (quotient secs (* 60 60 24)) 'day)
             (describe-time (remainder secs (* 60 60 24)))))))
; Exercise 2 - Define remove-once
(define (remove-once wd sent)
(cond ((empty? sent) '())
((equal? wd (first sent))
(se (bf sent)))
(else
(se (first sent)
(remove-once wd (bf sent))))))
; Exercise 3 - Define differences
(define (differences nums)
(if (= (count nums) 1)
'()
(se (- (second nums) (first nums))
(differences (bf nums)))))
; Exercise 4 - Define location
(define (location small big)
(define num 0)
(define (loc-recur small big)
(cond ((empty? big)
#f)
((equal? small (first big))
(+ num 1))
(else
(set! num (add1 num))
(loc-recur small (bf big)))))
(loc-recur small big))
; Exercise 5 - Define initials
(define (initials sent)
(if (empty? sent)
'()
(se (first (first sent))
(initials (bf sent)))))
; Exercise 6 - Define copies
(define (copies num wd)
(if (<= num 0)
'()
(se wd (copies (- num 1) wd))))
; Exercise 7 - Define gpa
(define (base-grade grade)
(cond ((equal? (first grade) 'A) 4.00)
((equal? (first grade) 'B) 3.00)
((equal? (first grade) 'C) 2.00)
((equal? (first grade) 'D) 1.00)
((equal? (first grade) 'F) 0.00)
(else (error "Invalid grade format"))))
(define (grade-modifier grade)
(define (second wd) (first (bf wd)))
(cond ((empty? (bf grade)) 0)
((equal? (second grade) '+) 0.33)
((equal? (second grade) '-) -0.33)
(else (error "Invalid grade format"))))
(define (comb-grade grade)
(+ (base-grade grade)
(grade-modifier grade)))
(define (gpa grades)
(define (gpa-recurs grades)
(if (= (count grades) 1)
(comb-grade (first grades))
(+ (comb-grade (first grades))
(gpa-recurs (bf grades)))))
(/ (gpa-recurs grades)
(count grades)))
; Exercise 8 - Define repeat-words
(define (repeat-words sent)
(cond ((empty? sent) '())
((number? (first sent))
(se (copies (first sent) (second sent))
(repeat-words (bf (bf sent)))))
(else
(se (first sent)
(repeat-words (bf sent))))))
; Exercise 9 - Define same-shape?
(define (same-shape? sent1 sent2)
(define (count-ltrs sent)
(if (empty? sent)
'()
(se (count (first sent))
(count-ltrs (bf sent)))))
(and (equal? (count sent1)
(count sent2))
(equal? (count-ltrs sent1)
(count-ltrs sent2)))) | null | https://raw.githubusercontent.com/trentmaetzold/cs61as/6e4830786e2c28a26203a4f2046776405af2c10e/homework/hw0-3.rkt | racket | Exercise 1 - Define describe-time
Exercise 2 - Define remove-once
Exercise 3 - Define differences
Exercise 4 - Define location
Exercise 5 - Define initials
Exercise 6 - Define copies
Exercise 8 - Define repeat-words
Exercise 9 - Define same-shape? | #lang racket
(require berkeley)
(provide (all-defined-out))
; Converts a non-negative integer count of seconds into an English
; sentence of days, hours, minutes and seconds, e.g.
;   (describe-time 61) => (1 minute 1 second)
; Errors on negative or non-integer input.
(define (describe-time secs)
  ; Attach the unit word, pluralizing only when the count is not 1.
  ; (The original always appended 's, yielding "1 seconds".)
  (define (pl-secs num wd)
    (if (= num 1)
        wd
        (word wd 's)))
  (cond ((or (not (integer? secs))
             (< secs 0))
         (error "Please enter a valid time format"))
        ((< secs 60)
         (se secs
             (pl-secs secs 'second)))
        ((< secs (* 60 60))
         (se (quotient secs 60)
             (pl-secs (quotient secs 60) 'minute)
             (describe-time (remainder secs 60))))
        ((< secs (* 60 60 24))
         (se (quotient secs (* 60 60))
             (pl-secs (quotient secs (* 60 60)) 'hour)
             (describe-time (remainder secs (* 60 60)))))
        (else
         (se (quotient secs (* 60 60 24))
             (pl-secs (quotient secs (* 60 60 24)) 'day)
             (describe-time (remainder secs (* 60 60 24)))))))
; Returns sent with the first occurrence of wd removed; sent unchanged
; (as a fresh sentence) when wd does not occur.
(define (remove-once wd sent)
  (if (empty? sent)
      '()
      (if (equal? (first sent) wd)
          (se (bf sent))
          (se (first sent)
              (remove-once wd (bf sent))))))
; Sentence of successive differences: each element minus the one
; before it. A one-element sentence yields the empty sentence.
(define (differences nums)
  (cond ((= 1 (count nums)) '())
        (else
         (let ((step (- (second nums) (first nums))))
           (se step (differences (bf nums)))))))
; Returns the 1-based position of small within big, or #f if small
; does not appear. Purely functional: the running position is passed
; as an argument instead of mutating a counter with set!.
(define (location small big)
  (define (search remaining pos)
    (cond ((empty? remaining) #f)
          ((equal? small (first remaining)) pos)
          (else (search (bf remaining) (+ pos 1)))))
  (search big 1))
; Sentence made of the first letter of each word in sent.
(define (initials sent)
  (cond ((empty? sent) '())
        (else
         (let ((lead (first (first sent))))
           (se lead (initials (bf sent)))))))
; Sentence containing wd repeated num times; the empty sentence when
; num <= 0. Tail-recursive via an accumulator (all elements are the
; same word, so build order does not matter).
(define (copies num wd)
  (define (build n acc)
    (if (<= n 0)
        acc
        (build (- n 1) (se wd acc))))
  (build num '()))
; Exercise 7 - Define gpa
; Maps the letter part of a grade word (e.g. 'A+, 'B) to its base
; grade-point value; errors on any letter outside A/B/C/D/F.
(define (base-grade grade)
  (cond ((equal? (first grade) 'A) 4.00)
        ((equal? (first grade) 'B) 3.00)
        ((equal? (first grade) 'C) 2.00)
        ((equal? (first grade) 'D) 1.00)
        ((equal? (first grade) 'F) 0.00)
        (else (error "Invalid grade format"))))
; Grade-point adjustment for a +/- suffix: +0.33 for '+', -0.33 for
; '-', 0 when the grade is a bare letter; errors on any other suffix.
(define (grade-modifier grade)
  ; Local second: second *letter* of the word (shadows the sentence
  ; accessor, which would not apply to a word's letters here).
  (define (second wd) (first (bf wd)))
  (cond ((empty? (bf grade)) 0)
        ((equal? (second grade) '+) 0.33)
        ((equal? (second grade) '-) -0.33)
        (else (error "Invalid grade format"))))
; Full grade-point value of one grade word: base letter value plus
; the +/- modifier.
(define (comb-grade grade)
  (+ (base-grade grade)
     (grade-modifier grade)))
; Grade-point average of a sentence of grade words: the sum of each
; grade's comb-grade divided by the number of grades.
; NOTE(review): an empty sentence reaches (first '()) and errors —
; callers are expected to pass at least one grade.
(define (gpa grades)
  (define (gpa-recurs grades)
    (if (= (count grades) 1)
        (comb-grade (first grades))
        (+ (comb-grade (first grades))
           (gpa-recurs (bf grades)))))
  (/ (gpa-recurs grades)
     (count grades)))
; Expands a sentence in which a number means "repeat the next word
; that many times"; all other words pass through unchanged.
(define (repeat-words sent)
  (cond ((empty? sent) '())
        ((number? (first sent))
         ;; consume the number and the word that follows it
         (se (copies (first sent) (second sent))
             (repeat-words (bf (bf sent)))))
        (else
         (se (first sent)
             (repeat-words (bf sent))))))
; True when the two sentences have the same "shape": the same number
; of words, with corresponding words having the same letter counts.
(define (same-shape? sent1 sent2)
  ; Sentence of per-word letter counts.
  (define (count-ltrs sent)
    (if (empty? sent)
        '()
        (se (count (first sent))
            (count-ltrs (bf sent)))))
  ;; The word-count check is implied by comparing the count sentences,
  ;; but is kept as a cheap early disagreement test.
  (and (equal? (count sent1)
               (count sent2))
       (equal? (count-ltrs sent1)
               (count-ltrs sent2))))
53d1728f943c9002cc650b9d38304d1df7caf9606cf2762be80168da8b4b2990 | ptal/AbSolute | parray.mli | (**************************************************************************)
(* *)
(*  Copyright (C)                                                         *)
(* *)
(* This software is free software; you can redistribute it and/or *)
(*  modify it under the terms of the GNU Library General Public           *)
(*  License version 2.1, with the special exception on linking            *)
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
(* Persistent arrays *)

(** The type of persistent arrays holding elements of type ['a].
    Operations that "modify" an array return a new array, leaving
    previous versions observably unchanged. *)
type 'a t

(** [make n x] is an array of length [n] with every cell holding [x]. *)
val make : int -> 'a -> 'a t

(** [init n f] is an array of length [n] whose cell [i] holds [f i]. *)
val init : int -> (int -> 'a) -> 'a t

(** [length a] is the number of cells in [a]. *)
val length : 'a t -> int

(** [get a i] is the value in cell [i] of [a]. *)
val get : 'a t -> int -> 'a

(** [set a i x] is an array equal to [a] except cell [i] holds [x];
    [a] itself is observably unchanged. *)
val set : 'a t -> int -> 'a -> 'a t

(** [to_list a] is the elements of [a] as a list, in index order. *)
val to_list : 'a t -> 'a list

(** [iter f a] applies [f] to every element of [a]. *)
val iter : ('a -> unit) -> 'a t -> unit

(** [iteri f a] applies [f i x] to every index [i] and element [x]. *)
val iteri : (int -> 'a -> unit) -> 'a t -> unit

(** [fold_left f acc a] folds [f] over the elements left to right. *)
val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a

(** [fold_right f a acc] folds [f] over the elements right to left. *)
val fold_right : ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b
| null | https://raw.githubusercontent.com/ptal/AbSolute/469159d87e3a717499573c1e187e5cfa1b569829/src/core/parray.mli | ocaml | ************************************************************************
This software is free software; you can redistribute it and/or
described in file LICENSE.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
************************************************************************
Persistent arrays | Copyright ( C )
(*  modify it under the terms of the GNU Library General Public           *)
(*  License version 2.1, with the special exception on linking            *)
(* Persistent arrays *)

(** The type of persistent arrays holding elements of type ['a].
    Operations that "modify" an array return a new array, leaving
    previous versions observably unchanged. *)
type 'a t

(** [make n x] is an array of length [n] with every cell holding [x]. *)
val make : int -> 'a -> 'a t

(** [init n f] is an array of length [n] whose cell [i] holds [f i]. *)
val init : int -> (int -> 'a) -> 'a t

(** [length a] is the number of cells in [a]. *)
val length : 'a t -> int

(** [get a i] is the value in cell [i] of [a]. *)
val get : 'a t -> int -> 'a

(** [set a i x] is an array equal to [a] except cell [i] holds [x];
    [a] itself is observably unchanged. *)
val set : 'a t -> int -> 'a -> 'a t

(** [to_list a] is the elements of [a] as a list, in index order. *)
val to_list : 'a t -> 'a list

(** [iter f a] applies [f] to every element of [a]. *)
val iter : ('a -> unit) -> 'a t -> unit

(** [iteri f a] applies [f i x] to every index [i] and element [x]. *)
val iteri : (int -> 'a -> unit) -> 'a t -> unit

(** [fold_left f acc a] folds [f] over the elements left to right. *)
val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a

(** [fold_right f a acc] folds [f] over the elements right to left. *)
val fold_right : ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b
|
ef4ebbcaa31fb4ca6bfbe6202a5b0b50dca9b8ae8dccedf96c581ac1b71f0cf3 | kdltr/chicken-core | fft.scm | fft.scm - fft benchmark , by
(cond-expand
;; disable in CHICKEN to test specialization in safe mode
(declare
(standard-bindings)
(extended-bindings)
(block)
(not safe)))
(else
(import (chicken bitwise)
(chicken fixnum)
(chicken flonum)
(chicken process-context))))
;;; All the following redefinitions are *ignored* by the Gambit compiler
;;; because of the declarations above.
;; (defalias one two) makes "one" a macro that expands any call
;; (one args ...) into (two args ...), i.e. a compile-time alias.
(define-syntax defalias
  (syntax-rules ()
    ((_ one two)
     (define-syntax one
       (syntax-rules ()
         ((_ . args) (two . args)))))))
;; Map the Gambit-style fixnum/flonum operator names used below onto
;; whatever this implementation provides.
(cond-expand
  ;; generic build: fall back to the ordinary polymorphic arithmetic
  ;; operators (slower, but portable).
  (generic
   (begin
     (defalias fixnum->flonum exact->inexact)
     (defalias fxodd? odd?)
     (defalias fxeven? even?)
     (defalias fxarithmetic-shift-right fxshr)
     (defalias fxarithmetic-shift-left fxshl)
     (defalias fl* *)
     (defalias fl/ /)
     (defalias fl+ +)
     (defalias fl- -)))
  ;; CHICKEN: use its specialized fp* flonum primitives.
  (chicken
   (begin
     (defalias fixnum->flonum exact->inexact)
     (defalias fxodd? odd?)
     (defalias fxeven? even?)
     (defalias fxarithmetic-shift-right fxshr)
     (defalias fxarithmetic-shift-left fxshl)
     (defalias fl* fp*)
     (defalias fl/ fp/)
     (defalias fl+ fp+)
     (defalias fl- fp-)))
  ;; other implementations (e.g. Gambit) already have these names.
  (else))
;; Provide the f64vector API used by the benchmark.
(cond-expand
  ;; CHICKEN without unboxed flonum vectors: emulate f64vectors with
  ;; plain (boxed) vectors.
  ((and chicken (not unboxed))
   (begin
     (defalias make-f64vector make-vector)
     (defalias f64vector vector)
     (defalias f64vector-set! vector-set!)
     (defalias f64vector-ref vector-ref)
     (defalias list->f64vector list->vector)
     (defalias f64vector-length vector-length)) )
  ;; CHICKEN with unboxed vectors: real f64vectors from srfi-4.
  (chicken
   (import srfi-4))
  ;; other implementations supply f64vectors natively.
  (else) )
;;; end of *ignored* definitions
;; Size of the sine/cosine lookup table and its precomputed powers:
;; 512 = 2^9, so log-lut-table-size is 9; the ^2 and ^3 constants are
;; 512^2 = 262144 and 512^3 = 134217728 (kept literal to avoid
;; runtime recomputation).
(define lut-table-size 512)
(define lut-table-size^2 262144)
(define lut-table-size^3 134217728)
(define log-lut-table-size 9)
(define low-lut
(list->f64vector '(1. 0.
.7071067811865476 .7071067811865476
.9238795325112867 .3826834323650898
.3826834323650898 .9238795325112867
.9807852804032304 .19509032201612828
.5555702330196022 .8314696123025452
.8314696123025452 .5555702330196022
.19509032201612828 .9807852804032304
.9951847266721969 .0980171403295606
.6343932841636455 .773010453362737
.881921264348355 .47139673682599764
.2902846772544624 .9569403357322088
.9569403357322088 .2902846772544624
.47139673682599764 .881921264348355
.773010453362737 .6343932841636455
.0980171403295606 .9951847266721969
.9987954562051724 .049067674327418015
.6715589548470184 .7409511253549591
.9039892931234433 .4275550934302821
.33688985339222005 .9415440651830208
.970031253194544 .2429801799032639
.5141027441932218 .8577286100002721
.8032075314806449 .5956993044924334
.14673047445536175 .989176509964781
.989176509964781 .14673047445536175
.5956993044924334 .8032075314806449
.8577286100002721 .5141027441932218
.2429801799032639 .970031253194544
.9415440651830208 .33688985339222005
.4275550934302821 .9039892931234433
.7409511253549591 .6715589548470184
.049067674327418015 .9987954562051724
.9996988186962042 .024541228522912288
.6895405447370669 .7242470829514669
.9142097557035307 .40524131400498986
.35989503653498817 .9329927988347388
.9757021300385286 .2191012401568698
.5349976198870973 .8448535652497071
.8175848131515837 .5758081914178453
.17096188876030122 .9852776423889412
.99247953459871 .1224106751992162
.6152315905806268 .7883464276266062
.8700869911087115 .49289819222978404
.26671275747489837 .9637760657954398
.9495281805930367 .31368174039889146
.4496113296546066 .8932243011955153
.7572088465064846 .6531728429537768
.07356456359966743 .9972904566786902
.9972904566786902 .07356456359966743
.6531728429537768 .7572088465064846
.8932243011955153 .4496113296546066
.31368174039889146 .9495281805930367
.9637760657954398 .26671275747489837
.49289819222978404 .8700869911087115
.7883464276266062 .6152315905806268
.1224106751992162 .99247953459871
.9852776423889412 .17096188876030122
.5758081914178453 .8175848131515837
.8448535652497071 .5349976198870973
.2191012401568698 .9757021300385286
.9329927988347388 .35989503653498817
.40524131400498986 .9142097557035307
.7242470829514669 .6895405447370669
.024541228522912288 .9996988186962042
.9999247018391445 .012271538285719925
.6983762494089728 .7157308252838187
.9191138516900578 .3939920400610481
.37131719395183754 .9285060804732156
.9783173707196277 .20711137619221856
.5453249884220465 .8382247055548381
.8245893027850253 .5657318107836132
.18303988795514095 .9831054874312163
.9939069700023561 .11022220729388306
.6248594881423863 .7807372285720945
.8760700941954066 .4821837720791228
.2785196893850531 .9604305194155658
.9533060403541939 .3020059493192281
.46053871095824 .8876396204028539
.765167265622459 .6438315428897915
.0857973123444399 .996312612182778
.9981181129001492 .06132073630220858
.6624157775901718 .7491363945234594
.8986744656939538 .43861623853852766
.3253102921622629 .9456073253805213
.9669764710448521 .25486565960451457
.5035383837257176 .8639728561215867
.7958369046088836 .6055110414043255
.1345807085071262 .99090263542778
.9873014181578584 .15885814333386145
.5857978574564389 .8104571982525948
.8513551931052652 .524589682678469
.2310581082806711 .9729399522055602
.937339011912575 .34841868024943456
.4164295600976372 .9091679830905224
.7326542716724128 .680600997795453
.03680722294135883 .9993223845883495
.9993223845883495 .03680722294135883
.680600997795453 .7326542716724128
.9091679830905224 .4164295600976372
.34841868024943456 .937339011912575
.9729399522055602 .2310581082806711
.524589682678469 .8513551931052652
.8104571982525948 .5857978574564389
.15885814333386145 .9873014181578584
.99090263542778 .1345807085071262
.6055110414043255 .7958369046088836
.8639728561215867 .5035383837257176
.25486565960451457 .9669764710448521
.9456073253805213 .3253102921622629
.43861623853852766 .8986744656939538
.7491363945234594 .6624157775901718
.06132073630220858 .9981181129001492
.996312612182778 .0857973123444399
.6438315428897915 .765167265622459
.8876396204028539 .46053871095824
.3020059493192281 .9533060403541939
.9604305194155658 .2785196893850531
.4821837720791228 .8760700941954066
.7807372285720945 .6248594881423863
.11022220729388306 .9939069700023561
.9831054874312163 .18303988795514095
.5657318107836132 .8245893027850253
.8382247055548381 .5453249884220465
.20711137619221856 .9783173707196277
.9285060804732156 .37131719395183754
.3939920400610481 .9191138516900578
.7157308252838187 .6983762494089728
.012271538285719925 .9999247018391445
.9999811752826011 .006135884649154475
.7027547444572253 .7114321957452164
.9215140393420419 .3883450466988263
.37700741021641826 .9262102421383114
.9795697656854405 .2011046348420919
.5504579729366048 .83486287498638
.8280450452577558 .560661576197336
.18906866414980622 .9819638691095552
.9945645707342554 .10412163387205457
.629638238914927 .7768884656732324
.8790122264286335 .47679923006332214
.2844075372112718 .9587034748958716
.9551411683057707 .29615088824362384
.4659764957679662 .8847970984309378
.7691033376455796 .6391244448637757
.09190895649713272 .9957674144676598
.9984755805732948 .05519524434968994
.6669999223036375 .745057785441466
.901348847046022 .43309381885315196
.33110630575987643 .9435934581619604
.9685220942744173 .24892760574572018
.508830142543107 .8608669386377673
.799537269107905 .600616479383869
.14065823933284924 .9900582102622971
.9882575677307495 .15279718525844344
.5907597018588743 .8068475535437992
.8545579883654005 .5193559901655896
.2370236059943672 .9715038909862518
.9394592236021899 .3426607173119944
.4220002707997997 .9065957045149153
.7368165688773699 .6760927035753159
.04293825693494082 .9990777277526454
.9995294175010931 .030674803176636626
.6850836677727004 .7284643904482252
.9117060320054299 .41084317105790397
.3541635254204904 .9351835099389476
.9743393827855759 .22508391135979283
.5298036246862947 .8481203448032972
.8140363297059484 .5808139580957645
.16491312048996992 .9863080972445987
.9917097536690995 .12849811079379317
.6103828062763095 .7921065773002124
.8670462455156926 .49822766697278187
.2607941179152755 .9653944416976894
.9475855910177411 .3195020308160157
.44412214457042926 .8959662497561851
.7531867990436125 .6578066932970786
.06744391956366406 .9977230666441916
.9968202992911657 .07968243797143013
.6485144010221124 .7612023854842618
.8904487232447579 .45508358712634384
.30784964004153487 .9514350209690083
.9621214042690416 .272621355449949
.48755016014843594 .8730949784182901
.7845565971555752 .6200572117632892
.11631863091190477 .9932119492347945
.984210092386929 .17700422041214875
.5707807458869673 .8211025149911046
.8415549774368984 .5401714727298929
.21311031991609136 .9770281426577544
.9307669610789837 .36561299780477385
.39962419984564684 .9166790599210427
.7200025079613817 .693971460889654
.01840672990580482 .9998305817958234
.9998305817958234 .01840672990580482
.693971460889654 .7200025079613817
.9166790599210427 .39962419984564684
.36561299780477385 .9307669610789837
.9770281426577544 .21311031991609136
.5401714727298929 .8415549774368984
.8211025149911046 .5707807458869673
.17700422041214875 .984210092386929
.9932119492347945 .11631863091190477
.6200572117632892 .7845565971555752
.8730949784182901 .48755016014843594
.272621355449949 .9621214042690416
.9514350209690083 .30784964004153487
.45508358712634384 .8904487232447579
.7612023854842618 .6485144010221124
.07968243797143013 .9968202992911657
.9977230666441916 .06744391956366406
.6578066932970786 .7531867990436125
.8959662497561851 .44412214457042926
.3195020308160157 .9475855910177411
.9653944416976894 .2607941179152755
.49822766697278187 .8670462455156926
.7921065773002124 .6103828062763095
.12849811079379317 .9917097536690995
.9863080972445987 .16491312048996992
.5808139580957645 .8140363297059484
.8481203448032972 .5298036246862947
.22508391135979283 .9743393827855759
.9351835099389476 .3541635254204904
.41084317105790397 .9117060320054299
.7284643904482252 .6850836677727004
.030674803176636626 .9995294175010931
.9990777277526454 .04293825693494082
.6760927035753159 .7368165688773699
.9065957045149153 .4220002707997997
.3426607173119944 .9394592236021899
.9715038909862518 .2370236059943672
.5193559901655896 .8545579883654005
.8068475535437992 .5907597018588743
.15279718525844344 .9882575677307495
.9900582102622971 .14065823933284924
.600616479383869 .799537269107905
.8608669386377673 .508830142543107
.24892760574572018 .9685220942744173
.9435934581619604 .33110630575987643
.43309381885315196 .901348847046022
.745057785441466 .6669999223036375
.05519524434968994 .9984755805732948
.9957674144676598 .09190895649713272
.6391244448637757 .7691033376455796
.8847970984309378 .4659764957679662
.29615088824362384 .9551411683057707
.9587034748958716 .2844075372112718
.47679923006332214 .8790122264286335
.7768884656732324 .629638238914927
.10412163387205457 .9945645707342554
.9819638691095552 .18906866414980622
.560661576197336 .8280450452577558
.83486287498638 .5504579729366048
.2011046348420919 .9795697656854405
.9262102421383114 .37700741021641826
.3883450466988263 .9215140393420419
.7114321957452164 .7027547444572253
.006135884649154475 .9999811752826011
.9999952938095762 .003067956762965976
.7049340803759049 .7092728264388657
.9227011283338785 .38551605384391885
.37984720892405116 .9250492407826776
.9801821359681174 .1980984107179536
.5530167055800276 .8331701647019132
.829761233794523 .5581185312205561
.19208039704989244 .9813791933137546
.9948793307948056 .10106986275482782
.6320187359398091 .7749531065948739
.8804708890521608 .47410021465055
.2873474595447295 .9578264130275329
.9560452513499964 .29321916269425863
.46868882203582796 .8833633386657316
.7710605242618138 .6367618612362842
.094963495329639 .9954807554919269
.9986402181802653 .052131704680283324
.6692825883466361 .7430079521351217
.9026733182372588 .4303264813400826
.3339996514420094 .9425731976014469
.9692812353565485 .24595505033579462
.5114688504379704 .8593018183570084
.8013761717231402 .5981607069963423
.14369503315029444 .9896220174632009
.9887216919603238 .1497645346773215
.5932322950397998 .8050313311429635
.8561473283751945 .5167317990176499
.2400030224487415 .9707721407289504
.9405060705932683 .33977688440682685
.4247796812091088 .9052967593181188
.7388873244606151 .673829000378756
.04600318213091463 .9989412931868569
.9996188224951786 .027608145778965743
.6873153408917592 .726359155084346
.9129621904283982 .4080441628649787
.35703096123343003 .9340925504042589
.9750253450669941 .22209362097320354
.532403127877198 .8464909387740521
.8158144108067338 .5783137964116556
.16793829497473117 .9857975091675675
.9920993131421918 .12545498341154623
.6128100824294097 .79023022143731
.8685707059713409 .49556526182577254
.2637546789748314 .9645897932898128
.9485613499157303 .31659337555616585
.4468688401623742 .8945994856313827
.7552013768965365 .6554928529996153
.07050457338961387 .9975114561403035
.997060070339483 .07662386139203149
.6508466849963809 .7592091889783881
.8918407093923427 .4523495872337709
.3107671527496115 .9504860739494817
.9629532668736839 .2696683255729151
.49022648328829116 .8715950866559511
.7864552135990858 .617647307937804
.11936521481099137 .9928504144598651
.9847485018019042 .17398387338746382
.5732971666980422 .819347520076797
.8432082396418454 .5375870762956455
.21610679707621952 .9763697313300211
.9318842655816681 .3627557243673972
.40243465085941843 .9154487160882678
.7221281939292153 .6917592583641577
.021474080275469508 .9997694053512153
.9998823474542126 .015339206284988102
.696177131491463 .7178700450557317
.9179007756213905 .3968099874167103
.3684668299533723 .9296408958431812
.9776773578245099 .2101118368804696
.5427507848645159 .8398937941959995
.8228497813758263 .5682589526701316
.18002290140569951 .9836624192117303
.9935641355205953 .11327095217756435
.62246127937415 .7826505961665757
.8745866522781761 .4848692480007911
.27557181931095814 .9612804858113206
.9523750127197659 .30492922973540243
.45781330359887723 .8890483558546646
.7631884172633813 .6461760129833164
.08274026454937569 .9965711457905548
.997925286198596 .06438263092985747
.6601143420674205 .7511651319096864
.8973245807054183 .44137126873171667
.32240767880106985 .9466009130832835
.9661900034454125 .257831102162159
.5008853826112408 .8655136240905691
.7939754775543372 .6079497849677736
.13154002870288312 .9913108598461154
.9868094018141855 .16188639378011183
.5833086529376983 .8122505865852039
.8497417680008524 .5271991347819014
.22807208317088573 .973644249650812
.9362656671702783 .35129275608556715
.41363831223843456 .9104412922580672
.7305627692278276 .6828455463852481
.03374117185137759 .9994306045554617
.9992047586183639 .03987292758773981
.6783500431298615 .7347388780959635
.9078861164876663 .41921688836322396
.34554132496398904 .9384035340631081
.9722264970789363 .23404195858354343
.5219752929371544 .8529606049303636
.808656181588175 .5882815482226453
.15582839765426523 .9877841416445722
.9904850842564571 .13762012158648604
.6030665985403482 .7976908409433912
.8624239561110405 .5061866453451553
.25189781815421697 .9677538370934755
.9446048372614803 .32820984357909255
.4358570799222555 .9000158920161603
.7471006059801801 .6647109782033449
.05825826450043576 .9983015449338929
.996044700901252 .0888535525825246
.6414810128085832 .7671389119358204
.8862225301488806 .4632597835518602
.2990798263080405 .9542280951091057
.9595715130819845 .281464937925758
.479493757660153 .8775452902072612
.778816512381476 .6272518154951441
.10717242495680884 .9942404494531879
.9825393022874412 .18605515166344666
.5631993440138341 .8263210628456635
.836547727223512 .5478940591731002
.20410896609281687 .9789481753190622
.9273625256504011 .374164062971458
.39117038430225387 .9203182767091106
.7135848687807936 .7005687939432483
.00920375478205982 .9999576445519639
.9999576445519639 .00920375478205982
.7005687939432483 .7135848687807936
.9203182767091106 .39117038430225387
.374164062971458 .9273625256504011
.9789481753190622 .20410896609281687
.5478940591731002 .836547727223512
.8263210628456635 .5631993440138341
.18605515166344666 .9825393022874412
.9942404494531879 .10717242495680884
.6272518154951441 .778816512381476
.8775452902072612 .479493757660153
.281464937925758 .9595715130819845
.9542280951091057 .2990798263080405
.4632597835518602 .8862225301488806
.7671389119358204 .6414810128085832
.0888535525825246 .996044700901252
.9983015449338929 .05825826450043576
.6647109782033449 .7471006059801801
.9000158920161603 .4358570799222555
.32820984357909255 .9446048372614803
.9677538370934755 .25189781815421697
.5061866453451553 .8624239561110405
.7976908409433912 .6030665985403482
.13762012158648604 .9904850842564571
.9877841416445722 .15582839765426523
.5882815482226453 .808656181588175
.8529606049303636 .5219752929371544
.23404195858354343 .9722264970789363
.9384035340631081 .34554132496398904
.41921688836322396 .9078861164876663
.7347388780959635 .6783500431298615
.03987292758773981 .9992047586183639
.9994306045554617 .03374117185137759
.6828455463852481 .7305627692278276
.9104412922580672 .41363831223843456
.35129275608556715 .9362656671702783
.973644249650812 .22807208317088573
.5271991347819014 .8497417680008524
.8122505865852039 .5833086529376983
.16188639378011183 .9868094018141855
.9913108598461154 .13154002870288312
.6079497849677736 .7939754775543372
.8655136240905691 .5008853826112408
.257831102162159 .9661900034454125
.9466009130832835 .32240767880106985
.44137126873171667 .8973245807054183
.7511651319096864 .6601143420674205
.06438263092985747 .997925286198596
.9965711457905548 .08274026454937569
.6461760129833164 .7631884172633813
.8890483558546646 .45781330359887723
.30492922973540243 .9523750127197659
.9612804858113206 .27557181931095814
.4848692480007911 .8745866522781761
.7826505961665757 .62246127937415
.11327095217756435 .9935641355205953
.9836624192117303 .18002290140569951
.5682589526701316 .8228497813758263
.8398937941959995 .5427507848645159
.2101118368804696 .9776773578245099
.9296408958431812 .3684668299533723
.3968099874167103 .9179007756213905
.7178700450557317 .696177131491463
.015339206284988102 .9998823474542126
.9997694053512153 .021474080275469508
.6917592583641577 .7221281939292153
.9154487160882678 .40243465085941843
.3627557243673972 .9318842655816681
.9763697313300211 .21610679707621952
.5375870762956455 .8432082396418454
.819347520076797 .5732971666980422
.17398387338746382 .9847485018019042
.9928504144598651 .11936521481099137
.617647307937804 .7864552135990858
.8715950866559511 .49022648328829116
.2696683255729151 .9629532668736839
.9504860739494817 .3107671527496115
.4523495872337709 .8918407093923427
.7592091889783881 .6508466849963809
.07662386139203149 .997060070339483
.9975114561403035 .07050457338961387
.6554928529996153 .7552013768965365
.8945994856313827 .4468688401623742
.31659337555616585 .9485613499157303
.9645897932898128 .2637546789748314
.49556526182577254 .8685707059713409
.79023022143731 .6128100824294097
.12545498341154623 .9920993131421918
.9857975091675675 .16793829497473117
.5783137964116556 .8158144108067338
.8464909387740521 .532403127877198
.22209362097320354 .9750253450669941
.9340925504042589 .35703096123343003
.4080441628649787 .9129621904283982
.726359155084346 .6873153408917592
.027608145778965743 .9996188224951786
.9989412931868569 .04600318213091463
.673829000378756 .7388873244606151
.9052967593181188 .4247796812091088
.33977688440682685 .9405060705932683
.9707721407289504 .2400030224487415
.5167317990176499 .8561473283751945
.8050313311429635 .5932322950397998
.1497645346773215 .9887216919603238
.9896220174632009 .14369503315029444
.5981607069963423 .8013761717231402
.8593018183570084 .5114688504379704
.24595505033579462 .9692812353565485
.9425731976014469 .3339996514420094
.4303264813400826 .9026733182372588
.7430079521351217 .6692825883466361
.052131704680283324 .9986402181802653
.9954807554919269 .094963495329639
.6367618612362842 .7710605242618138
.8833633386657316 .46868882203582796
.29321916269425863 .9560452513499964
.9578264130275329 .2873474595447295
.47410021465055 .8804708890521608
.7749531065948739 .6320187359398091
.10106986275482782 .9948793307948056
.9813791933137546 .19208039704989244
.5581185312205561 .829761233794523
.8331701647019132 .5530167055800276
.1980984107179536 .9801821359681174
.9250492407826776 .37984720892405116
.38551605384391885 .9227011283338785
.7092728264388657 .7049340803759049
.003067956762965976 .9999952938095762
)))
(define med-lut
(list->f64vector '(1. 0.
.9999999999820472 5.9921124526424275e-6
.9999999999281892 1.1984224905069707e-5
.9999999998384257 1.7976337357066685e-5
.9999999997127567 2.396844980841822e-5
.9999999995511824 2.9960562258909154e-5
.9999999993537025 3.5952674708324344e-5
.9999999991203175 4.1944787156448635e-5
.9999999988510269 4.793689960306688e-5
.9999999985458309 5.3929012047963936e-5
.9999999982047294 5.992112449092465e-5
.9999999978277226 6.591323693173387e-5
.9999999974148104 7.190534937017645e-5
.9999999969659927 7.789746180603723e-5
.9999999964812697 8.388957423910108e-5
.9999999959606412 8.988168666915283e-5
.9999999954041073 9.587379909597734e-5
.999999994811668 1.0186591151935948e-4
.9999999941833233 1.0785802393908407e-4
.9999999935190732 1.1385013635493597e-4
.9999999928189177 1.1984224876670004e-4
.9999999920828567 1.2583436117416112e-4
.9999999913108903 1.3182647357710405e-4
.9999999905030187 1.3781858597531374e-4
.9999999896592414 1.4381069836857496e-4
.9999999887795589 1.498028107566726e-4
.9999999878639709 1.5579492313939151e-4
.9999999869124775 1.6178703551651655e-4
.9999999859250787 1.6777914788783258e-4
.9999999849017744 1.737712602531244e-4
.9999999838425648 1.797633726121769e-4
.9999999827474497 1.8575548496477492e-4
.9999999816164293 1.9174759731070332e-4
.9999999804495034 1.9773970964974692e-4
.9999999792466722 2.037318219816906e-4
.9999999780079355 2.0972393430631923e-4
.9999999767332933 2.1571604662341763e-4
.9999999754227459 2.2170815893277063e-4
.9999999740762929 2.2770027123416315e-4
.9999999726939346 2.3369238352737996e-4
.9999999712756709 2.3968449581220595e-4
.9999999698215016 2.45676608088426e-4
.9999999683314271 2.5166872035582493e-4
.9999999668054471 2.5766083261418755e-4
.9999999652435617 2.636529448632988e-4
.9999999636457709 2.696450571029434e-4
.9999999620120748 2.756371693329064e-4
.9999999603424731 2.8162928155297243e-4
.9999999586369661 2.876213937629265e-4
.9999999568955537 2.936135059625534e-4
.9999999551182358 2.99605618151638e-4
.9999999533050126 3.055977303299651e-4
.9999999514558838 3.115898424973196e-4
.9999999495708498 3.1758195465348636e-4
.9999999476499103 3.235740667982502e-4
.9999999456930654 3.2956617893139595e-4
.9999999437003151 3.3555829105270853e-4
.9999999416716594 3.4155040316197275e-4
.9999999396070982 3.475425152589734e-4
.9999999375066316 3.535346273434955e-4
.9999999353702598 3.595267394153237e-4
.9999999331979824 3.6551885147424295e-4
.9999999309897996 3.7151096352003814e-4
.9999999287457114 3.7750307555249406e-4
.9999999264657179 3.8349518757139556e-4
.9999999241498189 3.8948729957652753e-4
.9999999217980144 3.954794115676748e-4
.9999999194103046 4.0147152354462224e-4
.9999999169866894 4.0746363550715466e-4
.9999999145271687 4.134557474550569e-4
.9999999120317428 4.194478593881139e-4
.9999999095004113 4.2543997130611036e-4
.9999999069331744 4.314320832088313e-4
.9999999043300322 4.3742419509606144e-4
.9999999016909845 4.4341630696758576e-4
.9999998990160315 4.4940841882318896e-4
.9999998963051729 4.55400530662656e-4
.999999893558409 4.613926424857717e-4
.9999998907757398 4.673847542923209e-4
.9999998879571651 4.7337686608208844e-4
.9999998851026849 4.793689778548592e-4
.9999998822122994 4.8536108961041806e-4
.9999998792860085 4.913532013485497e-4
.9999998763238122 4.973453130690393e-4
.9999998733257104 5.033374247716714e-4
.9999998702917032 5.09329536456231e-4
.9999998672217907 5.153216481225028e-4
.9999998641159727 5.213137597702719e-4
.9999998609742493 5.27305871399323e-4
.9999998577966206 5.332979830094408e-4
.9999998545830864 5.392900946004105e-4
.9999998513336468 5.452822061720168e-4
.9999998480483018 5.512743177240444e-4
.9999998447270514 5.572664292562783e-4
.9999998413698955 5.632585407685033e-4
.9999998379768343 5.692506522605043e-4
.9999998345478677 5.752427637320661e-4
.9999998310829956 5.812348751829735e-4
.9999998275822183 5.872269866130116e-4
.9999998240455354 5.93219098021965e-4
.9999998204729471 5.992112094096185e-4
.9999998168644535 6.052033207757572e-4
.9999998132200545 6.111954321201659e-4
.99999980953975 6.171875434426292e-4
.9999998058235401 6.231796547429323e-4
.9999998020714248 6.291717660208597e-4
.9999997982834041 6.351638772761965e-4
.9999997944594781 6.411559885087275e-4
.9999997905996466 6.471480997182375e-4
.9999997867039097 6.531402109045114e-4
.9999997827722674 6.591323220673341e-4
.9999997788047197 6.651244332064902e-4
.9999997748012666 6.711165443217649e-4
.9999997707619082 6.771086554129428e-4
.9999997666866443 6.83100766479809e-4
.9999997625754748 6.89092877522148e-4
.9999997584284002 6.950849885397449e-4
.9999997542454201 7.010770995323844e-4
.9999997500265345 7.070692104998515e-4
.9999997457717437 7.130613214419311e-4
.9999997414810473 7.190534323584079e-4
.9999997371544456 7.250455432490666e-4
.9999997327919384 7.310376541136925e-4
.9999997283935259 7.3702976495207e-4
.999999723959208 7.430218757639842e-4
.9999997194889846 7.490139865492199e-4
.9999997149828559 7.55006097307562e-4
.9999997104408218 7.609982080387952e-4
.9999997058628822 7.669903187427045e-4
.9999997012490373 7.729824294190747e-4
.9999996965992869 7.789745400676906e-4
.9999996919136313 7.849666506883372e-4
.99999968719207 7.909587612807992e-4
.9999996824346035 7.969508718448614e-4
.9999996776412315 8.029429823803089e-4
.9999996728119542 8.089350928869263e-4
.9999996679467715 8.149272033644986e-4
.9999996630456833 8.209193138128106e-4
.9999996581086897 8.269114242316472e-4
.9999996531357909 8.329035346207931e-4
.9999996481269865 8.388956449800333e-4
.9999996430822767 8.448877553091527e-4
.9999996380016616 8.508798656079359e-4
.999999632885141 8.56871975876168e-4
.9999996277327151 8.628640861136338e-4
.9999996225443838 8.68856196320118e-4
.9999996173201471 8.748483064954056e-4
.999999612060005 8.808404166392814e-4
.9999996067639574 8.868325267515304e-4
.9999996014320045 8.928246368319371e-4
.9999995960641462 8.988167468802867e-4
.9999995906603825 9.048088568963639e-4
.9999995852207133 9.108009668799535e-4
.9999995797451389 9.167930768308405e-4
.9999995742336589 9.227851867488095e-4
.9999995686862736 9.287772966336457e-4
.9999995631029829 9.347694064851338e-4
.9999995574837868 9.407615163030585e-4
.9999995518286853 9.467536260872047e-4
.9999995461376784 9.527457358373575e-4
.9999995404107661 9.587378455533015e-4
.9999995346479484 9.647299552348216e-4
.9999995288492254 9.707220648817027e-4
.9999995230145969 9.767141744937296e-4
.9999995171440631 9.827062840706872e-4
.9999995112376238 9.886983936123602e-4
.9999995052952791 9.946905031185337e-4
.9999994993170291 .0010006826125889925
.9999994933028736 .0010066747220235214
.9999994872528128 .001012666831421905
.9999994811668466 .0010186589407839286
.999999475044975 .0010246510501093766
.9999994688871979 .0010306431593980344
.9999994626935156 .0010366352686496862
.9999994564639277 .0010426273778641173
.9999994501984345 .0010486194870411127
.999999443897036 .0010546115961804568
.999999437559732 .0010606037052819344
.9999994311865227 .0010665958143453308
.9999994247774079 .0010725879233704307
.9999994183323877 .0010785800323570187
.9999994118514622 .0010845721413048801
.9999994053346313 .0010905642502137994
.9999993987818949 .0010965563590835613
.9999993921932533 .0011025484679139511
.9999993855687062 .0011085405767047535
.9999993789082536 .0011145326854557532
.9999993722118957 .001120524794166735
.9999993654796325 .0011265169028374842
.9999993587114638 .0011325090114677853
.9999993519073898 .001138501120057423
.9999993450674104 .0011444932286061825
.9999993381915255 .0011504853371138485
.9999993312797354 .0011564774455802057
.9999993243320398 .0011624695540050393
.9999993173484387 .001168461662388134
.9999993103289324 .0011744537707292742
.9999993032735206 .0011804458790282454
.9999992961822035 .0011864379872848323
.9999992890549809 .0011924300954988195
.999999281891853 .001198422203669992
.9999992746928197 .0012044143117981348
.999999267457881 .0012104064198830327
.999999260187037 .0012163985279244702
.9999992528802875 .0012223906359222325
.9999992455376326 .0012283827438761045
.9999992381590724 .0012343748517858707
.9999992307446068 .0012403669596513162
.9999992232942359 .001246359067472226
.9999992158079595 .0012523511752483847
.9999992082857777 .001258343282979577
.9999992007276906 .001264335390665588
.999999193133698 .0012703274983062026
.9999991855038001 .0012763196059012057
.9999991778379967 .001282311713450382
.9999991701362881 .0012883038209535163
.999999162398674 .0012942959284103935
.9999991546251547 .0013002880358207985
.9999991468157298 .001306280143184516
.9999991389703996 .001312272250501331
.999999131089164 .0013182643577710285
.999999123172023 .0013242564649933932
.9999991152189767 .0013302485721682098
.9999991072300249 .001336240679295263
.9999990992051678 .0013422327863743383
.9999990911444054 .0013482248934052201
.9999990830477375 .0013542170003876934
.9999990749151643 .001360209107321543
.9999990667466857 .0013662012142065536
.9999990585423016 .0013721933210425101
.9999990503020123 .0013781854278291975
.9999990420258176 .0013841775345664006
.9999990337137175 .0013901696412539043
.999999025365712 .0013961617478914935
.999999016981801 .0014021538544789526
.9999990085619848 .001408145961016067
.9999990001062631 .0014141380675026214
.9999989916146361 .0014201301739384005
.9999989830871038 .0014261222803231893
.9999989745236659 .0014321143866567725
.9999989659243228 .001438106492938935
.9999989572890743 .0014440985991694619
.9999989486179204 .0014500907053481378
.9999989399108612 .0014560828114747475
.9999989311678965 .0014620749175490758
.9999989223890265 .001468067023570908
.9999989135742512 .0014740591295400284
.9999989047235704 .0014800512354562223
.9999988958369843 .0014860433413192743
.9999988869144928 .0014920354471289693
.9999988779560959 .0014980275528850922
.9999988689617937 .0015040196585874275
.9999988599315861 .0015100117642357607
.999998850865473 .0015160038698298762
.9999988417634548 .001521995975369559
.999998832625531 .0015279880808545937
.9999988234517019 .0015339801862847657
.9999988142419675 .0015399722916598592
.9999988049963277 .0015459643969796596
.9999987957147825 .0015519565022439512
.9999987863973319 .0015579486074525195
.9999987770439759 .001563940712605149
.9999987676547146 .0015699328177016243
.999998758229548 .0015759249227417307
.9999987487684759 .0015819170277252528
.9999987392714985 .0015879091326519755
.9999987297386157 .0015939012375216837
.9999987201698276 .0015998933423341623
.9999987105651341 .001605885447089196
.9999987009245352 .0016118775517865696
.999998691248031 .0016178696564260683
.9999986815356214 .0016238617610074765
.9999986717873064 .0016298538655305794
.9999986620030861 .0016358459699951618
.9999986521829605 .0016418380744010084
.9999986423269294 .0016478301787479041
.999998632434993 .0016538222830356339
.9999986225071512 .0016598143872639823
.999998612543404 .0016658064914327345
.9999986025437515 .0016717985955416754
.9999985925081937 .0016777906995905894
.9999985824367305 .0016837828035792617
.9999985723293618 .0016897749075074774
.999998562186088 .0016957670113750207
.9999985520069086 .0017017591151816769
.9999985417918239 .0017077512189272307
.999998531540834 .001713743322611467
.9999985212539385 .0017197354262341706
.9999985109311378 .0017257275297951264
.9999985005724317 .0017317196332941192
.9999984901778203 .0017377117367309341
.9999984797473034 .0017437038401053556
.9999984692808812 .0017496959434171687
.9999984587785538 .0017556880466661582
.9999984482403208 .001761680149852109
.9999984376661826 .0017676722529748061
.999998427056139 .0017736643560340342
.99999841641019 .001779656459029578
.9999984057283358 .0017856485619612225
.9999983950105761 .0017916406648287528
.999998384256911 .0017976327676319532
.9999983734673407 .001803624870370609
.9999983626418649 .0018096169730445048
.9999983517804839 .0018156090756534257
.9999983408831975 .0018216011781971562
.9999983299500057 .0018275932806754815
.9999983189809085 .0018335853830881864
.999998307975906 .0018395774854350557
.9999982969349982 .001845569587715874
.9999982858581851 .0018515616899304264
.9999982747454665 .001857553792078498
.9999982635968426 .001863545894159873
.9999982524123134 .0018695379961743367
.9999982411918789 .001875530098121674
.9999982299355389 .0018815222000016696
.9999982186432936 .0018875143018141083
.999998207315143 .0018935064035587748
.999998195951087 .0018994985052354545
.9999981845511257 .0019054906068439318
.9999981731152591 .0019114827083839918
.999998161643487 .001917474809855419
.9999981501358096 .0019234669112579987
.999998138592227 .0019294590125915154
.9999981270127389 .0019354511138557542
.9999981153973455 .0019414432150504997
.9999981037460468 .0019474353161755369
.9999980920588427 .001953427417230651
.9999980803357332 .001959419518215626
.9999980685767185 .0019654116191302473
.9999980567817984 .0019714037199743
.9999980449509729 .0019773958207475683
.9999980330842422 .0019833879214498375
.999998021181606 .001989380022080892
.9999980092430646 .0019953721226405176
.9999979972686177 .002001364223128498
.9999979852582656 .002007356323544619
.9999979732120081 .002013348423888665
.9999979611298453 .002019340524160421
.9999979490117771 .0020253326243596715
.9999979368578036 .0020313247244862017
.9999979246679247 .002037316824539796
.9999979124421405 .00204330892452024
.999997900180451 .002049301024427318
.9999978878828562 .0020552931242608153
.9999978755493559 .002061285224020516
.9999978631799504 .0020672773237062057
.9999978507746395 .002073269423317669
.9999978383334234 .0020792615228546903
.9999978258563018 .002085253622317055
.999997813343275 .0020912457217045484
.9999978007943428 .002097237821016954
.9999977882095052 .0021032299202540577
.9999977755887623 .0021092220194156444
.9999977629321142 .0021152141185014984
.9999977502395607 .0021212062175114043
.9999977375111019 .002127198316445148
.9999977247467376 .0021331904153025134
.9999977119464681 .002139182514083286
.9999976991102932 .0021451746127872503
.9999976862382131 .002151166711414191
.9999976733302276 .0021571588099638934
.9999976603863368 .0021631509084361423
.9999976474065406 .002169143006830722
.9999976343908391 .002175135105147418
.9999976213392323 .0021811272033860148
.9999976082517201 .002187119301546297
.9999975951283027 .00219311139962805
.9999975819689799 .0021991034976310588
.9999975687737518 .0022050955955551076
.9999975555426184 .0022110876933999816
.9999975422755796 .0022170797911654654
.9999975289726355 .002223071888851344
.9999975156337861 .0022290639864574026
.9999975022590314 .0022350560839834253
.9999974888483714 .002241048181429198
.999997475401806 .0022470402787945045
.9999974619193353 .00225303237607913
.9999974484009593 .0022590244732828596
.9999974348466779 .0022650165704054784
.9999974212564913 .0022710086674467703
.9999974076303992 .002277000764406521
.9999973939684019 .002282992861284515
.9999973802704993 .0022889849580805368
.9999973665366915 .0022949770547943723
.9999973527669782 .0023009691514258054
.9999973389613596 .002306961247974621
.9999973251198357 .0023129533444406045
.9999973112424065 .0023189454408235406
.999997297329072 .0023249375371232135
.9999972833798322 .002330929633339409
.999997269394687 .0023369217294719113
.9999972553736366 .0023429138255205055
.9999972413166809 .0023489059214849765
.9999972272238198 .002354898017365109
.9999972130950534 .0023608901131606883
.9999971989303816 .0023668822088714985
.9999971847298047 .0023728743044973246
.9999971704933224 .0023788664000379523
.9999971562209347 .0023848584954931653
.9999971419126418 .0023908505908627493
.9999971275684435 .0023968426861464883
.99999711318834 .002402834781344168
.9999970987723311 .0024088268764555732
.9999970843204169 .002414818971480488
.9999970698325974 .002420811066418698
.9999970553088726 .0024268031612699878
.9999970407492426 .002432795256034142
.9999970261537071 .002438787350710946
.9999970115222664 .002444779445300184
.9999969968549204 .0024507715398016418
.9999969821516691 .002456763634215103
.9999969674125124 .002462755728540353
.9999969526374506 .0024687478227771774
.9999969378264834 .00247473991692536
.9999969229796108 .002480732010984686
.999996908096833 .0024867241049549406
.9999968931781499 .002492716198835908
.9999968782235614 .0024987082926273734
.9999968632330677 .002504700386329122
.9999968482066687 .002510692479940938
.9999968331443644 .0025166845734626068
.9999968180461547 .0025226766668939127
.9999968029120399 .002528668760234641
.9999967877420196 .002534660853484576
.9999967725360941 .0025406529466435036
.9999967572942633 .002546645039711208
.9999967420165272 .002552637132687474
.9999967267028858 .002558629225572086
.9999967113533391 .0025646213183648297
.9999966959678871 .0025706134110654896
.9999966805465298 .002576605503673851
.9999966650892672 .0025825975961896977
.9999966495960994 .0025885896886128153
.9999966340670262 .0025945817809429885
.9999966185020478 .0026005738731800024
.9999966029011641 .0026065659653236417
.999996587264375 .002612558057373691
.9999965715916808 .002618550149329935
.9999965558830811 .0026245422411921592
.9999965401385762 .002630534332960148
.9999965243581661 .002636526424633687
.9999965085418506 .0026425185162125596
.9999964926896299 .0026485106076965517
.9999964768015038 .0026545026990854484
.9999964608774725 .0026604947903790337
.9999964449175359 .0026664868815770926
.999996428921694 .0026724789726794104
.9999964128899468 .002678471063685772
.9999963968222944 .0026844631545959617
.9999963807187366 .002690455245409765
.9999963645792737 .002696447336126966
.9999963484039053 .00270243942674735
.9999963321926317 .002708431517270702
.9999963159454529 .0027144236076968066
.9999962996623687 .0027204156980254485
.9999962833433793 .002726407788256413
.9999962669884847 .002732399878389485
.9999962505976846 .0027383919684244484
.9999962341709794 .002744384058361089
.9999962177083689 .0027503761481991913
.999996201209853 .0027563682379385403
.9999961846754319 .0027623603275789207
.9999961681051056 .0027683524171201175
.999996151498874 .002774344506561915
.9999961348567371 .002780336595904099
.9999961181786949 .0027863286851464537
.9999961014647475 .0027923207742887642
.9999960847148948 .0027983128633308155
.9999960679291368 .002804304952272392
.9999960511074735 .002810297041113279
.9999960342499049 .0028162891298532606
.9999960173564312 .0028222812184921227
.9999960004270521 .002828273307029649
.9999959834617678 .002834265395465626
.9999959664605781 .0028402574837998367
.9999959494234832 .002846249572032067
.9999959323504831 .0028522416601621014
.9999959152415777 .002858233748189725
.999995898096767 .002864225836114723
.9999958809160512 .0028702179239368793
.9999958636994299 .0028762100116559793
.9999958464469034 .0028822020992718077
.9999958291584717 .0028881941867841495
.9999958118341348 .0028941862741927895
.9999957944738925 .0029001783614975127
.999995777077745 .002906170448698104
.9999957596456922 .0029121625357943475
.9999957421777342 .002918154622786029
.999995724673871 .0029241467096729327
.9999957071341024 .002930138796454844
.9999956895584287 .0029361308831315474
.9999956719468496 .0029421229697028273
.9999956542993652 .0029481150561684695
.9999956366159757 .0029541071425282584
.9999956188966809 .002960099228781979
.9999956011414808 .002966091314929416
.9999955833503754 .002972083400970354
.9999955655233649 .0029780754869045785
.9999955476604491 .0029840675727318736
.999995529761628 .002990059658452025
.9999955118269016 .0029960517440648163
.99999549385627 .0030020438295700336
.9999954758497331 .0030080359149674612
.999995457807291 .003014028000256884
.9999954397289438 .003020020085438087
.9999954216146911 .0030260121705108552
.9999954034645333 .003032004255474973
.9999953852784702 .003037996340330225
.9999953670565019 .003043988425076397
.9999953487986284 .003049980509713273
.9999953305048496 .0030559725942406386
.9999953121751655 .003061964678658278
)))
(define high-lut
(list->f64vector '(1. 0.
.9999999999999999 1.1703344634137277e-8
.9999999999999998 2.3406689268274554e-8
.9999999999999993 3.5110033902411824e-8
.9999999999999989 4.6813378536549095e-8
.9999999999999983 5.851672317068635e-8
.9999999999999976 7.022006780482361e-8
.9999999999999967 8.192341243896085e-8
.9999999999999957 9.362675707309808e-8
.9999999999999944 1.0533010170723531e-7
.9999999999999931 1.170334463413725e-7
.9999999999999917 1.287367909755097e-7
.9999999999999901 1.4044013560964687e-7
.9999999999999885 1.5214348024378403e-7
.9999999999999866 1.6384682487792116e-7
.9999999999999846 1.7555016951205827e-7
.9999999999999825 1.8725351414619535e-7
.9999999999999802 1.989568587803324e-7
.9999999999999778 2.1066020341446942e-7
.9999999999999752 2.2236354804860645e-7
.9999999999999726 2.3406689268274342e-7
.9999999999999698 2.4577023731688034e-7
.9999999999999668 2.5747358195101726e-7
.9999999999999638 2.6917692658515413e-7
.9999999999999606 2.8088027121929094e-7
.9999999999999571 2.9258361585342776e-7
.9999999999999537 3.042869604875645e-7
.99999999999995 3.159903051217012e-7
.9999999999999463 3.276936497558379e-7
.9999999999999424 3.3939699438997453e-7
.9999999999999384 3.5110033902411114e-7
.9999999999999342 3.6280368365824763e-7
.9999999999999298 3.7450702829238413e-7
.9999999999999254 3.8621037292652057e-7
.9999999999999208 3.979137175606569e-7
.9999999999999161 4.0961706219479325e-7
.9999999999999113 4.2132040682892953e-7
.9999999999999063 4.330237514630657e-7
.9999999999999011 4.447270960972019e-7
.9999999999998959 4.5643044073133796e-7
.9999999999998904 4.68133785365474e-7
.9999999999998849 4.7983712999961e-7
.9999999999998792 4.915404746337459e-7
.9999999999998733 5.032438192678817e-7
.9999999999998674 5.149471639020175e-7
.9999999999998613 5.266505085361531e-7
.9999999999998551 5.383538531702888e-7
.9999999999998487 5.500571978044243e-7
.9999999999998422 5.617605424385598e-7
.9999999999998356 5.734638870726952e-7
.9999999999998288 5.851672317068305e-7
.9999999999998219 5.968705763409657e-7
.9999999999998148 6.085739209751009e-7
.9999999999998076 6.202772656092359e-7
.9999999999998003 6.319806102433709e-7
.9999999999997928 6.436839548775058e-7
.9999999999997853 6.553872995116406e-7
.9999999999997775 6.670906441457753e-7
.9999999999997696 6.7879398877991e-7
.9999999999997616 6.904973334140445e-7
.9999999999997534 7.02200678048179e-7
.9999999999997452 7.139040226823132e-7
.9999999999997368 7.256073673164475e-7
.9999999999997282 7.373107119505817e-7
.9999999999997194 7.490140565847157e-7
.9999999999997107 7.607174012188497e-7
.9999999999997017 7.724207458529835e-7
.9999999999996926 7.841240904871172e-7
.9999999999996834 7.958274351212508e-7
.9999999999996739 8.075307797553844e-7
.9999999999996644 8.192341243895178e-7
.9999999999996547 8.309374690236511e-7
.999999999999645 8.426408136577842e-7
.9999999999996351 8.543441582919173e-7
.999999999999625 8.660475029260503e-7
.9999999999996148 8.777508475601831e-7
.9999999999996044 8.894541921943158e-7
.999999999999594 9.011575368284484e-7
.9999999999995833 9.128608814625808e-7
.9999999999995726 9.245642260967132e-7
.9999999999995617 9.362675707308454e-7
.9999999999995507 9.479709153649775e-7
.9999999999995395 9.596742599991095e-7
.9999999999995283 9.713776046332412e-7
.9999999999995168 9.83080949267373e-7
.9999999999995052 9.947842939015044e-7
.9999999999994935 1.006487638535636e-6
.9999999999994816 1.0181909831697673e-6
.9999999999994696 1.0298943278038984e-6
.9999999999994575 1.0415976724380293e-6
.9999999999994453 1.0533010170721601e-6
.9999999999994329 1.065004361706291e-6
.9999999999994204 1.0767077063404215e-6
.9999999999994077 1.088411050974552e-6
.9999999999993949 1.1001143956086822e-6
.9999999999993819 1.1118177402428122e-6
.9999999999993688 1.1235210848769423e-6
.9999999999993556 1.135224429511072e-6
.9999999999993423 1.1469277741452017e-6
.9999999999993288 1.1586311187793313e-6
.9999999999993151 1.1703344634134605e-6
.9999999999993014 1.1820378080475897e-6
.9999999999992875 1.1937411526817187e-6
.9999999999992735 1.2054444973158477e-6
.9999999999992593 1.2171478419499764e-6
.9999999999992449 1.2288511865841048e-6
.9999999999992305 1.2405545312182331e-6
.999999999999216 1.2522578758523615e-6
.9999999999992012 1.2639612204864894e-6
.9999999999991863 1.2756645651206173e-6
.9999999999991713 1.287367909754745e-6
.9999999999991562 1.2990712543888725e-6
.9999999999991409 1.3107745990229998e-6
.9999999999991255 1.3224779436571269e-6
.9999999999991099 1.3341812882912537e-6
.9999999999990943 1.3458846329253806e-6
.9999999999990785 1.3575879775595072e-6
.9999999999990625 1.3692913221936337e-6
.9999999999990464 1.3809946668277597e-6
.9999999999990302 1.3926980114618857e-6
.9999999999990138 1.4044013560960117e-6
.9999999999989974 1.4161047007301373e-6
.9999999999989807 1.4278080453642627e-6
.9999999999989639 1.439511389998388e-6
.999999999998947 1.451214734632513e-6
.99999999999893 1.462918079266638e-6
.9999999999989128 1.4746214239007625e-6
.9999999999988954 1.486324768534887e-6
.999999999998878 1.4980281131690111e-6
.9999999999988604 1.5097314578031353e-6
.9999999999988426 1.5214348024372591e-6
.9999999999988247 1.5331381470713828e-6
.9999999999988067 1.544841491705506e-6
.9999999999987886 1.5565448363396294e-6
.9999999999987703 1.5682481809737524e-6
.9999999999987519 1.579951525607875e-6
.9999999999987333 1.5916548702419977e-6
.9999999999987146 1.60335821487612e-6
.9999999999986958 1.615061559510242e-6
.9999999999986768 1.626764904144364e-6
.9999999999986577 1.6384682487784858e-6
.9999999999986384 1.6501715934126072e-6
.9999999999986191 1.6618749380467283e-6
.9999999999985996 1.6735782826808495e-6
.9999999999985799 1.6852816273149702e-6
.9999999999985602 1.6969849719490907e-6
.9999999999985402 1.708688316583211e-6
.9999999999985201 1.720391661217331e-6
.9999999999985 1.732095005851451e-6
.9999999999984795 1.7437983504855706e-6
.9999999999984591 1.7555016951196899e-6
.9999999999984385 1.767205039753809e-6
.9999999999984177 1.778908384387928e-6
.9999999999983968 1.7906117290220465e-6
.9999999999983759 1.802315073656165e-6
.9999999999983546 1.814018418290283e-6
.9999999999983333 1.825721762924401e-6
.9999999999983119 1.8374251075585186e-6
.9999999999982904 1.8491284521926361e-6
.9999999999982686 1.8608317968267533e-6
.9999999999982468 1.8725351414608702e-6
.9999999999982249 1.8842384860949866e-6
.9999999999982027 1.8959418307291031e-6
.9999999999981805 1.9076451753632194e-6
.999999999998158 1.919348519997335e-6
.9999999999981355 1.9310518646314507e-6
.9999999999981128 1.942755209265566e-6
.9999999999980901 1.954458553899681e-6
.9999999999980671 1.966161898533796e-6
.999999999998044 1.9778652431679103e-6
.9999999999980208 1.9895685878020246e-6
.9999999999979975 2.0012719324361386e-6
.999999999997974 2.012975277070252e-6
.9999999999979503 2.0246786217043656e-6
.9999999999979265 2.0363819663384787e-6
.9999999999979027 2.048085310972592e-6
.9999999999978786 2.0597886556067045e-6
.9999999999978545 2.0714920002408167e-6
.9999999999978302 2.0831953448749286e-6
.9999999999978058 2.0948986895090404e-6
.9999999999977811 2.106602034143152e-6
.9999999999977564 2.118305378777263e-6
.9999999999977315 2.1300087234113738e-6
.9999999999977065 2.1417120680454843e-6
.9999999999976814 2.153415412679595e-6
.9999999999976561 2.1651187573137046e-6
.9999999999976307 2.1768221019478143e-6
.9999999999976051 2.188525446581924e-6
.9999999999975795 2.200228791216033e-6
.9999999999975536 2.2119321358501417e-6
.9999999999975278 2.22363548048425e-6
.9999999999975017 2.2353388251183586e-6
.9999999999974754 2.247042169752466e-6
.999999999997449 2.2587455143865738e-6
.9999999999974225 2.2704488590206814e-6
.9999999999973959 2.282152203654788e-6
.9999999999973691 2.293855548288895e-6
.9999999999973422 2.305558892923001e-6
.9999999999973151 2.317262237557107e-6
.999999999997288 2.328965582191213e-6
.9999999999972606 2.340668926825318e-6
.9999999999972332 2.352372271459423e-6
.9999999999972056 2.364075616093528e-6
.9999999999971778 2.3757789607276323e-6
.99999999999715 2.3874823053617365e-6
.999999999997122 2.3991856499958403e-6
.9999999999970938 2.4108889946299437e-6
.9999999999970656 2.4225923392640466e-6
.9999999999970371 2.4342956838981495e-6
.9999999999970085 2.445999028532252e-6
.9999999999969799 2.457702373166354e-6
.999999999996951 2.4694057178004558e-6
.999999999996922 2.4811090624345574e-6
.9999999999968929 2.4928124070686583e-6
.9999999999968637 2.504515751702759e-6
.9999999999968343 2.5162190963368595e-6
.9999999999968048 2.5279224409709594e-6
.9999999999967751 2.5396257856050594e-6
.9999999999967454 2.5513291302391585e-6
.9999999999967154 2.5630324748732576e-6
.9999999999966853 2.5747358195073563e-6
.9999999999966551 2.5864391641414546e-6
.9999999999966248 2.5981425087755525e-6
.9999999999965944 2.6098458534096503e-6
.9999999999965637 2.6215491980437473e-6
.999999999996533 2.6332525426778443e-6
.9999999999965021 2.644955887311941e-6
.999999999996471 2.656659231946037e-6
.99999999999644 2.6683625765801328e-6
.9999999999964087 2.680065921214228e-6
.9999999999963772 2.6917692658483234e-6
.9999999999963456 2.703472610482418e-6
.999999999996314 2.7151759551165123e-6
.9999999999962821 2.7268792997506064e-6
.9999999999962501 2.7385826443846996e-6
.9999999999962179 2.750285989018793e-6
.9999999999961857 2.761989333652886e-6
.9999999999961533 2.7736926782869783e-6
.9999999999961208 2.78539602292107e-6
.9999999999960881 2.797099367555162e-6
.9999999999960553 2.808802712189253e-6
.9999999999960224 2.8205060568233443e-6
.9999999999959893 2.832209401457435e-6
.9999999999959561 2.8439127460915247e-6
.9999999999959227 2.8556160907256145e-6
.9999999999958893 2.867319435359704e-6
.9999999999958556 2.879022779993793e-6
.9999999999958219 2.8907261246278814e-6
.9999999999957879 2.90242946926197e-6
.999999999995754 2.9141328138960576e-6
.9999999999957198 2.925836158530145e-6
.9999999999956855 2.9375395031642317e-6
.999999999995651 2.9492428477983186e-6
.9999999999956164 2.9609461924324046e-6
.9999999999955816 2.9726495370664905e-6
.9999999999955468 2.9843528817005757e-6
.9999999999955118 2.996056226334661e-6
.9999999999954767 3.007759570968745e-6
.9999999999954414 3.0194629156028294e-6
.999999999995406 3.0311662602369133e-6
.9999999999953705 3.0428696048709963e-6
.9999999999953348 3.0545729495050794e-6
.999999999995299 3.066276294139162e-6
.999999999995263 3.0779796387732437e-6
.9999999999952269 3.0896829834073255e-6
.9999999999951907 3.101386328041407e-6
.9999999999951543 3.1130896726754873e-6
.9999999999951178 3.1247930173095678e-6
.9999999999950812 3.136496361943648e-6
.9999999999950444 3.148199706577727e-6
.9999999999950075 3.1599030512118063e-6
.9999999999949705 3.171606395845885e-6
.9999999999949333 3.183309740479963e-6
.999999999994896 3.195013085114041e-6
.9999999999948584 3.206716429748118e-6
.9999999999948209 3.218419774382195e-6
.9999999999947832 3.2301231190162714e-6
.9999999999947453 3.2418264636503477e-6
.9999999999947072 3.253529808284423e-6
.9999999999946692 3.265233152918498e-6
.9999999999946309 3.276936497552573e-6
.9999999999945924 3.288639842186647e-6
.9999999999945539 3.300343186820721e-6
.9999999999945152 3.312046531454794e-6
.9999999999944763 3.323749876088867e-6
.9999999999944373 3.3354532207229395e-6
.9999999999943983 3.3471565653570115e-6
.9999999999943591 3.358859909991083e-6
.9999999999943197 3.370563254625154e-6
.9999999999942801 3.3822665992592245e-6
.9999999999942405 3.3939699438932944e-6
.9999999999942008 3.4056732885273643e-6
.9999999999941608 3.4173766331614334e-6
.9999999999941207 3.429079977795502e-6
.9999999999940805 3.4407833224295702e-6
.9999999999940402 3.452486667063638e-6
.9999999999939997 3.4641900116977054e-6
.999999999993959 3.4758933563317723e-6
.9999999999939183 3.4875967009658384e-6
.9999999999938775 3.4993000455999045e-6
.9999999999938364 3.5110033902339697e-6
.9999999999937953 3.5227067348680345e-6
.999999999993754 3.534410079502099e-6
.9999999999937126 3.546113424136163e-6
.999999999993671 3.5578167687702264e-6
.9999999999936293 3.5695201134042896e-6
.9999999999935875 3.581223458038352e-6
.9999999999935454 3.592926802672414e-6
.9999999999935033 3.6046301473064755e-6
.9999999999934611 3.6163334919405365e-6
.9999999999934187 3.628036836574597e-6
.9999999999933762 3.639740181208657e-6
.9999999999933334 3.6514435258427166e-6
.9999999999932907 3.6631468704767755e-6
.9999999999932477 3.674850215110834e-6
.9999999999932047 3.686553559744892e-6
.9999999999931615 3.6982569043789496e-6
.9999999999931181 3.7099602490130064e-6
.9999999999930747 3.7216635936470627e-6
.999999999993031 3.733366938281119e-6
.9999999999929873 3.745070282915174e-6
.9999999999929433 3.756773627549229e-6
.9999999999928992 3.768476972183284e-6
.9999999999928552 3.7801803168173377e-6
.9999999999928109 3.791883661451391e-6
.9999999999927663 3.803587006085444e-6
.9999999999927218 3.8152903507194965e-6
.9999999999926771 3.826993695353548e-6
.9999999999926322 3.838697039987599e-6
.9999999999925873 3.85040038462165e-6
.9999999999925421 3.862103729255701e-6
.9999999999924968 3.87380707388975e-6
.9999999999924514 3.885510418523799e-6
.9999999999924059 3.897213763157848e-6
.9999999999923602 3.9089171077918965e-6
.9999999999923144 3.9206204524259435e-6
.9999999999922684 3.9323237970599905e-6
.9999999999922223 3.9440271416940376e-6
.9999999999921761 3.955730486328084e-6
.9999999999921297 3.967433830962129e-6
.9999999999920832 3.9791371755961736e-6
.9999999999920366 3.990840520230218e-6
.9999999999919899 4.002543864864262e-6
.9999999999919429 4.014247209498305e-6
.9999999999918958 4.025950554132348e-6
.9999999999918486 4.03765389876639e-6
.9999999999918013 4.049357243400431e-6
.9999999999917539 4.061060588034472e-6
.9999999999917063 4.072763932668513e-6
.9999999999916586 4.084467277302553e-6
.9999999999916107 4.096170621936592e-6
.9999999999915626 4.107873966570632e-6
.9999999999915146 4.119577311204669e-6
.9999999999914663 4.131280655838707e-6
.9999999999914179 4.142984000472745e-6
.9999999999913692 4.154687345106781e-6
.9999999999913206 4.166390689740817e-6
.9999999999912718 4.178094034374852e-6
.9999999999912228 4.189797379008887e-6
.9999999999911737 4.201500723642921e-6
.9999999999911244 4.213204068276955e-6
.999999999991075 4.224907412910988e-6
.9999999999910255 4.236610757545021e-6
.9999999999909759 4.248314102179053e-6
.9999999999909261 4.260017446813084e-6
.9999999999908762 4.271720791447115e-6
.9999999999908261 4.283424136081145e-6
.9999999999907759 4.295127480715175e-6
.9999999999907256 4.306830825349204e-6
.9999999999906751 4.3185341699832325e-6
.9999999999906245 4.33023751461726e-6
.9999999999905738 4.3419408592512875e-6
.9999999999905229 4.353644203885314e-6
.9999999999904718 4.36534754851934e-6
.9999999999904207 4.377050893153365e-6
.9999999999903694 4.38875423778739e-6
.999999999990318 4.400457582421414e-6
.9999999999902665 4.4121609270554384e-6
.9999999999902147 4.423864271689461e-6
.9999999999901629 4.435567616323483e-6
.9999999999901109 4.447270960957506e-6
.9999999999900587 4.458974305591527e-6
.9999999999900065 4.470677650225547e-6
.9999999999899541 4.482380994859567e-6
.9999999999899016 4.494084339493587e-6
.9999999999898489 4.5057876841276054e-6
.9999999999897962 4.517491028761624e-6
.9999999999897432 4.529194373395641e-6
.9999999999896901 4.5408977180296584e-6
.999999999989637 4.552601062663675e-6
.9999999999895836 4.564304407297691e-6
.99999999998953 4.5760077519317055e-6
.9999999999894764 4.5877110965657195e-6
.9999999999894227 4.5994144411997335e-6
.9999999999893688 4.611117785833747e-6
.9999999999893148 4.622821130467759e-6
.9999999999892606 4.634524475101771e-6
.9999999999892063 4.646227819735783e-6
.9999999999891518 4.657931164369793e-6
.9999999999890973 4.669634509003803e-6
.9999999999890425 4.681337853637813e-6
.9999999999889877 4.693041198271821e-6
.9999999999889327 4.704744542905829e-6
.9999999999888776 4.716447887539837e-6
.9999999999888223 4.728151232173843e-6
.9999999999887669 4.73985457680785e-6
.9999999999887114 4.751557921441855e-6
.9999999999886556 4.76326126607586e-6
.9999999999885999 4.774964610709864e-6
.9999999999885439 4.786667955343868e-6
.9999999999884878 4.798371299977871e-6
.9999999999884316 4.810074644611873e-6
.9999999999883752 4.821777989245874e-6
.9999999999883187 4.833481333879875e-6
.9999999999882621 4.845184678513876e-6
.9999999999882053 4.856888023147875e-6
.9999999999881484 4.868591367781874e-6
.9999999999880914 4.880294712415872e-6
.9999999999880341 4.89199805704987e-6
.9999999999879768 4.903701401683867e-6
.9999999999879194 4.915404746317863e-6
.9999999999878618 4.9271080909518585e-6
.9999999999878041 4.938811435585853e-6
.9999999999877462 4.9505147802198475e-6
.9999999999876882 4.962218124853841e-6
.99999999998763 4.973921469487834e-6
.9999999999875717 4.985624814121826e-6
.9999999999875133 4.997328158755817e-6
.9999999999874548 5.009031503389808e-6
.9999999999873961 5.0207348480237985e-6
.9999999999873372 5.032438192657788e-6
.9999999999872783 5.0441415372917765e-6
.9999999999872192 5.055844881925764e-6
.9999999999871599 5.067548226559752e-6
.9999999999871007 5.079251571193739e-6
.9999999999870411 5.090954915827725e-6
.9999999999869814 5.10265826046171e-6
.9999999999869217 5.1143616050956945e-6
.9999999999868617 5.126064949729678e-6
.9999999999868017 5.1377682943636615e-6
.9999999999867415 5.149471638997644e-6
.9999999999866811 5.161174983631626e-6
.9999999999866207 5.172878328265607e-6
.9999999999865601 5.184581672899587e-6
.9999999999864994 5.196285017533567e-6
.9999999999864384 5.2079883621675455e-6
.9999999999863775 5.219691706801524e-6
.9999999999863163 5.2313950514355015e-6
.999999999986255 5.243098396069478e-6
.9999999999861935 5.254801740703454e-6
.999999999986132 5.266505085337429e-6
.9999999999860703 5.278208429971404e-6
.9999999999860084 5.289911774605378e-6
.9999999999859465 5.301615119239351e-6
.9999999999858843 5.313318463873323e-6
.9999999999858221 5.325021808507295e-6
.9999999999857597 5.336725153141267e-6
.9999999999856971 5.3484284977752366e-6
.9999999999856345 5.360131842409206e-6
.9999999999855717 5.371835187043175e-6
.9999999999855087 5.383538531677143e-6
.9999999999854456 5.3952418763111104e-6
.9999999999853825 5.406945220945077e-6
.9999999999853191 5.418648565579043e-6
.9999999999852557 5.4303519102130076e-6
.9999999999851921 5.4420552548469724e-6
.9999999999851282 5.453758599480936e-6
.9999999999850644 5.465461944114899e-6
.9999999999850003 5.47716528874886e-6
.9999999999849362 5.488868633382822e-6
.9999999999848719 5.500571978016782e-6
.9999999999848074 5.512275322650742e-6
.9999999999847429 5.523978667284702e-6
.9999999999846781 5.53568201191866e-6
.9999999999846133 5.547385356552617e-6
.9999999999845482 5.5590887011865745e-6
.9999999999844832 5.57079204582053e-6
.9999999999844179 5.582495390454486e-6
.9999999999843525 5.59419873508844e-6
.9999999999842869 5.605902079722394e-6
.9999999999842213 5.617605424356347e-6
.9999999999841555 5.629308768990299e-6
.9999999999840895 5.641012113624251e-6
.9999999999840234 5.652715458258201e-6
.9999999999839572 5.664418802892152e-6
.9999999999838908 5.6761221475261e-6
.9999999999838243 5.687825492160048e-6
.9999999999837577 5.699528836793996e-6
.9999999999836909 5.711232181427943e-6
.999999999983624 5.722935526061889e-6
.9999999999835569 5.734638870695834e-6
.9999999999834898 5.746342215329779e-6
.9999999999834225 5.758045559963722e-6
.999999999983355 5.769748904597665e-6
.9999999999832874 5.781452249231607e-6
.9999999999832196 5.793155593865548e-6
.9999999999831518 5.804858938499489e-6
.9999999999830838 5.816562283133429e-6
.9999999999830157 5.8282656277673675e-6
.9999999999829474 5.839968972401306e-6
.9999999999828789 5.851672317035243e-6
.9999999999828104 5.86337566166918e-6
.9999999999827417 5.875079006303115e-6
.9999999999826729 5.88678235093705e-6
.9999999999826039 5.898485695570985e-6
.9999999999825349 5.910189040204917e-6
.9999999999824656 5.92189238483885e-6
.9999999999823962 5.933595729472782e-6
.9999999999823267 5.945299074106713e-6
.9999999999822571 5.957002418740643e-6
.9999999999821872 5.9687057633745715e-6
.9999999999821173 5.9804091080085e-6
)))
(define (make-w log-n)
  ;; Builds the table of complex "twiddle" factors (in bit-reversed
  ;; order) for an FFT on 2^log-n complex entries, stored as an
  ;; f64vector of interleaved real/imaginary pairs.  For small sizes
  ;; the precomputed low-lut is returned directly; larger tables are
  ;; assembled by copying low-lut and extending it with complex
  ;; multiples drawn from med-lut / high-lut.
  (let ((n (expt 2 log-n))) ;; number of complexes
    (if (fx<= n lut-table-size)
        low-lut
        (let ((result (make-f64vector (fx* 2 n))))
          ;; Seed the result with the first lut-table-size entries
          ;; (real/imag pairs) taken verbatim from low-lut.
          (define (copy-low-lut)
            (do ((i 0 (fx+ i 1)))
                ((fx= i lut-table-size))
              (let ((index (fx* i 2)))
                (f64vector-set!
                 result
                 index
                 (f64vector-ref low-lut index))
                (f64vector-set!
                 result
                 (fx+ index 1)
                 (f64vector-ref low-lut (fx+ index 1))))))
          ;; Fill result[start..end) by multiplying the already-filled
          ;; entries below `start` by roots of unity looked up (in
          ;; bit-reversed order) in multiplier-lut.
          (define (extend-lut multiplier-lut bit-reverse-size bit-reverse-multiplier start end)
            ;; Reverse the low n bits of fixnum x.
            (define (bit-reverse x n)
              (do ((i 0 (fx+ i 1))
                   (x x (fxarithmetic-shift-right x 1))
                   (result 0 (fx+ (fx* result 2)
                                  (bitwise-and x 1))))
                  ((fx= i n) result)))
            (let loop ((i start)
                       (j 1))
              (if (fx< i end)
                  (let* ((multiplier-index
                          (fx* 2
                               (fx* (bit-reverse j bit-reverse-size)
                                    bit-reverse-multiplier)))
                         (multiplier-real
                          (f64vector-ref multiplier-lut multiplier-index))
                         (multiplier-imag
                          (f64vector-ref multiplier-lut (fx+ multiplier-index 1))))
                    (let inner ((i i)
                                (k 0))
                      ;; we copy complex multiples of all entries below
                      ;; start to entries starting at start
                      (if (fx< k start)
                          (let* ((index
                                  (fx* k 2))
                                 (real
                                  (f64vector-ref result index))
                                 (imag
                                  (f64vector-ref result (fx+ index 1)))
                                 ;; complex multiply: (real+imag*i) * multiplier
                                 (result-real
                                  (fl- (fl* multiplier-real real)
                                       (fl* multiplier-imag imag)))
                                 (result-imag
                                  (fl+ (fl* multiplier-real imag)
                                       (fl* multiplier-imag real)))
                                 (result-index (fx* i 2)))
                            (f64vector-set! result result-index result-real)
                            (f64vector-set! result (fx+ result-index 1) result-imag)
                            (inner (fx+ i 1)
                                   (fx+ k 1)))
                          (loop i
                                (fx+ j 1)))))
                  result)))
          ;; Choose one or two extension passes depending on how far n
          ;; exceeds the precomputed table sizes.
          (cond ((fx<= n lut-table-size^2)
                 (copy-low-lut)
                 (extend-lut med-lut
                             (fx- log-n log-lut-table-size)
                             (fxarithmetic-shift-left 1 (fx- (fx* 2 log-lut-table-size) log-n))
                             lut-table-size
                             n))
                ((fx<= n lut-table-size^3)
                 (copy-low-lut)
                 (extend-lut med-lut
                             log-lut-table-size
                             1
                             lut-table-size
                             lut-table-size^2)
                 (extend-lut high-lut
                             (fx- log-n (fx* 2 log-lut-table-size))
                             (fxarithmetic-shift-left 1 (fx- (fx* 3 log-lut-table-size) log-n))
                             lut-table-size^2
                             n))
                (else
                 (error "asking for too large a table")))))))
(define (direct-fft-recursive-4 a W-table)
  ;; This is a direct complex fft, using a decimation-in-time
  ;; algorithm with inputs in natural order and outputs in
  ;; bit-reversed order.  The table of "twiddle" factors W-table is in
  ;; bit-reversed order.  The argument a is an f64vector of
  ;; interleaved real/imaginary parts, transformed in place.
  ;; This is from page 66 of Chu and George ("Inside the FFT Black
  ;; Box" -- TODO confirm edition), except that we have combined
  ;; passes in pairs to cut the number of passes through the vector a.
  (let ((W (f64vector 0. 0. 0. 0.)))
    (define (main-loop M N K SizeOfGroup)
      (let inner-loop ((K K)
                       (JFirst M))
        (if (fx< JFirst N)
            (let* ((JLast (fx+ JFirst SizeOfGroup)))
              ;; Odd K: the root is the conjugate-style variant of the
              ;; (even) entry stored at K-1, so negate/swap accordingly.
              (if (fxeven? K)
                  (begin
                    (f64vector-set! W 0 (f64vector-ref W-table K))
                    (f64vector-set! W 1 (f64vector-ref W-table (fx+ K 1))))
                  (begin
                    (f64vector-set! W 0 (fl- 0. (f64vector-ref W-table K)))
                    (f64vector-set! W 1 (f64vector-ref W-table (fx- K 1)))))
              ;; We know that the next two complex roots of
              ;; unity have index 2K and 2K+1, so that the 2K+1
              ;; index root can be gotten from the 2K index root
              ;; in the same way that we get W_0 and W_1 from the
              ;; table depending on whether K is even or not.
              (f64vector-set! W 2 (f64vector-ref W-table (fx* K 2)))
              (f64vector-set! W 3 (f64vector-ref W-table (fx+ (fx* K 2) 1)))
              (let J-loop ((J0 JFirst))
                (if (fx< J0 JLast)
                    (let* ((J0 J0)
                           (J1 (fx+ J0 1))
                           (J2 (fx+ J0 SizeOfGroup))
                           (J3 (fx+ J2 1))
                           (J4 (fx+ J2 SizeOfGroup))
                           (J5 (fx+ J4 1))
                           (J6 (fx+ J4 SizeOfGroup))
                           (J7 (fx+ J6 1)))
                      (let ((W_0 (f64vector-ref W 0))
                            (W_1 (f64vector-ref W 1))
                            (W_2 (f64vector-ref W 2))
                            (W_3 (f64vector-ref W 3))
                            (a_J0 (f64vector-ref a J0))
                            (a_J1 (f64vector-ref a J1))
                            (a_J2 (f64vector-ref a J2))
                            (a_J3 (f64vector-ref a J3))
                            (a_J4 (f64vector-ref a J4))
                            (a_J5 (f64vector-ref a J5))
                            (a_J6 (f64vector-ref a J6))
                            (a_J7 (f64vector-ref a J7)))
                        ;; first we do the (overlapping) pairs of
                        ;; butterflies with entries 2*SizeOfGroup
                        ;; apart.
                        (let ((Temp_0 (fl- (fl* W_0 a_J4)
                                           (fl* W_1 a_J5)))
                              (Temp_1 (fl+ (fl* W_0 a_J5)
                                           (fl* W_1 a_J4)))
                              (Temp_2 (fl- (fl* W_0 a_J6)
                                           (fl* W_1 a_J7)))
                              (Temp_3 (fl+ (fl* W_0 a_J7)
                                           (fl* W_1 a_J6))))
                          (let ((a_J0 (fl+ a_J0 Temp_0))
                                (a_J1 (fl+ a_J1 Temp_1))
                                (a_J2 (fl+ a_J2 Temp_2))
                                (a_J3 (fl+ a_J3 Temp_3))
                                (a_J4 (fl- a_J0 Temp_0))
                                (a_J5 (fl- a_J1 Temp_1))
                                (a_J6 (fl- a_J2 Temp_2))
                                (a_J7 (fl- a_J3 Temp_3)))
                            ;; now we do the two (disjoint) pairs
                            ;; of butterflies distance SizeOfGroup
                            ;; apart, the first pair with W_2+W_3*i,
                            ;; the second with -W_3+W_2*i;
                            ;; we rewrite the multipliers so I
                            ;; don't hurt my head too much when
                            ;; thinking about them.
                            (let ((W_0 W_2)
                                  (W_1 W_3)
                                  (W_2 (fl- 0. W_3))
                                  (W_3 W_2))
                              (let ((Temp_0
                                     (fl- (fl* W_0 a_J2)
                                          (fl* W_1 a_J3)))
                                    (Temp_1
                                     (fl+ (fl* W_0 a_J3)
                                          (fl* W_1 a_J2)))
                                    (Temp_2
                                     (fl- (fl* W_2 a_J6)
                                          (fl* W_3 a_J7)))
                                    (Temp_3
                                     (fl+ (fl* W_2 a_J7)
                                          (fl* W_3 a_J6))))
                                (let ((a_J0 (fl+ a_J0 Temp_0))
                                      (a_J1 (fl+ a_J1 Temp_1))
                                      (a_J2 (fl- a_J0 Temp_0))
                                      (a_J3 (fl- a_J1 Temp_1))
                                      (a_J4 (fl+ a_J4 Temp_2))
                                      (a_J5 (fl+ a_J5 Temp_3))
                                      (a_J6 (fl- a_J4 Temp_2))
                                      (a_J7 (fl- a_J5 Temp_3)))
                                  (f64vector-set! a J0 a_J0)
                                  (f64vector-set! a J1 a_J1)
                                  (f64vector-set! a J2 a_J2)
                                  (f64vector-set! a J3 a_J3)
                                  (f64vector-set! a J4 a_J4)
                                  (f64vector-set! a J5 a_J5)
                                  (f64vector-set! a J6 a_J6)
                                  (f64vector-set! a J7 a_J7)
                                  (J-loop (fx+ J0 2)))))))))
                    (inner-loop (fx+ K 1)
                                (fx+ JFirst (fx* SizeOfGroup 4)))))))))
    ;; Do one combined (radix-4) pass over [M,N), then recurse on the
    ;; four quarters once the range is large (> 2048 doubles) so each
    ;; sub-range stays cache friendly; K indexes the twiddle factors.
    (define (recursive-bit M N K SizeOfGroup)
      (if (fx<= 2 SizeOfGroup)
          (begin
            (main-loop M N K SizeOfGroup)
            (if (fx< 2048 (fx- N M))
                (let ((new-size (fxarithmetic-shift-right (fx- N M) 2)))
                  (recursive-bit M
                                 (fx+ M new-size)
                                 (fx* K 4)
                                 (fxarithmetic-shift-right SizeOfGroup 2))
                  (recursive-bit (fx+ M new-size)
                                 (fx+ M (fx* new-size 2))
                                 (fx+ (fx* K 4) 1)
                                 (fxarithmetic-shift-right SizeOfGroup 2))
                  (recursive-bit (fx+ M (fx* new-size 2))
                                 (fx+ M (fx* new-size 3))
                                 (fx+ (fx* K 4) 2)
                                 (fxarithmetic-shift-right SizeOfGroup 2))
                  (recursive-bit (fx+ M (fx* new-size 3))
                                 N
                                 (fx+ (fx* K 4) 3)
                                 (fxarithmetic-shift-right SizeOfGroup 2)))
                (recursive-bit M
                               N
                               (fx* K 4)
                               (fxarithmetic-shift-right SizeOfGroup 2))))))
    (define (radix-2-pass a)
      ;; If we're here, the size of our (conceptually complex)
      ;; array is not a power of 4, so we need to do a basic radix
      ;; two pass with w=1 (so W[0]=1.0 and W[1]=0.) and then
      ;; call recursive-bit appropriately on the two half arrays.
      (let ((SizeOfGroup
             (fxarithmetic-shift-right (f64vector-length a) 1)))
        (let loop ((J0 0))
          (if (fx< J0 SizeOfGroup)
              (let ((J0 J0)
                    (J2 (fx+ J0 SizeOfGroup)))
                (let ((J1 (fx+ J0 1))
                      (J3 (fx+ J2 1)))
                  (let ((a_J0 (f64vector-ref a J0))
                        (a_J1 (f64vector-ref a J1))
                        (a_J2 (f64vector-ref a J2))
                        (a_J3 (f64vector-ref a J3)))
                    (let ((a_J0 (fl+ a_J0 a_J2))
                          (a_J1 (fl+ a_J1 a_J3))
                          (a_J2 (fl- a_J0 a_J2))
                          (a_J3 (fl- a_J1 a_J3)))
                      (f64vector-set! a J0 a_J0)
                      (f64vector-set! a J1 a_J1)
                      (f64vector-set! a J2 a_J2)
                      (f64vector-set! a J3 a_J3)
                      (loop (fx+ J0 2))))))))))
    (let* ((n (f64vector-length a))
           (log_n (two^p>=m n)))
      ;; there are n/2 complex entries in a; if n/2 is not a power
      ;; of 4, then do a single radix-2 pass and do the rest of
      ;; the passes as radix-4 passes
      (if (fxodd? log_n)
          (recursive-bit 0 n 0 (fxarithmetic-shift-right n 2))
          (let ((n/2 (fxarithmetic-shift-right n 1))
                (n/8 (fxarithmetic-shift-right n 3)))
            (radix-2-pass a)
            (recursive-bit 0 n/2 0 n/8)
            (recursive-bit n/2 n 1 n/8))))))
(define (inverse-fft-recursive-4 a W-table)
;; This is an complex fft, using a decimation-in-frequency algorithm
;; with inputs in bit-reversed order and outputs in natural order.
;; The organization of the algorithm has little to do with the the
associated algorithm on page 41 of and ,
;; I just reversed the operations of the direct algorithm given
above ( without dividing by 2 each time , so that this has to
be " normalized " by dividing by N/2 at the end .
;; The table of "twiddle" factors is in bit-reversed order.
(let ((W (f64vector 0. 0. 0. 0.)))
(define (main-loop M N K SizeOfGroup)
(let inner-loop ((K K)
(JFirst M))
(if (fx< JFirst N)
(let* ((JLast (fx+ JFirst SizeOfGroup)))
(if (fxeven? K)
(begin
(f64vector-set! W 0 (f64vector-ref W-table K))
(f64vector-set! W 1 (f64vector-ref W-table (fx+ K 1))))
(begin
(f64vector-set! W 0 (fl- 0. (f64vector-ref W-table K)))
(f64vector-set! W 1 (f64vector-ref W-table (fx- K 1)))))
(f64vector-set! W 2 (f64vector-ref W-table (fx* K 2)))
(f64vector-set! W 3 (f64vector-ref W-table (fx+ (fx* K 2) 1)))
(let J-loop ((J0 JFirst))
(if (fx< J0 JLast)
(let* ((J0 J0)
(J1 (fx+ J0 1))
(J2 (fx+ J0 SizeOfGroup))
(J3 (fx+ J2 1))
(J4 (fx+ J2 SizeOfGroup))
(J5 (fx+ J4 1))
(J6 (fx+ J4 SizeOfGroup))
(J7 (fx+ J6 1)))
(let ((W_0 (f64vector-ref W 0))
(W_1 (f64vector-ref W 1))
(W_2 (f64vector-ref W 2))
(W_3 (f64vector-ref W 3))
(a_J0 (f64vector-ref a J0))
(a_J1 (f64vector-ref a J1))
(a_J2 (f64vector-ref a J2))
(a_J3 (f64vector-ref a J3))
(a_J4 (f64vector-ref a J4))
(a_J5 (f64vector-ref a J5))
(a_J6 (f64vector-ref a J6))
(a_J7 (f64vector-ref a J7)))
(let ((W_00 W_2)
(W_01 W_3)
(W_02 (fl- 0. W_3))
(W_03 W_2))
(let ((Temp_0 (fl- a_J0 a_J2))
(Temp_1 (fl- a_J1 a_J3))
(Temp_2 (fl- a_J4 a_J6))
(Temp_3 (fl- a_J5 a_J7)))
(let ((a_J0 (fl+ a_J0 a_J2))
(a_J1 (fl+ a_J1 a_J3))
(a_J4 (fl+ a_J4 a_J6))
(a_J5 (fl+ a_J5 a_J7))
(a_J2 (fl+ (fl* W_00 Temp_0)
(fl* W_01 Temp_1)))
(a_J3 (fl- (fl* W_00 Temp_1)
(fl* W_01 Temp_0)))
(a_J6 (fl+ (fl* W_02 Temp_2)
(fl* W_03 Temp_3)))
(a_J7 (fl- (fl* W_02 Temp_3)
(fl* W_03 Temp_2))))
(let ((Temp_0 (fl- a_J0 a_J4))
(Temp_1 (fl- a_J1 a_J5))
(Temp_2 (fl- a_J2 a_J6))
(Temp_3 (fl- a_J3 a_J7)))
(let ((a_J0 (fl+ a_J0 a_J4))
(a_J1 (fl+ a_J1 a_J5))
(a_J2 (fl+ a_J2 a_J6))
(a_J3 (fl+ a_J3 a_J7))
(a_J4 (fl+ (fl* W_0 Temp_0)
(fl* W_1 Temp_1)))
(a_J5 (fl- (fl* W_0 Temp_1)
(fl* W_1 Temp_0)))
(a_J6 (fl+ (fl* W_0 Temp_2)
(fl* W_1 Temp_3)))
(a_J7 (fl- (fl* W_0 Temp_3)
(fl* W_1 Temp_2))))
(f64vector-set! a J0 a_J0)
(f64vector-set! a J1 a_J1)
(f64vector-set! a J2 a_J2)
(f64vector-set! a J3 a_J3)
(f64vector-set! a J4 a_J4)
(f64vector-set! a J5 a_J5)
(f64vector-set! a J6 a_J6)
(f64vector-set! a J7 a_J7)
(J-loop (fx+ J0 2)))))))))
(inner-loop (fx+ K 1)
(fx+ JFirst (fx* SizeOfGroup 4)))))))))
;; Recursively process the segment [M, N) of the f64vector (indices count
;; doubles; each complex occupies two slots).  K selects this level's block
;; of "twiddle" factors — the table is in bit-reversed order, so the four
;; quarter-segments use 4K, 4K+1, 4K+2 and 4K+3.  SizeOfGroup is the
;; current butterfly span; recursion stops once it drops below 2.
;; Segments longer than 2048 doubles are split into quarters first so each
;; piece stays cache-resident before main-loop sweeps this level.
;; NOTE(review): main-loop is defined earlier in the enclosing fft routine,
;; outside this excerpt.
(define (recursive-bit M N K SizeOfGroup)
  (if (fx<= 2 SizeOfGroup)
      (begin
        (if (fx< 2048 (fx- N M))
            ;; large segment: recurse on its four quarters, each with a
            ;; quarter of the butterfly span and its own twiddle block
            (let ((new-size (fxarithmetic-shift-right (fx- N M) 2)))
              (recursive-bit M
                             (fx+ M new-size)
                             (fx* K 4)
                             (fxarithmetic-shift-right SizeOfGroup 2))
              (recursive-bit (fx+ M new-size)
                             (fx+ M (fx* new-size 2))
                             (fx+ (fx* K 4) 1)
                             (fxarithmetic-shift-right SizeOfGroup 2))
              (recursive-bit (fx+ M (fx* new-size 2))
                             (fx+ M (fx* new-size 3))
                             (fx+ (fx* K 4) 2)
                             (fxarithmetic-shift-right SizeOfGroup 2))
              (recursive-bit (fx+ M (fx* new-size 3))
                             N
                             (fx+ (fx* K 4) 3)
                             (fxarithmetic-shift-right SizeOfGroup 2)))
            ;; small segment: no further splitting, just descend one level
            (recursive-bit M
                           N
                           (fx* K 4)
                           (fxarithmetic-shift-right SizeOfGroup 2)))
        ;; after the deeper levels, perform this level's pass over [M, N)
        (main-loop M N K SizeOfGroup))))
;; Final radix-2 pass over the whole vector: one unit-twiddle butterfly
;; per pair of complexes (J0,J1) and (J2,J3) lying SizeOfGroup doubles
;; apart.  Invoked by the driver below only when log2 of the length is
;; even, after two half-size radix-4 transforms.
(define (radix-2-pass a)
  (let ((SizeOfGroup
         (fxarithmetic-shift-right (f64vector-length a) 1)))
    (let loop ((J0 0))
      (if (fx< J0 SizeOfGroup)
          (let ((J0 J0)
                (J2 (fx+ J0 SizeOfGroup)))
            (let ((J1 (fx+ J0 1))
                  (J3 (fx+ J2 1)))
              ;; load both complexes: (a_J0,a_J1) and (a_J2,a_J3) are the
              ;; real/imaginary parts
              (let ((a_J0 (f64vector-ref a J0))
                    (a_J1 (f64vector-ref a J1))
                    (a_J2 (f64vector-ref a J2))
                    (a_J3 (f64vector-ref a J3)))
                ;; butterfly: sum and difference.  This is a parallel
                ;; `let`, so the difference bindings read the *old*
                ;; a_J0/a_J1 — do not rewrite as let*.
                (let ((a_J0 (fl+ a_J0 a_J2))
                      (a_J1 (fl+ a_J1 a_J3))
                      (a_J2 (fl- a_J0 a_J2))
                      (a_J3 (fl- a_J1 a_J3)))
                  (f64vector-set! a J0 a_J0)
                  (f64vector-set! a J1 a_J1)
                  (f64vector-set! a J2 a_J2)
                  (f64vector-set! a J3 a_J3)
                  ;; advance by one complex (two doubles)
                  (loop (fx+ J0 2))))))))))
;; Body of the enclosing fft routine (its header is above this excerpt;
;; `a` is presumably its input vector — confirm against the full file).
;; Dispatch on log2 of the double-vector length: an odd log2 means the
;; transform decomposes entirely into radix-4 passes; an even log2 runs
;; two half-size radix-4 transforms followed by one radix-2 pass.
(let* ((n (f64vector-length a))
       (log_n (two^p>=m n)))
  (if (fxodd? log_n)
      (recursive-bit 0 n 0 (fxarithmetic-shift-right n 2))
      (let ((n/2 (fxarithmetic-shift-right n 1))
            (n/8 (fxarithmetic-shift-right n 3)))
        (recursive-bit 0 n/2 0 n/8)
        (recursive-bit n/2 n 1 n/8)
        (radix-2-pass a))))))
;; (two^p>=m m) returns the smallest non-negative p with 2^p >= m.
;; Assumes m is a fixnum >= 0.  The fft driver uses it to recover log2 of
;; the vector length.  (The inner comment below had lost its `;` marker in
;; this copy, which made the body invalid Scheme — restored.)
(define (two^p>=m m)
  ;; returns smallest p, assumes fixnum m >= 0
  (do ((p 0 (fx+ p 1))
       (two^p 1 (fx* two^p 2)))
      ((fx<= m two^p) p)))
;; Benchmark driver: build the twiddle table for size-2^n transforms and
;; run `iters` forward/inverse fft round trips over a zero-filled vector
;; of 2^n complexes (2^(n+1) doubles).  make-w, direct-fft-recursive-4
;; and inverse-fft-recursive-4 are defined elsewhere in this file.
;; (The debug line below had lost its `;` marker in this copy, leaving
;; invalid code — restored as the comment it originally was.)
(define (test iters n)
  (let ((two^n
         (expt 2 n))
        (table
         (make-w (fx- n 1))))
    ;(display (fx* two^n 2))(newline)
    (let ((a
           (make-f64vector (fx* two^n 2) 0.)))
      (do ((i 0 (fx+ i 1)))
          ((fx= i iters)
           ;(write table) (newline)
           )
        (direct-fft-recursive-4 a table)
        (inverse-fft-recursive-4 a table)))))
;; Entry point.  Under CHICKEN, iteration count and log2 size may be
;; given on the command line (let-optionals receives them as strings,
;; defaulting to "2000" and "11", then converts them); on any other
;; implementation the fixed benchmark (2000 iterations, n = 11) runs.
(cond-expand
  (chicken
   (let-optionals (command-line-arguments)
       ((iters "2000")
        (n "11"))
     (test (string->number iters) (string->number n))))
  (else (test 2000 11)))
| null | https://raw.githubusercontent.com/kdltr/chicken-core/b2e6c5243dd469064bec947cb3b49dafaa1514e5/tests/fft.scm | scheme | All the following redefinitions are *ignored* by the Gambit compiler
because of the declarations above.
end of *ignored* definitions
number of complexes
we copy complex multiples of all entries below
start to entries starting at start
This is a direcct complex fft, using a decimation-in-time
algorithm with inputs in natural order and outputs in
bit-reversed order. The table of "twiddle" factors is in
bit-reversed order.
combined passes in pairs to cut the number of passes through
the vector a
table depending on whether K is even or not
first we do the (overlapping) pairs of
apart.
of butterflies distance SizeOfGroup
we rewrite the multipliers so I
don't hurt my head too much when
thinking about them.
If we're here, the size of our (conceptually complex)
if is not a power
the passes as radix-4 passes
This is an complex fft, using a decimation-in-frequency algorithm
with inputs in bit-reversed order and outputs in natural order.
The organization of the algorithm has little to do with the the
I just reversed the operations of the direct algorithm given
The table of "twiddle" factors is in bit-reversed order.
(write table) (newline) | fft.scm - fft benchmark , by
(cond-expand
disable in CHICKEN to test specialization in safe mode
(declare
(standard-bindings)
(extended-bindings)
(block)
(not safe)))
(else
(import (chicken bitwise)
(chicken fixnum)
(chicken flonum)
(chicken process-context))))
(define-syntax defalias
(syntax-rules ()
((_ one two)
(define-syntax one
(syntax-rules ()
((_ . args) (two . args)))))))
(cond-expand
(generic
(begin
(defalias fixnum->flonum exact->inexact)
(defalias fxodd? odd?)
(defalias fxeven? even?)
(defalias fxarithmetic-shift-right fxshr)
(defalias fxarithmetic-shift-left fxshl)
(defalias fl* *)
(defalias fl/ /)
(defalias fl+ +)
(defalias fl- -)))
(chicken
(begin
(defalias fixnum->flonum exact->inexact)
(defalias fxodd? odd?)
(defalias fxeven? even?)
(defalias fxarithmetic-shift-right fxshr)
(defalias fxarithmetic-shift-left fxshl)
(defalias fl* fp*)
(defalias fl/ fp/)
(defalias fl+ fp+)
(defalias fl- fp-)))
(else))
(cond-expand
((and chicken (not unboxed))
(begin
(defalias make-f64vector make-vector)
(defalias f64vector vector)
(defalias f64vector-set! vector-set!)
(defalias f64vector-ref vector-ref)
(defalias list->f64vector list->vector)
(defalias f64vector-length vector-length)) )
(chicken
(import srfi-4))
(else) )
(define lut-table-size 512)
(define lut-table-size^2 262144)
(define lut-table-size^3 134217728)
(define log-lut-table-size 9)
(define low-lut
(list->f64vector '(1. 0.
.7071067811865476 .7071067811865476
.9238795325112867 .3826834323650898
.3826834323650898 .9238795325112867
.9807852804032304 .19509032201612828
.5555702330196022 .8314696123025452
.8314696123025452 .5555702330196022
.19509032201612828 .9807852804032304
.9951847266721969 .0980171403295606
.6343932841636455 .773010453362737
.881921264348355 .47139673682599764
.2902846772544624 .9569403357322088
.9569403357322088 .2902846772544624
.47139673682599764 .881921264348355
.773010453362737 .6343932841636455
.0980171403295606 .9951847266721969
.9987954562051724 .049067674327418015
.6715589548470184 .7409511253549591
.9039892931234433 .4275550934302821
.33688985339222005 .9415440651830208
.970031253194544 .2429801799032639
.5141027441932218 .8577286100002721
.8032075314806449 .5956993044924334
.14673047445536175 .989176509964781
.989176509964781 .14673047445536175
.5956993044924334 .8032075314806449
.8577286100002721 .5141027441932218
.2429801799032639 .970031253194544
.9415440651830208 .33688985339222005
.4275550934302821 .9039892931234433
.7409511253549591 .6715589548470184
.049067674327418015 .9987954562051724
.9996988186962042 .024541228522912288
.6895405447370669 .7242470829514669
.9142097557035307 .40524131400498986
.35989503653498817 .9329927988347388
.9757021300385286 .2191012401568698
.5349976198870973 .8448535652497071
.8175848131515837 .5758081914178453
.17096188876030122 .9852776423889412
.99247953459871 .1224106751992162
.6152315905806268 .7883464276266062
.8700869911087115 .49289819222978404
.26671275747489837 .9637760657954398
.9495281805930367 .31368174039889146
.4496113296546066 .8932243011955153
.7572088465064846 .6531728429537768
.07356456359966743 .9972904566786902
.9972904566786902 .07356456359966743
.6531728429537768 .7572088465064846
.8932243011955153 .4496113296546066
.31368174039889146 .9495281805930367
.9637760657954398 .26671275747489837
.49289819222978404 .8700869911087115
.7883464276266062 .6152315905806268
.1224106751992162 .99247953459871
.9852776423889412 .17096188876030122
.5758081914178453 .8175848131515837
.8448535652497071 .5349976198870973
.2191012401568698 .9757021300385286
.9329927988347388 .35989503653498817
.40524131400498986 .9142097557035307
.7242470829514669 .6895405447370669
.024541228522912288 .9996988186962042
.9999247018391445 .012271538285719925
.6983762494089728 .7157308252838187
.9191138516900578 .3939920400610481
.37131719395183754 .9285060804732156
.9783173707196277 .20711137619221856
.5453249884220465 .8382247055548381
.8245893027850253 .5657318107836132
.18303988795514095 .9831054874312163
.9939069700023561 .11022220729388306
.6248594881423863 .7807372285720945
.8760700941954066 .4821837720791228
.2785196893850531 .9604305194155658
.9533060403541939 .3020059493192281
.46053871095824 .8876396204028539
.765167265622459 .6438315428897915
.0857973123444399 .996312612182778
.9981181129001492 .06132073630220858
.6624157775901718 .7491363945234594
.8986744656939538 .43861623853852766
.3253102921622629 .9456073253805213
.9669764710448521 .25486565960451457
.5035383837257176 .8639728561215867
.7958369046088836 .6055110414043255
.1345807085071262 .99090263542778
.9873014181578584 .15885814333386145
.5857978574564389 .8104571982525948
.8513551931052652 .524589682678469
.2310581082806711 .9729399522055602
.937339011912575 .34841868024943456
.4164295600976372 .9091679830905224
.7326542716724128 .680600997795453
.03680722294135883 .9993223845883495
.9993223845883495 .03680722294135883
.680600997795453 .7326542716724128
.9091679830905224 .4164295600976372
.34841868024943456 .937339011912575
.9729399522055602 .2310581082806711
.524589682678469 .8513551931052652
.8104571982525948 .5857978574564389
.15885814333386145 .9873014181578584
.99090263542778 .1345807085071262
.6055110414043255 .7958369046088836
.8639728561215867 .5035383837257176
.25486565960451457 .9669764710448521
.9456073253805213 .3253102921622629
.43861623853852766 .8986744656939538
.7491363945234594 .6624157775901718
.06132073630220858 .9981181129001492
.996312612182778 .0857973123444399
.6438315428897915 .765167265622459
.8876396204028539 .46053871095824
.3020059493192281 .9533060403541939
.9604305194155658 .2785196893850531
.4821837720791228 .8760700941954066
.7807372285720945 .6248594881423863
.11022220729388306 .9939069700023561
.9831054874312163 .18303988795514095
.5657318107836132 .8245893027850253
.8382247055548381 .5453249884220465
.20711137619221856 .9783173707196277
.9285060804732156 .37131719395183754
.3939920400610481 .9191138516900578
.7157308252838187 .6983762494089728
.012271538285719925 .9999247018391445
.9999811752826011 .006135884649154475
.7027547444572253 .7114321957452164
.9215140393420419 .3883450466988263
.37700741021641826 .9262102421383114
.9795697656854405 .2011046348420919
.5504579729366048 .83486287498638
.8280450452577558 .560661576197336
.18906866414980622 .9819638691095552
.9945645707342554 .10412163387205457
.629638238914927 .7768884656732324
.8790122264286335 .47679923006332214
.2844075372112718 .9587034748958716
.9551411683057707 .29615088824362384
.4659764957679662 .8847970984309378
.7691033376455796 .6391244448637757
.09190895649713272 .9957674144676598
.9984755805732948 .05519524434968994
.6669999223036375 .745057785441466
.901348847046022 .43309381885315196
.33110630575987643 .9435934581619604
.9685220942744173 .24892760574572018
.508830142543107 .8608669386377673
.799537269107905 .600616479383869
.14065823933284924 .9900582102622971
.9882575677307495 .15279718525844344
.5907597018588743 .8068475535437992
.8545579883654005 .5193559901655896
.2370236059943672 .9715038909862518
.9394592236021899 .3426607173119944
.4220002707997997 .9065957045149153
.7368165688773699 .6760927035753159
.04293825693494082 .9990777277526454
.9995294175010931 .030674803176636626
.6850836677727004 .7284643904482252
.9117060320054299 .41084317105790397
.3541635254204904 .9351835099389476
.9743393827855759 .22508391135979283
.5298036246862947 .8481203448032972
.8140363297059484 .5808139580957645
.16491312048996992 .9863080972445987
.9917097536690995 .12849811079379317
.6103828062763095 .7921065773002124
.8670462455156926 .49822766697278187
.2607941179152755 .9653944416976894
.9475855910177411 .3195020308160157
.44412214457042926 .8959662497561851
.7531867990436125 .6578066932970786
.06744391956366406 .9977230666441916
.9968202992911657 .07968243797143013
.6485144010221124 .7612023854842618
.8904487232447579 .45508358712634384
.30784964004153487 .9514350209690083
.9621214042690416 .272621355449949
.48755016014843594 .8730949784182901
.7845565971555752 .6200572117632892
.11631863091190477 .9932119492347945
.984210092386929 .17700422041214875
.5707807458869673 .8211025149911046
.8415549774368984 .5401714727298929
.21311031991609136 .9770281426577544
.9307669610789837 .36561299780477385
.39962419984564684 .9166790599210427
.7200025079613817 .693971460889654
.01840672990580482 .9998305817958234
.9998305817958234 .01840672990580482
.693971460889654 .7200025079613817
.9166790599210427 .39962419984564684
.36561299780477385 .9307669610789837
.9770281426577544 .21311031991609136
.5401714727298929 .8415549774368984
.8211025149911046 .5707807458869673
.17700422041214875 .984210092386929
.9932119492347945 .11631863091190477
.6200572117632892 .7845565971555752
.8730949784182901 .48755016014843594
.272621355449949 .9621214042690416
.9514350209690083 .30784964004153487
.45508358712634384 .8904487232447579
.7612023854842618 .6485144010221124
.07968243797143013 .9968202992911657
.9977230666441916 .06744391956366406
.6578066932970786 .7531867990436125
.8959662497561851 .44412214457042926
.3195020308160157 .9475855910177411
.9653944416976894 .2607941179152755
.49822766697278187 .8670462455156926
.7921065773002124 .6103828062763095
.12849811079379317 .9917097536690995
.9863080972445987 .16491312048996992
.5808139580957645 .8140363297059484
.8481203448032972 .5298036246862947
.22508391135979283 .9743393827855759
.9351835099389476 .3541635254204904
.41084317105790397 .9117060320054299
.7284643904482252 .6850836677727004
.030674803176636626 .9995294175010931
.9990777277526454 .04293825693494082
.6760927035753159 .7368165688773699
.9065957045149153 .4220002707997997
.3426607173119944 .9394592236021899
.9715038909862518 .2370236059943672
.5193559901655896 .8545579883654005
.8068475535437992 .5907597018588743
.15279718525844344 .9882575677307495
.9900582102622971 .14065823933284924
.600616479383869 .799537269107905
.8608669386377673 .508830142543107
.24892760574572018 .9685220942744173
.9435934581619604 .33110630575987643
.43309381885315196 .901348847046022
.745057785441466 .6669999223036375
.05519524434968994 .9984755805732948
.9957674144676598 .09190895649713272
.6391244448637757 .7691033376455796
.8847970984309378 .4659764957679662
.29615088824362384 .9551411683057707
.9587034748958716 .2844075372112718
.47679923006332214 .8790122264286335
.7768884656732324 .629638238914927
.10412163387205457 .9945645707342554
.9819638691095552 .18906866414980622
.560661576197336 .8280450452577558
.83486287498638 .5504579729366048
.2011046348420919 .9795697656854405
.9262102421383114 .37700741021641826
.3883450466988263 .9215140393420419
.7114321957452164 .7027547444572253
.006135884649154475 .9999811752826011
.9999952938095762 .003067956762965976
.7049340803759049 .7092728264388657
.9227011283338785 .38551605384391885
.37984720892405116 .9250492407826776
.9801821359681174 .1980984107179536
.5530167055800276 .8331701647019132
.829761233794523 .5581185312205561
.19208039704989244 .9813791933137546
.9948793307948056 .10106986275482782
.6320187359398091 .7749531065948739
.8804708890521608 .47410021465055
.2873474595447295 .9578264130275329
.9560452513499964 .29321916269425863
.46868882203582796 .8833633386657316
.7710605242618138 .6367618612362842
.094963495329639 .9954807554919269
.9986402181802653 .052131704680283324
.6692825883466361 .7430079521351217
.9026733182372588 .4303264813400826
.3339996514420094 .9425731976014469
.9692812353565485 .24595505033579462
.5114688504379704 .8593018183570084
.8013761717231402 .5981607069963423
.14369503315029444 .9896220174632009
.9887216919603238 .1497645346773215
.5932322950397998 .8050313311429635
.8561473283751945 .5167317990176499
.2400030224487415 .9707721407289504
.9405060705932683 .33977688440682685
.4247796812091088 .9052967593181188
.7388873244606151 .673829000378756
.04600318213091463 .9989412931868569
.9996188224951786 .027608145778965743
.6873153408917592 .726359155084346
.9129621904283982 .4080441628649787
.35703096123343003 .9340925504042589
.9750253450669941 .22209362097320354
.532403127877198 .8464909387740521
.8158144108067338 .5783137964116556
.16793829497473117 .9857975091675675
.9920993131421918 .12545498341154623
.6128100824294097 .79023022143731
.8685707059713409 .49556526182577254
.2637546789748314 .9645897932898128
.9485613499157303 .31659337555616585
.4468688401623742 .8945994856313827
.7552013768965365 .6554928529996153
.07050457338961387 .9975114561403035
.997060070339483 .07662386139203149
.6508466849963809 .7592091889783881
.8918407093923427 .4523495872337709
.3107671527496115 .9504860739494817
.9629532668736839 .2696683255729151
.49022648328829116 .8715950866559511
.7864552135990858 .617647307937804
.11936521481099137 .9928504144598651
.9847485018019042 .17398387338746382
.5732971666980422 .819347520076797
.8432082396418454 .5375870762956455
.21610679707621952 .9763697313300211
.9318842655816681 .3627557243673972
.40243465085941843 .9154487160882678
.7221281939292153 .6917592583641577
.021474080275469508 .9997694053512153
.9998823474542126 .015339206284988102
.696177131491463 .7178700450557317
.9179007756213905 .3968099874167103
.3684668299533723 .9296408958431812
.9776773578245099 .2101118368804696
.5427507848645159 .8398937941959995
.8228497813758263 .5682589526701316
.18002290140569951 .9836624192117303
.9935641355205953 .11327095217756435
.62246127937415 .7826505961665757
.8745866522781761 .4848692480007911
.27557181931095814 .9612804858113206
.9523750127197659 .30492922973540243
.45781330359887723 .8890483558546646
.7631884172633813 .6461760129833164
.08274026454937569 .9965711457905548
.997925286198596 .06438263092985747
.6601143420674205 .7511651319096864
.8973245807054183 .44137126873171667
.32240767880106985 .9466009130832835
.9661900034454125 .257831102162159
.5008853826112408 .8655136240905691
.7939754775543372 .6079497849677736
.13154002870288312 .9913108598461154
.9868094018141855 .16188639378011183
.5833086529376983 .8122505865852039
.8497417680008524 .5271991347819014
.22807208317088573 .973644249650812
.9362656671702783 .35129275608556715
.41363831223843456 .9104412922580672
.7305627692278276 .6828455463852481
.03374117185137759 .9994306045554617
.9992047586183639 .03987292758773981
.6783500431298615 .7347388780959635
.9078861164876663 .41921688836322396
.34554132496398904 .9384035340631081
.9722264970789363 .23404195858354343
.5219752929371544 .8529606049303636
.808656181588175 .5882815482226453
.15582839765426523 .9877841416445722
.9904850842564571 .13762012158648604
.6030665985403482 .7976908409433912
.8624239561110405 .5061866453451553
.25189781815421697 .9677538370934755
.9446048372614803 .32820984357909255
.4358570799222555 .9000158920161603
.7471006059801801 .6647109782033449
.05825826450043576 .9983015449338929
.996044700901252 .0888535525825246
.6414810128085832 .7671389119358204
.8862225301488806 .4632597835518602
.2990798263080405 .9542280951091057
.9595715130819845 .281464937925758
.479493757660153 .8775452902072612
.778816512381476 .6272518154951441
.10717242495680884 .9942404494531879
.9825393022874412 .18605515166344666
.5631993440138341 .8263210628456635
.836547727223512 .5478940591731002
.20410896609281687 .9789481753190622
.9273625256504011 .374164062971458
.39117038430225387 .9203182767091106
.7135848687807936 .7005687939432483
.00920375478205982 .9999576445519639
.9999576445519639 .00920375478205982
.7005687939432483 .7135848687807936
.9203182767091106 .39117038430225387
.374164062971458 .9273625256504011
.9789481753190622 .20410896609281687
.5478940591731002 .836547727223512
.8263210628456635 .5631993440138341
.18605515166344666 .9825393022874412
.9942404494531879 .10717242495680884
.6272518154951441 .778816512381476
.8775452902072612 .479493757660153
.281464937925758 .9595715130819845
.9542280951091057 .2990798263080405
.4632597835518602 .8862225301488806
.7671389119358204 .6414810128085832
.0888535525825246 .996044700901252
.9983015449338929 .05825826450043576
.6647109782033449 .7471006059801801
.9000158920161603 .4358570799222555
.32820984357909255 .9446048372614803
.9677538370934755 .25189781815421697
.5061866453451553 .8624239561110405
.7976908409433912 .6030665985403482
.13762012158648604 .9904850842564571
.9877841416445722 .15582839765426523
.5882815482226453 .808656181588175
.8529606049303636 .5219752929371544
.23404195858354343 .9722264970789363
.9384035340631081 .34554132496398904
.41921688836322396 .9078861164876663
.7347388780959635 .6783500431298615
.03987292758773981 .9992047586183639
.9994306045554617 .03374117185137759
.6828455463852481 .7305627692278276
.9104412922580672 .41363831223843456
.35129275608556715 .9362656671702783
.973644249650812 .22807208317088573
.5271991347819014 .8497417680008524
.8122505865852039 .5833086529376983
.16188639378011183 .9868094018141855
.9913108598461154 .13154002870288312
.6079497849677736 .7939754775543372
.8655136240905691 .5008853826112408
.257831102162159 .9661900034454125
.9466009130832835 .32240767880106985
.44137126873171667 .8973245807054183
.7511651319096864 .6601143420674205
.06438263092985747 .997925286198596
.9965711457905548 .08274026454937569
.6461760129833164 .7631884172633813
.8890483558546646 .45781330359887723
.30492922973540243 .9523750127197659
.9612804858113206 .27557181931095814
.4848692480007911 .8745866522781761
.7826505961665757 .62246127937415
.11327095217756435 .9935641355205953
.9836624192117303 .18002290140569951
.5682589526701316 .8228497813758263
.8398937941959995 .5427507848645159
.2101118368804696 .9776773578245099
.9296408958431812 .3684668299533723
.3968099874167103 .9179007756213905
.7178700450557317 .696177131491463
.015339206284988102 .9998823474542126
.9997694053512153 .021474080275469508
.6917592583641577 .7221281939292153
.9154487160882678 .40243465085941843
.3627557243673972 .9318842655816681
.9763697313300211 .21610679707621952
.5375870762956455 .8432082396418454
.819347520076797 .5732971666980422
.17398387338746382 .9847485018019042
.9928504144598651 .11936521481099137
.617647307937804 .7864552135990858
.8715950866559511 .49022648328829116
.2696683255729151 .9629532668736839
.9504860739494817 .3107671527496115
.4523495872337709 .8918407093923427
.7592091889783881 .6508466849963809
.07662386139203149 .997060070339483
.9975114561403035 .07050457338961387
.6554928529996153 .7552013768965365
.8945994856313827 .4468688401623742
.31659337555616585 .9485613499157303
.9645897932898128 .2637546789748314
.49556526182577254 .8685707059713409
.79023022143731 .6128100824294097
.12545498341154623 .9920993131421918
.9857975091675675 .16793829497473117
.5783137964116556 .8158144108067338
.8464909387740521 .532403127877198
.22209362097320354 .9750253450669941
.9340925504042589 .35703096123343003
.4080441628649787 .9129621904283982
.726359155084346 .6873153408917592
.027608145778965743 .9996188224951786
.9989412931868569 .04600318213091463
.673829000378756 .7388873244606151
.9052967593181188 .4247796812091088
.33977688440682685 .9405060705932683
.9707721407289504 .2400030224487415
.5167317990176499 .8561473283751945
.8050313311429635 .5932322950397998
.1497645346773215 .9887216919603238
.9896220174632009 .14369503315029444
.5981607069963423 .8013761717231402
.8593018183570084 .5114688504379704
.24595505033579462 .9692812353565485
.9425731976014469 .3339996514420094
.4303264813400826 .9026733182372588
.7430079521351217 .6692825883466361
.052131704680283324 .9986402181802653
.9954807554919269 .094963495329639
.6367618612362842 .7710605242618138
.8833633386657316 .46868882203582796
.29321916269425863 .9560452513499964
.9578264130275329 .2873474595447295
.47410021465055 .8804708890521608
.7749531065948739 .6320187359398091
.10106986275482782 .9948793307948056
.9813791933137546 .19208039704989244
.5581185312205561 .829761233794523
.8331701647019132 .5530167055800276
.1980984107179536 .9801821359681174
.9250492407826776 .37984720892405116
.38551605384391885 .9227011283338785
.7092728264388657 .7049340803759049
.003067956762965976 .9999952938095762
)))
(define med-lut
(list->f64vector '(1. 0.
.9999999999820472 5.9921124526424275e-6
.9999999999281892 1.1984224905069707e-5
.9999999998384257 1.7976337357066685e-5
.9999999997127567 2.396844980841822e-5
.9999999995511824 2.9960562258909154e-5
.9999999993537025 3.5952674708324344e-5
.9999999991203175 4.1944787156448635e-5
.9999999988510269 4.793689960306688e-5
.9999999985458309 5.3929012047963936e-5
.9999999982047294 5.992112449092465e-5
.9999999978277226 6.591323693173387e-5
.9999999974148104 7.190534937017645e-5
.9999999969659927 7.789746180603723e-5
.9999999964812697 8.388957423910108e-5
.9999999959606412 8.988168666915283e-5
.9999999954041073 9.587379909597734e-5
.999999994811668 1.0186591151935948e-4
.9999999941833233 1.0785802393908407e-4
.9999999935190732 1.1385013635493597e-4
.9999999928189177 1.1984224876670004e-4
.9999999920828567 1.2583436117416112e-4
.9999999913108903 1.3182647357710405e-4
.9999999905030187 1.3781858597531374e-4
.9999999896592414 1.4381069836857496e-4
.9999999887795589 1.498028107566726e-4
.9999999878639709 1.5579492313939151e-4
.9999999869124775 1.6178703551651655e-4
.9999999859250787 1.6777914788783258e-4
.9999999849017744 1.737712602531244e-4
.9999999838425648 1.797633726121769e-4
.9999999827474497 1.8575548496477492e-4
.9999999816164293 1.9174759731070332e-4
.9999999804495034 1.9773970964974692e-4
.9999999792466722 2.037318219816906e-4
.9999999780079355 2.0972393430631923e-4
.9999999767332933 2.1571604662341763e-4
.9999999754227459 2.2170815893277063e-4
.9999999740762929 2.2770027123416315e-4
.9999999726939346 2.3369238352737996e-4
.9999999712756709 2.3968449581220595e-4
.9999999698215016 2.45676608088426e-4
.9999999683314271 2.5166872035582493e-4
.9999999668054471 2.5766083261418755e-4
.9999999652435617 2.636529448632988e-4
.9999999636457709 2.696450571029434e-4
.9999999620120748 2.756371693329064e-4
.9999999603424731 2.8162928155297243e-4
.9999999586369661 2.876213937629265e-4
.9999999568955537 2.936135059625534e-4
.9999999551182358 2.99605618151638e-4
.9999999533050126 3.055977303299651e-4
.9999999514558838 3.115898424973196e-4
.9999999495708498 3.1758195465348636e-4
.9999999476499103 3.235740667982502e-4
.9999999456930654 3.2956617893139595e-4
.9999999437003151 3.3555829105270853e-4
.9999999416716594 3.4155040316197275e-4
.9999999396070982 3.475425152589734e-4
.9999999375066316 3.535346273434955e-4
.9999999353702598 3.595267394153237e-4
.9999999331979824 3.6551885147424295e-4
.9999999309897996 3.7151096352003814e-4
.9999999287457114 3.7750307555249406e-4
.9999999264657179 3.8349518757139556e-4
.9999999241498189 3.8948729957652753e-4
.9999999217980144 3.954794115676748e-4
.9999999194103046 4.0147152354462224e-4
.9999999169866894 4.0746363550715466e-4
.9999999145271687 4.134557474550569e-4
.9999999120317428 4.194478593881139e-4
.9999999095004113 4.2543997130611036e-4
.9999999069331744 4.314320832088313e-4
.9999999043300322 4.3742419509606144e-4
.9999999016909845 4.4341630696758576e-4
.9999998990160315 4.4940841882318896e-4
.9999998963051729 4.55400530662656e-4
.999999893558409 4.613926424857717e-4
.9999998907757398 4.673847542923209e-4
.9999998879571651 4.7337686608208844e-4
.9999998851026849 4.793689778548592e-4
.9999998822122994 4.8536108961041806e-4
.9999998792860085 4.913532013485497e-4
.9999998763238122 4.973453130690393e-4
.9999998733257104 5.033374247716714e-4
.9999998702917032 5.09329536456231e-4
.9999998672217907 5.153216481225028e-4
.9999998641159727 5.213137597702719e-4
.9999998609742493 5.27305871399323e-4
.9999998577966206 5.332979830094408e-4
.9999998545830864 5.392900946004105e-4
.9999998513336468 5.452822061720168e-4
.9999998480483018 5.512743177240444e-4
.9999998447270514 5.572664292562783e-4
.9999998413698955 5.632585407685033e-4
.9999998379768343 5.692506522605043e-4
.9999998345478677 5.752427637320661e-4
.9999998310829956 5.812348751829735e-4
.9999998275822183 5.872269866130116e-4
.9999998240455354 5.93219098021965e-4
.9999998204729471 5.992112094096185e-4
.9999998168644535 6.052033207757572e-4
.9999998132200545 6.111954321201659e-4
.99999980953975 6.171875434426292e-4
.9999998058235401 6.231796547429323e-4
.9999998020714248 6.291717660208597e-4
.9999997982834041 6.351638772761965e-4
.9999997944594781 6.411559885087275e-4
.9999997905996466 6.471480997182375e-4
.9999997867039097 6.531402109045114e-4
.9999997827722674 6.591323220673341e-4
.9999997788047197 6.651244332064902e-4
.9999997748012666 6.711165443217649e-4
.9999997707619082 6.771086554129428e-4
.9999997666866443 6.83100766479809e-4
.9999997625754748 6.89092877522148e-4
.9999997584284002 6.950849885397449e-4
.9999997542454201 7.010770995323844e-4
.9999997500265345 7.070692104998515e-4
.9999997457717437 7.130613214419311e-4
.9999997414810473 7.190534323584079e-4
.9999997371544456 7.250455432490666e-4
.9999997327919384 7.310376541136925e-4
.9999997283935259 7.3702976495207e-4
.999999723959208 7.430218757639842e-4
.9999997194889846 7.490139865492199e-4
.9999997149828559 7.55006097307562e-4
.9999997104408218 7.609982080387952e-4
.9999997058628822 7.669903187427045e-4
.9999997012490373 7.729824294190747e-4
.9999996965992869 7.789745400676906e-4
.9999996919136313 7.849666506883372e-4
.99999968719207 7.909587612807992e-4
.9999996824346035 7.969508718448614e-4
.9999996776412315 8.029429823803089e-4
.9999996728119542 8.089350928869263e-4
.9999996679467715 8.149272033644986e-4
.9999996630456833 8.209193138128106e-4
.9999996581086897 8.269114242316472e-4
.9999996531357909 8.329035346207931e-4
.9999996481269865 8.388956449800333e-4
.9999996430822767 8.448877553091527e-4
.9999996380016616 8.508798656079359e-4
.999999632885141 8.56871975876168e-4
.9999996277327151 8.628640861136338e-4
.9999996225443838 8.68856196320118e-4
.9999996173201471 8.748483064954056e-4
.999999612060005 8.808404166392814e-4
.9999996067639574 8.868325267515304e-4
.9999996014320045 8.928246368319371e-4
.9999995960641462 8.988167468802867e-4
.9999995906603825 9.048088568963639e-4
.9999995852207133 9.108009668799535e-4
.9999995797451389 9.167930768308405e-4
.9999995742336589 9.227851867488095e-4
.9999995686862736 9.287772966336457e-4
.9999995631029829 9.347694064851338e-4
.9999995574837868 9.407615163030585e-4
.9999995518286853 9.467536260872047e-4
.9999995461376784 9.527457358373575e-4
.9999995404107661 9.587378455533015e-4
.9999995346479484 9.647299552348216e-4
.9999995288492254 9.707220648817027e-4
.9999995230145969 9.767141744937296e-4
.9999995171440631 9.827062840706872e-4
.9999995112376238 9.886983936123602e-4
.9999995052952791 9.946905031185337e-4
.9999994993170291 .0010006826125889925
.9999994933028736 .0010066747220235214
.9999994872528128 .001012666831421905
.9999994811668466 .0010186589407839286
.999999475044975 .0010246510501093766
.9999994688871979 .0010306431593980344
.9999994626935156 .0010366352686496862
.9999994564639277 .0010426273778641173
.9999994501984345 .0010486194870411127
.999999443897036 .0010546115961804568
.999999437559732 .0010606037052819344
.9999994311865227 .0010665958143453308
.9999994247774079 .0010725879233704307
.9999994183323877 .0010785800323570187
.9999994118514622 .0010845721413048801
.9999994053346313 .0010905642502137994
.9999993987818949 .0010965563590835613
.9999993921932533 .0011025484679139511
.9999993855687062 .0011085405767047535
.9999993789082536 .0011145326854557532
.9999993722118957 .001120524794166735
.9999993654796325 .0011265169028374842
.9999993587114638 .0011325090114677853
.9999993519073898 .001138501120057423
.9999993450674104 .0011444932286061825
.9999993381915255 .0011504853371138485
.9999993312797354 .0011564774455802057
.9999993243320398 .0011624695540050393
.9999993173484387 .001168461662388134
.9999993103289324 .0011744537707292742
.9999993032735206 .0011804458790282454
.9999992961822035 .0011864379872848323
.9999992890549809 .0011924300954988195
.999999281891853 .001198422203669992
.9999992746928197 .0012044143117981348
.999999267457881 .0012104064198830327
.999999260187037 .0012163985279244702
.9999992528802875 .0012223906359222325
.9999992455376326 .0012283827438761045
.9999992381590724 .0012343748517858707
.9999992307446068 .0012403669596513162
.9999992232942359 .001246359067472226
.9999992158079595 .0012523511752483847
.9999992082857777 .001258343282979577
.9999992007276906 .001264335390665588
.999999193133698 .0012703274983062026
.9999991855038001 .0012763196059012057
.9999991778379967 .001282311713450382
.9999991701362881 .0012883038209535163
.999999162398674 .0012942959284103935
.9999991546251547 .0013002880358207985
.9999991468157298 .001306280143184516
.9999991389703996 .001312272250501331
.999999131089164 .0013182643577710285
.999999123172023 .0013242564649933932
.9999991152189767 .0013302485721682098
.9999991072300249 .001336240679295263
.9999990992051678 .0013422327863743383
.9999990911444054 .0013482248934052201
.9999990830477375 .0013542170003876934
.9999990749151643 .001360209107321543
.9999990667466857 .0013662012142065536
.9999990585423016 .0013721933210425101
.9999990503020123 .0013781854278291975
.9999990420258176 .0013841775345664006
.9999990337137175 .0013901696412539043
.999999025365712 .0013961617478914935
.999999016981801 .0014021538544789526
.9999990085619848 .001408145961016067
.9999990001062631 .0014141380675026214
.9999989916146361 .0014201301739384005
.9999989830871038 .0014261222803231893
.9999989745236659 .0014321143866567725
.9999989659243228 .001438106492938935
.9999989572890743 .0014440985991694619
.9999989486179204 .0014500907053481378
.9999989399108612 .0014560828114747475
.9999989311678965 .0014620749175490758
.9999989223890265 .001468067023570908
.9999989135742512 .0014740591295400284
.9999989047235704 .0014800512354562223
.9999988958369843 .0014860433413192743
.9999988869144928 .0014920354471289693
.9999988779560959 .0014980275528850922
.9999988689617937 .0015040196585874275
.9999988599315861 .0015100117642357607
.999998850865473 .0015160038698298762
.9999988417634548 .001521995975369559
.999998832625531 .0015279880808545937
.9999988234517019 .0015339801862847657
.9999988142419675 .0015399722916598592
.9999988049963277 .0015459643969796596
.9999987957147825 .0015519565022439512
.9999987863973319 .0015579486074525195
.9999987770439759 .001563940712605149
.9999987676547146 .0015699328177016243
.999998758229548 .0015759249227417307
.9999987487684759 .0015819170277252528
.9999987392714985 .0015879091326519755
.9999987297386157 .0015939012375216837
.9999987201698276 .0015998933423341623
.9999987105651341 .001605885447089196
.9999987009245352 .0016118775517865696
.999998691248031 .0016178696564260683
.9999986815356214 .0016238617610074765
.9999986717873064 .0016298538655305794
.9999986620030861 .0016358459699951618
.9999986521829605 .0016418380744010084
.9999986423269294 .0016478301787479041
.999998632434993 .0016538222830356339
.9999986225071512 .0016598143872639823
.999998612543404 .0016658064914327345
.9999986025437515 .0016717985955416754
.9999985925081937 .0016777906995905894
.9999985824367305 .0016837828035792617
.9999985723293618 .0016897749075074774
.999998562186088 .0016957670113750207
.9999985520069086 .0017017591151816769
.9999985417918239 .0017077512189272307
.999998531540834 .001713743322611467
.9999985212539385 .0017197354262341706
.9999985109311378 .0017257275297951264
.9999985005724317 .0017317196332941192
.9999984901778203 .0017377117367309341
.9999984797473034 .0017437038401053556
.9999984692808812 .0017496959434171687
.9999984587785538 .0017556880466661582
.9999984482403208 .001761680149852109
.9999984376661826 .0017676722529748061
.999998427056139 .0017736643560340342
.99999841641019 .001779656459029578
.9999984057283358 .0017856485619612225
.9999983950105761 .0017916406648287528
.999998384256911 .0017976327676319532
.9999983734673407 .001803624870370609
.9999983626418649 .0018096169730445048
.9999983517804839 .0018156090756534257
.9999983408831975 .0018216011781971562
.9999983299500057 .0018275932806754815
.9999983189809085 .0018335853830881864
.999998307975906 .0018395774854350557
.9999982969349982 .001845569587715874
.9999982858581851 .0018515616899304264
.9999982747454665 .001857553792078498
.9999982635968426 .001863545894159873
.9999982524123134 .0018695379961743367
.9999982411918789 .001875530098121674
.9999982299355389 .0018815222000016696
.9999982186432936 .0018875143018141083
.999998207315143 .0018935064035587748
.999998195951087 .0018994985052354545
.9999981845511257 .0019054906068439318
.9999981731152591 .0019114827083839918
.999998161643487 .001917474809855419
.9999981501358096 .0019234669112579987
.999998138592227 .0019294590125915154
.9999981270127389 .0019354511138557542
.9999981153973455 .0019414432150504997
.9999981037460468 .0019474353161755369
.9999980920588427 .001953427417230651
.9999980803357332 .001959419518215626
.9999980685767185 .0019654116191302473
.9999980567817984 .0019714037199743
.9999980449509729 .0019773958207475683
.9999980330842422 .0019833879214498375
.999998021181606 .001989380022080892
.9999980092430646 .0019953721226405176
.9999979972686177 .002001364223128498
.9999979852582656 .002007356323544619
.9999979732120081 .002013348423888665
.9999979611298453 .002019340524160421
.9999979490117771 .0020253326243596715
.9999979368578036 .0020313247244862017
.9999979246679247 .002037316824539796
.9999979124421405 .00204330892452024
.999997900180451 .002049301024427318
.9999978878828562 .0020552931242608153
.9999978755493559 .002061285224020516
.9999978631799504 .0020672773237062057
.9999978507746395 .002073269423317669
.9999978383334234 .0020792615228546903
.9999978258563018 .002085253622317055
.999997813343275 .0020912457217045484
.9999978007943428 .002097237821016954
.9999977882095052 .0021032299202540577
.9999977755887623 .0021092220194156444
.9999977629321142 .0021152141185014984
.9999977502395607 .0021212062175114043
.9999977375111019 .002127198316445148
.9999977247467376 .0021331904153025134
.9999977119464681 .002139182514083286
.9999976991102932 .0021451746127872503
.9999976862382131 .002151166711414191
.9999976733302276 .0021571588099638934
.9999976603863368 .0021631509084361423
.9999976474065406 .002169143006830722
.9999976343908391 .002175135105147418
.9999976213392323 .0021811272033860148
.9999976082517201 .002187119301546297
.9999975951283027 .00219311139962805
.9999975819689799 .0021991034976310588
.9999975687737518 .0022050955955551076
.9999975555426184 .0022110876933999816
.9999975422755796 .0022170797911654654
.9999975289726355 .002223071888851344
.9999975156337861 .0022290639864574026
.9999975022590314 .0022350560839834253
.9999974888483714 .002241048181429198
.999997475401806 .0022470402787945045
.9999974619193353 .00225303237607913
.9999974484009593 .0022590244732828596
.9999974348466779 .0022650165704054784
.9999974212564913 .0022710086674467703
.9999974076303992 .002277000764406521
.9999973939684019 .002282992861284515
.9999973802704993 .0022889849580805368
.9999973665366915 .0022949770547943723
.9999973527669782 .0023009691514258054
.9999973389613596 .002306961247974621
.9999973251198357 .0023129533444406045
.9999973112424065 .0023189454408235406
.999997297329072 .0023249375371232135
.9999972833798322 .002330929633339409
.999997269394687 .0023369217294719113
.9999972553736366 .0023429138255205055
.9999972413166809 .0023489059214849765
.9999972272238198 .002354898017365109
.9999972130950534 .0023608901131606883
.9999971989303816 .0023668822088714985
.9999971847298047 .0023728743044973246
.9999971704933224 .0023788664000379523
.9999971562209347 .0023848584954931653
.9999971419126418 .0023908505908627493
.9999971275684435 .0023968426861464883
.99999711318834 .002402834781344168
.9999970987723311 .0024088268764555732
.9999970843204169 .002414818971480488
.9999970698325974 .002420811066418698
.9999970553088726 .0024268031612699878
.9999970407492426 .002432795256034142
.9999970261537071 .002438787350710946
.9999970115222664 .002444779445300184
.9999969968549204 .0024507715398016418
.9999969821516691 .002456763634215103
.9999969674125124 .002462755728540353
.9999969526374506 .0024687478227771774
.9999969378264834 .00247473991692536
.9999969229796108 .002480732010984686
.999996908096833 .0024867241049549406
.9999968931781499 .002492716198835908
.9999968782235614 .0024987082926273734
.9999968632330677 .002504700386329122
.9999968482066687 .002510692479940938
.9999968331443644 .0025166845734626068
.9999968180461547 .0025226766668939127
.9999968029120399 .002528668760234641
.9999967877420196 .002534660853484576
.9999967725360941 .0025406529466435036
.9999967572942633 .002546645039711208
.9999967420165272 .002552637132687474
.9999967267028858 .002558629225572086
.9999967113533391 .0025646213183648297
.9999966959678871 .0025706134110654896
.9999966805465298 .002576605503673851
.9999966650892672 .0025825975961896977
.9999966495960994 .0025885896886128153
.9999966340670262 .0025945817809429885
.9999966185020478 .0026005738731800024
.9999966029011641 .0026065659653236417
.999996587264375 .002612558057373691
.9999965715916808 .002618550149329935
.9999965558830811 .0026245422411921592
.9999965401385762 .002630534332960148
.9999965243581661 .002636526424633687
.9999965085418506 .0026425185162125596
.9999964926896299 .0026485106076965517
.9999964768015038 .0026545026990854484
.9999964608774725 .0026604947903790337
.9999964449175359 .0026664868815770926
.999996428921694 .0026724789726794104
.9999964128899468 .002678471063685772
.9999963968222944 .0026844631545959617
.9999963807187366 .002690455245409765
.9999963645792737 .002696447336126966
.9999963484039053 .00270243942674735
.9999963321926317 .002708431517270702
.9999963159454529 .0027144236076968066
.9999962996623687 .0027204156980254485
.9999962833433793 .002726407788256413
.9999962669884847 .002732399878389485
.9999962505976846 .0027383919684244484
.9999962341709794 .002744384058361089
.9999962177083689 .0027503761481991913
.999996201209853 .0027563682379385403
.9999961846754319 .0027623603275789207
.9999961681051056 .0027683524171201175
.999996151498874 .002774344506561915
.9999961348567371 .002780336595904099
.9999961181786949 .0027863286851464537
.9999961014647475 .0027923207742887642
.9999960847148948 .0027983128633308155
.9999960679291368 .002804304952272392
.9999960511074735 .002810297041113279
.9999960342499049 .0028162891298532606
.9999960173564312 .0028222812184921227
.9999960004270521 .002828273307029649
.9999959834617678 .002834265395465626
.9999959664605781 .0028402574837998367
.9999959494234832 .002846249572032067
.9999959323504831 .0028522416601621014
.9999959152415777 .002858233748189725
.999995898096767 .002864225836114723
.9999958809160512 .0028702179239368793
.9999958636994299 .0028762100116559793
.9999958464469034 .0028822020992718077
.9999958291584717 .0028881941867841495
.9999958118341348 .0028941862741927895
.9999957944738925 .0029001783614975127
.999995777077745 .002906170448698104
.9999957596456922 .0029121625357943475
.9999957421777342 .002918154622786029
.999995724673871 .0029241467096729327
.9999957071341024 .002930138796454844
.9999956895584287 .0029361308831315474
.9999956719468496 .0029421229697028273
.9999956542993652 .0029481150561684695
.9999956366159757 .0029541071425282584
.9999956188966809 .002960099228781979
.9999956011414808 .002966091314929416
.9999955833503754 .002972083400970354
.9999955655233649 .0029780754869045785
.9999955476604491 .0029840675727318736
.999995529761628 .002990059658452025
.9999955118269016 .0029960517440648163
.99999549385627 .0030020438295700336
.9999954758497331 .0030080359149674612
.999995457807291 .003014028000256884
.9999954397289438 .003020020085438087
.9999954216146911 .0030260121705108552
.9999954034645333 .003032004255474973
.9999953852784702 .003037996340330225
.9999953670565019 .003043988425076397
.9999953487986284 .003049980509713273
.9999953305048496 .0030559725942406386
.9999953121751655 .003061964678658278
)))
;; high-lut: a flat f64vector of interleaved value pairs, consumed by
;; make-w as a multiplier table when extending the FFT twiddle-factor
;; table beyond lut-table-size^2 entries.  The pairs begin at (1., 0.)
;; and appear to be (cosine, sine) -- i.e. (real, imag) -- components
;; of complex roots of unity at very fine angular steps (~1.17e-8 per
;; entry) -- TODO confirm against the table generator.
;; Machine-generated data: do not edit by hand.
(define high-lut
(list->f64vector '(1. 0.
.9999999999999999 1.1703344634137277e-8
.9999999999999998 2.3406689268274554e-8
.9999999999999993 3.5110033902411824e-8
.9999999999999989 4.6813378536549095e-8
.9999999999999983 5.851672317068635e-8
.9999999999999976 7.022006780482361e-8
.9999999999999967 8.192341243896085e-8
.9999999999999957 9.362675707309808e-8
.9999999999999944 1.0533010170723531e-7
.9999999999999931 1.170334463413725e-7
.9999999999999917 1.287367909755097e-7
.9999999999999901 1.4044013560964687e-7
.9999999999999885 1.5214348024378403e-7
.9999999999999866 1.6384682487792116e-7
.9999999999999846 1.7555016951205827e-7
.9999999999999825 1.8725351414619535e-7
.9999999999999802 1.989568587803324e-7
.9999999999999778 2.1066020341446942e-7
.9999999999999752 2.2236354804860645e-7
.9999999999999726 2.3406689268274342e-7
.9999999999999698 2.4577023731688034e-7
.9999999999999668 2.5747358195101726e-7
.9999999999999638 2.6917692658515413e-7
.9999999999999606 2.8088027121929094e-7
.9999999999999571 2.9258361585342776e-7
.9999999999999537 3.042869604875645e-7
.99999999999995 3.159903051217012e-7
.9999999999999463 3.276936497558379e-7
.9999999999999424 3.3939699438997453e-7
.9999999999999384 3.5110033902411114e-7
.9999999999999342 3.6280368365824763e-7
.9999999999999298 3.7450702829238413e-7
.9999999999999254 3.8621037292652057e-7
.9999999999999208 3.979137175606569e-7
.9999999999999161 4.0961706219479325e-7
.9999999999999113 4.2132040682892953e-7
.9999999999999063 4.330237514630657e-7
.9999999999999011 4.447270960972019e-7
.9999999999998959 4.5643044073133796e-7
.9999999999998904 4.68133785365474e-7
.9999999999998849 4.7983712999961e-7
.9999999999998792 4.915404746337459e-7
.9999999999998733 5.032438192678817e-7
.9999999999998674 5.149471639020175e-7
.9999999999998613 5.266505085361531e-7
.9999999999998551 5.383538531702888e-7
.9999999999998487 5.500571978044243e-7
.9999999999998422 5.617605424385598e-7
.9999999999998356 5.734638870726952e-7
.9999999999998288 5.851672317068305e-7
.9999999999998219 5.968705763409657e-7
.9999999999998148 6.085739209751009e-7
.9999999999998076 6.202772656092359e-7
.9999999999998003 6.319806102433709e-7
.9999999999997928 6.436839548775058e-7
.9999999999997853 6.553872995116406e-7
.9999999999997775 6.670906441457753e-7
.9999999999997696 6.7879398877991e-7
.9999999999997616 6.904973334140445e-7
.9999999999997534 7.02200678048179e-7
.9999999999997452 7.139040226823132e-7
.9999999999997368 7.256073673164475e-7
.9999999999997282 7.373107119505817e-7
.9999999999997194 7.490140565847157e-7
.9999999999997107 7.607174012188497e-7
.9999999999997017 7.724207458529835e-7
.9999999999996926 7.841240904871172e-7
.9999999999996834 7.958274351212508e-7
.9999999999996739 8.075307797553844e-7
.9999999999996644 8.192341243895178e-7
.9999999999996547 8.309374690236511e-7
.999999999999645 8.426408136577842e-7
.9999999999996351 8.543441582919173e-7
.999999999999625 8.660475029260503e-7
.9999999999996148 8.777508475601831e-7
.9999999999996044 8.894541921943158e-7
.999999999999594 9.011575368284484e-7
.9999999999995833 9.128608814625808e-7
.9999999999995726 9.245642260967132e-7
.9999999999995617 9.362675707308454e-7
.9999999999995507 9.479709153649775e-7
.9999999999995395 9.596742599991095e-7
.9999999999995283 9.713776046332412e-7
.9999999999995168 9.83080949267373e-7
.9999999999995052 9.947842939015044e-7
.9999999999994935 1.006487638535636e-6
.9999999999994816 1.0181909831697673e-6
.9999999999994696 1.0298943278038984e-6
.9999999999994575 1.0415976724380293e-6
.9999999999994453 1.0533010170721601e-6
.9999999999994329 1.065004361706291e-6
.9999999999994204 1.0767077063404215e-6
.9999999999994077 1.088411050974552e-6
.9999999999993949 1.1001143956086822e-6
.9999999999993819 1.1118177402428122e-6
.9999999999993688 1.1235210848769423e-6
.9999999999993556 1.135224429511072e-6
.9999999999993423 1.1469277741452017e-6
.9999999999993288 1.1586311187793313e-6
.9999999999993151 1.1703344634134605e-6
.9999999999993014 1.1820378080475897e-6
.9999999999992875 1.1937411526817187e-6
.9999999999992735 1.2054444973158477e-6
.9999999999992593 1.2171478419499764e-6
.9999999999992449 1.2288511865841048e-6
.9999999999992305 1.2405545312182331e-6
.999999999999216 1.2522578758523615e-6
.9999999999992012 1.2639612204864894e-6
.9999999999991863 1.2756645651206173e-6
.9999999999991713 1.287367909754745e-6
.9999999999991562 1.2990712543888725e-6
.9999999999991409 1.3107745990229998e-6
.9999999999991255 1.3224779436571269e-6
.9999999999991099 1.3341812882912537e-6
.9999999999990943 1.3458846329253806e-6
.9999999999990785 1.3575879775595072e-6
.9999999999990625 1.3692913221936337e-6
.9999999999990464 1.3809946668277597e-6
.9999999999990302 1.3926980114618857e-6
.9999999999990138 1.4044013560960117e-6
.9999999999989974 1.4161047007301373e-6
.9999999999989807 1.4278080453642627e-6
.9999999999989639 1.439511389998388e-6
.999999999998947 1.451214734632513e-6
.99999999999893 1.462918079266638e-6
.9999999999989128 1.4746214239007625e-6
.9999999999988954 1.486324768534887e-6
.999999999998878 1.4980281131690111e-6
.9999999999988604 1.5097314578031353e-6
.9999999999988426 1.5214348024372591e-6
.9999999999988247 1.5331381470713828e-6
.9999999999988067 1.544841491705506e-6
.9999999999987886 1.5565448363396294e-6
.9999999999987703 1.5682481809737524e-6
.9999999999987519 1.579951525607875e-6
.9999999999987333 1.5916548702419977e-6
.9999999999987146 1.60335821487612e-6
.9999999999986958 1.615061559510242e-6
.9999999999986768 1.626764904144364e-6
.9999999999986577 1.6384682487784858e-6
.9999999999986384 1.6501715934126072e-6
.9999999999986191 1.6618749380467283e-6
.9999999999985996 1.6735782826808495e-6
.9999999999985799 1.6852816273149702e-6
.9999999999985602 1.6969849719490907e-6
.9999999999985402 1.708688316583211e-6
.9999999999985201 1.720391661217331e-6
.9999999999985 1.732095005851451e-6
.9999999999984795 1.7437983504855706e-6
.9999999999984591 1.7555016951196899e-6
.9999999999984385 1.767205039753809e-6
.9999999999984177 1.778908384387928e-6
.9999999999983968 1.7906117290220465e-6
.9999999999983759 1.802315073656165e-6
.9999999999983546 1.814018418290283e-6
.9999999999983333 1.825721762924401e-6
.9999999999983119 1.8374251075585186e-6
.9999999999982904 1.8491284521926361e-6
.9999999999982686 1.8608317968267533e-6
.9999999999982468 1.8725351414608702e-6
.9999999999982249 1.8842384860949866e-6
.9999999999982027 1.8959418307291031e-6
.9999999999981805 1.9076451753632194e-6
.999999999998158 1.919348519997335e-6
.9999999999981355 1.9310518646314507e-6
.9999999999981128 1.942755209265566e-6
.9999999999980901 1.954458553899681e-6
.9999999999980671 1.966161898533796e-6
.999999999998044 1.9778652431679103e-6
.9999999999980208 1.9895685878020246e-6
.9999999999979975 2.0012719324361386e-6
.999999999997974 2.012975277070252e-6
.9999999999979503 2.0246786217043656e-6
.9999999999979265 2.0363819663384787e-6
.9999999999979027 2.048085310972592e-6
.9999999999978786 2.0597886556067045e-6
.9999999999978545 2.0714920002408167e-6
.9999999999978302 2.0831953448749286e-6
.9999999999978058 2.0948986895090404e-6
.9999999999977811 2.106602034143152e-6
.9999999999977564 2.118305378777263e-6
.9999999999977315 2.1300087234113738e-6
.9999999999977065 2.1417120680454843e-6
.9999999999976814 2.153415412679595e-6
.9999999999976561 2.1651187573137046e-6
.9999999999976307 2.1768221019478143e-6
.9999999999976051 2.188525446581924e-6
.9999999999975795 2.200228791216033e-6
.9999999999975536 2.2119321358501417e-6
.9999999999975278 2.22363548048425e-6
.9999999999975017 2.2353388251183586e-6
.9999999999974754 2.247042169752466e-6
.999999999997449 2.2587455143865738e-6
.9999999999974225 2.2704488590206814e-6
.9999999999973959 2.282152203654788e-6
.9999999999973691 2.293855548288895e-6
.9999999999973422 2.305558892923001e-6
.9999999999973151 2.317262237557107e-6
.999999999997288 2.328965582191213e-6
.9999999999972606 2.340668926825318e-6
.9999999999972332 2.352372271459423e-6
.9999999999972056 2.364075616093528e-6
.9999999999971778 2.3757789607276323e-6
.99999999999715 2.3874823053617365e-6
.999999999997122 2.3991856499958403e-6
.9999999999970938 2.4108889946299437e-6
.9999999999970656 2.4225923392640466e-6
.9999999999970371 2.4342956838981495e-6
.9999999999970085 2.445999028532252e-6
.9999999999969799 2.457702373166354e-6
.999999999996951 2.4694057178004558e-6
.999999999996922 2.4811090624345574e-6
.9999999999968929 2.4928124070686583e-6
.9999999999968637 2.504515751702759e-6
.9999999999968343 2.5162190963368595e-6
.9999999999968048 2.5279224409709594e-6
.9999999999967751 2.5396257856050594e-6
.9999999999967454 2.5513291302391585e-6
.9999999999967154 2.5630324748732576e-6
.9999999999966853 2.5747358195073563e-6
.9999999999966551 2.5864391641414546e-6
.9999999999966248 2.5981425087755525e-6
.9999999999965944 2.6098458534096503e-6
.9999999999965637 2.6215491980437473e-6
.999999999996533 2.6332525426778443e-6
.9999999999965021 2.644955887311941e-6
.999999999996471 2.656659231946037e-6
.99999999999644 2.6683625765801328e-6
.9999999999964087 2.680065921214228e-6
.9999999999963772 2.6917692658483234e-6
.9999999999963456 2.703472610482418e-6
.999999999996314 2.7151759551165123e-6
.9999999999962821 2.7268792997506064e-6
.9999999999962501 2.7385826443846996e-6
.9999999999962179 2.750285989018793e-6
.9999999999961857 2.761989333652886e-6
.9999999999961533 2.7736926782869783e-6
.9999999999961208 2.78539602292107e-6
.9999999999960881 2.797099367555162e-6
.9999999999960553 2.808802712189253e-6
.9999999999960224 2.8205060568233443e-6
.9999999999959893 2.832209401457435e-6
.9999999999959561 2.8439127460915247e-6
.9999999999959227 2.8556160907256145e-6
.9999999999958893 2.867319435359704e-6
.9999999999958556 2.879022779993793e-6
.9999999999958219 2.8907261246278814e-6
.9999999999957879 2.90242946926197e-6
.999999999995754 2.9141328138960576e-6
.9999999999957198 2.925836158530145e-6
.9999999999956855 2.9375395031642317e-6
.999999999995651 2.9492428477983186e-6
.9999999999956164 2.9609461924324046e-6
.9999999999955816 2.9726495370664905e-6
.9999999999955468 2.9843528817005757e-6
.9999999999955118 2.996056226334661e-6
.9999999999954767 3.007759570968745e-6
.9999999999954414 3.0194629156028294e-6
.999999999995406 3.0311662602369133e-6
.9999999999953705 3.0428696048709963e-6
.9999999999953348 3.0545729495050794e-6
.999999999995299 3.066276294139162e-6
.999999999995263 3.0779796387732437e-6
.9999999999952269 3.0896829834073255e-6
.9999999999951907 3.101386328041407e-6
.9999999999951543 3.1130896726754873e-6
.9999999999951178 3.1247930173095678e-6
.9999999999950812 3.136496361943648e-6
.9999999999950444 3.148199706577727e-6
.9999999999950075 3.1599030512118063e-6
.9999999999949705 3.171606395845885e-6
.9999999999949333 3.183309740479963e-6
.999999999994896 3.195013085114041e-6
.9999999999948584 3.206716429748118e-6
.9999999999948209 3.218419774382195e-6
.9999999999947832 3.2301231190162714e-6
.9999999999947453 3.2418264636503477e-6
.9999999999947072 3.253529808284423e-6
.9999999999946692 3.265233152918498e-6
.9999999999946309 3.276936497552573e-6
.9999999999945924 3.288639842186647e-6
.9999999999945539 3.300343186820721e-6
.9999999999945152 3.312046531454794e-6
.9999999999944763 3.323749876088867e-6
.9999999999944373 3.3354532207229395e-6
.9999999999943983 3.3471565653570115e-6
.9999999999943591 3.358859909991083e-6
.9999999999943197 3.370563254625154e-6
.9999999999942801 3.3822665992592245e-6
.9999999999942405 3.3939699438932944e-6
.9999999999942008 3.4056732885273643e-6
.9999999999941608 3.4173766331614334e-6
.9999999999941207 3.429079977795502e-6
.9999999999940805 3.4407833224295702e-6
.9999999999940402 3.452486667063638e-6
.9999999999939997 3.4641900116977054e-6
.999999999993959 3.4758933563317723e-6
.9999999999939183 3.4875967009658384e-6
.9999999999938775 3.4993000455999045e-6
.9999999999938364 3.5110033902339697e-6
.9999999999937953 3.5227067348680345e-6
.999999999993754 3.534410079502099e-6
.9999999999937126 3.546113424136163e-6
.999999999993671 3.5578167687702264e-6
.9999999999936293 3.5695201134042896e-6
.9999999999935875 3.581223458038352e-6
.9999999999935454 3.592926802672414e-6
.9999999999935033 3.6046301473064755e-6
.9999999999934611 3.6163334919405365e-6
.9999999999934187 3.628036836574597e-6
.9999999999933762 3.639740181208657e-6
.9999999999933334 3.6514435258427166e-6
.9999999999932907 3.6631468704767755e-6
.9999999999932477 3.674850215110834e-6
.9999999999932047 3.686553559744892e-6
.9999999999931615 3.6982569043789496e-6
.9999999999931181 3.7099602490130064e-6
.9999999999930747 3.7216635936470627e-6
.999999999993031 3.733366938281119e-6
.9999999999929873 3.745070282915174e-6
.9999999999929433 3.756773627549229e-6
.9999999999928992 3.768476972183284e-6
.9999999999928552 3.7801803168173377e-6
.9999999999928109 3.791883661451391e-6
.9999999999927663 3.803587006085444e-6
.9999999999927218 3.8152903507194965e-6
.9999999999926771 3.826993695353548e-6
.9999999999926322 3.838697039987599e-6
.9999999999925873 3.85040038462165e-6
.9999999999925421 3.862103729255701e-6
.9999999999924968 3.87380707388975e-6
.9999999999924514 3.885510418523799e-6
.9999999999924059 3.897213763157848e-6
.9999999999923602 3.9089171077918965e-6
.9999999999923144 3.9206204524259435e-6
.9999999999922684 3.9323237970599905e-6
.9999999999922223 3.9440271416940376e-6
.9999999999921761 3.955730486328084e-6
.9999999999921297 3.967433830962129e-6
.9999999999920832 3.9791371755961736e-6
.9999999999920366 3.990840520230218e-6
.9999999999919899 4.002543864864262e-6
.9999999999919429 4.014247209498305e-6
.9999999999918958 4.025950554132348e-6
.9999999999918486 4.03765389876639e-6
.9999999999918013 4.049357243400431e-6
.9999999999917539 4.061060588034472e-6
.9999999999917063 4.072763932668513e-6
.9999999999916586 4.084467277302553e-6
.9999999999916107 4.096170621936592e-6
.9999999999915626 4.107873966570632e-6
.9999999999915146 4.119577311204669e-6
.9999999999914663 4.131280655838707e-6
.9999999999914179 4.142984000472745e-6
.9999999999913692 4.154687345106781e-6
.9999999999913206 4.166390689740817e-6
.9999999999912718 4.178094034374852e-6
.9999999999912228 4.189797379008887e-6
.9999999999911737 4.201500723642921e-6
.9999999999911244 4.213204068276955e-6
.999999999991075 4.224907412910988e-6
.9999999999910255 4.236610757545021e-6
.9999999999909759 4.248314102179053e-6
.9999999999909261 4.260017446813084e-6
.9999999999908762 4.271720791447115e-6
.9999999999908261 4.283424136081145e-6
.9999999999907759 4.295127480715175e-6
.9999999999907256 4.306830825349204e-6
.9999999999906751 4.3185341699832325e-6
.9999999999906245 4.33023751461726e-6
.9999999999905738 4.3419408592512875e-6
.9999999999905229 4.353644203885314e-6
.9999999999904718 4.36534754851934e-6
.9999999999904207 4.377050893153365e-6
.9999999999903694 4.38875423778739e-6
.999999999990318 4.400457582421414e-6
.9999999999902665 4.4121609270554384e-6
.9999999999902147 4.423864271689461e-6
.9999999999901629 4.435567616323483e-6
.9999999999901109 4.447270960957506e-6
.9999999999900587 4.458974305591527e-6
.9999999999900065 4.470677650225547e-6
.9999999999899541 4.482380994859567e-6
.9999999999899016 4.494084339493587e-6
.9999999999898489 4.5057876841276054e-6
.9999999999897962 4.517491028761624e-6
.9999999999897432 4.529194373395641e-6
.9999999999896901 4.5408977180296584e-6
.999999999989637 4.552601062663675e-6
.9999999999895836 4.564304407297691e-6
.99999999998953 4.5760077519317055e-6
.9999999999894764 4.5877110965657195e-6
.9999999999894227 4.5994144411997335e-6
.9999999999893688 4.611117785833747e-6
.9999999999893148 4.622821130467759e-6
.9999999999892606 4.634524475101771e-6
.9999999999892063 4.646227819735783e-6
.9999999999891518 4.657931164369793e-6
.9999999999890973 4.669634509003803e-6
.9999999999890425 4.681337853637813e-6
.9999999999889877 4.693041198271821e-6
.9999999999889327 4.704744542905829e-6
.9999999999888776 4.716447887539837e-6
.9999999999888223 4.728151232173843e-6
.9999999999887669 4.73985457680785e-6
.9999999999887114 4.751557921441855e-6
.9999999999886556 4.76326126607586e-6
.9999999999885999 4.774964610709864e-6
.9999999999885439 4.786667955343868e-6
.9999999999884878 4.798371299977871e-6
.9999999999884316 4.810074644611873e-6
.9999999999883752 4.821777989245874e-6
.9999999999883187 4.833481333879875e-6
.9999999999882621 4.845184678513876e-6
.9999999999882053 4.856888023147875e-6
.9999999999881484 4.868591367781874e-6
.9999999999880914 4.880294712415872e-6
.9999999999880341 4.89199805704987e-6
.9999999999879768 4.903701401683867e-6
.9999999999879194 4.915404746317863e-6
.9999999999878618 4.9271080909518585e-6
.9999999999878041 4.938811435585853e-6
.9999999999877462 4.9505147802198475e-6
.9999999999876882 4.962218124853841e-6
.99999999998763 4.973921469487834e-6
.9999999999875717 4.985624814121826e-6
.9999999999875133 4.997328158755817e-6
.9999999999874548 5.009031503389808e-6
.9999999999873961 5.0207348480237985e-6
.9999999999873372 5.032438192657788e-6
.9999999999872783 5.0441415372917765e-6
.9999999999872192 5.055844881925764e-6
.9999999999871599 5.067548226559752e-6
.9999999999871007 5.079251571193739e-6
.9999999999870411 5.090954915827725e-6
.9999999999869814 5.10265826046171e-6
.9999999999869217 5.1143616050956945e-6
.9999999999868617 5.126064949729678e-6
.9999999999868017 5.1377682943636615e-6
.9999999999867415 5.149471638997644e-6
.9999999999866811 5.161174983631626e-6
.9999999999866207 5.172878328265607e-6
.9999999999865601 5.184581672899587e-6
.9999999999864994 5.196285017533567e-6
.9999999999864384 5.2079883621675455e-6
.9999999999863775 5.219691706801524e-6
.9999999999863163 5.2313950514355015e-6
.999999999986255 5.243098396069478e-6
.9999999999861935 5.254801740703454e-6
.999999999986132 5.266505085337429e-6
.9999999999860703 5.278208429971404e-6
.9999999999860084 5.289911774605378e-6
.9999999999859465 5.301615119239351e-6
.9999999999858843 5.313318463873323e-6
.9999999999858221 5.325021808507295e-6
.9999999999857597 5.336725153141267e-6
.9999999999856971 5.3484284977752366e-6
.9999999999856345 5.360131842409206e-6
.9999999999855717 5.371835187043175e-6
.9999999999855087 5.383538531677143e-6
.9999999999854456 5.3952418763111104e-6
.9999999999853825 5.406945220945077e-6
.9999999999853191 5.418648565579043e-6
.9999999999852557 5.4303519102130076e-6
.9999999999851921 5.4420552548469724e-6
.9999999999851282 5.453758599480936e-6
.9999999999850644 5.465461944114899e-6
.9999999999850003 5.47716528874886e-6
.9999999999849362 5.488868633382822e-6
.9999999999848719 5.500571978016782e-6
.9999999999848074 5.512275322650742e-6
.9999999999847429 5.523978667284702e-6
.9999999999846781 5.53568201191866e-6
.9999999999846133 5.547385356552617e-6
.9999999999845482 5.5590887011865745e-6
.9999999999844832 5.57079204582053e-6
.9999999999844179 5.582495390454486e-6
.9999999999843525 5.59419873508844e-6
.9999999999842869 5.605902079722394e-6
.9999999999842213 5.617605424356347e-6
.9999999999841555 5.629308768990299e-6
.9999999999840895 5.641012113624251e-6
.9999999999840234 5.652715458258201e-6
.9999999999839572 5.664418802892152e-6
.9999999999838908 5.6761221475261e-6
.9999999999838243 5.687825492160048e-6
.9999999999837577 5.699528836793996e-6
.9999999999836909 5.711232181427943e-6
.999999999983624 5.722935526061889e-6
.9999999999835569 5.734638870695834e-6
.9999999999834898 5.746342215329779e-6
.9999999999834225 5.758045559963722e-6
.999999999983355 5.769748904597665e-6
.9999999999832874 5.781452249231607e-6
.9999999999832196 5.793155593865548e-6
.9999999999831518 5.804858938499489e-6
.9999999999830838 5.816562283133429e-6
.9999999999830157 5.8282656277673675e-6
.9999999999829474 5.839968972401306e-6
.9999999999828789 5.851672317035243e-6
.9999999999828104 5.86337566166918e-6
.9999999999827417 5.875079006303115e-6
.9999999999826729 5.88678235093705e-6
.9999999999826039 5.898485695570985e-6
.9999999999825349 5.910189040204917e-6
.9999999999824656 5.92189238483885e-6
.9999999999823962 5.933595729472782e-6
.9999999999823267 5.945299074106713e-6
.9999999999822571 5.957002418740643e-6
.9999999999821872 5.9687057633745715e-6
.9999999999821173 5.9804091080085e-6
)))
;; (make-w log-n) builds the complex twiddle-factor table used by the
;; FFT routines: an f64vector of 2*n doubles holding n interleaved
;; (real, imag) pairs, where the pairs are drawn from the precomputed
;; low-lut/med-lut/high-lut tables and stored in bit-reversed order.
;; NOTE(review): the body reads the free variable `n` (not the `log-n`
;; parameter); presumably n = 2^log-n is bound in an enclosing scope
;; together with lut-table-size, log-lut-table-size, lut-table-size^2,
;; lut-table-size^3 and the three lut vectors -- confirm upstream.
(define (make-w log-n)
(if (fx<= n lut-table-size)
;; Small case: the precomputed table already covers all n roots.
low-lut
(let ((result (make-f64vector (fx* 2 n))))
;; Seed `result` with the first lut-table-size (real, imag) pairs
;; copied verbatim from low-lut.
(define (copy-low-lut)
(do ((i 0 (fx+ i 1)))
((fx= i lut-table-size))
(let ((index (fx* i 2)))
(f64vector-set!
result
index
(f64vector-ref low-lut index))
(f64vector-set!
result
(fx+ index 1)
(f64vector-ref low-lut (fx+ index 1))))))
;; Extend `result` from entry `start` up to (but excluding) entry
;; `end`: each group of `start` new entries is the already-filled
;; prefix multiplied (complex product) by one root taken from
;; `multiplier-lut`.  The multiplier index is bit-reversed over
;; `bit-reverse-size` bits and scaled by `bit-reverse-multiplier`,
;; which keeps the output table in bit-reversed order.
(define (extend-lut multiplier-lut bit-reverse-size bit-reverse-multiplier start end)
;; Reverse the low n bits of x (helper-local n shadows outer n).
(define (bit-reverse x n)
(do ((i 0 (fx+ i 1))
(x x (fxarithmetic-shift-right x 1))
(result 0 (fx+ (fx* result 2)
(bitwise-and x 1))))
((fx= i n) result)))
;; i: next entry of `result` to fill; j: group counter used to
;; select the multiplier root for the current group.
(let loop ((i start)
(j 1))
(if (fx< i end)
(let* ((multiplier-index
(fx* 2
(fx* (bit-reverse j bit-reverse-size)
bit-reverse-multiplier)))
(multiplier-real
(f64vector-ref multiplier-lut multiplier-index))
(multiplier-imag
(f64vector-ref multiplier-lut (fx+ multiplier-index 1))))
;; Multiply each of the first `start` entries (index k) by
;; the current multiplier and store the product at entry i.
(let inner ((i i)
(k 0))
(if (fx< k start)
(let* ((index
(fx* k 2))
(real
(f64vector-ref result index))
(imag
(f64vector-ref result (fx+ index 1)))
;; Complex multiply:
;; (real + i*imag) * (multiplier-real + i*multiplier-imag)
(result-real
(fl- (fl* multiplier-real real)
(fl* multiplier-imag imag)))
(result-imag
(fl+ (fl* multiplier-real imag)
(fl* multiplier-imag real)))
(result-index (fx* i 2)))
(f64vector-set! result result-index result-real)
(f64vector-set! result (fx+ result-index 1) result-imag)
(inner (fx+ i 1)
(fx+ k 1)))
;; Group done: move on to the next multiplier root.
(loop i
(fx+ j 1)))))
result)))
;; Dispatch on table size: one extension pass (med-lut) suffices up
;; to lut-table-size^2 entries, two passes (med-lut then high-lut)
;; up to lut-table-size^3; anything larger is unsupported.
(cond ((fx<= n lut-table-size^2)
(copy-low-lut)
(extend-lut med-lut
(fx- log-n log-lut-table-size)
(fxarithmetic-shift-left 1 (fx- (fx* 2 log-lut-table-size) log-n))
lut-table-size
n))
((fx<= n lut-table-size^3)
(copy-low-lut)
(extend-lut med-lut
log-lut-table-size
1
lut-table-size
lut-table-size^2)
(extend-lut high-lut
(fx- log-n (fx* 2 log-lut-table-size))
(fxarithmetic-shift-left 1 (fx- (fx* 3 log-lut-table-size) log-n))
lut-table-size^2
n))
(else
(error "asking for too large a table")))))))
(define (direct-fft-recursive-4 a W-table)
this is from page 66 of and , except that we have
(let ((W (f64vector 0. 0. 0. 0.)))
(define (main-loop M N K SizeOfGroup)
(let inner-loop ((K K)
(JFirst M))
(if (fx< JFirst N)
(let* ((JLast (fx+ JFirst SizeOfGroup)))
(if (fxeven? K)
(begin
(f64vector-set! W 0 (f64vector-ref W-table K))
(f64vector-set! W 1 (f64vector-ref W-table (fx+ K 1))))
(begin
(f64vector-set! W 0 (fl- 0. (f64vector-ref W-table K)))
(f64vector-set! W 1 (f64vector-ref W-table (fx- K 1)))))
we know the that the next two complex roots of
unity have index 2 K and 2K+1 so that the 2K+1
index root can be gotten from the 2 K index root
in the same way that we get W_0 and W_1 from the
(f64vector-set! W 2 (f64vector-ref W-table (fx* K 2)))
(f64vector-set! W 3 (f64vector-ref W-table (fx+ (fx* K 2) 1)))
(let J-loop ((J0 JFirst))
(if (fx< J0 JLast)
(let* ((J0 J0)
(J1 (fx+ J0 1))
(J2 (fx+ J0 SizeOfGroup))
(J3 (fx+ J2 1))
(J4 (fx+ J2 SizeOfGroup))
(J5 (fx+ J4 1))
(J6 (fx+ J4 SizeOfGroup))
(J7 (fx+ J6 1)))
(let ((W_0 (f64vector-ref W 0))
(W_1 (f64vector-ref W 1))
(W_2 (f64vector-ref W 2))
(W_3 (f64vector-ref W 3))
(a_J0 (f64vector-ref a J0))
(a_J1 (f64vector-ref a J1))
(a_J2 (f64vector-ref a J2))
(a_J3 (f64vector-ref a J3))
(a_J4 (f64vector-ref a J4))
(a_J5 (f64vector-ref a J5))
(a_J6 (f64vector-ref a J6))
(a_J7 (f64vector-ref a J7)))
butterflies with entries 2*SizeOfGroup
(let ((Temp_0 (fl- (fl* W_0 a_J4)
(fl* W_1 a_J5)))
(Temp_1 (fl+ (fl* W_0 a_J5)
(fl* W_1 a_J4)))
(Temp_2 (fl- (fl* W_0 a_J6)
(fl* W_1 a_J7)))
(Temp_3 (fl+ (fl* W_0 a_J7)
(fl* W_1 a_J6))))
(let ((a_J0 (fl+ a_J0 Temp_0))
(a_J1 (fl+ a_J1 Temp_1))
(a_J2 (fl+ a_J2 Temp_2))
(a_J3 (fl+ a_J3 Temp_3))
(a_J4 (fl- a_J0 Temp_0))
(a_J5 (fl- a_J1 Temp_1))
(a_J6 (fl- a_J2 Temp_2))
(a_J7 (fl- a_J3 Temp_3)))
now we do the two ( disjoint ) pairs
apart , the first pair with ,
the second with -W3+W2i
(let ((W_0 W_2)
(W_1 W_3)
(W_2 (fl- 0. W_3))
(W_3 W_2))
(let ((Temp_0
(fl- (fl* W_0 a_J2)
(fl* W_1 a_J3)))
(Temp_1
(fl+ (fl* W_0 a_J3)
(fl* W_1 a_J2)))
(Temp_2
(fl- (fl* W_2 a_J6)
(fl* W_3 a_J7)))
(Temp_3
(fl+ (fl* W_2 a_J7)
(fl* W_3 a_J6))))
(let ((a_J0 (fl+ a_J0 Temp_0))
(a_J1 (fl+ a_J1 Temp_1))
(a_J2 (fl- a_J0 Temp_0))
(a_J3 (fl- a_J1 Temp_1))
(a_J4 (fl+ a_J4 Temp_2))
(a_J5 (fl+ a_J5 Temp_3))
(a_J6 (fl- a_J4 Temp_2))
(a_J7 (fl- a_J5 Temp_3)))
(f64vector-set! a J0 a_J0)
(f64vector-set! a J1 a_J1)
(f64vector-set! a J2 a_J2)
(f64vector-set! a J3 a_J3)
(f64vector-set! a J4 a_J4)
(f64vector-set! a J5 a_J5)
(f64vector-set! a J6 a_J6)
(f64vector-set! a J7 a_J7)
(J-loop (fx+ J0 2)))))))))
(inner-loop (fx+ K 1)
(fx+ JFirst (fx* SizeOfGroup 4)))))))))
(define (recursive-bit M N K SizeOfGroup)
(if (fx<= 2 SizeOfGroup)
(begin
(main-loop M N K SizeOfGroup)
(if (fx< 2048 (fx- N M))
(let ((new-size (fxarithmetic-shift-right (fx- N M) 2)))
(recursive-bit M
(fx+ M new-size)
(fx* K 4)
(fxarithmetic-shift-right SizeOfGroup 2))
(recursive-bit (fx+ M new-size)
(fx+ M (fx* new-size 2))
(fx+ (fx* K 4) 1)
(fxarithmetic-shift-right SizeOfGroup 2))
(recursive-bit (fx+ M (fx* new-size 2))
(fx+ M (fx* new-size 3))
(fx+ (fx* K 4) 2)
(fxarithmetic-shift-right SizeOfGroup 2))
(recursive-bit (fx+ M (fx* new-size 3))
N
(fx+ (fx* K 4) 3)
(fxarithmetic-shift-right SizeOfGroup 2)))
(recursive-bit M
N
(fx* K 4)
(fxarithmetic-shift-right SizeOfGroup 2))))))
(define (radix-2-pass a)
array is not a power of 4 , so we need to do a basic radix
two pass with w=1 ( so W[0]=1.0 and W[1 ] = 0 . ) and then
call recursive - bit appropriately on the two half arrays .
(let ((SizeOfGroup
(fxarithmetic-shift-right (f64vector-length a) 1)))
(let loop ((J0 0))
(if (fx< J0 SizeOfGroup)
(let ((J0 J0)
(J2 (fx+ J0 SizeOfGroup)))
(let ((J1 (fx+ J0 1))
(J3 (fx+ J2 1)))
(let ((a_J0 (f64vector-ref a J0))
(a_J1 (f64vector-ref a J1))
(a_J2 (f64vector-ref a J2))
(a_J3 (f64vector-ref a J3)))
(let ((a_J0 (fl+ a_J0 a_J2))
(a_J1 (fl+ a_J1 a_J3))
(a_J2 (fl- a_J0 a_J2))
(a_J3 (fl- a_J1 a_J3)))
(f64vector-set! a J0 a_J0)
(f64vector-set! a J1 a_J1)
(f64vector-set! a J2 a_J2)
(f64vector-set! a J3 a_J3)
(loop (fx+ J0 2))))))))))
(let* ((n (f64vector-length a))
(log_n (two^p>=m n)))
of 4 , then do a single radix-2 pass and do the rest of
(if (fxodd? log_n)
(recursive-bit 0 n 0 (fxarithmetic-shift-right n 2))
(let ((n/2 (fxarithmetic-shift-right n 1))
(n/8 (fxarithmetic-shift-right n 3)))
(radix-2-pass a)
(recursive-bit 0 n/2 0 n/8)
(recursive-bit n/2 n 1 n/8))))))
(define (inverse-fft-recursive-4 a W-table)
associated algorithm on page 41 of and ,
above ( without dividing by 2 each time , so that this has to
be " normalized " by dividing by N/2 at the end .
(let ((W (f64vector 0. 0. 0. 0.)))
(define (main-loop M N K SizeOfGroup)
(let inner-loop ((K K)
(JFirst M))
(if (fx< JFirst N)
(let* ((JLast (fx+ JFirst SizeOfGroup)))
(if (fxeven? K)
(begin
(f64vector-set! W 0 (f64vector-ref W-table K))
(f64vector-set! W 1 (f64vector-ref W-table (fx+ K 1))))
(begin
(f64vector-set! W 0 (fl- 0. (f64vector-ref W-table K)))
(f64vector-set! W 1 (f64vector-ref W-table (fx- K 1)))))
(f64vector-set! W 2 (f64vector-ref W-table (fx* K 2)))
(f64vector-set! W 3 (f64vector-ref W-table (fx+ (fx* K 2) 1)))
(let J-loop ((J0 JFirst))
(if (fx< J0 JLast)
(let* ((J0 J0)
(J1 (fx+ J0 1))
(J2 (fx+ J0 SizeOfGroup))
(J3 (fx+ J2 1))
(J4 (fx+ J2 SizeOfGroup))
(J5 (fx+ J4 1))
(J6 (fx+ J4 SizeOfGroup))
(J7 (fx+ J6 1)))
(let ((W_0 (f64vector-ref W 0))
(W_1 (f64vector-ref W 1))
(W_2 (f64vector-ref W 2))
(W_3 (f64vector-ref W 3))
(a_J0 (f64vector-ref a J0))
(a_J1 (f64vector-ref a J1))
(a_J2 (f64vector-ref a J2))
(a_J3 (f64vector-ref a J3))
(a_J4 (f64vector-ref a J4))
(a_J5 (f64vector-ref a J5))
(a_J6 (f64vector-ref a J6))
(a_J7 (f64vector-ref a J7)))
(let ((W_00 W_2)
(W_01 W_3)
(W_02 (fl- 0. W_3))
(W_03 W_2))
(let ((Temp_0 (fl- a_J0 a_J2))
(Temp_1 (fl- a_J1 a_J3))
(Temp_2 (fl- a_J4 a_J6))
(Temp_3 (fl- a_J5 a_J7)))
(let ((a_J0 (fl+ a_J0 a_J2))
(a_J1 (fl+ a_J1 a_J3))
(a_J4 (fl+ a_J4 a_J6))
(a_J5 (fl+ a_J5 a_J7))
(a_J2 (fl+ (fl* W_00 Temp_0)
(fl* W_01 Temp_1)))
(a_J3 (fl- (fl* W_00 Temp_1)
(fl* W_01 Temp_0)))
(a_J6 (fl+ (fl* W_02 Temp_2)
(fl* W_03 Temp_3)))
(a_J7 (fl- (fl* W_02 Temp_3)
(fl* W_03 Temp_2))))
(let ((Temp_0 (fl- a_J0 a_J4))
(Temp_1 (fl- a_J1 a_J5))
(Temp_2 (fl- a_J2 a_J6))
(Temp_3 (fl- a_J3 a_J7)))
(let ((a_J0 (fl+ a_J0 a_J4))
(a_J1 (fl+ a_J1 a_J5))
(a_J2 (fl+ a_J2 a_J6))
(a_J3 (fl+ a_J3 a_J7))
(a_J4 (fl+ (fl* W_0 Temp_0)
(fl* W_1 Temp_1)))
(a_J5 (fl- (fl* W_0 Temp_1)
(fl* W_1 Temp_0)))
(a_J6 (fl+ (fl* W_0 Temp_2)
(fl* W_1 Temp_3)))
(a_J7 (fl- (fl* W_0 Temp_3)
(fl* W_1 Temp_2))))
(f64vector-set! a J0 a_J0)
(f64vector-set! a J1 a_J1)
(f64vector-set! a J2 a_J2)
(f64vector-set! a J3 a_J3)
(f64vector-set! a J4 a_J4)
(f64vector-set! a J5 a_J5)
(f64vector-set! a J6 a_J6)
(f64vector-set! a J7 a_J7)
(J-loop (fx+ J0 2)))))))))
(inner-loop (fx+ K 1)
(fx+ JFirst (fx* SizeOfGroup 4)))))))))
(define (recursive-bit M N K SizeOfGroup)
(if (fx<= 2 SizeOfGroup)
(begin
(if (fx< 2048 (fx- N M))
(let ((new-size (fxarithmetic-shift-right (fx- N M) 2)))
(recursive-bit M
(fx+ M new-size)
(fx* K 4)
(fxarithmetic-shift-right SizeOfGroup 2))
(recursive-bit (fx+ M new-size)
(fx+ M (fx* new-size 2))
(fx+ (fx* K 4) 1)
(fxarithmetic-shift-right SizeOfGroup 2))
(recursive-bit (fx+ M (fx* new-size 2))
(fx+ M (fx* new-size 3))
(fx+ (fx* K 4) 2)
(fxarithmetic-shift-right SizeOfGroup 2))
(recursive-bit (fx+ M (fx* new-size 3))
N
(fx+ (fx* K 4) 3)
(fxarithmetic-shift-right SizeOfGroup 2)))
(recursive-bit M
N
(fx* K 4)
(fxarithmetic-shift-right SizeOfGroup 2)))
(main-loop M N K SizeOfGroup))))
(define (radix-2-pass a)
(let ((SizeOfGroup
(fxarithmetic-shift-right (f64vector-length a) 1)))
(let loop ((J0 0))
(if (fx< J0 SizeOfGroup)
(let ((J0 J0)
(J2 (fx+ J0 SizeOfGroup)))
(let ((J1 (fx+ J0 1))
(J3 (fx+ J2 1)))
(let ((a_J0 (f64vector-ref a J0))
(a_J1 (f64vector-ref a J1))
(a_J2 (f64vector-ref a J2))
(a_J3 (f64vector-ref a J3)))
(let ((a_J0 (fl+ a_J0 a_J2))
(a_J1 (fl+ a_J1 a_J3))
(a_J2 (fl- a_J0 a_J2))
(a_J3 (fl- a_J1 a_J3)))
(f64vector-set! a J0 a_J0)
(f64vector-set! a J1 a_J1)
(f64vector-set! a J2 a_J2)
(f64vector-set! a J3 a_J3)
(loop (fx+ J0 2))))))))))
(let* ((n (f64vector-length a))
(log_n (two^p>=m n)))
(if (fxodd? log_n)
(recursive-bit 0 n 0 (fxarithmetic-shift-right n 2))
(let ((n/2 (fxarithmetic-shift-right n 1))
(n/8 (fxarithmetic-shift-right n 3)))
(recursive-bit 0 n/2 0 n/8)
(recursive-bit n/2 n 1 n/8)
(radix-2-pass a))))))
(define (two^p>=m m)
returns smallest p , assumes fixnum m > = 0
(do ((p 0 (fx+ p 1))
(two^p 1 (fx* two^p 2)))
((fx<= m two^p) p)))
(define (test iters n)
(let ((two^n
(expt 2 n))
(table
(make-w (fx- n 1))))
( display ( fx * two^n 2))(newline )
(let ((a
(make-f64vector (fx* two^n 2) 0.)))
(do ((i 0 (fx+ i 1)))
((fx= i iters)
)
(direct-fft-recursive-4 a table)
(inverse-fft-recursive-4 a table)))))
(cond-expand
(chicken
(let-optionals (command-line-arguments)
((iters "2000")
(n "11"))
(test (string->number iters) (string->number n))))
(else (test 2000 11)))
|
6b303887b19a1ba15331d35d8eebaecdfce6a01b765b16cc70ba2ee7530a2037 | ocsigen/eliom | ocamlbuild_eliom.ml | open Ocamlbuild_plugin
module Pack = Ocamlbuild_pack
module type ELIOM = sig
val server_dir : Ocamlbuild_plugin.Pathname.t
val type_dir : Ocamlbuild_plugin.Pathname.t
val client_dir : Ocamlbuild_plugin.Pathname.t
end
module type INTERNALS = sig
val with_eliom_ppx : ([< `Client | `Server] -> string) option
val with_package : string -> string
end
module MakeIntern (I : INTERNALS) (Eliom : ELIOM) = struct
(* WARNING: if you change this, also change inferred_type_prefix in
ppx/ppx_eliom_utils.ml and tools/eliomc.ml *)
let inferred_type_prefix = "eliom_inferred_type_"
let sed_rule name ~dep ~prod scripts =
rule name ~dep ~prod (fun env _build ->
let dep = env dep and prod = env prod in
let script_args =
List.map (fun script -> S [A "-e"; A script]) scripts
in
Cmd (S [A "sed"; S script_args; P dep; Sh ">"; Px prod]))
let copy_with_header src prod =
let contents = Pathname.read src in
we need an empty line to keep the comments : weird
let header = "# 0 \"" ^ src ^ "\"\n\n" in
Pack.Shell.mkdir_p (Filename.dirname prod);
Echo ([header; contents], prod)
let copy_rule_with_header f name ?(deps = []) src prod =
rule name ~deps:(src :: deps) ~prod (fun env _ ->
let prod = env prod in
let src = env src in
f env (Pathname.dirname prod) (Pathname.basename prod) src prod;
copy_with_header src prod)
let syntaxes_p4 = [I.with_package "eliom.syntax.predef"]
let no_extra_syntaxes = "no_extra_syntaxes"
let eliom_ppx = "eliom_ppx"
let use_ppx src = Tags.mem eliom_ppx (tags_of_pathname src)
let tag_file_inside_rule file tags =
tag_file file tags;
Pack.Param_tags.partial_init "Eliom plugin" (Tags.of_list tags)
let use_all_syntaxes src =
if Filename.check_suffix src ".eliomi"
then false
else not (Tags.mem no_extra_syntaxes (tags_of_pathname src))
let get_eliom_syntax_ppx = function
| `Client -> "eliom.ppx.client"
| `Server -> "eliom.ppx.server"
| `Type -> "eliom.ppx.type"
let get_syntaxes_p4 _ _eliom_syntax src =
let s = if use_all_syntaxes src then syntaxes_p4 else [] in
let s = if s = [] then [] else "thread" :: "syntax(camlp4o)" :: s in
s @ Tags.elements (tags_of_pathname src)
let get_syntaxes_ppx with_eliom_syntax eliom_syntax _src =
if with_eliom_syntax
then [I.with_package (get_eliom_syntax_ppx eliom_syntax)]
else []
let get_syntaxes with_eliom_syntax eliom_syntax src =
(if use_ppx src then get_syntaxes_ppx else get_syntaxes_p4)
with_eliom_syntax eliom_syntax src
(* A variant of flag_and_dep which recurse into Quote. *)
let dflag tags x =
let rec aux = function
| Quote x -> aux x
| S xs -> List.iter aux xs
| P path -> dep tags [path]
| N | A _ | Sh _ | V _ | T _ | Px _ -> ()
in
aux x; flag tags x
let flag_infer ~file ~name ~path eliom_syntax =
let type_inferred =
Pathname.concat
(Pathname.concat path Eliom.type_dir)
(Pathname.update_extension "inferred_gen.mli" name)
in
let ppflags, ppflags_notype =
if use_ppx file
then
match I.with_eliom_ppx with
| None ->
let pkg = get_eliom_syntax_ppx eliom_syntax in
( S [A "-ppxopt"; A (pkg ^ ",-type," ^ type_inferred)]
, S [A "-ppxopt"; A (pkg ^ ",-notype")] )
| Some f ->
let ppx = f eliom_syntax in
( S [A "-ppx"; Quote (S [P ppx; A "-type"; P type_inferred])]
, S [A "-ppx"; Quote (S [P ppx; A "-notype"])] )
else
( S [A "-ppopt"; A "-type"; A "-ppopt"; P type_inferred]
, S [A "-ppopt"; A "-notype"] )
in
let file_tag = "file:" ^ file in
dflag ["ocaml"; "ocamldep"; file_tag] ppflags;
dflag ["ocaml"; "compile"; file_tag] ppflags;
dflag ["ocaml"; "infer_interface"; file_tag] ppflags;
dflag ["ocaml"; "doc"; file_tag] ppflags_notype
let ocamlfind_query pkg =
let cmd = Printf.sprintf "ocamlfind query %s" (Filename.quote pkg) in
Ocamlbuild_pack.My_unix.run_and_open cmd input_line
let copy_rule_server ?(eliom = true) =
copy_rule_with_header (fun env dir name src file ->
let path = env "%(path)" in
tag_file_inside_rule file
(I.with_package "eliom.server" :: get_syntaxes eliom `Server src);
if eliom then flag_infer ~file ~name ~path `Server;
dflag
["ocaml"; "compile"; "file:" ^ file]
(S [A "-I"; A (ocamlfind_query "js_of_ocaml")]);
Pathname.define_context dir [path];
Pathname.define_context path [dir])
let copy_rule_client ?(eliom = true) =
copy_rule_with_header (fun env dir name src file ->
let path = env "%(path)" in
tag_file_inside_rule file
(I.with_package "eliom.client" :: get_syntaxes eliom `Client src);
if eliom then flag_infer ~file ~name ~path `Client;
Pathname.define_context dir [path])
let copy_rule_type =
copy_rule_with_header (fun env dir name src file ->
let path = env "%(path)" in
let server_dir = Pathname.concat path Eliom.server_dir in
let server_file = Pathname.concat server_dir name in
tag_file_inside_rule file
((I.with_package "eliom.server" :: get_syntaxes true `Type src)
@ Tags.elements (tags_of_pathname server_file));
Pathname.define_context dir [path; server_dir])
let init = function
| After_rules ->
mark_tag_used no_extra_syntaxes;
mark_tag_used eliom_ppx;
sed_rule ".inferred.mli -> .inferred_gen.mli"
~dep:"%(path)/%(file).inferred.mli"
~prod:"%(path)/%(file).inferred_gen.mli"
[ "s$/[1-9][0-9]*$$g"
; "s/_\\[\\([<>]\\)/[\\1/g"
; Printf.sprintf "s/'\\(_[a-z0-9_]*\\)/'%s\\1/g" inferred_type_prefix
];
eliom files
copy_rule_server "*.eliom -> **/_server/*.ml"
~deps:["%(path)/" ^ Eliom.type_dir ^ "/%(file).inferred_gen.mli"]
"%(path)/%(file).eliom"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).ml");
copy_rule_server "*.eliomi -> **/_server/*.mli" "%(path)/%(file).eliomi"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).mli");
copy_rule_type "*.eliom -> **/_type/*.ml" "%(path)/%(file).eliom"
("%(path)/" ^ Eliom.type_dir ^ "/%(file:<*>).ml");
copy_rule_client "*.eliom -> **/_client/*.ml"
~deps:["%(path)/" ^ Eliom.type_dir ^ "/%(file).inferred_gen.mli"]
"%(path)/%(file).eliom"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).ml");
copy_rule_client "*.eliomi -> **/_client/*.mli" "%(path)/%(file).eliomi"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).mli");
copy_rule_server "*.eliom -> _server/*.ml"
~deps:[Eliom.type_dir ^ "/%(file).inferred_gen.mli"]
"%(file).eliom"
(Eliom.server_dir ^ "/%(file:<*>).ml");
copy_rule_server "*.eliomi -> _server/*.mli" "%(file).eliomi"
(Eliom.server_dir ^ "/%(file:<*>).mli");
copy_rule_type "*.eliom -> _type/*.ml" "%(file).eliom"
(Eliom.type_dir ^ "/%(file:<*>).ml");
copy_rule_client "*.eliom -> _client/*.ml"
~deps:[Eliom.type_dir ^ "/%(file).inferred_gen.mli"]
"%(file).eliom"
(Eliom.client_dir ^ "/%(file:<*>).ml");
copy_rule_client "*.eliomi -> _client/*.mli" "%(file).eliomi"
(Eliom.client_dir ^ "/%(file:<*>).mli");
(* copy {shared,client,server}.ml rules *)
copy_rule_client ~eliom:false "client.ml -> .ml"
"%(path)/%(file).client.ml"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).ml");
copy_rule_client ~eliom:false "client.mli -> .mli"
"%(path)/%(file).client.mli"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).mli");
copy_rule_client ~eliom:false "shared.ml -> client.ml"
"%(path)/%(file).shared.ml"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).ml");
copy_rule_client ~eliom:false "shared -> client.mli"
"%(path)/%(file).shared.mli"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).mli");
copy_rule_server ~eliom:false "server.ml -> .ml"
"%(path)/%(file).server.ml"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).ml");
copy_rule_server ~eliom:false "server.mli -> .mli"
"%(path)/%(file).server.mli"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).mli");
copy_rule_server ~eliom:false "shared.ml -> server.ml"
"%(path)/%(file).shared.ml"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).ml");
copy_rule_server ~eliom:false "shared.ml -> server.mli"
"%(path)/%(file).shared.mli"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).mli");
Include C stubs in client.cma
flag ["link"; "eliomstubs"]
(S [A "-dllib"; A "-leliom_stubs"; A "-cclib"; A "-leliom_stubs"]);
dep ["link"; "eliomstubs"] ["src/lib/client/libeliom_stubs.a"]
| _ -> ()
let dispatcher ?oasis_executables hook =
Ocamlbuild_js_of_ocaml.dispatcher ?oasis_executables hook;
init hook
end
module Make (Eliom : ELIOM) =
MakeIntern
(struct
let with_eliom_ppx = None
let with_package = Printf.sprintf "package(%s)"
end)
(Eliom)
| null | https://raw.githubusercontent.com/ocsigen/eliom/c3e0eea5bef02e0af3942b6d27585add95d01d6c/src/ocamlbuild/ocamlbuild_eliom.ml | ocaml | WARNING: if you change this, also change inferred_type_prefix in
ppx/ppx_eliom_utils.ml and tools/eliomc.ml
A variant of flag_and_dep which recurse into Quote.
copy {shared,client,server}.ml rules | open Ocamlbuild_plugin
module Pack = Ocamlbuild_pack
module type ELIOM = sig
val server_dir : Ocamlbuild_plugin.Pathname.t
val type_dir : Ocamlbuild_plugin.Pathname.t
val client_dir : Ocamlbuild_plugin.Pathname.t
end
module type INTERNALS = sig
val with_eliom_ppx : ([< `Client | `Server] -> string) option
val with_package : string -> string
end
module MakeIntern (I : INTERNALS) (Eliom : ELIOM) = struct
let inferred_type_prefix = "eliom_inferred_type_"
let sed_rule name ~dep ~prod scripts =
rule name ~dep ~prod (fun env _build ->
let dep = env dep and prod = env prod in
let script_args =
List.map (fun script -> S [A "-e"; A script]) scripts
in
Cmd (S [A "sed"; S script_args; P dep; Sh ">"; Px prod]))
let copy_with_header src prod =
let contents = Pathname.read src in
we need an empty line to keep the comments : weird
let header = "# 0 \"" ^ src ^ "\"\n\n" in
Pack.Shell.mkdir_p (Filename.dirname prod);
Echo ([header; contents], prod)
let copy_rule_with_header f name ?(deps = []) src prod =
rule name ~deps:(src :: deps) ~prod (fun env _ ->
let prod = env prod in
let src = env src in
f env (Pathname.dirname prod) (Pathname.basename prod) src prod;
copy_with_header src prod)
let syntaxes_p4 = [I.with_package "eliom.syntax.predef"]
let no_extra_syntaxes = "no_extra_syntaxes"
let eliom_ppx = "eliom_ppx"
let use_ppx src = Tags.mem eliom_ppx (tags_of_pathname src)
let tag_file_inside_rule file tags =
tag_file file tags;
Pack.Param_tags.partial_init "Eliom plugin" (Tags.of_list tags)
let use_all_syntaxes src =
if Filename.check_suffix src ".eliomi"
then false
else not (Tags.mem no_extra_syntaxes (tags_of_pathname src))
let get_eliom_syntax_ppx = function
| `Client -> "eliom.ppx.client"
| `Server -> "eliom.ppx.server"
| `Type -> "eliom.ppx.type"
let get_syntaxes_p4 _ _eliom_syntax src =
let s = if use_all_syntaxes src then syntaxes_p4 else [] in
let s = if s = [] then [] else "thread" :: "syntax(camlp4o)" :: s in
s @ Tags.elements (tags_of_pathname src)
let get_syntaxes_ppx with_eliom_syntax eliom_syntax _src =
if with_eliom_syntax
then [I.with_package (get_eliom_syntax_ppx eliom_syntax)]
else []
let get_syntaxes with_eliom_syntax eliom_syntax src =
(if use_ppx src then get_syntaxes_ppx else get_syntaxes_p4)
with_eliom_syntax eliom_syntax src
let dflag tags x =
let rec aux = function
| Quote x -> aux x
| S xs -> List.iter aux xs
| P path -> dep tags [path]
| N | A _ | Sh _ | V _ | T _ | Px _ -> ()
in
aux x; flag tags x
let flag_infer ~file ~name ~path eliom_syntax =
let type_inferred =
Pathname.concat
(Pathname.concat path Eliom.type_dir)
(Pathname.update_extension "inferred_gen.mli" name)
in
let ppflags, ppflags_notype =
if use_ppx file
then
match I.with_eliom_ppx with
| None ->
let pkg = get_eliom_syntax_ppx eliom_syntax in
( S [A "-ppxopt"; A (pkg ^ ",-type," ^ type_inferred)]
, S [A "-ppxopt"; A (pkg ^ ",-notype")] )
| Some f ->
let ppx = f eliom_syntax in
( S [A "-ppx"; Quote (S [P ppx; A "-type"; P type_inferred])]
, S [A "-ppx"; Quote (S [P ppx; A "-notype"])] )
else
( S [A "-ppopt"; A "-type"; A "-ppopt"; P type_inferred]
, S [A "-ppopt"; A "-notype"] )
in
let file_tag = "file:" ^ file in
dflag ["ocaml"; "ocamldep"; file_tag] ppflags;
dflag ["ocaml"; "compile"; file_tag] ppflags;
dflag ["ocaml"; "infer_interface"; file_tag] ppflags;
dflag ["ocaml"; "doc"; file_tag] ppflags_notype
let ocamlfind_query pkg =
let cmd = Printf.sprintf "ocamlfind query %s" (Filename.quote pkg) in
Ocamlbuild_pack.My_unix.run_and_open cmd input_line
let copy_rule_server ?(eliom = true) =
copy_rule_with_header (fun env dir name src file ->
let path = env "%(path)" in
tag_file_inside_rule file
(I.with_package "eliom.server" :: get_syntaxes eliom `Server src);
if eliom then flag_infer ~file ~name ~path `Server;
dflag
["ocaml"; "compile"; "file:" ^ file]
(S [A "-I"; A (ocamlfind_query "js_of_ocaml")]);
Pathname.define_context dir [path];
Pathname.define_context path [dir])
let copy_rule_client ?(eliom = true) =
copy_rule_with_header (fun env dir name src file ->
let path = env "%(path)" in
tag_file_inside_rule file
(I.with_package "eliom.client" :: get_syntaxes eliom `Client src);
if eliom then flag_infer ~file ~name ~path `Client;
Pathname.define_context dir [path])
let copy_rule_type =
copy_rule_with_header (fun env dir name src file ->
let path = env "%(path)" in
let server_dir = Pathname.concat path Eliom.server_dir in
let server_file = Pathname.concat server_dir name in
tag_file_inside_rule file
((I.with_package "eliom.server" :: get_syntaxes true `Type src)
@ Tags.elements (tags_of_pathname server_file));
Pathname.define_context dir [path; server_dir])
let init = function
| After_rules ->
mark_tag_used no_extra_syntaxes;
mark_tag_used eliom_ppx;
sed_rule ".inferred.mli -> .inferred_gen.mli"
~dep:"%(path)/%(file).inferred.mli"
~prod:"%(path)/%(file).inferred_gen.mli"
[ "s$/[1-9][0-9]*$$g"
; "s/_\\[\\([<>]\\)/[\\1/g"
; Printf.sprintf "s/'\\(_[a-z0-9_]*\\)/'%s\\1/g" inferred_type_prefix
];
eliom files
copy_rule_server "*.eliom -> **/_server/*.ml"
~deps:["%(path)/" ^ Eliom.type_dir ^ "/%(file).inferred_gen.mli"]
"%(path)/%(file).eliom"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).ml");
copy_rule_server "*.eliomi -> **/_server/*.mli" "%(path)/%(file).eliomi"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).mli");
copy_rule_type "*.eliom -> **/_type/*.ml" "%(path)/%(file).eliom"
("%(path)/" ^ Eliom.type_dir ^ "/%(file:<*>).ml");
copy_rule_client "*.eliom -> **/_client/*.ml"
~deps:["%(path)/" ^ Eliom.type_dir ^ "/%(file).inferred_gen.mli"]
"%(path)/%(file).eliom"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).ml");
copy_rule_client "*.eliomi -> **/_client/*.mli" "%(path)/%(file).eliomi"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).mli");
copy_rule_server "*.eliom -> _server/*.ml"
~deps:[Eliom.type_dir ^ "/%(file).inferred_gen.mli"]
"%(file).eliom"
(Eliom.server_dir ^ "/%(file:<*>).ml");
copy_rule_server "*.eliomi -> _server/*.mli" "%(file).eliomi"
(Eliom.server_dir ^ "/%(file:<*>).mli");
copy_rule_type "*.eliom -> _type/*.ml" "%(file).eliom"
(Eliom.type_dir ^ "/%(file:<*>).ml");
copy_rule_client "*.eliom -> _client/*.ml"
~deps:[Eliom.type_dir ^ "/%(file).inferred_gen.mli"]
"%(file).eliom"
(Eliom.client_dir ^ "/%(file:<*>).ml");
copy_rule_client "*.eliomi -> _client/*.mli" "%(file).eliomi"
(Eliom.client_dir ^ "/%(file:<*>).mli");
copy_rule_client ~eliom:false "client.ml -> .ml"
"%(path)/%(file).client.ml"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).ml");
copy_rule_client ~eliom:false "client.mli -> .mli"
"%(path)/%(file).client.mli"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).mli");
copy_rule_client ~eliom:false "shared.ml -> client.ml"
"%(path)/%(file).shared.ml"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).ml");
copy_rule_client ~eliom:false "shared -> client.mli"
"%(path)/%(file).shared.mli"
("%(path)/" ^ Eliom.client_dir ^ "/%(file:<*>).mli");
copy_rule_server ~eliom:false "server.ml -> .ml"
"%(path)/%(file).server.ml"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).ml");
copy_rule_server ~eliom:false "server.mli -> .mli"
"%(path)/%(file).server.mli"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).mli");
copy_rule_server ~eliom:false "shared.ml -> server.ml"
"%(path)/%(file).shared.ml"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).ml");
copy_rule_server ~eliom:false "shared.ml -> server.mli"
"%(path)/%(file).shared.mli"
("%(path)/" ^ Eliom.server_dir ^ "/%(file:<*>).mli");
Include C stubs in client.cma
flag ["link"; "eliomstubs"]
(S [A "-dllib"; A "-leliom_stubs"; A "-cclib"; A "-leliom_stubs"]);
dep ["link"; "eliomstubs"] ["src/lib/client/libeliom_stubs.a"]
| _ -> ()
let dispatcher ?oasis_executables hook =
Ocamlbuild_js_of_ocaml.dispatcher ?oasis_executables hook;
init hook
end
module Make (Eliom : ELIOM) =
MakeIntern
(struct
let with_eliom_ppx = None
let with_package = Printf.sprintf "package(%s)"
end)
(Eliom)
|
c756432b40ebce9eb8157d36caa00c9ef3d2e96dc75700d1cb38838e83f8156b | hazel-el/hazel | BCP47.hs | {-# LANGUAGE OverloadedStrings #-}
|
Module : . . OWL.BCP47
Copyright : ( c ) 2013 , 2014
License : GPL-3
Maintainer : < >
Stability : experimental
Portability : unknown
for the parts of BCP 47 referenced in the OWL2 Functional - Style grammar .
Module : Hazel.Parser.OWL.BCP47
Copyright : (c) 2013, 2014 Maximilian Marx
License : GPL-3
Maintainer : Maximilian Marx <>
Stability : experimental
Portability : unknown
Parser for the parts of BCP 47 referenced in the OWL2 Functional-Style grammar.
-}
module Hazel.Parser.OWL.BCP47 ( LanguageTag (..)
, langTag
) where
import Prelude hiding (takeWhile)
import Control.Applicative ( (<|>)
, (<$>)
, (<*>)
)
import Control.Monad (liftM)
import Data.Monoid (mappend)
import Data.Text ( Text
, cons
, pack
, unpack
)
import qualified Data.Text as T
import Data.Attoparsec.Text
import Hazel.Parser.Utils (countFromTo)
import Hazel.Parser.OWL.RFC5234 (alpha)
| BCP 47 Language Tag
data LanguageTag = LanguageTag { languageTagLanguage :: Text -- ^ language
, languageTagScript :: Maybe Text -- ^ script
, languageTagRegion :: Maybe Text -- ^ region
, languageTagVariant :: [Text] -- ^ variants
, languageTagExtension :: [Text] -- ^ extensions
, languageTagPrivateUse :: Maybe Text -- ^ reserved for private use
}
instance Show LanguageTag where
show lt = concatMap unpack [ languageTagLanguage lt
, fromMaybe $ languageTagScript lt
, fromMaybe $ languageTagRegion lt
, fromList $ languageTagVariant lt
, fromList $ languageTagExtension lt
, fromMaybe $ languageTagPrivateUse lt
]
where fromMaybe = maybe "" prefix
fromList = T.concat . map prefix
prefix = cons '-'
alphanum :: Parser Char
alphanum = satisfy $ inClass "a-zA-Z0-9"
singleton :: Parser Char
singleton = satisfy $ inClass $ concat [ "0-9"
, "A-W"
, "Y-Z"
, "a-w"
, "y-z"
]
| Parser for BCP 47 Language Tags
langTag :: Parser LanguageTag
langTag = LanguageTag <$> language
<*> option Nothing (liftM Just script)
<*> option Nothing (liftM Just region)
<*> many' variant
<*> many' extension
<*> option Nothing (liftM Just privateUse)
language :: Parser Text
language = mappend <$> liftM pack (countFromTo 2 3 alpha) <*> option "" extlang
<|> liftM pack (count 4 alpha)
<|> liftM pack (countFromTo 5 8 alpha)
extlang :: Parser Text
extlang = mappend <$> liftM pack (count 3 alpha) <*> exts
where exts = T.concat <$> countFromTo 0 2 (pack <$> ((:) <$> char '-' <*> count 3 alpha))
script :: Parser Text
script = liftM pack $ (:) <$> char '-' <*> count 4 alpha
region :: Parser Text
region = liftM pack $ (:) <$> char '-' <*> count 2 alpha <|> count 3 digit
variant :: Parser Text
variant = pack <$> (countFromTo 5 8 alphanum <|> (:) <$> digit
<*> count 3 alphanum)
extension :: Parser Text
extension = cons <$> singleton <*> extensions
where extensions = T.concat <$> many1 (liftM pack ((:) <$> char '-'
<*> countFromTo 2 8 alphanum))
privateUse :: Parser Text
privateUse = cons <$> char 'x' <*> privates
where privates = T.concat <$> many1 (liftM pack ((:) <$> char '-'
<*> countFromTo 1 8 alphanum))
| null | https://raw.githubusercontent.com/hazel-el/hazel/0f5d7e0c03d5eebd16f103df8cc05c5408c152f1/Hazel/Parser/OWL/BCP47.hs | haskell | # LANGUAGE OverloadedStrings #
^ language
^ script
^ region
^ variants
^ extensions
^ reserved for private use |
|
Module : . . OWL.BCP47
Copyright : ( c ) 2013 , 2014
License : GPL-3
Maintainer : < >
Stability : experimental
Portability : unknown
for the parts of BCP 47 referenced in the OWL2 Functional - Style grammar .
Module : Hazel.Parser.OWL.BCP47
Copyright : (c) 2013, 2014 Maximilian Marx
License : GPL-3
Maintainer : Maximilian Marx <>
Stability : experimental
Portability : unknown
Parser for the parts of BCP 47 referenced in the OWL2 Functional-Style grammar.
-}
module Hazel.Parser.OWL.BCP47 ( LanguageTag (..)
, langTag
) where
import Prelude hiding (takeWhile)
import Control.Applicative ( (<|>)
, (<$>)
, (<*>)
)
import Control.Monad (liftM)
import Data.Monoid (mappend)
import Data.Text ( Text
, cons
, pack
, unpack
)
import qualified Data.Text as T
import Data.Attoparsec.Text
import Hazel.Parser.Utils (countFromTo)
import Hazel.Parser.OWL.RFC5234 (alpha)
| BCP 47 Language Tag
}
instance Show LanguageTag where
show lt = concatMap unpack [ languageTagLanguage lt
, fromMaybe $ languageTagScript lt
, fromMaybe $ languageTagRegion lt
, fromList $ languageTagVariant lt
, fromList $ languageTagExtension lt
, fromMaybe $ languageTagPrivateUse lt
]
where fromMaybe = maybe "" prefix
fromList = T.concat . map prefix
prefix = cons '-'
alphanum :: Parser Char
alphanum = satisfy $ inClass "a-zA-Z0-9"
singleton :: Parser Char
singleton = satisfy $ inClass $ concat [ "0-9"
, "A-W"
, "Y-Z"
, "a-w"
, "y-z"
]
| Parser for BCP 47 Language Tags
langTag :: Parser LanguageTag
langTag = LanguageTag <$> language
<*> option Nothing (liftM Just script)
<*> option Nothing (liftM Just region)
<*> many' variant
<*> many' extension
<*> option Nothing (liftM Just privateUse)
language :: Parser Text
language = mappend <$> liftM pack (countFromTo 2 3 alpha) <*> option "" extlang
<|> liftM pack (count 4 alpha)
<|> liftM pack (countFromTo 5 8 alpha)
extlang :: Parser Text
extlang = mappend <$> liftM pack (count 3 alpha) <*> exts
where exts = T.concat <$> countFromTo 0 2 (pack <$> ((:) <$> char '-' <*> count 3 alpha))
script :: Parser Text
script = liftM pack $ (:) <$> char '-' <*> count 4 alpha
region :: Parser Text
region = liftM pack $ (:) <$> char '-' <*> count 2 alpha <|> count 3 digit
variant :: Parser Text
variant = pack <$> (countFromTo 5 8 alphanum <|> (:) <$> digit
<*> count 3 alphanum)
extension :: Parser Text
extension = cons <$> singleton <*> extensions
where extensions = T.concat <$> many1 (liftM pack ((:) <$> char '-'
<*> countFromTo 2 8 alphanum))
privateUse :: Parser Text
privateUse = cons <$> char 'x' <*> privates
where privates = T.concat <$> many1 (liftM pack ((:) <$> char '-'
<*> countFromTo 1 8 alphanum))
|
67be8fafcf35492ef78444c0383a95ba3392e5abf1ea5ab91f629204287dcbf1 | AllAlgorithms/racket | factorial.rkt | #lang racket
(provide factorial)
(define (factorial n)
(let loop ([acc 1]
[n n])
(if (zero? n)
acc
(loop (* acc n) (sub1 n)))))
| null | https://raw.githubusercontent.com/AllAlgorithms/racket/46b2a4b963b0536351273ee8068f58fed4884dde/algorithms/math/factorial.rkt | racket | #lang racket
(provide factorial)
(define (factorial n)
(let loop ([acc 1]
[n n])
(if (zero? n)
acc
(loop (* acc n) (sub1 n)))))
| |
8d659c26dd049124ea5964a8ea41afde802c0c78561b423f10cca2d9320d5dc3 | emqx/emqx-bridge-mqtt | emqx_bridge_rpc.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2020 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
@doc This module implements EMQX Bridge transport layer based on gen_rpc .
-module(emqx_bridge_rpc).
-behaviour(emqx_bridge_connect).
%% behaviour callbacks
-export([ start/1
, send/2
, stop/1
]).
%% Internal exports
-export([ handle_send/1
, heartbeat/2
]).
-type ack_ref() :: emqx_bridge_worker:ack_ref().
-type batch() :: emqx_bridge_worker:batch().
-define(HEARTBEAT_INTERVAL, timer:seconds(1)).
-define(RPC, emqx_rpc).
%% @doc Callback for `emqx_bridge_connect' behaviour.
%% Verifies that the remote node answers an RPC probe, then spawns a
%% linked heartbeat process that keeps probing it. Returns the
%% connection state map on success, or the poke error unchanged.
start(#{address := Remote}) ->
    case poke(Remote) of
        ok ->
            %% The heartbeat process reports back to self() and is
            %% linked, so it dies together with the bridge worker.
            Pid = proc_lib:spawn_link(?MODULE, heartbeat, [self(), Remote]),
            {ok, #{client_pid => Pid, address => Remote}};
        Error ->
            Error
    end.
%% @doc Stop the heartbeat process, waiting up to one second for it to
%% terminate before falling back to a brutal kill.
%%
%% The process is unlinked first so its exit (normal or killed) cannot
%% take the caller down. A monitor observes termination; in the kill
%% branch the monitor is released with the flush option so no stray
%% {'DOWN', ...} message is left behind in the caller's mailbox.
stop(#{client_pid := Pid}) when is_pid(Pid) ->
    Ref = erlang:monitor(process, Pid),
    unlink(Pid),
    Pid ! stop,
    receive
        {'DOWN', Ref, process, Pid, _Reason} ->
            ok
    after
        1000 ->
            %% Heartbeat did not exit in time: kill it and drop the
            %% monitor (flush discards the now-pending 'DOWN').
            exit(Pid, kill),
            erlang:demonitor(Ref, [flush])
    end,
    ok.
%% @doc Callback for `emqx_bridge_connect' behaviour.
%% Deliver a batch to the remote node with a synchronous RPC to
%% handle_send/1. On success a fresh reference is created and the
%% acknowledgement {batch_ack, Ref} is posted straight to the caller's
%% own mailbox, since the RPC round-trip already confirmed delivery.
%% NOTE(review): the spec's first argument type looks stale -- the
%% function is called with the connection map built by start/1, not a
%% bare node(); confirm against the emqx_bridge_connect behaviour.
-spec send(node(), batch()) -> {ok, ack_ref()} | {error, any()}.
send(#{address := Remote}, Batch) ->
    case ?RPC:call(Remote, ?MODULE, handle_send, [Batch]) of
        ok ->
            Ref = make_ref(),
            self() ! {batch_ack, Ref},
            {ok, Ref};
        {badrpc, Reason} -> {error, Reason}
    end.
%% @doc Receiver-side half of send/2: publish every message of the
%% batch to the local broker, in order. Always returns ok.
-spec handle_send(batch()) -> ok.
handle_send(Batch) ->
    _ = [emqx_broker:publish(Msg) || Msg <- Batch],
    ok.
%% @hidden Heartbeat loop, spawned by start/1.
%% Once per ?HEARTBEAT_INTERVAL, probe the remote node and keep
%% looping while it answers. A 'stop' message ends the loop; a failed
%% poke notifies the parent with {disconnected, self(), Reason} and
%% then exits normally.
heartbeat(Parent, RemoteNode) ->
    Interval = ?HEARTBEAT_INTERVAL,
    receive
        stop -> exit(normal)
    after
        Interval ->
            case poke(RemoteNode) of
                ok ->
                    %% Fully-qualified self-call so a hot code upgrade
                    %% moves the loop onto the newest module version.
                    ?MODULE:heartbeat(Parent, RemoteNode);
                {error, Reason} ->
                    Parent ! {disconnected, self(), Reason},
                    exit(normal)
            end
    end.
%% Probe a node over RPC by asking it to evaluate erlang:node().
%% A healthy node answers with its own name (matched against the bound
%% Node); an RPC failure surfaces as {error, Reason} from the badrpc
%% tuple.
poke(Node) ->
    case ?RPC:call(Node, erlang, node, []) of
        Node -> ok;
        {badrpc, Reason} -> {error, Reason}
    end.
| null | https://raw.githubusercontent.com/emqx/emqx-bridge-mqtt/f0107526efbfc38fab7727605ffd81e817ea3013/src/emqx_bridge_rpc.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
behaviour callbacks
Internal exports
@doc Callback for `emqx_bridge_connect' behaviour
@doc Handle send on receiver side.
@hidden Heartbeat loop | Copyright ( c ) 2020 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@doc This module implements EMQX Bridge transport layer based on gen_rpc .
-module(emqx_bridge_rpc).
-behaviour(emqx_bridge_connect).
-export([ start/1
, send/2
, stop/1
]).
-export([ handle_send/1
, heartbeat/2
]).
-type ack_ref() :: emqx_bridge_worker:ack_ref().
-type batch() :: emqx_bridge_worker:batch().
-define(HEARTBEAT_INTERVAL, timer:seconds(1)).
-define(RPC, emqx_rpc).
start(#{address := Remote}) ->
case poke(Remote) of
ok ->
Pid = proc_lib:spawn_link(?MODULE, heartbeat, [self(), Remote]),
{ok, #{client_pid => Pid, address => Remote}};
Error ->
Error
end.
stop(#{client_pid := Pid}) when is_pid(Pid) ->
Ref = erlang:monitor(process, Pid),
unlink(Pid),
Pid ! stop,
receive
{'DOWN', Ref, process, Pid, _Reason} ->
ok
after
1000 ->
exit(Pid, kill)
end,
ok.
-spec send(node(), batch()) -> {ok, ack_ref()} | {error, any()}.
send(#{address := Remote}, Batch) ->
case ?RPC:call(Remote, ?MODULE, handle_send, [Batch]) of
ok ->
Ref = make_ref(),
self() ! {batch_ack, Ref},
{ok, Ref};
{badrpc, Reason} -> {error, Reason}
end.
-spec handle_send(batch()) -> ok.
handle_send(Batch) ->
lists:foreach(fun(Msg) -> emqx_broker:publish(Msg) end, Batch).
heartbeat(Parent, RemoteNode) ->
Interval = ?HEARTBEAT_INTERVAL,
receive
stop -> exit(normal)
after
Interval ->
case poke(RemoteNode) of
ok ->
?MODULE:heartbeat(Parent, RemoteNode);
{error, Reason} ->
Parent ! {disconnected, self(), Reason},
exit(normal)
end
end.
poke(Node) ->
case ?RPC:call(Node, erlang, node, []) of
Node -> ok;
{badrpc, Reason} -> {error, Reason}
end.
|
0448cec4f0dddee871115a5c8f1f0b48c9a2105520927166e88398215b53cca6 | saltysystems/enet | enet_sync.erl | -module(enet_sync).
-include_lib("enet/include/enet.hrl").
-export([
start_host/2,
connect_from_full_local_host/3,
connect_to_full_remote_host/3,
connect_to_self/3,
connect/3,
disconnect/2,
stop_host/1,
send_unsequenced/2,
send_unreliable/2,
send_reliable/2,
get_host_port/1,
get_local_peer_pid/1,
get_local_channels/1,
get_remote_peer_pid/1,
get_remote_channels/1,
get_channel/2
]).
start_host(ConnectFun, Options) ->
enet:start_host(0, ConnectFun, Options).
connect_from_full_local_host(LocalHost, RemotePort, ChannelCount) ->
connect(LocalHost, RemotePort, ChannelCount).
connect_to_full_remote_host(LocalHost, RemotePort, ChannelCount) ->
connect(LocalHost, RemotePort, ChannelCount).
connect_to_self(LocalHost, RemotePort, ChannelCount) ->
connect(LocalHost, RemotePort, ChannelCount).
%% Synchronously connect LocalHost to a host listening on RemotePort
%% on loopback, then wait for both sides to report the connection in
%% this process's mailbox: the local peer's notification first, then
%% the remote peer's carrying the same connect_id (the shared CID
%% binding ties the two receives together). Returns
%% {LocalPeer, LocalChannels, RemotePeer, RemoteChannels} on success.
%% If the local notification never arrives, the half-open peer is torn
%% down and waited out of its gproc pool before returning
%% {error, local_timeout}; a missing remote notification yields
%% {error, remote_timeout}.
connect(LocalHost, RemotePort, ChannelCount) ->
    case enet:connect_peer(LocalHost, "127.0.0.1", RemotePort, ChannelCount) of
        {error, reached_peer_limit} ->
            {error, reached_peer_limit};
        {ok, LPeer} ->
            receive
                #{peer := LP, channels := LCs, connect_id := CID} ->
                    receive
                        #{peer := RP, channels := RCs, connect_id := CID} ->
                            {LP, LCs, RP, RCs}
                    after 1000 ->
                        {error, remote_timeout}
                    end
            after 2000 ->
                Pool = enet_peer:get_pool(LPeer),
                Name = enet_peer:get_name(LPeer),
                exit(LPeer, normal),
                wait_until_worker_has_left_pool(Pool, Name),
                {error, local_timeout}
            end
    end.
disconnect(LPid, RPid) ->
LPool = enet_peer:get_pool(LPid),
RPool = enet_peer:get_pool(RPid),
LName = enet_peer:get_name(LPid),
RName = enet_peer:get_name(RPid),
ok = enet:disconnect_peer(LPid),
receive
{enet, disconnected, local, LPid, ConnectID} ->
receive
{enet, disconnected, remote, RPid, ConnectID} ->
wait_until_worker_has_left_pool(LPool, LName),
wait_until_worker_has_left_pool(RPool, RName)
after 5000 ->
{error, remote_timeout}
end
after 5000 ->
{error, local_timeout}
end.
stop_host(Port) ->
RemoteConnectedPeers =
gproc:select([{{{p, l, remote_host_port}, '$1', Port}, [], ['$1']}]),
PeerMonitors = lists:map(
fun(Peer) ->
Pool = enet_peer:get_pool(Peer),
Name = enet_peer:get_name(Peer),
{Peer, Pool, Name}
end,
RemoteConnectedPeers
),
[Pid] = gproc:select([{{{p, l, port}, '$1', Port}, [], ['$1']}]),
Ref = monitor(process, Pid),
ok = enet:stop_host(Port),
receive
{'DOWN', Ref, process, Pid, shutdown} ->
lists:foreach(
fun
({Peer, Pool, _Name}) when Pool =:= Port ->
exit(Peer, normal);
({Peer, Pool, Name}) ->
exit(Peer, normal),
wait_until_worker_has_left_pool(Pool, Name)
end,
PeerMonitors
)
after 1000 ->
{error, timeout}
end.
%% The three send helpers below share one pattern: fire Data at the
%% channel with the given delivery guarantee, then wait for the
%% corresponding loopback delivery message carrying the identical
%% payload (Data is already bound in the function head, so the receive
%% only matches the exact payload that was sent). They return ok on
%% receipt within one second, {error, data_not_received} otherwise.
send_unsequenced(Channel, Data) ->
    enet:send_unsequenced(Channel, Data),
    receive
        {enet, _ID, #unsequenced{data = Data}} ->
            ok
    after 1000 ->
        {error, data_not_received}
    end.
send_unreliable(Channel, Data) ->
    enet:send_unreliable(Channel, Data),
    receive
        {enet, _ID, #unreliable{data = Data}} ->
            ok
    after 1000 ->
        {error, data_not_received}
    end.
send_reliable(Channel, Data) ->
    enet:send_reliable(Channel, Data),
    receive
        {enet, _ID, #reliable{data = Data}} ->
            ok
    after 1000 ->
        {error, data_not_received}
    end.
%%%
%%% Helpers
%%%
%% Poll the gproc pool until no worker named Name remains, sleeping
%% 200 ms between checks with an empty receive. Used after killing a
%% peer so the pool slot is guaranteed free before the test proceeds.
%% NOTE(review): loops forever if the worker never leaves; callers
%% appear to rely on outer test timeouts to catch that.
wait_until_worker_has_left_pool(Pool, Name) ->
    case lists:member(Name, [N || {N, _P} <- gproc_pool:worker_pool(Pool)]) of
        false ->
            ok;
        true ->
            receive
            after 200 -> wait_until_worker_has_left_pool(Pool, Name)
            end
    end.
%% Positional accessors so tests read tuple components by name instead
%% of bare element/2 indices.

%% NOTE(review): presumably V is the {ok, Port} pair produced via
%% start_host/2 -- confirm against callers.
get_host_port(V) ->
    element(2, V).
%% The next four pick apart the {LP, LCs, RP, RCs} tuple that
%% connect/3 returns on success.
get_local_peer_pid(V) ->
    element(1, V).
get_local_channels(V) ->
    element(2, V).
get_remote_peer_pid(V) ->
    element(3, V).
get_remote_channels(V) ->
    element(4, V).
%% Look up a channel by ID in a channel map; crashes (badmatch) if the
%% ID is absent, which is the desired behaviour in tests.
get_channel(ID, Channels) ->
    {ok, Channel} = maps:find(ID, Channels),
    Channel.
| null | https://raw.githubusercontent.com/saltysystems/enet/327f0cd4f408ce750027c736ab3cd021c0bb031b/test/enet_sync.erl | erlang |
Helpers
| -module(enet_sync).
-include_lib("enet/include/enet.hrl").
-export([
start_host/2,
connect_from_full_local_host/3,
connect_to_full_remote_host/3,
connect_to_self/3,
connect/3,
disconnect/2,
stop_host/1,
send_unsequenced/2,
send_unreliable/2,
send_reliable/2,
get_host_port/1,
get_local_peer_pid/1,
get_local_channels/1,
get_remote_peer_pid/1,
get_remote_channels/1,
get_channel/2
]).
start_host(ConnectFun, Options) ->
enet:start_host(0, ConnectFun, Options).
connect_from_full_local_host(LocalHost, RemotePort, ChannelCount) ->
connect(LocalHost, RemotePort, ChannelCount).
connect_to_full_remote_host(LocalHost, RemotePort, ChannelCount) ->
connect(LocalHost, RemotePort, ChannelCount).
connect_to_self(LocalHost, RemotePort, ChannelCount) ->
connect(LocalHost, RemotePort, ChannelCount).
connect(LocalHost, RemotePort, ChannelCount) ->
case enet:connect_peer(LocalHost, "127.0.0.1", RemotePort, ChannelCount) of
{error, reached_peer_limit} ->
{error, reached_peer_limit};
{ok, LPeer} ->
receive
#{peer := LP, channels := LCs, connect_id := CID} ->
receive
#{peer := RP, channels := RCs, connect_id := CID} ->
{LP, LCs, RP, RCs}
after 1000 ->
{error, remote_timeout}
end
after 2000 ->
Pool = enet_peer:get_pool(LPeer),
Name = enet_peer:get_name(LPeer),
exit(LPeer, normal),
wait_until_worker_has_left_pool(Pool, Name),
{error, local_timeout}
end
end.
disconnect(LPid, RPid) ->
LPool = enet_peer:get_pool(LPid),
RPool = enet_peer:get_pool(RPid),
LName = enet_peer:get_name(LPid),
RName = enet_peer:get_name(RPid),
ok = enet:disconnect_peer(LPid),
receive
{enet, disconnected, local, LPid, ConnectID} ->
receive
{enet, disconnected, remote, RPid, ConnectID} ->
wait_until_worker_has_left_pool(LPool, LName),
wait_until_worker_has_left_pool(RPool, RName)
after 5000 ->
{error, remote_timeout}
end
after 5000 ->
{error, local_timeout}
end.
stop_host(Port) ->
RemoteConnectedPeers =
gproc:select([{{{p, l, remote_host_port}, '$1', Port}, [], ['$1']}]),
PeerMonitors = lists:map(
fun(Peer) ->
Pool = enet_peer:get_pool(Peer),
Name = enet_peer:get_name(Peer),
{Peer, Pool, Name}
end,
RemoteConnectedPeers
),
[Pid] = gproc:select([{{{p, l, port}, '$1', Port}, [], ['$1']}]),
Ref = monitor(process, Pid),
ok = enet:stop_host(Port),
receive
{'DOWN', Ref, process, Pid, shutdown} ->
lists:foreach(
fun
({Peer, Pool, _Name}) when Pool =:= Port ->
exit(Peer, normal);
({Peer, Pool, Name}) ->
exit(Peer, normal),
wait_until_worker_has_left_pool(Pool, Name)
end,
PeerMonitors
)
after 1000 ->
{error, timeout}
end.
send_unsequenced(Channel, Data) ->
enet:send_unsequenced(Channel, Data),
receive
{enet, _ID, #unsequenced{data = Data}} ->
ok
after 1000 ->
{error, data_not_received}
end.
send_unreliable(Channel, Data) ->
enet:send_unreliable(Channel, Data),
receive
{enet, _ID, #unreliable{data = Data}} ->
ok
after 1000 ->
{error, data_not_received}
end.
send_reliable(Channel, Data) ->
enet:send_reliable(Channel, Data),
receive
{enet, _ID, #reliable{data = Data}} ->
ok
after 1000 ->
{error, data_not_received}
end.
wait_until_worker_has_left_pool(Pool, Name) ->
case lists:member(Name, [N || {N, _P} <- gproc_pool:worker_pool(Pool)]) of
false ->
ok;
true ->
receive
after 200 -> wait_until_worker_has_left_pool(Pool, Name)
end
end.
get_host_port(V) ->
element(2, V).
get_local_peer_pid(V) ->
element(1, V).
get_local_channels(V) ->
element(2, V).
get_remote_peer_pid(V) ->
element(3, V).
get_remote_channels(V) ->
element(4, V).
get_channel(ID, Channels) ->
{ok, Channel} = maps:find(ID, Channels),
Channel.
|
6f1b163cdf53dff550696710ead26d0a002f99bbf865a794fcfae553de122ccd | ghc/ghc | FreeRegs.hs | module GHC.CmmToAsm.Reg.Linear.FreeRegs (
FR(..),
maxSpillSlots
)
where
import GHC.Prelude
import GHC.Platform.Reg
import GHC.Platform.Reg.Class
import GHC.CmmToAsm.Config
import GHC.Utils.Panic
import GHC.Platform
-- -----------------------------------------------------------------------------
-- The free register set
-- This needs to be *efficient*
Here 's an inefficient ' executable specification ' of the FreeRegs data type :
--
-- type FreeRegs = [RegNo]
noFreeRegs = 0
-- releaseReg n f = if n `elem` f then f else (n : f)
-- initFreeRegs = allocatableRegs
getFreeRegs cls f = filter ( (= = cls ) . . ) f
-- allocateReg f r = filter (/= r) f
import qualified GHC.CmmToAsm.Reg.Linear.PPC as PPC
import qualified GHC.CmmToAsm.Reg.Linear.X86 as X86
import qualified GHC.CmmToAsm.Reg.Linear.X86_64 as X86_64
import qualified GHC.CmmToAsm.Reg.Linear.AArch64 as AArch64
import qualified GHC.CmmToAsm.PPC.Instr as PPC.Instr
import qualified GHC.CmmToAsm.X86.Instr as X86.Instr
import qualified GHC.CmmToAsm.AArch64.Instr as AArch64.Instr
-- | Operations every per-architecture free-register set must support
-- for the linear register allocator. The 'Show' superclass is only
-- needed for debug output.
class Show freeRegs => FR freeRegs where
    -- | Mark a real register as taken (remove it from the free set).
    frAllocateReg :: Platform -> RealReg -> freeRegs -> freeRegs
    -- | All currently-free registers of the given register class.
    frGetFreeRegs :: Platform -> RegClass -> freeRegs -> [RealReg]
    -- | The initial set, with every allocatable register free.
    frInitFreeRegs :: Platform -> freeRegs
    -- | Return a register to the free set.
    frReleaseReg :: Platform -> RealReg -> freeRegs -> freeRegs
instance FR X86.FreeRegs where
frAllocateReg = \_ -> X86.allocateReg
frGetFreeRegs = X86.getFreeRegs
frInitFreeRegs = X86.initFreeRegs
frReleaseReg = \_ -> X86.releaseReg
instance FR X86_64.FreeRegs where
frAllocateReg = \_ -> X86_64.allocateReg
frGetFreeRegs = X86_64.getFreeRegs
frInitFreeRegs = X86_64.initFreeRegs
frReleaseReg = \_ -> X86_64.releaseReg
instance FR PPC.FreeRegs where
frAllocateReg = \_ -> PPC.allocateReg
frGetFreeRegs = \_ -> PPC.getFreeRegs
frInitFreeRegs = PPC.initFreeRegs
frReleaseReg = \_ -> PPC.releaseReg
instance FR AArch64.FreeRegs where
frAllocateReg = \_ -> AArch64.allocateReg
frGetFreeRegs = \_ -> AArch64.getFreeRegs
frInitFreeRegs = AArch64.initFreeRegs
frReleaseReg = \_ -> AArch64.releaseReg
-- | Number of spill slots available on the target architecture,
-- derived from the native code generator configuration. Only the
-- backends with a linear register allocator (x86, x86-64, PPC/PPC64,
-- AArch64) give an answer; every other architecture is a hard panic
-- because this code path is never reached for them.
maxSpillSlots :: NCGConfig -> Int
maxSpillSlots config = case platformArch (ncgPlatform config) of
   ArchX86 -> X86.Instr.maxSpillSlots config
   ArchX86_64 -> X86.Instr.maxSpillSlots config
   ArchPPC -> PPC.Instr.maxSpillSlots config
   ArchS390X -> panic "maxSpillSlots ArchS390X"
   ArchARM _ _ _ -> panic "maxSpillSlots ArchARM"
   ArchAArch64 -> AArch64.Instr.maxSpillSlots config
   ArchPPC_64 _ -> PPC.Instr.maxSpillSlots config
   ArchAlpha -> panic "maxSpillSlots ArchAlpha"
   ArchMipseb -> panic "maxSpillSlots ArchMipseb"
   ArchMipsel -> panic "maxSpillSlots ArchMipsel"
   ArchRISCV64 -> panic "maxSpillSlots ArchRISCV64"
   ArchLoongArch64->panic "maxSpillSlots ArchLoongArch64"
   ArchJavaScript-> panic "maxSpillSlots ArchJavaScript"
   ArchWasm32 -> panic "maxSpillSlots ArchWasm32"
   ArchUnknown -> panic "maxSpillSlots ArchUnknown"
| null | https://raw.githubusercontent.com/ghc/ghc/aacf616df0b4059e6b177ecb64624ae6fb1d1c87/compiler/GHC/CmmToAsm/Reg/Linear/FreeRegs.hs | haskell | -----------------------------------------------------------------------------
The free register set
This needs to be *efficient*
type FreeRegs = [RegNo]
releaseReg n f = if n `elem` f then f else (n : f)
initFreeRegs = allocatableRegs
allocateReg f r = filter (/= r) f | module GHC.CmmToAsm.Reg.Linear.FreeRegs (
FR(..),
maxSpillSlots
)
where
import GHC.Prelude
import GHC.Platform.Reg
import GHC.Platform.Reg.Class
import GHC.CmmToAsm.Config
import GHC.Utils.Panic
import GHC.Platform
Here 's an inefficient ' executable specification ' of the FreeRegs data type :
noFreeRegs = 0
getFreeRegs cls f = filter ( (= = cls ) . . ) f
import qualified GHC.CmmToAsm.Reg.Linear.PPC as PPC
import qualified GHC.CmmToAsm.Reg.Linear.X86 as X86
import qualified GHC.CmmToAsm.Reg.Linear.X86_64 as X86_64
import qualified GHC.CmmToAsm.Reg.Linear.AArch64 as AArch64
import qualified GHC.CmmToAsm.PPC.Instr as PPC.Instr
import qualified GHC.CmmToAsm.X86.Instr as X86.Instr
import qualified GHC.CmmToAsm.AArch64.Instr as AArch64.Instr
class Show freeRegs => FR freeRegs where
frAllocateReg :: Platform -> RealReg -> freeRegs -> freeRegs
frGetFreeRegs :: Platform -> RegClass -> freeRegs -> [RealReg]
frInitFreeRegs :: Platform -> freeRegs
frReleaseReg :: Platform -> RealReg -> freeRegs -> freeRegs
instance FR X86.FreeRegs where
frAllocateReg = \_ -> X86.allocateReg
frGetFreeRegs = X86.getFreeRegs
frInitFreeRegs = X86.initFreeRegs
frReleaseReg = \_ -> X86.releaseReg
instance FR X86_64.FreeRegs where
frAllocateReg = \_ -> X86_64.allocateReg
frGetFreeRegs = X86_64.getFreeRegs
frInitFreeRegs = X86_64.initFreeRegs
frReleaseReg = \_ -> X86_64.releaseReg
instance FR PPC.FreeRegs where
frAllocateReg = \_ -> PPC.allocateReg
frGetFreeRegs = \_ -> PPC.getFreeRegs
frInitFreeRegs = PPC.initFreeRegs
frReleaseReg = \_ -> PPC.releaseReg
instance FR AArch64.FreeRegs where
frAllocateReg = \_ -> AArch64.allocateReg
frGetFreeRegs = \_ -> AArch64.getFreeRegs
frInitFreeRegs = AArch64.initFreeRegs
frReleaseReg = \_ -> AArch64.releaseReg
maxSpillSlots :: NCGConfig -> Int
maxSpillSlots config = case platformArch (ncgPlatform config) of
ArchX86 -> X86.Instr.maxSpillSlots config
ArchX86_64 -> X86.Instr.maxSpillSlots config
ArchPPC -> PPC.Instr.maxSpillSlots config
ArchS390X -> panic "maxSpillSlots ArchS390X"
ArchARM _ _ _ -> panic "maxSpillSlots ArchARM"
ArchAArch64 -> AArch64.Instr.maxSpillSlots config
ArchPPC_64 _ -> PPC.Instr.maxSpillSlots config
ArchAlpha -> panic "maxSpillSlots ArchAlpha"
ArchMipseb -> panic "maxSpillSlots ArchMipseb"
ArchMipsel -> panic "maxSpillSlots ArchMipsel"
ArchRISCV64 -> panic "maxSpillSlots ArchRISCV64"
ArchLoongArch64->panic "maxSpillSlots ArchLoongArch64"
ArchJavaScript-> panic "maxSpillSlots ArchJavaScript"
ArchWasm32 -> panic "maxSpillSlots ArchWasm32"
ArchUnknown -> panic "maxSpillSlots ArchUnknown"
|
1388c919021faf59904c404466397f292ea50f5bf8343b9fdd2452ad71e3a0d8 | RichiH/git-annex | ProblemFixer.hs | git - annex assistant thread to handle fixing problems with repositories
-
- Copyright 2013 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2013 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Threads.ProblemFixer (
problemFixerThread
) where
import Assistant.Common
import Assistant.Types.RepoProblem
import Assistant.RepoProblem
import Assistant.Types.UrlRenderer
import Assistant.Alert
import Remote
import qualified Types.Remote as Remote
import qualified Git.Fsck
import Assistant.Repair
import qualified Git
import Annex.UUID
import Utility.ThreadScheduler
{- Waits for problems with a repo, and tries to fsck the repo and repair
- the problem. -}
problemFixerThread :: UrlRenderer -> NamedThread
problemFixerThread urlrenderer = namedThread "ProblemFixer" $
go =<< getRepoProblems
where
go problems = do
mapM_ (handleProblem urlrenderer) problems
liftIO $ threadDelaySeconds (Seconds 60)
-- Problems may have been re-reported while they were being
-- fixed, so ignore those. If a new unique problem happened
60 seconds after the last was fixed , we 're unlikely
-- to do much good anyway.
go =<< filter (\p -> not (any (sameRepoProblem p) problems))
<$> getRepoProblems
handleProblem :: UrlRenderer -> RepoProblem -> Assistant ()
handleProblem urlrenderer repoproblem = do
fixed <- ifM ((==) (problemUUID repoproblem) <$> liftAnnex getUUID)
( handleLocalRepoProblem urlrenderer
, maybe (return False) (handleRemoteProblem urlrenderer)
=<< liftAnnex (remoteFromUUID $ problemUUID repoproblem)
)
when fixed $
liftIO $ afterFix repoproblem
handleRemoteProblem :: UrlRenderer -> Remote -> Assistant Bool
handleRemoteProblem urlrenderer rmt
| Git.repoIsLocal r && not (Git.repoIsLocalUnknown r) =
ifM (liftIO $ checkAvailable True rmt)
( do
fixedlocks <- repairStaleGitLocks r
fsckresults <- showFscking urlrenderer (Just rmt) $ tryNonAsync $
Git.Fsck.findBroken True r
repaired <- repairWhenNecessary urlrenderer (Remote.uuid rmt) (Just rmt) fsckresults
return $ fixedlocks || repaired
, return False
)
| otherwise = return False
where
r = Remote.repo rmt
{- This is not yet used, and should probably do a fsck.
 -
 - Currently it only clears out stale git lock files in the local
 - annex repository; the Bool result is presumably True when a repair
 - was performed (it feeds the || in handleProblem's remote path). -}
handleLocalRepoProblem :: UrlRenderer -> Assistant Bool
handleLocalRepoProblem _urlrenderer = do
	repairStaleGitLocks =<< liftAnnex gitRepo
| null | https://raw.githubusercontent.com/RichiH/git-annex/bbcad2b0af8cd9264d0cb86e6ca126ae626171f3/Assistant/Threads/ProblemFixer.hs | haskell | Waits for problems with a repo, and tries to fsck the repo and repair
- the problem.
Problems may have been re-reported while they were being
fixed, so ignore those. If a new unique problem happened
to do much good anyway.
This is not yet used, and should probably do a fsck. | git - annex assistant thread to handle fixing problems with repositories
-
- Copyright 2013 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2013 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Assistant.Threads.ProblemFixer (
problemFixerThread
) where
import Assistant.Common
import Assistant.Types.RepoProblem
import Assistant.RepoProblem
import Assistant.Types.UrlRenderer
import Assistant.Alert
import Remote
import qualified Types.Remote as Remote
import qualified Git.Fsck
import Assistant.Repair
import qualified Git
import Annex.UUID
import Utility.ThreadScheduler
problemFixerThread :: UrlRenderer -> NamedThread
problemFixerThread urlrenderer = namedThread "ProblemFixer" $
go =<< getRepoProblems
where
go problems = do
mapM_ (handleProblem urlrenderer) problems
liftIO $ threadDelaySeconds (Seconds 60)
60 seconds after the last was fixed , we 're unlikely
go =<< filter (\p -> not (any (sameRepoProblem p) problems))
<$> getRepoProblems
handleProblem :: UrlRenderer -> RepoProblem -> Assistant ()
handleProblem urlrenderer repoproblem = do
fixed <- ifM ((==) (problemUUID repoproblem) <$> liftAnnex getUUID)
( handleLocalRepoProblem urlrenderer
, maybe (return False) (handleRemoteProblem urlrenderer)
=<< liftAnnex (remoteFromUUID $ problemUUID repoproblem)
)
when fixed $
liftIO $ afterFix repoproblem
handleRemoteProblem :: UrlRenderer -> Remote -> Assistant Bool
handleRemoteProblem urlrenderer rmt
| Git.repoIsLocal r && not (Git.repoIsLocalUnknown r) =
ifM (liftIO $ checkAvailable True rmt)
( do
fixedlocks <- repairStaleGitLocks r
fsckresults <- showFscking urlrenderer (Just rmt) $ tryNonAsync $
Git.Fsck.findBroken True r
repaired <- repairWhenNecessary urlrenderer (Remote.uuid rmt) (Just rmt) fsckresults
return $ fixedlocks || repaired
, return False
)
| otherwise = return False
where
r = Remote.repo rmt
handleLocalRepoProblem :: UrlRenderer -> Assistant Bool
handleLocalRepoProblem _urlrenderer = do
repairStaleGitLocks =<< liftAnnex gitRepo
|
1ecd2df5b1e287a5cf3ff66c896a6815556c688f633add894823d2b525571a3a | CSCfi/rems | test_layers.clj | (ns rems.test-layers
"Test REMS layer architecture automatically.
Generates an architecture diagram.
NB: requires `graphviz` and `clj-kondo` to be installed on the machine.
NB: Work in progress. You should run `graph` manually currently."
(:require [clojure.java.io :as io]
[clojure.java.shell :as sh]
[clojure.edn :as edn]
[clojure.string :as str]
[clojure.tools.logging]
[clojure.test :refer [deftest is]]
[medley.core :refer [index-by]]
[tangle.core]))
(defn interesting-ns?
  "Truthy when the namespace name s (a string) belongs to one of the
  architectural layers under test: rems.ext.*, rems.db.*,
  rems.service.* or rems.api*. Utility namespaces, the API schema and
  the dependency helper are excluded up front."
  [s]
  (and (str/starts-with? s "rems.")
       (not (str/ends-with? s ".util"))
       (not (str/starts-with? s "rems.api.schema"))
       (not (str/starts-with? s "rems.service.dependencies"))
       (or (str/starts-with? s "rems.ext.")
           ;;(str/starts-with? s "rems.cli")
           (str/starts-with? s "rems.db.")
           (str/starts-with? s "rems.service.")
           (str/starts-with? s "rems.api"))))
;; use for future namespace renaming needs
(defn rename-ns [s]
(-> s
#_(str/replace "rems.db.test-data" "rems.service.test-data")))
(defn ok-transition?
  "Is a namespace dependency from `from` to `to` (maps with :layer and
  :name) permitted by the layer architecture? api may use service and
  ext; service may use db, other services and ext; ext may use ext.
  Within a layer, db namespaces may only depend on rems.db.core, and
  only the rems.api umbrella namespace may depend on other api
  namespaces. Every other combination is a violation."
  [from to]
  (case [(:layer from) (:layer to)]
    [:api :service] true
    [:service :db] true
    [:service :service] true
    [:api :ext] true
    [:service :ext] true
    [:ext :ext] true
    [:db :db] (= "rems.db.core" (:name to))
    [:api :api] (= "rems.api" (:name from))
    false))
(defn analyze-code []
(let [ret (sh/sh "clj-kondo" "--lint" "src" "--parallel" "--config" "{:output {:format :edn} :analysis true}")
analysis (-> ret :out edn/read-string :analysis)
namespaces (->> analysis
:namespace-definitions
(mapv (comp str :name))
;;(concat ["rems.cli"]) ; consider implementing `rems.cli` ns
#_(mapv rename-ns)
(filter interesting-ns?)
(mapv #(merge {:name %}
(cond (str/starts-with? % "rems.ext.") {:layer :ext}
(str/starts-with? % "rems.db.") {:layer :db}
(str/starts-with? % "rems.service.") {:layer :service}
(str/starts-with? % "rems.api") {:layer :api}))))
namespace-by-id (index-by :name namespaces)
namespace-usages (->> analysis
:namespace-usages
(map (juxt (comp str :from) (comp str :to)))
distinct
#_(map (fn [[from to]] [(rename-ns from) (rename-ns to)]))
(filterv (fn [[from to]]
(and (interesting-ns? from)
(interesting-ns? to))))
;;(concat [["rems.cli" "rems.service.test-data"]])
(map (fn [[from to]] [from to (when-not (ok-transition? (namespace-by-id from)
(namespace-by-id to))
{:color :red
:constraint false
:weight 0.01})])))]
{:namespace-usages namespace-usages
:namespaces namespaces}))
(defn graph []
(let [{nodes :namespaces edges :namespace-usages} (analyze-code)
dot (tangle.core/graph->dot nodes edges {:directed? true
:graph {:rankdir :LR
:ranksep 3.5
:rank :min
:dpi 150
:layout :dot}
:edge {:penwidth 2}
:node {:shape :box}
:node->id :name
:node->descriptor (fn [node]
(when-let [layer (:layer node)]
(case layer
:api {:style :filled
:fontcolor :white
:fillcolor "red"}
:service {:style :filled
:fillcolor "#00ff00"}
:db {:style :filled
:fontcolor :white
:fillcolor "blue"}
:ext {:style :filled
:fillcolor "cyan"})))})]
(clojure.java.io/copy dot (clojure.java.io/file "docs/rems-layers.dot"))
(clojure.java.io/copy (tangle.core/dot->svg dot) (clojure.java.io/file "docs/rems-layers.svg"))
(clojure.java.io/copy (tangle.core/dot->image dot "png") (clojure.java.io/file "docs/rems-layers.png"))))
(comment
(graph))
(deftest test-architecture-layers
TODO implement as test
| null | https://raw.githubusercontent.com/CSCfi/rems/132f19a6393765d780d339fc9904bc2055654218/test/clj/rems/test_layers.clj | clojure | (str/starts-with? s "rems.cli")
use for future namespace renaming needs
(concat ["rems.cli"]) ; consider implementing `rems.cli` ns
(concat [["rems.cli" "rems.service.test-data"]]) | (ns rems.test-layers
"Test REMS layer architecture automatically.
Generates an architecture diagram.
NB: requires `graphviz` and `clj-kondo` to be installed on the machine.
NB: Work in progress. You should run `graph` manually currently."
(:require [clojure.java.io :as io]
[clojure.java.shell :as sh]
[clojure.edn :as edn]
[clojure.string :as str]
[clojure.tools.logging]
[clojure.test :refer [deftest is]]
[medley.core :refer [index-by]]
[tangle.core]))
(defn interesting-ns? [s]
(and (str/starts-with? s "rems.")
(not (str/ends-with? s ".util"))
(not (str/starts-with? s "rems.api.schema"))
(not (str/starts-with? s "rems.service.dependencies"))
(or (str/starts-with? s "rems.ext.")
(str/starts-with? s "rems.db.")
(str/starts-with? s "rems.service.")
(str/starts-with? s "rems.api"))))
(defn rename-ns [s]
(-> s
#_(str/replace "rems.db.test-data" "rems.service.test-data")))
(defn ok-transition? [from to]
(case [(:layer from) (:layer to)]
[:api :service] true
[:service :db] true
[:service :service] true
[:api :ext] true
[:service :ext] true
[:ext :ext] true
[:db :db] (= "rems.db.core" (:name to))
[:api :api] (= "rems.api" (:name from))
false))
(defn analyze-code []
(let [ret (sh/sh "clj-kondo" "--lint" "src" "--parallel" "--config" "{:output {:format :edn} :analysis true}")
analysis (-> ret :out edn/read-string :analysis)
namespaces (->> analysis
:namespace-definitions
(mapv (comp str :name))
#_(mapv rename-ns)
(filter interesting-ns?)
(mapv #(merge {:name %}
(cond (str/starts-with? % "rems.ext.") {:layer :ext}
(str/starts-with? % "rems.db.") {:layer :db}
(str/starts-with? % "rems.service.") {:layer :service}
(str/starts-with? % "rems.api") {:layer :api}))))
namespace-by-id (index-by :name namespaces)
namespace-usages (->> analysis
:namespace-usages
(map (juxt (comp str :from) (comp str :to)))
distinct
#_(map (fn [[from to]] [(rename-ns from) (rename-ns to)]))
(filterv (fn [[from to]]
(and (interesting-ns? from)
(interesting-ns? to))))
(map (fn [[from to]] [from to (when-not (ok-transition? (namespace-by-id from)
(namespace-by-id to))
{:color :red
:constraint false
:weight 0.01})])))]
{:namespace-usages namespace-usages
:namespaces namespaces}))
(defn graph []
(let [{nodes :namespaces edges :namespace-usages} (analyze-code)
dot (tangle.core/graph->dot nodes edges {:directed? true
:graph {:rankdir :LR
:ranksep 3.5
:rank :min
:dpi 150
:layout :dot}
:edge {:penwidth 2}
:node {:shape :box}
:node->id :name
:node->descriptor (fn [node]
(when-let [layer (:layer node)]
(case layer
:api {:style :filled
:fontcolor :white
:fillcolor "red"}
:service {:style :filled
:fillcolor "#00ff00"}
:db {:style :filled
:fontcolor :white
:fillcolor "blue"}
:ext {:style :filled
:fillcolor "cyan"})))})]
(clojure.java.io/copy dot (clojure.java.io/file "docs/rems-layers.dot"))
(clojure.java.io/copy (tangle.core/dot->svg dot) (clojure.java.io/file "docs/rems-layers.svg"))
(clojure.java.io/copy (tangle.core/dot->image dot "png") (clojure.java.io/file "docs/rems-layers.png"))))
(comment
(graph))
(deftest test-architecture-layers
TODO implement as test
|
ae3a9082a31e166e3156b524c49702beada0d879245b9d1c2b37020e964f9eff | MarcFontaine/stm32hs | API.hs | ----------------------------------------------------------------------------
-- |
Module : STM32.API
Copyright : ( c ) 2017
-- License : BSD3
--
Maintainer :
-- Stability : experimental
Portability : GHC - only
--
-- The general part of the API.
The module for the peripheral ( GPIO , USART , ADC , .. ) has to be imported separately .
module STM32.API
(
module STM32.MachineInterface
, module STLinkUSB
, module STM32.RCC
, module Data.Word
, module Data.Bits
, module Device
, module STM32.Utils
)
where
import Data.Word
import Data.Bits
import Device
import STM32.MachineInterface
import STM32.Utils
import STM32.STLinkUSB as STLinkUSB hiding (resetHalt)
import STM32.RCC (setDefaultClocks , peripheralClockOn
, peripheralClockOff, peripheralResetToggle)
| null | https://raw.githubusercontent.com/MarcFontaine/stm32hs/d7afeb8f9d83e01c76003f4b199b45044bd4e383/STM32-Zombie/src/STM32/API.hs | haskell | --------------------------------------------------------------------------
|
License : BSD3
Stability : experimental
The general part of the API. | Module : STM32.API
Copyright : ( c ) 2017
Maintainer :
Portability : GHC - only
The module for the peripheral ( GPIO , USART , ADC , .. ) has to be imported separately .
module STM32.API
(
module STM32.MachineInterface
, module STLinkUSB
, module STM32.RCC
, module Data.Word
, module Data.Bits
, module Device
, module STM32.Utils
)
where
import Data.Word
import Data.Bits
import Device
import STM32.MachineInterface
import STM32.Utils
import STM32.STLinkUSB as STLinkUSB hiding (resetHalt)
import STM32.RCC (setDefaultClocks , peripheralClockOn
, peripheralClockOff, peripheralResetToggle)
|
8c43d6d816b51b4ca69b64f4bc39a67616eed37dd331d4b023f2eb9578708032 | zclj/replication-packages | gitlab_experiment.clj | (ns gql.gitlab-experiment
(:require [clojure.data.json :as json]
[org.httpkit.client :as http]
[gql.generators :as gqlgen]
[clojure.test.check.properties :as prop]
[clojure.test.check.generators :as gen]
[gql.query-data :as data]
[clojure.spec.alpha :as s]))
( def token " ZaBpdrqzpzcmbdEzd1kB " )
( defn run - query
;; [query-str]
;; (json/read-str
;; (:body
;; @(http/request {:url ""
;; :method :post
;; :headers {"PRIVATE-TOKEN" token}
;; :content-type "application/json"
;; :form-params {"query" query-str}}))
;; :key-fn keyword))
( defn run - query-2
;; [query-str]
;; (let [response @(http/request {:url ""
;; :method :post
;; :headers {"PRIVATE-TOKEN" token}
;; :content-type "application/json"
;; :form-params {"query" query-str}})]
;; {:body (json/read-str (:body response) :key-fn keyword)
;; :status-code (:status response)}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Experiments utils
(def results-db (atom []))
(defn coverage-of-nodes
[]
(let [schema (-> gql.query-data/gitlab-schema-explore :data :__schema)
nodes (gqlgen/gen-gql-nodes
(nth (:types schema) 2) (:types schema))]
(map #(let [from-type-raw (:from-type %)
from-type (if from-type-raw from-type-raw "Query")]
(vector from-type (:name %))) (flatten (gen/sample nodes 1)))))
(defn make-query-of-nodes
[]
(let [schema (-> gql.query-data/gitlab-schema-explore :data :__schema)
nodes (gqlgen/gen-gql-nodes
(nth (:types schema) 2) (:types schema) 5)]
(map gqlgen/make-query (gen/sample nodes 100))))
(defn result-field-names
[]
(into #{} (map :name (flatten (map :query-tree @results-db)))))
(def schema-field-names
(into #{}
(flatten
(reduce
(fn [acc t]
(if (and (= (:kind t) "OBJECT")
(not= (:name t) "Mutation")
(not (clojure.string/starts-with? (:name t) "__")))
(conj acc (map :name (:fields t)))
acc))
[]
(:types (-> gql.query-data/gitlab-schema-explore :data :__schema))))))
(def mutation-related-objects
#{"AddAwardEmojiPayload" "AwardEmoji" "CreateDiffNotePayload" "CreateImageDiffNotePayload" "CreateNotePayload" "CreateSnippetPayload" "DestroyNotePayload" "IssueSetConfidentialPayload" "IssueSetDueDatePayload" "MarkAsSpamSnippetPayload" "MergeRequestSetAssigneesPayload" "MergeRequestSetLabelsPayload" "MergeRequestSetMilestonePayload" "MergeRequestSetSubscriptionPayload" "MergeRequestSetWipPayload" "RemoveAwardEmojiPayload" "TodoMarkDonePayload" "TodoRestorePayload" "TodosMarkAllDonePayload" "ToggleAwardEmojiPayload" "UpdateNotePayload" "UpdateSnippetPayload" "DestroySnippetPayload" "MergeRequestSetLockedPayload"})
(def schema-field-tuples
(into #{}
(filter
#(not (contains? mutation-related-objects (first %)))
(reduce
(fn [acc t]
(if (and (= (:kind t) "OBJECT")
(not= (:name t) "Mutation")
(not (clojure.string/starts-with? (:name t) "__")))
(into acc (map #(vector (:name t) (:name %)) (:fields t)))
acc))
[]
(:types (-> gql.query-data/gitlab-schema-explore :data :__schema))))))
(def generated-queries (atom []))
(def generated-nodes-db (atom #{}))
(defn coverage-of-gitlab-schema
[size max-elements]
(let [schema (-> gql.query-data/gitlab-schema-explore :data :__schema)
query-tree (gqlgen/gen-gql-nodes
(nth (:types schema) 2) (:types schema) max-elements
gqlgen/resolve-arg-gen-alphanumeric)
generated-nodes (gen/sample query-tree size)
node-field-names (into
#{}
(map
#(let [from-type-raw (:from-type %)
from-type
(if from-type-raw from-type-raw "Query")]
(vector from-type (:name %)))
(flatten generated-nodes)))
nrof-schema-fields (count schema-field-tuples)
fields-not-covered (clojure.set/difference schema-field-tuples node-field-names)]
(swap! generated-nodes-db into node-field-names)
(reset! generated-queries (map gqlgen/make-query generated-nodes))
;;fields-not-covered
(double
(/ (- nrof-schema-fields (count fields-not-covered)) nrof-schema-fields))
))
;;;;
;; DBs
(def coverage-result-db (atom []))
(defn report-node-coverage-result
[n]
{:iterations n
:avg-coverage (* 100 (double (/ (reduce + @coverage-result-db) n)))
:compounded-coverage
(*
100
(double
(/ (count @generated-nodes-db) (count schema-field-tuples))))})
(defn run-range-of-generations
[n sample-size max-elements]
(reset! coverage-result-db [])
(reset! generated-nodes-db #{})
(do
(doseq [_ (range 0 n)]
(let [result-n (coverage-of-gitlab-schema sample-size max-elements)]
(swap! coverage-result-db conj result-n)))
(report-node-coverage-result n)))
(defn run-until-coverage
[max-n sample-size max-elements]
(reset! coverage-result-db [])
(reset! generated-nodes-db #{})
(loop [n 0]
(if (and (not= 100.0 (:compounded-coverage (report-node-coverage-result max-n)))
(< n max-n))
(recur
(do
(let [result-n (coverage-of-gitlab-schema sample-size max-elements)]
(swap! coverage-result-db conj result-n))
(inc n)))
(report-node-coverage-result (count @coverage-result-db)))))
| null | https://raw.githubusercontent.com/zclj/replication-packages/8a5cb02e4fdda74107b5dcc34cbfa5cb5ae34dcf/GraphQL/implementation/src/gql/gitlab_experiment.clj | clojure | [query-str]
(json/read-str
(:body
@(http/request {:url ""
:method :post
:headers {"PRIVATE-TOKEN" token}
:content-type "application/json"
:form-params {"query" query-str}}))
:key-fn keyword))
[query-str]
(let [response @(http/request {:url ""
:method :post
:headers {"PRIVATE-TOKEN" token}
:content-type "application/json"
:form-params {"query" query-str}})]
{:body (json/read-str (:body response) :key-fn keyword)
:status-code (:status response)}))
Experiments utils
fields-not-covered
DBs | (ns gql.gitlab-experiment
(:require [clojure.data.json :as json]
[org.httpkit.client :as http]
[gql.generators :as gqlgen]
[clojure.test.check.properties :as prop]
[clojure.test.check.generators :as gen]
[gql.query-data :as data]
[clojure.spec.alpha :as s]))
( def token " ZaBpdrqzpzcmbdEzd1kB " )
( defn run - query
( defn run - query-2
(def results-db (atom []))
(defn coverage-of-nodes
[]
(let [schema (-> gql.query-data/gitlab-schema-explore :data :__schema)
nodes (gqlgen/gen-gql-nodes
(nth (:types schema) 2) (:types schema))]
(map #(let [from-type-raw (:from-type %)
from-type (if from-type-raw from-type-raw "Query")]
(vector from-type (:name %))) (flatten (gen/sample nodes 1)))))
(defn make-query-of-nodes
[]
(let [schema (-> gql.query-data/gitlab-schema-explore :data :__schema)
nodes (gqlgen/gen-gql-nodes
(nth (:types schema) 2) (:types schema) 5)]
(map gqlgen/make-query (gen/sample nodes 100))))
(defn result-field-names
[]
(into #{} (map :name (flatten (map :query-tree @results-db)))))
(def schema-field-names
(into #{}
(flatten
(reduce
(fn [acc t]
(if (and (= (:kind t) "OBJECT")
(not= (:name t) "Mutation")
(not (clojure.string/starts-with? (:name t) "__")))
(conj acc (map :name (:fields t)))
acc))
[]
(:types (-> gql.query-data/gitlab-schema-explore :data :__schema))))))
(def mutation-related-objects
#{"AddAwardEmojiPayload" "AwardEmoji" "CreateDiffNotePayload" "CreateImageDiffNotePayload" "CreateNotePayload" "CreateSnippetPayload" "DestroyNotePayload" "IssueSetConfidentialPayload" "IssueSetDueDatePayload" "MarkAsSpamSnippetPayload" "MergeRequestSetAssigneesPayload" "MergeRequestSetLabelsPayload" "MergeRequestSetMilestonePayload" "MergeRequestSetSubscriptionPayload" "MergeRequestSetWipPayload" "RemoveAwardEmojiPayload" "TodoMarkDonePayload" "TodoRestorePayload" "TodosMarkAllDonePayload" "ToggleAwardEmojiPayload" "UpdateNotePayload" "UpdateSnippetPayload" "DestroySnippetPayload" "MergeRequestSetLockedPayload"})
(def schema-field-tuples
(into #{}
(filter
#(not (contains? mutation-related-objects (first %)))
(reduce
(fn [acc t]
(if (and (= (:kind t) "OBJECT")
(not= (:name t) "Mutation")
(not (clojure.string/starts-with? (:name t) "__")))
(into acc (map #(vector (:name t) (:name %)) (:fields t)))
acc))
[]
(:types (-> gql.query-data/gitlab-schema-explore :data :__schema))))))
(def generated-queries (atom []))
(def generated-nodes-db (atom #{}))
(defn coverage-of-gitlab-schema
[size max-elements]
(let [schema (-> gql.query-data/gitlab-schema-explore :data :__schema)
query-tree (gqlgen/gen-gql-nodes
(nth (:types schema) 2) (:types schema) max-elements
gqlgen/resolve-arg-gen-alphanumeric)
generated-nodes (gen/sample query-tree size)
node-field-names (into
#{}
(map
#(let [from-type-raw (:from-type %)
from-type
(if from-type-raw from-type-raw "Query")]
(vector from-type (:name %)))
(flatten generated-nodes)))
nrof-schema-fields (count schema-field-tuples)
fields-not-covered (clojure.set/difference schema-field-tuples node-field-names)]
(swap! generated-nodes-db into node-field-names)
(reset! generated-queries (map gqlgen/make-query generated-nodes))
(double
(/ (- nrof-schema-fields (count fields-not-covered)) nrof-schema-fields))
))
(def coverage-result-db (atom []))
(defn report-node-coverage-result
[n]
{:iterations n
:avg-coverage (* 100 (double (/ (reduce + @coverage-result-db) n)))
:compounded-coverage
(*
100
(double
(/ (count @generated-nodes-db) (count schema-field-tuples))))})
(defn run-range-of-generations
[n sample-size max-elements]
(reset! coverage-result-db [])
(reset! generated-nodes-db #{})
(do
(doseq [_ (range 0 n)]
(let [result-n (coverage-of-gitlab-schema sample-size max-elements)]
(swap! coverage-result-db conj result-n)))
(report-node-coverage-result n)))
(defn run-until-coverage
[max-n sample-size max-elements]
(reset! coverage-result-db [])
(reset! generated-nodes-db #{})
(loop [n 0]
(if (and (not= 100.0 (:compounded-coverage (report-node-coverage-result max-n)))
(< n max-n))
(recur
(do
(let [result-n (coverage-of-gitlab-schema sample-size max-elements)]
(swap! coverage-result-db conj result-n))
(inc n)))
(report-node-coverage-result (count @coverage-result-db)))))
|
ce1881ce7c614f733f238251fd1cdef851e14d9b2aca3fd48ed07d7ad7695b2a | chaoxu/fancy-walks | 2.hs |
fib = 1 : 1 : zipWith (+) fib (tail fib)
problem_2 = sum $ filter even $ takeWhile (<=4000000) fib
main = print problem_2
| null | https://raw.githubusercontent.com/chaoxu/fancy-walks/952fcc345883181144131f839aa61e36f488998d/projecteuler.net/2.hs | haskell |
fib = 1 : 1 : zipWith (+) fib (tail fib)
problem_2 = sum $ filter even $ takeWhile (<=4000000) fib
main = print problem_2
| |
51717ee21faa33e579ed55e8d1bc8850fbdae962039d521ed0c9b4a6a46af060 | chaitanyagupta/chronicity | repeater-hour.lisp | -*- Mode : LISP ; Syntax : COMMON - LISP ; Package : CL - USER ; Base : 10 -*-
;;; repeater-hour.lisp
;;; See the LICENSE file for licensing information.
(cl:in-package #:chronicity-test)
(define-test repeater-hour-next-future
(let* ((now (make-datetime 2006 8 16 14))
(hours (create-tag 'repeater-hour :hour :now now))
(next-hour))
(setf next-hour (r-next hours :future))
(assert-datetime= (make-datetime 2006 8 16 15) (span-start next-hour))
(assert-datetime= (make-datetime 2006 8 16 16) (span-end next-hour))
(setf next-hour (r-next hours :future))
(assert-datetime= (make-datetime 2006 8 16 16) (span-start next-hour))
(assert-datetime= (make-datetime 2006 8 16 17) (span-end next-hour))))
(define-test repeater-hour-next-past
(let* ((now (make-datetime 2006 8 16 14))
(hours (create-tag 'repeater-hour :hour :now now))
(next-hour))
(setf next-hour (r-next hours :past))
(assert-datetime= (make-datetime 2006 8 16 13) (span-start next-hour))
(assert-datetime= (make-datetime 2006 8 16 14) (span-end next-hour))
(setf next-hour (r-next hours :past))
(assert-datetime= (make-datetime 2006 8 16 12) (span-start next-hour))
(assert-datetime= (make-datetime 2006 8 16 13) (span-end next-hour))))
(define-test repeater-hour-this
(let* ((now (make-datetime 2006 8 16 14 30))
(hours (create-tag 'repeater-hour :hour :now now))
(this-hour))
(setf this-hour (r-this hours :future))
(assert-datetime= (make-datetime 2006 8 16 14 30) (span-start this-hour))
(assert-datetime= (make-datetime 2006 8 16 15) (span-end this-hour))
(setf this-hour (r-this hours :past))
(assert-datetime= (make-datetime 2006 8 16 14) (span-start this-hour))
(assert-datetime= (make-datetime 2006 8 16 14 30) (span-end this-hour))))
(define-test repeater-hour-offset
(let* ((now (make-datetime 2006 8 16 14))
(span (make-span now (datetime-incr now :sec)))
(offset-span)
(repeater (create-tag 'repeater-hour :hour)))
(setf offset-span (r-offset repeater span 3 :future))
(assert-datetime= (make-datetime 2006 8 16 17) (span-start offset-span))
(assert-datetime= (make-datetime 2006 8 16 17 0 1) (span-end offset-span))
(setf offset-span (r-offset repeater span 24 :past))
(assert-datetime= (make-datetime 2006 8 15 14) (span-start offset-span))
(assert-datetime= (make-datetime 2006 8 15 14 0 1) (span-end offset-span))))
| null | https://raw.githubusercontent.com/chaitanyagupta/chronicity/5841d1548cad0ca6917d8e68933124a5af68f5ec/test/repeater-hour.lisp | lisp | Syntax : COMMON - LISP ; Package : CL - USER ; Base : 10 -*-
repeater-hour.lisp
See the LICENSE file for licensing information. |
(cl:in-package #:chronicity-test)
(define-test repeater-hour-next-future
(let* ((now (make-datetime 2006 8 16 14))
(hours (create-tag 'repeater-hour :hour :now now))
(next-hour))
(setf next-hour (r-next hours :future))
(assert-datetime= (make-datetime 2006 8 16 15) (span-start next-hour))
(assert-datetime= (make-datetime 2006 8 16 16) (span-end next-hour))
(setf next-hour (r-next hours :future))
(assert-datetime= (make-datetime 2006 8 16 16) (span-start next-hour))
(assert-datetime= (make-datetime 2006 8 16 17) (span-end next-hour))))
(define-test repeater-hour-next-past
(let* ((now (make-datetime 2006 8 16 14))
(hours (create-tag 'repeater-hour :hour :now now))
(next-hour))
(setf next-hour (r-next hours :past))
(assert-datetime= (make-datetime 2006 8 16 13) (span-start next-hour))
(assert-datetime= (make-datetime 2006 8 16 14) (span-end next-hour))
(setf next-hour (r-next hours :past))
(assert-datetime= (make-datetime 2006 8 16 12) (span-start next-hour))
(assert-datetime= (make-datetime 2006 8 16 13) (span-end next-hour))))
(define-test repeater-hour-this
(let* ((now (make-datetime 2006 8 16 14 30))
(hours (create-tag 'repeater-hour :hour :now now))
(this-hour))
(setf this-hour (r-this hours :future))
(assert-datetime= (make-datetime 2006 8 16 14 30) (span-start this-hour))
(assert-datetime= (make-datetime 2006 8 16 15) (span-end this-hour))
(setf this-hour (r-this hours :past))
(assert-datetime= (make-datetime 2006 8 16 14) (span-start this-hour))
(assert-datetime= (make-datetime 2006 8 16 14 30) (span-end this-hour))))
(define-test repeater-hour-offset
(let* ((now (make-datetime 2006 8 16 14))
(span (make-span now (datetime-incr now :sec)))
(offset-span)
(repeater (create-tag 'repeater-hour :hour)))
(setf offset-span (r-offset repeater span 3 :future))
(assert-datetime= (make-datetime 2006 8 16 17) (span-start offset-span))
(assert-datetime= (make-datetime 2006 8 16 17 0 1) (span-end offset-span))
(setf offset-span (r-offset repeater span 24 :past))
(assert-datetime= (make-datetime 2006 8 15 14) (span-start offset-span))
(assert-datetime= (make-datetime 2006 8 15 14 0 1) (span-end offset-span))))
|
087f486b02aeb039978275506e372752035cd39c220cc724e340e9e97833082f | tommaisey/aeon | buf-comb-c.help.scm | (with-sc3
(lambda (fd)
(async fd (b-alloc 0 44100 1))))
(let ((x (mul3 (decay (dust ar 1) 0.2) (white-noise ar) 0.5)))
(audition (out 0 (buf-comb-n 0 x 0.25 6))))
| null | https://raw.githubusercontent.com/tommaisey/aeon/80744a7235425c47a061ec8324d923c53ebedf15/libs/third-party/sc3/rsc3/help/ugen/delays/buf-comb-c.help.scm | scheme | (with-sc3
(lambda (fd)
(async fd (b-alloc 0 44100 1))))
(let ((x (mul3 (decay (dust ar 1) 0.2) (white-noise ar) 0.5)))
(audition (out 0 (buf-comb-n 0 x 0.25 6))))
| |
f46a673f1e18437f3b7c0f369e328e91a1201860e74325648de513566569430a | dscarpetti/codax | prefix.clj | (ns codax.prefix)
(defn set-prefix [tx prefix]
(assoc tx :prefix
(cond
(nil? prefix) []
(vector? prefix) prefix
(coll? prefix) (vec prefix)
:else [prefix])))
(defn prefix-path
"Helper function for prefixing a path with the `:prefix` of the transaction `txn`.
Generally for internal library use only"
[tx k]
(cond
(nil? k) (:prefix tx)
(coll? k) (vec (concat (:prefix tx) (vec k)))
:else (conj (:prefix tx) k)))
(defn clear-prefix
"Clears the prefix of the transaction. This is the
default state of a new transaction object."
[tx]
(set-prefix tx nil))
(defn extend-prefix
"Conjoins the current transaction prefix with `prefix-extentension`."
[tx prefix-extension]
(if (nil? prefix-extension)
tx
(assoc tx :prefix
(vec (concat (:prefix tx) (if (coll? prefix-extension) prefix-extension [prefix-extension]))))))
(defn push-prefix
"Stores the current transaction prefix in the prefix stack, then
sets the supplied `prefix` as the active transaction prefix."
[tx prefix]
(-> tx
(assoc :prefix-stack (cons (:prefix tx) (:prefix-stack tx)))
(set-prefix prefix)))
(defn pop-prefix
"Replaces the active transaction prefix with the top item on the
prefix stack. Throws an error if the stack is empty."
[tx]
(assert (not (empty? (:prefix-stack tx))) "Prefix stack is empty. More pushes than pops.")
(-> tx
(set-prefix (first (:prefix-stack tx)))
(assoc :prefix-stack (rest (:prefix-stack tx)))))
(defn push-extend-prefix
"Stores the current transaction prefix in the prefix stack, then
sets the transaction prefix to that prefix conjoined with supplied
`prefix-extension`"
[tx prefix-extension]
(-> tx
(assoc :prefix-stack (cons (:prefix tx) (:prefix-stack tx)))
(set-prefix (concat (:prefix tx) (if (coll? prefix-extension) prefix-extension [prefix-extension])))))
| null | https://raw.githubusercontent.com/dscarpetti/codax/68edc73dbdfe4d33f5d08f658052edbc3de85121/src/codax/prefix.clj | clojure | (ns codax.prefix)
(defn set-prefix [tx prefix]
(assoc tx :prefix
(cond
(nil? prefix) []
(vector? prefix) prefix
(coll? prefix) (vec prefix)
:else [prefix])))
(defn prefix-path
"Helper function for prefixing a path with the `:prefix` of the transaction `txn`.
Generally for internal library use only"
[tx k]
(cond
(nil? k) (:prefix tx)
(coll? k) (vec (concat (:prefix tx) (vec k)))
:else (conj (:prefix tx) k)))
(defn clear-prefix
"Clears the prefix of the transaction. This is the
default state of a new transaction object."
[tx]
(set-prefix tx nil))
(defn extend-prefix
"Conjoins the current transaction prefix with `prefix-extentension`."
[tx prefix-extension]
(if (nil? prefix-extension)
tx
(assoc tx :prefix
(vec (concat (:prefix tx) (if (coll? prefix-extension) prefix-extension [prefix-extension]))))))
(defn push-prefix
"Stores the current transaction prefix in the prefix stack, then
sets the supplied `prefix` as the active transaction prefix."
[tx prefix]
(-> tx
(assoc :prefix-stack (cons (:prefix tx) (:prefix-stack tx)))
(set-prefix prefix)))
(defn pop-prefix
"Replaces the active transaction prefix with the top item on the
prefix stack. Throws an error if the stack is empty."
[tx]
(assert (not (empty? (:prefix-stack tx))) "Prefix stack is empty. More pushes than pops.")
(-> tx
(set-prefix (first (:prefix-stack tx)))
(assoc :prefix-stack (rest (:prefix-stack tx)))))
(defn push-extend-prefix
"Stores the current transaction prefix in the prefix stack, then
sets the transaction prefix to that prefix conjoined with supplied
`prefix-extension`"
[tx prefix-extension]
(-> tx
(assoc :prefix-stack (cons (:prefix tx) (:prefix-stack tx)))
(set-prefix (concat (:prefix tx) (if (coll? prefix-extension) prefix-extension [prefix-extension])))))
| |
2e2f6560388ece835608304e4c349e18214ed448a7f8f163429d0d67d85b9960 | oakes/Nightweb | main.clj | (ns net.nightweb.main
(:require [neko.notify :as notify]
[neko.resource :as r]
[neko.ui.mapping :as mapping]
[net.clandroid.service :as service]
[net.nightweb.utils :as utils]
[nightweb.router :as router]))
(mapping/defelement :scroll-view
:classname android.widget.ScrollView
:inherits :view)
(mapping/defelement :frame-layout
:classname android.widget.FrameLayout
:inherits :view)
(mapping/defelement :relative-layout
:classname android.widget.RelativeLayout
:inherits :view)
(mapping/defelement :image-view
:classname android.widget.ImageView
:inherits :view)
(mapping/defelement :view-pager
:classname android.support.v4.view.ViewPager
:inherits :view)
(def ^:const service-name "net.nightweb.MainService")
(def ^:const shutdown-receiver-name "ACTION_CLOSE_APP")
(service/defservice
net.nightweb.MainService
:def service
:state (atom {})
:on-create
(fn [this]
(->> (notify/notification
:icon (r/get-resource :drawable :ic_launcher)
:content-title (r/get-string :shut_down_nightweb)
:content-text (r/get-string :nightweb_is_running)
:action [:broadcast shutdown-receiver-name])
(service/start-foreground! this 1))
(->> (fn [context intent]
(try
(.stopSelf service)
(catch Exception e nil)))
(service/start-receiver! this shutdown-receiver-name)
(utils/set-state! this shutdown-receiver-name))
(-> this .getFilesDir .getAbsolutePath router/start-router!))
:on-destroy
(fn [this]
(when-let [receiver (utils/get-state this shutdown-receiver-name)]
(service/stop-receiver! this receiver))
(router/stop-router!)))
| null | https://raw.githubusercontent.com/oakes/Nightweb/089c7a99a9a875fde33cc29d56e1faf54dc9de84/android/src/clojure/net/nightweb/main.clj | clojure | (ns net.nightweb.main
(:require [neko.notify :as notify]
[neko.resource :as r]
[neko.ui.mapping :as mapping]
[net.clandroid.service :as service]
[net.nightweb.utils :as utils]
[nightweb.router :as router]))
(mapping/defelement :scroll-view
:classname android.widget.ScrollView
:inherits :view)
(mapping/defelement :frame-layout
:classname android.widget.FrameLayout
:inherits :view)
(mapping/defelement :relative-layout
:classname android.widget.RelativeLayout
:inherits :view)
(mapping/defelement :image-view
:classname android.widget.ImageView
:inherits :view)
(mapping/defelement :view-pager
:classname android.support.v4.view.ViewPager
:inherits :view)
(def ^:const service-name "net.nightweb.MainService")
(def ^:const shutdown-receiver-name "ACTION_CLOSE_APP")
(service/defservice
net.nightweb.MainService
:def service
:state (atom {})
:on-create
(fn [this]
(->> (notify/notification
:icon (r/get-resource :drawable :ic_launcher)
:content-title (r/get-string :shut_down_nightweb)
:content-text (r/get-string :nightweb_is_running)
:action [:broadcast shutdown-receiver-name])
(service/start-foreground! this 1))
(->> (fn [context intent]
(try
(.stopSelf service)
(catch Exception e nil)))
(service/start-receiver! this shutdown-receiver-name)
(utils/set-state! this shutdown-receiver-name))
(-> this .getFilesDir .getAbsolutePath router/start-router!))
:on-destroy
(fn [this]
(when-let [receiver (utils/get-state this shutdown-receiver-name)]
(service/stop-receiver! this receiver))
(router/stop-router!)))
| |
3a41eecba5c3656e1fe1e227071063b687186d3555aac72ea0bc11b4fb9425b3 | inaka/elvis_core | consistent_generic_type_term_and_any.erl | -module(consistent_generic_type_term_and_any).
-export([simple_term/1, simple_combined/1, simple_when/1]).
% Type definitions when this is alone or combined
-type my_type() :: term().
-type combined() :: any() | my_type().
% Record definitions
-record(my_record, {t :: term(), a :: any()}).
% Callback definitions (with or without when)
-callback my_callback(term()) -> any().
-callback my_callback_when(X) -> X when X :: term().
-spec simple_term(term()) -> ok.
simple_term(_Args) -> ok.
-spec simple_combined(combined()) -> ok.
simple_combined(_Args) -> ok.
% Specs with when
-spec simple_when(#my_record{}) -> {any(), X} when X :: term().
simple_when(#my_record{a = A, t = T}) -> {A, T}.
| null | https://raw.githubusercontent.com/inaka/elvis_core/02161609796225f8574e3c3175b37f1795fb8eb6/test/examples/consistent_generic_type_term_and_any.erl | erlang | Type definitions when this is alone or combined
Record definitions
Callback definitions (with or without when)
Specs with when | -module(consistent_generic_type_term_and_any).
-export([simple_term/1, simple_combined/1, simple_when/1]).
-type my_type() :: term().
-type combined() :: any() | my_type().
-record(my_record, {t :: term(), a :: any()}).
-callback my_callback(term()) -> any().
-callback my_callback_when(X) -> X when X :: term().
-spec simple_term(term()) -> ok.
simple_term(_Args) -> ok.
-spec simple_combined(combined()) -> ok.
simple_combined(_Args) -> ok.
-spec simple_when(#my_record{}) -> {any(), X} when X :: term().
simple_when(#my_record{a = A, t = T}) -> {A, T}.
|
b882185b3a579b281681a662a36f4b7a5505c4a66802c01ca91f7b36745d4553 | input-output-hk/project-icarus-importer | Txp.hs | {-# LANGUAGE Rank2Types #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
-- | Utils for payments and redeeming.
module Pos.Wallet.Web.Methods.Txp
( MonadWalletTxFull
, gatherPendingTxsSummary
, rewrapTxError
, coinDistrToOutputs
, submitAndSaveNewPtx
, getPendingAddresses
) where
import Universum
import qualified Data.List.NonEmpty as NE
import Formatting (build, sformat, stext, (%))
import Pos.Client.KeyStorage (MonadKeys)
import Pos.Client.Txp.Addresses (MonadAddresses (..))
import Pos.Client.Txp.Util (InputSelectionPolicy (..), PendingAddresses (..),
isCheckedTxError)
import Pos.Core.Common (Coin)
import Pos.Core.Txp (Tx (..), TxAux (..), TxOut (..), TxOutAux (..))
import Pos.Crypto (PassPhrase, hash)
import Pos.Util.Chrono (getNewestFirst, toNewestFirst)
import Pos.Util.Servant (encodeCType)
import Pos.Wallet.Web.ClientTypes (AccountId, Addr, CId)
import Pos.Wallet.Web.Error (WalletError (..), rewrapToWalletError)
import Pos.Wallet.Web.Methods.History (MonadWalletHistory)
import Pos.Wallet.Web.Methods.Misc (PendingTxsSummary (..))
import Pos.Wallet.Web.Mode (MonadWalletWebMode)
import Pos.Wallet.Web.Pending (PendingTx (..), TxSubmissionMode, allPendingAddresses,
isPtxInBlocks, ptxFirstSubmissionHandler, sortPtxsChrono)
import Pos.Wallet.Web.Pending.Submission (submitAndSavePtx)
import Pos.Wallet.Web.State (WalletDB, WalletSnapshot, askWalletSnapshot, getPendingTxs)
import Pos.Wallet.Web.Util (decodeCTypeOrFail)
type MonadWalletTxFull ctx m =
( TxSubmissionMode ctx m
, MonadWalletHistory ctx m
, MonadKeys m
, AddrData m ~ (AccountId, PassPhrase)
)
rewrapTxError
:: forall m a. MonadCatch m
=> Text -> m a -> m a
rewrapTxError prefix =
rewrapToWalletError (\SomeException{} -> True) (InternalError . sbuild) .
rewrapToWalletError isCheckedTxError (RequestError . sbuild)
where
sbuild :: Buildable e => e -> Text
sbuild = sformat (stext%": "%build) prefix
coinDistrToOutputs
:: MonadThrow m
=> NonEmpty (CId Addr, Coin)
-> m (NonEmpty TxOutAux)
coinDistrToOutputs distr = do
addrs <- mapM decodeCTypeOrFail cAddrs
pure $ NE.zipWith mkTxOut addrs coins
where
(cAddrs, coins) = NE.unzip distr
mkTxOut addr coin = TxOutAux (TxOut addr coin)
-- | Like 'submitAndSaveTx', but suppresses errors which can get gone
-- by the time of resubmission.
submitAndSaveNewPtx
:: TxSubmissionMode ctx m
=> WalletDB
-> (TxAux -> m Bool)
-> PendingTx
-> m ()
submitAndSaveNewPtx db submit = submitAndSavePtx db submit ptxFirstSubmissionHandler
gatherPendingTxsSummary :: MonadWalletWebMode ctx m => m [PendingTxsSummary]
gatherPendingTxsSummary =
map mkInfo .
getNewestFirst . toNewestFirst . sortPtxsChrono .
filter unconfirmedPtx .
getPendingTxs <$> askWalletSnapshot
where
unconfirmedPtx = not . isPtxInBlocks . _ptxCond
mkInfo PendingTx{..} =
let tx = taTx _ptxTxAux
in PendingTxsSummary
{ ptiSlot = _ptxCreationSlot
, ptiCond = encodeCType (Just _ptxCond)
, ptiInputs = _txInputs tx
, ptiOutputs = _txOutputs tx
, ptiTxId = hash tx
}
| With regard to tx creation policy which is going to be used ,
-- get addresses which are refered by some yet unconfirmed transaction outputs.
getPendingAddresses :: WalletSnapshot -> InputSelectionPolicy -> PendingAddresses
getPendingAddresses ws = \case
OptimizeForSecurity ->
-- NOTE (int-index) The pending transactions are ignored when we optimize
-- for security, so it is faster to not get them. In case they start being
-- used for other purposes, this shortcut must be removed.
mempty
OptimizeForHighThroughput ->
allPendingAddresses (getPendingTxs ws)
| null | https://raw.githubusercontent.com/input-output-hk/project-icarus-importer/36342f277bcb7f1902e677a02d1ce93e4cf224f0/wallet/src/Pos/Wallet/Web/Methods/Txp.hs | haskell | # LANGUAGE Rank2Types #
| Utils for payments and redeeming.
| Like 'submitAndSaveTx', but suppresses errors which can get gone
by the time of resubmission.
get addresses which are refered by some yet unconfirmed transaction outputs.
NOTE (int-index) The pending transactions are ignored when we optimize
for security, so it is faster to not get them. In case they start being
used for other purposes, this shortcut must be removed. | # LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
module Pos.Wallet.Web.Methods.Txp
( MonadWalletTxFull
, gatherPendingTxsSummary
, rewrapTxError
, coinDistrToOutputs
, submitAndSaveNewPtx
, getPendingAddresses
) where
import Universum
import qualified Data.List.NonEmpty as NE
import Formatting (build, sformat, stext, (%))
import Pos.Client.KeyStorage (MonadKeys)
import Pos.Client.Txp.Addresses (MonadAddresses (..))
import Pos.Client.Txp.Util (InputSelectionPolicy (..), PendingAddresses (..),
isCheckedTxError)
import Pos.Core.Common (Coin)
import Pos.Core.Txp (Tx (..), TxAux (..), TxOut (..), TxOutAux (..))
import Pos.Crypto (PassPhrase, hash)
import Pos.Util.Chrono (getNewestFirst, toNewestFirst)
import Pos.Util.Servant (encodeCType)
import Pos.Wallet.Web.ClientTypes (AccountId, Addr, CId)
import Pos.Wallet.Web.Error (WalletError (..), rewrapToWalletError)
import Pos.Wallet.Web.Methods.History (MonadWalletHistory)
import Pos.Wallet.Web.Methods.Misc (PendingTxsSummary (..))
import Pos.Wallet.Web.Mode (MonadWalletWebMode)
import Pos.Wallet.Web.Pending (PendingTx (..), TxSubmissionMode, allPendingAddresses,
isPtxInBlocks, ptxFirstSubmissionHandler, sortPtxsChrono)
import Pos.Wallet.Web.Pending.Submission (submitAndSavePtx)
import Pos.Wallet.Web.State (WalletDB, WalletSnapshot, askWalletSnapshot, getPendingTxs)
import Pos.Wallet.Web.Util (decodeCTypeOrFail)
type MonadWalletTxFull ctx m =
( TxSubmissionMode ctx m
, MonadWalletHistory ctx m
, MonadKeys m
, AddrData m ~ (AccountId, PassPhrase)
)
rewrapTxError
:: forall m a. MonadCatch m
=> Text -> m a -> m a
rewrapTxError prefix =
rewrapToWalletError (\SomeException{} -> True) (InternalError . sbuild) .
rewrapToWalletError isCheckedTxError (RequestError . sbuild)
where
sbuild :: Buildable e => e -> Text
sbuild = sformat (stext%": "%build) prefix
coinDistrToOutputs
:: MonadThrow m
=> NonEmpty (CId Addr, Coin)
-> m (NonEmpty TxOutAux)
coinDistrToOutputs distr = do
addrs <- mapM decodeCTypeOrFail cAddrs
pure $ NE.zipWith mkTxOut addrs coins
where
(cAddrs, coins) = NE.unzip distr
mkTxOut addr coin = TxOutAux (TxOut addr coin)
submitAndSaveNewPtx
:: TxSubmissionMode ctx m
=> WalletDB
-> (TxAux -> m Bool)
-> PendingTx
-> m ()
submitAndSaveNewPtx db submit = submitAndSavePtx db submit ptxFirstSubmissionHandler
gatherPendingTxsSummary :: MonadWalletWebMode ctx m => m [PendingTxsSummary]
gatherPendingTxsSummary =
map mkInfo .
getNewestFirst . toNewestFirst . sortPtxsChrono .
filter unconfirmedPtx .
getPendingTxs <$> askWalletSnapshot
where
unconfirmedPtx = not . isPtxInBlocks . _ptxCond
mkInfo PendingTx{..} =
let tx = taTx _ptxTxAux
in PendingTxsSummary
{ ptiSlot = _ptxCreationSlot
, ptiCond = encodeCType (Just _ptxCond)
, ptiInputs = _txInputs tx
, ptiOutputs = _txOutputs tx
, ptiTxId = hash tx
}
| With regard to tx creation policy which is going to be used ,
getPendingAddresses :: WalletSnapshot -> InputSelectionPolicy -> PendingAddresses
getPendingAddresses ws = \case
OptimizeForSecurity ->
mempty
OptimizeForHighThroughput ->
allPendingAddresses (getPendingTxs ws)
|
85e92477949716d64017d2d0b3fcd5c45cd718afa9ce3a9e4afaefe0efa2802e | satori-com/mzbench | worker_from_git.erl | [
{make_install, [
{git, {var, "mzbench_repo"}},
{branch, {var, "worker_branch"}},
{dir, "workers/exec"}]},
{pool, [{size, 2}, {worker_type, exec_worker}],
[{execute, "sleep 5"}]}
].
| null | https://raw.githubusercontent.com/satori-com/mzbench/02be2684655cde94d537c322bb0611e258ae9718/acceptance_tests/scripts/worker_from_git.erl | erlang | [
{make_install, [
{git, {var, "mzbench_repo"}},
{branch, {var, "worker_branch"}},
{dir, "workers/exec"}]},
{pool, [{size, 2}, {worker_type, exec_worker}],
[{execute, "sleep 5"}]}
].
| |
216164ae74c2ce6c34c12d7f3918bfe96c2dd855b909c5896a958e18312f2ee7 | ocaml-omake/omake | lm_printf.mli |
(**
* For now, just use normal output channels.
* Type t of buffers.
*)
type 'a t = Format.formatter -> 'a -> unit
val open_out : string -> Format.formatter
val open_out_bin : string -> Format.formatter
(*
* These functions are bad style for functional programs.
*)
val prerr_char : char -> unit
val prerr_int : int -> unit
val prerr_string : string -> unit
(*
* Flush the output.
*)
val flush : Format.formatter -> unit
val eflush : Format.formatter -> unit
(*
* Printing.
*)
val eprintf : ('a, Format.formatter, unit) format -> 'a
val printf : ('a, Format.formatter, unit) format -> 'a
val sprintf : ('a, unit, string) format -> 'a
val fprintf : Format.formatter -> ('a, Format.formatter, unit) format -> 'a
(*
* List printing helpers.
*)
val print_any_list : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a list -> unit
val print_string_list : Format.formatter -> string list -> unit
val print_int_list : Format.formatter -> int list -> unit
(************************************************************************
* Formatter interface.
*)
(*
* Boxes.
*)
val open_box : int -> unit
val open_vbox : int -> unit
val open_hbox : unit -> unit
val open_hvbox : int -> unit
val open_hovbox : int -> unit
val close_box : unit -> unit
(*
* Formatting functions.
*)
val print_string : string -> unit
val print_as : int -> string -> unit
val print_int : int -> unit
val print_float : float -> unit
val print_char : char -> unit
val print_bool : bool -> unit
(*
* Break hints.
*)
val print_space : unit -> unit
val print_cut : unit -> unit
val print_break : int -> int -> unit
val print_flush : unit -> unit
val print_newline : unit -> unit
val force_newline : unit -> unit
val print_if_newline : unit -> unit
(*
* Margin.
*)
val set_margin : int -> unit
val get_margin : unit -> int
(*
* Indentation limit.
*)
val set_max_indent : int -> unit
val get_max_indent : unit -> int
(*
* Formatting depth.
*)
val set_max_boxes : int -> unit
val get_max_boxes : unit -> int
val over_max_boxes : unit -> bool
(*
* Tabulations.
*)
val open_tbox : unit -> unit
val close_tbox : unit -> unit
val print_tbreak : int -> int -> unit
val set_tab : unit -> unit
val print_tab : unit -> unit
(*
* Ellipsis.
*)
val set_ellipsis_text : string -> unit
val get_ellipsis_text : unit -> string
(*
* Redirecting formatter output.
*)
val set_formatter_out_channel : out_channel -> unit
val set_formatter_output_functions : (string -> int -> int -> unit) -> (unit -> unit) -> unit
val get_formatter_output_functions : unit -> (string -> int -> int -> unit) * (unit -> unit)
(*
* Multiple formatted output.
*)
type formatter = Format.formatter
val formatter_of_out_channel : out_channel -> formatter
val std_formatter : formatter
val err_formatter : formatter
val str_formatter : formatter
val stdbuf : Buffer.t
val flush_str_formatter : unit -> string
val formatter_of_buffer : Buffer.t -> formatter
val make_formatter : (string -> int -> int -> unit) -> (unit -> unit) -> formatter
val byte_formatter : (bytes -> int -> int -> unit) -> (unit -> unit) -> formatter
val pp_open_hbox : formatter -> unit -> unit
val pp_open_vbox : formatter -> int -> unit
val pp_open_hvbox : formatter -> int -> unit
val pp_open_hovbox : formatter -> int -> unit
val pp_open_box : formatter -> int -> unit
val pp_close_box : formatter -> unit -> unit
val pp_print_string : formatter -> string -> unit
val pp_print_as : formatter -> int -> string -> unit
val pp_print_int : formatter -> int -> unit
val pp_print_float : formatter -> float -> unit
val pp_print_char : formatter -> char -> unit
val pp_print_bool : formatter -> bool -> unit
val pp_print_break : formatter -> int -> int -> unit
val pp_print_cut : formatter -> unit -> unit
val pp_print_space : formatter -> unit -> unit
val pp_force_newline : formatter -> unit -> unit
val pp_print_flush : formatter -> unit -> unit
val pp_print_newline : formatter -> unit -> unit
val pp_print_if_newline : formatter -> unit -> unit
val pp_open_tbox : formatter -> unit -> unit
val pp_close_tbox : formatter -> unit -> unit
val pp_print_tbreak : formatter -> int -> int -> unit
val pp_set_tab : formatter -> unit -> unit
val pp_print_tab : formatter -> unit -> unit
val pp_set_margin : formatter -> int -> unit
val pp_get_margin : formatter -> unit -> int
val pp_set_max_indent : formatter -> int -> unit
val pp_get_max_indent : formatter -> unit -> int
val pp_set_max_boxes : formatter -> int -> unit
val pp_get_max_boxes : formatter -> unit -> int
val pp_over_max_boxes : formatter -> unit -> bool
val pp_set_ellipsis_text : formatter -> string -> unit
val pp_get_ellipsis_text : formatter -> unit -> string
val pp_set_formatter_out_channel : formatter -> out_channel -> unit
val pp_set_formatter_output_functions :
formatter -> (string -> int -> int -> unit) -> (unit -> unit) -> unit
(* Prints a "; "- separated list. *)
val pp_print_any_list : (formatter -> 'a -> unit) -> formatter -> 'a list -> unit
| null | https://raw.githubusercontent.com/ocaml-omake/omake/26b39e82f81c912f8c0f9859328c9c24800e6ba8/src/libmojave/lm_printf.mli | ocaml | *
* For now, just use normal output channels.
* Type t of buffers.
* These functions are bad style for functional programs.
* Flush the output.
* Printing.
* List printing helpers.
***********************************************************************
* Formatter interface.
* Boxes.
* Formatting functions.
* Break hints.
* Margin.
* Indentation limit.
* Formatting depth.
* Tabulations.
* Ellipsis.
* Redirecting formatter output.
* Multiple formatted output.
Prints a "; "- separated list. |
type 'a t = Format.formatter -> 'a -> unit
val open_out : string -> Format.formatter
val open_out_bin : string -> Format.formatter
val prerr_char : char -> unit
val prerr_int : int -> unit
val prerr_string : string -> unit
val flush : Format.formatter -> unit
val eflush : Format.formatter -> unit
val eprintf : ('a, Format.formatter, unit) format -> 'a
val printf : ('a, Format.formatter, unit) format -> 'a
val sprintf : ('a, unit, string) format -> 'a
val fprintf : Format.formatter -> ('a, Format.formatter, unit) format -> 'a
val print_any_list : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a list -> unit
val print_string_list : Format.formatter -> string list -> unit
val print_int_list : Format.formatter -> int list -> unit
val open_box : int -> unit
val open_vbox : int -> unit
val open_hbox : unit -> unit
val open_hvbox : int -> unit
val open_hovbox : int -> unit
val close_box : unit -> unit
val print_string : string -> unit
val print_as : int -> string -> unit
val print_int : int -> unit
val print_float : float -> unit
val print_char : char -> unit
val print_bool : bool -> unit
val print_space : unit -> unit
val print_cut : unit -> unit
val print_break : int -> int -> unit
val print_flush : unit -> unit
val print_newline : unit -> unit
val force_newline : unit -> unit
val print_if_newline : unit -> unit
val set_margin : int -> unit
val get_margin : unit -> int
val set_max_indent : int -> unit
val get_max_indent : unit -> int
val set_max_boxes : int -> unit
val get_max_boxes : unit -> int
val over_max_boxes : unit -> bool
val open_tbox : unit -> unit
val close_tbox : unit -> unit
val print_tbreak : int -> int -> unit
val set_tab : unit -> unit
val print_tab : unit -> unit
val set_ellipsis_text : string -> unit
val get_ellipsis_text : unit -> string
val set_formatter_out_channel : out_channel -> unit
val set_formatter_output_functions : (string -> int -> int -> unit) -> (unit -> unit) -> unit
val get_formatter_output_functions : unit -> (string -> int -> int -> unit) * (unit -> unit)
type formatter = Format.formatter
val formatter_of_out_channel : out_channel -> formatter
val std_formatter : formatter
val err_formatter : formatter
val str_formatter : formatter
val stdbuf : Buffer.t
val flush_str_formatter : unit -> string
val formatter_of_buffer : Buffer.t -> formatter
val make_formatter : (string -> int -> int -> unit) -> (unit -> unit) -> formatter
val byte_formatter : (bytes -> int -> int -> unit) -> (unit -> unit) -> formatter
val pp_open_hbox : formatter -> unit -> unit
val pp_open_vbox : formatter -> int -> unit
val pp_open_hvbox : formatter -> int -> unit
val pp_open_hovbox : formatter -> int -> unit
val pp_open_box : formatter -> int -> unit
val pp_close_box : formatter -> unit -> unit
val pp_print_string : formatter -> string -> unit
val pp_print_as : formatter -> int -> string -> unit
val pp_print_int : formatter -> int -> unit
val pp_print_float : formatter -> float -> unit
val pp_print_char : formatter -> char -> unit
val pp_print_bool : formatter -> bool -> unit
val pp_print_break : formatter -> int -> int -> unit
val pp_print_cut : formatter -> unit -> unit
val pp_print_space : formatter -> unit -> unit
val pp_force_newline : formatter -> unit -> unit
val pp_print_flush : formatter -> unit -> unit
val pp_print_newline : formatter -> unit -> unit
val pp_print_if_newline : formatter -> unit -> unit
val pp_open_tbox : formatter -> unit -> unit
val pp_close_tbox : formatter -> unit -> unit
val pp_print_tbreak : formatter -> int -> int -> unit
val pp_set_tab : formatter -> unit -> unit
val pp_print_tab : formatter -> unit -> unit
val pp_set_margin : formatter -> int -> unit
val pp_get_margin : formatter -> unit -> int
val pp_set_max_indent : formatter -> int -> unit
val pp_get_max_indent : formatter -> unit -> int
val pp_set_max_boxes : formatter -> int -> unit
val pp_get_max_boxes : formatter -> unit -> int
val pp_over_max_boxes : formatter -> unit -> bool
val pp_set_ellipsis_text : formatter -> string -> unit
val pp_get_ellipsis_text : formatter -> unit -> string
val pp_set_formatter_out_channel : formatter -> out_channel -> unit
val pp_set_formatter_output_functions :
formatter -> (string -> int -> int -> unit) -> (unit -> unit) -> unit
val pp_print_any_list : (formatter -> 'a -> unit) -> formatter -> 'a list -> unit
|
e3a32ca27cbb66e07a344ff5c2a22eb5effbf616c0489f648adbcb0f16cf28f4 | ygrek/mldonkey | donkeyUdp.ml | Copyright 2001 , 2002 b8_bavard , b8_fee_carabine ,
This file is part of mldonkey .
mldonkey is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
mldonkey is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with mldonkey ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This file is part of mldonkey.
mldonkey is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
mldonkey is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mldonkey; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
open Printf2
open Md4
open CommonSearch
open CommonTypes
open Options
open BasicSocket
open DonkeyOneFile
open DonkeyProtoCom
open DonkeyTypes
open DonkeyGlobals
open DonkeyComplexOptions
open DonkeyOptions
open CommonOptions
open DonkeyClient
open CommonGlobals
module Udp = DonkeyProtoUdp
let udp_server_send_query_location s l =
if s.server_has_get_sources2 then
udp_server_send s (Udp.QueryLocationUdpReq2 l)
else
udp_server_send s (Udp.QueryLocationUdpReq (List.map (fun (md4,_) -> md4) l))
let search_handler s t =
let waiting = s.search_waiting - 1 in
s.search_waiting <- waiting;
List.iter (fun f ->
search_found false s f.f_md4 f.f_tags
) t
(* search.search_handler (Waiting s.search_waiting) *)
let make_xs ss =
if !verbose_udp then lprintf "******** make_xs ********\n";
if ss.search_num <> !xs_last_search then
begin
xs_last_search := ss.search_num;
xs_servers_list := Hashtbl2.to_list servers_by_key;
end;
let cut_for_udp_send max_servers list =
let min_last_conn = last_time () - 8 * 3600 in
let rec iter list n left =
if n = 0 then
left, list
else
match list with
[] -> left, []
| s :: tail ->
if connection_last_conn s.server_connection_control > min_last_conn then
iter tail (n-1) (s :: left)
else
iter tail n left
in
iter list max_servers []
in
let before, after = cut_for_udp_send !!max_xs_packets !xs_servers_list in
xs_servers_list := after;
List.iter (fun s ->
match s.server_sock with
Connection _ -> ()
| _ ->
let module M = DonkeyProtoServer in
let module Q = M.Query in
udp_server_send s (
By default , send the MultipleUdp ! ! ! we have to set
server_send_multiple_replies to true by default , and change it to false
when receiving an old ping .
if server_send_multiple_replies s then
Udp . QueryUdpReq ss.search_query
else
server_send_multiple_replies to true by default, and change it to false
when receiving an old ping.
if server_send_multiple_replies s then
Udp.QueryUdpReq ss.search_query
else *)
Udp.QueryMultipleUdpReq ss.search_query);
) before;
if !verbose_overnet then lprintf "===================== STARTING SEARCH ON OVERNET =========\n";
DonkeyProtoOvernet.Overnet.overnet_search ss;
DonkeyProtoKademlia.Kademlia.overnet_search ss
let extent_search () =
try
if !xs_last_search >= 0 then begin
try
make_xs (search_find !xs_last_search)
with _ -> ()
end;
start removed by savannah patch # 3616
let files = ref [ ] in
List.iter ( fun file - >
if file_state file = FileDownloading then
files : = file : : ! files ) ! current_files ;
if ! files < > [ ] then
let old_servers = ref [ ] in
let new_servers = ref [ ] in
let = ref 0 in
while ! < ! ! max_udp_sends & &
match ! udp_servers_list with
[ ] - > false
| s : : tail - >
udp_servers_list : = tail ;
( match s.server_sock with
Connection _ - > ( )
| _ - >
if
connection_last_conn s.server_connection_control + 3600 * 8 > last_time ( ) & &
s.server_next_udp < = last_time ( ) then begin
( if server_accept_multiple_getsources s then
new_servers : = s : : ! new_servers
else
old_servers : = s : : ! old_servers ) ;
incr nservers ;
end
) ;
true do
( )
done ;
if ! new_servers < > [ ] then begin
let md4s = List.map ( fun file - > file.file_md4 ) ! files in
List.iter ( fun s - >
let module in
udp_server_send s ( Udp . QueryLocationUdpReq md4s ) ;
s.server_next_udp < - last_time ( ) + ! ! ) ! new_servers
end ;
if ! old_servers < > [ ] then
List.iter ( fun file - >
if file_state file = FileDownloading then begin
( * ( * USELESS NOW
start removed by savannah patch #3616
let files = ref [] in
List.iter (fun file ->
if file_state file = FileDownloading then
files := file :: !files) !current_files;
if !files <> [] then
let old_servers = ref [] in
let new_servers = ref [] in
let nservers = ref 0 in
while !nservers < !!max_udp_sends &&
match !udp_servers_list with
[] -> false
| s :: tail ->
udp_servers_list := tail;
(match s.server_sock with
Connection _ -> ()
| _ ->
if
connection_last_conn s.server_connection_control + 3600*8 > last_time () &&
s.server_next_udp <= last_time () then begin
(if server_accept_multiple_getsources s then
new_servers := s :: !new_servers
else
old_servers := s :: !old_servers);
incr nservers;
end
);
true do
()
done;
if !new_servers <> [] then begin
let md4s = List.map (fun file -> file.file_md4) !files in
List.iter (fun s ->
let module Udp = DonkeyProtoUdp in
udp_server_send s (Udp.QueryLocationUdpReq md4s);
s.server_next_udp <- last_time () + !!min_reask_delay
) !new_servers
end;
if !old_servers <> [] then
List.iter (fun file ->
if file_state file = FileDownloading then begin
(*(* USELESS NOW *)
Intmap.iter (fun _ c ->
try connect_client !!client_ip [file] c with _ -> ())
file.file_known_locations;
*)
(* now, it is done in donkeySources
List.iter (fun s ->
match s.server_sock with
None -> () (* assert false !!! *)
| Some sock ->
(try DonkeyServers.query_location file sock with _ -> ())
) (connected_servers());
*)
List.iter (fun s ->
if
connection_last_conn s.server_connection_control + 3600*8 > last_time () &&
s.server_next_udp <= last_time () then
match s.server_sock with
| Connection _ -> ()
| _ -> udp_query_locations file s
) !old_servers;
end
) !current_files;
List.iter (fun s ->
s.server_next_udp <- last_time () + !!min_reask_delay
) !old_servers;
if !udp_servers_list = [] then
udp_servers_list := Hashtbl2.to_list servers_by_key;
end removed by savannah patch #3616
*)
with e ->
lprintf "extent_search: %s\n" (Printexc2.to_string e)
let add_user_friend s u =
let kind =
if Ip.usable u.user_ip then
Direct_address (u.user_ip, u.user_port)
else
begin
( match s.server_sock, server_state s with
Connection sock, (Connected _ |Connected_downloading _) ->
query_id s.server_ip s.server_port (id_of_ip u.user_ip)
| _ -> ()
);
Invalid_address (u.user_name, Md4.to_string u.user_md4)
end
in
let c = new_client kind None in
c.client_tags <- u.user_tags;
set_client_name c u.user_name u.user_md4;
friend_add c
let udp_client_handler t p =
if !verbose_udp then
lprintf_nl "Received UDP message:\n%s" (Udp.print t);
let udp_from_server p =
match p.UdpSocket.udp_addr with
| Unix.ADDR_INET(ip, port) ->
let ip = Ip.of_inet_addr ip in
let s =
if !!update_server_list_server then
check_add_server ip (port-4)
else
find_server ip (port-4) in
set , but add a 2 minutes offset to prevent
staying connected to this server
staying connected to this server *)
connection_set_last_conn s.server_connection_control (
last_time () - 121);
s.server_score <- s.server_score + 3;
s.server_failed_count <- 0;
s
| _ -> raise Not_found
in
match t with
| Udp.QueryLocationReplyUdpReq t ->
(* lprintf "Received location by UDP\n"; *)
let s = udp_from_server p in
List.iter (query_locations_reply s) t
| Udp.QueryReplyUdpReq t ->
(* lprintf "Received file by UDP\n"; *)
if !xs_last_search >= 0 then
let ss = search_find !xs_last_search in
let s = udp_from_server p in
List.iter (fun t ->
Hashtbl.add udp_servers_replies t.f_md4 s;
search_handler ss [t]
) t
| Udp.PingServerReplyUdpReq t ->
let s = udp_from_server p in
let module M = Udp.PingServerReplyUdp in
let check_challenge, challenge_v =
match s.server_udp_ping_challenge with
| Some challenge when challenge = t.M.challenge -> true, challenge
| Some challenge -> false, challenge
| _ -> false, 0L
in
if check_challenge then begin
UdpSocket.declare_pong s.server_ip;
let now = Unix.gettimeofday() in
s.server_ping <- int_of_float ((now -. s.server_last_ping) *. 1000.);
s.server_udp_ping_challenge <- None;
s.server_has_get_sources <- t.M.get_sources;
s.server_has_get_files <- t.M.get_files;
s.server_has_newtags <- t.M.newtags;
s.server_has_unicode <- t.M.unicode;
s.server_has_get_sources2 <- t.M.get_sources2;
s.server_has_largefiles <- t.M.largefiles;
(match s.server_obfuscation_udp with
| None -> if t.M.udp_obfuscation then s.server_obfuscation_udp <- Some 0
| Some p -> if not t.M.udp_obfuscation then s.server_obfuscation_udp <- None);
(match s.server_obfuscation_tcp with
| None -> if t.M.tcp_obfuscation then s.server_obfuscation_tcp <- Some 0
| Some p -> if not t.M.tcp_obfuscation then s.server_obfuscation_tcp <- None);
if t.M.files > 0L then s.server_nfiles <- Some t.M.files;
if t.M.users > 0L then s.server_nusers <- Some t.M.users;
(match t.M.max_users with
Some x when x > 0L -> s.server_max_users <- Some x
| _ -> ());
(match t.M.lowid_users with
Some x when x > 0L -> s.server_lowid_users <- Some x
| _ -> ());
(match t.M.soft_limit with
Some x when x > 0L -> s.server_soft_limit <- Some x
| _ -> ());
(match t.M.hard_limit with
Some x when x > 0L -> s.server_hard_limit <- Some x
| _ -> ());
server_must_update s
end else
begin
lprintf_nl "received PingServerReply from %s with invalid challenge: %Ld <> %Ld"
(string_of_server s) challenge_v t.M.challenge;
s.server_udp_ping_challenge <- None;
end
| Udp.ServerDescReplyUdpReq t ->
let module M = Udp.ServerDescReplyUdp in
let s = udp_from_server p in
let check_challenge, challenge_v =
match s.server_udp_desc_challenge with
| Some challenge when challenge = t.M.challenge -> true, challenge
| Some challenge -> false, challenge
| _ -> false, 0L
in
if check_challenge then begin
s.server_name <- t.M.name;
s.server_description <- t.M.desc;
s.server_udp_desc_challenge <- None;
List.iter (fun tag ->
match tag with
{ tag_name = Field_KNOWN "version"; tag_value = Uint64 i } ->
let i = Int64.to_int i in
s.server_version <- Printf.sprintf "%d.%d" (i lsr 16) (i land 0xFFFF);
| { tag_name = Field_KNOWN "auxportslist" ; tag_value = String aux } ->
s.server_auxportslist <- aux
| { tag_name = Field_KNOWN "dynip" ; tag_value = String dynip } ->
s.server_dynip <- dynip
| _ -> ()
) t.M.tags;
if s.server_tags = [] then
s.server_tags <- t.M.tags;
server_must_update s
end else
begin
lprintf_nl "received ServerDescReply from %s with invalid challenge: %Ld <> %Ld"
(string_of_server s) challenge_v t.M.challenge;
s.server_udp_desc_challenge <- None;
end
| Udp.EmuleReaskFilePingUdpReq t -> ()
| Udp.EmulePortTestReq ->
(match !porttest_sock with
None -> ()
| Some sock ->
let s = Buffer.create 10 in
DonkeyProtoUdp.write s Udp.EmulePortTestReq;
TcpBufferedSocket.write_string sock (Buffer.contents s);
porttest_sock := None)
| _ ->
if !verbose_unexpected_messages then
lprintf "Unexpected UDP message: %s\n"
(DonkeyProtoUdp.print t)
| null | https://raw.githubusercontent.com/ygrek/mldonkey/333868a12bb6cd25fed49391dd2c3a767741cb51/src/networks/donkey/donkeyUdp.ml | ocaml | search.search_handler (Waiting s.search_waiting)
(* USELESS NOW
now, it is done in donkeySources
List.iter (fun s ->
match s.server_sock with
None -> () (* assert false !!!
lprintf "Received location by UDP\n";
lprintf "Received file by UDP\n"; | Copyright 2001 , 2002 b8_bavard , b8_fee_carabine ,
This file is part of mldonkey .
mldonkey is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
mldonkey is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with mldonkey ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This file is part of mldonkey.
mldonkey is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
mldonkey is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mldonkey; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
open Printf2
open Md4
open CommonSearch
open CommonTypes
open Options
open BasicSocket
open DonkeyOneFile
open DonkeyProtoCom
open DonkeyTypes
open DonkeyGlobals
open DonkeyComplexOptions
open DonkeyOptions
open CommonOptions
open DonkeyClient
open CommonGlobals
module Udp = DonkeyProtoUdp
let udp_server_send_query_location s l =
if s.server_has_get_sources2 then
udp_server_send s (Udp.QueryLocationUdpReq2 l)
else
udp_server_send s (Udp.QueryLocationUdpReq (List.map (fun (md4,_) -> md4) l))
let search_handler s t =
let waiting = s.search_waiting - 1 in
s.search_waiting <- waiting;
List.iter (fun f ->
search_found false s f.f_md4 f.f_tags
) t
let make_xs ss =
if !verbose_udp then lprintf "******** make_xs ********\n";
if ss.search_num <> !xs_last_search then
begin
xs_last_search := ss.search_num;
xs_servers_list := Hashtbl2.to_list servers_by_key;
end;
let cut_for_udp_send max_servers list =
let min_last_conn = last_time () - 8 * 3600 in
let rec iter list n left =
if n = 0 then
left, list
else
match list with
[] -> left, []
| s :: tail ->
if connection_last_conn s.server_connection_control > min_last_conn then
iter tail (n-1) (s :: left)
else
iter tail n left
in
iter list max_servers []
in
let before, after = cut_for_udp_send !!max_xs_packets !xs_servers_list in
xs_servers_list := after;
List.iter (fun s ->
match s.server_sock with
Connection _ -> ()
| _ ->
let module M = DonkeyProtoServer in
let module Q = M.Query in
udp_server_send s (
By default , send the MultipleUdp ! ! ! we have to set
server_send_multiple_replies to true by default , and change it to false
when receiving an old ping .
if server_send_multiple_replies s then
Udp . QueryUdpReq ss.search_query
else
server_send_multiple_replies to true by default, and change it to false
when receiving an old ping.
if server_send_multiple_replies s then
Udp.QueryUdpReq ss.search_query
else *)
Udp.QueryMultipleUdpReq ss.search_query);
) before;
if !verbose_overnet then lprintf "===================== STARTING SEARCH ON OVERNET =========\n";
DonkeyProtoOvernet.Overnet.overnet_search ss;
DonkeyProtoKademlia.Kademlia.overnet_search ss
let extent_search () =
try
if !xs_last_search >= 0 then begin
try
make_xs (search_find !xs_last_search)
with _ -> ()
end;
start removed by savannah patch # 3616
let files = ref [ ] in
List.iter ( fun file - >
if file_state file = FileDownloading then
files : = file : : ! files ) ! current_files ;
if ! files < > [ ] then
let old_servers = ref [ ] in
let new_servers = ref [ ] in
let = ref 0 in
while ! < ! ! max_udp_sends & &
match ! udp_servers_list with
[ ] - > false
| s : : tail - >
udp_servers_list : = tail ;
( match s.server_sock with
Connection _ - > ( )
| _ - >
if
connection_last_conn s.server_connection_control + 3600 * 8 > last_time ( ) & &
s.server_next_udp < = last_time ( ) then begin
( if server_accept_multiple_getsources s then
new_servers : = s : : ! new_servers
else
old_servers : = s : : ! old_servers ) ;
incr nservers ;
end
) ;
true do
( )
done ;
if ! new_servers < > [ ] then begin
let md4s = List.map ( fun file - > file.file_md4 ) ! files in
List.iter ( fun s - >
let module in
udp_server_send s ( Udp . QueryLocationUdpReq md4s ) ;
s.server_next_udp < - last_time ( ) + ! ! ) ! new_servers
end ;
if ! old_servers < > [ ] then
List.iter ( fun file - >
if file_state file = FileDownloading then begin
( * ( * USELESS NOW
start removed by savannah patch #3616
let files = ref [] in
List.iter (fun file ->
if file_state file = FileDownloading then
files := file :: !files) !current_files;
if !files <> [] then
let old_servers = ref [] in
let new_servers = ref [] in
let nservers = ref 0 in
while !nservers < !!max_udp_sends &&
match !udp_servers_list with
[] -> false
| s :: tail ->
udp_servers_list := tail;
(match s.server_sock with
Connection _ -> ()
| _ ->
if
connection_last_conn s.server_connection_control + 3600*8 > last_time () &&
s.server_next_udp <= last_time () then begin
(if server_accept_multiple_getsources s then
new_servers := s :: !new_servers
else
old_servers := s :: !old_servers);
incr nservers;
end
);
true do
()
done;
if !new_servers <> [] then begin
let md4s = List.map (fun file -> file.file_md4) !files in
List.iter (fun s ->
let module Udp = DonkeyProtoUdp in
udp_server_send s (Udp.QueryLocationUdpReq md4s);
s.server_next_udp <- last_time () + !!min_reask_delay
) !new_servers
end;
if !old_servers <> [] then
List.iter (fun file ->
if file_state file = FileDownloading then begin
Intmap.iter (fun _ c ->
try connect_client !!client_ip [file] c with _ -> ())
file.file_known_locations;
*)
| Some sock ->
(try DonkeyServers.query_location file sock with _ -> ())
) (connected_servers());
*)
List.iter (fun s ->
if
connection_last_conn s.server_connection_control + 3600*8 > last_time () &&
s.server_next_udp <= last_time () then
match s.server_sock with
| Connection _ -> ()
| _ -> udp_query_locations file s
) !old_servers;
end
) !current_files;
List.iter (fun s ->
s.server_next_udp <- last_time () + !!min_reask_delay
) !old_servers;
if !udp_servers_list = [] then
udp_servers_list := Hashtbl2.to_list servers_by_key;
end removed by savannah patch #3616
*)
with e ->
lprintf "extent_search: %s\n" (Printexc2.to_string e)
let add_user_friend s u =
let kind =
if Ip.usable u.user_ip then
Direct_address (u.user_ip, u.user_port)
else
begin
( match s.server_sock, server_state s with
Connection sock, (Connected _ |Connected_downloading _) ->
query_id s.server_ip s.server_port (id_of_ip u.user_ip)
| _ -> ()
);
Invalid_address (u.user_name, Md4.to_string u.user_md4)
end
in
let c = new_client kind None in
c.client_tags <- u.user_tags;
set_client_name c u.user_name u.user_md4;
friend_add c
let udp_client_handler t p =
if !verbose_udp then
lprintf_nl "Received UDP message:\n%s" (Udp.print t);
let udp_from_server p =
match p.UdpSocket.udp_addr with
| Unix.ADDR_INET(ip, port) ->
let ip = Ip.of_inet_addr ip in
let s =
if !!update_server_list_server then
check_add_server ip (port-4)
else
find_server ip (port-4) in
set , but add a 2 minutes offset to prevent
staying connected to this server
staying connected to this server *)
connection_set_last_conn s.server_connection_control (
last_time () - 121);
s.server_score <- s.server_score + 3;
s.server_failed_count <- 0;
s
| _ -> raise Not_found
in
match t with
| Udp.QueryLocationReplyUdpReq t ->
let s = udp_from_server p in
List.iter (query_locations_reply s) t
| Udp.QueryReplyUdpReq t ->
if !xs_last_search >= 0 then
let ss = search_find !xs_last_search in
let s = udp_from_server p in
List.iter (fun t ->
Hashtbl.add udp_servers_replies t.f_md4 s;
search_handler ss [t]
) t
| Udp.PingServerReplyUdpReq t ->
let s = udp_from_server p in
let module M = Udp.PingServerReplyUdp in
let check_challenge, challenge_v =
match s.server_udp_ping_challenge with
| Some challenge when challenge = t.M.challenge -> true, challenge
| Some challenge -> false, challenge
| _ -> false, 0L
in
if check_challenge then begin
UdpSocket.declare_pong s.server_ip;
let now = Unix.gettimeofday() in
s.server_ping <- int_of_float ((now -. s.server_last_ping) *. 1000.);
s.server_udp_ping_challenge <- None;
s.server_has_get_sources <- t.M.get_sources;
s.server_has_get_files <- t.M.get_files;
s.server_has_newtags <- t.M.newtags;
s.server_has_unicode <- t.M.unicode;
s.server_has_get_sources2 <- t.M.get_sources2;
s.server_has_largefiles <- t.M.largefiles;
(match s.server_obfuscation_udp with
| None -> if t.M.udp_obfuscation then s.server_obfuscation_udp <- Some 0
| Some p -> if not t.M.udp_obfuscation then s.server_obfuscation_udp <- None);
(match s.server_obfuscation_tcp with
| None -> if t.M.tcp_obfuscation then s.server_obfuscation_tcp <- Some 0
| Some p -> if not t.M.tcp_obfuscation then s.server_obfuscation_tcp <- None);
if t.M.files > 0L then s.server_nfiles <- Some t.M.files;
if t.M.users > 0L then s.server_nusers <- Some t.M.users;
(match t.M.max_users with
Some x when x > 0L -> s.server_max_users <- Some x
| _ -> ());
(match t.M.lowid_users with
Some x when x > 0L -> s.server_lowid_users <- Some x
| _ -> ());
(match t.M.soft_limit with
Some x when x > 0L -> s.server_soft_limit <- Some x
| _ -> ());
(match t.M.hard_limit with
Some x when x > 0L -> s.server_hard_limit <- Some x
| _ -> ());
server_must_update s
end else
begin
lprintf_nl "received PingServerReply from %s with invalid challenge: %Ld <> %Ld"
(string_of_server s) challenge_v t.M.challenge;
s.server_udp_ping_challenge <- None;
end
| Udp.ServerDescReplyUdpReq t ->
let module M = Udp.ServerDescReplyUdp in
let s = udp_from_server p in
let check_challenge, challenge_v =
match s.server_udp_desc_challenge with
| Some challenge when challenge = t.M.challenge -> true, challenge
| Some challenge -> false, challenge
| _ -> false, 0L
in
if check_challenge then begin
s.server_name <- t.M.name;
s.server_description <- t.M.desc;
s.server_udp_desc_challenge <- None;
List.iter (fun tag ->
match tag with
{ tag_name = Field_KNOWN "version"; tag_value = Uint64 i } ->
let i = Int64.to_int i in
s.server_version <- Printf.sprintf "%d.%d" (i lsr 16) (i land 0xFFFF);
| { tag_name = Field_KNOWN "auxportslist" ; tag_value = String aux } ->
s.server_auxportslist <- aux
| { tag_name = Field_KNOWN "dynip" ; tag_value = String dynip } ->
s.server_dynip <- dynip
| _ -> ()
) t.M.tags;
if s.server_tags = [] then
s.server_tags <- t.M.tags;
server_must_update s
end else
begin
lprintf_nl "received ServerDescReply from %s with invalid challenge: %Ld <> %Ld"
(string_of_server s) challenge_v t.M.challenge;
s.server_udp_desc_challenge <- None;
end
| Udp.EmuleReaskFilePingUdpReq t -> ()
| Udp.EmulePortTestReq ->
(match !porttest_sock with
None -> ()
| Some sock ->
let s = Buffer.create 10 in
DonkeyProtoUdp.write s Udp.EmulePortTestReq;
TcpBufferedSocket.write_string sock (Buffer.contents s);
porttest_sock := None)
| _ ->
if !verbose_unexpected_messages then
lprintf "Unexpected UDP message: %s\n"
(DonkeyProtoUdp.print t)
|
6924e38410fe7e344cdabf7db579c257c3f27a7fe5281dee7672d791c900ae20 | sicmutils/sicmutils | ch1_test.cljc | #_"SPDX-License-Identifier: GPL-3.0"
(ns sicmutils.fdg.ch1-test
(:refer-clojure :exclude [+ - * /])
(:require [clojure.test :refer [is deftest use-fixtures]]
[sicmutils.env :as e :refer [- * /
compose
up
sin cos square
R2-rect
chart point
define-coordinates]]
[sicmutils.simplify :refer [hermetic-simplify-fixture]]
[sicmutils.value :as v]))
(use-fixtures :each hermetic-simplify-fixture)
(def simplify
(comp v/freeze e/simplify))
(defn Lfree
[mass]
(fn [[_ _ v]]
(* (/ 1 2) mass (square v))))
(defn sphere->R3
[R]
(fn [[_ [theta phi]]]
(up (* R (sin theta) (cos phi)) ; x
(* R (sin theta) (sin phi)) ; y
(* R (cos theta))))) ; z
(defn Lsphere
[m R]
(compose (Lfree m) (e/F->C (sphere->R3 R))))
(defn L2
[mass metric]
(fn [place velocity]
(* (/ 1 2) mass ((metric velocity velocity) place))))
(defn Lc [mass metric coordsys]
(fn [[_ x v]]
(let [e (e/coordinate-system->vector-basis coordsys)]
((L2 mass metric) ((point coordsys) x) (* e v)))))
(define-coordinates t e/R1-rect)
(deftest chapter-one-tests
(is (= '(+ (* (/ 1 2) (expt R 2) m (expt phidot 2) (expt (sin theta) 2))
(* (/ 1 2) (expt R 2) m (expt thetadot 2)))
(simplify
((Lsphere 'm 'R)
(up 't (up 'theta 'phi) (up 'thetadot 'phidot)))))
"p4")
(let [the-metric (e/literal-metric 'g R2-rect)
L (Lc 'm the-metric R2-rect)]
(is (= '(+ (* (/ 1 2) m (expt vx 2) (g_00 (up x y)))
(* m vx vy (g_01 (up x y)))
(* (/ 1 2) m (expt vy 2) (g_11 (up x y))))
(simplify
(L (up 't (up 'x 'y) (up 'vx 'vy)))))
"p7: Compare this result with equation (1.3)")
(let [gamma (e/literal-manifold-map 'q R1-rect R2-rect)
coordinate-path (compose (chart R2-rect)
gamma
(point R1-rect))]
(is (= (up '(q↑0 t) '(q↑1 t))
((chart R2-rect) (gamma ((point R1-rect) 't))))
"page 8")
(is (= (up '(q↑0 t) '(q↑1 t))
(coordinate-path 't)))
;; Now we can compute the residuals of the Euler-Lagrange equations, but
;; we get a large messy expression that we will not show.
(let [Lagrange-residuals (((e/Lagrange-equations L) coordinate-path) 't)
R2-basis (e/coordinate-system->basis R2-rect)
Cartan
(e/Christoffel->Cartan
(e/metric->Christoffel-2 the-metric R2-basis))
geodesic-equation-residuals (((((e/covariant-derivative Cartan gamma) d:dt)
((e/differential gamma) d:dt))
(chart R2-rect))
((point R1-rect) 't))
metric-components (e/metric->components the-metric R2-basis)]
(is (= '(down 0 0)
(simplify
(- Lagrange-residuals
(* (* 'm (metric-components (gamma ((point R1-rect) 't))))
geodesic-equation-residuals))))
"p10: This establishes that for a 2-dimensional space the
Euler-Lagrange equations are equivalent to the geodesic
equations.")))))
| null | https://raw.githubusercontent.com/sicmutils/sicmutils/f427fd073937faf62741fcdda6362f910ecda71f/test/sicmutils/fdg/ch1_test.cljc | clojure | x
y
z
Now we can compute the residuals of the Euler-Lagrange equations, but
we get a large messy expression that we will not show. | #_"SPDX-License-Identifier: GPL-3.0"
(ns sicmutils.fdg.ch1-test
(:refer-clojure :exclude [+ - * /])
(:require [clojure.test :refer [is deftest use-fixtures]]
[sicmutils.env :as e :refer [- * /
compose
up
sin cos square
R2-rect
chart point
define-coordinates]]
[sicmutils.simplify :refer [hermetic-simplify-fixture]]
[sicmutils.value :as v]))
(use-fixtures :each hermetic-simplify-fixture)
(def simplify
(comp v/freeze e/simplify))
(defn Lfree
[mass]
(fn [[_ _ v]]
(* (/ 1 2) mass (square v))))
(defn sphere->R3
[R]
(fn [[_ [theta phi]]]
(defn Lsphere
[m R]
(compose (Lfree m) (e/F->C (sphere->R3 R))))
(defn L2
[mass metric]
(fn [place velocity]
(* (/ 1 2) mass ((metric velocity velocity) place))))
(defn Lc [mass metric coordsys]
(fn [[_ x v]]
(let [e (e/coordinate-system->vector-basis coordsys)]
((L2 mass metric) ((point coordsys) x) (* e v)))))
(define-coordinates t e/R1-rect)
(deftest chapter-one-tests
(is (= '(+ (* (/ 1 2) (expt R 2) m (expt phidot 2) (expt (sin theta) 2))
(* (/ 1 2) (expt R 2) m (expt thetadot 2)))
(simplify
((Lsphere 'm 'R)
(up 't (up 'theta 'phi) (up 'thetadot 'phidot)))))
"p4")
(let [the-metric (e/literal-metric 'g R2-rect)
L (Lc 'm the-metric R2-rect)]
(is (= '(+ (* (/ 1 2) m (expt vx 2) (g_00 (up x y)))
(* m vx vy (g_01 (up x y)))
(* (/ 1 2) m (expt vy 2) (g_11 (up x y))))
(simplify
(L (up 't (up 'x 'y) (up 'vx 'vy)))))
"p7: Compare this result with equation (1.3)")
(let [gamma (e/literal-manifold-map 'q R1-rect R2-rect)
coordinate-path (compose (chart R2-rect)
gamma
(point R1-rect))]
(is (= (up '(q↑0 t) '(q↑1 t))
((chart R2-rect) (gamma ((point R1-rect) 't))))
"page 8")
(is (= (up '(q↑0 t) '(q↑1 t))
(coordinate-path 't)))
(let [Lagrange-residuals (((e/Lagrange-equations L) coordinate-path) 't)
R2-basis (e/coordinate-system->basis R2-rect)
Cartan
(e/Christoffel->Cartan
(e/metric->Christoffel-2 the-metric R2-basis))
geodesic-equation-residuals (((((e/covariant-derivative Cartan gamma) d:dt)
((e/differential gamma) d:dt))
(chart R2-rect))
((point R1-rect) 't))
metric-components (e/metric->components the-metric R2-basis)]
(is (= '(down 0 0)
(simplify
(- Lagrange-residuals
(* (* 'm (metric-components (gamma ((point R1-rect) 't))))
geodesic-equation-residuals))))
"p10: This establishes that for a 2-dimensional space the
Euler-Lagrange equations are equivalent to the geodesic
equations.")))))
|
e83993a7be1c4bbf15df260e87accd238dd812ad6237c95a1cded7ceb3751c7d | nasa/Common-Metadata-Repository | project.clj | (defproject nasa-cmr/cmr-metadata-db-app "0.1.0-SNAPSHOT"
:description "The metadata db is a micro-service that provides
support for persisting metadata concepts."
:url "-Metadata-Repository/tree/master/metadata-db-app"
:exclusions [[cheshire]
[clj-http]
[clj-time]
[com.fasterxml.jackson.core/jackson-core]
[com.fasterxml.jackson.core/jackson-databind]
[org.clojure/tools.reader]
[org.slf4j/slf4j-api]]
:dependencies [[cheshire "5.10.0"]
[clj-http "3.11.0"]
[clj-time "0.15.1"]
[com.fasterxml.jackson.core/jackson-core "2.13.2"]
[com.fasterxml.jackson.dataformat/jackson-dataformat-cbor "2.13.2"]
[compojure "1.6.1"]
[drift "1.5.3"]
[inflections "0.13.0"]
[nasa-cmr/cmr-acl-lib "0.1.0-SNAPSHOT"]
[nasa-cmr/cmr-common-app-lib "0.1.0-SNAPSHOT"]
[nasa-cmr/cmr-common-lib "0.1.1-SNAPSHOT"]
[nasa-cmr/cmr-message-queue-lib "0.1.0-SNAPSHOT"]
[nasa-cmr/cmr-oracle-lib "0.1.0-SNAPSHOT"]
[nasa-cmr/cmr-schemas "0.0.1-SNAPSHOT"]
[nasa-cmr/cmr-umm-spec-lib "0.1.0-SNAPSHOT"]
[org.clojure/clojure "1.10.0"]
[org.clojure/tools.nrepl "0.2.13"]
[org.clojure/tools.reader "1.3.2"]
[org.quartz-scheduler/quartz "2.3.2"]
[org.slf4j/slf4j-api "1.7.30"]
[ring/ring-core "1.9.2"]
[ring/ring-json "0.5.1"]]
:plugins [[drift "1.5.3"]
[lein-exec "0.3.7"]
[lein-shell "0.5.0"]
[test2junit "1.3.3"]]
:repl-options {:init-ns user}
:jvm-opts ^:replace ["-server"
"-Dclojure.compiler.direct-linking=true"]
:test-paths ["test" "int-test"]
:profiles {:security {:plugins [[com.livingsocial/lein-dependency-check "1.1.1"]]
:dependency-check {:output-format [:all]
:suppression-file "resources/security/suppression.xml"}}
:dev {:dependencies [[nasa-cmr/cmr-mock-echo-app "0.1.0-SNAPSHOT"]
[org.clojars.gjahad/debug-repl "0.3.3"]
[org.clojure/tools.namespace "0.2.11"]
[pjstadig/humane-test-output "0.9.0"]
[proto-repl "0.3.1"]]
:jvm-opts ^:replace ["-server"]
:source-paths ["src" "dev" "test" "int-test"]
:injections [(require 'pjstadig.humane-test-output)
(pjstadig.humane-test-output/activate!)]}
:uberjar {:main cmr.metadata-db.runner
:aot :all}
:static {}
;; This profile is used for linting and static analysis. To run for this
project , use ` lint ` from inside the project directory . To run for
;; all projects at the same time, use the same command but from the top-
;; level directory.
:lint {:source-paths ^:replace ["src"]
:test-paths ^:replace []
:plugins [[jonase/eastwood "0.2.5"]
[lein-ancient "0.6.15"]
[lein-bikeshed "0.5.0"]
[lein-kibit "0.1.6"]]}
;; The following profile is overriden on the build server or in the user's
;; ~/.lein/profiles.clj file.
:internal-repos {}
:kaocha {:dependencies [[lambdaisland/kaocha "1.0.732"]
[lambdaisland/kaocha-cloverage "1.0.75"]
[lambdaisland/kaocha-junit-xml "0.0.76"]]}}
Database migrations run by executing " migrate "
:aliases {"create-user" ["exec" "-p" "./support/create_user.clj"]
"drop-user" ["exec" "-p" "./support/drop_user.clj"]
"migrate" ["migrate" "-c" "config.mdb-migrate-config/app-migrate-config"]
;; Prints out documentation on configuration environment variables.
"env-config-docs" ["exec" "-ep" "(do (use 'cmr.common.config) (print-all-configs-docs) (shutdown-agents))"]
to test2junit for consistency with - test - out
"test-out" ["test2junit"]
test aliases
;; refer to tests.edn for test configuration
"kaocha" ["with-profile" "+kaocha" "run" "-m" "kaocha.runner"]
"itest" ["kaocha" "--focus" ":integration"]
"utest" ["kaocha" "--focus" ":unit"]
"ci-test" ["kaocha" "--profile" ":ci"]
"ci-itest" ["itest" "--profile" ":ci"]
"ci-utest" ["utest" "--profile" ":ci"]
;; Linting aliases
"kibit" ["do"
["with-profile" "lint" "shell" "echo" "== Kibit =="]
["with-profile" "lint" "kibit"]]
"eastwood" ["with-profile" "lint" "eastwood" "{:namespaces [:source-paths]}"]
"bikeshed" ["with-profile" "lint" "bikeshed" "--max-line-length=100"]
"check-deps" ["with-profile" "lint" "ancient" ":all"]
"check-sec" ["with-profile" "security" "dependency-check"]
"lint" ["do" ["check"] ["kibit"] ["eastwood"]]
;; Placeholder for future docs and enabler of top-level alias
"generate-static" ["with-profile" "static" "shell" "echo"]})
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/67820501151731b535b7087e11990ab1c2240042/metadata-db-app/project.clj | clojure | This profile is used for linting and static analysis. To run for this
all projects at the same time, use the same command but from the top-
level directory.
The following profile is overriden on the build server or in the user's
~/.lein/profiles.clj file.
Prints out documentation on configuration environment variables.
refer to tests.edn for test configuration
Linting aliases
Placeholder for future docs and enabler of top-level alias | (defproject nasa-cmr/cmr-metadata-db-app "0.1.0-SNAPSHOT"
:description "The metadata db is a micro-service that provides
support for persisting metadata concepts."
:url "-Metadata-Repository/tree/master/metadata-db-app"
:exclusions [[cheshire]
[clj-http]
[clj-time]
[com.fasterxml.jackson.core/jackson-core]
[com.fasterxml.jackson.core/jackson-databind]
[org.clojure/tools.reader]
[org.slf4j/slf4j-api]]
:dependencies [[cheshire "5.10.0"]
[clj-http "3.11.0"]
[clj-time "0.15.1"]
[com.fasterxml.jackson.core/jackson-core "2.13.2"]
[com.fasterxml.jackson.dataformat/jackson-dataformat-cbor "2.13.2"]
[compojure "1.6.1"]
[drift "1.5.3"]
[inflections "0.13.0"]
[nasa-cmr/cmr-acl-lib "0.1.0-SNAPSHOT"]
[nasa-cmr/cmr-common-app-lib "0.1.0-SNAPSHOT"]
[nasa-cmr/cmr-common-lib "0.1.1-SNAPSHOT"]
[nasa-cmr/cmr-message-queue-lib "0.1.0-SNAPSHOT"]
[nasa-cmr/cmr-oracle-lib "0.1.0-SNAPSHOT"]
[nasa-cmr/cmr-schemas "0.0.1-SNAPSHOT"]
[nasa-cmr/cmr-umm-spec-lib "0.1.0-SNAPSHOT"]
[org.clojure/clojure "1.10.0"]
[org.clojure/tools.nrepl "0.2.13"]
[org.clojure/tools.reader "1.3.2"]
[org.quartz-scheduler/quartz "2.3.2"]
[org.slf4j/slf4j-api "1.7.30"]
[ring/ring-core "1.9.2"]
[ring/ring-json "0.5.1"]]
:plugins [[drift "1.5.3"]
[lein-exec "0.3.7"]
[lein-shell "0.5.0"]
[test2junit "1.3.3"]]
:repl-options {:init-ns user}
:jvm-opts ^:replace ["-server"
"-Dclojure.compiler.direct-linking=true"]
:test-paths ["test" "int-test"]
:profiles {:security {:plugins [[com.livingsocial/lein-dependency-check "1.1.1"]]
:dependency-check {:output-format [:all]
:suppression-file "resources/security/suppression.xml"}}
:dev {:dependencies [[nasa-cmr/cmr-mock-echo-app "0.1.0-SNAPSHOT"]
[org.clojars.gjahad/debug-repl "0.3.3"]
[org.clojure/tools.namespace "0.2.11"]
[pjstadig/humane-test-output "0.9.0"]
[proto-repl "0.3.1"]]
:jvm-opts ^:replace ["-server"]
:source-paths ["src" "dev" "test" "int-test"]
:injections [(require 'pjstadig.humane-test-output)
(pjstadig.humane-test-output/activate!)]}
:uberjar {:main cmr.metadata-db.runner
:aot :all}
:static {}
project , use ` lint ` from inside the project directory . To run for
:lint {:source-paths ^:replace ["src"]
:test-paths ^:replace []
:plugins [[jonase/eastwood "0.2.5"]
[lein-ancient "0.6.15"]
[lein-bikeshed "0.5.0"]
[lein-kibit "0.1.6"]]}
:internal-repos {}
:kaocha {:dependencies [[lambdaisland/kaocha "1.0.732"]
[lambdaisland/kaocha-cloverage "1.0.75"]
[lambdaisland/kaocha-junit-xml "0.0.76"]]}}
Database migrations run by executing " migrate "
:aliases {"create-user" ["exec" "-p" "./support/create_user.clj"]
"drop-user" ["exec" "-p" "./support/drop_user.clj"]
"migrate" ["migrate" "-c" "config.mdb-migrate-config/app-migrate-config"]
"env-config-docs" ["exec" "-ep" "(do (use 'cmr.common.config) (print-all-configs-docs) (shutdown-agents))"]
to test2junit for consistency with - test - out
"test-out" ["test2junit"]
test aliases
"kaocha" ["with-profile" "+kaocha" "run" "-m" "kaocha.runner"]
"itest" ["kaocha" "--focus" ":integration"]
"utest" ["kaocha" "--focus" ":unit"]
"ci-test" ["kaocha" "--profile" ":ci"]
"ci-itest" ["itest" "--profile" ":ci"]
"ci-utest" ["utest" "--profile" ":ci"]
"kibit" ["do"
["with-profile" "lint" "shell" "echo" "== Kibit =="]
["with-profile" "lint" "kibit"]]
"eastwood" ["with-profile" "lint" "eastwood" "{:namespaces [:source-paths]}"]
"bikeshed" ["with-profile" "lint" "bikeshed" "--max-line-length=100"]
"check-deps" ["with-profile" "lint" "ancient" ":all"]
"check-sec" ["with-profile" "security" "dependency-check"]
"lint" ["do" ["check"] ["kibit"] ["eastwood"]]
"generate-static" ["with-profile" "static" "shell" "echo"]})
|
702294af7b83cc4fc0ab5889bb1cec007953c0f6537ac959080f7b6ba420114f | fulcro-legacy/semantic-ui-wrapper | ui_tab.cljs | (ns fulcrologic.semantic-ui.modules.tab.ui-tab
(:require
[fulcrologic.semantic-ui.factory-helpers :as h]
["semantic-ui-react/dist/commonjs/modules/Tab/Tab" :default Tab]))
(def ui-tab
"A Tab is a hidden section of content activated by a Menu.
Props:
- activeIndex (number|string): Index of the currently active tab. ()
- as (custom): An element type to render as (string or function).
- defaultActiveIndex (number|string): The initial activeIndex. ()
- grid (object): Shorthand props for the Grid.
- menu (object): Shorthand props for the Menu.
- menuPosition (enum): Align vertical menu (left, right)
- onTabChange (func): Called on tab change.
- panes (arrayOf): Array of objects describing each Menu.Item and Tab.Pane: ([:name \"shape\"], [:value {:menuItem {:name \"custom\", :raw \"customPropTypes.itemShorthand\", :required false}, :pane {:name \"custom\", :raw \"customPropTypes.itemShorthand\", :required false}, :render {:name \"func\", :required false}}])
- renderActiveOnly (bool): A Tab can render only active pane."
(h/factory-apply Tab))
| null | https://raw.githubusercontent.com/fulcro-legacy/semantic-ui-wrapper/b0473480ddfff18496df086bf506099ac897f18f/semantic-ui-wrappers-shadow/src/main/fulcrologic/semantic_ui/modules/tab/ui_tab.cljs | clojure | (ns fulcrologic.semantic-ui.modules.tab.ui-tab
(:require
[fulcrologic.semantic-ui.factory-helpers :as h]
["semantic-ui-react/dist/commonjs/modules/Tab/Tab" :default Tab]))
(def ui-tab
"A Tab is a hidden section of content activated by a Menu.
Props:
- activeIndex (number|string): Index of the currently active tab. ()
- as (custom): An element type to render as (string or function).
- defaultActiveIndex (number|string): The initial activeIndex. ()
- grid (object): Shorthand props for the Grid.
- menu (object): Shorthand props for the Menu.
- menuPosition (enum): Align vertical menu (left, right)
- onTabChange (func): Called on tab change.
- panes (arrayOf): Array of objects describing each Menu.Item and Tab.Pane: ([:name \"shape\"], [:value {:menuItem {:name \"custom\", :raw \"customPropTypes.itemShorthand\", :required false}, :pane {:name \"custom\", :raw \"customPropTypes.itemShorthand\", :required false}, :render {:name \"func\", :required false}}])
- renderActiveOnly (bool): A Tab can render only active pane."
(h/factory-apply Tab))
| |
e524f8a5c9e90dd45174be0a740ffb798f2fdeddeb53fd7c87d02955bb7b7ede | KingoftheHomeless/in-other-words | Fix.hs | # LANGUAGE CPP #
# OPTIONS_HADDOCK not - home #
module Control.Effect.Type.Fix
( -- * Effects
Fix(..)
-- * Threading utilities
, threadFixViaClass
) where
import Control.Monad.Fix
import qualified Control.Monad.Trans.Except as E
import qualified Control.Monad.Trans.Reader as R
import qualified Control.Monad.Trans.State.Lazy as LSt
import qualified Control.Monad.Trans.State.Strict as SSt
import qualified Control.Monad.Trans.Writer.Lazy as LWr
import qualified Control.Monad.Trans.Writer.Strict as SWr
import qualified Control.Monad.Trans.Writer.CPS as CPSWr
import Control.Effect.Internal.ViaAlg
import Control.Effect.Internal.Reflection
import Control.Effect.Internal.Utils
import Control.Effect.Internal.Union
| An effect corresponding to the ' MonadFix ' type class .
--
' Control . Effect . ' 's ' MonadFix ' instance is based
-- on this effect; by having access to 'Fix', you're able to
-- use recursive do notation inside of effect handlers.
--
-- __Fix is typically used as a primitive effect__.
-- If you define a 'Control.Effect.Carrier' that relies on a novel
-- non-trivial monad transformer @t@, then you need to make
a ' t ' Fix'@ instance ( if possible ) .
-- 'threadFixViaClass' can help you with that.
--
-- The following threading constraints accept 'Fix':
--
-- * 'Control.Effect.ReaderThreads'
-- * 'Control.Effect.State.StateThreads'
-- * 'Control.Effect.State.StateLazyThreads'
-- * 'Control.Effect.Error.ErrorThreads'
-- * 'Control.Effect.Writer.WriterThreads'
-- * 'Control.Effect.Writer.WriterLazyThreads'
newtype Fix :: Effect where
Fix :: (a -> m a) -> Fix m a
instance ( Reifies s (ReifiedEffAlgebra Fix m)
, Monad m
) => MonadFix (ViaAlg s Fix m) where
mfix f = case reflect @s of
ReifiedEffAlgebra alg -> coerceAlg alg (Fix f)
# INLINE mfix #
| A valid definition of ' threadEff ' for a ' t ' Fix'@ instance ,
given that @t@ lifts ' MonadFix ' .
threadFixViaClass :: Monad m
=> ( RepresentationalT t
, forall b. MonadFix b => MonadFix (t b)
)
=> (forall x. Fix m x -> m x)
-> Fix (t m) a -> t m a
threadFixViaClass alg (Fix f) = reify (ReifiedEffAlgebra alg) $ \(_ :: pr s) ->
unViaAlgT (mfix (viaAlgT @s @Fix #. f))
# INLINE threadFixViaClass #
#define THREADFIX(monadT) \
instance ThreadsEff (monadT) Fix where \
threadEff = threadFixViaClass; \
# INLINE threadEff #
#define THREADFIX_CTX(ctx, monadT) \
instance ctx => ThreadsEff (monadT) Fix where \
threadEff = threadFixViaClass; \
# INLINE threadEff #
TODO(KingoftheHomeless ): Benchmark this vs hand - written instances .
THREADFIX(LSt.StateT s)
THREADFIX(SSt.StateT s)
THREADFIX_CTX(Monoid s, LWr.WriterT s)
THREADFIX_CTX(Monoid s, SWr.WriterT s)
THREADFIX(CPSWr.WriterT s)
THREADFIX(E.ExceptT e)
THREADFIX(R.ReaderT i)
| null | https://raw.githubusercontent.com/KingoftheHomeless/in-other-words/9c864c81beb4fdf71d363b6962db5c90275c57ef/src/Control/Effect/Type/Fix.hs | haskell | * Effects
* Threading utilities
on this effect; by having access to 'Fix', you're able to
use recursive do notation inside of effect handlers.
__Fix is typically used as a primitive effect__.
If you define a 'Control.Effect.Carrier' that relies on a novel
non-trivial monad transformer @t@, then you need to make
'threadFixViaClass' can help you with that.
The following threading constraints accept 'Fix':
* 'Control.Effect.ReaderThreads'
* 'Control.Effect.State.StateThreads'
* 'Control.Effect.State.StateLazyThreads'
* 'Control.Effect.Error.ErrorThreads'
* 'Control.Effect.Writer.WriterThreads'
* 'Control.Effect.Writer.WriterLazyThreads' | # LANGUAGE CPP #
# OPTIONS_HADDOCK not - home #
module Control.Effect.Type.Fix
Fix(..)
, threadFixViaClass
) where
import Control.Monad.Fix
import qualified Control.Monad.Trans.Except as E
import qualified Control.Monad.Trans.Reader as R
import qualified Control.Monad.Trans.State.Lazy as LSt
import qualified Control.Monad.Trans.State.Strict as SSt
import qualified Control.Monad.Trans.Writer.Lazy as LWr
import qualified Control.Monad.Trans.Writer.Strict as SWr
import qualified Control.Monad.Trans.Writer.CPS as CPSWr
import Control.Effect.Internal.ViaAlg
import Control.Effect.Internal.Reflection
import Control.Effect.Internal.Utils
import Control.Effect.Internal.Union
| An effect corresponding to the ' MonadFix ' type class .
' Control . Effect . ' 's ' MonadFix ' instance is based
a ' t ' Fix'@ instance ( if possible ) .
newtype Fix :: Effect where
Fix :: (a -> m a) -> Fix m a
instance ( Reifies s (ReifiedEffAlgebra Fix m)
, Monad m
) => MonadFix (ViaAlg s Fix m) where
mfix f = case reflect @s of
ReifiedEffAlgebra alg -> coerceAlg alg (Fix f)
# INLINE mfix #
| A valid definition of ' threadEff ' for a ' t ' Fix'@ instance ,
given that @t@ lifts ' MonadFix ' .
threadFixViaClass :: Monad m
=> ( RepresentationalT t
, forall b. MonadFix b => MonadFix (t b)
)
=> (forall x. Fix m x -> m x)
-> Fix (t m) a -> t m a
threadFixViaClass alg (Fix f) = reify (ReifiedEffAlgebra alg) $ \(_ :: pr s) ->
unViaAlgT (mfix (viaAlgT @s @Fix #. f))
# INLINE threadFixViaClass #
#define THREADFIX(monadT) \
instance ThreadsEff (monadT) Fix where \
threadEff = threadFixViaClass; \
# INLINE threadEff #
#define THREADFIX_CTX(ctx, monadT) \
instance ctx => ThreadsEff (monadT) Fix where \
threadEff = threadFixViaClass; \
# INLINE threadEff #
TODO(KingoftheHomeless ): Benchmark this vs hand - written instances .
THREADFIX(LSt.StateT s)
THREADFIX(SSt.StateT s)
THREADFIX_CTX(Monoid s, LWr.WriterT s)
THREADFIX_CTX(Monoid s, SWr.WriterT s)
THREADFIX(CPSWr.WriterT s)
THREADFIX(E.ExceptT e)
THREADFIX(R.ReaderT i)
|
a90488defe30629fbb9d42e2b6a60ce8c75c1031c71f45a44a3f6477e6926a59 | metrics-clojure/metrics-clojure | csv.clj | (ns metrics.reporters.csv
"CSV reporting"
(:require [metrics.core :refer [default-registry]]
[clojure.java.io :as io]
[metrics.reporters :as mrep])
(:import java.util.concurrent.TimeUnit
[com.codahale.metrics CsvReporter MetricRegistry MetricFilter ScheduledReporter]
java.util.Locale))
(defn- validate-create-output-dir
[^java.io.File d]
(when-not (.exists d)
(.mkdirs d))
(when-not (.canWrite d)
(throw (java.io.IOException. (str "Don't have write permissions to " d))))
(when-not (.isDirectory d)
(throw (java.io.IOException. (str d " is not a directory.")))))
(defn ^com.codahale.metrics.CsvReporter reporter
([dir opts]
(reporter default-registry dir opts))
([^MetricRegistry reg dir opts]
(let [b (CsvReporter/forRegistry reg)
d (io/file dir)]
(validate-create-output-dir d)
(when-let [^Locale l (:locale opts)]
(.formatFor b l))
(when-let [^TimeUnit ru (:rate-unit opts)]
(.convertRatesTo b ru))
(when-let [^TimeUnit du (:duration-unit opts)]
(.convertDurationsTo b du))
(when-let [^MetricFilter f (:filter opts)]
(.filter b f))
(.build b d))))
(defn start
"Report all metrics to csv"
[^ScheduledReporter r ^long seconds]
(mrep/start r seconds))
(defn stop
"Stops reporting."
[^ScheduledReporter r]
(mrep/stop r))
| null | https://raw.githubusercontent.com/metrics-clojure/metrics-clojure/a1dbacc748a1f8165f0094e2229c84f228efe29b/metrics-clojure-core/src/metrics/reporters/csv.clj | clojure | (ns metrics.reporters.csv
"CSV reporting"
(:require [metrics.core :refer [default-registry]]
[clojure.java.io :as io]
[metrics.reporters :as mrep])
(:import java.util.concurrent.TimeUnit
[com.codahale.metrics CsvReporter MetricRegistry MetricFilter ScheduledReporter]
java.util.Locale))
(defn- validate-create-output-dir
[^java.io.File d]
(when-not (.exists d)
(.mkdirs d))
(when-not (.canWrite d)
(throw (java.io.IOException. (str "Don't have write permissions to " d))))
(when-not (.isDirectory d)
(throw (java.io.IOException. (str d " is not a directory.")))))
(defn ^com.codahale.metrics.CsvReporter reporter
([dir opts]
(reporter default-registry dir opts))
([^MetricRegistry reg dir opts]
(let [b (CsvReporter/forRegistry reg)
d (io/file dir)]
(validate-create-output-dir d)
(when-let [^Locale l (:locale opts)]
(.formatFor b l))
(when-let [^TimeUnit ru (:rate-unit opts)]
(.convertRatesTo b ru))
(when-let [^TimeUnit du (:duration-unit opts)]
(.convertDurationsTo b du))
(when-let [^MetricFilter f (:filter opts)]
(.filter b f))
(.build b d))))
(defn start
"Report all metrics to csv"
[^ScheduledReporter r ^long seconds]
(mrep/start r seconds))
(defn stop
"Stops reporting."
[^ScheduledReporter r]
(mrep/stop r))
| |
4c7ea25c2297495383c03e4efecf436b561bae0f5d96be12e80e185545f785b1 | leanprover/tc | Internal.hs | |
Module : . Level . Internal
Description : Universe levels
Copyright : ( c ) , 2016
License : GPL-3
Maintainer :
Implementation of universe levels
Module : Kernel.Level.Internal
Description : Universe levels
Copyright : (c) Daniel Selsam, 2016
License : GPL-3
Maintainer :
Implementation of universe levels
-}
module Kernel.Level.Internal where
import Kernel.Name
import Lens.Simple
import Data.List as List
import Control.Monad
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Set as Set
import Data.Set (Set)
import Data.List (elemIndex, sortBy, genericLength)
newtype SuccData = SuccData { succOf :: Level } deriving (Eq,Show,Ord)
data MaxCoreData = MaxCoreData { isImax :: Bool, maxLHS :: Level, maxRHS :: Level } deriving (Eq,Show,Ord)
data Level = Zero
| Succ SuccData
| Max MaxCoreData
| IMax MaxCoreData
| LevelParam Name
| GlobalLevel Name
deriving (Eq, Ord)
showLevel :: Level -> String
showLevel l = case toLevelOffset l of
(l,0) -> "{ " ++ showLevelCore l ++ " }"
(l,k) -> "{ <" ++ show k ++ "> " ++ showLevelCore l ++ " }"
where
showLevelCore :: Level -> String
showLevelCore l = case l of
Zero -> "0"
Max max -> "(max " ++ showLevel (maxLHS max) ++ " " ++ showLevel (maxRHS max) ++ ")"
IMax imax -> "(max " ++ showLevel (maxLHS imax) ++ " " ++ showLevel (maxRHS imax) ++ ")"
LevelParam lp -> show lp
GlobalLevel gl -> "!" ++ show gl
instance Show Level where show e = showLevel e
getUndefParam :: Level -> [Name] -> Maybe Name
getUndefParam l ns = case l of
Zero -> Nothing
Succ succ -> getUndefParam (succOf succ) ns
Max max -> getUndefParam (maxLHS max) ns `mplus` getUndefParam (maxRHS max) ns
IMax imax -> getUndefParam (maxLHS imax) ns `mplus` getUndefParam (maxRHS imax) ns
LevelParam n -> if elem n ns then Nothing else Just n
GlobalLevel n -> Nothing
getUndefGlobal :: Level -> Set Name -> Maybe Name
getUndefGlobal l ns = case l of
Zero -> Nothing
Succ succ -> getUndefGlobal (succOf succ) ns
Max max -> getUndefGlobal (maxLHS max) ns `mplus` getUndefGlobal (maxRHS max) ns
IMax imax -> getUndefGlobal (maxLHS imax) ns `mplus` getUndefGlobal (maxRHS imax) ns
LevelParam n -> Nothing
GlobalLevel n -> if Set.member n ns then Nothing else Just n
-- A level is explicit if it is of the form 'Succ^k Zero' for some 'k'.
isExplicit l = case l of
Zero -> True
Succ succ -> isExplicit (succOf succ)
Max max -> False
IMax imax -> False
LevelParam n -> False
GlobalLevel n -> False
getDepth l = case l of
Zero -> 0
Succ succ -> 1 + getDepth (succOf succ)
Max max -> 0
IMax imax -> 0
LevelParam n -> 0
GlobalLevel n -> 0
-- Factors out outermost sequence of 'mkSucc' applications.
toLevelOffset l = case l of
Succ succ -> over _2 (+1) $ toLevelOffset (succOf succ)
otherwise -> (l,0)
isZero l = case l of
Zero -> True
_ -> False
mkZero = Zero
mkSucc l = Succ (SuccData l)
mkLevelOne = mkSucc mkZero
mkLevelTwo = mkSucc $ mkSucc mkZero
mkIteratedSucc l k
| k == 0 = l
| k > 0 = Succ (SuccData (mkIteratedSucc l (k-1)))
mkMax l1 l2
| isExplicit l1 && isExplicit l2 = if getDepth l1 >= getDepth l2 then l1 else l2
| l1 == l2 = l1
| isZero l1 = l2
| isZero l2 = l1
| otherwise =
case l1 of
Max max | maxLHS max == l2 || maxRHS max == l2 -> l1
otherwise ->
case l2 of
Max max | maxLHS max == l1 || maxRHS max == l1 -> l2
otherwise ->
let (l1',k1) = toLevelOffset l1
(l2',k2) = toLevelOffset l2
in
if l1' == l2' then (if k1 >= k2 then l1 else l2) else Max (MaxCoreData False l1 l2)
mkIMax l1 l2
| isDefinitelyNotZero l2 = mkMax l1 l2
| isZero l2 = l2
| isZero l1 = l2
| l1 == l2 = l1
| otherwise = IMax (MaxCoreData True l1 l2)
mkLevelParam = LevelParam
mkGlobalLevel = GlobalLevel
isDefinitelyNotZero l = case l of
Zero -> False
LevelParam _ -> False
GlobalLevel _ -> False
Succ _ -> True
Max max -> isDefinitelyNotZero (maxLHS max) || isDefinitelyNotZero (maxRHS max)
IMax imax -> isDefinitelyNotZero (maxRHS imax)
levelHasParam l = case l of
LevelParam _ -> True
Succ succ -> levelHasParam (succOf succ)
Max max -> levelHasParam (maxLHS max) || levelHasParam (maxRHS max)
IMax imax -> levelHasParam (maxLHS imax) || levelHasParam (maxRHS imax)
_ -> False
levelKindRank l = case l of
Zero -> 0
Succ _ -> 1
Max _ -> 2
IMax _ -> 3
LevelParam _ -> 4
GlobalLevel _ -> 5
levelNormCmp l1 l2 = if l1 == l2 then EQ else levelNormCmpCore (toLevelOffset l1) (toLevelOffset l2)
levelNormCmpCore (l1,k1) (l2,k2)
| l1 == l2 = compare k1 k2
| levelKindRank l1 /= levelKindRank l2 = compare (levelKindRank l1) (levelKindRank l2)
| otherwise =
case (l1,l2) of
(LevelParam n1,LevelParam n2) -> compare n1 n2
(GlobalLevel n1,GlobalLevel n2) -> compare n1 n2
(Max max1,Max max2) -> levelNormCmpMaxCore max1 max2
(IMax max1,IMax max2) -> levelNormCmpMaxCore max1 max2
levelNormCmpMaxCore (MaxCoreData _ l1a l2a) (MaxCoreData _ l1b l2b)
| l1a /= l1b = levelNormCmp l1a l1b
| otherwise = levelNormCmp l2a l2b
collectMaxArgs (Max (MaxCoreData False l1 l2)) = collectMaxArgs l1 ++ collectMaxArgs l2
collectMaxArgs l = [l]
-- called on sorted explicits
removeSmallExplicits [] = Nothing
removeSmallExplicits [l] = Just l
removeSmallExplicits (l:ls) = removeSmallExplicits ls
normalizeLevel l = let p = toLevelOffset l in case fst p of
Zero -> l
LevelParam _ -> l
GlobalLevel _ -> l
IMax (MaxCoreData True l1 l2) ->
let l1_n = normalizeLevel l1
l2_n = normalizeLevel l2
in
if l1 /= l1_n || l2 /= l2_n then mkIteratedSucc (mkIMax l1_n l2_n) (snd p) else l
Max max ->
let maxArgs = (sortBy levelNormCmp) . concat . (map (collectMaxArgs . normalizeLevel)) $ collectMaxArgs (Max max)
explicit = removeSmallExplicits $ filter isExplicit maxArgs
nonExplicits = let rest = filter (not . isExplicit) maxArgs
(butLast,last) = foldl (\ (keep,prev) curr ->
if fst (toLevelOffset prev) == fst (toLevelOffset curr)
then (keep,curr)
else (keep ++ [prev],curr))
([],head rest)
(tail rest)
in butLast ++ [last]
explicits = case explicit of
Nothing -> []
Just x -> if snd (toLevelOffset x) <= maximum (map (snd . toLevelOffset) nonExplicits) then [] else [x]
allArgs = explicits ++ nonExplicits
liftedArgs = map (flip mkIteratedSucc (snd p)) allArgs
in
mkBigMax liftedArgs
mkBigMax [] = mkZero
mkBigMax [l] = l
mkBigMax (x:xs) = mkMax x (mkBigMax xs)
Check whether two levels are equivalent ( modulo normalizing ' max ' )
levelEquiv l1 l2 = l1 == l2 || normalizeLevel l1 == normalizeLevel l2
-- Replace
type LevelReplaceFn = (Level -> Maybe Level)
replaceInLevel :: LevelReplaceFn -> Level -> Level
replaceInLevel f l =
case f l of
Just l0 -> l0
Nothing ->
case l of
Zero -> l
Succ succ -> mkSucc (replaceInLevel f $ succOf succ)
Max max -> mkMax (replaceInLevel f $ maxLHS max) (replaceInLevel f $ maxRHS max)
IMax imax -> mkIMax (replaceInLevel f $ maxLHS imax) (replaceInLevel f $ maxRHS imax)
LevelParam _ -> l
GlobalLevel _ -> l
instantiateLevel :: [Name] -> [Level] -> Level -> Level
instantiateLevel lpNames levels level =
replaceInLevel (instantiateLevelFn lpNames levels) level
where
instantiateLevelFn :: [Name] -> [Level] -> LevelReplaceFn
instantiateLevelFn lpNames levels level
| not (genericLength lpNames == genericLength levels) = error "Wrong number of level params"
| not (levelHasParam level) = Just level
instantiateLevelFn lpNames levels (LevelParam name) =
case elemIndex name lpNames of
Nothing -> Nothing
Just idx -> Just (levels!!idx)
instantiateLevelFn _ _ _ = Nothing
-- Order
levelNotBiggerThan l1 l2 = levelNotBiggerThanCore (normalizeLevel l1) (normalizeLevel l2) where
levelNotBiggerThanCore l1 l2
| l1 == l2 || isZero l1 = True
levelNotBiggerThanCore (Max max) l2 = levelNotBiggerThan (maxLHS max) l2 && levelNotBiggerThan (maxRHS max) l2
levelNotBiggerThanCore l1 (Max max)
| levelNotBiggerThan l1 (maxLHS max) || levelNotBiggerThan l1 (maxRHS max) = True
levelNotBiggerThanCore (IMax imax) l2 = levelNotBiggerThan (maxLHS imax) l2 && levelNotBiggerThan (maxRHS imax) l2
levelNotBiggerThanCore l1 (IMax imax) = levelNotBiggerThan l1 (maxRHS imax)
levelNotBiggerThanCore l1 l2 =
let (l1',k1) = toLevelOffset l1
(l2',k2) = toLevelOffset l2
in
if l1' == l2' || isZero l1' then k1 <= k2 else
if k1 == k2 && k1 > 0 then levelNotBiggerThan l1' l2' else
False
maybeParamName :: Level -> Maybe Name
maybeParamName l = case l of
LevelParam n -> Just n
_ -> Nothing
| null | https://raw.githubusercontent.com/leanprover/tc/250a568346f29ae27190fccee169ba10002c7399/src/Kernel/Level/Internal.hs | haskell | A level is explicit if it is of the form 'Succ^k Zero' for some 'k'.
Factors out outermost sequence of 'mkSucc' applications.
called on sorted explicits
Replace
Order | |
Module : . Level . Internal
Description : Universe levels
Copyright : ( c ) , 2016
License : GPL-3
Maintainer :
Implementation of universe levels
Module : Kernel.Level.Internal
Description : Universe levels
Copyright : (c) Daniel Selsam, 2016
License : GPL-3
Maintainer :
Implementation of universe levels
-}
module Kernel.Level.Internal where
import Kernel.Name
import Lens.Simple
import Data.List as List
import Control.Monad
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Set as Set
import Data.Set (Set)
import Data.List (elemIndex, sortBy, genericLength)
newtype SuccData = SuccData { succOf :: Level } deriving (Eq,Show,Ord)
data MaxCoreData = MaxCoreData { isImax :: Bool, maxLHS :: Level, maxRHS :: Level } deriving (Eq,Show,Ord)
data Level = Zero
| Succ SuccData
| Max MaxCoreData
| IMax MaxCoreData
| LevelParam Name
| GlobalLevel Name
deriving (Eq, Ord)
showLevel :: Level -> String
showLevel l = case toLevelOffset l of
(l,0) -> "{ " ++ showLevelCore l ++ " }"
(l,k) -> "{ <" ++ show k ++ "> " ++ showLevelCore l ++ " }"
where
showLevelCore :: Level -> String
showLevelCore l = case l of
Zero -> "0"
Max max -> "(max " ++ showLevel (maxLHS max) ++ " " ++ showLevel (maxRHS max) ++ ")"
IMax imax -> "(max " ++ showLevel (maxLHS imax) ++ " " ++ showLevel (maxRHS imax) ++ ")"
LevelParam lp -> show lp
GlobalLevel gl -> "!" ++ show gl
instance Show Level where show e = showLevel e
getUndefParam :: Level -> [Name] -> Maybe Name
getUndefParam l ns = case l of
Zero -> Nothing
Succ succ -> getUndefParam (succOf succ) ns
Max max -> getUndefParam (maxLHS max) ns `mplus` getUndefParam (maxRHS max) ns
IMax imax -> getUndefParam (maxLHS imax) ns `mplus` getUndefParam (maxRHS imax) ns
LevelParam n -> if elem n ns then Nothing else Just n
GlobalLevel n -> Nothing
getUndefGlobal :: Level -> Set Name -> Maybe Name
getUndefGlobal l ns = case l of
Zero -> Nothing
Succ succ -> getUndefGlobal (succOf succ) ns
Max max -> getUndefGlobal (maxLHS max) ns `mplus` getUndefGlobal (maxRHS max) ns
IMax imax -> getUndefGlobal (maxLHS imax) ns `mplus` getUndefGlobal (maxRHS imax) ns
LevelParam n -> Nothing
GlobalLevel n -> if Set.member n ns then Nothing else Just n
isExplicit l = case l of
Zero -> True
Succ succ -> isExplicit (succOf succ)
Max max -> False
IMax imax -> False
LevelParam n -> False
GlobalLevel n -> False
getDepth l = case l of
Zero -> 0
Succ succ -> 1 + getDepth (succOf succ)
Max max -> 0
IMax imax -> 0
LevelParam n -> 0
GlobalLevel n -> 0
toLevelOffset l = case l of
Succ succ -> over _2 (+1) $ toLevelOffset (succOf succ)
otherwise -> (l,0)
isZero l = case l of
Zero -> True
_ -> False
mkZero = Zero
mkSucc l = Succ (SuccData l)
mkLevelOne = mkSucc mkZero
mkLevelTwo = mkSucc $ mkSucc mkZero
mkIteratedSucc l k
| k == 0 = l
| k > 0 = Succ (SuccData (mkIteratedSucc l (k-1)))
mkMax l1 l2
| isExplicit l1 && isExplicit l2 = if getDepth l1 >= getDepth l2 then l1 else l2
| l1 == l2 = l1
| isZero l1 = l2
| isZero l2 = l1
| otherwise =
case l1 of
Max max | maxLHS max == l2 || maxRHS max == l2 -> l1
otherwise ->
case l2 of
Max max | maxLHS max == l1 || maxRHS max == l1 -> l2
otherwise ->
let (l1',k1) = toLevelOffset l1
(l2',k2) = toLevelOffset l2
in
if l1' == l2' then (if k1 >= k2 then l1 else l2) else Max (MaxCoreData False l1 l2)
mkIMax l1 l2
| isDefinitelyNotZero l2 = mkMax l1 l2
| isZero l2 = l2
| isZero l1 = l2
| l1 == l2 = l1
| otherwise = IMax (MaxCoreData True l1 l2)
mkLevelParam = LevelParam
mkGlobalLevel = GlobalLevel
isDefinitelyNotZero l = case l of
Zero -> False
LevelParam _ -> False
GlobalLevel _ -> False
Succ _ -> True
Max max -> isDefinitelyNotZero (maxLHS max) || isDefinitelyNotZero (maxRHS max)
IMax imax -> isDefinitelyNotZero (maxRHS imax)
levelHasParam l = case l of
LevelParam _ -> True
Succ succ -> levelHasParam (succOf succ)
Max max -> levelHasParam (maxLHS max) || levelHasParam (maxRHS max)
IMax imax -> levelHasParam (maxLHS imax) || levelHasParam (maxRHS imax)
_ -> False
levelKindRank l = case l of
Zero -> 0
Succ _ -> 1
Max _ -> 2
IMax _ -> 3
LevelParam _ -> 4
GlobalLevel _ -> 5
levelNormCmp l1 l2 = if l1 == l2 then EQ else levelNormCmpCore (toLevelOffset l1) (toLevelOffset l2)
levelNormCmpCore (l1,k1) (l2,k2)
| l1 == l2 = compare k1 k2
| levelKindRank l1 /= levelKindRank l2 = compare (levelKindRank l1) (levelKindRank l2)
| otherwise =
case (l1,l2) of
(LevelParam n1,LevelParam n2) -> compare n1 n2
(GlobalLevel n1,GlobalLevel n2) -> compare n1 n2
(Max max1,Max max2) -> levelNormCmpMaxCore max1 max2
(IMax max1,IMax max2) -> levelNormCmpMaxCore max1 max2
levelNormCmpMaxCore (MaxCoreData _ l1a l2a) (MaxCoreData _ l1b l2b)
| l1a /= l1b = levelNormCmp l1a l1b
| otherwise = levelNormCmp l2a l2b
collectMaxArgs (Max (MaxCoreData False l1 l2)) = collectMaxArgs l1 ++ collectMaxArgs l2
collectMaxArgs l = [l]
removeSmallExplicits [] = Nothing
removeSmallExplicits [l] = Just l
removeSmallExplicits (l:ls) = removeSmallExplicits ls
normalizeLevel l = let p = toLevelOffset l in case fst p of
Zero -> l
LevelParam _ -> l
GlobalLevel _ -> l
IMax (MaxCoreData True l1 l2) ->
let l1_n = normalizeLevel l1
l2_n = normalizeLevel l2
in
if l1 /= l1_n || l2 /= l2_n then mkIteratedSucc (mkIMax l1_n l2_n) (snd p) else l
Max max ->
let maxArgs = (sortBy levelNormCmp) . concat . (map (collectMaxArgs . normalizeLevel)) $ collectMaxArgs (Max max)
explicit = removeSmallExplicits $ filter isExplicit maxArgs
nonExplicits = let rest = filter (not . isExplicit) maxArgs
(butLast,last) = foldl (\ (keep,prev) curr ->
if fst (toLevelOffset prev) == fst (toLevelOffset curr)
then (keep,curr)
else (keep ++ [prev],curr))
([],head rest)
(tail rest)
in butLast ++ [last]
explicits = case explicit of
Nothing -> []
Just x -> if snd (toLevelOffset x) <= maximum (map (snd . toLevelOffset) nonExplicits) then [] else [x]
allArgs = explicits ++ nonExplicits
liftedArgs = map (flip mkIteratedSucc (snd p)) allArgs
in
mkBigMax liftedArgs
mkBigMax [] = mkZero
mkBigMax [l] = l
mkBigMax (x:xs) = mkMax x (mkBigMax xs)
Check whether two levels are equivalent ( modulo normalizing ' max ' )
levelEquiv l1 l2 = l1 == l2 || normalizeLevel l1 == normalizeLevel l2
type LevelReplaceFn = (Level -> Maybe Level)
replaceInLevel :: LevelReplaceFn -> Level -> Level
replaceInLevel f l =
case f l of
Just l0 -> l0
Nothing ->
case l of
Zero -> l
Succ succ -> mkSucc (replaceInLevel f $ succOf succ)
Max max -> mkMax (replaceInLevel f $ maxLHS max) (replaceInLevel f $ maxRHS max)
IMax imax -> mkIMax (replaceInLevel f $ maxLHS imax) (replaceInLevel f $ maxRHS imax)
LevelParam _ -> l
GlobalLevel _ -> l
instantiateLevel :: [Name] -> [Level] -> Level -> Level
instantiateLevel lpNames levels level =
replaceInLevel (instantiateLevelFn lpNames levels) level
where
instantiateLevelFn :: [Name] -> [Level] -> LevelReplaceFn
instantiateLevelFn lpNames levels level
| not (genericLength lpNames == genericLength levels) = error "Wrong number of level params"
| not (levelHasParam level) = Just level
instantiateLevelFn lpNames levels (LevelParam name) =
case elemIndex name lpNames of
Nothing -> Nothing
Just idx -> Just (levels!!idx)
instantiateLevelFn _ _ _ = Nothing
levelNotBiggerThan l1 l2 = levelNotBiggerThanCore (normalizeLevel l1) (normalizeLevel l2) where
levelNotBiggerThanCore l1 l2
| l1 == l2 || isZero l1 = True
levelNotBiggerThanCore (Max max) l2 = levelNotBiggerThan (maxLHS max) l2 && levelNotBiggerThan (maxRHS max) l2
levelNotBiggerThanCore l1 (Max max)
| levelNotBiggerThan l1 (maxLHS max) || levelNotBiggerThan l1 (maxRHS max) = True
levelNotBiggerThanCore (IMax imax) l2 = levelNotBiggerThan (maxLHS imax) l2 && levelNotBiggerThan (maxRHS imax) l2
levelNotBiggerThanCore l1 (IMax imax) = levelNotBiggerThan l1 (maxRHS imax)
levelNotBiggerThanCore l1 l2 =
let (l1',k1) = toLevelOffset l1
(l2',k2) = toLevelOffset l2
in
if l1' == l2' || isZero l1' then k1 <= k2 else
if k1 == k2 && k1 > 0 then levelNotBiggerThan l1' l2' else
False
maybeParamName :: Level -> Maybe Name
maybeParamName l = case l of
LevelParam n -> Just n
_ -> Nothing
|
28937c03111d8c958911ff020098d89ae6891d69254f054084b0346f7531281c | testedminds/edgewise | graph.clj | (in-ns 'edgewise.core)
;; All fn's return a graph:
(defn empty-graph []
{:vertex-id 0
:edge-id 0
:vertex-data {}
:vertex-index {:label {}}
:edge-data {}})
(defn- update-vertex-index [g property value id]
(let [index (:vertex-index g)
property-index (index property)
update (assoc property-index value id)
new-v-index (assoc index property update)]
(assoc g :vertex-index new-v-index)))
(defn label-index [g label]
(((:vertex-index g) :label) label))
(defn add-vertex
([g label] (add-vertex g label {}))
([g label props]
(let [id (or (:_id props) (:vertex-id g))
existing (or ((:vertex-data g) id) (label-index g label))
v (assoc props :out-e [] :in-e [] :label label :_id id)
new-v-data (assoc (:vertex-data g) id v)]
(if existing g
(-> (assoc g :vertex-data new-v-data)
(assoc :vertex-id (inc (:vertex-id g)))
(update-vertex-index :label label id))))))
(defn- add-edge-by-id [g source-id target-id props]
(let [next-e-id (:edge-id g)
edge (assoc props :out-v source-id :in-v target-id :_id next-e-id)
v (:vertex-data g)
new-out-e (conj ((v source-id) :out-e) next-e-id)
new-out-v (assoc (v source-id) :out-e new-out-e)
new-in-e (conj ((v target-id) :in-e) next-e-id)
new-in-v (assoc (v target-id) :in-e new-in-e)
new-edge-data (assoc (:edge-data g) next-e-id edge)
new-vertex-data (-> (assoc v source-id new-out-v)
(assoc target-id new-in-v))]
(-> (assoc g :edge-data new-edge-data)
(assoc :vertex-data new-vertex-data)
(assoc :edge-id (inc next-e-id)))))
(defmulti add-edge* (fn [g source-id target-id props] [(type source-id) (type target-id)]))
(defmethod add-edge* [Number Number]
[g source-id target-id props]
(add-edge-by-id g source-id target-id props))
(defmethod add-edge* [String String]
[g source-label target-label props]
(let [g-with-v (-> (add-vertex g source-label)
(add-vertex target-label))
source-id (label-index g-with-v source-label)
target-id (label-index g-with-v target-label)]
(add-edge-by-id g-with-v source-id target-id props)))
(defn add-edge
([g source-id target-id props] (add-edge* g source-id target-id props))
([g source-id target-id] (add-edge g source-id target-id {})))
(defn add-bi-edge
"Add two edges to represent bi-directional or undirected relationships."
([g i j props]
(-> g
(add-edge i j props)
(add-edge j i props)))
([g i j]
(-> g
(add-edge i j {})
(add-edge j i {}))))
(defn- remove-edge-from-vertex
[g edge-id vertex-id]
(let [vertex (-> g :vertex-data (get vertex-id))
new-oute (remove #{edge-id} (:out-e vertex))
new-ine (remove #{edge-id} (:in-e vertex))
new-vertex (-> vertex (assoc :out-e new-oute) (assoc :in-e new-ine))]
(assoc-in g [:vertex-data vertex-id] new-vertex)))
(defn remove-edge
[g edge-id]
(let [edge ((:edge-data g) edge-id)
new-edge-data (dissoc (:edge-data g) edge-id)]
(-> (assoc g :edge-data new-edge-data)
(remove-edge-from-vertex edge-id (:out-v edge))
(remove-edge-from-vertex edge-id (:in-v edge)))))
| null | https://raw.githubusercontent.com/testedminds/edgewise/0fb64c718e6a8e70eda87b77677a6679770569d1/src/edgewise/graph.clj | clojure | All fn's return a graph: | (in-ns 'edgewise.core)
(defn empty-graph []
{:vertex-id 0
:edge-id 0
:vertex-data {}
:vertex-index {:label {}}
:edge-data {}})
(defn- update-vertex-index [g property value id]
(let [index (:vertex-index g)
property-index (index property)
update (assoc property-index value id)
new-v-index (assoc index property update)]
(assoc g :vertex-index new-v-index)))
(defn label-index [g label]
(((:vertex-index g) :label) label))
(defn add-vertex
([g label] (add-vertex g label {}))
([g label props]
(let [id (or (:_id props) (:vertex-id g))
existing (or ((:vertex-data g) id) (label-index g label))
v (assoc props :out-e [] :in-e [] :label label :_id id)
new-v-data (assoc (:vertex-data g) id v)]
(if existing g
(-> (assoc g :vertex-data new-v-data)
(assoc :vertex-id (inc (:vertex-id g)))
(update-vertex-index :label label id))))))
(defn- add-edge-by-id [g source-id target-id props]
(let [next-e-id (:edge-id g)
edge (assoc props :out-v source-id :in-v target-id :_id next-e-id)
v (:vertex-data g)
new-out-e (conj ((v source-id) :out-e) next-e-id)
new-out-v (assoc (v source-id) :out-e new-out-e)
new-in-e (conj ((v target-id) :in-e) next-e-id)
new-in-v (assoc (v target-id) :in-e new-in-e)
new-edge-data (assoc (:edge-data g) next-e-id edge)
new-vertex-data (-> (assoc v source-id new-out-v)
(assoc target-id new-in-v))]
(-> (assoc g :edge-data new-edge-data)
(assoc :vertex-data new-vertex-data)
(assoc :edge-id (inc next-e-id)))))
(defmulti add-edge* (fn [g source-id target-id props] [(type source-id) (type target-id)]))
(defmethod add-edge* [Number Number]
[g source-id target-id props]
(add-edge-by-id g source-id target-id props))
(defmethod add-edge* [String String]
[g source-label target-label props]
(let [g-with-v (-> (add-vertex g source-label)
(add-vertex target-label))
source-id (label-index g-with-v source-label)
target-id (label-index g-with-v target-label)]
(add-edge-by-id g-with-v source-id target-id props)))
(defn add-edge
([g source-id target-id props] (add-edge* g source-id target-id props))
([g source-id target-id] (add-edge g source-id target-id {})))
(defn add-bi-edge
"Add two edges to represent bi-directional or undirected relationships."
([g i j props]
(-> g
(add-edge i j props)
(add-edge j i props)))
([g i j]
(-> g
(add-edge i j {})
(add-edge j i {}))))
(defn- remove-edge-from-vertex
[g edge-id vertex-id]
(let [vertex (-> g :vertex-data (get vertex-id))
new-oute (remove #{edge-id} (:out-e vertex))
new-ine (remove #{edge-id} (:in-e vertex))
new-vertex (-> vertex (assoc :out-e new-oute) (assoc :in-e new-ine))]
(assoc-in g [:vertex-data vertex-id] new-vertex)))
(defn remove-edge
[g edge-id]
(let [edge ((:edge-data g) edge-id)
new-edge-data (dissoc (:edge-data g) edge-id)]
(-> (assoc g :edge-data new-edge-data)
(remove-edge-from-vertex edge-id (:out-v edge))
(remove-edge-from-vertex edge-id (:in-v edge)))))
|
f7b3bfb5f286307f00d52325f6a1e82964335a165bea0b5100e8daa74e4dc244 | xapi-project/xen-api | memory_interface.ml |
* Copyright ( C ) Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
(** @group Memory *)
open Rpc
open Idl
module D = Debug.Make (struct let name = "memory_interface" end)
open D
let service_name = "memory"
let queue_name = Xcp_service.common_prefix ^ service_name
let json_path = "/var/xapi/memory.json"
let xml_path = "/var/xapi/memory"
(** The reservation_id is an opaque identifier associated with a block of
memory. It is used to reserve memory for a domain before the domain has been
created. *)
type reservation_id = string [@@deriving rpcty]
* Domain zero can have a different policy to that used by guest domains .
type domain_zero_policy =
| Fixed_size of int64 (** Never balloon, use the specified fixed size *)
| Auto_balloon of int64 * int64
* Balloon between the two sizes specified
[@@deriving rpcty]
type errors =
| Cannot_free_this_much_memory of (int64 * int64)
* [ Cannot_free_this_much_memory ( required , free ) ] is reported if it is
not possible to free [ required ] kib . [ free ] is the amount of memory
currently free .
not possible to free [required] kib. [free] is the amount of memory
currently free. *)
| Domains_refused_to_cooperate of int list
* [ Domains_refused_to_cooperate ( domid list ) ] is reported if a set of
domains do not respond in a timely manner to the request to balloon .
The uncooperative domain ids are returned .
domains do not respond in a timely manner to the request to balloon.
The uncooperative domain ids are returned. *)
| Unknown_reservation of reservation_id
(** [Unknown_reservation (id)] is reported if a the specified
reservation_id is unknown. *)
| No_reservation
(** [No_reservation] is reported by [query_reservation_of_domain] if the
domain does not have a reservation. *)
| Invalid_memory_value of int64
* [ Invalid_memory_value ( value ) ] is reported if a memory value passed is
not valid , e.g. negative .
not valid, e.g. negative. *)
| Internal_error of string
(** [Internal_error (value)] is reported if an unexpected error is
triggered by the library. *)
| Unknown_error (** The default variant for forward compatibility. *)
[@@default Unknown_error] [@@deriving rpcty]
exception MemoryError of errors
let () =
register printer for MemoryError
let sprintf = Printf.sprintf in
let string_of_error e =
Rpcmarshal.marshal errors.Rpc.Types.ty e |> Rpc.to_string
in
let printer = function
| MemoryError e ->
Some (sprintf "Memory_interface.Memory_error(%s)" (string_of_error e))
| _ ->
None
in
Printexc.register_printer printer
let err =
Error.
{
def= errors
; raiser=
(fun e ->
log_backtrace () ;
let exn = MemoryError e in
error "%s (%s)" (Printexc.to_string exn) __LOC__ ;
raise exn
)
; matcher=
(function
| MemoryError e as exn ->
error "%s (%s)" (Printexc.to_string exn) __LOC__ ;
Some e
| exn ->
error "%s (%s)" (Printexc.to_string exn) __LOC__ ;
Some (Internal_error (Printexc.to_string exn))
)
}
(** An uninterpreted string associated with the operation. *)
type debug_info = string [@@deriving rpcty]
(** An identifier to associate requests with a client. This is obtained by a
call to [login]. *)
type session_id = string [@@deriving rpcty]
type reserve_memory_range_result = reservation_id * int64 [@@deriving rpcty]
module API (R : RPC) = struct
open R
let description =
Interface.
{
name= "Memory"
; namespace= None
; description=
[
"This interface is used by Xapi and Squeezed to manage the "
; "dynamic memory usage of VMs on a host."
]
; version= (1, 0, 0)
}
let implementation = implement description
General parameters , used by more than one API call
let debug_info_p =
Param.mk
~description:["An uninterpreted string to associate with the operation."]
Types.string
let diagnostics_result_p =
Param.mk
~description:
["A string containing diagnostic information from the server."]
Types.string
let service_name_p =
Param.mk
~description:
[
"The name of the service attempting to interact with the squeeze \
daemon."
]
Types.string
let session_id_p =
Param.mk
~description:
[
"An identifier to associate requests with a client. This is "
; "obtained by a call to [login]."
]
Types.string
let domid_p = Param.mk ~description:["Domain id of a VM."] Types.int
let reservation_id_p =
Param.mk
~description:
[
"The reservation_id is the token used to identify a memory allocation."
]
reservation_id
let size_p =
Param.mk ~description:["The size in bytes to reserve"] Types.int64
let unit_p = Param.mk Types.unit
(* Individual API calls *)
let get_diagnostics =
declare "get_diagnostics"
["Gets diagnostic information from the server"]
(debug_info_p @-> returning diagnostics_result_p err)
let login =
declare "login"
[
"Logs into the squeeze daemon. Any reservations previously made with \
the "
; "specified service name not yet associated with a domain will be \
removed."
]
(debug_info_p @-> service_name_p @-> returning session_id_p err)
let reserve_memory =
declare "reserve_memory"
[
"[reserve_memory dbg session size] reserves memory for a domain. If \
necessary, "
; "other domains will be ballooned down to ensure [size] is available. \
The call "
; "returns a reservation_id that can later be transferred to a domain."
]
(debug_info_p
@-> session_id_p
@-> size_p
@-> returning reservation_id_p err
)
let reserve_memory_range =
let result =
Param.mk
~description:
[
"A tuple containing the reservation_id and the amount of memory \
actually reserved."
]
reserve_memory_range_result
in
declare "reserve_memory_range"
[
"[reserve_memory_range dbg session min max] reserves memory for a \
domain. If necessary, "
; "other domains will be ballooned down to ensure enough memory is \
available. The amount "
; "of memory will be between [min] and [max] according to the policy in \
operation. The call "
; "returns a reservation_id and the actual memory amount that can later \
be transferred to a domain."
]
(debug_info_p
@-> session_id_p
@-> size_p
@-> size_p
@-> returning result err
)
let delete_reservation =
declare "delete_reservation"
[
"Deletes a reservation. Note that memory rebalancing is not done \
synchronously after the "
; "operation has completed."
]
(debug_info_p
@-> session_id_p
@-> reservation_id_p
@-> returning unit_p err
)
let transfer_reservation_to_domain =
declare "transfer_reservation_to_domain"
[
"Transfers a reservation to a domain. This is called when the domain \
has been created for "
; "the VM for which the reservation was initially made."
]
(debug_info_p
@-> session_id_p
@-> reservation_id_p
@-> domid_p
@-> returning unit_p err
)
let query_reservation_of_domain =
declare "query_reservation_of_domain"
["Queries the reservation_id associated with a domain"]
(debug_info_p
@-> session_id_p
@-> domid_p
@-> returning reservation_id_p err
)
let balance_memory =
declare "balance_memory"
[
"Forces a rebalance of the hosts memory. Blocks until the system is in \
a stable "
; "state."
]
(debug_info_p @-> returning unit_p err)
let get_host_reserved_memory =
declare "get_host_reserved_memory"
[
"Gets the amount of reserved memory in a host. This is the lower limit \
of memory that "
; "squeezed will ensure remains unused by any domain or reservation."
]
(debug_info_p @-> returning size_p err)
let get_host_initial_free_memory =
declare "get_host_initial_free_memory"
["Gets the amount of initial free memory in a host"]
(debug_info_p @-> returning size_p err)
let get_domain_zero_policy =
let result_p =
Param.mk
~description:["The policy associated with domain 0"]
domain_zero_policy
in
declare "get_domain_zero_policy"
["Gets the ballooning policy for domain zero."]
(debug_info_p @-> returning result_p err)
end
| null | https://raw.githubusercontent.com/xapi-project/xen-api/47fae74032aa6ade0fc12e867c530eaf2a96bf75/ocaml/xapi-idl/memory/memory_interface.ml | ocaml | * @group Memory
* The reservation_id is an opaque identifier associated with a block of
memory. It is used to reserve memory for a domain before the domain has been
created.
* Never balloon, use the specified fixed size
* [Unknown_reservation (id)] is reported if a the specified
reservation_id is unknown.
* [No_reservation] is reported by [query_reservation_of_domain] if the
domain does not have a reservation.
* [Internal_error (value)] is reported if an unexpected error is
triggered by the library.
* The default variant for forward compatibility.
* An uninterpreted string associated with the operation.
* An identifier to associate requests with a client. This is obtained by a
call to [login].
Individual API calls |
* Copyright ( C ) Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
open Rpc
open Idl
module D = Debug.Make (struct let name = "memory_interface" end)
open D
let service_name = "memory"
let queue_name = Xcp_service.common_prefix ^ service_name
let json_path = "/var/xapi/memory.json"
let xml_path = "/var/xapi/memory"
type reservation_id = string [@@deriving rpcty]
* Domain zero can have a different policy to that used by guest domains .
type domain_zero_policy =
| Auto_balloon of int64 * int64
* Balloon between the two sizes specified
[@@deriving rpcty]
type errors =
| Cannot_free_this_much_memory of (int64 * int64)
* [ Cannot_free_this_much_memory ( required , free ) ] is reported if it is
not possible to free [ required ] kib . [ free ] is the amount of memory
currently free .
not possible to free [required] kib. [free] is the amount of memory
currently free. *)
| Domains_refused_to_cooperate of int list
* [ Domains_refused_to_cooperate ( domid list ) ] is reported if a set of
domains do not respond in a timely manner to the request to balloon .
The uncooperative domain ids are returned .
domains do not respond in a timely manner to the request to balloon.
The uncooperative domain ids are returned. *)
| Unknown_reservation of reservation_id
| No_reservation
| Invalid_memory_value of int64
* [ Invalid_memory_value ( value ) ] is reported if a memory value passed is
not valid , e.g. negative .
not valid, e.g. negative. *)
| Internal_error of string
[@@default Unknown_error] [@@deriving rpcty]
exception MemoryError of errors
let () =
register printer for MemoryError
let sprintf = Printf.sprintf in
let string_of_error e =
Rpcmarshal.marshal errors.Rpc.Types.ty e |> Rpc.to_string
in
let printer = function
| MemoryError e ->
Some (sprintf "Memory_interface.Memory_error(%s)" (string_of_error e))
| _ ->
None
in
Printexc.register_printer printer
let err =
Error.
{
def= errors
; raiser=
(fun e ->
log_backtrace () ;
let exn = MemoryError e in
error "%s (%s)" (Printexc.to_string exn) __LOC__ ;
raise exn
)
; matcher=
(function
| MemoryError e as exn ->
error "%s (%s)" (Printexc.to_string exn) __LOC__ ;
Some e
| exn ->
error "%s (%s)" (Printexc.to_string exn) __LOC__ ;
Some (Internal_error (Printexc.to_string exn))
)
}
type debug_info = string [@@deriving rpcty]
type session_id = string [@@deriving rpcty]
type reserve_memory_range_result = reservation_id * int64 [@@deriving rpcty]
module API (R : RPC) = struct
open R
let description =
Interface.
{
name= "Memory"
; namespace= None
; description=
[
"This interface is used by Xapi and Squeezed to manage the "
; "dynamic memory usage of VMs on a host."
]
; version= (1, 0, 0)
}
let implementation = implement description
General parameters , used by more than one API call
let debug_info_p =
Param.mk
~description:["An uninterpreted string to associate with the operation."]
Types.string
let diagnostics_result_p =
Param.mk
~description:
["A string containing diagnostic information from the server."]
Types.string
let service_name_p =
Param.mk
~description:
[
"The name of the service attempting to interact with the squeeze \
daemon."
]
Types.string
let session_id_p =
Param.mk
~description:
[
"An identifier to associate requests with a client. This is "
; "obtained by a call to [login]."
]
Types.string
let domid_p = Param.mk ~description:["Domain id of a VM."] Types.int
let reservation_id_p =
Param.mk
~description:
[
"The reservation_id is the token used to identify a memory allocation."
]
reservation_id
let size_p =
Param.mk ~description:["The size in bytes to reserve"] Types.int64
let unit_p = Param.mk Types.unit
let get_diagnostics =
declare "get_diagnostics"
["Gets diagnostic information from the server"]
(debug_info_p @-> returning diagnostics_result_p err)
let login =
declare "login"
[
"Logs into the squeeze daemon. Any reservations previously made with \
the "
; "specified service name not yet associated with a domain will be \
removed."
]
(debug_info_p @-> service_name_p @-> returning session_id_p err)
let reserve_memory =
declare "reserve_memory"
[
"[reserve_memory dbg session size] reserves memory for a domain. If \
necessary, "
; "other domains will be ballooned down to ensure [size] is available. \
The call "
; "returns a reservation_id that can later be transferred to a domain."
]
(debug_info_p
@-> session_id_p
@-> size_p
@-> returning reservation_id_p err
)
let reserve_memory_range =
let result =
Param.mk
~description:
[
"A tuple containing the reservation_id and the amount of memory \
actually reserved."
]
reserve_memory_range_result
in
declare "reserve_memory_range"
[
"[reserve_memory_range dbg session min max] reserves memory for a \
domain. If necessary, "
; "other domains will be ballooned down to ensure enough memory is \
available. The amount "
; "of memory will be between [min] and [max] according to the policy in \
operation. The call "
; "returns a reservation_id and the actual memory amount that can later \
be transferred to a domain."
]
(debug_info_p
@-> session_id_p
@-> size_p
@-> size_p
@-> returning result err
)
(* RPC: drop a reservation; any memory rebalance happens asynchronously. *)
let delete_reservation =
  declare "delete_reservation"
    [
      "Deletes a reservation. Note that memory rebalancing is not done \
       synchronously after the "
    ; "operation has completed."
    ]
    (debug_info_p
    @-> session_id_p
    @-> reservation_id_p
    @-> returning unit_p err
    )

(* RPC: attach a reservation to the domain just created for its VM. *)
let transfer_reservation_to_domain =
  declare "transfer_reservation_to_domain"
    [
      "Transfers a reservation to a domain. This is called when the domain \
       has been created for "
    ; "the VM for which the reservation was initially made."
    ]
    (debug_info_p
    @-> session_id_p
    @-> reservation_id_p
    @-> domid_p
    @-> returning unit_p err
    )

(* RPC: look up which reservation (if any) a domain carries. *)
let query_reservation_of_domain =
  declare "query_reservation_of_domain"
    ["Queries the reservation_id associated with a domain"]
    (debug_info_p
    @-> session_id_p
    @-> domid_p
    @-> returning reservation_id_p err
    )
(* RPC: force a host-wide memory rebalance; blocks until stable. *)
let balance_memory =
  declare "balance_memory"
    [
      "Forces a rebalance of the hosts memory. Blocks until the system is in \
       a stable "
    ; "state."
    ]
    (debug_info_p @-> returning unit_p err)

(* RPC: the floor of host memory squeezed keeps permanently unused. *)
let get_host_reserved_memory =
  declare "get_host_reserved_memory"
    [
      "Gets the amount of reserved memory in a host. This is the lower limit \
       of memory that "
    ; "squeezed will ensure remains unused by any domain or reservation."
    ]
    (debug_info_p @-> returning size_p err)

(* RPC: free host memory as measured initially. *)
let get_host_initial_free_memory =
  declare "get_host_initial_free_memory"
    ["Gets the amount of initial free memory in a host"]
    (debug_info_p @-> returning size_p err)

(* RPC: report the ballooning policy applied to domain 0. *)
let get_domain_zero_policy =
  let result_p =
    Param.mk
      ~description:["The policy associated with domain 0"]
      domain_zero_policy
  in
  declare "get_domain_zero_policy"
    ["Gets the ballooning policy for domain zero."]
    (debug_info_p @-> returning result_p err)
end
|
bb36b9c5387272b9bf72957839a9e03f389680bcfa782d6813784cc9d66fb4e3 | blindglobe/clocc | section8.lisp | section 8 structures -*- mode : lisp -*-
(in-package :cl-user)
(proclaim '(special log))
;;;
;;; Example 1
;;; define town structure type
;;; area, watertowers, firetrucks, population, elevation are its components
;;;
(check-for-bug :section8-legacy-11
(defstruct town
area
watertowers
(firetrucks 1 :type fixnum) ;an initialized slot
population
(elevation 5128 :read-only t)) ;a slot that can't be changed
TOWN)
;create a town instance
(check-for-bug :section8-legacy-21
(progn
(setq town1 (make-town :area 0 :watertowers 0))
t)
t )
;town's predicate recognizes the new instance
(check-for-bug :section8-legacy-28
(town-p town1)
t)
;new town's area is as specified by make-town
(check-for-bug :section8-legacy-33
(town-area town1)
0)
;new town's elevation has initial value
(check-for-bug :section8-legacy-38
(town-elevation town1)
5128)
;; setf recognizes reader function
(check-for-bug :section8-legacy-43
(setf (town-population town1) 99)
99)
(check-for-bug :section8-legacy-47
(town-population town1)
99)
;; copier function makes a copy of town1
(check-for-bug :section8-legacy-52
(progn
(setq town2 (copy-town town1))
t)
t)
(check-for-bug :section8-legacy-58
(= (town-population town1) (town-population town2))
t)
;since elevation is a read-only slot, its value can be set only
;when the structure is created
(check-for-bug :section8-legacy-64
(progn
(setq town3 (make-town :area 0 :watertowers 3 :elevation 1200))
t)
t)
;;;
;;; Example 2
;;; define clown structure type
;;; this structure uses a nonstandard prefix
;;;
(check-for-bug :section8-legacy-75
(defstruct (clown (:conc-name bozo-))
(nose-color 'red)
frizzy-hair-p polkadots)
CLOWN)
(check-for-bug :section8-legacy-81
(progn
(setq funny-clown (make-clown))
t)
t)
;use non-default reader name
(check-for-bug :section8-legacy-88
(bozo-nose-color funny-clown)
RED )
(check-for-bug :section8-legacy-92
(defstruct (klown (:constructor make-up-klown) ;similar def using other
(:copier clone-klown) ;customizing keywords
(:predicate is-a-bozo-p))
nose-color frizzy-hair-p polkadots)
klown)
;custom constructor now exists
(check-for-bug :section8-legacy-100
(fboundp 'make-up-klown)
t)
;;;
Example 3
;;; define a vehicle structure type
;;; then define a truck structure type that includes
;;; the vehicle structure
;;;
(check-for-bug :section8-legacy-110
(defstruct vehicle name year (diesel t :read-only t))
VEHICLE)
(check-for-bug :section8-legacy-114
(defstruct (truck (:include vehicle (year 79)))
load-limit
(axles 6))
TRUCK)
(check-for-bug :section8-legacy-120
(progn
(setq x (make-truck :name 'mac :diesel t :load-limit 17))
t)
t)
;vehicle readers work on trucks
(check-for-bug :section8-legacy-127
(vehicle-name x)
MAC)
;default taken from :include clause
(check-for-bug :section8-legacy-132
(vehicle-year x)
79 )
(check-for-bug :section8-legacy-136
(defstruct (pickup (:include truck)) ;pickup type includes truck
camper long-bed four-wheel-drive)
PICKUP)
(check-for-bug :section8-legacy-141
(progn
(setq x (make-pickup :name 'king :long-bed t))
t)
t)
;:include default inherited
(check-for-bug :section8-legacy-148
(pickup-year x)
79)
;;;
Example 4
use of BOA constructors
;;;
(check-for-bug :section8-legacy-156
BOA constructors
(:constructor make-dfs-boa (a b c))
(:constructor create-dfs-boa
(a &optional b (c 'cc) &rest d &aux e (f 'ff))))
a b c d e f)
DFS-BOA)
;a, b, and c set by position, and the rest are uninitialized
(check-for-bug :section8-legacy-165
(progn
(setq x (make-dfs-boa 1 2 3))
t)
t)
(check-for-bug :section8-legacy-171
(dfs-boa-a x)
1)
;a and b set, c and f defaulted
(check-for-bug :section8-legacy-176
(progn
(setq x (create-dfs-boa 1 2))
t)
t)
(check-for-bug :section8-legacy-182
(dfs-boa-b x)
2)
(check-for-bug :section8-legacy-186
(eq (dfs-boa-c x) 'cc)
t)
;a, b, and c set, and the rest are collected into d
(check-for-bug :section8-legacy-191
(progn
(setq x (create-dfs-boa 1 2 3 4 5 6))
t)
t)
(check-for-bug :section8-legacy-197
(dfs-boa-d x)
(4 5 6))
| null | https://raw.githubusercontent.com/blindglobe/clocc/a50bb75edb01039b282cf320e4505122a59c59a7/src/tools/ansi-test/section8.lisp | lisp |
Example 1
define town structure type
area, watertowers, firetrucks, population, elevation are its components
an initialized slot
a slot that can't be changed
create a town instance
town's predicate recognizes the new instance
new town's area is as specified by make-town
new town's elevation has initial value
since elevation is a read-only slot, its value can be set only
when the structure is created
define clown structure type
this structure uses a nonstandard prefix
use non-default reader name
similar def using other
customizing keywords
custom constructor now exists
define a vehicle structure type
then define a truck structure type that includes
the vehicle structure
vehicle readers work on trucks
default taken from :include clause
pickup type includes truck
:include default inherited
a, b, and c set by position, and the rest are uninitialized
a and b set, c and f defaulted
a, b, and c set, and the rest are collected into d | section 8 structures -*- mode : lisp -*-
(in-package :cl-user)
(proclaim '(special log))
(check-for-bug :section8-legacy-11
(defstruct town
area
watertowers
population
TOWN)
(check-for-bug :section8-legacy-21
(progn
(setq town1 (make-town :area 0 :watertowers 0))
t)
t )
(check-for-bug :section8-legacy-28
(town-p town1)
t)
(check-for-bug :section8-legacy-33
(town-area town1)
0)
(check-for-bug :section8-legacy-38
(town-elevation town1)
5128)
setf recognizes reader function
(check-for-bug :section8-legacy-43
(setf (town-population town1) 99)
99)
(check-for-bug :section8-legacy-47
(town-population town1)
99)
copier function makes a copy of town1
(check-for-bug :section8-legacy-52
(progn
(setq town2 (copy-town town1))
t)
t)
(check-for-bug :section8-legacy-58
(= (town-population town1) (town-population town2))
t)
(check-for-bug :section8-legacy-64
(progn
(setq town3 (make-town :area 0 :watertowers 3 :elevation 1200))
t)
t)
Example 2
(check-for-bug :section8-legacy-75
(defstruct (clown (:conc-name bozo-))
(nose-color 'red)
frizzy-hair-p polkadots)
CLOWN)
(check-for-bug :section8-legacy-81
(progn
(setq funny-clown (make-clown))
t)
t)
(check-for-bug :section8-legacy-88
(bozo-nose-color funny-clown)
RED )
(check-for-bug :section8-legacy-92
(:predicate is-a-bozo-p))
nose-color frizzy-hair-p polkadots)
klown)
(check-for-bug :section8-legacy-100
(fboundp 'make-up-klown)
t)
Example 3
(check-for-bug :section8-legacy-110
(defstruct vehicle name year (diesel t :read-only t))
VEHICLE)
(check-for-bug :section8-legacy-114
(defstruct (truck (:include vehicle (year 79)))
load-limit
(axles 6))
TRUCK)
(check-for-bug :section8-legacy-120
(progn
(setq x (make-truck :name 'mac :diesel t :load-limit 17))
t)
t)
(check-for-bug :section8-legacy-127
(vehicle-name x)
MAC)
(check-for-bug :section8-legacy-132
(vehicle-year x)
79 )
(check-for-bug :section8-legacy-136
camper long-bed four-wheel-drive)
PICKUP)
(check-for-bug :section8-legacy-141
(progn
(setq x (make-pickup :name 'king :long-bed t))
t)
t)
(check-for-bug :section8-legacy-148
(pickup-year x)
79)
Example 4
use of BOA constructors
(check-for-bug :section8-legacy-156
BOA constructors
(:constructor make-dfs-boa (a b c))
(:constructor create-dfs-boa
(a &optional b (c 'cc) &rest d &aux e (f 'ff))))
a b c d e f)
DFS-BOA)
(check-for-bug :section8-legacy-165
(progn
(setq x (make-dfs-boa 1 2 3))
t)
t)
(check-for-bug :section8-legacy-171
(dfs-boa-a x)
1)
(check-for-bug :section8-legacy-176
(progn
(setq x (create-dfs-boa 1 2))
t)
t)
(check-for-bug :section8-legacy-182
(dfs-boa-b x)
2)
(check-for-bug :section8-legacy-186
(eq (dfs-boa-c x) 'cc)
t)
(check-for-bug :section8-legacy-191
(progn
(setq x (create-dfs-boa 1 2 3 4 5 6))
t)
t)
(check-for-bug :section8-legacy-197
(dfs-boa-d x)
(4 5 6))
|
46413cf6a666468eadad8a09bb948ba853a76f3c4603d1faaf5cfaa0def7dae1 | NorfairKing/smos | Report.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingVia #
{-# LANGUAGE OverloadedStrings #-}
module Smos.Report.Report where
import Autodocodec
import Data.Aeson (FromJSON, ToJSON)
import Data.List.NonEmpty (NonEmpty (..))
import Data.Text (Text)
import Data.Validity
import GHC.Generics (Generic)
import Smos.Report.Archive
import Smos.Report.Filter
import Smos.Report.Projection
import Smos.Report.Sorter
-- A saved ("prepared") report definition. Every component is optional;
-- absent parts fall back to defaults when the report is executed.
data PreparedReport = PreparedReport
  { preparedReportDescription :: Maybe Text,
    -- NOTE(review): the next two selectors are spelled "perpared…";
    -- renaming would break callers, so the typo is only flagged here.
    perparedReportFilter :: Maybe EntryFilter,
    perparedReportProjection :: Maybe (NonEmpty Projection),
    preparedReportSorter :: Maybe Sorter,
    preparedReportHideArchive :: Maybe HideArchive
  }
  deriving stock (Show, Eq, Generic)
  -- JSON instances are obtained from the HasCodec instance via Autodocodec.
  deriving (FromJSON, ToJSON) via (Autodocodec PreparedReport)

instance Validity PreparedReport
-- Self-documenting JSON codec: one optional-or-null field per record
-- selector (note the "columns" key maps to the projection selector).
instance HasCodec PreparedReport where
  codec =
    object "PreparedReport" $
      PreparedReport
        <$> optionalFieldOrNull "description" "A description of the report" .= preparedReportDescription
        <*> optionalFieldOrNull "filter" "The entry filter to get the results in the report" .= perparedReportFilter
        <*> optionalFieldOrNull "columns" "The columns of the report" .= perparedReportProjection
        <*> optionalFieldOrNull "sorter" "The sorter to sort the rows of the report by" .= preparedReportSorter
        <*> optionalFieldOrNull "hide-archive" "Whether to consider the archive for the report" .= preparedReportHideArchive
| null | https://raw.githubusercontent.com/NorfairKing/smos/489f5b510c9a30a3c79fef0a0d1a796464705923/smos-report/src/Smos/Report/Report.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingVia #
module Smos.Report.Report where
import Autodocodec
import Data.Aeson (FromJSON, ToJSON)
import Data.List.NonEmpty (NonEmpty (..))
import Data.Text (Text)
import Data.Validity
import GHC.Generics (Generic)
import Smos.Report.Archive
import Smos.Report.Filter
import Smos.Report.Projection
import Smos.Report.Sorter
data PreparedReport = PreparedReport
{ preparedReportDescription :: Maybe Text,
perparedReportFilter :: Maybe EntryFilter,
perparedReportProjection :: Maybe (NonEmpty Projection),
preparedReportSorter :: Maybe Sorter,
preparedReportHideArchive :: Maybe HideArchive
}
deriving stock (Show, Eq, Generic)
deriving (FromJSON, ToJSON) via (Autodocodec PreparedReport)
instance Validity PreparedReport
instance HasCodec PreparedReport where
codec =
object "PreparedReport" $
PreparedReport
<$> optionalFieldOrNull "description" "A description of the report" .= preparedReportDescription
<*> optionalFieldOrNull "filter" "The entry filter to get the results in the report" .= perparedReportFilter
<*> optionalFieldOrNull "columns" "The columns of the report" .= perparedReportProjection
<*> optionalFieldOrNull "sorter" "The sorter to sort the rows of the report by" .= preparedReportSorter
<*> optionalFieldOrNull "hide-archive" "Whether to consider the archive for the report" .= preparedReportHideArchive
|
3c8e99b3e572515784b3a7c0c45fb5b3a77af9dcbe6336c0aa4341d4d9f3ac8e | iatorm/grime | grime.hs | A two - dimensional language based on Boolean grammars
import Expression
import Matcher (matchAllEmpty)
import Parser (Option(..), parseMatFile, parseGrFile, parseOptions)
import Data.List (nub)
import Data.Map.Strict (toList)
import Control.Monad (forM_, when)
import System.Environment (getArgs)
import System.IO (hPutStrLn, stderr)
-- Take a submatrix of a newline-delimited string, possibly with border
submatrix :: Bool -> Rect -> String -> String
submatrix border (x, y, w, h) str = unlines cropped
  where allRows = lines str
        framed | border    = blank : [' ' : r ++ " " | r <- allRows] ++ [blank]
               | otherwise = allRows
        -- Pad width comes from the widest unframed row.
        blank = replicate (maximum $ map length allRows) ' '
        (x', y') = if border then (x + 1, y + 1) else (x, y)
        -- Select the h rows starting at y', then crop each to w columns at x'.
        cropped = map (take w . drop x') . take h . drop y' $ framed
-- | Emit one line on stderr.
printErr :: String -> IO ()
printErr msg = hPutStrLn stderr msg
main :: IO ()
main = do
  args <- getArgs
  -- Accept either "grime -opts grammar pattern" or "grime grammar pattern".
  let (cmdOpts, grFile, matFile) = case args of
        ['-':a, b, c] -> (parseOptions a, b, c)
        [a, b] -> ([], a, b)
        _ -> error "Incorrect arguments. Usage: grime [opts] grammarfile patternfile"
  parsedGrammar <- fmap (parseGrFile grFile) $ readFile grFile
  case parsedGrammar of
    Left parseError -> printErr $ show parseError
    Right (fileOpts, grammar) -> do
      pict <- readFile matFile
      -- Options act as toggles: one named both on the command line and in
      -- the grammar file cancels out (kept iff present in exactly one place).
      let opts = [opt | opt <- nub $ cmdOpts ++ fileOpts, elem opt cmdOpts /= elem opt fileOpts]
          (sze@(wMat, hMat), mat) = parseMatFile pict
          -- With AddBorder the searchable area grows by a one-cell frame.
          (minX, minY, numX, numY) = if elem AddBorder opts then (-1, -1, wMat+2, hMat+2) else (0, 0, wMat, hMat)
          -- Entire tries only the whole grid; otherwise every sub-rectangle
          -- is enumerated, larger sizes first.
          (matches, logs) = matchAllEmpty sze (elem AddBorder opts) mat grammar $
            if elem Entire opts
            then [(minX, minY, numX, numY)]
            else [(x, y, w, h) |
                  w <- [numX,numX-1..0], h <- [numY,numY-1..0],
                  x <- [minX..numX-w], y <- [minY..numY-h]]
          finalMatches = if elem AllMatches opts || elem Number opts then matches else take 1 matches
      -- Debug0: dump options, input dimensions and parsed grammar to stderr.
      when (elem Debug0 opts) $ do
        printErr $ "Options: " ++ show opts
        printErr $ "Input dimensions: " ++ show sze
        printErr "Definitions:"
        forM_ (toList grammar) $ \(l, e) ->
          printErr $ "  " ++ (case l of Nothing -> "_ = "; Just a -> a:" = ") ++ show e
      when (elem Debug1 opts) $ printErr logs
      -- Number (without Entire) prints only the match count; otherwise each
      -- match is reported, optionally with its position and/or submatrix.
      if (elem Number opts /= elem Entire opts)
        then print $ length finalMatches
        else forM_ finalMatches $ \rect -> do
          when (elem Positions opts) $ print rect
          when (not $ elem Patterns opts) . putStrLn $ submatrix (elem AddBorder opts) rect pict
| null | https://raw.githubusercontent.com/iatorm/grime/52b4742bcd17479a892d665cbcd158b06d116243/grime.hs | haskell | Take a submatrix of a newline-delimited string, possibly with border
Print a string to STDERR with newline | A two - dimensional language based on Boolean grammars
import Expression
import Matcher (matchAllEmpty)
import Parser (Option(..), parseMatFile, parseGrFile, parseOptions)
import Data.List (nub)
import Data.Map.Strict (toList)
import Control.Monad (forM_, when)
import System.Environment (getArgs)
import System.IO (hPutStrLn, stderr)
submatrix :: Bool -> Rect -> String -> String
submatrix border (x, y, w, h) = unlines . take h . drop y' . map (take w . drop x') . addBorder . lines
where addBorder matrix = if border
then let blankRow = replicate (maximum $ map length matrix) ' '
in blankRow : map (\row -> ' ' : row ++ " ") matrix ++ [blankRow]
else matrix
(x', y') = if border then (x+1, y+1) else (x, y)
printErr :: String -> IO ()
printErr = hPutStrLn stderr
main :: IO ()
main = do
args <- getArgs
let (cmdOpts, grFile, matFile) = case args of
['-':a, b, c] -> (parseOptions a, b, c)
[a, b] -> ([], a, b)
_ -> error "Incorrect arguments. Usage: grime [opts] grammarfile patternfile"
parsedGrammar <- fmap (parseGrFile grFile) $ readFile grFile
case parsedGrammar of
Left parseError -> printErr $ show parseError
Right (fileOpts, grammar) -> do
pict <- readFile matFile
let opts = [opt | opt <- nub $ cmdOpts ++ fileOpts, elem opt cmdOpts /= elem opt fileOpts]
(sze@(wMat, hMat), mat) = parseMatFile pict
(minX, minY, numX, numY) = if elem AddBorder opts then (-1, -1, wMat+2, hMat+2) else (0, 0, wMat, hMat)
(matches, logs) = matchAllEmpty sze (elem AddBorder opts) mat grammar $
if elem Entire opts
then [(minX, minY, numX, numY)]
else [(x, y, w, h) |
w <- [numX,numX-1..0], h <- [numY,numY-1..0],
x <- [minX..numX-w], y <- [minY..numY-h]]
finalMatches = if elem AllMatches opts || elem Number opts then matches else take 1 matches
when (elem Debug0 opts) $ do
printErr $ "Options: " ++ show opts
printErr $ "Input dimensions: " ++ show sze
printErr "Definitions:"
forM_ (toList grammar) $ \(l, e) ->
printErr $ " " ++ (case l of Nothing -> "_ = "; Just a -> a:" = ") ++ show e
when (elem Debug1 opts) $ printErr logs
if (elem Number opts /= elem Entire opts)
then print $ length finalMatches
else forM_ finalMatches $ \rect -> do
when (elem Positions opts) $ print rect
when (not $ elem Patterns opts) . putStrLn $ submatrix (elem AddBorder opts) rect pict
|
ae49608a94ea2f31d28c388da540313f28b0412b16c6d2606a9d38300e911811 | unnohideyuki/bunny | sample292.hs | a = 42 :: Int
b = 5 :: Integer
c = 3.14 :: Float
d = pi :: Double
main = do print $ (realToFrac a :: Float)
print $ (realToFrac b :: Float)
print $ (realToFrac c :: Float)
print $ (realToFrac d :: Float)
print $ (realToFrac a :: Double)
print $ (realToFrac b :: Double)
print $ (realToFrac c :: Double)
print $ (realToFrac d :: Double)
| null | https://raw.githubusercontent.com/unnohideyuki/bunny/501856ff48f14b252b674585f25a2bf3801cb185/compiler/test/samples/sample292.hs | haskell | a = 42 :: Int
b = 5 :: Integer
c = 3.14 :: Float
d = pi :: Double
main = do print $ (realToFrac a :: Float)
print $ (realToFrac b :: Float)
print $ (realToFrac c :: Float)
print $ (realToFrac d :: Float)
print $ (realToFrac a :: Double)
print $ (realToFrac b :: Double)
print $ (realToFrac c :: Double)
print $ (realToFrac d :: Double)
| |
5585aa164b024120c0556a447021d40ed18eed2cf72195ec9cc0af8361f93e16 | mu-chaco/ReWire | DLX.hs | module DLX where
import Prelude hiding (and,or,seq,(||))
import Boilerplate
import Control.Monad.Resumption.Reactive
import Control.Monad.State hiding (when)
import Control.Monad.Identity hiding (when)
loop = do
case rstIn inp of
One - > reset
Zero - > do ie < - getIEFlag
case ( ie , intIn inp ) of
( One , One ) - > interrupt
_ - > case dataIn inp of
W8 Zero Zero Zero Zero rEn wEn b0 b1 - > mem rEn wEn ( mkReg b0 b1 )
loop = do inp <- getInputs
case rstIn inp of
One -> reset
Zero -> do ie <- getIEFlag
case (ie,intIn inp) of
(One,One) -> interrupt
_ -> case dataIn inp of
W8 Zero Zero Zero Zero rEn wEn b0 b1 -> mem rEn wEn (mkReg b0 b1)
-}
-- Instruction decode: dispatch on the top six bits (the major opcode).
-- 0x00 -> R-type (ALU op, function code in the low six bits);
-- 0x02/0x03 -> J-type (J/JAL with a 26-bit offset);
-- anything else -> I-type (16-bit immediate form).
instrdec w32 =
  let
    t6 = top6 w32
  in
    case t6 of
      W6 Zero Zero Zero Zero Zero Zero -> decodeR opcode rs1' rs2' rd'
        where (rs1,rs2,rd,opcode) = rtype w32
              rs1' = decodeReg rs1
              rs2' = decodeReg rs2
              rd' = decodeReg rd
      W6 Zero Zero Zero Zero One Zero -> decodeJ offset t6
        where (opcode,offset) = jtype w32
      W6 Zero Zero Zero Zero One One -> decodeJ offset t6
        where (opcode,offset) = jtype w32
      _ -> decodeI opcode rs1' rd' imm
        where (opcode,rs1,rd,imm) = itype w32
              rs1' = decodeReg rs1
              rd' = decodeReg rd
reset = putOutputs initOutputs
decodeR :: W6 ->
Register ->
Register ->
Register ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
decodeR opcode rs1 rs2 rd = case opcode of
" ADD 00100000 x20 "
" AND 00100100 x24 "
" OR 0x25 100101 "
" SEQ 0x28 101000 "
" SLE 0x2c 101100 "
W6 Zero Zero Zero One Zero Zero -> error "SLL 0x04 000100"
" SLT 0x2a 101010 "
" SNE 0x29 101001 "
W6 Zero Zero Zero One One One -> error "SRA 0x07 000111"
W6 Zero Zero Zero One One Zero -> error "SRL 0x06 000110"
W6 One Zero Zero Zero One Zero -> error "SUB 0x22 100010"
W6 One Zero Zero One One Zero -> error "XOR 0x26 100110"
decodeJ offset w6 = case w6 of
W6 Zero Zero Zero Zero One Zero -> j offset -- "J 00000010 x02"
" JAL 00000011 x03 "
decodeI :: W6 ->
Register ->
Register ->
W16 ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
decodeI opcode rs1 rd imm = case opcode of
" ADDI 00001000 x08 "
" ANDI 00001100 x0c "
" BEQZ 00000100 x04 "
" BNEZ 00000101 x05 "
" JALR 00010011 x13 "
" JR 00010010 x12 "
W6 Zero Zero One One One One -> lhi rs1 imm -- "LHI 0x0f 001111"
W6 One Zero Zero Zero One One -> lw rs1 rd imm -- "LW 0x23 100011"
" ORI 0x0d 001101 "
" SEQI 0x18 011000 "
" SLEI 0x1c 011100 "
W6 Zero One Zero One Zero Zero -> error "SLLI 0x14 010100"
" SLTI 0x1a 011010 "
W6 Zero One One Zero Zero One -> snei rd rs1 imm -- "SNEI 0x19 011001"
W6 Zero One Zero One One One -> error "SRAI 0x17 010111"
W6 Zero One Zero One One Zero -> error "SRLI 0x16 010110"
W6 Zero Zero One Zero One Zero -> error "SUBI 0x0a 001010"
W6 One Zero One Zero One One -> error "SW 0x2b 101011"
W6 Zero Zero One One One Zero -> error "XORI 0x0e 001110"
_ -> error "unknown opcode"
--
-- Instructions
--
-- | ADD rd, rs1, rs2: 32-bit register add into rd; the carry out is
-- discarded. Consumes one machine cycle ('tick').
add :: Register ->
       Register ->
       Register ->
       ReacT Inputs Outputs (StateT DLXState Identity) ()
add rd rs1 rs2 = do
  opA <- getReg rs1
  opB <- getReg rs2
  let (_carryOut, total) = plusCW32 opA opB Zero
  putReg rd total
  tick
addi :: Register ->
Register ->
W16 ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
addi rD rS imm = do vS <- getReg rS
let signext_imm = signextend16_32 imm
let sum = plusW32 vS signext_imm Zero
putReg rD sum
tick
and :: Register ->
Register ->
Register ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
and rd rs1 rs2 = do v1 <- getReg rs1
v2 <- getReg rs2
putReg rd (andW32 v1 v2)
andi :: Register ->
Register ->
W16 ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
andi rd rs1 imm = do v1 <- getReg rs1
let imm32 = zero16 || imm
putReg rd (andW32 v1 imm32)
-- BEQZ rs1, offset: branch to PC + 4 + sign-extended offset when rs1
-- is all zeroes; otherwise fall through. One cycle either way.
beqz :: Register -> W16 -> DLXM ()
beqz rs1 offset = do v1 <- getReg rs1
                     let se_offset = signextend16_32 offset
                     pc <- getPC
                     let pc' = plusW32 (plusW32 pc w32_4 Zero) se_offset Zero
                     if zero v1 then putPC pc' >> tick else tick
-- BNEZ rs1, offset: the complement of 'beqz' — branch to
-- PC + 4 + sign-extended offset when rs1 is non-zero.
bnez :: Register -> W16 -> DLXM ()
bnez rs1 offset = do v1 <- getReg rs1
                     let se_offset = signextend16_32 offset
                     pc <- getPC
                     let pc' = plusW32 (plusW32 pc w32_4 Zero) se_offset Zero
                     if zero v1 then tick else putPC pc' >> tick
-- | True exactly when every bit of the word is Zero.
zero :: W32 -> Bool
zero (W32 Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero) = True
zero _ = False
-- Jump
j :: W26 -> DLXM ()
j offset = do pc <- getPC
let signext_offset = signextend26_to_32 offset
let pc' = plusW32 pc (plusW32 signext_offset w32_4 Zero) Zero
putPC pc'
tick
-- Jump and Link
jal :: W26 -> DLXM ()
jal offset = do pc <- getPC
let signext_offset = signextend26_to_32 offset
let pc' = plusW32 pc (plusW32 signext_offset w32_4 Zero) Zero
let r31' = plusW32 pc w32_8 Zero
putReg R31 r31'
putPC pc'
tick
Jump and Link register
jalr :: Register -> DLXM ()
jalr rs1 = do pc <- getPC
let r31' = plusW32 pc w32_8 Zero
putReg R31 r31'
dst <- getReg rs1
putPC dst
tick
-- Jump register
jr :: Register -> DLXM ()
jr rs1 = do pc <- getPC
dst <- getReg rs1
putPC dst
tick
-- Load high bits immediate
lhi :: Register -> W16 -> DLXM ()
lhi rd imm = let
w32 = imm || zero16
in
putReg rd w32
-- Load Word
-- | LW rd, offset(rs1): load a 32-bit word from memory. Drives the
-- address bus with rs1 + sign-extended offset, deasserts write-enable,
-- waits one cycle, then latches the data bus into rd.
lw :: Register ->
      Register ->
      W16 ->
      ReacT Inputs Outputs (StateT DLXState Identity) ()
lw rs1 rd offset = do
  base <- getReg rs1
  let effAddr = plusW32 base (signextend16_32 offset) Zero
  putWeOut Zero
  putAddrOut effAddr
  tick
  loaded <- getDataIn
  putReg rd loaded
-- It seems weird to me that there is no checking of whether
there is a special register involved with the next two .
-- Where does that happen?
-- Move general purpose to special
movi2s :: Register ->
Register ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
movi2s rd rs1 = getReg rs1 >>= putReg rd >> tick
-- Move special to general purpose
movs2i :: Register ->
Register ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
movs2i rd rs1 = getReg rs1 >>= putReg rd >> tick
-- No Op
nop :: DLXM ()
nop = return ()
or :: Register ->
Register ->
Register ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
or rd rs1 rs2 = do v1 <- getReg rs1
v2 <- getReg rs2
let vd = orW32 v1 v2
putReg rd vd
tick
ori :: Register ->
Register ->
W16 ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
ori rd rs1 imm = do v1 <- getReg rs1
let imm32 = zero16 || imm
putReg rd (orW32 v1 imm32)
-- Set if equal
seq :: Register ->
Register ->
Register ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
seq rd rs1 rs2 = do v1 <- getReg rs1
v2 <- getReg rs2
if v1==v2
then
putReg rd one32
else
putReg rd zero32
-- Set if equal to immediate
seqi :: Register ->
Register ->
W16 ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
seqi rd rs1 imm = do v1 <- getReg rs1
if v1 == signextend16_32 imm
then
putReg rd one32
else
putReg rd zero32
-- Set if less than or equal
sle :: Register ->
Register ->
Register ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
sle rd rs1 rs2 = do v1 <- getReg rs1
v2 <- getReg rs2
if v1 `w32_lte` v2
then
putReg rd one32
else
putReg rd zero32
-- Set if less than or equal to immediate
slei :: Register ->
Register ->
W16 ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
slei rd rs1 imm = do v1 <- getReg rs1
if v1 `w32_lte` signextend16_32 imm
then
putReg rd one32
else
putReg rd zero32
-- Set if less than
slt :: Register ->
Register ->
Register ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
slt rd rs1 rs2 = do v1 <- getReg rs1
v2 <- getReg rs2
if v1 `w32_lt` v2
then
putReg rd one32
else
putReg rd zero32
-- Set if less than immediate
slti :: Register ->
Register ->
W16 ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
slti rd rs1 imm = do v1 <- getReg rs1
if v1 `w32_lt` signextend16_32 imm
then
putReg rd one32
else
putReg rd zero32
-- Set if not equal
sne :: Register ->
Register ->
Register ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
sne rd rs1 rs2 = do v1 <- getReg rs1
v2 <- getReg rs2
if v1 `w32_ne` v2
then
putReg rd one32
else
putReg rd zero32
-- Set if not equal to immediate
snei :: Register ->
Register ->
W16 ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
snei rd rs1 imm = do v1 <- getReg rs1
if v1 `w32_ne` signextend16_32 imm
then
putReg rd one32
else
putReg rd zero32
data Register = R0 | R1 | R2 | R3 | R4 | R5 | R6 | R7
| R8 | R9 | R10 | R11 | R12 | R13 | R14 | R15
| R16 | R17 | R18 | R19 | R20 | R21 | R22 | R23
| R24 | R25 | R26 | R27 | R28 | R29 | R30 | R31
data Inputs = Inputs { dataIn :: W32,
rstIn :: Bit,
intIn :: Bit
}
data Outputs = Outputs { addrOut :: W32,
dataOut :: W32,
--, not sure what this stuff is.
weOut :: Bit,
iackOut :: Bit
}
{- -}
--
Breaks a W32 into opcode and value fields of a DLX J - type instruction .
--
jtype :: W32 -> (W6, W26)
jtype (W32 b31 b30 b29 b28 b27 b26 b25 b24 b23 b22 b21 b20 b19 b18 b17 b16
b15 b14 b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0)
= (opcode,value)
where opcode = W6 b31 b30 b29 b28 b27 b26
value = W26 b25 b24 b23 b22 b21 b20 b19 b18 b17 b16
b15 b14 b13 b12 b11 b10 b9 b8 b7 b6
b5 b4 b3 b2 b1 b0
--
-- Breaks a W32 into opcode, source and destination register, and immediate
-- fields of a DLX I-type instruction.
--
itype :: W32 -> (W6, W5, W5, W16)
itype (W32 b31 b30 b29 b28 b27 b26 b25 b24 b23 b22 b21 b20 b19 b18 b17 b16
b15 b14 b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0)
= (opcode,rs1,rd,immediate)
where opcode = W6 b31 b30 b29 b28 b27 b26
rs1 = W5 b25 b24 b23 b22 b21
rd = W5 b20 b19 b18 b17 b16
immediate = W16 b15 b14 b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0
-- | Split an R-type instruction word into (rs1, rs2, rd, function code).
rtype :: W32 -> (W5, W5, W5, W6)
rtype (W32 b31 b30 b29 b28 b27 b26 b25 b24 b23 b22 b21 b20 b19 b18 b17 b16
           b15 b14 b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0) =
  let srcA = W5 b25 b24 b23 b22 b21
      srcB = W5 b20 b19 b18 b17 b16
      dest = W5 b15 b14 b13 b12 b11
      func = W6 b5 b4 b3 b2 b1 b0
  in (srcA, srcB, dest, func)
data DLXState = DLXState { inputs :: Inputs,
outputs :: Outputs,
zFlag :: Bit, -- some of this stuff has been
ieFlag :: Bit, -- snarfed willy nilly from MiniIsa.hs.
overFlw :: Bit,
pc :: W32,
a " special register " along with FPSR
r0 :: W32,
r1 :: W32,
r2 :: W32,
r3 :: W32,
r4 :: W32,
r5 :: W32,
r6 :: W32,
r7 :: W32,
r8 :: W32,
r9 :: W32,
r10 :: W32,
r11 :: W32,
r12 :: W32,
r13 :: W32,
r14 :: W32,
r15 :: W32,
r16 :: W32,
r17 :: W32,
r18 :: W32,
r19 :: W32,
r20 :: W32,
r21 :: W32,
r22 :: W32,
r23 :: W32,
r24 :: W32,
r25 :: W32,
r26 :: W32,
r27 :: W32,
r28 :: W32,
r29 :: W32,
r30 :: W32,
r31 :: W32
}
type DLXM = ReacT Inputs Outputs (StateT DLXState Identity)
getState :: DLXM DLXState
getState = lift get
putState :: DLXState -> DLXM ()
putState = lift . put
putWeOut :: Bit -> DLXM ()
putWeOut b = do o <- getOutputs
putOutputs (o { weOut = b })
initInputs = Inputs { dataIn = w32_0, rstIn = Zero, intIn = Zero }
initOutputs = Outputs { addrOut = w32_0, dataOut = w32_0, weOut = Zero, iackOut = Zero }
getDataIn :: DLXM W32
getDataIn = do i <- getInputs
return (dataIn i)
getReg :: Register -> DLXM W32
getReg r = do s <- getState
case r of
R0 -> return (r0 s)
R1 -> return (r1 s)
R2 -> return (r2 s)
R3 -> return (r3 s)
R4 -> return (r4 s)
R5 -> return (r5 s)
R6 -> return (r6 s)
R7 -> return (r7 s)
R8 -> return (r8 s)
R9 -> return (r9 s)
R10 -> return (r10 s)
R11 -> return (r11 s)
R12 -> return (r12 s)
R13 -> return (r13 s)
R14 -> return (r14 s)
R15 -> return (r15 s)
R16 -> return (r16 s)
R17 -> return (r17 s)
R18 -> return (r18 s)
R19 -> return (r19 s)
R20 -> return (r20 s)
R21 -> return (r21 s)
R22 -> return (r22 s)
R23 -> return (r23 s)
R24 -> return (r24 s)
R25 -> return (r25 s)
R26 -> return (r26 s)
R27 -> return (r27 s)
R28 -> return (r28 s)
R29 -> return (r29 s)
R30 -> return (r30 s)
R31 -> return (r31 s)
putReg :: Register -> W32 -> DLXM ()
putReg r v = do s <- getState
case r of
R0 -> putState (s { r0 = v })
R1 -> putState (s { r1 = v })
R2 -> putState (s { r2 = v })
R3 -> putState (s { r3 = v })
R4 -> putState (s { r4 = v })
R5 -> putState (s { r5 = v })
R6 -> putState (s { r6 = v })
R7 -> putState (s { r7 = v })
R8 -> putState (s { r8 = v })
R9 -> putState (s { r9 = v })
R10 -> putState (s { r10 = v })
R11 -> putState (s { r11 = v })
R12 -> putState (s { r12 = v })
R13 -> putState (s { r13 = v })
R14 -> putState (s { r14 = v })
R15 -> putState (s { r15 = v })
R16 -> putState (s { r16 = v })
R17 -> putState (s { r17 = v })
R18 -> putState (s { r18 = v })
R19 -> putState (s { r19 = v })
R20 -> putState (s { r20 = v })
R21 -> putState (s { r21 = v })
R22 -> putState (s { r22 = v })
R23 -> putState (s { r23 = v })
R24 -> putState (s { r24 = v })
R25 -> putState (s { r25 = v })
R26 -> putState (s { r26 = v })
R27 -> putState (s { r27 = v })
R28 -> putState (s { r28 = v })
R29 -> putState (s { r29 = v })
R30 -> putState (s { r30 = v })
R31 -> putState (s { r31 = v })
getPC :: DLXM W32
getPC = do s <- getState
return (pc s)
putPC :: W32 -> DLXM ()
putPC v = do s <- getState
putState (s { pc = v })
getIAR :: DLXM W32
getIAR = do s <- getState
return (iar s)
putIAR :: W32 -> DLXM ()
putIAR v = do s <- getState
putState (s { iar = v })
getInputs :: DLXM Inputs
getInputs = do s <- getState
return (inputs s)
putInputs :: Inputs -> DLXM ()
putInputs i = do s <- getState
putState (s { inputs = i })
getOutputs :: DLXM Outputs
getOutputs = do s <- getState
return (outputs s)
putOutputs :: Outputs -> DLXM ()
putOutputs o = do s <- getState
putState (s { outputs = o })
putAddrOut :: W32 -> DLXM ()
putAddrOut a = do o <- getOutputs
putOutputs (o { addrOut = a })
-- Advance one clock cycle: present the current outputs to the
-- environment and latch whatever inputs come back.
tick :: DLXM ()
tick = do o <- getOutputs
          i <- signal o
          putInputs i
signextend26_to_32 :: W26 -> W32
signextend26_to_32 (W26 Zero b24 b23 b22 b21 b20 b19 b18 b17 b16 b15 b14
b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0)
= W32 Zero Zero Zero Zero Zero Zero Zero
b24 b23 b22 b21 b20 b19 b18 b17
b16 b15 b14 b13 b12 b11 b10 b9
b8 b7 b6 b5 b4 b3 b2 b1 b0
signextend26_to_32 (W26 One b24 b23 b22 b21 b20 b19 b18 b17 b16 b15 b14
b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0)
= W32 One One One One One One One
b24 b23 b22 b21 b20 b19 b18 b17
b16 b15 b14 b13 b12 b11 b10 b9
b8 b7 b6 b5 b4 b3 b2 b1 b0
--
-- I'm not sure this is exactly the right thing to do.
--
-- Sign-extend a 16-bit immediate to 32 bits. As in 'jtype' and
-- 'signextend26_to_32', the first constructor argument is the most
-- significant bit, i.e. the sign bit, so it is replicated into the
-- upper half. The previous version zero-filled the top 16 bits, which
-- silently turned negative immediates (ADDI constants, BEQZ/BNEZ
-- displacements) into large positive values.
signextend16_32 :: W16 -> W32
signextend16_32 (W16 Zero b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 b11 b12 b13 b14 b15)
  = W32 Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero
        Zero b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 b11 b12 b13 b14 b15
signextend16_32 (W16 One b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 b11 b12 b13 b14 b15)
  = W32 One One One One One One One One One One One One One One One One
        One b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 b11 b12 b13 b14 b15
decodeReg w5 = case w5 of
W5 Zero Zero Zero Zero Zero -> R0
W5 Zero Zero Zero Zero One -> R1
W5 Zero Zero Zero One Zero -> R2
W5 Zero Zero Zero One One -> R3
W5 Zero Zero One Zero Zero -> R4
W5 Zero Zero One Zero One -> R5
W5 Zero Zero One One Zero -> R6
W5 Zero Zero One One One -> R7
W5 Zero One Zero Zero Zero -> R8
W5 Zero One Zero Zero One -> R9
W5 Zero One Zero One Zero -> R10
W5 Zero One Zero One One -> R11
W5 Zero One One Zero Zero -> R12
W5 Zero One One Zero One -> R13
W5 Zero One One One Zero -> R14
W5 Zero One One One One -> R15
----
W5 One Zero Zero Zero Zero -> R16
W5 One Zero Zero Zero One -> R17
W5 One Zero Zero One Zero -> R18
W5 One Zero Zero One One -> R19
W5 One Zero One Zero Zero -> R20
W5 One Zero One Zero One -> R21
W5 One Zero One One Zero -> R22
W5 One Zero One One One -> R23
W5 One One Zero Zero Zero -> R24
W5 One One Zero Zero One -> R25
W5 One One Zero One Zero -> R26
W5 One One Zero One One -> R27
W5 One One One Zero Zero -> R28
W5 One One One Zero One -> R29
W5 One One One One Zero -> R30
W5 One One One One One -> R31
| null | https://raw.githubusercontent.com/mu-chaco/ReWire/a8dcea6ab0989474988a758179a1d876e2c32370/cruft/DLX.hs | haskell | "J 00000010 x02"
"LHI 0x0f 001111"
"LW 0x23 100011"
"SNEI 0x19 011001"
Instructions
Jump
Jump and Link
Jump register
Load high bits immediate
Load Word
It seems weird to me that there is no checking of whether
Where does that happen?
Move general purpose to special
Move special to general purpose
No Op
Set if equal
Set if equal to immediate
Set if less than or equal
Set if less than or equal to immediate
Set if less than
Set if less than immediate
Set if not equal
Set if not equal to immediate
, not sure what this stuff is.
Breaks a W32 into opcode, source and destination register, and immediate
fields of a DLX I-type instruction.
some of this stuff has been
snarfed willy nilly from MiniIsa.hs.
I'm not sure this is exactly the right thing to do.
-- | module DLX where
import Prelude hiding (and,or,seq,(||))
import Boilerplate
import Control.Monad.Resumption.Reactive
import Control.Monad.State hiding (when)
import Control.Monad.Identity hiding (when)
loop = do
case rstIn inp of
One - > reset
Zero - > do ie < - getIEFlag
case ( ie , intIn inp ) of
( One , One ) - > interrupt
_ - > case dataIn inp of
W8 Zero Zero Zero Zero rEn wEn b0 b1 - > mem rEn wEn ( mkReg b0 b1 )
loop = do inp <- getInputs
case rstIn inp of
One -> reset
Zero -> do ie <- getIEFlag
case (ie,intIn inp) of
(One,One) -> interrupt
_ -> case dataIn inp of
W8 Zero Zero Zero Zero rEn wEn b0 b1 -> mem rEn wEn (mkReg b0 b1)
-}
-- Decode a 32-bit instruction word by its top-6-bit opcode field:
--   000000        -> R-type (register ALU op), sub-decoded by decodeR
--   000010/000011 -> J-type (jumps), handed to decodeJ with the raw opcode
--   anything else -> I-type, handed to decodeI
-- Field extraction is delegated to rtype/jtype/itype below.
instrdec w32 =
let
t6 = top6 w32
in
case t6 of
W6 Zero Zero Zero Zero Zero Zero -> decodeR opcode rs1' rs2' rd'
where (rs1,rs2,rd,opcode) = rtype w32
rs1' = decodeReg rs1
rs2' = decodeReg rs2
rd' = decodeReg rd
W6 Zero Zero Zero Zero One Zero -> decodeJ offset t6
where (opcode,offset) = jtype w32
W6 Zero Zero Zero Zero One One -> decodeJ offset t6
where (opcode,offset) = jtype w32
_ -> decodeI opcode rs1' rd' imm
where (opcode,rs1,rd,imm) = itype w32
rs1' = decodeReg rs1
rd' = decodeReg rd
-- Reset: drive the quiescent (all-zero) output vector.
reset = putOutputs initOutputs
-- Dispatch an R-type instruction on its 6-bit function code.
-- NOTE(review): this block was corrupted by comment extraction — the bare
-- string lines below (" ADD ... ", " AND ... ", ...) are orphaned error
-- messages whose `W6 ... -> error` patterns were lost. Restore the missing
-- alternatives from version control before using this decoder.
decodeR :: W6 ->
Register ->
Register ->
Register ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
decodeR opcode rs1 rs2 rd = case opcode of
" ADD 00100000 x20 "
" AND 00100100 x24 "
" OR 0x25 100101 "
" SEQ 0x28 101000 "
" SLE 0x2c 101100 "
W6 Zero Zero Zero One Zero Zero -> error "SLL 0x04 000100"
" SLT 0x2a 101010 "
" SNE 0x29 101001 "
W6 Zero Zero Zero One One One -> error "SRA 0x07 000111"
W6 Zero Zero Zero One One Zero -> error "SRL 0x06 000110"
W6 One Zero Zero Zero One Zero -> error "SUB 0x22 100010"
W6 One Zero Zero One One Zero -> error "XOR 0x26 100110"
-- Dispatch a J-type instruction (J / JAL) on its opcode.
-- NOTE(review): both alternatives were mangled by comment extraction; the
-- orphaned " JAL 00000011 x03 " string is the remnant of one of them.
-- Restore the `W6 ... ->` patterns from version control.
decodeJ offset w6 = case w6 of
" JAL 00000011 x03 "
-- Dispatch an I-type instruction on its 6-bit opcode.
-- NOTE(review): corrupted by comment extraction — the bare string lines
-- are orphaned error texts whose `W6 ... -> error` patterns were lost;
-- restore the missing alternatives from version control.
decodeI :: W6 ->
Register ->
Register ->
W16 ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
decodeI opcode rs1 rd imm = case opcode of
" ADDI 00001000 x08 "
" ANDI 00001100 x0c "
" BEQZ 00000100 x04 "
" BNEZ 00000101 x05 "
" JALR 00010011 x13 "
" JR 00010010 x12 "
" ORI 0x0d 001101 "
" SEQI 0x18 011000 "
" SLEI 0x1c 011100 "
W6 Zero One Zero One Zero Zero -> error "SLLI 0x14 010100"
" SLTI 0x1a 011010 "
W6 Zero One Zero One One One -> error "SRAI 0x17 010111"
W6 Zero One Zero One One Zero -> error "SRLI 0x16 010110"
W6 Zero Zero One Zero One Zero -> error "SUBI 0x0a 001010"
W6 One Zero One Zero One One -> error "SW 0x2b 101011"
W6 Zero Zero One One One Zero -> error "XORI 0x0e 001110"
_ -> error "unknown opcode"
-- | ADD rd, rs1, rs2: rd <- rs1 + rs2 (carry-in Zero), then advance a cycle.
add :: Register ->
       Register ->
       Register ->
       ReacT Inputs Outputs (StateT DLXState Identity) ()
add rd rs1 rs2 = do
  v1 <- getReg rs1
  v2 <- getReg rs2
  -- plusCW32 also produces the carry-out; it is deliberately discarded
  -- (underscore silences the unused-binding warning).
  -- NOTE(review): DLXState carries an overFlw flag that is never written
  -- anywhere in this file — confirm whether the carry should update it.
  let (_cout, v) = plusCW32 v1 v2 Zero
  putReg rd v
  tick
-- | ADDI rD, rS, imm: rD <- rS + sign-extended immediate, then tick.
addi :: Register ->
        Register ->
        W16 ->
        ReacT Inputs Outputs (StateT DLXState Identity) ()
addi rD rS imm = do
  src <- getReg rS
  -- plusW32 takes an explicit carry-in; Zero here.
  putReg rD (plusW32 src (signextend16_32 imm) Zero)
  tick
-- | AND rd, rs1, rs2: bitwise conjunction into rd.
-- NOTE(review): unlike add/or, this instruction does not end with `tick`
-- in the original — confirm whether the missing cycle advance is intended.
and :: Register ->
       Register ->
       Register ->
       ReacT Inputs Outputs (StateT DLXState Identity) ()
and rd rs1 rs2 = do
  a <- getReg rs1
  b <- getReg rs2
  putReg rd (andW32 a b)
-- | ANDI rd, rs1, imm: rd <- rs1 AND zero-extended immediate.
-- (`zero16 || imm` concatenates a zero upper half with the 16-bit imm.)
andi :: Register ->
        Register ->
        W16 ->
        ReacT Inputs Outputs (StateT DLXState Identity) ()
andi rd rs1 imm = do
  v <- getReg rs1
  putReg rd (andW32 v (zero16 || imm))
-- | BEQZ rs1, offset: branch to pc+4+signext(offset) when rs1 is all zeros;
-- either way one cycle elapses.
beqz :: Register -> W16 -> DLXM ()
beqz rs1 offset = do
  v <- getReg rs1
  pc <- getPC
  let target = plusW32 (plusW32 pc w32_4 Zero) (signextend16_32 offset) Zero
  if zero v then putPC target >> tick else tick
-- | BNEZ rs1, offset: branch to pc+4+signext(offset) when rs1 is non-zero;
-- either way one cycle elapses.
bnez :: Register -> W16 -> DLXM ()
bnez rs1 offset = do
  v <- getReg rs1
  pc <- getPC
  let target = plusW32 (plusW32 pc w32_4 Zero) (signextend16_32 offset) Zero
  if zero v then tick else putPC target >> tick
-- Predicate: True iff every one of the 32 bits is Zero.
zero w32 =
  case w32 of
    W32 Zero Zero Zero Zero Zero Zero Zero Zero
        Zero Zero Zero Zero Zero Zero Zero Zero
        Zero Zero Zero Zero Zero Zero Zero Zero
        Zero Zero Zero Zero Zero Zero Zero Zero -> True
    _ -> False
-- | J offset: unconditional jump to pc + (signext(offset) + 4), then tick.
j :: W26 -> DLXM ()
j offset = do
  pc <- getPC
  putPC (plusW32 pc (plusW32 (signextend26_to_32 offset) w32_4 Zero) Zero)
  tick
-- | JAL offset: jump-and-link. R31 receives pc+8 (the link value used by
-- the original), the PC receives pc + (signext(offset) + 4).
jal :: W26 -> DLXM ()
jal offset = do
  pc <- getPC
  let target = plusW32 pc (plusW32 (signextend26_to_32 offset) w32_4 Zero) Zero
      link   = plusW32 pc w32_8 Zero
  putReg R31 link
  putPC target
  tick
-- Jump and Link register
-- | JALR rs1: jump to the address in rs1, linking pc+8 into R31.
-- The link is written BEFORE rs1 is read (as in the original), so
-- `jalr R31` jumps to the freshly written link value.
jalr :: Register -> DLXM ()
jalr rs1 = do
  pc <- getPC
  putReg R31 (plusW32 pc w32_8 Zero)
  target <- getReg rs1
  putPC target
  tick
-- | JR rs1: unconditional jump to the address held in rs1.
jr :: Register -> DLXM ()
jr rs1 = do
  _ <- getPC   -- PC read kept from the original, though its value is unused
  target <- getReg rs1
  putPC target
  tick
-- | LHI rd, imm: load-high — the immediate becomes the upper 16 bits and
-- the lower half is zero (`||` concatenates words; compare `zero16 || imm`
-- used for low-half immediates elsewhere). No tick in the original.
lhi :: Register -> W16 -> DLXM ()
lhi rd imm = putReg rd (imm || zero16)
-- Load word: rd <- MEM[rs1 + signext(offset)].
-- Bus protocol (order matters): deassert write-enable, drive the effective
-- address, tick one cycle so memory can respond, then latch dataIn into rd.
lw :: Register ->
Register ->
W16 ->
ReacT Inputs Outputs (StateT DLXState Identity) ()
lw rs1 rd offset = do base <- getReg rs1
eff_addr <- return $ plusW32 base (signextend16_32 offset) Zero
putWeOut Zero
putAddrOut eff_addr
tick
v <- getDataIn
putReg rd v
-- There is a special register involved with the next two.
-- | Move general-purpose to special register: copy rs1 into rd, then tick.
movi2s :: Register ->
          Register ->
          ReacT Inputs Outputs (StateT DLXState Identity) ()
movi2s rd rs1 = do
  v <- getReg rs1
  putReg rd v
  tick
-- | Move special to general-purpose register: copy rs1 into rd, then tick.
movs2i :: Register ->
          Register ->
          ReacT Inputs Outputs (StateT DLXState Identity) ()
movs2i rd rs1 = do
  v <- getReg rs1
  putReg rd v
  tick
-- No-op: does nothing at all (note: does not even tick).
nop :: DLXM ()
nop = return ()
-- | OR rd, rs1, rs2: bitwise disjunction into rd, then advance a cycle.
or :: Register ->
      Register ->
      Register ->
      ReacT Inputs Outputs (StateT DLXState Identity) ()
or rd rs1 rs2 = do
  a <- getReg rs1
  b <- getReg rs2
  putReg rd (orW32 a b)
  tick
-- | ORI rd, rs1, imm: rd <- rs1 OR zero-extended immediate. No tick in
-- the original (see NOTE on `and`).
ori :: Register ->
       Register ->
       W16 ->
       ReacT Inputs Outputs (StateT DLXState Identity) ()
ori rd rs1 imm = do
  v <- getReg rs1
  putReg rd (orW32 v (zero16 || imm))
-- Comparison instructions: rd <- one32 when the predicate holds, zero32
-- otherwise. The *I forms compare against a sign-extended immediate.
-- None of these advance the cycle (no tick), matching the originals.

-- | SEQ: set if equal.
seq :: Register ->
       Register ->
       Register ->
       ReacT Inputs Outputs (StateT DLXState Identity) ()
seq rd rs1 rs2 = do
  a <- getReg rs1
  b <- getReg rs2
  putReg rd (if a == b then one32 else zero32)

-- | SEQI: set if equal to immediate.
seqi :: Register ->
        Register ->
        W16 ->
        ReacT Inputs Outputs (StateT DLXState Identity) ()
seqi rd rs1 imm = do
  v <- getReg rs1
  putReg rd (if v == signextend16_32 imm then one32 else zero32)

-- | SLE: set if less than or equal.
sle :: Register ->
       Register ->
       Register ->
       ReacT Inputs Outputs (StateT DLXState Identity) ()
sle rd rs1 rs2 = do
  a <- getReg rs1
  b <- getReg rs2
  putReg rd (if a `w32_lte` b then one32 else zero32)

-- | SLEI: set if less than or equal to immediate.
slei :: Register ->
        Register ->
        W16 ->
        ReacT Inputs Outputs (StateT DLXState Identity) ()
slei rd rs1 imm = do
  v <- getReg rs1
  putReg rd (if v `w32_lte` signextend16_32 imm then one32 else zero32)

-- | SLT: set if less than.
slt :: Register ->
       Register ->
       Register ->
       ReacT Inputs Outputs (StateT DLXState Identity) ()
slt rd rs1 rs2 = do
  a <- getReg rs1
  b <- getReg rs2
  putReg rd (if a `w32_lt` b then one32 else zero32)

-- | SLTI: set if less than immediate.
slti :: Register ->
        Register ->
        W16 ->
        ReacT Inputs Outputs (StateT DLXState Identity) ()
slti rd rs1 imm = do
  v <- getReg rs1
  putReg rd (if v `w32_lt` signextend16_32 imm then one32 else zero32)

-- | SNE: set if not equal.
sne :: Register ->
       Register ->
       Register ->
       ReacT Inputs Outputs (StateT DLXState Identity) ()
sne rd rs1 rs2 = do
  a <- getReg rs1
  b <- getReg rs2
  putReg rd (if a `w32_ne` b then one32 else zero32)

-- | SNEI: set if not equal to immediate.
snei :: Register ->
        Register ->
        W16 ->
        ReacT Inputs Outputs (StateT DLXState Identity) ()
snei rd rs1 imm = do
  v <- getReg rs1
  putReg rd (if v `w32_ne` signextend16_32 imm then one32 else zero32)
-- | The 32 DLX general-purpose registers.
data Register = R0 | R1 | R2 | R3 | R4 | R5 | R6 | R7
| R8 | R9 | R10 | R11 | R12 | R13 | R14 | R15
| R16 | R17 | R18 | R19 | R20 | R21 | R22 | R23
| R24 | R25 | R26 | R27 | R28 | R29 | R30 | R31
-- | Per-cycle input pins: data bus, reset line, interrupt request.
data Inputs = Inputs { dataIn :: W32,
rstIn :: Bit,
intIn :: Bit
}
-- | Per-cycle output pins: address/data buses, write enable, interrupt ack.
data Outputs = Outputs { addrOut :: W32,
dataOut :: W32,
weOut :: Bit,
iackOut :: Bit
}
-- Breaks a W32 into opcode and value fields of a DLX J-type instruction.
-- J-type split: top 6 bits are the opcode, the remaining 26 the jump value.
-- (First pattern variable b31 is the most significant bit.)
jtype :: W32 -> (W6, W26)
jtype (W32 b31 b30 b29 b28 b27 b26 b25 b24 b23 b22 b21 b20 b19 b18 b17 b16
b15 b14 b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0)
= (opcode,value)
where opcode = W6 b31 b30 b29 b28 b27 b26
value = W26 b25 b24 b23 b22 b21 b20 b19 b18 b17 b16
b15 b14 b13 b12 b11 b10 b9 b8 b7 b6
b5 b4 b3 b2 b1 b0
-- I-type split: 6-bit opcode, 5-bit source register, 5-bit destination
-- register, 16-bit immediate (MSB-first throughout).
itype :: W32 -> (W6, W5, W5, W16)
itype (W32 b31 b30 b29 b28 b27 b26 b25 b24 b23 b22 b21 b20 b19 b18 b17 b16
b15 b14 b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0)
= (opcode,rs1,rd,immediate)
where opcode = W6 b31 b30 b29 b28 b27 b26
rs1 = W5 b25 b24 b23 b22 b21
rd = W5 b20 b19 b18 b17 b16
immediate = W16 b15 b14 b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0
-- R-type split: two 5-bit source registers, a 5-bit destination register,
-- and the 6-bit function code taken from the LOW bits of the word.
rtype :: W32 -> (W5, W5, W5, W6)
rtype (W32 b31 b30 b29 b28 b27 b26 b25 b24 b23 b22 b21 b20 b19 b18 b17 b16
b15 b14 b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0)
= (rs1,rs2,rd,opcode)
where rs1 = W5 b25 b24 b23 b22 b21
rs2 = W5 b20 b19 b18 b17 b16
rd = W5 b15 b14 b13 b12 b11
opcode = W6 b5 b4 b3 b2 b1 b0
-- | Complete processor state threaded through DLXM: latched I/O, the
-- overflow flag, PC, the IAR special register, and the 32 GPRs.
data DLXState = DLXState { inputs :: Inputs,
outputs :: Outputs,
overFlw :: Bit,
pc :: W32,
-- IAR is a "special register" (along with FPSR, per the original,
-- extraction-mangled comment here). The field is required: getIAR and
-- putIAR below read `iar s` and update `s { iar = v }`.
iar :: W32,
r0 :: W32,
r1 :: W32,
r2 :: W32,
r3 :: W32,
r4 :: W32,
r5 :: W32,
r6 :: W32,
r7 :: W32,
r8 :: W32,
r9 :: W32,
r10 :: W32,
r11 :: W32,
r12 :: W32,
r13 :: W32,
r14 :: W32,
r15 :: W32,
r16 :: W32,
r17 :: W32,
r18 :: W32,
r19 :: W32,
r20 :: W32,
r21 :: W32,
r22 :: W32,
r23 :: W32,
r24 :: W32,
r25 :: W32,
r26 :: W32,
r27 :: W32,
r28 :: W32,
r29 :: W32,
r30 :: W32,
r31 :: W32
}
-- | The processor monad: reactive resumptions over a DLXState state monad.
type DLXM = ReacT Inputs Outputs (StateT DLXState Identity)
-- Lift the underlying State get/put into the reactive monad.
getState :: DLXM DLXState
getState = lift get
putState :: DLXState -> DLXM ()
putState = lift . put
-- Drive only the write-enable output pin, leaving the rest untouched.
putWeOut :: Bit -> DLXM ()
putWeOut b = do o <- getOutputs
putOutputs (o { weOut = b })
-- Quiescent I/O vectors (all-zero words and bits).
initInputs = Inputs { dataIn = w32_0, rstIn = Zero, intIn = Zero }
initOutputs = Outputs { addrOut = w32_0, dataOut = w32_0, weOut = Zero, iackOut = Zero }
-- Read the data-in bus from the most recently latched inputs.
getDataIn :: DLXM W32
getDataIn = do i <- getInputs
return (dataIn i)
-- Read a general-purpose register out of the current state.
-- Note: R0 is an ordinary field here, not hard-wired to zero.
getReg :: Register -> DLXM W32
getReg r = do s <- getState
case r of
R0 -> return (r0 s)
R1 -> return (r1 s)
R2 -> return (r2 s)
R3 -> return (r3 s)
R4 -> return (r4 s)
R5 -> return (r5 s)
R6 -> return (r6 s)
R7 -> return (r7 s)
R8 -> return (r8 s)
R9 -> return (r9 s)
R10 -> return (r10 s)
R11 -> return (r11 s)
R12 -> return (r12 s)
R13 -> return (r13 s)
R14 -> return (r14 s)
R15 -> return (r15 s)
R16 -> return (r16 s)
R17 -> return (r17 s)
R18 -> return (r18 s)
R19 -> return (r19 s)
R20 -> return (r20 s)
R21 -> return (r21 s)
R22 -> return (r22 s)
R23 -> return (r23 s)
R24 -> return (r24 s)
R25 -> return (r25 s)
R26 -> return (r26 s)
R27 -> return (r27 s)
R28 -> return (r28 s)
R29 -> return (r29 s)
R30 -> return (r30 s)
R31 -> return (r31 s)
-- Write a general-purpose register via a record update on the state.
-- Note: writes to R0 are NOT discarded (no hard-wired zero register).
putReg :: Register -> W32 -> DLXM ()
putReg r v = do s <- getState
case r of
R0 -> putState (s { r0 = v })
R1 -> putState (s { r1 = v })
R2 -> putState (s { r2 = v })
R3 -> putState (s { r3 = v })
R4 -> putState (s { r4 = v })
R5 -> putState (s { r5 = v })
R6 -> putState (s { r6 = v })
R7 -> putState (s { r7 = v })
R8 -> putState (s { r8 = v })
R9 -> putState (s { r9 = v })
R10 -> putState (s { r10 = v })
R11 -> putState (s { r11 = v })
R12 -> putState (s { r12 = v })
R13 -> putState (s { r13 = v })
R14 -> putState (s { r14 = v })
R15 -> putState (s { r15 = v })
R16 -> putState (s { r16 = v })
R17 -> putState (s { r17 = v })
R18 -> putState (s { r18 = v })
R19 -> putState (s { r19 = v })
R20 -> putState (s { r20 = v })
R21 -> putState (s { r21 = v })
R22 -> putState (s { r22 = v })
R23 -> putState (s { r23 = v })
R24 -> putState (s { r24 = v })
R25 -> putState (s { r25 = v })
R26 -> putState (s { r26 = v })
R27 -> putState (s { r27 = v })
R28 -> putState (s { r28 = v })
R29 -> putState (s { r29 = v })
R30 -> putState (s { r30 = v })
R31 -> putState (s { r31 = v })
-- Field accessors over DLXState (plain get/put pairs).
getPC :: DLXM W32
getPC = do s <- getState
return (pc s)
putPC :: W32 -> DLXM ()
putPC v = do s <- getState
putState (s { pc = v })
-- Interrupt address register.
-- NOTE(review): the DLXState record as shown in this chunk has no `iar`
-- field (its declaration line appears extraction-mangled) — confirm the
-- field exists upstream, otherwise these two cannot compile.
getIAR :: DLXM W32
getIAR = do s <- getState
return (iar s)
putIAR :: W32 -> DLXM ()
putIAR v = do s <- getState
putState (s { iar = v })
getInputs :: DLXM Inputs
getInputs = do s <- getState
return (inputs s)
putInputs :: Inputs -> DLXM ()
putInputs i = do s <- getState
putState (s { inputs = i })
getOutputs :: DLXM Outputs
getOutputs = do s <- getState
return (outputs s)
putOutputs :: Outputs -> DLXM ()
putOutputs o = do s <- getState
putState (s { outputs = o })
-- Drive only the address output pin, leaving the rest untouched.
putAddrOut :: W32 -> DLXM ()
putAddrOut a = do o <- getOutputs
putOutputs (o { addrOut = a })
-- Advance one machine cycle: emit the current outputs through `signal`
-- and latch the environment's response as the new inputs.
tick :: DLXM ()
tick = do o <- getOutputs
i <- signal o
putInputs i
-- Sign-extend 26 -> 32 bits: the top bit is matched explicitly and
-- replicated into the 7 high result bits (6 pad bits plus the sign bit
-- itself); the remaining 25 bits pass through unchanged.
signextend26_to_32 :: W26 -> W32
signextend26_to_32 (W26 Zero b24 b23 b22 b21 b20 b19 b18 b17 b16 b15 b14
b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0)
= W32 Zero Zero Zero Zero Zero Zero Zero
b24 b23 b22 b21 b20 b19 b18 b17
b16 b15 b14 b13 b12 b11 b10 b9
b8 b7 b6 b5 b4 b3 b2 b1 b0
signextend26_to_32 (W26 One b24 b23 b22 b21 b20 b19 b18 b17 b16 b15 b14
b13 b12 b11 b10 b9 b8 b7 b6 b5 b4 b3 b2 b1 b0)
= W32 One One One One One One One
b24 b23 b22 b21 b20 b19 b18 b17
b16 b15 b14 b13 b12 b11 b10 b9
b8 b7 b6 b5 b4 b3 b2 b1 b0
-- | Sign-extend 16 -> 32 bits.
-- BUG FIX: the previous version padded with Zero unconditionally, i.e. it
-- ZERO-extended despite its name — making every negative immediate wrong
-- for addi/beqz/bnez/lw/slti/etc. Match the sign bit (first pattern
-- variable = MSB, as in itype's W16 field) and replicate it, exactly as
-- signextend26_to_32 above does for 26-bit values.
signextend16_32 :: W16 -> W32
signextend16_32 (W16 Zero b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 b11 b12 b13 b14 b15)
    = W32 Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero Zero
          Zero b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 b11 b12 b13 b14 b15
signextend16_32 (W16 One b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 b11 b12 b13 b14 b15)
    = W32 One One One One One One One One One One One One One One One One
          One b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 b11 b12 b13 b14 b15
-- Map a 5-bit register field to its Register constructor in straight
-- binary order (00000 -> R0 ... 11111 -> R31). Total over W5.
decodeReg w5 = case w5 of
W5 Zero Zero Zero Zero Zero -> R0
W5 Zero Zero Zero Zero One -> R1
W5 Zero Zero Zero One Zero -> R2
W5 Zero Zero Zero One One -> R3
W5 Zero Zero One Zero Zero -> R4
W5 Zero Zero One Zero One -> R5
W5 Zero Zero One One Zero -> R6
W5 Zero Zero One One One -> R7
W5 Zero One Zero Zero Zero -> R8
W5 Zero One Zero Zero One -> R9
W5 Zero One Zero One Zero -> R10
W5 Zero One Zero One One -> R11
W5 Zero One One Zero Zero -> R12
W5 Zero One One Zero One -> R13
W5 Zero One One One Zero -> R14
W5 Zero One One One One -> R15
W5 One Zero Zero Zero Zero -> R16
W5 One Zero Zero Zero One -> R17
W5 One Zero Zero One Zero -> R18
W5 One Zero Zero One One -> R19
W5 One Zero One Zero Zero -> R20
W5 One Zero One Zero One -> R21
W5 One Zero One One Zero -> R22
W5 One Zero One One One -> R23
W5 One One Zero Zero Zero -> R24
W5 One One Zero Zero One -> R25
W5 One One Zero One Zero -> R26
W5 One One Zero One One -> R27
W5 One One One Zero Zero -> R28
W5 One One One Zero One -> R29
W5 One One One One Zero -> R30
W5 One One One One One -> R31
|
dda88261ec98757776e59297086fcb507ca65c10b4d5219e8d7ab1e706e3c6f3 | DimaSamoz/mezzo | Music.hs |
-----------------------------------------------------------------------------
-- |
-- Module : Mezzo.Model.Music
-- Description : Mezzo music algebra
-- Copyright   : (c)
-- License     : MIT
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- Algebraic description of music with type-level constraints.
--
-----------------------------------------------------------------------------
module Mezzo.Model.Music
(
-- * Music
Music (..)
, Signature (..)
-- * Constraints
, ValidNote
, ValidRest
, ValidChord
, ValidProg
, ValidHom
, ValidMel
, ValidHarm
, ValidTripl
) where
import Data.Kind
import GHC.TypeLits
import Text.PrettyPrint.Boxes
import Mezzo.Model.Prim
import Mezzo.Model.Harmony.Motion
import Mezzo.Model.Harmony.Chords
import Mezzo.Model.Harmony.Functional
import Mezzo.Model.Types
import Mezzo.Model.Reify
import Mezzo.Model.Rules.RuleSet
infixl 3 :|:
infixl 4 :-:
-------------------------------------------------------------------------------
-- The 'Music' datatype
-------------------------------------------------------------------------------
-- | Properties of a musical piece: the time signature, the key signature and rule set.
data Signature (t :: TimeSignature) (k :: KeyType) (ruleset :: Type) = Sig
-- | A piece of music consisting of parallel and sequential composition of notes
-- and rests, subject to constraints.
--
-- Currently enforced constraints are:
--
-- * Height (number of voices) of two sequentially composed pieces must be equal.
-- * Width (number of temporal units) of two parallelly composed pieces must be equal.
-- * Sequentially composed voices cannot have any augmented, diminished or seventh leaps.
-- * Parallelly composed pieces cannot have any minor second or major seventh harmonic intervals.
-- * Music must not contain parallel or concealed unisons, fifths or octaves.
--
data Music :: forall n l t k r. Signature t k r -> Partiture n l -> Type where
-- | Sequential or melodic composition of music.
(:|:) :: ValidMel s m1 m2 => Music s m1 -> Music s m2 -> Music s (m1 +|+ m2)
-- | Parallel or harmonic composition of music.
(:-:) :: ValidHarm s m1 m2 => Music s m1 -> Music s m2 -> Music s (m1 +-+ m2)
-- | A note specified by a pitch and a duration.
Note :: ValidNote s r d => Root r -> Dur d -> Music s (FromRoot r d)
-- | A rest specified by a duration.
Rest :: ValidRest s d => Dur d -> Music s (FromSilence d)
-- | A chord specified by a chord type and a duration.
Chord :: ValidChord s c d => Cho c -> Dur d -> Music s (FromChord c d)
-- | A progression specified by a time signature, and its progression schema.
Progression :: ValidProg r t p => Prog p -> Music (Sig :: Signature t k r) (FromProg p t)
-- | A homophonic composition with a melody line and an accompaniment.
Homophony :: ValidHom s m a => Music s m -> Music s a -> Music s (m +-+ a)
-- | A triplet with a nominal duration and three pitches.
Triplet :: ValidTripl s d r1 r2 r3 => Dur d -> Root r1 -> Root r2 -> Root r3 -> Music s (FromTriplet d r1 r2 r3)
-------------------------------------------------------------------------------
-- Musical constraints
-- Specifications of the rules that valid musical terms have to follow.
-------------------------------------------------------------------------------
-- | Select the active rule set.
type ActRuleSet = Classical
-- | Ensures that two pieces of music can be composed sequentially.
type ValidMel (s :: Signature t k r) m1 m2 =
MelConstraints r m1 m2
-- | Ensures that two pieces of music can be composed in parallel.
type ValidHarm (s :: Signature t k r) m1 m2 =
HarmConstraints r m1 m2
-- | Ensures that the note is valid.
type ValidNote (s :: Signature t k r) ro d =
(NoteConstraints r ro d, IntRep ro, Primitive d)
-- | Ensures that the rest is valid.
type ValidRest (s :: Signature t k r) d =
(RestConstraints r d, Primitive d)
-- | Ensures that the chord is valid.
type ValidChord (s :: Signature t k r) (c :: ChordType n) d =
(ChordConstraints r c d, IntListRep c, Primitive n, Primitive d)
-- | Ensures that a progression is valid.
type ValidProg r t p =
(ProgConstraints r t p, IntLListRep p, IntRep t, KnownNat t)
-- | Ensures that a homophonic composition is valid.
type ValidHom (s :: Signature t k r) m a =
HomConstraints r m a
-- | Ensures that a triplet is valid.
type ValidTripl (s :: Signature t k r) d r1 r2 r3 =
( TriplConstraints r d r1 r2 r3, IntRep r1, IntRep r2, IntRep r3, Primitive d
, Primitive (HalfOf d), NoteConstraints r r1 d, NoteConstraints r r2 (HalfOf d)
, NoteConstraints r r3 (HalfOf d))
-------------------------------------------------------------------------------
-- Pretty-printing
-------------------------------------------------------------------------------
-- 'show' renders a score by drawing its box representation (see ppMusic).
instance Show (Music s m) where show = render . ppMusic
-- | Pretty-print a 'Music' value.
ppMusic :: Music s m -> Box
-- Exhaustiveness fix: the original omitted the Homophony and Triplet
-- constructors declared in the Music GADT above, so printing either
-- crashed with a pattern-match failure.
ppMusic (Note r d) = char '|' <+> doc r <+> doc d
ppMusic (Rest d) = char '|' <+> text "~~~~" <+> doc d
ppMusic (m1 :|: m2) = ppMusic m1 <> emptyBox 1 1 <> ppMusic m2
ppMusic (m1 :-: m2) = ppMusic m1 // ppMusic m2
ppMusic (Chord c d) = char '|' <+> doc c <+> doc d
ppMusic (Progression p) = text "Prog:" <+> doc p
-- Homophony is a vertical (harmonic) pairing, so stack like (:-:).
ppMusic (Homophony m a) = ppMusic m // ppMusic a
-- Render a triplet as its nominal duration followed by the three pitches.
ppMusic (Triplet d r1 r2 r3) = char '|' <+> doc d <+> doc r1 <+> doc r2 <+> doc r3
-- | Convert a showable value into a pretty-printed box.
doc :: Show a => a -> Box
doc x = text (show x)
| null | https://raw.githubusercontent.com/DimaSamoz/mezzo/13365da8907a714e34540f7a1d99898b67816cc9/src/Mezzo/Model/Music.hs | haskell | ---------------------------------------------------------------------------
|
Module : Mezzo.Model.Music
Description : Mezzo music algebra
Maintainer :
Stability : experimental
Portability : portable
---------------------------------------------------------------------------
* Music
* Constraints
-----------------------------------------------------------------------------
The 'Music' datatype
-----------------------------------------------------------------------------
| Properties of a musical piece: the time signature, the key signature and rule set.
| A piece of music consisting of parallel and sequential composition of notes
and rests, subject to constraints.
Currently enforced constraints are:
| Sequential or melodic composition of music.
| Parallel or harmonic composition of music.
| A note specified by a pitch and a duration.
| A rest specified by a duration.
| A chord specified by a chord type and a duration.
| A progression specified by a time signature, and its progression schema.
| A homophonic composition with a melody line and an accompaniment.
-----------------------------------------------------------------------------
Musical constraints
Specifications of the rules that valid musical terms have to follow.
-----------------------------------------------------------------------------
| Select the active rule set.
| Ensures that the note is valid.
| Ensures that the rest is valid.
| Ensures that the chord is valid.
| Ensures that a progression is valid.
| Ensures that a homophonic composition is valid.
| Ensures that a triplet is valid.
-----------------------------------------------------------------------------
Pretty-printing
-----------------------------------------------------------------------------
| Pretty-print a 'Music' value.
| Convert a showable value into a pretty-printed box. |
Copyright : ( c )
License : MIT
Algebraic description of music with type - level constraints .
module Mezzo.Model.Music
(
Music (..)
, Signature (..)
, ValidNote
, ValidRest
, ValidChord
, ValidProg
, ValidHom
, ValidMel
, ValidHarm
, ValidTripl
) where
import Data.Kind
import GHC.TypeLits
import Text.PrettyPrint.Boxes
import Mezzo.Model.Prim
import Mezzo.Model.Harmony.Motion
import Mezzo.Model.Harmony.Chords
import Mezzo.Model.Harmony.Functional
import Mezzo.Model.Types
import Mezzo.Model.Reify
import Mezzo.Model.Rules.RuleSet
infixl 3 :|:
infixl 4 :-:
data Signature (t :: TimeSignature) (k :: KeyType) (ruleset :: Type) = Sig
* Height ( number of voices ) of two sequentially composed pieces must be equal .
* ( number of temporal units ) of two parallelly composed pieces must be equal .
* Sequentially composed voices can not have any augmented , diminished or seventh leaps .
* Parallelly composed pieces can not have any minor second or major seventh harmonic intervals .
* Music must not contain parallel or concealed unisons , fifths or octaves .
data Music :: forall n l t k r. Signature t k r -> Partiture n l -> Type where
(:|:) :: ValidMel s m1 m2 => Music s m1 -> Music s m2 -> Music s (m1 +|+ m2)
(:-:) :: ValidHarm s m1 m2 => Music s m1 -> Music s m2 -> Music s (m1 +-+ m2)
Note :: ValidNote s r d => Root r -> Dur d -> Music s (FromRoot r d)
Rest :: ValidRest s d => Dur d -> Music s (FromSilence d)
Chord :: ValidChord s c d => Cho c -> Dur d -> Music s (FromChord c d)
Progression :: ValidProg r t p => Prog p -> Music (Sig :: Signature t k r) (FromProg p t)
Homophony :: ValidHom s m a => Music s m -> Music s a -> Music s (m +-+ a)
| A triplet with a nominal duration and three pitches .
Triplet :: ValidTripl s d r1 r2 r3 => Dur d -> Root r1 -> Root r2 -> Root r3 -> Music s (FromTriplet d r1 r2 r3)
type ActRuleSet = Classical
| Ensures that two pieces of music can be composed sequentially .
type ValidMel (s :: Signature t k r) m1 m2 =
MelConstraints r m1 m2
| Ensures that two pieces of music can be composed in parallel .
type ValidHarm (s :: Signature t k r) m1 m2 =
HarmConstraints r m1 m2
type ValidNote (s :: Signature t k r) ro d =
(NoteConstraints r ro d, IntRep ro, Primitive d)
type ValidRest (s :: Signature t k r) d =
(RestConstraints r d, Primitive d)
type ValidChord (s :: Signature t k r) (c :: ChordType n) d =
(ChordConstraints r c d, IntListRep c, Primitive n, Primitive d)
type ValidProg r t p =
(ProgConstraints r t p, IntLListRep p, IntRep t, KnownNat t)
type ValidHom (s :: Signature t k r) m a =
HomConstraints r m a
type ValidTripl (s :: Signature t k r) d r1 r2 r3 =
( TriplConstraints r d r1 r2 r3, IntRep r1, IntRep r2, IntRep r3, Primitive d
, Primitive (HalfOf d), NoteConstraints r r1 d, NoteConstraints r r2 (HalfOf d)
, NoteConstraints r r3 (HalfOf d))
instance Show (Music s m) where show = render . ppMusic
ppMusic :: Music s m -> Box
ppMusic (Note r d) = char '|' <+> doc r <+> doc d
ppMusic (Rest d) = char '|' <+> text "~~~~" <+> doc d
ppMusic (m1 :|: m2) = ppMusic m1 <> emptyBox 1 1 <> ppMusic m2
ppMusic (m1 :-: m2) = ppMusic m1 // ppMusic m2
ppMusic (Chord c d) = char '|' <+> doc c <+> doc d
ppMusic (Progression p) = text "Prog:" <+> doc p
doc :: Show a => a -> Box
doc = text . show
|
79eb20b37d1a2e60ffee433dd25ecb46f90b3104637bdcf3f3201a596d975fab | Chris00/ocaml-dropbox | shared_folders.ml | open Lwt
module D = Dropbox_lwt_unix
(** If there is no argument, return the list of all shared folders.
Otherwise, put [shared_folder_id] to get the metadata for a specific
shared folder. *)
let shared_folders t shared_folder_id =
D.shared_folders ~shared_folder_id t >>= fun shared_folders ->
Lwt_list.iter_s
(fun shared_folder ->
Lwt_io.printlf "%s" (Dropbox_j.string_of_shared_folder shared_folder))
shared_folders
let main t args =
match args with
| [] -> shared_folders t ""
| _ -> Lwt_list.iter_p (shared_folders t) args
let () =
Common.run main
| null | https://raw.githubusercontent.com/Chris00/ocaml-dropbox/36b222269e6bc7e5486cbb69738841d87a1212fb/tests/shared_folders.ml | ocaml | * If there is no argument, return the list of all shared folders.
Otherwise, put [shared_folder_id] to get the metadata for a specific
shared folder. | open Lwt
module D = Dropbox_lwt_unix
let shared_folders t shared_folder_id =
D.shared_folders ~shared_folder_id t >>= fun shared_folders ->
Lwt_list.iter_s
(fun shared_folder ->
Lwt_io.printlf "%s" (Dropbox_j.string_of_shared_folder shared_folder))
shared_folders
let main t args =
match args with
| [] -> shared_folders t ""
| _ -> Lwt_list.iter_p (shared_folders t) args
let () =
Common.run main
|
8e3462c4c015e30314d135a25e06217893e343569b76414100668cb6c513c8ef | softwarelanguageslab/maf | R5RS_scp1_ring-squares-4.scm | ; Changes:
* removed : 0
* added : 1
* swaps : 0
* negated predicates : 1
; * swapped branches: 0
; * calls to id fun: 0
(letrec ((result ())
(output (lambda (i)
(set! result (cons i result))))
(kw-lijst (lambda (lst)
(letrec ((loop (lambda (l)
(let ((rest (cdr l))
(n (list (* (car l) (car l)))))
(set-cdr! l n)
(set-cdr! n rest)
(if (not (eq? rest lst)) (loop rest) #f)))))
(loop lst)
lst)))
(print-ring (lambda (r)
(letrec ((aux (lambda (l)
(if (not (null? l))
(if (<change> (eq? (cdr l) r) (not (eq? (cdr l) r)))
(begin
(output " ")
(<change>
()
(display " "))
(output (car l))
(output "..."))
(begin
(output " ")
(output (car l))
(aux (cdr l))))
#f))))
(aux r)
#t)))
(last-cons (cons 3 ()))
(test-lst (cons 1 (cons 4 last-cons))))
(set-cdr! last-cons test-lst)
(print-ring (kw-lijst test-lst))
(equal?
result
(__toplevel_cons
"..."
(__toplevel_cons
9
(__toplevel_cons
" "
(__toplevel_cons
3
(__toplevel_cons
" "
(__toplevel_cons
16
(__toplevel_cons
" "
(__toplevel_cons
4
(__toplevel_cons
" "
(__toplevel_cons 1 (__toplevel_cons " " (__toplevel_cons 1 (__toplevel_cons " " ()))))))))))))))) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_scp1_ring-squares-4.scm | scheme | Changes:
* swapped branches: 0
* calls to id fun: 0 | * removed : 0
* added : 1
* swaps : 0
* negated predicates : 1
(letrec ((result ())
(output (lambda (i)
(set! result (cons i result))))
(kw-lijst (lambda (lst)
(letrec ((loop (lambda (l)
(let ((rest (cdr l))
(n (list (* (car l) (car l)))))
(set-cdr! l n)
(set-cdr! n rest)
(if (not (eq? rest lst)) (loop rest) #f)))))
(loop lst)
lst)))
(print-ring (lambda (r)
(letrec ((aux (lambda (l)
(if (not (null? l))
(if (<change> (eq? (cdr l) r) (not (eq? (cdr l) r)))
(begin
(output " ")
(<change>
()
(display " "))
(output (car l))
(output "..."))
(begin
(output " ")
(output (car l))
(aux (cdr l))))
#f))))
(aux r)
#t)))
(last-cons (cons 3 ()))
(test-lst (cons 1 (cons 4 last-cons))))
(set-cdr! last-cons test-lst)
(print-ring (kw-lijst test-lst))
(equal?
result
(__toplevel_cons
"..."
(__toplevel_cons
9
(__toplevel_cons
" "
(__toplevel_cons
3
(__toplevel_cons
" "
(__toplevel_cons
16
(__toplevel_cons
" "
(__toplevel_cons
4
(__toplevel_cons
" "
(__toplevel_cons 1 (__toplevel_cons " " (__toplevel_cons 1 (__toplevel_cons " " ()))))))))))))))) |
b812f6e0a74894720814520d451fc0c4831ad7172eb8ea16077236baec4cdc29 | emqx/pulsar-client-erl | pulsar_producers_sup.erl | Copyright ( c ) 2013 - 2019 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(pulsar_producers_sup).
-behaviour(supervisor).
-export([start_link/0, init/1]).
-export([ensure_present/3, ensure_absence/2]).
-define(SUPERVISOR, ?MODULE).
-define(WORKER_ID(ClientId, Name), {ClientId, Name}).
start_link() ->
supervisor:start_link({local, ?SUPERVISOR}, ?MODULE, []).
init([]) ->
SupFlags = #{strategy => one_for_one,
intensity => 10,
period => 5
},
Children = [], %% dynamically added/stopped
{ok, {SupFlags, Children}}.
%% ensure a client started under supervisor
ensure_present(ClientId, Topic, ProducerOpts) ->
ChildSpec = child_spec(ClientId, Topic, ProducerOpts),
case supervisor:start_child(?SUPERVISOR, ChildSpec) of
{ok, Pid} -> {ok, Pid};
{error, {already_started, Pid}} -> {ok, Pid};
{error, {{already_started, Pid}, _}} -> {ok, Pid};
{error, already_present} -> {error, not_running}
end.
%% ensure client stopped and deleted under supervisor
ensure_absence(ClientId, Name) ->
Id = ?WORKER_ID(ClientId, Name),
case supervisor:terminate_child(?SUPERVISOR, Id) of
ok ->
case supervisor:delete_child(?SUPERVISOR, Id) of
ok -> ok;
{error, not_found} -> ok
end;
{error, not_found} -> ok
end.
child_spec(ClientId, Topic, ProducerOpts) ->
#{id => ?WORKER_ID(ClientId, get_name(ProducerOpts)),
start => {pulsar_producers, start_link, [ClientId, Topic, ProducerOpts]},
restart => transient,
type => worker,
modules => [pulsar_producers]
}.
get_name(ProducerOpts) ->
maps:get(name, ProducerOpts, pulsar_producers).
| null | https://raw.githubusercontent.com/emqx/pulsar-client-erl/15261e052d936a7927a21bf522292959ed9ced31/src/pulsar_producers_sup.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
dynamically added/stopped
ensure a client started under supervisor
ensure client stopped and deleted under supervisor | Copyright ( c ) 2013 - 2019 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(pulsar_producers_sup).
-behaviour(supervisor).
-export([start_link/0, init/1]).
-export([ensure_present/3, ensure_absence/2]).
-define(SUPERVISOR, ?MODULE).
-define(WORKER_ID(ClientId, Name), {ClientId, Name}).
start_link() ->
supervisor:start_link({local, ?SUPERVISOR}, ?MODULE, []).
init([]) ->
SupFlags = #{strategy => one_for_one,
intensity => 10,
period => 5
},
{ok, {SupFlags, Children}}.
ensure_present(ClientId, Topic, ProducerOpts) ->
ChildSpec = child_spec(ClientId, Topic, ProducerOpts),
case supervisor:start_child(?SUPERVISOR, ChildSpec) of
{ok, Pid} -> {ok, Pid};
{error, {already_started, Pid}} -> {ok, Pid};
{error, {{already_started, Pid}, _}} -> {ok, Pid};
{error, already_present} -> {error, not_running}
end.
ensure_absence(ClientId, Name) ->
Id = ?WORKER_ID(ClientId, Name),
case supervisor:terminate_child(?SUPERVISOR, Id) of
ok ->
case supervisor:delete_child(?SUPERVISOR, Id) of
ok -> ok;
{error, not_found} -> ok
end;
{error, not_found} -> ok
end.
%% @doc Build the supervisor child spec for a pulsar_producers worker.
%% restart => transient: the worker is restarted only after an abnormal
%% exit; a normal stop leaves only the spec behind (see ensure_present/3).
child_spec(ClientId, Topic, ProducerOpts) ->
    #{id => ?WORKER_ID(ClientId, get_name(ProducerOpts)),
      start => {pulsar_producers, start_link, [ClientId, Topic, ProducerOpts]},
      restart => transient,
      type => worker,
      modules => [pulsar_producers]
     }.
%% @doc The worker name from the producer options map, defaulting to
%% the atom pulsar_producers when no name was given.
get_name(ProducerOpts) ->
    case maps:find(name, ProducerOpts) of
        {ok, Name} -> Name;
        error -> pulsar_producers
    end.
|
a635096f443586965f16d44497b8bd3fa873569353ee16b6aa686a511f3d0823 | cloudant-labs/couchdb-erlfdb | erlfdb_01_basic_test.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
%% Basic smoke tests for the erlfdb NIF: shared-object loading and
%% error-code to error-string lookup.
-module(erlfdb_01_basic_test).
-include_lib("eunit/include/eunit.hrl").
%% Forces the NIF to load; the test fails if loading raises.
load_test() ->
    erlfdb_nif:ohai().
%% get_error/1 maps FoundationDB error codes to description binaries;
%% unknown codes yield the sentinel <<"UNKNOWN_ERROR">>.
get_error_string_test() ->
    ?assertEqual(<<"Success">>, erlfdb_nif:get_error(0)),
    ?assertEqual(<<"Transaction exceeds byte limit">>,
        erlfdb_nif:get_error(2101)),
    ?assertEqual(<<"UNKNOWN_ERROR">>, erlfdb_nif:get_error(9999)).
| null | https://raw.githubusercontent.com/cloudant-labs/couchdb-erlfdb/510664facbc28c946960db2d12b3baf33923f4ea/test/erlfdb_01_basic_test.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(erlfdb_01_basic_test).
-include_lib("eunit/include/eunit.hrl").
load_test() ->
erlfdb_nif:ohai().
get_error_string_test() ->
?assertEqual(<<"Success">>, erlfdb_nif:get_error(0)),
?assertEqual(<<"Transaction exceeds byte limit">>,
erlfdb_nif:get_error(2101)),
?assertEqual(<<"UNKNOWN_ERROR">>, erlfdb_nif:get_error(9999)).
|
a99946a2291adf990f772f3b6e68eb38f9525813968b54c930129002a30f88a6 | huangjs/cl | transs.lisp | -*- Mode : Lisp ; Package : Maxima ; Syntax : Common - Lisp ; Base : 10 -*- ; ; ; ;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; The data in this file contains enhancments. ;;;;;
;;; ;;;;;
Copyright ( c ) 1984,1987 by , University of Texas ; ; ; ; ;
;;; All rights reserved ;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
( c ) Copyright 1980 Massachusetts Institute of Technology ; ; ;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package :maxima)
(macsyma-module transs)
(transl-module transs)
(defmvar *transl-file-debug* nil
"set this to T if you don't want to have the temporary files
used automaticaly deleted in case of errors.")
;;; User-hacking code, file-io, translator toplevel.
There are various macros to cons - up filename TEMPLATES
which to into . The filenames should be the only
;;; system dependant part of the code, although certain behavior
of RENAMEF / MERGEF / DELETE - FILE is assumed .
(defmvar $tr_output_file_default '$trlisp
"This is the second file name to be used for translated lisp
output.")
(defmvar $tr_file_tty_messagesp nil
"It TRUE messages about translation of the file are sent
to the TTY also.")
(defmvar $tr_windy t
"Generate helpful comments and programming hints.")
(defvar *translation-msgs-files* nil
"Where the warning and other comments goes.")
(defvar $tr_version (get 'transl-autoload 'version))
(defmvar transl-file nil "output stream of $compfile and $translate_file")
(defmvar $compgrind nil "If `true' lisp output will be pretty-printed.")
(defmvar $tr_true_name_of_file_being_translated nil
"This is set by TRANSLATE_FILE for use by user macros
which want to know the name of the source file.")
(defmvar $tr_state_vars
'((mlist) $transcompile $tr_semicompile
$translate_fast_arrays
$tr_warn_undeclared
$tr_warn_meval
$tr_warn_fexpr
$tr_warn_mode
$tr_warn_undefined_variable
$tr_function_call_default
$tr_array_as_ref
$tr_numer
$define_variable))
(defmacro trlisp-inputname-d1 () ;; so hacks on DEFAULTF will not
'`,(pathname "")) ;; stray the target.
(defmacro trlisp-outputname-d1 ()
'`,(pathname (maybe-invert-string-case (symbol-name (stripdollar $tr_output_file_default)))))
(defmacro trlisp-outputname ()
'`,(make-pathname :type "LISP"))
(defmacro trlisp-outputname-temp ()
'`,(pathname "_trli_"))
(defmacro trtags-outputname ()
'`,(pathname "tags"))
(defmacro trtags-outputname-temp ()
'`,(pathname "_tags_"))
(defmacro trcomments-outputname ()
'`,(pathname "unlisp"))
(defmacro trcomments-outputname-temp ()
'`,(pathname "_unli_"))
(defvar declares nil)
;; COMPFILE(filename, f1, f2, ...) -- translate the named Maxima
;; functions and write the resulting Lisp source to FILENAME.  The
;; pseudo-arguments ALL or FUNCTIONS select every user-defined function
;; on the $FUNCTIONS infolist.  On success, returns the output file
;; name; on any translation failure the partial output file is deleted.
(defmspec $compfile (forms)
  (setq forms (cdr forms))
  ;; Only a filename was supplied: nothing to translate.
  (if (eq 1 (length forms))
      (merror "compfile: bravely refusing to write file of length 0"))
  (bind-transl-state
   ;; $TRANSCOMPILE so declarations are emitted; *IN-COMPFILE* marks the
   ;; translation context for the rest of the translator.
   (setq $transcompile t
         *in-compfile* t)
   (let
       ((out-file-name (namestring (maxima-string (meval (car forms)))))
        (t-error nil)
        (*translation-msgs-files* nil))
     (pop forms)                        ; drop the filename argument
     (unwind-protect
          (progn
            (setq transl-file (open out-file-name :direction :output :if-exists :overwrite :if-does-not-exist :create :element-type 'character))
            ;; ALL / FUNCTIONS expand to every user-defined function.
            (cond ((or (member '$all forms :test #'eq)
                       (member '$functions forms :test #'eq))
                   (setq forms (mapcar #'caar (cdr $functions)))))
            ;; Translate each item; the translator's per-function state
            ;; (DECLARES, TR-ABORT, ...) is rebound afresh each pass.
            (do ((l forms (cdr l))
                 (declares nil nil)
                 (tr-abort nil nil)
                 (item)
                 (lexprs nil nil)
                 (fexprs nil nil)
                 (t-item))
                ((null l))
              (setq item (car l))
              (cond ((not (atom item))
                     ;; A non-atomic form: translate and print directly.
                     (print* (dconvx (translate item))))
                    (t
                     (setq t-item (compile-function (setq item ($verbify item))))
                     (cond (tr-abort
                            ;; Record the failure; the file is deleted in
                            ;; the cleanup clause below.
                            (setq t-error (print-abort-msg item 'compfile)))
                           (t
                            (when $compgrind
                              (mformat transl-file "~2%;; Function ~:@M~%" item))
                            (print* t-item))))))
            (to-macsyma-namestring out-file-name))
       ;; unwind-protected cleanup: always close; delete on failure.
       (if transl-file (close transl-file))
       (if t-error (delete-file transl-file))))))
(defun compile-function (f)
  "Translate the single Maxima function F, logging progress to
*TRANSLATION-MSGS-FILES*.  Returns the translated Lisp form, or NIL
when the translator set TR-ABORT."
  (mformat *translation-msgs-files* "~%Translating ~:@M" f)
  (let ((translated (tr-mfun f)))
    (if tr-abort
        nil
        translated)))
(defvar tr-defaultf nil
"A default only for the case of no arguments to $translate_file")
;;; Temporary hack during debugging of this code.
;; Merge pathname Y against defaults X -- note the argument order is
;; the reverse of CL:MERGE-PATHNAMES.  Kept as a temporary debugging
;; shim (see the comment above its definition).
(defun mergef (x y)
  (merge-pathnames y x))
;; COMPILE_FILE(input_file [, bin_file [, translation_output_file]]) --
;; translate a Maxima source file to Lisp (unless it is already a .LISP
;; file) and compile the result.  Returns a Maxima list of the file
;; names involved, with the compiled binary appended.
(defun $compile_file (input-file &optional bin-file translation-output-file &aux result)
  (setq input-file (maxima-string input-file))
  (and bin-file (setq bin-file (maxima-string bin-file)))
  (and translation-output-file
       (setq translation-output-file (maxima-string translation-output-file)))
  (cond ((string-equal (pathname-type input-file) "LISP")
         ;; Already Lisp: skip translation, compile directly.
         (setq result (list '(mlist) input-file)))
        (t (setq result (translate-file input-file translation-output-file))
           ;; third element of translate-file's result is the .LISP file
           (setq input-file (third result))))
  #+(or cmu scl sbcl clisp allegro openmcl lispworks ecl)
  (multiple-value-bind (output-truename warnings-p failure-p)
      (if bin-file
          (compile-file input-file :output-file bin-file)
          (compile-file input-file))
    (declare (ignore warnings-p))
    ;; If the compiler encountered errors, don't set bin-file to
    ;; indicate that we found errors.  Is this what we want?
    (unless failure-p
      (setq bin-file output-truename)))
  #-(or cmu scl sbcl clisp allegro openmcl lispworks ecl)
  (setq bin-file (compile-file input-file :output-file bin-file))
  (append result (list bin-file)))
;; Convert the Maxima symbol SYMB to a string -- presumably applying
;; Maxima's usual case inversion (PRINT-INVERT-CASE is defined
;; elsewhere); confirm against its definition.
(defun maxima-string (symb)
  (print-invert-case symb))
;; Maxima user function TRANSLATE_FILE(input_file [, output_file]):
;; translate the Maxima source in INPUT-FILE to Lisp, optionally naming
;; the output file.
(defmfun $translate_file (input-file &optional output-file)
  (let ((in (maxima-string input-file))
        (out (and output-file (maxima-string output-file))))
    (translate-file in out)))
(defmvar $tr_gen_tags nil
"If TRUE, TRANSLATE_FILE generates a TAGS file for use by the text editor")
(defvar *pretty-print-translation* t)
;; Define a pprinter for defmtrfun.
#-gcl
(defun pprint-defmtrfun (stream s)
(pprint-logical-block (stream s :prefix "(" :suffix ")")
(write (pprint-pop) :stream stream)
(write-char #\space stream)
(write (pprint-pop) :stream stream)
(pprint-indent :block 4 stream)
(pprint-newline :mandatory stream)
(write (pprint-pop) :stream stream)
(pprint-indent :block 2 stream)
(pprint-newline :mandatory stream)
(loop
(pprint-exit-if-list-exhausted)
(write (pprint-pop) :stream stream)
(write-char #\space stream)
(pprint-newline :linear stream))))
;; Read Maxima expressions from IN-STREAM one at a time, translate each
;; with TRANSLATE-MACEXPR-TOPLEVEL, and print the resulting Lisp to
;; OUT-STREAM.  Sets up an inverted-case readtable (on non-GCL lisps)
;; and a pretty-printer entry for DEFMTRFUN forms.
(defun call-batch1 (in-stream out-stream &aux expr transl)
  (cleanup)
  ;; we want the thing to start with a newline..
  (newline in-stream)
  (let ((*readtable* (copy-readtable nil))
        #-gcl (*print-pprint-dispatch* (copy-pprint-dispatch)))
    #-gcl
    (progn
      ;; Invert readtable case, except on lisps already running in a
      ;; lowercase/case-sensitive mode.
      #-(or scl allegro)
      (setf (readtable-case *readtable*) :invert)
      #+(or scl allegro)
      (unless #+scl (eq ext:*case-mode* :lower)
              #+allegro (eq excl:*current-case-mode* :case-sensitive-lower)
              (setf (readtable-case *readtable*) :invert))
      (set-pprint-dispatch '(cons (member maxima::defmtrfun))
                           #'pprint-defmtrfun))
    ;; MREAD returns a form whose THIRD element is the parsed expression;
    ;; loop until end of input.
    (loop while (and (setq expr (mread in-stream)) (consp expr))
       do (setq transl (translate-macexpr-toplevel (third expr)))
         (cond
           (*pretty-print-translation*
            (pprint transl out-stream))
           (t
            (format out-stream "~a" transl))))))
(defun translate-from-stream (from-stream &key to-stream eval pretty (print-function #'prin1)
&aux expr transl)
(bind-transl-state
(loop
with *in-translate-file* = t
with *print-pretty* = pretty
while (and (setq expr (mread from-stream)) (consp expr))
do (setq transl (translate-macexpr-toplevel (third expr)))
(cond (eval (eval transl)))
(cond (to-stream (funcall print-function transl to-stream)))
(loop for v in forms-to-compile-queue
do (show v to-stream)
when to-stream
do (funcall print-function v to-stream)
when eval
do (eval v))
(setq forms-to-compile-queue nil))))
(defvar trf-start-hook nil)
;; Open X for output.  NOTE(review): despite the name, this does not
;; explicitly delete an existing file -- it relies on the
;; implementation's default :IF-EXISTS behavior; confirm this matches
;; what callers expect.
(defun delete-old-and-open (x)
  (open x :direction :output))
(defun alter-pathname (pathname &rest options)
  "Return a copy of PATHNAME with the MAKE-PATHNAME component OPTIONS
\(e.g. :TYPE \"LISP\") replaced; all other components are kept."
  (let ((defaults (pathname pathname)))
    (apply #'make-pathname :defaults defaults options)))
(defun delete-with-side-effects-if (test list)
  "Rudimentary destructive DELETE-IF which, however, is guaranteed to
call TEST exactly once for each element of LIST, in order from left to
right.  Returns the (possibly new) head of the list."
  ;; Skip past any leading elements that satisfy TEST.
  (do ()
      ((not (and list (funcall test (car list)))))
    (pop list))
  ;; Walk the remainder, splicing out matching cells in place.
  (do ((tail list))
      ((null (cdr tail)) list)
    (if (funcall test (cadr tail))
        (setf (cdr tail) (cddr tail))
        (setq tail (cdr tail)))))
(defun insert-necessary-function-declares (stream)
  "Write to STREAM two lists: The functions which are known to be
translated without actually being in the list passed to
$DECLARE_TRANSLATED, and those which are not known to be
translated."
  (let (translated hint)
    ;; Prune *UNTRANSLATED-FUNCTIONS-CALLED*, keeping only names with no
    ;; known translation; along the way collect HINT, the names that ARE
    ;; translated but were not declared via $DECLARE_TRANSLATED.
    (setq *untranslated-functions-called*
          (delete-with-side-effects-if
           #'(lambda (v)
               (prog1
                   (or (setq translated
                             (or (get v 'once-translated)
                                 (get v 'translated)))
                       (and (fboundp v)
                            ;; might be traced
                            (not (mget v 'mexpr)))
                       (get v 'mfexpr*))
                 (when (and translated
                            (not (member v *declared-translated-functions* :test #'eq)))
                   (push v hint))))
           *untranslated-functions-called*))
    ;; Emit an optimization hint as a Maxima /* ... */ comment block.
    ;; NB: the literal newline inside the format string is intentional.
    (when hint
      (format stream
              "~2%/* The compiler might be able to optimize some function calls
 if you prepend the following declaration to your maxima code: */~%")
      (mgrind `(($eval_when) $translate (($declare_translated) ,@hint))
              stream)
      (format stream "$"))
    ;; List the functions with no known translation.
    (when *untranslated-functions-called*
      (format stream "~2%/* The following functions are not known to be translated.~%")
      (mgrind `((mlist) ,@(nreverse *untranslated-functions-called*)) stream)
      (format stream "$ */"))
    (fresh-line stream)
    ;; Point the user at the unlisp file on the terminal as well.
    (when (or hint *untranslated-functions-called*)
      (format t "~&See the `unlisp' file for possible optimizations.~%"))))
;; Translate the Maxima source file IN-FILE-NAME to Lisp.  Writes the
;; translated code to a .LISP file (OUT-FILE-NAME or derived from the
;; input name) and translator warnings to a .UNLISP file.  When
;; TTYMSGSP is true, warnings also go to standard output.  Returns a
;; Maxima list of the three file names (input, lisp output, unlisp).
(defun translate-file (in-file-name out-file-name &optional (ttymsgsp $tr_file_tty_messagesp)
                       &aux warn-file translated-file *translation-msgs-files*
                       *untranslated-functions-called* *declared-translated-functions*)
  (bind-transl-state
   (setq *in-translate-file* t)
   (setq translated-file (alter-pathname (or out-file-name in-file-name) :type "LISP"))
   (setq warn-file (alter-pathname in-file-name :type "UNLISP"))
   (with-open-file (in-stream in-file-name)
     (with-open-file (out-stream translated-file :direction :output :if-exists :supersede)
       (with-open-file (warn-stream warn-file :direction :output :if-exists :supersede)
         (setq *translation-msgs-files* (list warn-stream))
         (if ttymsgsp
             (setq *translation-msgs-files* (cons *standard-output* *translation-msgs-files*)))
         ;; Emit a header: mode line, timestamp, Maxima and Lisp versions.
         (format out-stream ";;; -*- Mode: Lisp; package:maxima; syntax:common-lisp ;Base: 10 -*- ;;;~%")
         (flet ((timezone-iso8601-name (dst tz)
                  ;; This function was borrowed from CMUCL.
                  (let ((tz (- tz)))
                    (if (and (not dst) (= tz 0))
                        "Z"
                        (multiple-value-bind (hours minutes)
                            (truncate (if dst (1+ tz) tz))
                          (format nil "~C~2,'0D:~2,'0D"
                                  (if (minusp tz) #\- #\+)
                                  (abs hours)
                                  (abs (truncate (* minutes 60)))))))))
           (multiple-value-bind (secs mins hours day month year dow dst tz)
               (decode-universal-time (get-universal-time))
             (declare (ignore dow))
             (format out-stream ";;; Translated on: ~D-~2,'0D-~2,'0D ~2,'0D:~2,'0D:~2,'0D~A~%"
                     year month day hours mins secs (timezone-iso8601-name dst tz))))
         (format out-stream ";;; Maxima System version: ~A~%" *autoconf-version*)
         (format out-stream ";;; Lisp type: ~A~%" (lisp-implementation-type))
         (format out-stream ";;; Lisp version: ~A~%" (lisp-implementation-version))
         (format out-stream "~%(in-package :maxima)")
         (format warn-stream "~%This is the unlisp file for ~A "
                 (namestring (pathname in-stream)))
         ;; Record the translator option settings in the output file.
         (mformat out-stream
                  "~%;;** Variable settings were **~%~%")
         (loop for v in (cdr $tr_state_vars)
            do (mformat out-stream ";; ~:M: ~:M;~%" v (symbol-value v)))
         (mformat *terminal-io* "~%Translation begun on ~A.~%"
                  (pathname in-stream))
         ;; CALL-BATCH1 calls TRANSLATE-MACEXPR-TOPLEVEL on each
         ;; expression read.
         (call-batch1 in-stream out-stream)
         (insert-necessary-function-declares warn-stream)
         (cons '(mlist)
               (mapcar 'namestring
                       (mapcar 'pathname (list in-stream out-stream warn-stream)))))))))
;; Should be rewritten to use streams.  Barf -- perhaps SPRINTER
;; doesn't take a stream argument?
;; Yes, is old i/o, but KMP is writing a new one for NIL. -GJC
;; Print the translated form P to TRANSL-FILE, with a handful of
;; historic I/O-control specials (^W, ^R, OUTFILES, $LOADPRINT) bound
;; around the actual printing, which SUB-PRINT* performs.
(defun print* (p)
  (let ((^w t)
        (outfiles (list transl-file))
        (^r t)
        ($loadprint nil))               ; lusing old I/O !!!!!
    (declare (special outfiles))
    (sub-print* p)))
;; I might as well be real pretty and flatten out PROGN's.
;; Print translated form P to TRANSL-FILE.  (PROGN 'COMPILE ...)
;; wrappers are flattened so each subform prints at top level.  When
;; $TR_SEMICOMPILE is on, non-EVAL-WHEN forms are wrapped in (PROGN ...).
(defun sub-print* (p &aux (flag nil))
  (cond ((atom p))                      ; atoms produce no output
        ((and (eq (car p) 'progn) (cdr p) (equal (cadr p) ''compile))
         ;; flatten (progn 'compile ...) and print each subform
         (mapc #'sub-print* (cddr p)))
        (t
         (setq flag (and $tr_semicompile
                         (not (eq (car p) 'eval-when))))
         (when flag (princ* #\() (princ* 'progn) (terpri*))
         ;; NOTE(review): in the $COMPGRIND branch PRIN1 receives no
         ;; stream argument, i.e. it prints to *STANDARD-OUTPUT*, not
         ;; TRANSL-FILE -- confirm whether the legacy I/O bindings in
         ;; PRINT* are expected to redirect it.
         (if $compgrind
             (prin1 p)
             (prin1 p transl-file))
         (when flag (princ* #\)))
         (terpri transl-file))))
(defun princ* (form)
(princ form transl-file))
(defun nprinc* (&rest form)
(mapc #'(lambda (x) (princ x transl-file)) form))
(defun terpri* ()
(terpri transl-file))
(defun print-module (m)
(nprinc* " " m " version " (get m 'version)))
(defun new-comment-line ()
(terpri*)
(princ* ";;;"))
(defun print-transl-modules ()
(new-comment-line)
(print-module 'transl-autoload)
(do ((j 0 (1+ j))
(s (delete 'transl-autoload (copy-list transl-modules) :test #'equal)
(cdr s)))
((null s))
(if (= 0 (rem j 3)) (new-comment-line))
(print-module (car s))))
(defun print-transl-header (source)
(mformat transl-file ";;; -*- Mode: Lisp; package:maxima; syntax:common-lisp -*-~%")
(if source
(mformat transl-file ";;; Translated code for ~A" source)
(mformat transl-file ";;; Translated Maxima functions generated by `compfile'."))
(mformat transl-file "~%;;; Written on ~:M, from Maxima ~A~%;;;~%" ($timedate) $version)
(print-transl-modules)
(mformat transl-file
"~%~
~%~
~%(eval-when (:compile-toplevel :execute) ~
~% (setq *infile-name-key*~
~% ((lambda (file-name)~
~% ;; temp crock for multics.~
~% (cond ((eq (ml-typep file-name) 'list)~
~% (namestring file-name))~
~% (t file-name)))~
~% (truename infile))))~
~%~
~%(eval-when (:compile-toplevel) ~
~% (setq $tr_semicompile '~S)~
~% (setq forms-to-compile-queue ()))~
~%~%;;; ~S~%~%"
$tr_semicompile source)
(cond ($transcompile
(update-global-declares)
(when $compgrind
(mformat transl-file ";;; General declarations required for translated Maxima code.~%"))
(print* `(declare . ,declares)))))
(defun print-abort-msg (fun from)
(mformat *translation-msgs-files*
"~:@M failed to Translate.~
~%~A will continue, but file output will be aborted."
fun from))
(defmacro extension-filename (x)
`(caddr (namelist ,x)))
;; Maxima user function TRANSLATE(f1, f2, ...): translate the named
;; user functions into Lisp.  TRANSLATE(functions) or TRANSLATE(all)
;; translates every function on the $FUNCTIONS infolist.  Returns a
;; Maxima list of the names that translated successfully.
(defmspec $translate (functs)
  (setq functs (cdr functs))
  (cond ((and functs ($listp (car functs)))
         ;; A Maxima list argument means the caller wanted file
         ;; translation, which is a different entry point.
         (merror "Use the function `translate_file'"))
        (t
         (cond ((or (member '$functions functs :test #'eq)
                    (member '$all functs :test #'eq))
                (setq functs (mapcar 'caar (cdr $functions)))))
         (do ((l functs (cdr l))
              (v nil))
             ((null l) `((mlist) ,@(nreverse v)))
           (cond ((atom (car l))
                  (let ((it (translate-function ($verbify (car l)))))
                    (if it (push it v))))
                 (t
                  (tr-tell
                   (car l)
                   " is an illegal argument to `translate'.")))))))
(declare-top (special forms-to-compile-queue))

;; Maxima user function COMPILE(f1, ...): translate the named functions
;; (via $TRANSLATE) and compile the resulting Lisp definitions.
;; Translation may push additional top-level forms onto
;; FORMS-TO-COMPILE-QUEUE; these are evaluated (and any DEFUNs among
;; them compiled) repeatedly until the queue drains.  Returns the list
;; produced by $TRANSLATE.
(defmspec $compile (form)
  (let ((l (meval `(($translate),@(cdr form)))))
    (let ((forms-to-compile-queue ()))
      (mapc #'(lambda (x) (if (fboundp x) (compile x))) (cdr l))
      (do ()
          ((null forms-to-compile-queue) l)
        (mapc #'(lambda (form)
                  (eval form)
                  (and (consp form)
                       (eq (car form) 'defun)
                       (symbolp (cadr form))
                       (compile (cadr form))))
              ;; grab the current queue and reset it so forms queued
              ;; during this pass are picked up on the next iteration
              (prog1 forms-to-compile-queue
                (setq forms-to-compile-queue nil)))))))
| null | https://raw.githubusercontent.com/huangjs/cl/96158b3f82f82a6b7d53ef04b3b29c5c8de2dbf7/lib/maxima/src/transs.lisp | lisp | Package : Maxima ; Syntax : Common - Lisp ; Base : 10 -*- ; ; ; ;
The data in this file contains enhancments. ;;;;;
;;;;;
; ; ; ;
All rights reserved ;;;;;
; ;
User-hacking code, file-io, translator toplevel.
system dependant part of the code, although certain behavior
so hacks on DEFAULTF will not
stray the target.
unwind-protected
Temporary hack during debugging of this code.
If the compiler encountered errors, don't set bin-file to
indicate that we found errors. Is this what we want?
Define a pprinter for defmtrfun.
we want the thing to start with a newline..
might be traced
doesn't take a stream argument?
lusing old I/O !!!!!
temp crock for multics.~
~S~%~%" |
(in-package :maxima)
(macsyma-module transs)
(transl-module transs)
(defmvar *transl-file-debug* nil
"set this to T if you don't want to have the temporary files
used automaticaly deleted in case of errors.")
There are various macros to cons - up filename TEMPLATES
which to into . The filenames should be the only
of RENAMEF / MERGEF / DELETE - FILE is assumed .
(defmvar $tr_output_file_default '$trlisp
"This is the second file name to be used for translated lisp
output.")
(defmvar $tr_file_tty_messagesp nil
"It TRUE messages about translation of the file are sent
to the TTY also.")
(defmvar $tr_windy t
"Generate helpful comments and programming hints.")
(defvar *translation-msgs-files* nil
"Where the warning and other comments goes.")
(defvar $tr_version (get 'transl-autoload 'version))
(defmvar transl-file nil "output stream of $compfile and $translate_file")
(defmvar $compgrind nil "If `true' lisp output will be pretty-printed.")
(defmvar $tr_true_name_of_file_being_translated nil
"This is set by TRANSLATE_FILE for use by user macros
which want to know the name of the source file.")
(defmvar $tr_state_vars
'((mlist) $transcompile $tr_semicompile
$translate_fast_arrays
$tr_warn_undeclared
$tr_warn_meval
$tr_warn_fexpr
$tr_warn_mode
$tr_warn_undefined_variable
$tr_function_call_default
$tr_array_as_ref
$tr_numer
$define_variable))
(defmacro trlisp-outputname-d1 ()
'`,(pathname (maybe-invert-string-case (symbol-name (stripdollar $tr_output_file_default)))))
(defmacro trlisp-outputname ()
'`,(make-pathname :type "LISP"))
(defmacro trlisp-outputname-temp ()
'`,(pathname "_trli_"))
(defmacro trtags-outputname ()
'`,(pathname "tags"))
(defmacro trtags-outputname-temp ()
'`,(pathname "_tags_"))
(defmacro trcomments-outputname ()
'`,(pathname "unlisp"))
(defmacro trcomments-outputname-temp ()
'`,(pathname "_unli_"))
(defvar declares nil)
(defmspec $compfile (forms)
(setq forms (cdr forms))
(if (eq 1 (length forms))
(merror "compfile: bravely refusing to write file of length 0"))
(bind-transl-state
(setq $transcompile t
*in-compfile* t)
(let
((out-file-name (namestring (maxima-string (meval (car forms)))))
(t-error nil)
(*translation-msgs-files* nil))
(pop forms)
(unwind-protect
(progn
(setq transl-file (open out-file-name :direction :output :if-exists :overwrite :if-does-not-exist :create :element-type 'character))
(cond ((or (member '$all forms :test #'eq)
(member '$functions forms :test #'eq))
(setq forms (mapcar #'caar (cdr $functions)))))
(do ((l forms (cdr l))
(declares nil nil)
(tr-abort nil nil)
(item)
(lexprs nil nil)
(fexprs nil nil)
((null l))
(setq item (car l))
(cond ((not (atom item))
(print* (dconvx (translate item))))
(t
(setq t-item (compile-function (setq item ($verbify item))))
(cond (tr-abort
(setq t-error (print-abort-msg item 'compfile)))
(t
(when $compgrind
(mformat transl-file "~2%;; Function ~:@M~%" item))
(print* t-item))))))
(to-macsyma-namestring out-file-name))
(if transl-file (close transl-file))
(if t-error (delete-file transl-file))))))
(defun compile-function (f)
(mformat *translation-msgs-files* "~%Translating ~:@M" f)
(let ((fun (tr-mfun f)))
(cond (tr-abort nil)
(t fun))))
(defvar tr-defaultf nil
"A default only for the case of no arguments to $translate_file")
(defun mergef (x y)
(merge-pathnames y x))
(defun $compile_file (input-file &optional bin-file translation-output-file &aux result)
(setq input-file (maxima-string input-file))
(and bin-file(setq bin-file (maxima-string bin-file)))
(and translation-output-file
(setq translation-output-file (maxima-string translation-output-file)))
(cond ((string-equal (pathname-type input-file) "LISP")
(setq result (list '(mlist) input-file)))
(t (setq result (translate-file input-file translation-output-file))
(setq input-file (third result))))
#+(or cmu scl sbcl clisp allegro openmcl lispworks ecl)
(multiple-value-bind (output-truename warnings-p failure-p)
(if bin-file
(compile-file input-file :output-file bin-file)
(compile-file input-file))
(declare (ignore warnings-p))
(unless failure-p
(setq bin-file output-truename)))
#-(or cmu scl sbcl clisp allegro openmcl lispworks ecl)
(setq bin-file (compile-file input-file :output-file bin-file))
(append result (list bin-file)))
(defun maxima-string (symb)
(print-invert-case symb))
(defmfun $translate_file (input-file &optional output-file)
(setq input-file (maxima-string input-file))
(cond (output-file (setq output-file (maxima-string output-file))))
(translate-file input-file output-file))
(defmvar $tr_gen_tags nil
"If TRUE, TRANSLATE_FILE generates a TAGS file for use by the text editor")
(defvar *pretty-print-translation* t)
#-gcl
(defun pprint-defmtrfun (stream s)
(pprint-logical-block (stream s :prefix "(" :suffix ")")
(write (pprint-pop) :stream stream)
(write-char #\space stream)
(write (pprint-pop) :stream stream)
(pprint-indent :block 4 stream)
(pprint-newline :mandatory stream)
(write (pprint-pop) :stream stream)
(pprint-indent :block 2 stream)
(pprint-newline :mandatory stream)
(loop
(pprint-exit-if-list-exhausted)
(write (pprint-pop) :stream stream)
(write-char #\space stream)
(pprint-newline :linear stream))))
(defun call-batch1 (in-stream out-stream &aux expr transl)
(cleanup)
(newline in-stream)
(let ((*readtable* (copy-readtable nil))
#-gcl (*print-pprint-dispatch* (copy-pprint-dispatch)))
#-gcl
(progn
#-(or scl allegro)
(setf (readtable-case *readtable*) :invert)
#+(or scl allegro)
(unless #+scl (eq ext:*case-mode* :lower)
#+allegro (eq excl:*current-case-mode* :case-sensitive-lower)
(setf (readtable-case *readtable*) :invert))
(set-pprint-dispatch '(cons (member maxima::defmtrfun))
#'pprint-defmtrfun))
(loop while (and (setq expr (mread in-stream)) (consp expr))
do (setq transl (translate-macexpr-toplevel (third expr)))
(cond
(*pretty-print-translation*
(pprint transl out-stream))
(t
(format out-stream "~a" transl))))))
(defun translate-from-stream (from-stream &key to-stream eval pretty (print-function #'prin1)
&aux expr transl)
(bind-transl-state
(loop
with *in-translate-file* = t
with *print-pretty* = pretty
while (and (setq expr (mread from-stream)) (consp expr))
do (setq transl (translate-macexpr-toplevel (third expr)))
(cond (eval (eval transl)))
(cond (to-stream (funcall print-function transl to-stream)))
(loop for v in forms-to-compile-queue
do (show v to-stream)
when to-stream
do (funcall print-function v to-stream)
when eval
do (eval v))
(setq forms-to-compile-queue nil))))
(defvar trf-start-hook nil)
(defun delete-old-and-open (x)
(open x :direction :output))
(defun alter-pathname (pathname &rest options)
(apply 'make-pathname :defaults (pathname pathname) options))
(defun delete-with-side-effects-if (test list)
"Rudimentary DELETE-IF which, however, is guaranteed to call
the function TEST exactly once for each element of LIST, from
left to right."
(loop while (and list (funcall test (car list)))
do (pop list))
(loop with list = list
while (cdr list)
if (funcall test (cadr list))
do (pop (cdr list))
else
do (pop list))
list)
(defun insert-necessary-function-declares (stream)
"Write to STREAM two lists: The functions which are known to be
translated without actually being in the list passed to
$DECLARE_TRANSLATED, and those which are not known to be
translated."
(let (translated hint)
(setq *untranslated-functions-called*
(delete-with-side-effects-if
#'(lambda (v)
(prog1
(or (setq translated
(or (get v 'once-translated)
(get v 'translated)))
(and (fboundp v)
(not (mget v 'mexpr)))
(get v 'mfexpr*))
(when (and translated
(not (member v *declared-translated-functions* :test #'eq)))
(push v hint))))
*untranslated-functions-called*))
(when hint
(format stream
"~2%/* The compiler might be able to optimize some function calls
if you prepend the following declaration to your maxima code: */~%")
(mgrind `(($eval_when) $translate (($declare_translated) ,@hint))
stream)
(format stream "$"))
(when *untranslated-functions-called*
(format stream "~2%/* The following functions are not known to be translated.~%")
(mgrind `((mlist) ,@(nreverse *untranslated-functions-called*)) stream)
(format stream "$ */"))
(fresh-line stream)
(when (or hint *untranslated-functions-called*)
(format t "~&See the `unlisp' file for possible optimizations.~%"))))
(defun translate-file (in-file-name out-file-name &optional (ttymsgsp $tr_file_tty_messagesp)
&aux warn-file translated-file *translation-msgs-files*
*untranslated-functions-called* *declared-translated-functions*)
(bind-transl-state
(setq *in-translate-file* t)
(setq translated-file (alter-pathname (or out-file-name in-file-name) :type "LISP"))
(setq warn-file (alter-pathname in-file-name :type "UNLISP"))
(with-open-file (in-stream in-file-name)
(with-open-file (out-stream translated-file :direction :output :if-exists :supersede)
(with-open-file (warn-stream warn-file :direction :output :if-exists :supersede)
(setq *translation-msgs-files* (list warn-stream))
(if ttymsgsp
(setq *translation-msgs-files* (cons *standard-output* *translation-msgs-files*)))
(format out-stream ";;; -*- Mode: Lisp; package:maxima; syntax:common-lisp ;Base: 10 -*- ;;;~%")
(flet ((timezone-iso8601-name (dst tz)
This function was borrowed from CMUCL .
(let ((tz (- tz)))
(if (and (not dst) (= tz 0))
"Z"
(multiple-value-bind (hours minutes)
(truncate (if dst (1+ tz) tz))
(format nil "~C~2,'0D:~2,'0D"
(if (minusp tz) #\- #\+)
(abs hours)
(abs (truncate (* minutes 60)))))))))
(multiple-value-bind (secs mins hours day month year dow dst tz)
(decode-universal-time (get-universal-time))
(declare (ignore dow))
(format out-stream ";;; Translated on: ~D-~2,'0D-~2,'0D ~2,'0D:~2,'0D:~2,'0D~A~%"
year month day hours mins secs (timezone-iso8601-name dst tz))))
(format out-stream ";;; Maxima System version: ~A~%" *autoconf-version*)
(format out-stream ";;; Lisp type: ~A~%" (lisp-implementation-type))
(format out-stream ";;; Lisp version: ~A~%" (lisp-implementation-version))
(format out-stream "~%(in-package :maxima)")
(format warn-stream "~%This is the unlisp file for ~A "
(namestring (pathname in-stream)))
(mformat out-stream
"~%;;** Variable settings were **~%~%")
(loop for v in (cdr $tr_state_vars)
do (mformat out-stream ";; ~:M: ~:M;~%" v (symbol-value v)))
(mformat *terminal-io* "~%Translation begun on ~A.~%"
(pathname in-stream))
(call-batch1 in-stream out-stream)
(insert-necessary-function-declares warn-stream)
BATCH1 calls TRANSLATE - MACEXPR - toplevel on each expression read .
(cons '(mlist)
(mapcar 'namestring
(mapcar 'pathname (list in-stream out-stream warn-stream)))))))))
Should be rewritten to use streams . Barf -- perhaps SPRINTER
Yes is old i / o , but KMP is writing a new one for NIL . -GJC
(defun print* (p)
(let ((^w t)
(outfiles (list transl-file))
(^r t)
(declare (special outfiles))
(sub-print* p)))
i might as well be real pretty and flatten out PROGN 's .
(defun sub-print* (p &aux (flag nil))
(cond ((atom p))
((and (eq (car p) 'progn) (cdr p) (equal (cadr p) ''compile))
(mapc #'sub-print* (cddr p)))
(t
(setq flag (and $tr_semicompile
(not (eq (car p) 'eval-when))))
(when flag (princ* #\() (princ* 'progn) (terpri*))
(if $compgrind
(prin1 p)
(prin1 p transl-file))
(when flag (princ* #\)))
(terpri transl-file))))
(defun princ* (form)
(princ form transl-file))
(defun nprinc* (&rest form)
(mapc #'(lambda (x) (princ x transl-file)) form))
(defun terpri* ()
(terpri transl-file))
(defun print-module (m)
(nprinc* " " m " version " (get m 'version)))
(defun new-comment-line ()
(terpri*)
(princ* ";;;"))
(defun print-transl-modules ()
(new-comment-line)
(print-module 'transl-autoload)
(do ((j 0 (1+ j))
(s (delete 'transl-autoload (copy-list transl-modules) :test #'equal)
(cdr s)))
((null s))
(if (= 0 (rem j 3)) (new-comment-line))
(print-module (car s))))
(defun print-transl-header (source)
(mformat transl-file ";;; -*- Mode: Lisp; package:maxima; syntax:common-lisp -*-~%")
(if source
(mformat transl-file ";;; Translated code for ~A" source)
(mformat transl-file ";;; Translated Maxima functions generated by `compfile'."))
(mformat transl-file "~%;;; Written on ~:M, from Maxima ~A~%;;;~%" ($timedate) $version)
(print-transl-modules)
(mformat transl-file
"~%~
~%~
~%(eval-when (:compile-toplevel :execute) ~
~% (setq *infile-name-key*~
~% ((lambda (file-name)~
~% (cond ((eq (ml-typep file-name) 'list)~
~% (namestring file-name))~
~% (t file-name)))~
~% (truename infile))))~
~%~
~%(eval-when (:compile-toplevel) ~
~% (setq $tr_semicompile '~S)~
~% (setq forms-to-compile-queue ()))~
$tr_semicompile source)
(cond ($transcompile
(update-global-declares)
(when $compgrind
(mformat transl-file ";;; General declarations required for translated Maxima code.~%"))
(print* `(declare . ,declares)))))
(defun print-abort-msg (fun from)
(mformat *translation-msgs-files*
"~:@M failed to Translate.~
~%~A will continue, but file output will be aborted."
fun from))
(defmacro extension-filename (x)
`(caddr (namelist ,x)))
(defmspec $translate (functs)
(setq functs (cdr functs))
(cond ((and functs ($listp (car functs)))
(merror "Use the function `translate_file'"))
(t
(cond ((or (member '$functions functs :test #'eq)
(member '$all functs :test #'eq))
(setq functs (mapcar 'caar (cdr $functions)))))
(do ((l functs (cdr l))
(v nil))
((null l) `((mlist) ,@(nreverse v)))
(cond ((atom (car l))
(let ((it (translate-function ($verbify (car l)))))
(if it (push it v))))
(t
(tr-tell
(car l)
" is an illegal argument to `translate'.")))))))
(declare-top (special forms-to-compile-queue))
(defmspec $compile (form)
(let ((l (meval `(($translate),@(cdr form)))))
(let ((forms-to-compile-queue ()))
(mapc #'(lambda (x) (if (fboundp x) (compile x))) (cdr l))
(do ()
((null forms-to-compile-queue) l)
(mapc #'(lambda (form)
(eval form)
(and (consp form)
(eq (car form) 'defun)
(symbolp (cadr form))
(compile (cadr form))))
(prog1 forms-to-compile-queue
(setq forms-to-compile-queue nil)))))))
|
31276744fae6cc66141061cd7b0d4197084d6cba558f5cb41be9414703f2d620 | well-typed-lightbulbs/ocaml-esp32 | pr5785.ml | (* TEST
* expect
*)
module Add (T : sig type two end) =
struct
type _ t =
| One : [`One] t
| Two : T.two t
let add (type a) : a t * a t -> string = function
| One, One -> "two"
| Two, Two -> "four"
end;;
[%%expect{|
Lines 7-9, characters 43-24:
7 | ...........................................function
8 | | One, One -> "two"
9 | | Two, Two -> "four"
Warning 8: this pattern-matching is not exhaustive.
Here is an example of a case that is not matched:
(Two, One)
module Add :
functor (T : sig type two end) ->
sig
type _ t = One : [ `One ] t | Two : T.two t
val add : 'a t * 'a t -> string
end
|}];;
| null | https://raw.githubusercontent.com/well-typed-lightbulbs/ocaml-esp32/c24fcbfbee0e3aa6bb71c9b467c60c6bac326cc7/testsuite/tests/typing-gadts/pr5785.ml | ocaml | TEST
* expect
|
module Add (T : sig type two end) =
struct
type _ t =
| One : [`One] t
| Two : T.two t
let add (type a) : a t * a t -> string = function
| One, One -> "two"
| Two, Two -> "four"
end;;
[%%expect{|
Lines 7-9, characters 43-24:
7 | ...........................................function
8 | | One, One -> "two"
9 | | Two, Two -> "four"
Warning 8: this pattern-matching is not exhaustive.
Here is an example of a case that is not matched:
(Two, One)
module Add :
functor (T : sig type two end) ->
sig
type _ t = One : [ `One ] t | Two : T.two t
val add : 'a t * 'a t -> string
end
|}];;
|
8e62606676bea37034238513f845678e78df277a85f6e247b6628f1ba0d63604 | jepsen-io/jepsen | project.clj | (defproject jepsen.hazelcast "0.1.0-SNAPSHOT"
:description "Jepsen tests for Hazelcast IMDG"
:url "/"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[jepsen "0.1.11"]
[com.hazelcast/hazelcast-client "3.12-BETA-1"]]
:main jepsen.hazelcast)
| null | https://raw.githubusercontent.com/jepsen-io/jepsen/a75d5a50dd5fa8d639a622c124bf61253460b754/hazelcast/project.clj | clojure | (defproject jepsen.hazelcast "0.1.0-SNAPSHOT"
:description "Jepsen tests for Hazelcast IMDG"
:url "/"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[jepsen "0.1.11"]
[com.hazelcast/hazelcast-client "3.12-BETA-1"]]
:main jepsen.hazelcast)
| |
ba7412463fd34e3c1554a257faa5c5259345cc521b397c4eca6f815ad903b570 | wlitwin/graphv | perfGraph.ml | module Gv = Graphv_gles2
type style = FPS
| Ms
| Percent
type t = {
style : style;
name : string;
values : float array;
mutable head : int;
mutable last : float;
}
let init style name = {
name;
style;
values = Array.make 100 0.;
head = 0;
last = 0.;
}
let average t =
let avg = ref 0. in
for i=0 to Array.length t.values-1 do
avg := !avg +. t.values.(i)
done;
!avg /. float (Array.length t.values)
;;
let update t dt =
t.head <- (t.head + 1) mod (Array.length t.values);
t.values.(t.head) <- dt;
t.last < - t.last + . dt ;
if t.last > 1 . then (
t.last < - 0 . ;
Printf.printf " FPS % .2f\n% ! " ( 1 . /. average t ) ;
)
t.last <- t.last +. dt;
if t.last > 1. then (
t.last <- 0.;
Printf.printf "FPS %.2f\n%!" (1. /. average t);
)
*)
;;
let render t (vg : Gv.t) x y =
let avg = average t in
let w = 200. in
let h = 35. in
let open Gv in
let open FloatOps in
Path.begin_ vg;
Path.rect vg ~x ~y ~w ~h;
set_fill_color vg ~color:(Color.rgba ~r:0 ~g:0 ~b:0 ~a:128);
fill vg;
Path.begin_ vg;
Path.move_to vg ~x ~y:(y+h);
let len = Array.length t.values in
begin match t.style with
| FPS ->
for i=0 to len-.1 do
let v = 1. / (0.00001 + t.values.((t.head+.i) mod len)) in
let v = if v > 80. then 80. else v in
let vx = x + (float i / (float len - 1.)) * w in
let vy = y + h - ((v / 80.) * h) in
Path.line_to vg ~x:vx ~y:vy
done;
| Percent -> ()
| Ms -> ()
end;
Path.line_to vg ~x:(x+w) ~y:(y+h);
set_fill_color vg ~color:(Color.rgba ~r:255 ~g:192 ~b:0 ~a:128);
fill vg;
Text.set_font_face vg ~name:"mono";
Text.set_size vg ~size:12.;
Text.set_align vg ~align:Align.(left lor top);
set_fill_color vg ~color:(Color.rgba ~r:240 ~g:240 ~b:240 ~a:192);
Text.text vg ~x:(x+3.) ~y:(y+3.) t.name;
begin match t.style with
| FPS ->
Text.set_size vg ~size:15.;
Text.set_align vg ~align:Align.(right lor top);
set_fill_color vg ~color:(Color.rgba ~r:240 ~g:240 ~b:240 ~a:255);
let s = Printf.sprintf "%.2f FPS" (1. / avg) in
Text.text vg ~x:(x+w-3.) ~y:(y+3.) s;
Text.set_size vg ~size:13.;
Text.set_align vg ~align:Align.(right lor baseline);
set_fill_color vg ~color:(Color.rgba ~r:240 ~g:240 ~b:240 ~a:160);
let s = Printf.sprintf "%.2f ms" (avg * 1000.) in
Text.text vg ~x:(x+w-3.) ~y:(y+h-3.) s;
| Percent -> ()
| Ms -> ()
end;
;;
| null | https://raw.githubusercontent.com/wlitwin/graphv/d0a09575c5ff5ee3727c222dd6130d22e4cf62d9/examples/native_sdl/perfGraph.ml | ocaml | module Gv = Graphv_gles2
type style = FPS
| Ms
| Percent
type t = {
style : style;
name : string;
values : float array;
mutable head : int;
mutable last : float;
}
let init style name = {
name;
style;
values = Array.make 100 0.;
head = 0;
last = 0.;
}
let average t =
let avg = ref 0. in
for i=0 to Array.length t.values-1 do
avg := !avg +. t.values.(i)
done;
!avg /. float (Array.length t.values)
;;
let update t dt =
t.head <- (t.head + 1) mod (Array.length t.values);
t.values.(t.head) <- dt;
t.last < - t.last + . dt ;
if t.last > 1 . then (
t.last < - 0 . ;
Printf.printf " FPS % .2f\n% ! " ( 1 . /. average t ) ;
)
t.last <- t.last +. dt;
if t.last > 1. then (
t.last <- 0.;
Printf.printf "FPS %.2f\n%!" (1. /. average t);
)
*)
;;
let render t (vg : Gv.t) x y =
let avg = average t in
let w = 200. in
let h = 35. in
let open Gv in
let open FloatOps in
Path.begin_ vg;
Path.rect vg ~x ~y ~w ~h;
set_fill_color vg ~color:(Color.rgba ~r:0 ~g:0 ~b:0 ~a:128);
fill vg;
Path.begin_ vg;
Path.move_to vg ~x ~y:(y+h);
let len = Array.length t.values in
begin match t.style with
| FPS ->
for i=0 to len-.1 do
let v = 1. / (0.00001 + t.values.((t.head+.i) mod len)) in
let v = if v > 80. then 80. else v in
let vx = x + (float i / (float len - 1.)) * w in
let vy = y + h - ((v / 80.) * h) in
Path.line_to vg ~x:vx ~y:vy
done;
| Percent -> ()
| Ms -> ()
end;
Path.line_to vg ~x:(x+w) ~y:(y+h);
set_fill_color vg ~color:(Color.rgba ~r:255 ~g:192 ~b:0 ~a:128);
fill vg;
Text.set_font_face vg ~name:"mono";
Text.set_size vg ~size:12.;
Text.set_align vg ~align:Align.(left lor top);
set_fill_color vg ~color:(Color.rgba ~r:240 ~g:240 ~b:240 ~a:192);
Text.text vg ~x:(x+3.) ~y:(y+3.) t.name;
begin match t.style with
| FPS ->
Text.set_size vg ~size:15.;
Text.set_align vg ~align:Align.(right lor top);
set_fill_color vg ~color:(Color.rgba ~r:240 ~g:240 ~b:240 ~a:255);
let s = Printf.sprintf "%.2f FPS" (1. / avg) in
Text.text vg ~x:(x+w-3.) ~y:(y+3.) s;
Text.set_size vg ~size:13.;
Text.set_align vg ~align:Align.(right lor baseline);
set_fill_color vg ~color:(Color.rgba ~r:240 ~g:240 ~b:240 ~a:160);
let s = Printf.sprintf "%.2f ms" (avg * 1000.) in
Text.text vg ~x:(x+w-3.) ~y:(y+h-3.) s;
| Percent -> ()
| Ms -> ()
end;
;;
| |
a3ea5be5b8ee417ec6d0b5057b73390279712d20a094b0bcd7911287183cadcd | nuprl/gradual-typing-performance | tabulate-paths.rkt | #lang racket/base
TODO add parameters
TODO get quad / gregor results
;; Answers the question:
;; "For a given overhead N, how many paths
;; never have unacceptable performance?"
;; By creating a table:
;; - rows = overhead vs. untyped (variable N* in this file)
;; - columns = number of unacceptable configs along a path
;; - values = percent of paths where `column` configs have overhead greater than `row`
;;
;; The number of columns varies by the project (num columns = num modules).
;; When counting the number of configs with/without bad overhead, we exclude the
;; fully-untyped and fully-typed configuration (because they're constant over all paths)
;;
Caveat : script does not run for quad & gregor . Those are too large .
(require
(only-in math/number-theory factorial)
(only-in racket/string string-suffix?)
gtp-summarize/summary
racket/format
racket/list
racket/match
)
;; =============================================================================
(define *OUTPUT* (make-parameter #f))
;; -----------------------------------------------------------------------------
;; Overheads to test. Add as many as you want.
(define N* (vector 1 3 5 10 20))
(define (rnd n)
(~r n #:precision (list '= 2)))
(define (print-header)
(void)
( printf " ; ; rows are N in ~a\n ; ; columns are % with at most M points above N\n " N * ) )
(define (print-footer)
(void))
;; Create a 2D matrix for a given number of modules.
Returns two values : the matrix and a procedure for incrementing a matrix entry .
( - > Index ( Values ( ( Vectorof Natural ) ) ( - > Natural Natural Void ) ) ) )
(define (init-pathtbl num-modules)
(define v* (for/vector ([_n (in-vector N*)])
(make-vector num-modules 0)))
(define (v++ r c)
(let ([old (vector-ref (vector-ref v* r) c)])
(vector-set! (vector-ref v* r) c (add1 old))))
(values v* v++))
(module+ main
(require racket/cmdline)
(command-line
#:program "list-paths"
#:once-each
[("-o" "--output")
o-p
"Output path"
(*OUTPUT* o-p)]
#:args FNAME*
(for ([fname (in-list FNAME*)])
;; -- For each valid .rktd file
(unless (and (file-exists? fname)
(string-suffix? fname ".rktd"))
(printf "Skipping invalid data file '~a'\n" fname))
(printf "Processing '~a' ...\n" fname)
(define S (from-rktd fname))
;; -- Create a matrix to fill
(define-values (T T++) (init-pathtbl (get-num-modules S)))
;; -- For every path in the lattice
(for ([p (all-paths S)])
(define overhead*
(map (lambda (c) (configuration->overhead S c))
Ignore the first and last variation
(cdr (drop-right p 1))))
;; -- For every overhead (row), count the configs in each path that run
;; no slower than the overhead
(for ([r (in-range (vector-length N*))])
(define n (vector-ref N* r))
(T++ r (count (lambda (overhead) (> overhead n)) overhead*))))
(define (print-paths)
(print-header)
(define total-paths (factorial (get-num-modules S)))
(printf "#( ;; ~a, ~a total paths\n" (get-project-name S) total-paths)
(for ([row (in-vector T)])
(displayln (for/list ([v (in-vector row)])
(~a (round (* (/ v total-paths) 100))
#:min-width 3
#:align 'right))))
(displayln ")")
(print-footer))
(match (*OUTPUT*)
[#f
(print-paths)]
[_
(define out (format "paths-~a.rktd" (get-project-name S)))
(with-output-to-file out #:exists 'replace
print-paths)]))))
| null | https://raw.githubusercontent.com/nuprl/gradual-typing-performance/35442b3221299a9cadba6810573007736b0d65d4/tools/summarize/tabulate-paths.rkt | racket | Answers the question:
"For a given overhead N, how many paths
never have unacceptable performance?"
By creating a table:
- rows = overhead vs. untyped (variable N* in this file)
- columns = number of unacceptable configs along a path
- values = percent of paths where `column` configs have overhead greater than `row`
The number of columns varies by the project (num columns = num modules).
When counting the number of configs with/without bad overhead, we exclude the
fully-untyped and fully-typed configuration (because they're constant over all paths)
=============================================================================
-----------------------------------------------------------------------------
Overheads to test. Add as many as you want.
Create a 2D matrix for a given number of modules.
-- For each valid .rktd file
-- Create a matrix to fill
-- For every path in the lattice
-- For every overhead (row), count the configs in each path that run
no slower than the overhead | #lang racket/base
TODO add parameters
TODO get quad / gregor results
Caveat : script does not run for quad & gregor . Those are too large .
(require
(only-in math/number-theory factorial)
(only-in racket/string string-suffix?)
gtp-summarize/summary
racket/format
racket/list
racket/match
)
(define *OUTPUT* (make-parameter #f))
(define N* (vector 1 3 5 10 20))
(define (rnd n)
(~r n #:precision (list '= 2)))
(define (print-header)
(void)
( printf " ; ; rows are N in ~a\n ; ; columns are % with at most M points above N\n " N * ) )
(define (print-footer)
(void))
Returns two values : the matrix and a procedure for incrementing a matrix entry .
( - > Index ( Values ( ( Vectorof Natural ) ) ( - > Natural Natural Void ) ) ) )
(define (init-pathtbl num-modules)
(define v* (for/vector ([_n (in-vector N*)])
(make-vector num-modules 0)))
(define (v++ r c)
(let ([old (vector-ref (vector-ref v* r) c)])
(vector-set! (vector-ref v* r) c (add1 old))))
(values v* v++))
(module+ main
(require racket/cmdline)
(command-line
#:program "list-paths"
#:once-each
[("-o" "--output")
o-p
"Output path"
(*OUTPUT* o-p)]
#:args FNAME*
(for ([fname (in-list FNAME*)])
(unless (and (file-exists? fname)
(string-suffix? fname ".rktd"))
(printf "Skipping invalid data file '~a'\n" fname))
(printf "Processing '~a' ...\n" fname)
(define S (from-rktd fname))
(define-values (T T++) (init-pathtbl (get-num-modules S)))
(for ([p (all-paths S)])
(define overhead*
(map (lambda (c) (configuration->overhead S c))
Ignore the first and last variation
(cdr (drop-right p 1))))
(for ([r (in-range (vector-length N*))])
(define n (vector-ref N* r))
(T++ r (count (lambda (overhead) (> overhead n)) overhead*))))
(define (print-paths)
(print-header)
(define total-paths (factorial (get-num-modules S)))
(printf "#( ;; ~a, ~a total paths\n" (get-project-name S) total-paths)
(for ([row (in-vector T)])
(displayln (for/list ([v (in-vector row)])
(~a (round (* (/ v total-paths) 100))
#:min-width 3
#:align 'right))))
(displayln ")")
(print-footer))
(match (*OUTPUT*)
[#f
(print-paths)]
[_
(define out (format "paths-~a.rktd" (get-project-name S)))
(with-output-to-file out #:exists 'replace
print-paths)]))))
|
01c028fba8faa4f1d931ebe5e7c45171c134e5eab38efee9de8b20e16544f5ec | paulbutcher/electron-app | runner_visual.cljs | (ns {{name}}.test.runner-visual
(:require [cljs-test-display.core]
[figwheel.main.testing :refer-macros [run-tests]]
[{{name}}.renderer.arithmetic-test]
[{{name}}.main.arithmetic-test]))
(run-tests (cljs-test-display.core/init!))
| null | https://raw.githubusercontent.com/paulbutcher/electron-app/2b67a893b7dba60bf91d504a2daa009149c6fc9b/resources/clj/new/electron_app/src/test/test/runner_visual.cljs | clojure | (ns {{name}}.test.runner-visual
(:require [cljs-test-display.core]
[figwheel.main.testing :refer-macros [run-tests]]
[{{name}}.renderer.arithmetic-test]
[{{name}}.main.arithmetic-test]))
(run-tests (cljs-test-display.core/init!))
| |
4da4b562700e75ccadbed88fd750daaf882f8c2dce4c2973c659a0c914f4d819 | msfm2018/Online-Service | hub_client_gc_wc.erl | coding : utf-8
-module(hub_client_gc_wc).
-include("common.hrl").
-include("proto_player.hrl").
-compile(export_all).
loop(Socket, {init}) ->
ok;
loop(Socket, {terminate, Reason}) ->
case get(player_id) of
?UNDEFINED -> ok;
Player_Id ->
OldPid = ?ETS_LOOKUP_ELEMENT(?GC_PLAYER_INFO_ETS, Player_Id, #gc_player_info_ets.pid),
?IF(OldPid =:= self(),
begin
ets:delete(?GC_PLAYER_INFO_ETS, Player_Id)
end)
end,
ok;
loop(_Socket, <<>>) ->
ok;
loop(Socket, Data) ->
[{Protocol, JsonData}] = jsx:decode(Data),
receive_data(Socket, binary_to_list(Protocol), JsonData).
receive_data(Socket, "0x01", _) ->
PrivateKey = uuid:to_string(uuid:uuid1()),
Login_name = erlang:phash2(PrivateKey),
Json = jsx:encode([{<<"resultCode">>, 0}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#01},
{
<<"data">>,
[[ %%[[ 为了客户端好解析
{<<"account">>, integer_to_binary(Login_name)}
]]
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Json)),
ok;
receive_data(Socket, "0x02", NewPlayerId) ->
case ets:lookup(?GC_PLAYER_INFO_ETS, NewPlayerId) of
[#gc_player_info_ets{status = OldStatus, pid = OldPid}] ->
if
OldStatus =:= ?PLAYER_REGISTER_STATUS andalso OldPid =:= self() -> % 先前是注册状态,
ets:insert(?GC_PLAYER_INFO_ETS, #gc_player_info_ets{player_id = NewPlayerId, socket = Socket, status = ?PLAYER_LOGIN_STATUS, pid = self()}),
put(player_id, NewPlayerId);
OldStatus =:= ?PLAYER_LOGINED_STATUS andalso OldPid =/= self() ->
% 当前用户顶掉同节点上的用户
Msg = "13",% proto:pack(#mod_player_login_s2c{}),
gen_tcp:send(Socket, ?packwebtxtmsg(Msg)),
ok;
ok;
OldStatus =:= ?PLAYER_LOGINED_STATUS andalso OldPid =:= self() ->
已经登陆了 这里暂时不做处理 todo 添加log 为什么客户端会发送了两次login消息
ok;
true ->
io:format("444444444444444444~n"),% 登陆失败,当前这个账号正在登陆或者注册状态 直接返回稍后登陆
Msg = jsx:encode([{<<"resultCode">>, -5}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#01},
{
<<"data">>,
[]
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Msg)),
ok
end;
_ -> % 当前节点没有用户登陆此账号,直接登陆
put(player_id, NewPlayerId),
ets:insert(?GC_PLAYER_INFO_ETS, #gc_player_info_ets{player_id = NewPlayerId, pid = self(), socket = Socket, status = ?PLAYER_LOGIN_STATUS})
end;
%%访客给客服消息 0x03_web
receive_data(Socket, "0x03_web_visitor", Json_data) ->
put(send_flag_0x03, no),
[{<<"senderid">>, Senderid},
{<<"toid">>, Toid},
{<<"nick">>, Nick},
{<<"msgtype">>, Msgtype},
{<<"content">>, Content},
{<<"sendtime">>, Sendtime},
{<<"msgid">>, Msgid}] = Json_data,
if Toid == <<>>
->
UsrLst = ets:tab2list(?GC_PLAYER_INFO_ETS),
lists:dropwhile(
fun(L) ->
%%false 跳出循环
#gc_player_info_ets{socket = So, player_id = NewPlayerId} = L,
Left_value = string:left(binary_to_list(NewPlayerId), 2),
io:format("Left_value ~p~n", [Left_value]),
if Left_value == "kf"
->
Json_t = jsx:encode([{<<"resultCode">>, 0}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#05},
{
<<"data">>, Json_data
}
]),
$ 05 给客服消息
gen_tcp:send(So, ?packwebtxtmsg(Json_t)),
0x06 协议 给顾客 返回客服id
Json = jsx:encode([{<<"resultCode">>, 0}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#06},
{
<<"data">>,
[[
{<<"toid">>, NewPlayerId}
]]
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Json)),
put(send_flag_0x03, yes),
false;
true ->
true
end
end, UsrLst),
Flag = get(send_flag_0x03),
if Flag == no ->
io:format("客服未在线 返回错误码 -1~n"),
Json = jsx:encode([{<<"resultCode">>, -1}, {<<"resultMsg">>, <<"leave">>}, {<<"protocol">>, 16#03},
{
<<"data">>,
[
]
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Json));
true -> ok
end;
true ->
io : format("~ts ~ n " , [ { " 与 " , Toid , " 对话 " } ] ) ,
UsrLst = ets:tab2list(?GC_PLAYER_INFO_ETS),
lists:dropwhile(
fun(L) ->
%%false 跳出循环
#gc_player_info_ets{socket = So, player_id = NewPlayerId} = L,
io:format("~p~n", [{NewPlayerId, Toid}]),
if Toid == NewPlayerId
->
io:format("客服在线 推送信息~n"),
Json_t1 = jsx:encode([{<<"resultCode">>, 0}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#05},
{
<<"data">>, Json_data
}
]),
gen_tcp:send(So, ?packwebtxtmsg(Json_t1)),
put(send_flag_0x03, yes),
false;
true ->
true
end
end,
UsrLst),
Flag = get(send_flag_0x03),
if Flag == no ->
io:format("客服已离线 返回错误码 -2~n"),
Json = jsx:encode([{<<"resultCode">>, -2}, {<<"resultMsg">>, <<"leave">>}, {<<"protocol">>, 16#03},
{
<<"data">>,
[
]
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Json));
true -> ok
end
end,
%% io:format("~p~n", [Json_list]),
ok;
0x04 客服 给
receive_data(Socket, "0x04", Json_data) ->
Json_list = jsx:decode(Json_data),
[{<<"senderid">>, Senderid},
{<<"toid">>, Toid},
{<<"nick">>, Nick},
{<<"msgtype">>, Msgtype},
{<<"content">>, Content},
{<<"sendtime">>, Sendtime},
{<<"msgid">>, Msgid}] = Json_list,
io : format("~ts ~ n " , [ { " 与 " , Toid , " 对话 " } ] ) ,
io:format("aaaaaaaaaaaaaaaaaaaaaaaaaaa~n"),
UsrLst = ets:tab2list(?GC_PLAYER_INFO_ETS),
put(send_flag_0x04, no),
io:format("bbbbbbbbbbbbbbbbbbbbbbbbbbb~n"),
lists:dropwhile(
fun(L) ->
%%false 跳出循环
#gc_player_info_ets{socket = So, player_id = NewPlayerId} = L,
io:format("~p~n", [{NewPlayerId, Toid}]),
if Toid == NewPlayerId
->
io:format("顾客在线 推送信息 ~n"),
Json_t1 = jsx:encode([{<<"resultCode">>, 0}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#05},
{
<<"data">>, Json_data
}
]),
gen_tcp:send(So, ?packwebtxtmsg(Json_t1)),
put(send_flag_0x04, yes),
false;
true ->
true
end
end,
UsrLst),
Flagg = get(send_flag_0x04),
if Flagg == no
访客已离线
Json_err = jsx:encode([{<<"resultCode">>, -3}, {<<"resultMsg">>, <<"leave">>}, {<<"protocol">>, 16#03},
{
<<"data">>, Toid
%[
%]
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Json_err));
true -> ok
end,
ok;
receive_data(Socket, "0x07", _) ->
{ok, {IP_Address, Port}} = inet:peername(Socket),
io:format("心跳包 ~p~n~n", [{IP_Address, Port}]),
ok.
%%%%%%% internal function
| null | https://raw.githubusercontent.com/msfm2018/Online-Service/281c2db795f09c7e4dc6768d08d35bbe83350670/src/hub_client_gc_wc.erl | erlang | [[ 为了客户端好解析
先前是注册状态,
当前用户顶掉同节点上的用户
proto:pack(#mod_player_login_s2c{}),
登陆失败,当前这个账号正在登陆或者注册状态 直接返回稍后登陆
当前节点没有用户登陆此账号,直接登陆
访客给客服消息 0x03_web
false 跳出循环
false 跳出循环
io:format("~p~n", [Json_list]),
false 跳出循环
[
]
internal function | coding : utf-8
-module(hub_client_gc_wc).
-include("common.hrl").
-include("proto_player.hrl").
-compile(export_all).
loop(Socket, {init}) ->
ok;
loop(Socket, {terminate, Reason}) ->
case get(player_id) of
?UNDEFINED -> ok;
Player_Id ->
OldPid = ?ETS_LOOKUP_ELEMENT(?GC_PLAYER_INFO_ETS, Player_Id, #gc_player_info_ets.pid),
?IF(OldPid =:= self(),
begin
ets:delete(?GC_PLAYER_INFO_ETS, Player_Id)
end)
end,
ok;
loop(_Socket, <<>>) ->
ok;
loop(Socket, Data) ->
[{Protocol, JsonData}] = jsx:decode(Data),
receive_data(Socket, binary_to_list(Protocol), JsonData).
receive_data(Socket, "0x01", _) ->
PrivateKey = uuid:to_string(uuid:uuid1()),
Login_name = erlang:phash2(PrivateKey),
Json = jsx:encode([{<<"resultCode">>, 0}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#01},
{
<<"data">>,
{<<"account">>, integer_to_binary(Login_name)}
]]
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Json)),
ok;
receive_data(Socket, "0x02", NewPlayerId) ->
case ets:lookup(?GC_PLAYER_INFO_ETS, NewPlayerId) of
[#gc_player_info_ets{status = OldStatus, pid = OldPid}] ->
if
ets:insert(?GC_PLAYER_INFO_ETS, #gc_player_info_ets{player_id = NewPlayerId, socket = Socket, status = ?PLAYER_LOGIN_STATUS, pid = self()}),
put(player_id, NewPlayerId);
OldStatus =:= ?PLAYER_LOGINED_STATUS andalso OldPid =/= self() ->
gen_tcp:send(Socket, ?packwebtxtmsg(Msg)),
ok;
ok;
OldStatus =:= ?PLAYER_LOGINED_STATUS andalso OldPid =:= self() ->
已经登陆了 这里暂时不做处理 todo 添加log 为什么客户端会发送了两次login消息
ok;
true ->
Msg = jsx:encode([{<<"resultCode">>, -5}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#01},
{
<<"data">>,
[]
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Msg)),
ok
end;
put(player_id, NewPlayerId),
ets:insert(?GC_PLAYER_INFO_ETS, #gc_player_info_ets{player_id = NewPlayerId, pid = self(), socket = Socket, status = ?PLAYER_LOGIN_STATUS})
end;
receive_data(Socket, "0x03_web_visitor", Json_data) ->
put(send_flag_0x03, no),
[{<<"senderid">>, Senderid},
{<<"toid">>, Toid},
{<<"nick">>, Nick},
{<<"msgtype">>, Msgtype},
{<<"content">>, Content},
{<<"sendtime">>, Sendtime},
{<<"msgid">>, Msgid}] = Json_data,
if Toid == <<>>
->
UsrLst = ets:tab2list(?GC_PLAYER_INFO_ETS),
lists:dropwhile(
fun(L) ->
#gc_player_info_ets{socket = So, player_id = NewPlayerId} = L,
Left_value = string:left(binary_to_list(NewPlayerId), 2),
io:format("Left_value ~p~n", [Left_value]),
if Left_value == "kf"
->
Json_t = jsx:encode([{<<"resultCode">>, 0}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#05},
{
<<"data">>, Json_data
}
]),
$ 05 给客服消息
gen_tcp:send(So, ?packwebtxtmsg(Json_t)),
0x06 协议 给顾客 返回客服id
Json = jsx:encode([{<<"resultCode">>, 0}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#06},
{
<<"data">>,
[[
{<<"toid">>, NewPlayerId}
]]
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Json)),
put(send_flag_0x03, yes),
false;
true ->
true
end
end, UsrLst),
Flag = get(send_flag_0x03),
if Flag == no ->
io:format("客服未在线 返回错误码 -1~n"),
Json = jsx:encode([{<<"resultCode">>, -1}, {<<"resultMsg">>, <<"leave">>}, {<<"protocol">>, 16#03},
{
<<"data">>,
[
]
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Json));
true -> ok
end;
true ->
io : format("~ts ~ n " , [ { " 与 " , Toid , " 对话 " } ] ) ,
UsrLst = ets:tab2list(?GC_PLAYER_INFO_ETS),
lists:dropwhile(
fun(L) ->
#gc_player_info_ets{socket = So, player_id = NewPlayerId} = L,
io:format("~p~n", [{NewPlayerId, Toid}]),
if Toid == NewPlayerId
->
io:format("客服在线 推送信息~n"),
Json_t1 = jsx:encode([{<<"resultCode">>, 0}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#05},
{
<<"data">>, Json_data
}
]),
gen_tcp:send(So, ?packwebtxtmsg(Json_t1)),
put(send_flag_0x03, yes),
false;
true ->
true
end
end,
UsrLst),
Flag = get(send_flag_0x03),
if Flag == no ->
io:format("客服已离线 返回错误码 -2~n"),
Json = jsx:encode([{<<"resultCode">>, -2}, {<<"resultMsg">>, <<"leave">>}, {<<"protocol">>, 16#03},
{
<<"data">>,
[
]
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Json));
true -> ok
end
end,
ok;
0x04 客服 给
receive_data(Socket, "0x04", Json_data) ->
Json_list = jsx:decode(Json_data),
[{<<"senderid">>, Senderid},
{<<"toid">>, Toid},
{<<"nick">>, Nick},
{<<"msgtype">>, Msgtype},
{<<"content">>, Content},
{<<"sendtime">>, Sendtime},
{<<"msgid">>, Msgid}] = Json_list,
io : format("~ts ~ n " , [ { " 与 " , Toid , " 对话 " } ] ) ,
io:format("aaaaaaaaaaaaaaaaaaaaaaaaaaa~n"),
UsrLst = ets:tab2list(?GC_PLAYER_INFO_ETS),
put(send_flag_0x04, no),
io:format("bbbbbbbbbbbbbbbbbbbbbbbbbbb~n"),
lists:dropwhile(
fun(L) ->
#gc_player_info_ets{socket = So, player_id = NewPlayerId} = L,
io:format("~p~n", [{NewPlayerId, Toid}]),
if Toid == NewPlayerId
->
io:format("顾客在线 推送信息 ~n"),
Json_t1 = jsx:encode([{<<"resultCode">>, 0}, {<<"resultMsg">>, <<"ok">>}, {<<"protocol">>, 16#05},
{
<<"data">>, Json_data
}
]),
gen_tcp:send(So, ?packwebtxtmsg(Json_t1)),
put(send_flag_0x04, yes),
false;
true ->
true
end
end,
UsrLst),
Flagg = get(send_flag_0x04),
if Flagg == no
访客已离线
Json_err = jsx:encode([{<<"resultCode">>, -3}, {<<"resultMsg">>, <<"leave">>}, {<<"protocol">>, 16#03},
{
<<"data">>, Toid
}
]),
gen_tcp:send(Socket, ?packwebtxtmsg(Json_err));
true -> ok
end,
ok;
receive_data(Socket, "0x07", _) ->
{ok, {IP_Address, Port}} = inet:peername(Socket),
io:format("心跳包 ~p~n~n", [{IP_Address, Port}]),
ok.
|
0ca52e709d21b88de8c496f5a59319b0dcf7c2742fc145188252dd4288061bf8 | filipanselmo11/Lisp | 3.lisp | (defun quadrado(n) (* n n))
(defun soma(l) (cond ((equal nil l) 0) ((integerp(car l)) (if(evenp(car l)) (+ (quadrado(car l)) (soma(cdr l))) (soma(cdr l)))) (t (soma(cdr l)))))
(print (soma '(1 ss 0 4 a 8 gfy 3)))
| null | https://raw.githubusercontent.com/filipanselmo11/Lisp/0395bbfa40280ad0d9a9eab3378d813ec89e195a/3.lisp | lisp | (defun quadrado(n) (* n n))
(defun soma(l) (cond ((equal nil l) 0) ((integerp(car l)) (if(evenp(car l)) (+ (quadrado(car l)) (soma(cdr l))) (soma(cdr l)))) (t (soma(cdr l)))))
(print (soma '(1 ss 0 4 a 8 gfy 3)))
| |
641567b64acb75502c8d848baef1744c470c41e7d14e52592f4c53732e71e163 | NoahTheDuke/netrunner-data | data.clj | (ns nr-data.data
(:require
[clojure.edn :as edn]
[clojure.java.io :as io]
[clojure.set :refer [rename-keys union]]
[clojure.string :as str]
[cond-plus.core :refer [cond+]]
[nr-data.utils :refer [cards->map prune-null-fields]]))
(defn read-edn-file
[file-path]
(edn/read-string (slurp file-path)))
(defn load-edn-from-dir
[file-path]
(->> (io/file file-path)
(file-seq)
(filter (fn [file] (and (.isFile file)
(str/ends-with? file ".edn"))))
(map read-edn-file)
(flatten)
(into [])))
(comment
(load-edn-from-dir "edn/set-cards")
)
(defn load-data
([filename] (load-data filename {:id :code}))
([filename kmap]
(cards->map
(for [m (read-edn-file (str "edn/" filename ".edn"))]
(rename-keys m kmap)))))
(defn load-sets
[cycles]
(cards->map :id
(for [s (read-edn-file "edn/sets.edn")
:let [cy (get cycles (:cycle-id s))]]
{:available (or (:date-release s) "4096-01-01")
:bigbox (:deluxe s)
:code (:code s)
:cycle (:name cy)
:cycle_code (:cycle-id s)
:cycle_position (:position cy)
:date-release (:date-release s)
:ffg-id (:ffg-id s)
:id (:id s)
:name (:name s)
:position (:position s)
:size (:size s)})))
(defn merge-sets-and-cards
[set-cards raw-cards]
(map #(merge (get raw-cards (:card-id %)) %) set-cards))
(defn get-cost
[card]
(let [cost (:cost card)]
(cond+
[(= "X" cost) 0]
[cost]
[(case (:type card)
(:asset :event :hardware :operation :program :resource :upgrade) 0
nil)])))
(defn get-strength
[card]
(or (:strength card)
(case (:type card)
(:ice :program) 0
nil)))
(defn get-set->cards
[cards]
(reduce (fn [m [set-id card-id]]
(if (contains? m set-id)
(assoc m set-id (conj (get m set-id) card-id))
(assoc m set-id #{card-id})))
{}
(map (juxt :set-id :card-id) cards)))
(defn get-cycle->sets
[sets]
(into {}
(for [[f sts] (group-by :cycle_code (vals sets))]
{f (into #{} (map :id sts))})))
(defn get-format->cards
[formats set->cards cycle->sets]
(into {}
(for [[k f] formats
:let [cards (:cards f)
sets (:sets f)
cycles (:cycles f)]]
{k (apply union
(concat
(into #{} cards)
(for [s sets]
(get set->cards s))
(for [cy cycles
s (get cycle->sets cy)]
(get set->cards s))))})))
(defn generate-formats
[sets cards formats mwls]
(let [set->cards (get-set->cards cards)
cycle->sets (get-cycle->sets sets)
format->cards (get-format->cards formats set->cards cycle->sets)]
(into {}
(for [card cards
:let [id (:id card)]]
{id (into {}
(for [[f cs] format->cards
:let [mwl (get-in formats [f :mwl])]]
{(keyword f)
(cond
got ta check mwl first
(get-in mwls [mwl :cards id])
(let [restrictions (get-in mwls [mwl :cards id])]
(merge
(when (:deck-limit restrictions)
{:banned true})
(when (:is-restricted restrictions)
{:legal true :restricted true})
(when (:points restrictions)
{:legal true :points (:points restrictions)})))
;; then we can check if the card is on the list
(contains? cs id)
{:legal true}
neither mwl nor in the format
:else
{:rotated true})}))}))))
(defn link-previous-versions
[[_ cards]]
(if (= 1 (count cards))
(first cards)
(assoc (last cards)
:previous-versions
(->> cards
butlast
(mapv #(select-keys % [:code :set_code]))))))
(defn print-null-subtypes
[subtypes card subtype-keyword]
(let [subtype-string (get subtypes subtype-keyword)]
(when-not subtype-string
(println (:title card) "has a malformed subtype:" subtype-keyword))
(:name subtype-string)))
(defn load-cards
[sides factions types subtypes sets formats mwls]
(let [
set-cards (load-edn-from-dir "edn/set-cards")
raw-cards (cards->map :id (load-edn-from-dir "edn/cards"))
cards (merge-sets-and-cards set-cards raw-cards)
card->formats (generate-formats sets cards formats mwls)
]
(->> (for [card cards
:let [s (get sets (:set-id card))]]
{:advancementcost (:advancement-requirement card)
:agendapoints (:agenda-points card)
:baselink (:base-link card)
:code (:code card)
:cost (get-cost card)
:cycle_code (:cycle_code s)
:date-release (:date-release s)
:deck-limit (:deck-limit card)
:faction (:name (get factions (:faction card)))
:factioncost (:influence-cost card)
:format (get card->formats (:id card))
:influencelimit (:influence-limit card)
:memoryunits (:memory-cost card)
:minimumdecksize (:minimum-deck-size card)
:normalizedtitle (:id card)
:number (:position card)
:quantity (:quantity card)
:rotated (= :rotated (:standard (get card->formats (:id card))))
:set_code (:code s)
:setname (:name s)
:side (:name (get sides (:side card)))
:strength (get-strength card)
:subtype (when (seq (:subtype card))
(str/join " - " (map #(print-null-subtypes subtypes card %) (:subtype card))))
:subtypes (mapv #(print-null-subtypes subtypes card %) (:subtype card))
:text (:text card)
:title (:title card)
:trash (:trash-cost card)
:type (:name (get types (:type card)))
:uniqueness (:uniqueness card)})
(map prune-null-fields)
(sort-by :code)
(map #(dissoc % :date-release))
(group-by :normalizedtitle)
(map link-previous-versions)
(cards->map))))
(defn mwls [] (load-data "mwls" {:id :code}))
(defn sides [] (load-data "sides"))
(defn factions [] (load-data "factions"))
(defn types [] (load-data "types"))
(defn subtypes [] (load-data "subtypes"))
(defn formats [] (load-data "formats"))
(defn cycles [] (load-data "cycles"))
(defn sets
([] (load-sets (cycles)))
([cycles] (load-sets cycles)))
(defn combined-cards [] (load-cards (sides) (factions) (types) (subtypes) (sets) (formats) (mwls)))
(defn set-cards [] (load-edn-from-dir "edn/set-cards"))
(defn raw-cards [] (cards->map :id (load-edn-from-dir "edn/cards")))
(defn cards [] (merge-sets-and-cards (set-cards) (raw-cards)))
(defn card->formats [] (generate-formats (sets) (cards) (formats) (mwls)))
| null | https://raw.githubusercontent.com/NoahTheDuke/netrunner-data/8a38dd6a430033310ffc31b6796c9507799d9287/src/nr_data/data.clj | clojure | then we can check if the card is on the list | (ns nr-data.data
(:require
[clojure.edn :as edn]
[clojure.java.io :as io]
[clojure.set :refer [rename-keys union]]
[clojure.string :as str]
[cond-plus.core :refer [cond+]]
[nr-data.utils :refer [cards->map prune-null-fields]]))
(defn read-edn-file
[file-path]
(edn/read-string (slurp file-path)))
(defn load-edn-from-dir
[file-path]
(->> (io/file file-path)
(file-seq)
(filter (fn [file] (and (.isFile file)
(str/ends-with? file ".edn"))))
(map read-edn-file)
(flatten)
(into [])))
(comment
(load-edn-from-dir "edn/set-cards")
)
(defn load-data
([filename] (load-data filename {:id :code}))
([filename kmap]
(cards->map
(for [m (read-edn-file (str "edn/" filename ".edn"))]
(rename-keys m kmap)))))
(defn load-sets
[cycles]
(cards->map :id
(for [s (read-edn-file "edn/sets.edn")
:let [cy (get cycles (:cycle-id s))]]
{:available (or (:date-release s) "4096-01-01")
:bigbox (:deluxe s)
:code (:code s)
:cycle (:name cy)
:cycle_code (:cycle-id s)
:cycle_position (:position cy)
:date-release (:date-release s)
:ffg-id (:ffg-id s)
:id (:id s)
:name (:name s)
:position (:position s)
:size (:size s)})))
(defn merge-sets-and-cards
[set-cards raw-cards]
(map #(merge (get raw-cards (:card-id %)) %) set-cards))
(defn get-cost
[card]
(let [cost (:cost card)]
(cond+
[(= "X" cost) 0]
[cost]
[(case (:type card)
(:asset :event :hardware :operation :program :resource :upgrade) 0
nil)])))
(defn get-strength
[card]
(or (:strength card)
(case (:type card)
(:ice :program) 0
nil)))
(defn get-set->cards
[cards]
(reduce (fn [m [set-id card-id]]
(if (contains? m set-id)
(assoc m set-id (conj (get m set-id) card-id))
(assoc m set-id #{card-id})))
{}
(map (juxt :set-id :card-id) cards)))
(defn get-cycle->sets
[sets]
(into {}
(for [[f sts] (group-by :cycle_code (vals sets))]
{f (into #{} (map :id sts))})))
(defn get-format->cards
[formats set->cards cycle->sets]
(into {}
(for [[k f] formats
:let [cards (:cards f)
sets (:sets f)
cycles (:cycles f)]]
{k (apply union
(concat
(into #{} cards)
(for [s sets]
(get set->cards s))
(for [cy cycles
s (get cycle->sets cy)]
(get set->cards s))))})))
(defn generate-formats
[sets cards formats mwls]
(let [set->cards (get-set->cards cards)
cycle->sets (get-cycle->sets sets)
format->cards (get-format->cards formats set->cards cycle->sets)]
(into {}
(for [card cards
:let [id (:id card)]]
{id (into {}
(for [[f cs] format->cards
:let [mwl (get-in formats [f :mwl])]]
{(keyword f)
(cond
got ta check mwl first
(get-in mwls [mwl :cards id])
(let [restrictions (get-in mwls [mwl :cards id])]
(merge
(when (:deck-limit restrictions)
{:banned true})
(when (:is-restricted restrictions)
{:legal true :restricted true})
(when (:points restrictions)
{:legal true :points (:points restrictions)})))
(contains? cs id)
{:legal true}
neither mwl nor in the format
:else
{:rotated true})}))}))))
(defn link-previous-versions
[[_ cards]]
(if (= 1 (count cards))
(first cards)
(assoc (last cards)
:previous-versions
(->> cards
butlast
(mapv #(select-keys % [:code :set_code]))))))
(defn print-null-subtypes
[subtypes card subtype-keyword]
(let [subtype-string (get subtypes subtype-keyword)]
(when-not subtype-string
(println (:title card) "has a malformed subtype:" subtype-keyword))
(:name subtype-string)))
(defn load-cards
[sides factions types subtypes sets formats mwls]
(let [
set-cards (load-edn-from-dir "edn/set-cards")
raw-cards (cards->map :id (load-edn-from-dir "edn/cards"))
cards (merge-sets-and-cards set-cards raw-cards)
card->formats (generate-formats sets cards formats mwls)
]
(->> (for [card cards
:let [s (get sets (:set-id card))]]
{:advancementcost (:advancement-requirement card)
:agendapoints (:agenda-points card)
:baselink (:base-link card)
:code (:code card)
:cost (get-cost card)
:cycle_code (:cycle_code s)
:date-release (:date-release s)
:deck-limit (:deck-limit card)
:faction (:name (get factions (:faction card)))
:factioncost (:influence-cost card)
:format (get card->formats (:id card))
:influencelimit (:influence-limit card)
:memoryunits (:memory-cost card)
:minimumdecksize (:minimum-deck-size card)
:normalizedtitle (:id card)
:number (:position card)
:quantity (:quantity card)
:rotated (= :rotated (:standard (get card->formats (:id card))))
:set_code (:code s)
:setname (:name s)
:side (:name (get sides (:side card)))
:strength (get-strength card)
:subtype (when (seq (:subtype card))
(str/join " - " (map #(print-null-subtypes subtypes card %) (:subtype card))))
:subtypes (mapv #(print-null-subtypes subtypes card %) (:subtype card))
:text (:text card)
:title (:title card)
:trash (:trash-cost card)
:type (:name (get types (:type card)))
:uniqueness (:uniqueness card)})
(map prune-null-fields)
(sort-by :code)
(map #(dissoc % :date-release))
(group-by :normalizedtitle)
(map link-previous-versions)
(cards->map))))
(defn mwls [] (load-data "mwls" {:id :code}))
(defn sides [] (load-data "sides"))
(defn factions [] (load-data "factions"))
(defn types [] (load-data "types"))
(defn subtypes [] (load-data "subtypes"))
(defn formats [] (load-data "formats"))
(defn cycles [] (load-data "cycles"))
(defn sets
([] (load-sets (cycles)))
([cycles] (load-sets cycles)))
(defn combined-cards [] (load-cards (sides) (factions) (types) (subtypes) (sets) (formats) (mwls)))
(defn set-cards [] (load-edn-from-dir "edn/set-cards"))
(defn raw-cards [] (cards->map :id (load-edn-from-dir "edn/cards")))
(defn cards [] (merge-sets-and-cards (set-cards) (raw-cards)))
(defn card->formats [] (generate-formats (sets) (cards) (formats) (mwls)))
|
e16da1f3849e7958812caad46228c82e4147dc8f38d7ad4d5da899b6955c9a54 | rntz/moxy | values.rkt | #lang racket
;;(require (only-in racket [hash-map racket/hash-map]))
(require "util.rkt")
(require "tags.rkt")
;; Hashtable interface. Modelled on
;; -0.5.5.1/docs/Data-Map-Strict.html
;;
;; Needs to be defined here for access to Just, None.
;; TODO: more functionality
(provide
hash-empty? hash-count ;re-exports
hash-empty hash-single hash-from-list hash-from-keys-values
hash-has? hash-lookup hash-get
hash-put hash-delete
;; hash-put-with hash-alter hash-map
hash-union)
(define hash-empty (hash))
(define (hash-single k v) (hash k v))
(define (hash-from-list kvs)
;; convert a list-list like ((a x) (b y)) to a cons-list ((a . x) (b . y)).
(make-immutable-hash (map (lambda (x) (apply cons x)) kvs)))
(define (hash-from-keys-values keys values)
(hash-from-list (zip keys values)))
(define (hash-has? k h) (hash-has-key? h k))
(define (hash-lookup k h)
(if (hash-has? k h) (Just (hash-ref h k)) None))
(define (hash-get k h [or-else #f])
(cond
[(procedure? or-else) (hash-ref h k or-else)]
[(not or-else) (hash-ref h k)]
[else (error "or-else argument to hash-get must be a procedure or #f")]))
(define (hash-put k v h) (hash-set h k v))
;; (define (hash-put-with k v h f)
;; (hash-put k (maybe (hash-lookup k h) v (lambda (x) (f k x v))) h))
(define (hash-delete k h) (hash-remove h k))
;; ;; f takes (Maybe v) -> (Maybe v)
;; (define (hash-alter k h f)
;; (match (f (hash-lookup k h))
;; [(None) (hash-delete k h)]
;; [(Just x) (hash-put k x h)]))
;; ;; (f k v) --> new-v
;; (define (hash-map h f)
;; (make-immutable-hash
;; (racket/hash-map h (lambda (k v) (cons k (f k v))))))
(define (hash-union a b [combine (lambda (k x y) y)])
(for/fold ([a a])
([(key v2) b])
(hash-update a key (lambda (v1) (combine key v1 v2)) (lambda () v2))))
Builtin tags .
(provide
(tag-out L R True False Just None Ok Err ExtPoint)
truthy? falsey? truthify
maybe? maybe from-maybe maybe-bind maybe-map maybe-filter Maybe/c)
;; directions. built-in for associativity purposes.
(define-tags L R)
;; booleans
(define-tags True False)
(define (falsey? x)
(if (not (boolean? x)) (False? x)
(error "never call falsey? on a boolean!")))
(define (truthy? x) (not (falsey? x)))
(define (truthify x)
(if (boolean? x) (if x True False)
(error "never call truthify on a non-boolean!")))
;; maybe
(define-tags None (Just value))
(define (maybe? x) (or (Just? x) (None? x)))
(define (Maybe/c c)
(or/c None?
(struct/c tagged
(lambda (x) (equal? tag:Just x))
(vector-immutable/c c))))
(define (maybe v default inject)
(match v [(None) default] [(Just x) (inject x)]))
(define (from-maybe v default) (maybe v default identity))
(define (maybe-bind v f) (maybe v None f))
(define (maybe-map v f) (maybe-bind v (lambda (x) (Just (f x)))))
(define (maybe-filter v ok?)
(match v
[(Just x) (if (ok? x) v None)]
[(None) v]))
(define-tags (Ok value) (Err value))
(define-tag ExtPoint name uid join empty)
| null | https://raw.githubusercontent.com/rntz/moxy/18015aed1596ae55658be4ac9eb5cbb3debb1644/values.rkt | racket | (require (only-in racket [hash-map racket/hash-map]))
Hashtable interface. Modelled on
-0.5.5.1/docs/Data-Map-Strict.html
Needs to be defined here for access to Just, None.
TODO: more functionality
re-exports
hash-put-with hash-alter hash-map
convert a list-list like ((a x) (b y)) to a cons-list ((a . x) (b . y)).
(define (hash-put-with k v h f)
(hash-put k (maybe (hash-lookup k h) v (lambda (x) (f k x v))) h))
;; f takes (Maybe v) -> (Maybe v)
(define (hash-alter k h f)
(match (f (hash-lookup k h))
[(None) (hash-delete k h)]
[(Just x) (hash-put k x h)]))
;; (f k v) --> new-v
(define (hash-map h f)
(make-immutable-hash
(racket/hash-map h (lambda (k v) (cons k (f k v))))))
directions. built-in for associativity purposes.
booleans
maybe | #lang racket
(require "util.rkt")
(require "tags.rkt")
(provide
hash-empty hash-single hash-from-list hash-from-keys-values
hash-has? hash-lookup hash-get
hash-put hash-delete
hash-union)
(define hash-empty (hash))
(define (hash-single k v) (hash k v))
(define (hash-from-list kvs)
(make-immutable-hash (map (lambda (x) (apply cons x)) kvs)))
(define (hash-from-keys-values keys values)
(hash-from-list (zip keys values)))
(define (hash-has? k h) (hash-has-key? h k))
(define (hash-lookup k h)
(if (hash-has? k h) (Just (hash-ref h k)) None))
(define (hash-get k h [or-else #f])
(cond
[(procedure? or-else) (hash-ref h k or-else)]
[(not or-else) (hash-ref h k)]
[else (error "or-else argument to hash-get must be a procedure or #f")]))
(define (hash-put k v h) (hash-set h k v))
(define (hash-delete k h) (hash-remove h k))
(define (hash-union a b [combine (lambda (k x y) y)])
(for/fold ([a a])
([(key v2) b])
(hash-update a key (lambda (v1) (combine key v1 v2)) (lambda () v2))))
Builtin tags .
(provide
(tag-out L R True False Just None Ok Err ExtPoint)
truthy? falsey? truthify
maybe? maybe from-maybe maybe-bind maybe-map maybe-filter Maybe/c)
(define-tags L R)
(define-tags True False)
(define (falsey? x)
(if (not (boolean? x)) (False? x)
(error "never call falsey? on a boolean!")))
(define (truthy? x) (not (falsey? x)))
(define (truthify x)
(if (boolean? x) (if x True False)
(error "never call truthify on a non-boolean!")))
(define-tags None (Just value))
(define (maybe? x) (or (Just? x) (None? x)))
(define (Maybe/c c)
(or/c None?
(struct/c tagged
(lambda (x) (equal? tag:Just x))
(vector-immutable/c c))))
(define (maybe v default inject)
(match v [(None) default] [(Just x) (inject x)]))
(define (from-maybe v default) (maybe v default identity))
(define (maybe-bind v f) (maybe v None f))
(define (maybe-map v f) (maybe-bind v (lambda (x) (Just (f x)))))
(define (maybe-filter v ok?)
(match v
[(Just x) (if (ok? x) v None)]
[(None) v]))
(define-tags (Ok value) (Err value))
(define-tag ExtPoint name uid join empty)
|
cf40514f1ae11de1b08e505e7fa68a28e60b6c48f1173a25bf1594136c413f90 | tcsprojects/pgsolver | solverregistry.mli | open Paritygame
type global_solver_factory = string array -> global_solver
val register_solver_factory: global_solver_factory -> string -> string -> string -> unit
val register_solver: global_solver -> string -> string -> string -> unit
val mem_solver: string -> bool
val find_solver: string -> global_solver_factory * string * string
val enum_solvers: (global_solver_factory -> string -> string -> string -> unit) -> unit
val fold_solvers: (global_solver_factory -> string -> string -> string -> 'a -> 'a) -> 'a -> 'a
type partial_solver_factory = string array -> partial_solver
val register_partial_solver_factory: partial_solver_factory -> string -> string -> string -> unit
val register_partial_solver: partial_solver -> string -> string -> string -> unit
val mem_partial_solver: string -> bool
val find_partial_solver: string -> partial_solver_factory * string * string
val enum_partial_solvers: (partial_solver_factory -> string -> string -> string -> unit) -> unit
val fold_partial_solvers: (partial_solver_factory -> string -> string -> string -> 'a -> 'a) -> 'a -> 'a | null | https://raw.githubusercontent.com/tcsprojects/pgsolver/b0c31a8b367c405baed961385ad645d52f648325/src/paritygame/solverregistry.mli | ocaml | open Paritygame
type global_solver_factory = string array -> global_solver
val register_solver_factory: global_solver_factory -> string -> string -> string -> unit
val register_solver: global_solver -> string -> string -> string -> unit
val mem_solver: string -> bool
val find_solver: string -> global_solver_factory * string * string
val enum_solvers: (global_solver_factory -> string -> string -> string -> unit) -> unit
val fold_solvers: (global_solver_factory -> string -> string -> string -> 'a -> 'a) -> 'a -> 'a
type partial_solver_factory = string array -> partial_solver
val register_partial_solver_factory: partial_solver_factory -> string -> string -> string -> unit
val register_partial_solver: partial_solver -> string -> string -> string -> unit
val mem_partial_solver: string -> bool
val find_partial_solver: string -> partial_solver_factory * string * string
val enum_partial_solvers: (partial_solver_factory -> string -> string -> string -> unit) -> unit
val fold_partial_solvers: (partial_solver_factory -> string -> string -> string -> 'a -> 'a) -> 'a -> 'a | |
dbd892971d234b155bb7b68941c58668d0be8c82d475303a9053cad92e1b82c3 | tank-bohr/bookish_spork | bookish_spork_transport.erl | -module(bookish_spork_transport).
-export([
listen/2,
accept/1,
recv/1,
read_raw/2,
send/2,
close/1,
shutdown/1
]).
-export([
socket/1,
ssl_ext/1,
ssl_info/1,
connection_id/1
]).
-type callback_module() :: bookish_spork_tcp | bookish_spork_ssl.
-type socket() :: gen_tcp:socket() | ssl:sslsocket().
-record(listen, {
module :: callback_module(),
socket :: socket()
}).
-record(transport, {
id :: binary(),
module :: callback_module(),
socket :: socket(),
ssl_ext = #{} :: ssl:protocol_extensions()
}).
-callback listen(Port, Options) -> Result when
Port :: inet:port_number(),
Options :: [gen_tcp:listen_option()],
Result :: {ok, ListenSocket} | {error, Reason},
ListenSocket :: socket(),
Reason :: system_limit | inet:posix().
-callback accept(ListenSocket) -> Result when
ListenSocket :: socket(),
Result :: {ok, Socket} | {ok, Socket, Ext} | {error, Reason},
Socket :: socket(),
Ext :: ssl:protocol_extensions(),
Reason :: closed | timeout | system_limit | inet:posix().
-callback recv(Socket, Length) -> {ok, Packet} | {error, Reason} when
Socket :: socket(),
Length :: non_neg_integer(),
Packet :: term(),
Reason :: closed | timeout | inet:posix().
-callback send(Socket, Packet) -> ok | {error, Reason} when
Socket :: socket(),
Packet :: iodata(),
Reason :: closed | inet:posix().
-callback close(socket()) -> ok.
-callback shutdown(Socket, How) -> ok | {error, Reason} when
Socket :: socket(),
How :: read | write | read_write,
Reason :: inet:posix().
-callback setopts(Socket, Options) -> ok | {error, Reason} when
Socket :: socket(),
Options :: [gen_tcp:socket_setopt()],
Reason :: any().
-callback connection_information(Socket) -> {ok, Result} | {error, Reason} when
Socket :: socket(),
Result :: ssl:connection_info(),
Reason :: any().
-opaque t() :: #transport{}.
-opaque listen() :: #listen{}.
-export_type([
t/0,
listen/0,
socket/0
]).
-define(LISTEN_OPTIONS, [
binary,
{packet, http},
{active, false},
{reuseaddr, true}
]).
-define(IS_SSL_SOCKET(Socket), is_tuple(Socket) andalso element(1, Socket) =:= sslsocket).
-spec listen(callback_module(), inet:port_number()) -> listen().
listen(Module, Port) ->
{ok, Socket} = Module:listen(Port, ?LISTEN_OPTIONS),
#listen{socket = Socket, module = Module}.
-spec accept(listen()) -> t().
accept(#listen{socket = ListenSocket, module = Module}) ->
case Module:accept(ListenSocket) of
{ok, Socket, Ext} ->
#transport{id = generate_id(), module = Module, socket = Socket, ssl_ext = Ext};
{ok, Socket} ->
#transport{id = generate_id(), module = Module, socket = Socket}
end.
-spec recv(t()) -> {ok, term()} | {error, term()}.
recv(#transport{socket = Socket, module = Module}) ->
Module:recv(Socket, 0).
-spec read_raw(t(), integer()) -> binary().
read_raw(_, 0) ->
<<>>;
read_raw(#transport{socket = Socket, module = Module}, ContentLength) ->
Module:setopts(Socket, [{packet, raw}]),
{ok, Body} = Module:recv(Socket, ContentLength),
Module:setopts(Socket, [{packet, http}]),
Body.
-spec send(t(), iodata()) -> ok.
send(#transport{socket = Socket, module = Module}, String) ->
Module:send(Socket, [String]).
-spec close(listen() | t()) -> ok.
close(#listen{socket = Socket, module = Module}) ->
Module:close(Socket);
close(#transport{socket = Socket, module = Module}) ->
Module:close(Socket).
-spec shutdown(t()) -> ok.
shutdown(#transport{socket = Socket, module = Module}) ->
Module:shutdown(Socket, read_write).
-spec socket(t()) -> socket().
socket(#transport{socket = Socket}) ->
Socket.
-spec connection_id(t()) -> binary().
connection_id(#transport{id = Id}) ->
Id.
-spec ssl_ext(t()) -> ssl:protocol_extensions().
ssl_ext(#transport{ssl_ext = Ext}) ->
Ext.
-spec ssl_info(t()) -> proplists:proplist().
ssl_info(#transport{socket = Socket, module = Module}) ->
{ok, Info} = Module:connection_information(Socket),
Info.
-spec generate_id() -> Id :: binary().
%% @doc generates unique id to be a connection id
generate_id() ->
Bytes = crypto:strong_rand_bytes(7),
Base64 = base64:encode(Bytes),
string:trim(Base64, trailing, "=").
| null | https://raw.githubusercontent.com/tank-bohr/bookish_spork/7e3acde6de789723ab6302707458ac6d13b9df58/src/bookish_spork_transport.erl | erlang | @doc generates unique id to be a connection id | -module(bookish_spork_transport).
-export([
listen/2,
accept/1,
recv/1,
read_raw/2,
send/2,
close/1,
shutdown/1
]).
-export([
socket/1,
ssl_ext/1,
ssl_info/1,
connection_id/1
]).
-type callback_module() :: bookish_spork_tcp | bookish_spork_ssl.
-type socket() :: gen_tcp:socket() | ssl:sslsocket().
-record(listen, {
module :: callback_module(),
socket :: socket()
}).
-record(transport, {
id :: binary(),
module :: callback_module(),
socket :: socket(),
ssl_ext = #{} :: ssl:protocol_extensions()
}).
-callback listen(Port, Options) -> Result when
Port :: inet:port_number(),
Options :: [gen_tcp:listen_option()],
Result :: {ok, ListenSocket} | {error, Reason},
ListenSocket :: socket(),
Reason :: system_limit | inet:posix().
-callback accept(ListenSocket) -> Result when
ListenSocket :: socket(),
Result :: {ok, Socket} | {ok, Socket, Ext} | {error, Reason},
Socket :: socket(),
Ext :: ssl:protocol_extensions(),
Reason :: closed | timeout | system_limit | inet:posix().
-callback recv(Socket, Length) -> {ok, Packet} | {error, Reason} when
Socket :: socket(),
Length :: non_neg_integer(),
Packet :: term(),
Reason :: closed | timeout | inet:posix().
-callback send(Socket, Packet) -> ok | {error, Reason} when
Socket :: socket(),
Packet :: iodata(),
Reason :: closed | inet:posix().
-callback close(socket()) -> ok.
-callback shutdown(Socket, How) -> ok | {error, Reason} when
Socket :: socket(),
How :: read | write | read_write,
Reason :: inet:posix().
-callback setopts(Socket, Options) -> ok | {error, Reason} when
Socket :: socket(),
Options :: [gen_tcp:socket_setopt()],
Reason :: any().
-callback connection_information(Socket) -> {ok, Result} | {error, Reason} when
Socket :: socket(),
Result :: ssl:connection_info(),
Reason :: any().
-opaque t() :: #transport{}.
-opaque listen() :: #listen{}.
-export_type([
t/0,
listen/0,
socket/0
]).
-define(LISTEN_OPTIONS, [
binary,
{packet, http},
{active, false},
{reuseaddr, true}
]).
-define(IS_SSL_SOCKET(Socket), is_tuple(Socket) andalso element(1, Socket) =:= sslsocket).
-spec listen(callback_module(), inet:port_number()) -> listen().
listen(Module, Port) ->
{ok, Socket} = Module:listen(Port, ?LISTEN_OPTIONS),
#listen{socket = Socket, module = Module}.
-spec accept(listen()) -> t().
accept(#listen{socket = ListenSocket, module = Module}) ->
case Module:accept(ListenSocket) of
{ok, Socket, Ext} ->
#transport{id = generate_id(), module = Module, socket = Socket, ssl_ext = Ext};
{ok, Socket} ->
#transport{id = generate_id(), module = Module, socket = Socket}
end.
-spec recv(t()) -> {ok, term()} | {error, term()}.
recv(#transport{socket = Socket, module = Module}) ->
Module:recv(Socket, 0).
-spec read_raw(t(), integer()) -> binary().
read_raw(_, 0) ->
<<>>;
read_raw(#transport{socket = Socket, module = Module}, ContentLength) ->
Module:setopts(Socket, [{packet, raw}]),
{ok, Body} = Module:recv(Socket, ContentLength),
Module:setopts(Socket, [{packet, http}]),
Body.
-spec send(t(), iodata()) -> ok.
send(#transport{socket = Socket, module = Module}, String) ->
Module:send(Socket, [String]).
-spec close(listen() | t()) -> ok.
close(#listen{socket = Socket, module = Module}) ->
Module:close(Socket);
close(#transport{socket = Socket, module = Module}) ->
Module:close(Socket).
-spec shutdown(t()) -> ok.
shutdown(#transport{socket = Socket, module = Module}) ->
Module:shutdown(Socket, read_write).
-spec socket(t()) -> socket().
socket(#transport{socket = Socket}) ->
Socket.
-spec connection_id(t()) -> binary().
connection_id(#transport{id = Id}) ->
Id.
-spec ssl_ext(t()) -> ssl:protocol_extensions().
ssl_ext(#transport{ssl_ext = Ext}) ->
Ext.
-spec ssl_info(t()) -> proplists:proplist().
ssl_info(#transport{socket = Socket, module = Module}) ->
{ok, Info} = Module:connection_information(Socket),
Info.
-spec generate_id() -> Id :: binary().
generate_id() ->
Bytes = crypto:strong_rand_bytes(7),
Base64 = base64:encode(Bytes),
string:trim(Base64, trailing, "=").
|
35d6fd0a846b9e192f18801e657d04c93b5d9843b4889754dda3dae7e334f7b2 | ghollisjr/cl-ana | package.lisp | cl - ana is a Common Lisp data analysis library .
Copyright 2013 , 2014
;;;;
This file is part of cl - ana .
;;;;
;;;; cl-ana is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;;; (at your option) any later version.
;;;;
;;;; cl-ana is distributed in the hope that it will be useful, but
;;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;;; General Public License for more details.
;;;;
You should have received a copy of the GNU General Public License
;;;; along with cl-ana. If not, see </>.
;;;;
You may contact ( me ! ) via email at
;;;;
(defpackage #:cl-ana.hash-table-utils
(:use :cl)
(:export :hash-table->alist
:hash-keys
:hash-values
:hmap
:alist->hash-table))
| null | https://raw.githubusercontent.com/ghollisjr/cl-ana/5cb4c0b0c9c4957452ad2a769d6ff9e8d5df0b10/hash-table-utils/package.lisp | lisp |
cl-ana is free software: you can redistribute it and/or modify it
(at your option) any later version.
cl-ana is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
along with cl-ana. If not, see </>.
| cl - ana is a Common Lisp data analysis library .
Copyright 2013 , 2014
This file is part of cl - ana .
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
You may contact ( me ! ) via email at
(defpackage #:cl-ana.hash-table-utils
(:use :cl)
(:export :hash-table->alist
:hash-keys
:hash-values
:hmap
:alist->hash-table))
|
383176ae18b11d4172d9412b2c8bd57dcd43aaef5b397d8000e08f7042197259 | yogthos/memory-hole | db.cljs | (ns memory-hole.db)
(def default-db
;; user should be coeffect on init event, not hard coded as it is here
{:user (js->clj js/user :keywordize-keys true)
:selected-tag "Recent"
:admin/show-all-groups? false
:active-page (when-not js/user :login)
:login-events []})
| null | https://raw.githubusercontent.com/yogthos/memory-hole/925e399b0002d59998d10bd6f54ff3464a4b4ddb/src/cljs/memory_hole/db.cljs | clojure | user should be coeffect on init event, not hard coded as it is here | (ns memory-hole.db)
(def default-db
{:user (js->clj js/user :keywordize-keys true)
:selected-tag "Recent"
:admin/show-all-groups? false
:active-page (when-not js/user :login)
:login-events []})
|
080bd7594d5b9f12bfdc89162acbea4664db4bde6dd17d08bb29f80400948711 | KingMob/clojure-trie-performance | trie02.clj | (ns modulo-lotus.trie.trie02
(:require [clojure.string :refer (split triml)]
[taoensso.tufte :as tufte :refer (defnp p profiled profile)]))
;; From: -contacts/submissions/code/46151337
;; Like 00, but replaces {} with (array-map) - no real change
(defn add [db name]
(update-in db (seq name) (fnil assoc (array-map)) :* true))
(defn count-terminations [db]
(let [terminations (if (:* db) 1 0)]
(reduce +
terminations
(map count-terminations
(vals (dissoc db :*))))))
(defn find-partial [db partial]
(println
(if-let [sub-db (get-in db (seq partial))]
(count-terminations sub-db)
0)))
(defn process-op [db op contact]
(condp = op
"add" (add db contact)
"find" (do
(find-partial db contact)
db)))
(defn run
[]
(let [n (Integer/parseInt (read-line))]
(loop [i n
db (array-map)]
(when (> i 0)
(let [[op contact] (split (read-line) #"\s+")]
(recur (dec i)
(process-op db op contact))))))
(flush))
| null | https://raw.githubusercontent.com/KingMob/clojure-trie-performance/4c0c77799a0763c670fdde70b737e0af936a2bb8/src/modulo_lotus/trie/trie02.clj | clojure | From: -contacts/submissions/code/46151337
Like 00, but replaces {} with (array-map) - no real change | (ns modulo-lotus.trie.trie02
(:require [clojure.string :refer (split triml)]
[taoensso.tufte :as tufte :refer (defnp p profiled profile)]))
(defn add [db name]
(update-in db (seq name) (fnil assoc (array-map)) :* true))
(defn count-terminations [db]
(let [terminations (if (:* db) 1 0)]
(reduce +
terminations
(map count-terminations
(vals (dissoc db :*))))))
(defn find-partial [db partial]
(println
(if-let [sub-db (get-in db (seq partial))]
(count-terminations sub-db)
0)))
(defn process-op [db op contact]
(condp = op
"add" (add db contact)
"find" (do
(find-partial db contact)
db)))
(defn run
[]
(let [n (Integer/parseInt (read-line))]
(loop [i n
db (array-map)]
(when (> i 0)
(let [[op contact] (split (read-line) #"\s+")]
(recur (dec i)
(process-op db op contact))))))
(flush))
|
4e23613dea90ebe26e3b1965b0d5b41bad1b5445704a9966e2ceaebdac9c88cb | bmeurer/ocaml-rbtrees | rbmap.mli | -
* Copyright ( c ) 2007 , < >
*
* Permission is hereby granted , free of charge , to any person obtaining a
* copy of this software and associated documentation files ( the " Software " ) ,
* to deal in the Software without restriction , including without limitation
* the rights to use , copy , modify , merge , publish , distribute , sublicense ,
* and/or sell copies of the Software , and to permit persons to whom the
* Software is furnished to do so , subject to the following conditions :
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software .
*
* THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS
* OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
* LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
* FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE .
* Copyright (c) 2007, Benedikt Meurer <>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*)
This is my implementation of Red - Black Trees for OCaml . It is based upon
* " Red - Black Trees in a Functional Setting " , in " Functional
* Pearls " .
* Red - Black Trees are exposed via a map and a set API , which is designed to
* be compatible with the Map and Set modules in the OCaml standard library
* ( which are implemented using AVL trees ) . You can use the Rbmap and * modules as drop - in replacement for the Map and Set modules .
* "Red-Black Trees in a Functional Setting", Chris Okasaki in "Functional
* Pearls".
* Red-Black Trees are exposed via a map and a set API, which is designed to
* be compatible with the Map and Set modules in the OCaml standard library
* (which are implemented using AVL trees). You can use the Rbmap and Rbset
* modules as drop-in replacement for the Map and Set modules.
*)
module type OrderedType =
sig
type t
(** The type of the map keys. *)
val compare : t -> t -> int
* A total ordering function over the keys .
This is a two - argument function [ f ] such that
[ f e1 e2 ] is zero if the keys [ e1 ] and [ e2 ] are equal ,
[ f e1 e2 ] is strictly negative if [ e1 ] is smaller than [ e2 ] ,
and [ f e1 e2 ] is strictly positive if [ e1 ] is greater than [ e2 ] .
Example : a suitable ordering function is the generic structural
comparison function { ! Pervasives.compare } .
This is a two-argument function [f] such that
[f e1 e2] is zero if the keys [e1] and [e2] are equal,
[f e1 e2] is strictly negative if [e1] is smaller than [e2],
and [f e1 e2] is strictly positive if [e1] is greater than [e2].
Example: a suitable ordering function is the generic structural
comparison function {!Pervasives.compare}. *)
end
(** Input signature of the functor {!Map.Make}. *)
module type S =
sig
type key
(** The type of the map keys. *)
type (+'a) t
(** The type of maps from type [key] to type ['a]. *)
val empty: 'a t
(** The empty map. *)
val is_empty: 'a t -> bool
(** Test whether a map is empty or not. *)
val add: key -> 'a -> 'a t -> 'a t
(** [add x y m] returns a map containing the same bindings as
[m], plus a binding of [x] to [y]. If [x] was already bound
in [m], its previous binding disappears. *)
val find: key -> 'a t -> 'a
(** [find x m] returns the current binding of [x] in [m],
or raises [Not_found] if no such binding exists. *)
val remove: key -> 'a t -> 'a t
(** [remove x m] returns a map containing the same bindings as
[m], except for [x] which is unbound in the returned map. *)
val mem: key -> 'a t -> bool
(** [mem x m] returns [true] if [m] contains a binding for [x],
and [false] otherwise. *)
val iter: (key -> 'a -> unit) -> 'a t -> unit
* [ iter f m ] applies [ f ] to all bindings in map [ m ] .
[ f ] receives the key as first argument , and the associated value
as second argument . The bindings are passed to [ f ] in increasing
order with respect to the ordering over the type of the keys .
[f] receives the key as first argument, and the associated value
as second argument. The bindings are passed to [f] in increasing
order with respect to the ordering over the type of the keys. *)
val map: ('a -> 'b) -> 'a t -> 'b t
(** [map f m] returns a map with same domain as [m], where the
associated value [a] of all bindings of [m] has been
replaced by the result of the application of [f] to [a].
The bindings are passed to [f] in increasing order
with respect to the ordering over the type of the keys. *)
val mapi: (key -> 'a -> 'b) -> 'a t -> 'b t
(** Same as {!Map.S.map}, but the function receives as arguments both the
key and the associated value for each binding of the map. *)
val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
(** [fold f m a] computes [(f kN dN ... (f k1 d1 a)...)],
where [k1 ... kN] are the keys of all bindings in [m]
(in increasing order), and [d1 ... dN] are the associated data. *)
val compare: ('a -> 'a -> int) -> 'a t -> 'a t -> int
* Total ordering between maps . The first argument is a total ordering
used to compare data associated with equal keys in the two maps .
used to compare data associated with equal keys in the two maps. *)
val equal: ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
* [ equal cmp ] tests whether the maps [ m1 ] and [ m2 ] are
equal , that is , contain equal keys and associate them with
equal data . [ cmp ] is the equality predicate used to compare
the data associated with the keys .
equal, that is, contain equal keys and associate them with
equal data. [cmp] is the equality predicate used to compare
the data associated with the keys. *)
end
(** Output signature of the functor {!Map.Make}. *)
module Make (Ord: OrderedType): S with type key = Ord.t
(** Functor building an implementation of the map structure
given a totally ordered type. *)
| null | https://raw.githubusercontent.com/bmeurer/ocaml-rbtrees/06c879da5b009025e65b9cbe9a36405bcf301f35/src/rbmap.mli | ocaml | * The type of the map keys.
* Input signature of the functor {!Map.Make}.
* The type of the map keys.
* The type of maps from type [key] to type ['a].
* The empty map.
* Test whether a map is empty or not.
* [add x y m] returns a map containing the same bindings as
[m], plus a binding of [x] to [y]. If [x] was already bound
in [m], its previous binding disappears.
* [find x m] returns the current binding of [x] in [m],
or raises [Not_found] if no such binding exists.
* [remove x m] returns a map containing the same bindings as
[m], except for [x] which is unbound in the returned map.
* [mem x m] returns [true] if [m] contains a binding for [x],
and [false] otherwise.
* [map f m] returns a map with same domain as [m], where the
associated value [a] of all bindings of [m] has been
replaced by the result of the application of [f] to [a].
The bindings are passed to [f] in increasing order
with respect to the ordering over the type of the keys.
* Same as {!Map.S.map}, but the function receives as arguments both the
key and the associated value for each binding of the map.
* [fold f m a] computes [(f kN dN ... (f k1 d1 a)...)],
where [k1 ... kN] are the keys of all bindings in [m]
(in increasing order), and [d1 ... dN] are the associated data.
* Output signature of the functor {!Map.Make}.
* Functor building an implementation of the map structure
given a totally ordered type. | -
* Copyright ( c ) 2007 , < >
*
* Permission is hereby granted , free of charge , to any person obtaining a
* copy of this software and associated documentation files ( the " Software " ) ,
* to deal in the Software without restriction , including without limitation
* the rights to use , copy , modify , merge , publish , distribute , sublicense ,
* and/or sell copies of the Software , and to permit persons to whom the
* Software is furnished to do so , subject to the following conditions :
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software .
*
* THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS
* OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
* LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
* FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE .
* Copyright (c) 2007, Benedikt Meurer <>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*)
This is my implementation of Red - Black Trees for OCaml . It is based upon
* " Red - Black Trees in a Functional Setting " , in " Functional
* Pearls " .
* Red - Black Trees are exposed via a map and a set API , which is designed to
* be compatible with the Map and Set modules in the OCaml standard library
* ( which are implemented using AVL trees ) . You can use the Rbmap and * modules as drop - in replacement for the Map and Set modules .
* "Red-Black Trees in a Functional Setting", Chris Okasaki in "Functional
* Pearls".
* Red-Black Trees are exposed via a map and a set API, which is designed to
* be compatible with the Map and Set modules in the OCaml standard library
* (which are implemented using AVL trees). You can use the Rbmap and Rbset
* modules as drop-in replacement for the Map and Set modules.
*)
module type OrderedType =
sig
type t
val compare : t -> t -> int
* A total ordering function over the keys .
This is a two - argument function [ f ] such that
[ f e1 e2 ] is zero if the keys [ e1 ] and [ e2 ] are equal ,
[ f e1 e2 ] is strictly negative if [ e1 ] is smaller than [ e2 ] ,
and [ f e1 e2 ] is strictly positive if [ e1 ] is greater than [ e2 ] .
Example : a suitable ordering function is the generic structural
comparison function { ! Pervasives.compare } .
This is a two-argument function [f] such that
[f e1 e2] is zero if the keys [e1] and [e2] are equal,
[f e1 e2] is strictly negative if [e1] is smaller than [e2],
and [f e1 e2] is strictly positive if [e1] is greater than [e2].
Example: a suitable ordering function is the generic structural
comparison function {!Pervasives.compare}. *)
end
module type S =
sig
type key
type (+'a) t
val empty: 'a t
val is_empty: 'a t -> bool
val add: key -> 'a -> 'a t -> 'a t
val find: key -> 'a t -> 'a
val remove: key -> 'a t -> 'a t
val mem: key -> 'a t -> bool
val iter: (key -> 'a -> unit) -> 'a t -> unit
* [ iter f m ] applies [ f ] to all bindings in map [ m ] .
[ f ] receives the key as first argument , and the associated value
as second argument . The bindings are passed to [ f ] in increasing
order with respect to the ordering over the type of the keys .
[f] receives the key as first argument, and the associated value
as second argument. The bindings are passed to [f] in increasing
order with respect to the ordering over the type of the keys. *)
val map: ('a -> 'b) -> 'a t -> 'b t
val mapi: (key -> 'a -> 'b) -> 'a t -> 'b t
val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val compare: ('a -> 'a -> int) -> 'a t -> 'a t -> int
* Total ordering between maps . The first argument is a total ordering
used to compare data associated with equal keys in the two maps .
used to compare data associated with equal keys in the two maps. *)
val equal: ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
* [ equal cmp ] tests whether the maps [ m1 ] and [ m2 ] are
equal , that is , contain equal keys and associate them with
equal data . [ cmp ] is the equality predicate used to compare
the data associated with the keys .
equal, that is, contain equal keys and associate them with
equal data. [cmp] is the equality predicate used to compare
the data associated with the keys. *)
end
module Make (Ord: OrderedType): S with type key = Ord.t
|
d9fa3cee25a2952cee717d5bee359cb93334fdf6537366a406a6d7cee6b07a87 | haas/harmtrace | Annotate.hs | {-# OPTIONS_GHC -Wall #-}
# LANGUAGE GADTs #
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE TupleSections #
--------------------------------------------------------------------------------
-- |
Module : HarmTrace . Audio . Annotate
Copyright : ( c ) 2010 - 2012 Universiteit Utrecht , 2012 University of Oxford
-- License : GPL3
--
Maintainer : ,
-- Stability : experimental
-- Portability : non-portable
--
-- Summary: Combining low-level features (VAMP plug-ins) with high-level
knowledge ( the HarmTrace harmony model )
--------------------------------------------------------------------------------
module HarmTrace.Audio.Annotate ( mptreeAnnotator, groupAnnotator
, simpleAnnotator, mptreeAnnotatorSTG
, putSegStats, preProcessData
) where
-- parameters
import Constants ( maxSegmentSize, maxLProductSize)
-- Audio Stuff
import HarmTrace.Audio.ChromaChord ( createChordRanks, beatSync
, mergeByOneAndThree )
import HarmTrace.Audio.Key (getBeatSyncKeyFromChroma)
import HarmTrace.Base.MusicTime
Harmony Model stuff
import HarmTrace.Base.MusicRep
import HarmTrace.Models.Models
import HarmTrace.Models.Jazz.Main
import HarmTrace.Models.Pop.Main
import HarmTrace.Models.ChordTokens
import HarmTrace.IO.Errors
import HarmTrace.HAnTree.HAn (HAn)
import HarmTrace.HAnTree.Tree (Tree, size, depth)
import HarmTrace.HAnTree.ToHAnTree (GTree)
import HarmTrace.HarmTrace
import Text.ParserCombinators.UU
import Text.ParserCombinators.UU.BasicInstances
import System.IO (stdout,hFlush)
import Data.List (sortBy, groupBy, intersperse)
import Control.Arrow (first)
import Text.Printf (printf)
--------------------------------------------------------------------------------
-- From chords with probabilities to a single chord, using harmony
--------------------------------------------------------------------------------
mptreeAnnotatorSTG :: GrammarEx -> Maybe [TimedData Key] -> AudioFeat
-> ChordAnnotation
mptreeAnnotatorSTG gex k = snapToGrid . mptreeAnnotator gex k
-- | MPTrEE (Model Propelled Transcription of Euphonic Enitities):
-- a sophisticated, harmony and beat informed chord annotator
mptreeAnnotator :: GrammarEx -> Maybe [TimedData Key] -> AudioFeat
-> ChordAnnotation
mptreeAnnotator (GrammarEx g) k f = concatMap (harmonize g) (preProcessData k f)
-- | preprocesses the raw audio data before using chord harmony model based
chord selection . First , the beats and chroma are synchronised . Second ,
chord candidate lists are created . Third , smart , beat informed grouping of
the chord candidates is performed . Fourth , the chord candidate lists are
-- grouped in segments based on the key (obtained
-- as provided by the user or as derived from the audio data). Last, the
-- chord candidate lists are further segmented based on the occurrences of
-- I and V chords.
preProcessData :: Maybe [TimedData Key] -> AudioFeat -> [ProbChordSeg]
preProcessData gtk af@(AudioFeat chrm beats _afk _id) =
segmentByTonic $ segmentByKey key . mergeByOneAndThree
. createChordRanks $ beatSync beats chrm
where key = maybe (getBeatSyncKeyFromChroma af) id gtk
reminder : ProbChordSeg = Segment Key [ TimedData [ ProbChord ] ]
| Chord selection based on the harmtrace model
harmonize :: forall g. (GTree g) => Grammar g -> ProbChordSeg
-> ChordAnnotation
harmonize g (Segment k cands) =
let isExpandable :: Bool
isExpandable = length (filter ((>1) . length) (map getData cands)) > 0
myParse :: [ProbChord] -> (Tree HAn,[ProbChord],Float)
myParse cs =
let x = map probChord cs
First , parse the tokens
res :: ([g],[Error Int])
res = case g of
Jazz -> parse_h ((,) <$> pJazz k <*> pEnd) (createStr 0 x)
Pop -> parse_h ((,) <$> pPop k <*> pEnd) (createStr 0 x)
Build a ParseResult from that
pr = ParseResult u (concatMap chords x) (fst res) u u u (snd res) []
-- So that we can post-process it. Then extract the Tree HAn
t = pieceTreeHAn (postProc [ RemovePDPT, MergeDelChords ] pr)
u :: forall a. a
u = error "harmonize: undefined placeholder evaluated"
-- Return the Tree HAn, the input tokens, and the error-ratio
in (t, cs, errorRatio (snd res) x )
-- Generate, parse, and evaluate all possible sequences of chords
parseResults :: [(Tree HAn,[ProbChord],Float)]
parseResults = [ myParse l
| l <- lProduct (map getData cands) ]
From all possible parse trees , take the best one
select :: [(Tree HAn,[ProbChord],Float)] -> [ProbChord]
select = select1 . head
. groupBy (\(_,_,a) (_,_,b) -> a `compare` b == EQ)
. sortBy (\(_,_,a) (_,_,b) -> a `compare` b)
These all have the same error ratio , so we sort them first by tree
size , then depth , and pick the first
select1 :: [(Tree HAn,[ProbChord],Float)] -> [ProbChord]
select1 = snd3 . head . sortBy cmp where
cmp (a,_,_) (b,_,_) = (size a, depth a) `compare` (size b, depth b)
snd3 (_,s,_) = s
probChord :: ProbChord -> ChordToken
probChord (ProbChord lab@(Chord r _sh _add _on _dur) _p) =
(ChordToken r' sh' [lab] NotParsed 1 0) where
r' = if isNone r then Note Nothing Imp else toScaleDegree k r
sh' = toClassType lab
replaces the candidate list by the selected chord in a TimedData
setBestChords :: [ProbChord] -> [TimedData ProbChord]
setBestChords = zipWith setData cands
-- if there is nothing to expand, do not parse
in if isExpandable then setBestChords $ select parseResults
else map pickHead cands
pickHead :: TimedData [ProbChord] -> TimedData ProbChord
pickHead = fmap head
--------------------------------------------------------------------------------
-- post-processing functions
--------------------------------------------------------------------------------
snapToGrid : : ChordAnnotation - > ChordAnnotation -- = [ TimedData ProbChord ]
snapToGrid :: [TimedData ProbChord] -> [TimedData ProbChord]
snapToGrid = foldr snap [] . reduceTimedPChords where
snap :: TimedData a -> [TimedData a] -> [TimedData a]
snap td [] = [td]
snap a (h : tl) = case ( odd (beatLen a) && odd (beatLen h) , getBeat h ) of
(True, Two ) -> shiftFwd a h ++ tl
(True, Four) -> shiftBwd a h ++ tl
_ -> a : h : tl
beatLen :: TimedData a -> Int
beatLen = pred . length . getTimeStamps
Shifts the second TimedData backwards in time , lengthing the first TimedData
shiftBwd :: TimedData a -> TimedData a -> [TimedData a]
shiftBwd (TimedData a ta) tdb = case getTimeStamps tdb of
[_on, off] -> [TimedData a (ta ++ [off]) ]
(_hb : hhb : tb) -> [TimedData a (ta ++ [hhb]), tdb{getTimeStamps = (hhb:tb)}]
[_] -> error "HarmTrace.Audio.Annotate.shiftBwd: 1 timestamp, onset == offset"
[ ] -> error "HarmTrace.Audio.Annotate.shiftBwd: No timestamps to shift"
Shifts the second TimedData forwards in time at the cost of the first
shiftFwd :: TimedData a -> TimedData a -> [TimedData a]
shiftFwd tda (TimedData b tb) = case getTimeStamps tda of
[ ] -> error "HarmTrace.Audio.Annotate.shiftFwd: No timestamps to shift"
[_] -> error "HarmTrace.Audio.Annotate.shiftFwd: 1 timestamp, onset == offset"
[on, _off] -> [TimedData b (on : tb) ]
ta -> [tda {getTimeStamps = initTa}, TimedData b (oneButLastTa : tb)]
where
(initTa,oneButLastTa) = snocsnoc ta
takes a list of elements and returns , all elements up to the one - but-
last element ( discarding the last elem ) , and the one - but last element
snocsnoc :: [a] -> ([a],a)
snocsnoc [] = error "snocsnoc: empty list"
snocsnoc [_] = error "snocsnoc: singleton list"
= the one - but - last element
snocsnoc (x:xs) = first (x :) (snocsnoc xs)
-- | Returns the reduced chord sequences, where repeated chords are merged
into one ' ProbChord ' , wrapped in a ' TimedData ' type .
reduceTimedPChords :: [TimedData ProbChord] -> [TimedData ProbChord]
reduceTimedPChords = foldr group [] where
group :: TimedData ProbChord -> [TimedData ProbChord] -> [TimedData ProbChord]
group c [] = [c]
group tc@(TimedData c tsc ) (th@(TimedData h tsh ) : t)
| c `pChordEq` h = concatTimedData c {prob = avgProb} tc th : t
| otherwise = tc : th : t where
avgProb :: NumData
avgProb = let ltsc = fromIntegral $ length tsc
ltsh = fromIntegral $ length tsh
tot = ltsc + ltsh
in (prob c * ltsc) + (prob h * ltsh) / tot
pChordEq :: ProbChord -> ProbChord -> Bool
pChordEq (ProbChord cA _pA) (ProbChord cB _pB) =
chordRoot cA == chordRoot cB &&
chordShorthand cA == chordShorthand cB
--------------------------------------------------------------------------------
-- Segmentation functions
--------------------------------------------------------------------------------
-- Temporary test values
test = segmentTonic testKey testSeq
testKey = Key ( Note Nothing C ) testSeq = testChordG + + testChordC + + testChordC + + testChordG + + testChordG
testChordC = [ TimedData [ ProbChord labC 1 , ProbChord labG 0.5 ] 0 0 ]
testChordG = [ TimedData [ ProbChord labG 1 , ProbChord labC 0.5 ] 0 0 ]
labC = Chord ( Note Nothing C ) Maj [ ] 0 0
labG = Chord ( Note Nothing G ) Maj [ ] 0 0
test = segmentTonic testKey testSeq
testKey = Key (Note Nothing C) MajMode
testSeq = testChordG ++ testChordC ++ testChordC ++ testChordG ++ testChordG
testChordC = [TimedData [ProbChord labC 1, ProbChord labG 0.5] 0 0]
testChordG = [TimedData [ProbChord labG 1, ProbChord labC 0.5] 0 0]
labC = Chord (Note Nothing C) Maj [] 0 0
labG = Chord (Note Nothing G) Maj [] 0 0
-}
move to segmentations function in Harmonize ?
segmentByKey :: [TimedData Key] -> [TimedData [ProbChord]] -> [ProbChordSeg]
segmentByKey [] _ = error "segmentByKey: empty key list"
segmentByKey [k] chds = [Segment (getData k) chds]
segmentByKey (k : ks) chds = let (seg,cs) = span ((<= offset k) . offset) chds
in Segment (getData k) seg : segmentByKey ks cs
segmentByTonic :: [ProbChordSeg] -> [ProbChordSeg]
segmentByTonic segs = concatMap emergencySplit $ concatMap split segs where
split :: ProbChordSeg -> [ProbChordSeg]
split (Segment key cs) = zipWith Segment (repeat key) (segmentTonic key cs)
In case segments are just to big , even after segmenting on Tonic and Dominant
-- split these segments into smaller segements recursively.
emergencySplit :: ProbChordSeg -> [ProbChordSeg]
emergencySplit (Segment k cs) = map (Segment k) (recSplit cs) where
recSplit : : [ TimedData [ a ] ] - > [ [ TimedData [ a ] ] ]
recSplit [] = []
recSplit b
| blen <= maxSegmentSize
&& snd (lProdStats b) <= maxLProductSize = [b]
| otherwise = recSplit l ++ recSplit r
where blen = length b
(l,r) = splitAt (blen `div` 2) b
-- Break into segments according to the key
segmentTonic :: Key -> [TimedData [ProbChord]] -> [[TimedData [ProbChord]]]
segmentTonic k cands = segment cands [] where
segment [] [] = []
segment [] acc = [reverse acc]
segment (c:cs) acc
| c' `isTonic` k || c' `isDom` k = reverse (c:acc) : segmentTonic k cs
| otherwise = segment cs (c:acc) where
c' = getFstChord c
Take the first chord ( which is the one with the highest probability , since
-- the list is sorted)
getFstChord :: TimedData [ProbChord] -> ChordLabel
getFstChord c = case getData c of
[] -> error "getFstChord: empty list"
(h:_) -> chordLab h -- only split on chords we are certain of
-- _ -> Chord (Note Nothing N) None [] 0 0 -- else return None
-- Check if this chord label is the tonic
isTonic :: ChordLabel -> Key -> Bool
isTonic (Chord (Note Nothing N) _ _ _ _) _ = False
isTonic c (Key r m) = r == chordRoot c && m == toMode (toTriad c)
-- Check if this chord label is the dominant
JPM : I do n't understand why this function looks so different from ` isTonic `
isDom :: ChordLabel -> Key -> Bool
isDom (Chord (Note Nothing N) _ _ _ _) _ = False
isDom c key = toScaleDegree key (chordRoot c) == Note Nothing V
&& toTriad c == MajTriad
lProduct :: [[a]] -> [[a]]
lProduct [] = []
lProduct [l] = [ [x] | x <- l ]
lProduct (h:t) = concat [ map (x:) (lProduct t) | x <- h ]
--------------------------------------------------------------------------------
-- Some printing and statistics functions
--------------------------------------------------------------------------------
| prints Segmetation statistics
putSegStats :: Maybe [TimedData Key] -> AudioFeat -> IO()
putSegStats k af = mapM_ segmentStat $ preProcessData k af
segmentStat :: ProbChordSeg -> IO ()
segmentStat (Segment k bs) =
do putStr ("\nstart: " ++ (printf "%.3f" . onset $ head bs))
putStr (", end: " ++ (printf "%.3f" . offset $ last bs))
putStr (", key: " ++ show k)
putStr (", probChords: " ++ show (length bs))
let (l, lpr) = lProdStats bs
putStr (", lists > 1: " ++ show l)
putStrLn (" lProduct: " ++ show lpr)
(putStrLn . concat . intersperse "\n" . map showTimedData $ bs)
>> hFlush stdout where
showTimedData :: TimedData [ProbChord] -> String
showTimedData td =
(concat . intersperse ", " . map showProbChord . getData $ td)
++ ": " ++ ( show . getTimeStamps $ td )
showProbChord :: ProbChord -> String
showProbChord (ProbChord lab p) = show lab ++ '@' : printf "%.3f" p
-- Given a Block list this function returns the number of probChords with a
list > 1 ( fst ) and the lProduct size ( snd )
lProdStats :: [TimedData [a]] -> (Int, Int)
lProdStats bs = (length l, lpr) where
l = filter ((>1) . length ) (map getData bs)
lpr = foldr (\a b -> length a * b) 1 l
--------------------------------------------------------------------------------
-- A baseline chord label annotator
--------------------------------------------------------------------------------
| Creates an annotation out of a Chord candidate list by just picking the
-- first chord. This annotator does smart grouping
( see ' HarmTrace . Audio . ChromaChord.mergeByBeat ' ) .
groupAnnotator :: GrammarEx -> Maybe [TimedData Key] -> AudioFeat -> ChordAnnotation
groupAnnotator _g _keyAnn (AudioFeat chrm beats _key _id) = -- ignore key info
-- TODO: check is this synchronisation still needed???
let endTime = BarTime (time $ last chrm) Four
beats' = takeWhile (< endTime) beats ++ [endTime]
in map pickHead . mergeByOneAndThree
. createChordRanks $ beatSync beats' chrm
-- | The most simple annotator, no grouping, no matching,
-- just pick the best matching chord
simpleAnnotator :: GrammarEx -> Maybe [TimedData Key] -> AudioFeat -> ChordAnnotation
simpleAnnotator _g _keyAnn (AudioFeat crm bts _key _id) = -- ignore key
map pickHead . createChordRanks $ beatSync bts crm
| null | https://raw.githubusercontent.com/haas/harmtrace/e250855a3bb6e5b28fe538c728f707cf82ca9fd3/src/HarmTrace/Audio/Annotate.hs | haskell | # OPTIONS_GHC -Wall #
# LANGUAGE ScopedTypeVariables #
------------------------------------------------------------------------------
|
License : GPL3
Stability : experimental
Portability : non-portable
Summary: Combining low-level features (VAMP plug-ins) with high-level
------------------------------------------------------------------------------
parameters
Audio Stuff
------------------------------------------------------------------------------
From chords with probabilities to a single chord, using harmony
------------------------------------------------------------------------------
| MPTrEE (Model Propelled Transcription of Euphonic Enitities):
a sophisticated, harmony and beat informed chord annotator
| preprocesses the raw audio data before using chord harmony model based
grouped in segments based on the key (obtained
as provided by the user or as derived from the audio data). Last, the
chord candidate lists are further segmented based on the occurrences of
I and V chords.
So that we can post-process it. Then extract the Tree HAn
Return the Tree HAn, the input tokens, and the error-ratio
Generate, parse, and evaluate all possible sequences of chords
if there is nothing to expand, do not parse
------------------------------------------------------------------------------
post-processing functions
------------------------------------------------------------------------------
= [ TimedData ProbChord ]
| Returns the reduced chord sequences, where repeated chords are merged
------------------------------------------------------------------------------
Segmentation functions
------------------------------------------------------------------------------
Temporary test values
split these segments into smaller segements recursively.
Break into segments according to the key
the list is sorted)
only split on chords we are certain of
_ -> Chord (Note Nothing N) None [] 0 0 -- else return None
Check if this chord label is the tonic
Check if this chord label is the dominant
------------------------------------------------------------------------------
Some printing and statistics functions
------------------------------------------------------------------------------
Given a Block list this function returns the number of probChords with a
------------------------------------------------------------------------------
A baseline chord label annotator
------------------------------------------------------------------------------
first chord. This annotator does smart grouping
ignore key info
TODO: check is this synchronisation still needed???
| The most simple annotator, no grouping, no matching,
just pick the best matching chord
ignore key | # LANGUAGE GADTs #
# LANGUAGE TupleSections #
Module : HarmTrace . Audio . Annotate
Copyright : ( c ) 2010 - 2012 Universiteit Utrecht , 2012 University of Oxford
Maintainer : ,
knowledge ( the HarmTrace harmony model )
module HarmTrace.Audio.Annotate ( mptreeAnnotator, groupAnnotator
, simpleAnnotator, mptreeAnnotatorSTG
, putSegStats, preProcessData
) where
import Constants ( maxSegmentSize, maxLProductSize)
import HarmTrace.Audio.ChromaChord ( createChordRanks, beatSync
, mergeByOneAndThree )
import HarmTrace.Audio.Key (getBeatSyncKeyFromChroma)
import HarmTrace.Base.MusicTime
Harmony Model stuff
import HarmTrace.Base.MusicRep
import HarmTrace.Models.Models
import HarmTrace.Models.Jazz.Main
import HarmTrace.Models.Pop.Main
import HarmTrace.Models.ChordTokens
import HarmTrace.IO.Errors
import HarmTrace.HAnTree.HAn (HAn)
import HarmTrace.HAnTree.Tree (Tree, size, depth)
import HarmTrace.HAnTree.ToHAnTree (GTree)
import HarmTrace.HarmTrace
import Text.ParserCombinators.UU
import Text.ParserCombinators.UU.BasicInstances
import System.IO (stdout,hFlush)
import Data.List (sortBy, groupBy, intersperse)
import Control.Arrow (first)
import Text.Printf (printf)
mptreeAnnotatorSTG :: GrammarEx -> Maybe [TimedData Key] -> AudioFeat
-> ChordAnnotation
mptreeAnnotatorSTG gex k = snapToGrid . mptreeAnnotator gex k
mptreeAnnotator :: GrammarEx -> Maybe [TimedData Key] -> AudioFeat
-> ChordAnnotation
mptreeAnnotator (GrammarEx g) k f = concatMap (harmonize g) (preProcessData k f)
chord selection . First , the beats and chroma are synchronised . Second ,
chord candidate lists are created . Third , smart , beat informed grouping of
the chord candidates is performed . Fourth , the chord candidate lists are
preProcessData :: Maybe [TimedData Key] -> AudioFeat -> [ProbChordSeg]
preProcessData gtk af@(AudioFeat chrm beats _afk _id) =
segmentByTonic $ segmentByKey key . mergeByOneAndThree
. createChordRanks $ beatSync beats chrm
where key = maybe (getBeatSyncKeyFromChroma af) id gtk
reminder : ProbChordSeg = Segment Key [ TimedData [ ProbChord ] ]
| Chord selection based on the harmtrace model
-- | Chord selection based on the HarmTrace model.  Every combination
-- of chord candidates within the segment is parsed with the selected
-- grammar; the combination with the lowest error ratio wins (ties are
-- broken by parse-tree size, then depth).  Segments without any
-- ambiguity skip parsing and keep the top-ranked candidates.
--
-- FIX: the extraction-stripped comment lines inside this definition
-- were bare text (parse errors); they are restored as @--@ comments.
harmonize :: forall g. (GTree g) => Grammar g -> ProbChordSeg
          -> ChordAnnotation
harmonize g (Segment k cands) =
  let -- does any candidate list hold more than one chord?
      isExpandable :: Bool
      isExpandable = length (filter ((>1) . length) (map getData cands)) > 0

      -- parses one concrete candidate assignment, yielding the parse
      -- tree, the candidates themselves, and the error ratio
      myParse :: [ProbChord] -> (Tree HAn,[ProbChord],Float)
      myParse cs =
        let x = map probChord cs
            -- First, parse the tokens
            res :: ([g],[Error Int])
            res = case g of
                    Jazz -> parse_h ((,) <$> pJazz k <*> pEnd) (createStr 0 x)
                    Pop  -> parse_h ((,) <$> pPop k <*> pEnd) (createStr 0 x)
            -- Build a ParseResult from that
            pr = ParseResult u (concatMap chords x) (fst res) u u u (snd res) []
            t  = pieceTreeHAn (postProc [ RemovePDPT, MergeDelChords ] pr)
            u :: forall a. a
            u = error "harmonize: undefined placeholder evaluated"
        in (t, cs, errorRatio (snd res) x )

      -- one parse per element of the candidate-list product
      parseResults :: [(Tree HAn,[ProbChord],Float)]
      parseResults = [ myParse l
                     | l <- lProduct (map getData cands) ]

      -- From all possible parse trees, take the best one
      select :: [(Tree HAn,[ProbChord],Float)] -> [ProbChord]
      select = select1 . head
             . groupBy (\(_,_,a) (_,_,b) -> a `compare` b == EQ)
             . sortBy  (\(_,_,a) (_,_,b) -> a `compare` b)

      -- These all have the same error ratio, so we sort them first by
      -- tree size, then depth, and pick the first
      select1 :: [(Tree HAn,[ProbChord],Float)] -> [ProbChord]
      select1 = snd3 . head . sortBy cmp where
        cmp (a,_,_) (b,_,_) = (size a, depth a) `compare` (size b, depth b)
        snd3 (_,s,_) = s

      -- converts a candidate into the token representation expected
      -- by the grammar (root as scale degree relative to the key)
      probChord :: ProbChord -> ChordToken
      probChord (ProbChord lab@(Chord r _sh _add _on _dur) _p) =
        (ChordToken r' sh' [lab] NotParsed 1 0) where
          r'  = if isNone r then Note Nothing Imp else toScaleDegree k r
          sh' = toClassType lab

      -- replaces the candidate list by the selected chord in a TimedData
      setBestChords :: [ProbChord] -> [TimedData ProbChord]
      setBestChords = zipWith setData cands

  in if isExpandable then setBestChords $ select parseResults
                     else map pickHead cands
-- | Keeps only the most probable chord (the head of the ranked
-- candidate list) of a 'TimedData' element.
pickHead :: TimedData [ProbChord] -> TimedData ProbChord
pickHead td = head <$> td
snapToGrid :: [TimedData ProbChord] -> [TimedData ProbChord]
snapToGrid = foldr snap [] . reduceTimedPChords where
snap :: TimedData a -> [TimedData a] -> [TimedData a]
snap td [] = [td]
snap a (h : tl) = case ( odd (beatLen a) && odd (beatLen h) , getBeat h ) of
(True, Two ) -> shiftFwd a h ++ tl
(True, Four) -> shiftBwd a h ++ tl
_ -> a : h : tl
beatLen :: TimedData a -> Int
beatLen = pred . length . getTimeStamps
Shifts the second TimedData backwards in time , lengthing the first TimedData
-- | Shifts the second 'TimedData' backwards in time, lengthening the
-- first one by one beat.  When the second element spans only two
-- timestamps it is absorbed completely by the first.
shiftBwd :: TimedData a -> TimedData a -> [TimedData a]
shiftBwd (TimedData a ta) tdb =
  case getTimeStamps tdb of
    [_onset, off]    -> [ TimedData a (ta ++ [off]) ]
    (_h : hh : rest) -> [ TimedData a (ta ++ [hh])
                        , tdb { getTimeStamps = (hh : rest) } ]
    [_] -> error "HarmTrace.Audio.Annotate.shiftBwd: 1 timestamp, onset == offset"
    []  -> error "HarmTrace.Audio.Annotate.shiftBwd: No timestamps to shift"
Shifts the second TimedData forwards in time at the cost of the first
shiftFwd :: TimedData a -> TimedData a -> [TimedData a]
shiftFwd tda (TimedData b tb) = case getTimeStamps tda of
[ ] -> error "HarmTrace.Audio.Annotate.shiftFwd: No timestamps to shift"
[_] -> error "HarmTrace.Audio.Annotate.shiftFwd: 1 timestamp, onset == offset"
[on, _off] -> [TimedData b (on : tb) ]
ta -> [tda {getTimeStamps = initTa}, TimedData b (oneButLastTa : tb)]
where
(initTa,oneButLastTa) = snocsnoc ta
takes a list of elements and returns , all elements up to the one - but-
last element ( discarding the last elem ) , and the one - but last element
snocsnoc :: [a] -> ([a],a)
snocsnoc [] = error "snocsnoc: empty list"
snocsnoc [_] = error "snocsnoc: singleton list"
= the one - but - last element
snocsnoc (x:xs) = first (x :) (snocsnoc xs)
into one ' ProbChord ' , wrapped in a ' TimedData ' type .
-- | Merges consecutive 'TimedData' elements that carry equal chords
-- (same root and shorthand, see 'pChordEq') into one element, with a
-- probability that is the average of the two, weighted by the number
-- of timestamps each element spans.
reduceTimedPChords :: [TimedData ProbChord] -> [TimedData ProbChord]
reduceTimedPChords = foldr group [] where

  group :: TimedData ProbChord -> [TimedData ProbChord] -> [TimedData ProbChord]
  group c [] = [c]
  group tc@(TimedData c tsc ) (th@(TimedData h tsh ) : t)
    | c `pChordEq` h = concatTimedData c {prob = avgProb} tc th : t
    | otherwise      = tc : th : t where

        -- weighted mean of the two probabilities; weights are the
        -- timestamp counts of the merged elements
        avgProb :: NumData
        avgProb = let ltsc = fromIntegral $ length tsc
                      ltsh = fromIntegral $ length tsh
                      tot  = ltsc + ltsh
                  -- BUG FIX: the whole weighted sum must be divided by
                  -- the total weight; previously only the second term
                  -- was divided (operator precedence).
                  in ((prob c * ltsc) + (prob h * ltsh)) / tot
-- | Chord equality used for merging: two 'ProbChord's are considered
-- equal when their labels share the same root and the same shorthand;
-- the probabilities are ignored.
pChordEq :: ProbChord -> ProbChord -> Bool
pChordEq (ProbChord cA _pA) (ProbChord cB _pB) = sameRoot && sameShorthand
  where sameRoot      = chordRoot      cA == chordRoot      cB
        sameShorthand = chordShorthand cA == chordShorthand cB
test = segmentTonic testKey testSeq
testKey = Key ( Note Nothing C ) testSeq = testChordG + + testChordC + + testChordC + + testChordG + + testChordG
testChordC = [ TimedData [ ProbChord labC 1 , ProbChord labG 0.5 ] 0 0 ]
testChordG = [ TimedData [ ProbChord labG 1 , ProbChord labC 0.5 ] 0 0 ]
labC = Chord ( Note Nothing C ) Maj [ ] 0 0
labG = Chord ( Note Nothing G ) Maj [ ] 0 0
test = segmentTonic testKey testSeq
testKey = Key (Note Nothing C) MajMode
testSeq = testChordG ++ testChordC ++ testChordC ++ testChordG ++ testChordG
testChordC = [TimedData [ProbChord labC 1, ProbChord labG 0.5] 0 0]
testChordG = [TimedData [ProbChord labG 1, ProbChord labC 0.5] 0 0]
labC = Chord (Note Nothing C) Maj [] 0 0
labG = Chord (Note Nothing G) Maj [] 0 0
-}
move to segmentations function in Harmonize ?
-- | Splits the chord-candidate stream into one segment per key: every
-- candidate whose offset does not exceed the key's offset belongs to
-- that key's segment; the last key receives all remaining candidates.
-- Raises an error when the key list is empty.
segmentByKey :: [TimedData Key] -> [TimedData [ProbChord]] -> [ProbChordSeg]
segmentByKey keys chds = case keys of
  []       -> error "segmentByKey: empty key list"
  [k]      -> [Segment (getData k) chds]
  (k : ks) -> let inKey c   = offset c <= offset k
                  (seg, cs) = span inKey chds
              in  Segment (getData k) seg : segmentByKey ks cs
segmentByTonic :: [ProbChordSeg] -> [ProbChordSeg]
segmentByTonic segs = concatMap emergencySplit $ concatMap split segs where
split :: ProbChordSeg -> [ProbChordSeg]
split (Segment key cs) = zipWith Segment (repeat key) (segmentTonic key cs)
In case segments are just to big , even after segmenting on Tonic and Dominant
-- | Safety net: if a segment is still too expensive after key/tonic
-- segmentation — too many candidates, or a candidate-list product
-- exceeding 'maxLProductSize' — it is split recursively in half until
-- both limits are respected.
--
-- FIX: the local signature of @recSplit@ was garbled into bare text
-- (@: :@ / @- >@), which does not parse; it is restored here.
emergencySplit :: ProbChordSeg -> [ProbChordSeg]
emergencySplit (Segment k cs) = map (Segment k) (recSplit cs) where

  recSplit :: [TimedData [a]] -> [[TimedData [a]]]
  recSplit [] = []
  recSplit b
    | blen <= maxSegmentSize
      && snd (lProdStats b) <= maxLProductSize = [b]
    | otherwise = recSplit l ++ recSplit r
    where blen  = length b
          (l,r) = splitAt (blen `div` 2) b
-- | Splits a chord-candidate sequence into sub-sequences: a
-- sub-sequence ends right after every chord that is the tonic or the
-- dominant of the given key.  A trailing run without such a chord
-- forms one final segment of its own.
segmentTonic :: Key -> [TimedData [ProbChord]] -> [[TimedData [ProbChord]]]
segmentTonic k = go [] where
  go acc []       = if null acc then [] else [reverse acc]
  go acc (c : cs)
    | isBoundary  = reverse (c : acc) : segmentTonic k cs
    | otherwise   = go (c : acc) cs
    where lab        = getFstChord c
          isBoundary = lab `isTonic` k || lab `isDom` k
Take the first chord ( which is the one with the highest probability , since
getFstChord :: TimedData [ProbChord] -> ChordLabel
getFstChord c = case getData c of
[] -> error "getFstChord: empty list"
-- | True when the chord is the tonic of the key: identical root, and
-- the chord's triad matches the key's mode.  The "no chord" label
-- (root @Note Nothing N@) is never a tonic.
isTonic :: ChordLabel -> Key -> Bool
isTonic c (Key r m) = case chordRoot c of
  Note Nothing N -> False
  cr             -> cr == r && toMode (toTriad c) == m
JPM : I do n't understand why this function looks so different from ` isTonic `
-- | True when the chord functions as the dominant of the key: its
-- root lies on scale degree V and it forms a major triad.  The
-- "no chord" label is never a dominant.
isDom :: ChordLabel -> Key -> Bool
isDom c key = case chordRoot c of
  Note Nothing N -> False
  cr             -> toScaleDegree key cr == Note Nothing V
                    && toTriad c == MajTriad
-- | Cartesian ("list") product: given a list of candidate lists,
-- yields every way of picking exactly one element from each list.
-- N.B. an empty input yields the empty list (not @[[]]@).
lProduct :: [[a]] -> [[a]]
lProduct []      = []
lProduct [l]     = [ [x] | x <- l ]
lProduct (h : t) = [ x : xs | x <- h, xs <- lProduct t ]
| prints Segmetation statistics
putSegStats :: Maybe [TimedData Key] -> AudioFeat -> IO()
putSegStats k af = mapM_ segmentStat $ preProcessData k af
segmentStat :: ProbChordSeg -> IO ()
segmentStat (Segment k bs) =
do putStr ("\nstart: " ++ (printf "%.3f" . onset $ head bs))
putStr (", end: " ++ (printf "%.3f" . offset $ last bs))
putStr (", key: " ++ show k)
putStr (", probChords: " ++ show (length bs))
let (l, lpr) = lProdStats bs
putStr (", lists > 1: " ++ show l)
putStrLn (" lProduct: " ++ show lpr)
(putStrLn . concat . intersperse "\n" . map showTimedData $ bs)
>> hFlush stdout where
showTimedData :: TimedData [ProbChord] -> String
showTimedData td =
(concat . intersperse ", " . map showProbChord . getData $ td)
++ ": " ++ ( show . getTimeStamps $ td )
showProbChord :: ProbChord -> String
showProbChord (ProbChord lab p) = show lab ++ '@' : printf "%.3f" p
list > 1 ( fst ) and the lProduct size ( snd )
-- | Statistics used to decide whether a segment is parseable: the
-- number of candidate lists holding more than one chord (fst) and
-- the size of their list product (snd).
lProdStats :: [TimedData [a]] -> (Int, Int)
lProdStats bs = (length ambiguous, product (map length ambiguous))
  where ambiguous = [ l | l <- map getData bs, length l > 1 ]
| Creates an annotation out of a Chord candidate list by just picking the
( see ' HarmTrace . Audio . ChromaChord.mergeByBeat ' ) .
groupAnnotator :: GrammarEx -> Maybe [TimedData Key] -> AudioFeat -> ChordAnnotation
let endTime = BarTime (time $ last chrm) Four
beats' = takeWhile (< endTime) beats ++ [endTime]
in map pickHead . mergeByOneAndThree
. createChordRanks $ beatSync beats' chrm
simpleAnnotator :: GrammarEx -> Maybe [TimedData Key] -> AudioFeat -> ChordAnnotation
map pickHead . createChordRanks $ beatSync bts crm
|
bda8427105f61b1740645944261fde4b90fd1ed07fba80e22f39e1dc1ffbe6d4 | DaveWM/lobster-writer | core.clj | (ns lobster-writer.core)
| null | https://raw.githubusercontent.com/DaveWM/lobster-writer/98eef942cc61725f0a2e00c34e6952c1e262f1ad/src/clj/lobster_writer/core.clj | clojure | (ns lobster-writer.core)
| |
e404db7edbf1cb124a9807aec908147b25720568e7f7d33edb1ff1dfbb791631 | fogfish/typhoon | typhoon.erl | %%
%% Copyright 2015 Zalando SE
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc
%% native interface distributed system stress and load testing tool
-module(typhoon).
-author('').
-include_lib("ambitz/include/ambitz.hrl").
-compile({parse_transform, category}).
-export([start/0]).
%%
%% management interface
-export([
put/3
,get/2
,remove/2
,signup/2
,profile/2
,scenario/2
,peer/1
,run/1
,abort/1
,unit/1
,attr/1
]).
%%
%% data types
-type urn() :: {urn, _, _}.
-type opts() :: [_].
%%
RnD node start
start() ->
applib:boot(?MODULE, code:where_is_file("dev.config")).
%%%----------------------------------------------------------------------------
%%%
%%% management interface
%%%
%%%----------------------------------------------------------------------------
%%
%% create a new workload scenario
-spec put(urn(), binary(), opts()) -> {ok, _} | {error, _}.
put({urn, User, _} = Key, Spec, Opts) ->
[$^||
scenario_spawn(Key, Spec, Opts),
typhoon_kv:append(scenario, {urn, user, User}, Key, Opts)
].
scenario_spawn(Key, Spec, Opts) ->
ambitz:spawn(typhoon, uri:s(Key),
{typhoon_scenario, start_link, [scalar:atom(uri:path(Key)), Spec]},
Opts
).
%%
%% read content of workload scenario
-spec get(urn(), opts()) -> {ok, ambitz:entity()}.
get({urn, _, _} = Key, Opts) ->
ambitz:lookup(typhoon, uri:s(Key), Opts).
%%
%% remove workload scenario
-spec remove(urn(), opts()) -> {ok, _} | {error, _}.
remove({urn, User, _} = Key, Opts) ->
[$^||
typhoon_kv:remove(scenario, {urn, user, User}, Key, Opts),
scenario_free(Key, Opts)
].
scenario_free(Key, Opts) ->
ambitz:free(typhoon, uri:s(Key), Opts).
%%
%% sign up a new user
-spec signup(urn(), opts()) -> ok.
signup({urn, user, _} = User, Opts) ->
Profile = crdts:update(uri:path(User), crdts:new(lwwreg)),
Spec = {typhoon_user, start_link, [Profile]},
{ok, _} = ambitz:spawn(typhoon, uri:s(User), Spec, Opts),
ok.
%%
%% read user profile
-spec profile(urn(), opts()) -> {ok, _} | {error, not_found}.
profile({urn, user, _} = User, Opts) ->
case ambitz:get(typhoon, uri:s(User), profile, Opts) of
{ok, #entity{val = undefined}} ->
{error, not_found};
{ok, #entity{val = Profile}} ->
{ok, crdts:value(Profile)}
end.
%%
%% read user scenario
-spec scenario(urn(), opts()) -> {ok, _} | {error, not_found}.
scenario({urn, user, _} = User, Opts) ->
case ambitz:get(typhoon, uri:s(User), scenario, Opts) of
{ok, #entity{val = undefined}} ->
{error, not_found};
{ok, #entity{val = Scenario}} ->
{ok, crdts:value(Scenario)}
end.
%%
%% run load scenario, the scenario will terminate automatically after timeout
-spec run(urn()) -> ok | {error, _}.
%% @doc Start the load scenario identified by Id.
%% Resolves the scenario coordinator processes through the ambit
%% registry (read quorum 1) and asks one coordinator, chosen at
%% random, to run the workload.
run({urn, _, _} = Id) ->
   %% the CRDT value holds the coordinator pids spawned across the ring
   {ok, #entity{val = CRDT}} = ambitz:whereis(typhoon, uri:s(Id), [{r, 1}]),
   Pids = crdts:value(CRDT),
   %% random pick balances the requests over the coordinators
   Pid = lists:nth(rand:uniform(length(Pids)), Pids),
   pipe:call(Pid, run).
%%
%% abort load scenario
-spec abort(urn()) -> ok | {error, _}.
%% @doc Abort the load scenario identified by Id.
%% Implemented as remove + re-deploy: the scenario definition is read
%% back, the compiled scenario module is purged on every node of the
%% cluster, and the scenario is re-installed (stopped).
abort({urn, _, _} = Id) ->
   %% read the scenario before destroying it; quorum 3 for consistency
   {ok, #entity{val = Val}} = typhoon:get(Id, [{w, 3}]),
   typhoon:remove(Id, [{w, 3}]),
   %% the CRDT value carries the scenario module name and its spec
   {_, _, [Mod, Spec]} = crdts:value(Val),
   %% purge the compiled scenario module on every node; rpc results are
   %% deliberately ignored (best effort)
   lists:foreach(
      fun(Node) ->
         _ = rpc:call(Node, code, purge, [Mod]),
         _ = rpc:call(Node, code, delete, [Mod])
      end,
      [erlang:node() | erlang:nodes()]
   ),
   %% Note: this is not nice but we need to delay re-start of scenario
   %% the ultimate fix in the pipeline
   timer:sleep(5000),
   typhoon:put(Id, Spec, [{w, 3}]).
%%
%% return list of peer(s) nodes (ip addresses)
-spec peer(urn()) -> [_].
%% @doc Yields the IP address of every primary vnode responsible for
%% the given identifier on the consistent-hashing ring.
peer(Id) ->
   [addr(Vnode) || Vnode <- ek:successors(typhoon, scalar:s(Id)),
      ek:vnode(type, Vnode) == primary].
addr(Vnode) ->
%% @todo: it would fail if cluster uses FQDN
[_, Host] = binary:split(ek:vnode(node, Vnode), <<$@>>),
{ok, IP} = inet_parse:address(scalar:c(Host)),
IP.
%%
%% return number of active load units
-spec unit(urn()) -> {ok, integer()} | {error, any()}.
%% @doc Total number of active load units across all coordinators of
%% the scenario.
%% NOTE(review): the -spec above declares {ok, integer()} | {error, any()},
%% but this clause returns the bare sum from lists:sum/1 — confirm
%% whether callers expect the tagged tuple or the plain integer.
unit({urn, _, _} = Id) ->
   {ok, #entity{val = CRDT}} = ambitz:whereis(typhoon, uri:s(Id), [{r, 3}]),
   lists:sum(
      lists:map(
         %% each coordinator reports its own unit count via ioctl
         fun(Pid) -> pipe:ioctl(Pid, n) end,
         crdts:value(CRDT)
      )
   ).
%%
%% return static attributes about scenario
-spec attr(urn()) -> {ok, [_]} | {error, any()}.
attr({urn, _, _} = Id) ->
{ok, #entity{val = CRDT}} = ambitz:whereis(typhoon, uri:s(Id), [{r, 3}]),
Pid = hd( crdts:value(CRDT) ),
{ok, pipe:ioctl(Pid, attr)}.
%%%----------------------------------------------------------------------------
%%%
%%% private
%%%
%%%----------------------------------------------------------------------------
| null | https://raw.githubusercontent.com/fogfish/typhoon/db0d76050a9c41c45582e9928206270450600d00/apps/typhoon/src/typhoon.erl | erlang |
Copyright 2015 Zalando SE
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc
native interface distributed system stress and load testing tool
management interface
data types
----------------------------------------------------------------------------
management interface
----------------------------------------------------------------------------
create a new workload scenario
read content of workload scenario
remove workload scenario
sign up a new user
read user profile
read user scenario
run load scenario, the scenario will terminate automatically after timeout
abort load scenario
Note: this is not nice but we need to delay re-start of scenario
the ultimate fix in the pipeline
return list of peer(s) nodes (ip addresses)
@todo: it would fail if cluster uses FQDN
return number of active load units
return static attributes about scenario
----------------------------------------------------------------------------
private
---------------------------------------------------------------------------- | Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(typhoon).
-author('').
-include_lib("ambitz/include/ambitz.hrl").
-compile({parse_transform, category}).
-export([start/0]).
-export([
put/3
,get/2
,remove/2
,signup/2
,profile/2
,scenario/2
,peer/1
,run/1
,abort/1
,unit/1
,attr/1
]).
-type urn() :: {urn, _, _}.
-type opts() :: [_].
RnD node start
start() ->
applib:boot(?MODULE, code:where_is_file("dev.config")).
-spec put(urn(), binary(), opts()) -> {ok, _} | {error, _}.
put({urn, User, _} = Key, Spec, Opts) ->
[$^||
scenario_spawn(Key, Spec, Opts),
typhoon_kv:append(scenario, {urn, user, User}, Key, Opts)
].
scenario_spawn(Key, Spec, Opts) ->
ambitz:spawn(typhoon, uri:s(Key),
{typhoon_scenario, start_link, [scalar:atom(uri:path(Key)), Spec]},
Opts
).
-spec get(urn(), opts()) -> {ok, ambitz:entity()}.
get({urn, _, _} = Key, Opts) ->
ambitz:lookup(typhoon, uri:s(Key), Opts).
-spec remove(urn(), opts()) -> {ok, _} | {error, _}.
remove({urn, User, _} = Key, Opts) ->
[$^||
typhoon_kv:remove(scenario, {urn, user, User}, Key, Opts),
scenario_free(Key, Opts)
].
scenario_free(Key, Opts) ->
ambitz:free(typhoon, uri:s(Key), Opts).
-spec signup(urn(), opts()) -> ok.
signup({urn, user, _} = User, Opts) ->
Profile = crdts:update(uri:path(User), crdts:new(lwwreg)),
Spec = {typhoon_user, start_link, [Profile]},
{ok, _} = ambitz:spawn(typhoon, uri:s(User), Spec, Opts),
ok.
-spec profile(urn(), opts()) -> {ok, _} | {error, not_found}.
profile({urn, user, _} = User, Opts) ->
case ambitz:get(typhoon, uri:s(User), profile, Opts) of
{ok, #entity{val = undefined}} ->
{error, not_found};
{ok, #entity{val = Profile}} ->
{ok, crdts:value(Profile)}
end.
-spec scenario(urn(), opts()) -> {ok, _} | {error, not_found}.
scenario({urn, user, _} = User, Opts) ->
case ambitz:get(typhoon, uri:s(User), scenario, Opts) of
{ok, #entity{val = undefined}} ->
{error, not_found};
{ok, #entity{val = Scenario}} ->
{ok, crdts:value(Scenario)}
end.
-spec run(urn()) -> ok | {error, _}.
run({urn, _, _} = Id) ->
{ok, #entity{val = CRDT}} = ambitz:whereis(typhoon, uri:s(Id), [{r, 1}]),
Pids = crdts:value(CRDT),
Pid = lists:nth(rand:uniform(length(Pids)), Pids),
pipe:call(Pid, run).
-spec abort(urn()) -> ok | {error, _}.
abort({urn, _, _} = Id) ->
{ok, #entity{val = Val}} = typhoon:get(Id, [{w, 3}]),
typhoon:remove(Id, [{w, 3}]),
{_, _, [Mod, Spec]} = crdts:value(Val),
lists:foreach(
fun(Node) ->
_ = rpc:call(Node, code, purge, [Mod]),
_ = rpc:call(Node, code, delete, [Mod])
end,
[erlang:node() | erlang:nodes()]
),
timer:sleep(5000),
typhoon:put(Id, Spec, [{w, 3}]).
-spec peer(urn()) -> [_].
peer(Id) ->
[addr(Vnode) || Vnode <- ek:successors(typhoon, scalar:s(Id)),
ek:vnode(type, Vnode) == primary].
addr(Vnode) ->
[_, Host] = binary:split(ek:vnode(node, Vnode), <<$@>>),
{ok, IP} = inet_parse:address(scalar:c(Host)),
IP.
-spec unit(urn()) -> {ok, integer()} | {error, any()}.
unit({urn, _, _} = Id) ->
{ok, #entity{val = CRDT}} = ambitz:whereis(typhoon, uri:s(Id), [{r, 3}]),
lists:sum(
lists:map(
fun(Pid) -> pipe:ioctl(Pid, n) end,
crdts:value(CRDT)
)
).
-spec attr(urn()) -> {ok, [_]} | {error, any()}.
attr({urn, _, _} = Id) ->
{ok, #entity{val = CRDT}} = ambitz:whereis(typhoon, uri:s(Id), [{r, 3}]),
Pid = hd( crdts:value(CRDT) ),
{ok, pipe:ioctl(Pid, attr)}.
|
250a1e60d093c1047a3a700061488447d009879639b488e6ee32e5d83405491a | CSVdB/pinky | Internal.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE RecordWildCards #
# LANGUAGE FlexibleContexts #
module Pinky.Core.HyperParams.Internal where
import Import
import Pinky.Utils
import Control.Monad.State.Lazy
import Pinky.Utils.PositiveInt.Internal
data HyperParams = HyperParams
{ hyperRate :: PositiveDouble
, hyperDecayRate :: ProperFraction
, hyperMomentum :: ProperFraction
, hyperRegulator :: PositiveDouble
, hyperBatchSize :: PositiveInt
} deriving (Show, Eq, Generic)
-- | Builds a 'HyperParams' from raw numbers, validating every field
-- individually and then the combined record ('prettyValidation').
-- Fails with the first offending field's error message.
hyperParamsFromBasics ::
       Double  -- ^ learning rate
    -> Double  -- ^ decay rate
    -> Double  -- ^ momentum
    -> Double  -- ^ regulator
    -> Natural -- ^ batch size
    -> Either String HyperParams
hyperParamsFromBasics rate' dr' mom' reg' bs' =
    prettyValidation =<<
    (HyperParams
         <$> constructPositiveDouble rate'
         <*> constructProperFraction dr'
         <*> constructProperFraction mom'
         <*> constructPositiveDouble reg'
         <*> constructPositiveInt bs')
constructHyperParams ::
PositiveDouble
-> ProperFraction
-> ProperFraction
-> PositiveDouble
-> Natural
-> Either String HyperParams
constructHyperParams rate dr mom reg =
prettyValidation . HyperParams rate dr mom reg . PositiveInt
-- | A 'HyperParams' record is valid when each field validates on its
-- own and, in addition, the product of learning rate and regulator is
-- smaller than one.
instance Validity HyperParams where
    validate HyperParams {..} =
        let valProd =
                case (< posOne) <$> pMultiply hyperRate hyperRegulator of
                    Left errMess -> invalidOnPos "rate * regulator" errMess
                    Right True -> valid
                    Right False ->
                        -- BUG FIX: the message used to read
                        -- "Is smaller than 1", which is the opposite
                        -- of the violated condition.
                        invalidOnPos "rate * regulator" "Is not smaller than 1"
        in valProd <>
           mconcat
               [ delve "hyperRate" hyperRate
               , delve "hyperDecayRate" hyperDecayRate
               , delve "hyperMomentum" hyperMomentum
               , delve "hyperRegulator" hyperRegulator
               , delve "hyperBatchSize" hyperBatchSize
               ]
-- | Builds a failing 'Validation' that attributes the violation
-- message to the given (pseudo-)field location.
invalidOnPos :: String -> String -> Validation
invalidOnPos posString errString =
    Validation [Location posString $ Violated errString]
-- | Applies one step of learning-rate decay: the rate is multiplied
-- by the decay factor; all other hyper parameters stay untouched.
decay :: MonadState HyperParams m => m ()
decay =
    modify $ \hp ->
        hp {hyperRate = dMultiply (hyperRate hp) $ hyperDecayRate hp}
| null | https://raw.githubusercontent.com/CSVdB/pinky/e77a4c0812ceb4b8548c41a7652fb247c2ab39e0/pinky/src/Pinky/Core/HyperParams/Internal.hs | haskell | # LANGUAGE DeriveGeneric #
# LANGUAGE RecordWildCards #
# LANGUAGE FlexibleContexts #
module Pinky.Core.HyperParams.Internal where
import Import
import Pinky.Utils
import Control.Monad.State.Lazy
import Pinky.Utils.PositiveInt.Internal
data HyperParams = HyperParams
{ hyperRate :: PositiveDouble
, hyperDecayRate :: ProperFraction
, hyperMomentum :: ProperFraction
, hyperRegulator :: PositiveDouble
, hyperBatchSize :: PositiveInt
} deriving (Show, Eq, Generic)
hyperParamsFromBasics ::
Double
-> Double
-> Double
-> Double
-> Natural
-> Either String HyperParams
hyperParamsFromBasics rate' dr' mom' reg' bs' = do
rate <- constructPositiveDouble rate'
dr <- constructProperFraction dr'
mom <- constructProperFraction mom'
reg <- constructPositiveDouble reg'
bs <- constructPositiveInt bs'
prettyValidation $ HyperParams rate dr mom reg bs
constructHyperParams ::
PositiveDouble
-> ProperFraction
-> ProperFraction
-> PositiveDouble
-> Natural
-> Either String HyperParams
constructHyperParams rate dr mom reg =
prettyValidation . HyperParams rate dr mom reg . PositiveInt
instance Validity HyperParams where
validate HyperParams {..} =
let valProd =
case (< posOne) <$> pMultiply hyperRate hyperRegulator of
Left errMess -> invalidOnPos "rate * regulator" errMess
Right True -> valid
Right False ->
invalidOnPos "rate * regulator" "Is smaller than 1"
in valProd <>
mconcat
[ delve "hyperRate" hyperRate
, delve "hyperDecayRate" hyperDecayRate
, delve "hyperMomentum" hyperMomentum
, delve "hyperRegulator" hyperRegulator
, delve "hyperBatchSize" hyperBatchSize
]
invalidOnPos :: String -> String -> Validation
invalidOnPos posString errString =
Validation [Location posString $ Violated errString]
decay :: MonadState HyperParams m => m ()
decay =
state $ \hp ->
((), hp {hyperRate = dMultiply (hyperRate hp) $ hyperDecayRate hp})
| |
289b4980de331eb5008516eafef74a8ec09406198dc384d2265b68df36eada47 | kallisti-dev/hs-webdriver | WebDriver.hs | |
This module serves as the top - level interface to the Haskell WebDriver bindings ,
providing most of the functionality you 're likely to want .
This module serves as the top-level interface to the Haskell WebDriver bindings,
providing most of the functionality you're likely to want.
-}
module Test.WebDriver
* WebDriver monad
WD(..)
* Running WebDriver commands
, runSession, withSession, runWD
* WebDriver configuration
, WDConfig(..), defaultConfig
-- ** Configuration helper functions
-- | Instead of working with the 'Capabilities' record directly, you can use
-- these config modifier functions to specify common options.
, useBrowser, useProxy, useVersion, usePlatform
-- ** Session history configuration
, SessionHistoryConfig, noHistory, unlimitedHistory, onlyMostRecentHistory
-- ** HTTP request header utilities
, withRequestHeaders, withAuthHeaders
* WebDriver commands
, module Test.WebDriver.Commands
-- * Capabilities (advanced configuration)
, Capabilities(..), defaultCaps, allCaps, modifyCaps
, Platform(..), ProxyType(..)
-- ** Browser-specific capabilities
, Browser(..), LogLevel(..)
-- *** Browser defaults
, firefox, chrome, ie, opera, iPhone, iPad, android
-- * Exception handling
, finallyClose, closeOnException
, module Test.WebDriver.Exceptions
-- * Accessing session history
, SessionHistory(..), getSessionHistory, dumpSessionHistory
) where
import Test.WebDriver.Types
import Test.WebDriver.Commands
import Test.WebDriver.Monad
import Test.WebDriver.Exceptions
import Test.WebDriver.Config
import Test.WebDriver.Session
| null | https://raw.githubusercontent.com/kallisti-dev/hs-webdriver/ea594ce8720c9e11f053b2567f250079f0eac33b/src/Test/WebDriver.hs | haskell | ** Configuration helper functions
| Instead of working with the 'Capabilities' record directly, you can use
these config modifier functions to specify common options.
** Session history configuration
** HTTP request header utilities
* Capabilities (advanced configuration)
** Browser-specific capabilities
*** Browser defaults
* Exception handling
* Accessing session history | |
This module serves as the top - level interface to the Haskell WebDriver bindings ,
providing most of the functionality you 're likely to want .
This module serves as the top-level interface to the Haskell WebDriver bindings,
providing most of the functionality you're likely to want.
-}
module Test.WebDriver
* WebDriver monad
WD(..)
* Running WebDriver commands
, runSession, withSession, runWD
* WebDriver configuration
, WDConfig(..), defaultConfig
, useBrowser, useProxy, useVersion, usePlatform
, SessionHistoryConfig, noHistory, unlimitedHistory, onlyMostRecentHistory
, withRequestHeaders, withAuthHeaders
* WebDriver commands
, module Test.WebDriver.Commands
, Capabilities(..), defaultCaps, allCaps, modifyCaps
, Platform(..), ProxyType(..)
, Browser(..), LogLevel(..)
, firefox, chrome, ie, opera, iPhone, iPad, android
, finallyClose, closeOnException
, module Test.WebDriver.Exceptions
, SessionHistory(..), getSessionHistory, dumpSessionHistory
) where
import Test.WebDriver.Types
import Test.WebDriver.Commands
import Test.WebDriver.Monad
import Test.WebDriver.Exceptions
import Test.WebDriver.Config
import Test.WebDriver.Session
|
b63e7a10a2ef217f82e20f439823e523c7a77fc0951704b59f1a7491994d30f7 | gas2serra/mcclim-desktop | commands.lisp | (in-package :desktop-app-manager)
(define-desktop-app-manager-command (com-quit :menu nil
:keystroke (#\q :meta))
()
(clim:frame-exit clim:*application-frame*))
(define-desktop-app-manager-command (com-refresh :menu nil
:keystroke (#\r :meta))
()
(clim:redisplay-frame-pane clim:*application-frame*
(clim:find-pane-named clim:*application-frame* 'application-display)))
;; Re-scans the application registry and redraws the application
;; display pane so the list reflects the current set of applications.
(define-desktop-app-manager-command (com-refresh-apps :menu nil)
    ()
  (refresh-applications)
  (clim:redisplay-frame-pane clim:*application-frame*
			     (clim:find-pane-named clim:*application-frame* 'application-display)))
(define-desktop-app-manager-command (com-clear-interactor
:menu nil
:keystroke (#\c :meta))
()
(let ((pane (clim:find-pane-named clim:*application-frame* 'interactor)))
(clim:window-clear pane)))
;; traslators
| null | https://raw.githubusercontent.com/gas2serra/mcclim-desktop/f85d19c57d76322ae3c05f98ae43bfc8c0d0a554/Apps/app-manager/gui/commands.lisp | lisp | traslators | (in-package :desktop-app-manager)
(define-desktop-app-manager-command (com-quit :menu nil
:keystroke (#\q :meta))
()
(clim:frame-exit clim:*application-frame*))
(define-desktop-app-manager-command (com-refresh :menu nil
:keystroke (#\r :meta))
()
(clim:redisplay-frame-pane clim:*application-frame*
(clim:find-pane-named clim:*application-frame* 'application-display)))
(define-desktop-app-manager-command (com-refresh-apps :menu nil)
()
(refresh-applications)
(clim:redisplay-frame-pane clim:*application-frame*
(clim:find-pane-named clim:*application-frame* 'application-display)))
(define-desktop-app-manager-command (com-clear-interactor
:menu nil
:keystroke (#\c :meta))
()
(let ((pane (clim:find-pane-named clim:*application-frame* 'interactor)))
(clim:window-clear pane)))
|
20f65e38b4538e1afddd64550e49bc74e518a49239549bb9df86ff0491cdabfa | haskell-repa/repa | Gang.hs |
-- | Gang Primitives.
module Data.Repa.Eval.Gang
(Gang, forkGang, gangSize, gangIO, gangST)
where
import GHC.IO
import GHC.ST
import GHC.Conc (forkOn)
import Control.Concurrent.MVar
import Control.Exception (assert)
import Control.Monad
import System.IO
import GHC.Exts
-- Requests -------------------------------------------------------------------
-- | The 'Req' type encapsulates work requests for individual members of a gang.
data Req
-- | Instruct the worker to run the given action.
= ReqDo (Int# -> IO ())
-- | Tell the worker that we're shutting the gang down.
-- The worker should signal that it's receieved the request by
-- writing to its result var before returning to the caller (forkGang).
| ReqShutdown
-- Gang -----------------------------------------------------------------------
-- | A 'Gang' is a group of threads that execute arbitrary work requests.
data Gang
= Gang
{ -- | Number of threads in the gang.
_gangThreads :: Int#
-- | Workers listen for requests on these vars.
, _gangRequestVars :: [MVar Req]
-- | Workers put their results in these vars.
, _gangResultVars :: [MVar ()]
-- | Indicates that the gang is busy.
, _gangBusy :: MVar Bool
}
instance Show Gang where
showsPrec p (Gang n _ _ _)
= showString "<<"
. showsPrec p (I# n)
. showString " threads>>"
-- | O(1). Yield the number of threads in the 'Gang'.
--
-- FIX: the NOINLINE pragma had lost its @{-@ / @-}@ delimiters
-- (extraction damage) and did not parse; restored below.
gangSize :: Gang -> Int#
gangSize (Gang n _ _ _)
        = n
{-# NOINLINE gangSize #-}
| Fork a ' Gang ' with the given number of threads ( at least 1 ) .
forkGang :: Int -> IO Gang
forkGang !n@(I# n_)
= assert (n > 0)
$ do
-- Create the vars we'll use to issue work requests.
mvsRequest <- sequence $ replicate n $ newEmptyMVar
-- Create the vars we'll use to signal that threads are done.
mvsDone <- sequence $ replicate n $ newEmptyMVar
-- Add finalisers so we can shut the workers down cleanly if they
-- become unreachable.
zipWithM_ (\varReq varDone
-> mkWeakMVar varReq (finaliseWorker varReq varDone))
mvsRequest
mvsDone
-- Create all the worker threads
zipWithM_ forkOn [0..]
$ zipWith3 (\(I# i) -> gangWorker i)
[0 .. n - 1] mvsRequest mvsDone
-- The gang is currently idle.
busy <- newMVar False
return $ Gang n_ mvsRequest mvsDone busy
# NOINLINE forkGang #
-- | The worker thread of a 'Gang'.
The threads blocks on the MVar waiting for a work request .
gangWorker :: Int# -> MVar Req -> MVar () -> IO ()
gangWorker threadId varRequest varDone
= do
-- Wait for a request
req <- takeMVar varRequest
case req of
ReqDo action
-> do -- Run the action we were given.
action threadId
Signal that the action is complete .
putMVar varDone ()
-- Wait for more requests.
gangWorker threadId varRequest varDone
ReqShutdown
-> putMVar varDone ()
# NOINLINE gangWorker #
-- | Finaliser for worker threads.
We want to shutdown the corresponding thread when it 's MVar becomes
-- unreachable.
Without this programs can complain about " Blocked indefinitely
on an MVar " because worker threads are still blocked on the request
-- MVars when the program ends. Whether the finalizer is called or not
is very racey . It happens about 1 in 10 runs when for the
-- repa-edgedetect benchmark, and less often with the others.
--
We 're relying on the comment in System . Mem . Weak that says
-- "If there are no other threads to run, the runtime system will
-- check for runnablefinalizers before declaring the system to be
-- deadlocked."
--
-- If we were creating and destroying the gang cleanly we wouldn't need
-- this, but theGang is created with a top-level unsafePerformIO.
-- Hacks beget hacks beget hacks...
--
finaliseWorker :: MVar Req -> MVar () -> IO ()
finaliseWorker varReq varDone
= do putMVar varReq ReqShutdown
takeMVar varDone
return ()
# NOINLINE finaliseWorker #
-- | Issue work requests for the 'Gang' and wait until they complete.
--
--   If the gang is already busy then print a warning to `stderr` and just
--   run the actions sequentially in the requesting thread.
--
-- FIX: the NOINLINE pragma had lost its @{-@ / @-}@ delimiters
-- (extraction damage) and did not parse; restored below.
gangIO  :: Gang
        -> (Int# -> IO ())
        -> IO ()
gangIO gang@(Gang _ _ _ busy) action
 = do   -- Atomically claim the gang; 'b' tells us whether it was
        -- already busy (i.e. a nested parallel computation).
        b <- swapMVar busy True
        if b
         then do
                -- Nested call: degrade to sequential execution in the
                -- calling thread (seqIO warns on stderr).
                seqIO gang action
         else do
                parIO gang action
                -- Release the gang; only done on the non-nested path,
                -- since the outer computation owns the busy flag.
                _ <- swapMVar busy False
                return ()
{-# NOINLINE gangIO #-}
-- | Run an action on the gang sequentially.
seqIO :: Gang -> (Int# -> IO ()) -> IO ()
seqIO (Gang n _ _ _) action
= do hPutStr stderr
$ unlines
[ "Data.Array.Repa.Bulk.Par: Performing nested parallel computation sequentially."
, " Something is trying to run a compuation on a gang that is already busy. "
, " You've probably used a Repa 'computeP', 'foldP' or similar function while "
, " another instance was already running. This can happen if you've passed a "
, " parallel worker function to a combinator like 'map', or some parallel "
, " compuation was suspended via lazy evaluation. Try using `seq` to ensure that"
, " each array is fully evaluated before computing the next one. "
, "" ]
mapM_ (\(I# i) -> action i) [0 .. (I# n) - 1]
# NOINLINE seqIO #
-- | Run an action on the gang in parallel.
--   Issues one request per worker and blocks until every worker has
--   signalled completion on its result var.
--
-- FIX: the NOINLINE pragma had lost its @{-@ / @-}@ delimiters
-- (extraction damage) and did not parse; restored below.
parIO :: Gang -> (Int# -> IO ()) -> IO ()
parIO (Gang _ mvsRequest mvsResult _) action
 = do
        -- Send requests to all the threads.
        mapM_ (\v -> putMVar v (ReqDo action)) mvsRequest

        -- Wait for all the requests to complete.
        mapM_ takeMVar mvsResult
{-# NOINLINE parIO #-}
-- | Same as 'gangIO' but in the 'ST' monad.
--   Uses the unsafe IO\/ST coercions; this is sound provided the
--   worker actions perform only ST-legal effects.
--
-- FIX: the NOINLINE pragma had lost its @{-@ / @-}@ delimiters
-- (extraction damage) and did not parse; restored below.
gangST :: Gang -> (Int# -> ST s ()) -> ST s ()
gangST g p
 = unsafeIOToST $ gangIO g (\i -> unsafeSTToIO $ p i)
{-# NOINLINE gangST #-}
| null | https://raw.githubusercontent.com/haskell-repa/repa/c867025e99fd008f094a5b18ce4dabd29bed00ba/repa-eval/Data/Repa/Eval/Gang.hs | haskell | | Gang Primitives.
Requests -------------------------------------------------------------------
| The 'Req' type encapsulates work requests for individual members of a gang.
| Instruct the worker to run the given action.
| Tell the worker that we're shutting the gang down.
The worker should signal that it's receieved the request by
writing to its result var before returning to the caller (forkGang).
Gang -----------------------------------------------------------------------
| A 'Gang' is a group of threads that execute arbitrary work requests.
| Number of threads in the gang.
| Workers listen for requests on these vars.
| Workers put their results in these vars.
| Indicates that the gang is busy.
| O(1). Yield the number of threads in the 'Gang'.
Create the vars we'll use to issue work requests.
Create the vars we'll use to signal that threads are done.
Add finalisers so we can shut the workers down cleanly if they
become unreachable.
Create all the worker threads
The gang is currently idle.
| The worker thread of a 'Gang'.
Wait for a request
Run the action we were given.
Wait for more requests.
| Finaliser for worker threads.
unreachable.
MVars when the program ends. Whether the finalizer is called or not
repa-edgedetect benchmark, and less often with the others.
"If there are no other threads to run, the runtime system will
check for runnablefinalizers before declaring the system to be
deadlocked."
If we were creating and destroying the gang cleanly we wouldn't need
this, but theGang is created with a top-level unsafePerformIO.
Hacks beget hacks beget hacks...
| Issue work requests for the 'Gang' and wait until they complete.
If the gang is already busy then print a warning to `stderr` and just
run the actions sequentially in the requesting thread.
| Run an action on the gang sequentially.
| Run an action on the gang in parallel.
Send requests to all the threads.
Wait for all the requests to complete.
| Same as 'gangIO' but in the 'ST' monad. |
module Data.Repa.Eval.Gang
(Gang, forkGang, gangSize, gangIO, gangST)
where
import GHC.IO
import GHC.ST
import GHC.Conc (forkOn)
import Control.Concurrent.MVar
import Control.Exception (assert)
import Control.Monad
import System.IO
import GHC.Exts
data Req
= ReqDo (Int# -> IO ())
| ReqShutdown
data Gang
= Gang
_gangThreads :: Int#
, _gangRequestVars :: [MVar Req]
, _gangResultVars :: [MVar ()]
, _gangBusy :: MVar Bool
}
instance Show Gang where
showsPrec p (Gang n _ _ _)
= showString "<<"
. showsPrec p (I# n)
. showString " threads>>"
gangSize :: Gang -> Int#
gangSize (Gang n _ _ _)
= n
# NOINLINE gangSize #
| Fork a ' Gang ' with the given number of threads ( at least 1 ) .
forkGang :: Int -> IO Gang
forkGang !n@(I# n_)
= assert (n > 0)
$ do
mvsRequest <- sequence $ replicate n $ newEmptyMVar
mvsDone <- sequence $ replicate n $ newEmptyMVar
zipWithM_ (\varReq varDone
-> mkWeakMVar varReq (finaliseWorker varReq varDone))
mvsRequest
mvsDone
zipWithM_ forkOn [0..]
$ zipWith3 (\(I# i) -> gangWorker i)
[0 .. n - 1] mvsRequest mvsDone
busy <- newMVar False
return $ Gang n_ mvsRequest mvsDone busy
# NOINLINE forkGang #
The threads blocks on the MVar waiting for a work request .
gangWorker :: Int# -> MVar Req -> MVar () -> IO ()
gangWorker threadId varRequest varDone
= do
req <- takeMVar varRequest
case req of
ReqDo action
action threadId
Signal that the action is complete .
putMVar varDone ()
gangWorker threadId varRequest varDone
ReqShutdown
-> putMVar varDone ()
# NOINLINE gangWorker #
We want to shutdown the corresponding thread when it 's MVar becomes
Without this programs can complain about " Blocked indefinitely
on an MVar " because worker threads are still blocked on the request
is very racey . It happens about 1 in 10 runs when for the
We 're relying on the comment in System . Mem . Weak that says
finaliseWorker :: MVar Req -> MVar () -> IO ()
finaliseWorker varReq varDone
= do putMVar varReq ReqShutdown
takeMVar varDone
return ()
# NOINLINE finaliseWorker #
gangIO :: Gang
-> (Int# -> IO ())
-> IO ()
gangIO gang@(Gang _ _ _ busy) action
= do b <- swapMVar busy True
if b
then do
seqIO gang action
else do
parIO gang action
_ <- swapMVar busy False
return ()
# NOINLINE gangIO #
seqIO :: Gang -> (Int# -> IO ()) -> IO ()
seqIO (Gang n _ _ _) action
= do hPutStr stderr
$ unlines
[ "Data.Array.Repa.Bulk.Par: Performing nested parallel computation sequentially."
, " Something is trying to run a compuation on a gang that is already busy. "
, " You've probably used a Repa 'computeP', 'foldP' or similar function while "
, " another instance was already running. This can happen if you've passed a "
, " parallel worker function to a combinator like 'map', or some parallel "
, " compuation was suspended via lazy evaluation. Try using `seq` to ensure that"
, " each array is fully evaluated before computing the next one. "
, "" ]
mapM_ (\(I# i) -> action i) [0 .. (I# n) - 1]
# NOINLINE seqIO #
parIO :: Gang -> (Int# -> IO ()) -> IO ()
parIO (Gang _ mvsRequest mvsResult _) action
= do
mapM_ (\v -> putMVar v (ReqDo action)) mvsRequest
mapM_ takeMVar mvsResult
# NOINLINE parIO #
gangST :: Gang -> (Int# -> ST s ()) -> ST s ()
gangST g p
= unsafeIOToST $ gangIO g (\i -> unsafeSTToIO $ p i)
# NOINLINE gangST #
|
2ae1019b45980afe472595f6587b07cf7bcf93b688bc85a430022f70e4d71699 | dialohq/ocaml-grpc | buffer.mli | type t
(** [t] represents a buffer which is based on bytes but keeps track of the
length of valid data inside it. The internal capacity is increased when
needed. *)
val v : ?capacity:int -> unit -> t
(** [c ~capacity ()] creates a new buffer with internal capacity [capacity]. *)
val length : t -> int
(** [length t] returns the length of valid data in the buffer. *)
val capacity : t -> int
(** [capacity t] returns the total capacity of the buffer. *)
val to_bytes : t -> bytes
(** [to_bytes t] converts the valid data in the buffer into bytes. *)
val to_string : t -> string
(** [to_string t] converts the valid data in the buffer into a string. *)
val copy_from_bigstringaf :
src_off:int -> src:Bigstringaf.t -> dst:t -> length:int -> unit
* [ copy_from_bigstringaf ~src_off ~src ~dst ] copies data from [ src ]
into [ dst ] starting from [ src_off ] and ending at [ src_off + length ] to the
end of the buffer .
into [dst] starting from [src_off] and ending at [src_off + length] to the
end of the buffer. *)
val sub : start:int -> length:int -> t -> t
* [ sub ~start t ] creates a new buffer from the current , containing the data in the range \[start , start+length ) .
val get_u8 : pos:int -> t -> int
* [ get_u8 ~pos t ] returns the unsigned 8 bit integer at [ pos ] in [ t ] .
val get_u32_be : pos:int -> t -> int
* [ get_u32_be ~pos t ] returns the unsigned 32 bit big endian integer at [ pos ] in [ t ] .
val shift_left : by:int -> t -> unit
(** [shift_left ~by t] shifts [t] left by [by] positions, discarding the data. *)
| null | https://raw.githubusercontent.com/dialohq/ocaml-grpc/79fa9373db069f7c8840b2f52b5df89499f75ba2/lib/grpc/buffer.mli | ocaml | * [t] represents a buffer which is based on bytes but keeps track of the
length of valid data inside it. The internal capacity is increased when
needed.
* [c ~capacity ()] creates a new buffer with internal capacity [capacity].
* [length t] returns the length of valid data in the buffer.
* [capacity t] returns the total capacity of the buffer.
* [to_bytes t] converts the valid data in the buffer into bytes.
* [to_string t] converts the valid data in the buffer into a string.
* [shift_left ~by t] shifts [t] left by [by] positions, discarding the data. | type t
val v : ?capacity:int -> unit -> t
val length : t -> int
val capacity : t -> int
val to_bytes : t -> bytes
val to_string : t -> string
val copy_from_bigstringaf :
src_off:int -> src:Bigstringaf.t -> dst:t -> length:int -> unit
* [ copy_from_bigstringaf ~src_off ~src ~dst ] copies data from [ src ]
into [ dst ] starting from [ src_off ] and ending at [ src_off + length ] to the
end of the buffer .
into [dst] starting from [src_off] and ending at [src_off + length] to the
end of the buffer. *)
val sub : start:int -> length:int -> t -> t
* [ sub ~start t ] creates a new buffer from the current , containing the data in the range \[start , start+length ) .
val get_u8 : pos:int -> t -> int
* [ get_u8 ~pos t ] returns the unsigned 8 bit integer at [ pos ] in [ t ] .
val get_u32_be : pos:int -> t -> int
* [ get_u32_be ~pos t ] returns the unsigned 32 bit big endian integer at [ pos ] in [ t ] .
val shift_left : by:int -> t -> unit
|
5ec0bd0301000adf6f6d5eb8323eea36c7f83a7f57da8eecc44397fd85f8e397 | nextjournal/advent-of-clerk | day_25.clj | # 🎄 Advent of Clerk : Day 25
(ns advent-of-clerk.day-25
(:require [nextjournal.clerk :as clerk]))
| null | https://raw.githubusercontent.com/nextjournal/advent-of-clerk/2401fe42669220673f4a8dc6b70682483fae7038/src/advent_of_clerk/day_25.clj | clojure | # 🎄 Advent of Clerk : Day 25
(ns advent-of-clerk.day-25
(:require [nextjournal.clerk :as clerk]))
| |
0bef7b92f361beb0de2c83418ed9d67284de512b673429fa9e8176c0f8b208cd | gedge-platform/gedge-platform | rabbit_prometheus_handler.erl | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%
Copyright ( c ) 2007 - 2021 VMware , Inc. or its affiliates . All rights reserved .
%%
-module(rabbit_prometheus_handler).
-export([init/2]).
-export([generate_response/2, content_types_provided/2, is_authorized/2]).
-export([setup/0]).
-include_lib("amqp_client/include/amqp_client.hrl").
-define(SCRAPE_DURATION, telemetry_scrape_duration_seconds).
-define(SCRAPE_SIZE, telemetry_scrape_size_bytes).
-define(SCRAPE_ENCODED_SIZE, telemetry_scrape_encoded_size_bytes).
%% ===================================================================
%% Cowboy Handler Callbacks
%% ===================================================================
init(Req, _State) ->
{cowboy_rest, Req, #{}}.
content_types_provided(ReqData, Context) ->
Since Prometheus 2.0 Protobuf is no longer supported
{[{{<<"text">>, <<"plain">>, '*'}, generate_response}], ReqData, Context}.
is_authorized(ReqData, Context) ->
{true, ReqData, Context}.
setup() ->
setup_metrics(telemetry_registry()),
setup_metrics('per-object').
setup_metrics(Registry) ->
ScrapeDuration = [{name, ?SCRAPE_DURATION},
{help, "Scrape duration"},
{labels, ["registry", "content_type"]},
{registry, Registry}],
ScrapeSize = [{name, ?SCRAPE_SIZE},
{help, "Scrape size, not encoded"},
{labels, ["registry", "content_type"]},
{registry, Registry}],
ScrapeEncodedSize = [{name, ?SCRAPE_ENCODED_SIZE},
{help, "Scrape size, encoded"},
{labels, ["registry", "content_type", "encoding"]},
{registry, Registry}],
prometheus_summary:declare(ScrapeDuration),
prometheus_summary:declare(ScrapeSize),
prometheus_summary:declare(ScrapeEncodedSize).
%% ===================================================================
%% Private functions
%% ===================================================================
generate_response(ReqData, Context) ->
Method = cowboy_req:method(ReqData),
Response = gen_response(Method, ReqData),
{stop, Response, Context}.
gen_response(<<"GET">>, Request) ->
Registry0 = cowboy_req:binding(registry, Request, <<"default">>),
case prometheus_registry:exists(Registry0) of
false ->
cowboy_req:reply(404, #{}, <<"Unknown Registry">>, Request);
Registry ->
gen_metrics_response(Registry, Request)
end;
gen_response(_, Request) ->
Request.
gen_metrics_response(Registry, Request) ->
{Code, RespHeaders, Body} = reply(Registry, Request),
Headers = to_cowboy_headers(RespHeaders),
cowboy_req:reply(Code, maps:from_list(Headers), Body, Request).
to_cowboy_headers(RespHeaders) ->
lists:map(fun to_cowboy_headers_/1, RespHeaders).
to_cowboy_headers_({Name, Value}) ->
{to_cowboy_name(Name), Value}.
to_cowboy_name(Name) ->
binary:replace(atom_to_binary(Name, utf8), <<"_">>, <<"-">>).
reply(Registry, Request) ->
case validate_registry(Registry, registry()) of
{true, RealRegistry} ->
format_metrics(Request, RealRegistry);
{registry_conflict, _ReqR, _ConfR} ->
{409, [], <<>>};
{registry_not_found, _ReqR} ->
{404, [], <<>>};
false ->
false
end.
format_metrics(Request, Registry) ->
AcceptEncoding = cowboy_req:header(<<"accept-encoding">>, Request, undefined),
ContentType = prometheus_text_format:content_type(),
Scrape = render_format(ContentType, Registry),
Encoding = accept_encoding_header:negotiate(AcceptEncoding, [<<"identity">>,
<<"gzip">>]),
encode_format(ContentType, binary_to_list(Encoding), Scrape, Registry).
render_format(ContentType, Registry) ->
Scrape = prometheus_summary:observe_duration(
Registry,
?SCRAPE_DURATION,
[Registry, ContentType],
fun () -> prometheus_text_format:format(Registry) end),
prometheus_summary:observe(Registry,
?SCRAPE_SIZE,
[Registry, ContentType],
iolist_size(Scrape)),
Scrape.
validate_registry(undefined, auto) ->
{true, default};
validate_registry(Registry, auto) ->
{true, Registry};
validate_registry(Registry, Registry) ->
{true, Registry};
validate_registry(Asked, Conf) ->
{registry_conflict, Asked, Conf}.
telemetry_registry() ->
application:get_env(rabbitmq_prometheus, telemetry_registry, default).
registry() ->
application:get_env(rabbitmq_prometheus, registry, auto).
encode_format(ContentType, Encoding, Scrape, Registry) ->
Encoded = encode_format_(Encoding, Scrape),
prometheus_summary:observe(telemetry_registry(),
?SCRAPE_ENCODED_SIZE,
[Registry, ContentType, Encoding],
iolist_size(Encoded)),
{200, [{content_type, binary_to_list(ContentType)},
{content_encoding, Encoding}], Encoded}.
encode_format_("gzip", Scrape) ->
zlib:gzip(Scrape);
encode_format_("identity", Scrape) ->
Scrape.
| null | https://raw.githubusercontent.com/gedge-platform/gedge-platform/97c1e87faf28ba2942a77196b6be0a952bff1c3e/gs-broker/broker-server/deps/rabbitmq_prometheus/src/rabbit_prometheus_handler.erl | erlang |
===================================================================
Cowboy Handler Callbacks
===================================================================
===================================================================
Private functions
=================================================================== | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
Copyright ( c ) 2007 - 2021 VMware , Inc. or its affiliates . All rights reserved .
-module(rabbit_prometheus_handler).
-export([init/2]).
-export([generate_response/2, content_types_provided/2, is_authorized/2]).
-export([setup/0]).
-include_lib("amqp_client/include/amqp_client.hrl").
-define(SCRAPE_DURATION, telemetry_scrape_duration_seconds).
-define(SCRAPE_SIZE, telemetry_scrape_size_bytes).
-define(SCRAPE_ENCODED_SIZE, telemetry_scrape_encoded_size_bytes).
init(Req, _State) ->
{cowboy_rest, Req, #{}}.
content_types_provided(ReqData, Context) ->
Since Prometheus 2.0 Protobuf is no longer supported
{[{{<<"text">>, <<"plain">>, '*'}, generate_response}], ReqData, Context}.
is_authorized(ReqData, Context) ->
{true, ReqData, Context}.
setup() ->
setup_metrics(telemetry_registry()),
setup_metrics('per-object').
setup_metrics(Registry) ->
ScrapeDuration = [{name, ?SCRAPE_DURATION},
{help, "Scrape duration"},
{labels, ["registry", "content_type"]},
{registry, Registry}],
ScrapeSize = [{name, ?SCRAPE_SIZE},
{help, "Scrape size, not encoded"},
{labels, ["registry", "content_type"]},
{registry, Registry}],
ScrapeEncodedSize = [{name, ?SCRAPE_ENCODED_SIZE},
{help, "Scrape size, encoded"},
{labels, ["registry", "content_type", "encoding"]},
{registry, Registry}],
prometheus_summary:declare(ScrapeDuration),
prometheus_summary:declare(ScrapeSize),
prometheus_summary:declare(ScrapeEncodedSize).
generate_response(ReqData, Context) ->
Method = cowboy_req:method(ReqData),
Response = gen_response(Method, ReqData),
{stop, Response, Context}.
gen_response(<<"GET">>, Request) ->
Registry0 = cowboy_req:binding(registry, Request, <<"default">>),
case prometheus_registry:exists(Registry0) of
false ->
cowboy_req:reply(404, #{}, <<"Unknown Registry">>, Request);
Registry ->
gen_metrics_response(Registry, Request)
end;
gen_response(_, Request) ->
Request.
gen_metrics_response(Registry, Request) ->
{Code, RespHeaders, Body} = reply(Registry, Request),
Headers = to_cowboy_headers(RespHeaders),
cowboy_req:reply(Code, maps:from_list(Headers), Body, Request).
to_cowboy_headers(RespHeaders) ->
lists:map(fun to_cowboy_headers_/1, RespHeaders).
to_cowboy_headers_({Name, Value}) ->
{to_cowboy_name(Name), Value}.
to_cowboy_name(Name) ->
binary:replace(atom_to_binary(Name, utf8), <<"_">>, <<"-">>).
reply(Registry, Request) ->
case validate_registry(Registry, registry()) of
{true, RealRegistry} ->
format_metrics(Request, RealRegistry);
{registry_conflict, _ReqR, _ConfR} ->
{409, [], <<>>};
{registry_not_found, _ReqR} ->
{404, [], <<>>};
false ->
false
end.
format_metrics(Request, Registry) ->
AcceptEncoding = cowboy_req:header(<<"accept-encoding">>, Request, undefined),
ContentType = prometheus_text_format:content_type(),
Scrape = render_format(ContentType, Registry),
Encoding = accept_encoding_header:negotiate(AcceptEncoding, [<<"identity">>,
<<"gzip">>]),
encode_format(ContentType, binary_to_list(Encoding), Scrape, Registry).
render_format(ContentType, Registry) ->
Scrape = prometheus_summary:observe_duration(
Registry,
?SCRAPE_DURATION,
[Registry, ContentType],
fun () -> prometheus_text_format:format(Registry) end),
prometheus_summary:observe(Registry,
?SCRAPE_SIZE,
[Registry, ContentType],
iolist_size(Scrape)),
Scrape.
validate_registry(undefined, auto) ->
{true, default};
validate_registry(Registry, auto) ->
{true, Registry};
validate_registry(Registry, Registry) ->
{true, Registry};
validate_registry(Asked, Conf) ->
{registry_conflict, Asked, Conf}.
telemetry_registry() ->
application:get_env(rabbitmq_prometheus, telemetry_registry, default).
registry() ->
application:get_env(rabbitmq_prometheus, registry, auto).
encode_format(ContentType, Encoding, Scrape, Registry) ->
Encoded = encode_format_(Encoding, Scrape),
prometheus_summary:observe(telemetry_registry(),
?SCRAPE_ENCODED_SIZE,
[Registry, ContentType, Encoding],
iolist_size(Encoded)),
{200, [{content_type, binary_to_list(ContentType)},
{content_encoding, Encoding}], Encoded}.
encode_format_("gzip", Scrape) ->
zlib:gzip(Scrape);
encode_format_("identity", Scrape) ->
Scrape.
|
6f5a43b9e4ffd7409e8d62e37d4b2e52341ad35eb5e36b7ea0809ae6ba1c32ad | debug-ito/greskell | Env.hs | module TestUtil.Env
( requireEnv
, withEnvForExtServer
, withEnvForIntServer
) where
import System.Environment (lookupEnv)
import Test.Hspec
import Network.Greskell.WebSocket.Connection (Host, Port)
requireEnv :: String -> IO String
requireEnv env_key = maybe bail return =<< lookupEnv env_key
where
bail = expectationFailure msg >> return ""
where
msg = "Set environment variable "++ env_key ++ " for Server test. "
withEnvForExtServer :: SpecWith (Host, Port) -> Spec
withEnvForExtServer = before $ do
hostname <- requireEnv "GRESKELL_TEST_HOST"
port <- fmap read $ requireEnv "GRESKELL_TEST_PORT"
return (hostname, port)
withEnvForIntServer :: SpecWith Port -> Spec
withEnvForIntServer = before $ fmap read $ requireEnv "GRESKELL_TEST_INTERNAL_PORT"
| null | https://raw.githubusercontent.com/debug-ito/greskell/ff21b8297a158cb4b5bafcbb85094cef462c5390/greskell-websocket/test/TestUtil/Env.hs | haskell | module TestUtil.Env
( requireEnv
, withEnvForExtServer
, withEnvForIntServer
) where
import System.Environment (lookupEnv)
import Test.Hspec
import Network.Greskell.WebSocket.Connection (Host, Port)
requireEnv :: String -> IO String
requireEnv env_key = maybe bail return =<< lookupEnv env_key
where
bail = expectationFailure msg >> return ""
where
msg = "Set environment variable "++ env_key ++ " for Server test. "
withEnvForExtServer :: SpecWith (Host, Port) -> Spec
withEnvForExtServer = before $ do
hostname <- requireEnv "GRESKELL_TEST_HOST"
port <- fmap read $ requireEnv "GRESKELL_TEST_PORT"
return (hostname, port)
withEnvForIntServer :: SpecWith Port -> Spec
withEnvForIntServer = before $ fmap read $ requireEnv "GRESKELL_TEST_INTERNAL_PORT"
| |
735924072eb209d1e7b38dfe6ebae42a1891ac210941ff4def9ddf69ce231eed | synduce/Synduce | list_mps.ml |
let base_case = 0
let init a = max a 0
let join b c = max b (b + c)
let rec h =
function CNil -> base_case | Single(x) -> init x
| Concat(x, y) -> join (h x) (h y)
| null | https://raw.githubusercontent.com/synduce/Synduce/289888afb1c312adfd631ce8d90df2134de827b8/extras/solutions/constraints/all_positive/list_mps.ml | ocaml |
let base_case = 0
let init a = max a 0
let join b c = max b (b + c)
let rec h =
function CNil -> base_case | Single(x) -> init x
| Concat(x, y) -> join (h x) (h y)
| |
2ce675fa5f24e3330ff120effa09d592f08b0dcea766c1eb705c1e3e1e6a6e5f | emqx/replayq | benchmark_tests.erl | -module(benchmark_tests).
-include_lib("eunit/include/eunit.hrl").
-define(DIR, filename:join([data_dir(), ?FUNCTION_NAME, integer_to_list(uniq())])).
-define(WRITE_CHUNK_SIZE, 1).
-define(ITEM_BYTES, 256).
change to 10 or even more for real test
run_test_() ->
Dir = ?DIR,
Config = #{dir => Dir, seg_bytes => 100 bsl 20},
{timeout, ?RUN_SECS * 2,
fun() ->
Writter = erlang:spawn_link(fun() -> writter(Config) end),
Now = erlang:system_time(),
timer:sleep(timer:seconds(?RUN_SECS)),
_ = erlang:send(Writter, {stop, self()}),
Result = wait_for_result(Now),
print_result(Result)
end}.
wait_for_result(Then) ->
receive
{result, Stats} ->
Now = erlang:system_time(),
IntervalSeconds = (Now - Then) / 1000000000,
Stats#{time => IntervalSeconds}
end.
print_result(#{count := Count, bytes := Bytes, time := Seconds}) ->
io:format(user, "=============\n~p messages per second\n"
"~p bytes per second==================\n",
[Count / Seconds, Bytes / Seconds]).
writter(Config) ->
Q = replayq:open(Config),
writter_loop(Q).
writter_loop(Q0) ->
Q = do_write_chunk(Q0, ?WRITE_CHUNK_SIZE),
receive
{stop, Pid} ->
Pid ! {result, #{bytes => replayq:bytes(Q),
count => replayq:count(Q)
}},
exit(normal)
after
0 ->
writter_loop(Q)
end.
do_write_chunk(Q, N) ->
Bytes = lists:duplicate(N, iolist_to_binary(lists:duplicate(?ITEM_BYTES, 0))),
replayq:append(Q, Bytes).
data_dir() -> "./test-data".
uniq() ->
{_, _, Micro} = erlang:timestamp(),
Micro.
| null | https://raw.githubusercontent.com/emqx/replayq/cd54fbfd7bc2671f7a10bf5a9ef9fd16318def73/test/benchmark_tests.erl | erlang | -module(benchmark_tests).
-include_lib("eunit/include/eunit.hrl").
-define(DIR, filename:join([data_dir(), ?FUNCTION_NAME, integer_to_list(uniq())])).
-define(WRITE_CHUNK_SIZE, 1).
-define(ITEM_BYTES, 256).
change to 10 or even more for real test
run_test_() ->
Dir = ?DIR,
Config = #{dir => Dir, seg_bytes => 100 bsl 20},
{timeout, ?RUN_SECS * 2,
fun() ->
Writter = erlang:spawn_link(fun() -> writter(Config) end),
Now = erlang:system_time(),
timer:sleep(timer:seconds(?RUN_SECS)),
_ = erlang:send(Writter, {stop, self()}),
Result = wait_for_result(Now),
print_result(Result)
end}.
wait_for_result(Then) ->
receive
{result, Stats} ->
Now = erlang:system_time(),
IntervalSeconds = (Now - Then) / 1000000000,
Stats#{time => IntervalSeconds}
end.
print_result(#{count := Count, bytes := Bytes, time := Seconds}) ->
io:format(user, "=============\n~p messages per second\n"
"~p bytes per second==================\n",
[Count / Seconds, Bytes / Seconds]).
writter(Config) ->
Q = replayq:open(Config),
writter_loop(Q).
writter_loop(Q0) ->
Q = do_write_chunk(Q0, ?WRITE_CHUNK_SIZE),
receive
{stop, Pid} ->
Pid ! {result, #{bytes => replayq:bytes(Q),
count => replayq:count(Q)
}},
exit(normal)
after
0 ->
writter_loop(Q)
end.
do_write_chunk(Q, N) ->
Bytes = lists:duplicate(N, iolist_to_binary(lists:duplicate(?ITEM_BYTES, 0))),
replayq:append(Q, Bytes).
data_dir() -> "./test-data".
uniq() ->
{_, _, Micro} = erlang:timestamp(),
Micro.
| |
7fb022b2720cd2da5d3ed25a0aa7f87d52ad2fd0bad12dd426655baa6665b3d7 | frenetic-lang/ocaml-openflow | GroupTable0x04.ml | open OpenFlow0x04
open OpenFlow0x04_Core
type t = {
table : (groupId, (groupType * bucket list)) Hashtbl.t;
mutable next_group_id : groupId;
mutable pending_messages : Message.t list
}
let create () : t = {
table = Hashtbl.create 100;
next_group_id = 1l;
pending_messages = []
}
let add_group (tbl : t) (typ : groupType) (buckets : bucket list) : groupId =
let id = tbl.next_group_id in
tbl.next_group_id <- Int32.succ id;
if tbl.next_group_id = 0l then
failwith "out of group IDs"
else
let msg = Message.GroupModMsg (AddGroup (typ, id, buckets)) in
Hashtbl.add tbl.table id (typ, buckets);
tbl.pending_messages <- msg :: tbl.pending_messages;
id
let clear_groups (tbl : t) : unit =
tbl.next_group_id <- 1l;
let rm_group (id : groupId) ((typ, _) : groupType * bucket list) : unit =
let msg = Message.GroupModMsg (DeleteGroup (typ, id)) in
tbl.pending_messages <- msg :: tbl.pending_messages in
Hashtbl.iter rm_group tbl.table;
Hashtbl.clear tbl.table
let commit (tbl : t) : Message.t list =
let msgs = tbl.pending_messages in
tbl.pending_messages <- [];
List.rev msgs | null | https://raw.githubusercontent.com/frenetic-lang/ocaml-openflow/289ffb8a692cf32b8413cc58044aae9c151ddd44/lib/GroupTable0x04.ml | ocaml | open OpenFlow0x04
open OpenFlow0x04_Core
type t = {
table : (groupId, (groupType * bucket list)) Hashtbl.t;
mutable next_group_id : groupId;
mutable pending_messages : Message.t list
}
let create () : t = {
table = Hashtbl.create 100;
next_group_id = 1l;
pending_messages = []
}
let add_group (tbl : t) (typ : groupType) (buckets : bucket list) : groupId =
let id = tbl.next_group_id in
tbl.next_group_id <- Int32.succ id;
if tbl.next_group_id = 0l then
failwith "out of group IDs"
else
let msg = Message.GroupModMsg (AddGroup (typ, id, buckets)) in
Hashtbl.add tbl.table id (typ, buckets);
tbl.pending_messages <- msg :: tbl.pending_messages;
id
let clear_groups (tbl : t) : unit =
tbl.next_group_id <- 1l;
let rm_group (id : groupId) ((typ, _) : groupType * bucket list) : unit =
let msg = Message.GroupModMsg (DeleteGroup (typ, id)) in
tbl.pending_messages <- msg :: tbl.pending_messages in
Hashtbl.iter rm_group tbl.table;
Hashtbl.clear tbl.table
let commit (tbl : t) : Message.t list =
let msgs = tbl.pending_messages in
tbl.pending_messages <- [];
List.rev msgs | |
40bd69ce67de77b10a58516b994dd24f45481de596959ed2a4f25c6fbe94c7ee | typelead/intellij-eta | Comment00005.hs | module Comment00005 where
z = 2
This function implements double buffering of state for imnmutable
seqential loops ( forLoop and whileLoop ) . The intention is to generate
the minimal amount of copying ( especially deep array copies ) while
also avoiding frequent references to data that can not be allocated in
registers .
The state of the loop is implemented by two variables so that the
loop body reads from one and writes to the other . At the end of the
body , the contents of the second ( write ) variable is shallow copied to
the first ( read ) variable . In order to avoid inadvertent sharing of
data referenced by pointers in the variables ( array buffers , for
instance ) , the pointers in the state are swapped rather than just
copied so that the end up in the variable written to . Finally the read
variable is shallow copied to the result location .
There are some simplifying cases :
- When the target is a variable , and thus cheap to
access , it is reused as the read state variable
- When the type of the state is scalar , so that assignment is
atomic , only one state variable is used and it is both read and
written to in the loop body , eliminating the shallow copy in the
loop body .
The strategy implemented a com promise between different design constraints :
- Avoid deep copying of arrays ( that is what the double buffering is for )
- Avoid shallow copying if possible
- Avoid memory leaks of arrays and ivars
This function implements double buffering of state for imnmutable
seqential loops (forLoop and whileLoop). The intention is to generate
the minimal amount of copying (especially deep array copies) while
also avoiding frequent references to data that can not be allocated in
registers.
The state of the loop is implemented by two variables so that the
loop body reads from one and writes to the other. At the end of the
body, the contents of the second (write) variable is shallow copied to
the first (read) variable. In order to avoid inadvertent sharing of
data referenced by pointers in the variables (array buffers, for
instance), the pointers in the state are swapped rather than just
copied so that the end up in the variable written to. Finally the read
variable is shallow copied to the result location.
There are some simplifying cases:
- When the target lvalue loc is a variable, and thus cheap to
access, it is reused as the read state variable
- When the type of the state is scalar, so that assignment is
atomic, only one state variable is used and it is both read and
written to in the loop body, eliminating the shallow copy in the
loop body.
The strategy implemented a com promise between different design constraints:
- Avoid deep copying of arrays (that is what the double buffering is for)
- Avoid shallow copying if possible
- Avoid memory leaks of arrays and ivars
-}
f :: Int -> Int
f x = y
| null | https://raw.githubusercontent.com/typelead/intellij-eta/ee66d621aa0bfdf56d7d287279a9a54e89802cf9/plugin/src/test/resources/fixtures/eta/sources/Comment00005.hs | haskell | module Comment00005 where
z = 2
This function implements double buffering of state for imnmutable
seqential loops ( forLoop and whileLoop ) . The intention is to generate
the minimal amount of copying ( especially deep array copies ) while
also avoiding frequent references to data that can not be allocated in
registers .
The state of the loop is implemented by two variables so that the
loop body reads from one and writes to the other . At the end of the
body , the contents of the second ( write ) variable is shallow copied to
the first ( read ) variable . In order to avoid inadvertent sharing of
data referenced by pointers in the variables ( array buffers , for
instance ) , the pointers in the state are swapped rather than just
copied so that the end up in the variable written to . Finally the read
variable is shallow copied to the result location .
There are some simplifying cases :
- When the target is a variable , and thus cheap to
access , it is reused as the read state variable
- When the type of the state is scalar , so that assignment is
atomic , only one state variable is used and it is both read and
written to in the loop body , eliminating the shallow copy in the
loop body .
The strategy implemented a com promise between different design constraints :
- Avoid deep copying of arrays ( that is what the double buffering is for )
- Avoid shallow copying if possible
- Avoid memory leaks of arrays and ivars
This function implements double buffering of state for imnmutable
seqential loops (forLoop and whileLoop). The intention is to generate
the minimal amount of copying (especially deep array copies) while
also avoiding frequent references to data that can not be allocated in
registers.
The state of the loop is implemented by two variables so that the
loop body reads from one and writes to the other. At the end of the
body, the contents of the second (write) variable is shallow copied to
the first (read) variable. In order to avoid inadvertent sharing of
data referenced by pointers in the variables (array buffers, for
instance), the pointers in the state are swapped rather than just
copied so that the end up in the variable written to. Finally the read
variable is shallow copied to the result location.
There are some simplifying cases:
- When the target lvalue loc is a variable, and thus cheap to
access, it is reused as the read state variable
- When the type of the state is scalar, so that assignment is
atomic, only one state variable is used and it is both read and
written to in the loop body, eliminating the shallow copy in the
loop body.
The strategy implemented a com promise between different design constraints:
- Avoid deep copying of arrays (that is what the double buffering is for)
- Avoid shallow copying if possible
- Avoid memory leaks of arrays and ivars
-}
f :: Int -> Int
f x = y
| |
2395517d072b56758acff9e9726d26a247d946694485d9abfbe1d4798ca85932 | nodename/stately | re_frame_api.cljs | (ns nodename.stately.communications.re-frame-api
(:require [re-frame.core :as re-frame]
[re-frame.utils :as utils]
[re-frame.handlers :as handlers]
[re-frame.db :as db]))
(def dispatch re-frame/dispatch)
(def run-queue #()) ;; re-frame runs the queue automatically
(def register-handler re-frame/register-handler)
(def lookup-handler handlers/lookup-handler)
(def log utils/log)
(def warn utils/warn)
(def error utils/error)
(def app-db db/app-db)
| null | https://raw.githubusercontent.com/nodename/stately/cf4e1092445f1b88f0846756ba053ac1e2c5b9bf/src/nodename/stately/communications/re_frame_api.cljs | clojure | re-frame runs the queue automatically | (ns nodename.stately.communications.re-frame-api
(:require [re-frame.core :as re-frame]
[re-frame.utils :as utils]
[re-frame.handlers :as handlers]
[re-frame.db :as db]))
(def dispatch re-frame/dispatch)
(def register-handler re-frame/register-handler)
(def lookup-handler handlers/lookup-handler)
(def log utils/log)
(def warn utils/warn)
(def error utils/error)
(def app-db db/app-db)
|
90a0d351dcfaecc4c613c177cf84a90e982fe82e800c563e76b9172653342db5 | esumii/min-caml | asm.ml | 2オペランドではなく3オペランドのx86アセンブリもどき
(* Pseudo-x86 assembly with three-operand instructions (rather than x86's
   usual two-operand form). *)
type id_or_imm = V of Id.t | C of int (* a register variable or an immediate *)
type t = (* instruction sequences (caml2html: sparcasm_t) *)
  | Ans of exp
  | Let of (Id.t * Type.t) * exp * t
and exp = (* individual instructions (caml2html: sparcasm_exp) *)
  | Nop
  | Set of int
  | SetL of Id.l
  | Mov of Id.t
  | Neg of Id.t
  | Add of Id.t * id_or_imm
  | Sub of Id.t * id_or_imm
  | Ld of Id.t * id_or_imm * int
  | St of Id.t * Id.t * id_or_imm * int
  | FMovD of Id.t
  | FNegD of Id.t
  | FAddD of Id.t * Id.t
  | FSubD of Id.t * Id.t
  | FMulD of Id.t * Id.t
  | FDivD of Id.t * Id.t
  | LdDF of Id.t * id_or_imm * int
  | StDF of Id.t * Id.t * id_or_imm * int
  | Comment of string
  (* virtual instructions *)
  | IfEq of Id.t * id_or_imm * t * t
  | IfLE of Id.t * id_or_imm * t * t
  | IfGE of Id.t * id_or_imm * t * t (* needed because the comparison is not symmetric *)
  | IfFEq of Id.t * Id.t * t * t
  | IfFLE of Id.t * Id.t * t * t
  (* closure address, integer arguments, and float arguments *)
  | CallCls of Id.t * Id.t list * Id.t list
  | CallDir of Id.l * Id.t list * Id.t list
  | Save of Id.t * Id.t (* save a register variable into a stack variable (caml2html: sparcasm_save) *)
  | Restore of Id.t (* restore a value from a stack variable (caml2html: sparcasm_restore) *)
type fundef = { name : Id.l; args : Id.t list; fargs : Id.t list; body : t; ret : Type.t }
(* whole program = float-constant table + top-level functions + main
   expression (caml2html: sparcasm_prog) *)
type prog = Prog of (Id.l * float) list * fundef list * t
(* [fletd (x, e1, e2)] abbreviates a let-binding of the float expression
   [e1] to variable [x], continuing with [e2]. *)
let fletd(x, e1, e2) = Let((x, Type.Float), e1, e2)
(* [seq (e1, e2)] sequences [e1] before [e2] by binding [e1]'s unit result
   to a fresh temporary obtained from [Id.gentmp]. *)
let seq(e1, e2) = Let((Id.gentmp Type.Unit, Type.Unit), e1, e2)
(* x86 32-bit general-purpose register names.  The commented-out
   initializer shows the earlier generic "%r0..%r15" naming scheme. *)
let regs = (* Array.init 16 (fun i -> Printf.sprintf "%%r%d" i) *)
  [| "%eax"; "%ebx"; "%ecx"; "%edx"; "%esi"; "%edi" |]
(* x86 SSE floating-point register names "%xmm0".."%xmm7". *)
let fregs = Array.init 8 (fun i -> Printf.sprintf "%%xmm%d" i)
let allregs = Array.to_list regs
let allfregs = Array.to_list fregs
let reg_cl = regs.(Array.length regs - 1) (* closure address (caml2html: sparcasm_regcl) *)
(*
let reg_sw = regs.(Array.length regs - 1) (* temporary for swap *)
let reg_fsw = fregs.(Array.length fregs - 1) (* temporary for swap *)
*)
let reg_sp = "%ebp" (* stack pointer *)
let reg_hp = "min_caml_hp" (* heap pointer (caml2html: sparcasm_reghp) *)
(* let reg_ra = "%eax" (* return address *) *)
(* A name denotes a register when it starts with '%' or is the heap
   pointer's global name. *)
let is_reg x = (x.[0] = '%' || x = reg_hp)
(* Drop every element already in the set [seen], keeping only the first
   occurrence of each remaining element ("super-tenuki", i.e. a
   quick-and-dirty de-duplication). *)
let rec remove_and_uniq seen = function
  | [] -> []
  | y :: rest ->
      if S.mem y seen then remove_and_uniq seen rest
      else y :: remove_and_uniq (S.add y seen) rest
(* free variables in the order of use (for spilling) (caml2html: sparcasm_fv) *)
let fv_id_or_imm = function V(x) -> [x] | _ -> []
(* Free variables of a single instruction; conditionals recurse into both
   branches and de-duplicate the combined result. *)
let rec fv_exp = function
  | Nop | Set(_) | SetL(_) | Comment(_) | Restore(_) -> []
  | Mov(x) | Neg(x) | FMovD(x) | FNegD(x) | Save(x, _) -> [x]
  | Add(x, y') | Sub(x, y') | Ld(x, y', _) | LdDF(x, y', _) -> x :: fv_id_or_imm y'
  | St(x, y, z', _) | StDF(x, y, z', _) -> x :: y :: fv_id_or_imm z'
  | FAddD(x, y) | FSubD(x, y) | FMulD(x, y) | FDivD(x, y) -> [x; y]
  | IfEq(x, y', e1, e2) | IfLE(x, y', e1, e2) | IfGE(x, y', e1, e2) -> x :: fv_id_or_imm y' @ remove_and_uniq S.empty (fv e1 @ fv e2) (* uniq here just for efficiency *)
  | IfFEq(x, y, e1, e2) | IfFLE(x, y, e1, e2) -> x :: y :: remove_and_uniq S.empty (fv e1 @ fv e2) (* uniq here just for efficiency *)
  | CallCls(x, ys, zs) -> x :: ys @ zs
  | CallDir(_, ys, zs) -> ys @ zs
and fv = function
  | Ans(exp) -> fv_exp exp
  | Let((x, t), exp, e) ->
      fv_exp exp @ remove_and_uniq (S.singleton x) (fv e)
(* Entry point: free variables of a whole sequence, first occurrence kept. *)
let fv e = remove_and_uniq S.empty (fv e)
(* Append the sequence [e2] to [e1], binding the final answer of [e1] to
   the typed destination [xt]. *)
let rec concat e1 xt e2 =
  match e1 with
  | Ans exp -> Let (xt, exp, e2)
  | Let (yt, exp, rest) -> Let (yt, exp, concat rest xt e2)
(* Pad [i] up to an 8-byte boundary; NOTE(review): assumes [i] is a
   multiple of 4 — for other inputs adding 4 does not reach alignment. *)
let align i = match i mod 8 with
  | 0 -> i
  | _ -> i + 4
| null | https://raw.githubusercontent.com/esumii/min-caml/8860b6fbc50786a27963aff1f7639b94c244618a/x86/asm.ml | ocaml | 命令の列 (caml2html: sparcasm_t)
一つ一つの命令に対応する式 (caml2html: sparcasm_exp)
virtual instructions
左右対称ではないので必要
closure address, integer arguments, and float arguments
レジスタ変数の値をスタック変数へ保存 (caml2html: sparcasm_save)
スタック変数から値を復元 (caml2html: sparcasm_restore)
Array.init 16 (fun i -> Printf.sprintf "%%r%d" i)
closure address (caml2html: sparcasm_regcl)
temporary for swap
temporary for swap
stack pointer
heap pointer (caml2html: sparcasm_reghp)
let reg_ra = "%eax" (* return address
super-tenuki
uniq here just for efficiency
uniq here just for efficiency | 2オペランドではなく3オペランドのx86アセンブリもどき
type id_or_imm = V of Id.t | C of int
| Ans of exp
| Let of (Id.t * Type.t) * exp * t
| Nop
| Set of int
| SetL of Id.l
| Mov of Id.t
| Neg of Id.t
| Add of Id.t * id_or_imm
| Sub of Id.t * id_or_imm
| Ld of Id.t * id_or_imm * int
| St of Id.t * Id.t * id_or_imm * int
| FMovD of Id.t
| FNegD of Id.t
| FAddD of Id.t * Id.t
| FSubD of Id.t * Id.t
| FMulD of Id.t * Id.t
| FDivD of Id.t * Id.t
| LdDF of Id.t * id_or_imm * int
| StDF of Id.t * Id.t * id_or_imm * int
| Comment of string
| IfEq of Id.t * id_or_imm * t * t
| IfLE of Id.t * id_or_imm * t * t
| IfFEq of Id.t * Id.t * t * t
| IfFLE of Id.t * Id.t * t * t
| CallCls of Id.t * Id.t list * Id.t list
| CallDir of Id.l * Id.t list * Id.t list
type fundef = { name : Id.l; args : Id.t list; fargs : Id.t list; body : t; ret : Type.t }
プログラム全体 = + トップレベル関数 + メインの式 ( caml2html : sparcasm_prog )
type prog = Prog of (Id.l * float) list * fundef list * t
let fletd(x, e1, e2) = Let((x, Type.Float), e1, e2)
let seq(e1, e2) = Let((Id.gentmp Type.Unit, Type.Unit), e1, e2)
[| "%eax"; "%ebx"; "%ecx"; "%edx"; "%esi"; "%edi" |]
let fregs = Array.init 8 (fun i -> Printf.sprintf "%%xmm%d" i)
let allregs = Array.to_list regs
let allfregs = Array.to_list fregs
let reg_sw = regs.(Array.length regs - 1 ) ( * temporary for swap
*)
let is_reg x = (x.[0] = '%' || x = reg_hp)
let rec remove_and_uniq xs = function
| [] -> []
| x :: ys when S.mem x xs -> remove_and_uniq xs ys
| x :: ys -> x :: remove_and_uniq (S.add x xs) ys
free variables in the order of use ( for spilling ) ( caml2html : )
let fv_id_or_imm = function V(x) -> [x] | _ -> []
let rec fv_exp = function
| Nop | Set(_) | SetL(_) | Comment(_) | Restore(_) -> []
| Mov(x) | Neg(x) | FMovD(x) | FNegD(x) | Save(x, _) -> [x]
| Add(x, y') | Sub(x, y') | Ld(x, y', _) | LdDF(x, y', _) -> x :: fv_id_or_imm y'
| St(x, y, z', _) | StDF(x, y, z', _) -> x :: y :: fv_id_or_imm z'
| FAddD(x, y) | FSubD(x, y) | FMulD(x, y) | FDivD(x, y) -> [x; y]
| CallCls(x, ys, zs) -> x :: ys @ zs
| CallDir(_, ys, zs) -> ys @ zs
and fv = function
| Ans(exp) -> fv_exp exp
| Let((x, t), exp, e) ->
fv_exp exp @ remove_and_uniq (S.singleton x) (fv e)
let fv e = remove_and_uniq S.empty (fv e)
let rec concat e1 xt e2 =
match e1 with
| Ans(exp) -> Let(xt, exp, e2)
| Let(yt, exp, e1') -> Let(yt, exp, concat e1' xt e2)
let align i = (if i mod 8 = 0 then i else i + 4)
|
6a0a23ce895b7cceb31d7fccab346dd78d49241441f45ba47f0c1e7cc368a200 | dongcarl/guix | gnuzilla.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2013 , 2015 < >
Copyright © 2013 , 2014 , 2015 , 2016 , 2017 , 2018 , 2019 , 2020 < >
Copyright © 2014 , 2015 , 2016 , 2017 , 2018 , 2019 , 2020 , 2021 < >
Copyright © 2015 < >
Copyright © 2016 , 2017 , 2018 , 2019 , 2021 < >
Copyright © 2016 < >
Copyright © 2017 Clément < >
Copyright © 2017 , 2018 Nikita < >
Copyright © 2017 , 2018 , 2020 < >
Copyright © 2018 , 2020 < >
Copyright © 2019 < >
Copyright © 2020 < >
Copyright © 2020 < >
Copyright © 2019 , 2020 < >
Copyright © 2020 < >
Copyright © 2020 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages gnuzilla)
#:use-module ((srfi srfi-1) #:hide (zip))
#:use-module (ice-9 match)
#:use-module (gnu packages)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix git-download)
#:use-module (guix hg-download)
#:use-module (guix gexp)
#:use-module (guix store)
#:use-module (guix monads)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (guix build-system cargo)
#:use-module (guix build-system trivial)
#:use-module (gnu packages admin)
#:use-module (gnu packages audio)
#:use-module (gnu packages autotools)
#:use-module (gnu packages base)
#:use-module (gnu packages bash)
#:use-module (gnu packages databases)
#:use-module (gnu packages glib)
#:use-module (gnu packages gtk)
#:use-module (gnu packages gnome)
#:use-module (gnu packages libcanberra)
#:use-module (gnu packages cups)
#:use-module (gnu packages kerberos)
#:use-module (gnu packages linux)
#:use-module (gnu packages perl)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages compression)
#:use-module (gnu packages fontutils)
#:use-module (gnu packages libevent)
for hunspell
#:use-module (gnu packages image)
#:use-module (gnu packages libffi)
#:use-module (gnu packages pulseaudio)
#:use-module (gnu packages python)
#:use-module (gnu packages python-xyz)
#:use-module (gnu packages node)
#:use-module (gnu packages xorg)
#:use-module (gnu packages gl)
#:use-module (gnu packages assembly)
#:use-module (gnu packages rust)
#:use-module (gnu packages rust-apps)
#:use-module (gnu packages llvm)
#:use-module (gnu packages nss)
#:use-module (gnu packages icu4c)
#:use-module (gnu packages video)
#:use-module (gnu packages xiph)
#:use-module (gnu packages xdisorg)
#:use-module (gnu packages readline)
#:use-module (gnu packages sqlite))
;; SpiderMonkey (Mozilla's JavaScript engine) 17.x, standalone build.
;; NOTE(review): several strings in this recipe were damaged during
;; extraction (URL hosts stripped, one propagated-input line truncated);
;; the restored values are marked below and must be verified against the
;; upstream Guix recipe before building.
(define-public mozjs
  (package
    (name "mozjs")
    (version "17.0.0")
    (source (origin
              (method url-fetch)
              ;; NOTE(review): restored URL host -- verify.
              (uri (string-append
                    "https://ftp.mozilla.org/pub/mozilla.org/js/"
                    name version ".tar.gz"))
              (sha256
               (base32
                "1fig2wf4f10v43mqx67y68z6h77sy900d1w0pz9qarrqx57rc7ij"))
              (patches (search-patches "mozjs17-aarch64-support.patch"))
              (modules '((guix build utils)))
              (snippet
               ;; Fix incompatibility with Perl 5.22+.
               '(begin
                  (substitute* '("js/src/config/milestone.pl")
                    (("defined\\(@TEMPLATE_FILE)") "@TEMPLATE_FILE"))
                  #t))))
    (build-system gnu-build-system)
    (native-inputs
     `(("perl" ,perl)
       ("pkg-config" ,pkg-config)
       ("python" ,python-2)))
    (propagated-inputs
     ;; NOTE(review): this line was truncated in extraction; restored -- verify.
     `(("nspr" ,nspr))) ; in the Requires.private field of mozjs-17.0.pc
    (inputs
     `(("zlib" ,zlib)))
    (arguments
     `(;; XXX: parallel build fails, lacking:
       ;;   mkdir -p "system_wrapper_js/"
       #:parallel-build? #f
       #:make-flags '("CXXFLAGS=-fpermissive")
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'delete-timedout-test
           ;; This test times out on slower hardware.
           (lambda _
             (delete-file "js/src/jit-test/tests/basic/bug698584.js")
             #t))
         (add-before 'configure 'chdir
           (lambda _
             (chdir "js/src")
             #t))
         (replace 'configure
           ;; configure fails if it is followed by SHELL and CONFIG_SHELL
           (lambda* (#:key outputs #:allow-other-keys)
             (let ((out (assoc-ref outputs "out")))
               (setenv "SHELL" (which "sh"))
               (setenv "CONFIG_SHELL" (which "sh"))
               (invoke "./configure" (string-append "--prefix=" out)
                       ,@(if (string=? "aarch64-linux"
                                       (%current-system))
                             '("--host=aarch64-unknown-linux-gnu")
                             '()))))))))
    ;; NOTE(review): restored URL host -- verify.
    (home-page
     "https://developer.mozilla.org/en-US/docs/Mozilla/Projects/SpiderMonkey")
    (synopsis "Mozilla javascript engine")
    (description "SpiderMonkey is Mozilla's JavaScript engine written
in C/C++.")
    (license license:mpl2.0)))          ; and others for some files
;; SpiderMonkey 24, inheriting most fields from the mozjs 17 package above.
;; NOTE(review): the source URL host was stripped during extraction;
;; restored below -- verify against the upstream Guix recipe.
(define-public mozjs-24
  (package (inherit mozjs)
    (name "mozjs")
    (version "24.2.0")
    (source (origin
              (method url-fetch)
              ;; NOTE(review): restored URL host -- verify.
              (uri (string-append
                    "https://ftp.mozilla.org/pub/mozilla.org/js/"
                    name "-" version ".tar.bz2"))
              (sha256
               (base32
                "1n1phk8r3l8icqrrap4czplnylawa0ddc2cc4cgdz46x3lrkybz6"))
              (modules '((guix build utils)))
              (patches (search-patches "mozjs24-aarch64-support.patch"))
              (snippet
               ;; Fix incompatibility with Perl 5.22+.
               '(begin
                  (substitute* '("js/src/config/milestone.pl")
                    (("defined\\(@TEMPLATE_FILE)") "@TEMPLATE_FILE"))
                  #t))))
    (arguments
      (substitute-keyword-arguments (package-arguments mozjs)
        ((#:phases phases)
         `(modify-phases ,phases
            (replace 'configure
              (lambda* (#:key outputs #:allow-other-keys)
                (let ((out (assoc-ref outputs "out")))
                  ;; configure fails if it is followed by SHELL and CONFIG_SHELL
                  (setenv "SHELL" (which "sh"))
                  (setenv "CONFIG_SHELL" (which "sh"))
                  (invoke "./configure"
                          (string-append "--prefix=" out)
                          "--with-system-nspr"
                          "--enable-system-ffi"
                          "--enable-threadsafe"
                          ,@(if (string=? "aarch64-linux"
                                          (%current-system))
                                '("--host=aarch64-unknown-linux-gnu")
                                '())))))))))
    (inputs
     `(("libffi" ,libffi)
       ("zlib" ,zlib)))))
(define-public mozjs-38
(package
(inherit mozjs)
(name "mozjs")
(version "38.2.1.rc0")
(source (origin
(method url-fetch)
(uri (string-append
"/"
name "-" version ".tar.bz2"))
(sha256
(base32
"0p4bmbpgkfsj54xschcny0a118jdrdgg0q29rwxigg3lh5slr681"))
(patches
(search-patches
;; See for
GCC 6 compatibility .
for 0ad
"mozjs38-tracelogger.patch"
;; See .
"mozjs38-pkg-config-version.patch"
"mozjs38-shell-version.patch"))
(modules '((guix build utils)))
(snippet
'(begin
Fix incompatibility with sed 4.4 .
(substitute* "js/src/configure"
(("\\^\\[:space:\\]") "^[[:space:]]"))
;; The headers are symlinks to files that are in /tmp, so they
;; end up broken. Copy them instead.
(substitute*
"python/mozbuild/mozbuild/backend/recursivemake.py"
(("\\['dist_include'\\].add_symlink")
"['dist_include'].add_copy"))
;; Remove bundled libraries.
(for-each delete-file-recursively
'("intl"
"js/src/ctypes/libffi"
"js/src/ctypes/libffi-patches"
"modules/zlib"))
#t))))
(arguments
`(;; XXX: parallel build fails, lacking:
-p " system_wrapper_js/ "
#:parallel-build? #f
;; See .
#:tests? #f
#:phases
(modify-phases %standard-phases
(replace 'configure
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(chdir "js/src")
(setenv "SHELL" (which "sh"))
(setenv "CONFIG_SHELL" (which "sh"))
(invoke "./configure"
(string-append "--prefix=" out)
"--enable-ctypes"
"--enable-gcgenerational"
"--enable-optimize"
"--enable-pie"
"--enable-readline"
"--enable-shared-js"
"--enable-system-ffi"
"--enable-threadsafe"
"--enable-xterm-updates"
"--with-system-icu"
"--with-system-nspr"
"--with-system-zlib"
;; Intl API requires bundled ICU.
"--without-intl-api")))))))
(native-inputs
`(("perl" ,perl)
("pkg-config" ,pkg-config)
("python-2" ,python-2)))
(inputs
`(("libffi" ,libffi)
("readline" ,readline)
("icu4c" ,icu4c)
("zlib" ,zlib)))))
(define-public mozjs-52
;; No releases yet at </>.
;; While we could take a snapshot of the complete mozilla-esr52 repository at
;; </#/jobs?repo=mozilla-esr52&filter-searchStr=sm-tc>,
we take the Debian version instead , because it is easier to work with .
(let ((commit "6507e63cc416fd7a3269e390efe712f8b56f374a")
(revision "1"))
(package (inherit mozjs-38)
(version (git-version "52.0" revision commit))
(source (origin
(method git-fetch)
(uri (git-reference
(url "-team/mozjs52.git")
(commit commit)))
(file-name (git-file-name "mozjs" version))
(sha256
(base32
"1ny0s53r8wn4byys87h784xrq1xg767akmfm6gqrbvrz57mlm3q2"))))
(arguments
`(#:tests? #f ; depends on repository metadata
#:configure-flags
'("--enable-ctypes"
"--enable-optimize"
"--enable-pie"
"--enable-readline"
"--enable-shared-js"
"--enable-system-ffi"
"--with-system-icu"
"--with-system-nspr"
"--with-system-zlib"
;; Intl API requires bundled ICU.
"--without-intl-api"
;; Without this gnome-shell will crash at runtime.
"--disable-jemalloc")
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'patch-and-chdir
(lambda* (#:key inputs #:allow-other-keys)
This patch prevents a segfault when executing JS_Init ( ) .
;; The build does not fail without this patch, but the
configure phase of the gjs package would fail .
;; See
(make-file-writable "js/src/old-configure.in")
(make-file-writable "js/src/old-configure")
(make-file-writable "mozglue/build/moz.build")
(invoke "patch" "-p1" "--force"
"--input" "debian/patches/disable-mozglue.patch")
(invoke "touch" "js/src/configure")
(chdir "js/src")
#t))
(replace 'configure
(lambda* (#:key inputs outputs configure-flags #:allow-other-keys)
;; The configure script does not accept environment variables
;; as arguments.
(let ((out (assoc-ref outputs "out")))
(setenv "SHELL" (which "sh"))
(setenv "CONFIG_SHELL" (which "sh"))
(setenv "AUTOCONF" (string-append (assoc-ref inputs "autoconf")
"/bin/autoconf"))
(apply invoke "./configure"
(cons (string-append "--prefix=" out)
configure-flags))))))))
(native-inputs
`(("autoconf" ,autoconf-2.13)
("automake" ,automake)
,@(package-native-inputs mozjs-38))))))
(define-public mozjs-60
;; No releases yet at </>.
;; While we could take a snapshot of the complete mozilla-esr60 repository at
;; </#/jobs?repo=mozilla-esr60&filter-searchStr=sm-tc>,
we take the Debian version instead , because it is easier to work with .
(package
(inherit mozjs-38)
(version "60.2.3-2")
(source (origin
(method git-fetch)
(uri (git-reference
(url "-team/mozjs60.git")
(commit (string-append "debian/" version))))
(file-name (git-file-name "mozjs" version))
(sha256
(base32
"091w050rwzrdcbgyi934k2viyccmlqxrp13sm2mql71mabb5dai6"))))
(arguments
`(#:tests? #f ; FIXME: all tests pass, but then the check phase fails anyway.
#:test-target "check-jstests"
#:configure-flags
'("--enable-ctypes"
"--enable-optimize"
"--enable-pie"
"--enable-readline"
"--enable-shared-js"
"--enable-system-ffi"
"--with-system-nspr"
"--with-system-zlib"
"--with-system-icu"
"--with-intl-api"
This is important because without it gjs will segfault during the
;; configure phase. With jemalloc only the standalone mozjs console
;; will work.
"--disable-jemalloc")
#:phases
(modify-phases %standard-phases
(replace 'configure
(lambda* (#:key inputs outputs configure-flags #:allow-other-keys)
;; The configure script does not accept environment variables as
;; arguments. It also must be run from a different directory,
;; but not the root directory either.
(let ((out (assoc-ref outputs "out")))
(mkdir "run-configure-from-here")
(chdir "run-configure-from-here")
(setenv "SHELL" (which "sh"))
(setenv "CONFIG_SHELL" (which "sh"))
(setenv "AUTOCONF" (string-append (assoc-ref inputs "autoconf")
"/bin/autoconf"))
(apply invoke "../js/src/configure"
(cons (string-append "--prefix=" out)
configure-flags))
#t)))
(add-after 'unpack 'disable-broken-tests
(lambda _
;; This test assumes that /bin exists and contains certain
;; executables.
(delete-file "js/src/tests/shell/os.js")
#t)))))
(native-inputs
`(("autoconf" ,autoconf)
("automake" ,automake)
("which" ,which)
("perl" ,perl)
("pkg-config" ,pkg-config)
("python" ,python-2)))))
(define-public mozjs-78
(package
(inherit mozjs-60)
(version "78.5.0")
(source (origin
(method url-fetch)
TODO : Switch to IceCat source once available on ftp.gnu.org .
(uri (string-append ""
"/releases/" version "esr/source/firefox-"
version "esr.source.tar.xz"))
(sha256
(base32
"1442yjmwz69hkfcvh8kkb60jf4c9ms0pac04nc3xw2da13v4zxai"))))
(arguments
`(#:imported-modules ,%cargo-utils-modules ;for `generate-all-checksums'
#:modules ((guix build cargo-utils)
,@%gnu-build-system-modules)
#:test-target "check-jstests"
#:configure-flags
'(;; Disable debugging symbols to save space.
"--disable-debug"
"--disable-debug-symbols"
This is important because without it gjs will segfault during the
;; configure phase. With jemalloc only the standalone mozjs console
;; will work.
"--disable-jemalloc"
"--enable-tests"
"--enable-hardening"
"--enable-optimize"
"--enable-release"
"--enable-rust-simd"
"--enable-readline"
"--enable-shared-js"
"--with-system-icu"
"--with-system-nspr"
"--with-system-zlib"
"--with-intl-api")
#:phases
(modify-phases %standard-phases
(add-after 'patch-source-shebangs 'patch-cargo-checksums
(lambda _
(let ((null-hash
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"))
(for-each (lambda (file)
(format #t "patching checksums in ~a~%" file)
(substitute* file
(("^checksum = \".*\"")
(string-append "checksum = \"" null-hash "\""))))
(find-files "." "Cargo\\.lock$"))
(for-each generate-all-checksums
'("js" "third_party/rust"))
#t)))
(replace 'configure
(lambda* (#:key inputs outputs configure-flags #:allow-other-keys)
;; The configure script does not accept environment variables as
;; arguments. It also must be run from a different directory,
;; but not the root directory either.
(let ((out (assoc-ref outputs "out")))
(mkdir "run-configure-from-here")
(chdir "run-configure-from-here")
(setenv "SHELL" (which "sh"))
(setenv "CONFIG_SHELL" (which "sh"))
(setenv "AUTOCONF" (string-append (assoc-ref inputs "autoconf")
"/bin/autoconf"))
(apply invoke "../js/src/configure"
(cons (string-append "--prefix=" out)
configure-flags))
#t)))
(add-after 'unpack 'adjust-for-icu-68
(lambda _
(with-directory-excursion "js/src/tests"
The test suite expects a lightly patched ICU 67 . Since
;; Guix is about to switch to ICU 68, massage the tests to
;; work with that instead of patching ICU. Try removing this
phase for newer versions of mozjs .
;; These tests look up locale names and expects to get
" " instead of " UK " .
(substitute* "non262/Intl/DisplayNames/language.js"
(("Traditionell, GB")
"Traditionell, UK"))
(substitute* "non262/Intl/DisplayNames/region.js"
(("\"GB\": \"GB\"")
"\"GB\": \"UK\""))
;; XXX: Some localized time formats have changed, and
;; substitution fails for accented characters, even though
;; it works in the REPL(?). Just delete these for now.
(delete-file "non262/Intl/Date/toLocaleString_timeZone.js")
(delete-file "non262/Intl/Date/toLocaleDateString_timeZone.js")
;; Similarly, these get an unexpected "A" suffix when looking
up a time in the " ar - MA - u - ca - islamicc " locale , which is
;; tricky to substitute.
(delete-file "non262/Intl/DateTimeFormat/format_timeZone.js")
(delete-file "non262/Intl/DateTimeFormat/format.js")
;; This file compares a generated list of ICU locale names
;; with actual lookups. Some have changed slightly, i.e.
daf - Latn - ZZ - > daf - Latn - CI , so drop it for simplicity .
(delete-file "non262/Intl/Locale/likely-subtags-generated.js"))
#t))
(add-before 'check 'pre-check
(lambda _
(with-directory-excursion "../js/src/tests"
(substitute* "shell/os.js"
;; FIXME: Why does the killed process have an exit status?
((".*killed process should not have exitStatus.*")
""))
;; XXX: Delete all tests that test time zone functionality,
;; because the test suite uses /etc/localtime to figure out
;; the offset from the hardware clock, which does not work
;; in the build container. See <tests/non262/Date/shell.js>.
(delete-file-recursively "non262/Date")
(delete-file "non262/Intl/DateTimeFormat/tz-environment-variable.js")
(setenv "JSTESTS_EXTRA_ARGS"
(string-join
(list
;; Do not run tests marked as "random".
"--exclude-random"
;; Exclude web platform tests.
"--wpt=disabled"
;; Respect the daemons configured number of jobs.
(string-append "--worker-count="
(number->string (parallel-job-count)))))))
#t)))))
(native-inputs
`(("autoconf" ,autoconf-2.13)
("automake" ,automake)
for - objdump
("perl" ,perl)
("pkg-config" ,pkg-config)
("python" ,python-3)
("rust" ,rust)
("cargo" ,rust "cargo")))
(inputs
`(("icu4c" ,icu4c-68)
("readline" ,readline)
("zlib" ,zlib)))))
;; Pinned Mercurial checkout of Mozilla's compare-locales tool.
;; NOTE(review): the hg URL below lost its host during extraction (it is
;; now a bare "-locales/"); recover the full URL from upstream before use.
(define mozilla-compare-locales
  (origin
    (method hg-fetch)
    (uri (hg-reference
          (url "-locales/")
          (changeset "RELEASE_8_0_0")))
    (file-name "mozilla-compare-locales")
    (sha256 (base32 "0052wq92sg4i776x407b5838jx9h3phl9xy69m2q34f31n3gdyk2"))))
;; Return an origin for the l10n Mercurial repository of LOCALE, pinned to
;; CHANGESET and verified against HASH-STRING (a base32 sha256).
;; NOTE(review): the hg URL lost its host during extraction (it is now a
;; bare "-central/"); recover the full l10n-central URL before use.
(define (mozilla-locale locale changeset hash-string)
  (origin
    (method hg-fetch)
    (uri (hg-reference
          (url (string-append "-central/"
                              locale))
          (changeset changeset)))
    (file-name (string-append "mozilla-locale-" locale))
    (sha256 (base32 hash-string))))
;; Expand a table of (HASH-STRING CHANGESET LOCALE) triples into a list of
;; 'mozilla-locale' origins, one per row.
(define-syntax-rule (mozilla-locales (hash-string changeset locale) ...)
  (list (mozilla-locale locale changeset hash-string)
        ...))
(define all-mozilla-locales
(mozilla-locales
;; sha256 changeset locale
;;---------------------------------------------------------------------------
("1q1p7nl97478hkf1msbbcdh98k6fn87xwqihbw8np00ll5gk9k4v" "97cf3c155484" "ach")
("0sajpblp639l448xywx7xlybjr5dm0rfrzx511a8pa9wn7dma4mf" "46929b4dda4e" "af")
("04s756aaf1yyli1dn1vfdjymgyjs1pz2n7jvz76j27x4nzdgss7l" "96ffd2b04ba3" "an")
("16nw7slm7irsnjf81r6zl273kbzri8sdbimzgzxwm0vh6xr61rgs" "1c4231166ddf" "ar")
("0i0qrj4j8zjv55cisbmr21dp8mz933hc9wkpn3abz5k5pn84gpry" "bc4384b8be78" "ast")
("1r77mmwg9x1jfa7g5lqa31rgyfrnix1zj80ibn5rq9gyhhsdrwbd" "dd56aead51fa" "az")
("1kzjpy5bnvps8di72ksynhbhc9bpw1ml6hvphm74z8dz55ai4c18" "9d2bff64ddfb" "be")
("1fygvjgph3siknfm7l0fh9y4ava72z1rxkip1zgmcdgq7jz8wzpf" "3808f4fe4db6" "bg")
("1x9nlqia441xh04rhkmkw6qrgpwnyqw0grrf5n2qw96939wnmgl7" "5ca8752ed8df" "bn")
("18yyxqzab44bgqx7h052d5nxyhv0l5bidgr16z1b3ak2crsfyx9q" "4c5d30ca9bf2" "br")
("11bschjz7pgsm4r1qan5l4s3nkzm5jb0kivpp186wd1xsjci5bjb" "cf2478a7eae1" "bs")
("12bak64nl6qi092l55xv330vh38mfsicrrf4wi693nn7zqb3mbpw" "4aa2cc349211" "ca")
("0p97d7pnxqs971rr5c57i6cw3mx1mp3iasa0xdmdk0zpz9pjd3s4" "806914072144" "ca-valencia")
("1rligf98h9r16mw3r3n5jalzi74xn2dnh6hkiixp7s2bvhjicwa4" "db2163383129" "cak")
("18y5j8ljh72mj2nbx0m64mi3fyjbwyx992i004sv3zvs4d4z18w4" "d1d09eedddde" "cs")
("12i4m9q6f8sl8arm8ja4gs4sl9m59p3kddlqi68srpz9mk66rqxz" "1caf58c64227" "cy")
("16wacsd23zd4j0yi6vbj033wylia8hlpswx949x5hy1h9817j4vn" "9ef3b56aa243" "da")
("1ddxnqpfi1wnciwmzkvvjinb51h22bg70r0dn7db17q64p4271rk" "a050b0eaec0a" "de")
("0gw5h52rw3bc2lwffnv845sjwwj22gmabchmpa1rw2y6087dc7zk" "613135cb0759" "dsb")
("0v17da37w1kbs73i6xql4c9xng6rfachyylpy3w8hpibpvi7i30n" "903db7c76b31" "el")
("0ky1nj4kp070nriw0igsa64l39plcv2365zl8lqpbydyp6clwc3m" "6bb74673e9aa" "en-CA")
("15jck6si2l4h5lrs8zjx56y97p70njpi26nnq6xfmvj1dk5qb4g2" "c06bd83a9295" "en-GB")
("1gc400n8j8qk70vdw7fkf4m9yviia35cmj6361pxnyvsjrgm4qvm" "ae0fda5114c4" "eo")
("0y01lahdrd3krsh6nrr3iw36r2x4advh5qw54vf92w11l3aiyvfz" "0c294dc9ce40" "es-AR")
("16jjbsfa62dxk36ccziv7nmh26c1s0b05960in6zs5nnfrv4yh35" "7c30b2981fb4" "es-CL")
("0qh8c8f8dsv1v56n1mcpn84l39bpnqfl8v8iacq12zh322srr549" "ad1444f4f833" "es-ES")
("1vh3hm5r4ch6mk3ymbk12b9rppwc75fmbi0i7cml82kxsi77bzw5" "0a26cdb23536" "es-MX")
("159l92jxx48lgba1g8ig3mdzhwsf3jpg3nx1g9blq4majk5hqr6q" "65a38a830795" "et")
("1ha8y1wbgb66dqm26x1q2xwygy7bblkjz40y9vh5d2qlpr3fn0av" "21e0930b221d" "eu")
("0rq4pcw6klm0bbljm1wdfvna8bpa35cm47hh2s63i2xdax4scahf" "5a4bb020cf09" "fa")
("1py2594gsvpgrxnplz278ffb7grsf384kzjskrl1zyps0jw8fb1x" "4a4f712cd4aa" "ff")
("1dyd55ngsglp1w2gh0yaacwb0vsq23gdvnj76f2x6g39h1li9s0z" "9c51cd915e2b" "fi")
("0kimwivpq6pr63jh1k9fszsv8bi8vns3scg76mmnvbhh2ca8q7wj" "4f9e24a696ee" "fr")
("1sbbnnp12lgy5qan2kix02942830b1969bd7jxrv7piwqfch9i06" "9e21a0eeb5b1" "fy-NL")
("0dsvvyfrzkx5h44gqgdci7arb8h4lq48w20cnr8fc7j17grvnkzz" "999a995bc09d" "ga-IE")
("1487msbsgrfzmyijhf6a4wbqdjpd7b7ki9nwrjjjjlnbw0h0ljpb" "6a9ddcab3240" "gd")
("1kzc4fwr18kgisdzba2acj1ag8mxbifqpk5p30jv68nmvqfsvl8d" "51eb5e352db9" "gl")
("13gy3wn44kcxr7j3sbl69fp415875f4vb0gm91hx0fysqlvryhcs" "b9de1ffe3224" "gn")
("0w5nvzpjn5vr35c1852rlff581vpy71nc096cz125852kyqkzkc3" "5b3307475ada" "gu-IN")
("1ycakc4qpy9vcy50j3ricryjfnjr9v3a5ijj6bbfl4y6aks157fy" "c742df968ffd" "he")
("1b2jf83c500wm5wcdnijq0b7y4m8n6271smq8pygahn5nq17f0gq" "1a3039a52b8a" "hi-IN")
("19bbw8ix5m83cf4yarcmjl7jqa8xfabwqnh3nj6vi52rwvn7whk5" "8dc50e269ef3" "hr")
("12rrsvgg6bb2h8fhni7jbx8pv983q8ym5fhfjim957n9q2yp5db6" "67e40f48dec7" "hsb")
("0apyh713p3hrlj8041xwblcssahbmsqp9v9hbmb50ayf4z850kr1" "40073a597b1b" "hu")
("0q0r076lq250d3vmnz9d92wj137c2v8i362c2avmkgp5zr3mcl0z" "2ea33335afdb" "hy-AM")
("0qza33gdc1i9259dwd2f7vd78s0a6rg34aqdkhcn7f2l6ybw6xd6" "930041db15eb" "ia")
("1211h0gp7gianh3qf76w04gfzk4n2bnyc9i8dviyz0vh4cjbx11m" "08811a49b41c" "id")
("12lcr841g1j7453s7gb51vrvxmshx4ha3h1jx4vh8wr891xv8l6a" "2f7a8d31e0ba" "is")
("1x585g0r2kcv0d3phnxx85bk5g0pi1yl0hwp4idv19yc9hslr04s" "188357cc04b4" "it")
("09v35g9v7j6x0p1hggydm3a1rmq2fh4z7g1l88z3w5k6wq2nhj1b" "45cee0ba4771" "ja")
("0prs3vycfvvaffjigdgyxiq41ak2rc34lnan5a6cwdqjgy7z450s" "d60a19d9bf17" "ja-JP-mac")
("1nskzm8rgczrbgcxlzzq5zqdfd456ad0cylq27nf0wjiyq6kjzcm" "00cb00e78672" "ka")
("0g6zznmhiam172nm7g2qzfpk415mna8kiihm73z2sdns64xb3ymg" "77a293a4bced" "kab")
("17dld9lrym7rpvpvnkssivp4wx1f11zpk86wczbq1h52qgd70p55" "2c9b33a56d5d" "kk")
("1nlzl8930c8ql3yq425wyqlxvq8arrjv20xpm5g7yfxd54av89ac" "9cddd42af05c" "km")
("07hkrcdksvrqk816yimd036dlw15nc4sjk4gmw16ywbp093v0mqq" "e0c2969a8398" "kn")
("08aqggvk3qbv5bzks9i1iba9akhkpm01d2c9k0zf41mpr2r5yfg2" "827567d0dafc" "ko")
("0vagaiwy80bs1k3gkacshlzb5zchkcshx0ypwirvayc63sw4yl8l" "694b2a24e868" "lij")
("1r43kp1kzahrbza0hiyavqplk9h08pzsb6rpjy79jr6l1iqb89sy" "d6728db7e060" "lt")
("0sq2wbsj79xl7fi454k6q5xdhxyck1whjz315rv37vphqpx86b9b" "61e9b33f4d94" "lv")
("0q8jxg1af22hs9wjdf0jd3bqk4rafxyzvsjl35k75am7l2y1fl3c" "9e482f6dd72c" "mk")
("1zsfzjrzbc58d30a9yz12h5vphywgpw8xg6y6zn3677a785dvr20" "1fd2763336a4" "mr")
("1rzygkkpn1a59daygd3hdaqph2np6sqvpgh68j0xr4il958ymnsm" "67ddab62dab4" "ms")
("16jp6w5gizfxs7jvncg3ly13m59vqvh4rlmjd0q23m5g5ff9sklc" "3ed015b51bf3" "my")
("1wfv023j67lb4iyf49fsknwm4z3xy0xqcf25b2nzanggxj26n01x" "d01801233a8f" "nb-NO")
("1946vfia58vbjfippb5pfsskbjj95w7hb340smn6ry2vmza99mxp" "582defb08fb2" "ne-NP")
("12w5ywh4c3s55y3zqc48cp1gcpwwjg444yfh1bghhhb9ni1xkh5i" "05f6359a29a6" "nl")
("17jb076320cgkw1ypwmws2vjxsqlv2ww8aaisa3j334vbrw1m4zx" "50b41a1ddded" "nn-NO")
("1y840j0v5zdgj94cbacy6j1snf44rynmzxq3yk8i26arcar62akl" "a6a138531a44" "oc")
("0jq1hq4xhqxpa26r8pb1bgbaljgfkhn9l6p5pbnslkllpbh70q6l" "e70a3afaef25" "pa-IN")
("1hih138skwy2gb8q10ngg6zalrk3aa3d549mg79gqzxbi5zy19fw" "e035f33389eb" "pl")
("1hhif4hx4k351wm1bzykzycfzb5q8msxmiwh5r1cy32rh8wkxwhh" "54098495f37f" "pt-BR")
("0gkjs12rxjml1m3mljskpz1gr6aph0c31nwpwdqybfg54w9qslib" "3fdf021f624e" "pt-PT")
("0anyvwd9v6mr8y3ww33s6qnxkawqn5lz65vrxx3m3kzky63ai1xk" "794f9374eb87" "rm")
("1p4drj25gsfv7lwgm5saazh38iqrh53952n8i4bmxpn0nadbm2n5" "71ce18bcf6cc" "ro")
("17yssf4axd3zvncl4ka4wkfnwcn0z0arp3390vb9cps67na29p36" "3a9587227699" "ru")
("0xk6rksspcw1222v4rgk5a6gzrpx64k29hm7p9qkqwd70s34yj46" "c020582a72ce" "si")
("1ax5ibydyn7sj208r66zcxlcr8dxdqrw28vqyjif4jx583rp4lfp" "745a699b7f51" "sk")
("13rin7hm1dv8g0hbcv8mp2hiwpk1k5bhzvkqpqajkkik4lx523mc" "8e437e2351ef" "sl")
("0yh5jkl5zw3f7x1w2w6zfj3dyvcl4wj1zv4di7qsq2nl2yyizf7x" "2d99e2eff94f" "son")
("0vzq7s27jsdbw5k59wfykysg1kd8w229ab5d4zjdf30l59igkahh" "69bbdf07bd80" "sq")
("1mwivvs8vrk6vjq6i33kwlrlisra7dy35521ayps9p2rz2dll4rr" "215df5c5125c" "sr")
("0g97yz1rg5cfflj8vvq3sqliyvm06x818z9yldfh5rjg1y6n9fjd" "8be00a1a50d4" "sv-SE")
("0ii02jn3lh2i6d0s95whx9aj6w3x8axc7w1rlzj0lc2s9n52krz3" "170a84339dbe" "ta")
("1ss7symad2crapxjqc0xhc0n17l79r5vf7flvkgk7npjky4vb7nv" "72a79a304f7f" "te")
("11iqmg8zamgscnvs4n2xpw3g9azn6w38qs313wiwm86pyx6694ss" "8e91ce3064c5" "th")
("1zgkvn9flb8by62ip9r3gmpgxwgkww1zhml5mwa0djq3ppfdgi1c" "0f914d0cda56" "tl")
("1filkhdak6dbrd83q602x1qw1703nlm53nm9gcyab8s16gsx6ylz" "62ca6a8eaeba" "tr")
("0cgagdy0ixprk3knczdmkqxkmx4ybmshhh0956kxbd0iab0dfcf6" "f110ccac4cde" "trs")
("1f1ghk67zwnwc5x3nk82vcv94nf8glngkfya1hg074q3088sj9pa" "56c0102d5f1c" "uk")
("0iyw1b2jjylkdwxv9sxvj4ikxl64sx612b2dvvmf1di8bw86w74r" "7d53bce5ae98" "ur")
("1q83cp5pfgs8l03zirwi8r5qp8qyh4zvxdx1ilgaqqlyg42yql7c" "9b500e1a054d" "uz")
("1d4nkybz2hk64ay04k965b9lc5nhhpmzcs5ww3b6q4n93rf9c2z7" "2a000025928a" "vi")
("1cnrsfnyl3sw3sxsggmjwydvphb2diy0vzknvxdhpnvq3ln18hga" "74724087c25b" "xh")
("1j6l66v1xw27z8w78mpsnmqgv8m277mf4r0hgqcrb4zx7xc2vqyy" "527e5e090608" "zh-CN")
("1frwx35klpyz3sdwrkz7945ivb2dwaawhhyfnz4092h9hn7rc4ky" "6cd366ad2947" "zh-TW")))
;; An origin "method" for sources that are computed rather than downloaded:
;; HASH-ALGO and HASH are accepted for interface compatibility but unused,
;; and the derivation is obtained by forcing GEXP-PROMISE and building the
;; resulting G-expression with GUILE on SYSTEM.
(define* (computed-origin-method gexp-promise hash-algo hash
                                 #:optional (name "source")
                                 #:key (system (%current-system))
                                 (guile (default-guile)))
  "Return a derivation that executes the G-expression that results
from forcing GEXP-PROMISE."
  (mlet %store-monad ((guile (package->derivation guile system)))
    (gexp->derivation (or name "computed-origin")
                      (force gexp-promise)
                      #:graft? #f ;nothing to graft
                      #:system system
                      #:guile-for-build guile)))
(define %icecat-version "78.12.0-guix0-preview1")
;; must be of the form YYYYMMDDhhmmss
;; NOTE(review): the '%icecat-build-id' definition this comment originally
;; annotated appears to have been dropped during extraction -- restore it.

;; 'icecat-source' is a "computed" origin that generates an IceCat tarball
;; from the corresponding upstream Firefox ESR tarball, using the 'makeicecat'
;; script from the upstream IceCat project.
;; Computed origin: download the upstream Firefox ESR tarball and the GNU
;; Gnuzilla (IceCat) scripts, run 'makeicecat' to rebrand/liberate the
;; sources, and repack the result as a reproducible icecat-*.tar.xz.
(define icecat-source
  (let* ((base-version (first (string-split %icecat-version #\-)))
         (major-version (first (string-split base-version #\.)))
         (minor-version (second (string-split base-version #\.)))
         (sub-version (third (string-split base-version #\.)))

         (upstream-firefox-version (string-append base-version "esr"))
         (upstream-firefox-source
          (origin
            (method url-fetch)
            (uri (string-append
                  ;; NOTE(review): URL restored from upstream Guix — verify.
                  "https://archive.mozilla.org/pub/firefox/releases/"
                  upstream-firefox-version "/source/"
                  "firefox-" upstream-firefox-version ".source.tar.xz"))
            (sha256
             (base32
              "043lplq5i4ax6nh4am3b2bm8dbn4rzzcji1zp0yy1pad4nwahmcb"))))

         (upstream-icecat-base-version "78.7.0") ; maybe older than base-version
         ;;(gnuzilla-commit (string-append "v" upstream-icecat-base-version))
         (gnuzilla-commit "abfe5eebaca3c2787f1a9505669393674493c177")
         (gnuzilla-source
          (origin
            (method git-fetch)
            (uri (git-reference
                  ;; NOTE(review): URL restored from upstream Guix — verify.
                  (url "https://git.savannah.gnu.org/git/gnuzilla.git")
                  (commit gnuzilla-commit)))
            (file-name (git-file-name "gnuzilla"
                                      ;;upstream-icecat-base-version
                                      (string-take gnuzilla-commit 8)))
            (sha256
             (base32
              "00ws3540x5whpicc5fx4k949ff73cqvajz6jp13ahn49wqdads47"))))

         ;; 'search-patch' returns either a valid file name or #f, so wrap it
         ;; in 'assume-valid-file-name' to avoid 'local-file' warnings.
         (gnuzilla-fixes-patch
          (local-file (assume-valid-file-name
                       (search-patch "icecat-use-older-reveal-hidden-html.patch"))))
         (makeicecat-patch
          (local-file (assume-valid-file-name
                       (search-patch "icecat-makeicecat.patch")))))

    (origin
      (method computed-origin-method)
      (file-name (string-append "icecat-" %icecat-version ".tar.xz"))
      (sha256 #f)
      ;; The URI is a promise (forced by 'computed-origin-method') that
      ;; builds the tarball in a derivation.
      (uri
       (delay
         (with-imported-modules '((guix build utils))
           #~(begin
               (use-modules (guix build utils))
               (let ((firefox-dir
                      (string-append "firefox-" #$base-version))
                     (icecat-dir
                      (string-append "icecat-" #$%icecat-version)))

                 ;; Put the native build tools on PATH for 'makeicecat'.
                 (mkdir "/tmp/bin")
                 (set-path-environment-variable
                  "PATH" '("bin")
                  (list "/tmp"
                        #+(canonical-package bash)
                        #+(canonical-package coreutils)
                        #+(canonical-package findutils)
                        #+(canonical-package patch)
                        #+(canonical-package xz)
                        #+(canonical-package sed)
                        #+(canonical-package grep)
                        #+(canonical-package bzip2)
                        #+(canonical-package gzip)
                        #+(canonical-package tar)
                        #+rename))

                 ;; 'makeicecat' expects Perl's rename as "prename".
                 (symlink #+(file-append rename "/bin/rename")
                          "/tmp/bin/prename")

                 ;; We copy the gnuzilla source directory because it is
                 ;; read-only in 'gnuzilla-source', and the makeicecat script
                 ;; uses "cp -a" to copy parts of it and assumes that the
                 ;; copies will be writable.
                 (copy-recursively #+gnuzilla-source "/tmp/gnuzilla"
                                   #:log (%make-void-port "w"))

                 (with-directory-excursion "/tmp/gnuzilla"
                   (make-file-writable "makeicecat")
                   (invoke "patch" "--force" "--no-backup-if-mismatch"
                           "-p1" "--input" #+gnuzilla-fixes-patch)
                   (invoke "patch" "--force" "--no-backup-if-mismatch"
                           "-p1" "--input" #+makeicecat-patch)
                   (patch-shebang "makeicecat")
                   (substitute* "makeicecat"
                     (("^FFMAJOR=(.*)" all ffmajor)
                      (unless (string=? #$major-version
                                        (string-trim-both ffmajor))
                        ;; The makeicecat script cannot be expected to work
                        ;; properly on a different version of Firefox, even if
                        ;; no errors occur during execution.
                        (error "makeicecat major version mismatch"))
                      (string-append "FFMAJOR=" #$major-version "\n"))
                     (("^FFMINOR=.*")
                      (string-append "FFMINOR=" #$minor-version "\n"))
                     (("^FFSUB=.*")
                      (string-append "FFSUB=" #$sub-version "\n"))
                     (("^DATA=.*")
                      "DATA=/tmp/gnuzilla/data\n")
                     (("/bin/sed")
                      #+(file-append (canonical-package sed) "/bin/sed"))))

                 (format #t "Unpacking upstream firefox tarball...~%")
                 (force-output)
                 (invoke "tar" "xf" #+upstream-firefox-source)
                 (rename-file firefox-dir icecat-dir)

                 (with-directory-excursion icecat-dir
                   (format #t "Populating l10n directory...~%")
                   (force-output)
                   (mkdir "l10n")
                   (with-directory-excursion "l10n"
                     (for-each
                      (lambda (locale-dir)
                        (let ((locale
                               (string-drop (basename locale-dir)
                                            (+ 32 ; length of hash
                                               (string-length "-mozilla-locale-")))))
                          (format #t "  ~a~%" locale)
                          (force-output)
                          (copy-recursively locale-dir locale
                                            #:log (%make-void-port "w"))
                          (for-each make-file-writable (find-files locale))
                          (with-directory-excursion locale
                            (when (file-exists? ".hgtags")
                              (delete-file ".hgtags"))
                            (mkdir-p "browser/chrome/browser/preferences")
                            (call-with-output-file
                                "browser/chrome/browser/preferences/advanced-scripts.dtd"
                              (lambda (port) #f)))))
                      '#+all-mozilla-locales)
                     (copy-recursively #+mozilla-compare-locales
                                       "compare-locales"
                                       #:log (%make-void-port "w"))
                     (delete-file "compare-locales/.gitignore")
                     (delete-file "compare-locales/.hgignore")
                     (delete-file "compare-locales/.hgtags"))

                   (format #t "Running makeicecat script...~%")
                   (force-output)
                   (invoke "bash" "/tmp/gnuzilla/makeicecat"))

                 (format #t "Packing IceCat source tarball...~%")
                 (force-output)
                 (invoke "tar" "cfa" #$output
                         ;; Avoid non-determinism in the archive.  We set the
                         ;; mtime of files in the archive to early 1980 because
                         ;; the build process fails if the mtime of source
                         ;; files is pre-1980, due to the creation of zip
                         ;; archives.
                         ;; NOTE(review): mtime flag restored — verify value.
                         "--mtime=@315619200" ; 1980-01-02 UTC
                         "--owner=root:0"
                         "--group=root:0"
                         "--sort=name"
                         icecat-dir)

                 #t))))))))
;; The IceCat package proper, built from 'icecat-source' above.
(define-public icecat
  (package
    (name "icecat")
    (version %icecat-version)
    (source icecat-source)
    (build-system gnu-build-system)
    (inputs
     `(("alsa-lib" ,alsa-lib)
       ("bzip2" ,bzip2)
       ("cups" ,cups)
       ("dbus-glib" ,dbus-glib)
       ("gdk-pixbuf" ,gdk-pixbuf)
       ("glib" ,glib)
       ("gtk+" ,gtk+)
       ("gtk+-2" ,gtk+-2)
       ;; UNBUNDLE-ME! ("graphite2" ,graphite2)
       ("pango" ,pango)
       ("freetype" ,freetype)
       ;; UNBUNDLE-ME! ("harfbuzz" ,harfbuzz)
       ("libcanberra" ,libcanberra)
       ("libgnome" ,libgnome)
       ("libjpeg-turbo" ,libjpeg-turbo)
       ;; UNBUNDLE-ME! ("libogg" ,libogg)
       ;; UNBUNDLE-ME! ("libtheora" ,libtheora) ; wants theora-1.2, not yet released
       ;; UNBUNDLE-ME! ("libvorbis" ,libvorbis)
       ("libxft" ,libxft)
       ("libevent" ,libevent)
       ("libxinerama" ,libxinerama)
       ("libxscrnsaver" ,libxscrnsaver)
       ("libxcomposite" ,libxcomposite)
       ("libxt" ,libxt)
       ("libffi" ,libffi)
       ("ffmpeg" ,ffmpeg)
       ;; UNBUNDLE-ME! ("libvpx" ,libvpx)
       ("icu4c" ,icu4c-67)
       ("pixman" ,pixman)
       ("pulseaudio" ,pulseaudio)
       ("mesa" ,mesa)
       ("mit-krb5" ,mit-krb5)
       ;; See <https://bugs.gnu.org/32833>
       ;; and related comments in the 'remove-bundled-libraries' phase.
       ;; UNBUNDLE-ME! ("nspr" ,nspr)
       ;; UNBUNDLE-ME! ("nss" ,nss)
       ("shared-mime-info" ,shared-mime-info)
       ;; UNBUNDLE-ME! ("sqlite" ,sqlite)
       ("unzip" ,unzip)
       ("zip" ,zip)
       ;; UNBUNDLE-ME! ("zlib" ,zlib)
       ))
    (native-inputs
     ;; The following patches are specific to the Guix packaging of IceCat,
     ;; and therefore we prefer to leave them out of 'source', which should be
     ;; a tarball suitable for compilation on any system that IceCat supports.
     ;; (Bug fixes and security fixes, however, should go in 'source').
     `(;; XXX TODO: Adapt these patches to IceCat 68.
       ;; ("icecat-avoid-bundled-libraries.patch"
       ;;  ,(search-patch "icecat-avoid-bundled-libraries.patch"))
       ;; ("icecat-use-system-graphite2+harfbuzz.patch"
       ;;  ,(search-patch "icecat-use-system-graphite2+harfbuzz.patch"))
       ;; ("icecat-use-system-media-libs.patch"
       ;;  ,(search-patch "icecat-use-system-media-libs.patch"))
       ("patch" ,(canonical-package patch))
       ("rust" ,rust-1.41)
       ("cargo" ,rust-1.41 "cargo")
       ("rust-cbindgen" ,rust-cbindgen-0.14)
       ("llvm" ,llvm)
       ("clang" ,clang)
       ("perl" ,perl)
       ("node" ,node)
       ("python" ,python)
       ("python-2" ,python-2)
       ("python2-pysqlite" ,python2-pysqlite)
       ("yasm" ,yasm)
       ;; NOTE(review): nasm entry restored from upstream Guix — verify.
       ("nasm" ,nasm)  ; XXX FIXME: only needed on x86_64 and i686
       ("pkg-config" ,pkg-config)
       ("autoconf" ,autoconf-2.13)
       ("which" ,which)))
    (arguments
     `(#:tests? #f          ; no check target
       #:out-of-source? #t  ; must be built outside of the source directory

       #:configure-flags `("--enable-default-toolkit=cairo-gtk3-wayland"
                           "--with-distribution-id=org.gnu"

                           ;; Do not require addons in the global app or
                           ;; system directories to be signed by Mozilla.
                           "--with-unsigned-addon-scopes=app,system"
                           "--allow-addon-sideload"

                           "--enable-pulseaudio"

                           "--disable-tests"
                           "--disable-updater"
                           "--disable-crashreporter"
                           "--disable-eme"

                           ;; Building with debugging symbols takes ~5GiB, so
                           ;; disable it.
                           "--disable-debug"
                           "--disable-debug-symbols"

                           ;; Clang is needed to build Stylo, Mozilla's new
                           ;; CSS engine.  We must specify the clang paths
                           ;; manually, because otherwise the Mozilla build
                           ;; system looks in the directories returned by
                           ;; llvm-config --bindir and llvm-config --libdir,
                           ;; which return paths in the llvm package where
                           ;; clang is not found.
                           ,(string-append "--with-clang-path="
                                           (assoc-ref %build-inputs "clang")
                                           "/bin/clang")
                           ,(string-append "--with-libclang-path="
                                           (assoc-ref %build-inputs "clang")
                                           "/lib")

                           ;; Hack to work around missing
                           ;; "unofficial" branding in icecat.
                           "--enable-official-branding"

                           ;; Avoid bundled libraries.
                           "--with-system-jpeg" ; must be libjpeg-turbo
                           ;; UNBUNDLE-ME! "--with-system-zlib"
                           ;; UNBUNDLE-ME! "--with-system-bz2"
                           ;; UNBUNDLE-ME! "--with-system-libevent"
                           ;; UNBUNDLE-ME! "--with-system-ogg"
                           ;; UNBUNDLE-ME! "--with-system-vorbis"
                           ;; UNBUNDLE-ME! "--with-system-theora" ; wants theora-1.2, not yet released
                           ;; UNBUNDLE-ME! "--with-system-libvpx"
                           "--with-system-icu"

                           ;; See <https://bugs.gnu.org/32833>
                           ;; and related comments in the
                           ;; 'remove-bundled-libraries' phase below.
                           ;; UNBUNDLE-ME! "--with-system-nspr"
                           ;; UNBUNDLE-ME! "--with-system-nss"

                           ;; UNBUNDLE-ME! "--with-system-harfbuzz"
                           ;; UNBUNDLE-ME! "--with-system-graphite2"
                           "--enable-system-pixman"
                           "--enable-system-ffi"
                           ;; UNBUNDLE-ME! "--enable-system-sqlite"

                           ;; Fails with "--with-system-png won't work because
                           ;; the system's libpng doesn't have APNG support".
                           ;; According to
                           ;; <https://sourceforge.net/projects/libpng-apng/>,
                           ;; "the Animated Portable Network Graphics (APNG)
                           ;; is an unofficial extension of the Portable
                           ;; Network Graphics (PNG) format";
                           ;; we probably do not wish to support it.
                           ;; "--with-system-png"
                           )

       #:imported-modules ,%cargo-utils-modules ;for `generate-all-checksums'

       #:modules ((ice-9 ftw)
                  (ice-9 rdelim)
                  (ice-9 regex)
                  (ice-9 match)
                  (srfi srfi-34)
                  (srfi srfi-35)
                  (rnrs bytevectors)
                  (rnrs io ports)
                  (guix elf)
                  (guix build gremlin)
                  ,@%gnu-build-system-modules)
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'apply-guix-specific-patches
           (lambda* (#:key inputs native-inputs #:allow-other-keys)
             (let ((patch (string-append (assoc-ref (or native-inputs inputs)
                                                    "patch")
                                         "/bin/patch")))
               ;; Apply every native input labeled "icecat-*.patch".
               (for-each (match-lambda
                           ((label . file)
                            (when (and (string-prefix? "icecat-" label)
                                       (string-suffix? ".patch" label))
                              (format #t "applying '~a'...~%" file)
                              (invoke patch "--force" "--no-backup-if-mismatch"
                                      "-p1" "--input" file))))
                         (or native-inputs inputs)))
             #t))
         (add-after 'apply-guix-specific-patches 'remove-bundled-libraries
           (lambda _
             ;; Remove bundled libraries that we don't use, since they may
             ;; contain unpatched security flaws, they waste disk space and
             ;; memory, and may cause confusion.
             (for-each (lambda (file)
                         (format #t "deleting '~a'...~%" file)
                         (delete-file-recursively file))
                       '(;; FIXME: Removing the bundled icu breaks configure.
                         ;;   * The bundled icu headers are used in some places.
                         ;;   * The version number is taken from the bundled copy.
                         ;;"intl/icu"

                         ;; FIXME: A script from the bundled nspr is used.
                         ;;"nsprpub"

                         ;; FIXME: With the update to IceCat 60, using system NSS
                         ;;        broke certificate validation.  See
                         ;;        <https://bugs.gnu.org/32833>.  For now, we use
                         ;;        the bundled NSPR and NSS.  TODO: Investigate,
                         ;;        and try to unbundle these libraries again.
                         ;; UNBUNDLE-ME! "security/nss"

                         ;; TODO: Use more system media libraries.  See:
                         ;; <https://bugzilla.mozilla.org/show_bug.cgi?id=517422>
                         ;;   * libtheora: esr60 wants v1.2, not yet released.
                         ;;   * soundtouch: avoiding the bundled library would
                         ;;     result in some loss of functionality.  There's
                         ;;     also an issue with exception handling
                         ;;     configuration.  It seems that this is needed in
                         ;;     some moz.build:
                         ;;       DEFINES['ST_NO_EXCEPTION_HANDLING'] = 1
                         ;;   * libopus
                         ;;   * speex

                         "modules/freetype2"
                         ;;"media/libjpeg"  ; needed for now, because media/libjpeg/ is referenced from config/external/
                         ;; UNBUNDLE-ME! "modules/zlib"
                         ;; UNBUNDLE-ME! "ipc/chromium/src/third_party/libevent"
                         ;; UNBUNDLE-ME! "media/libvpx"
                         ;; UNBUNDLE-ME! "media/libogg"
                         ;; UNBUNDLE-ME! "media/libvorbis"
                         ;; UNBUNDLE-ME! "media/libtheora" ; wants theora-1.2, not yet released
                         ;; UNBUNDLE-ME! "media/libtremor"
                         ;; UNBUNDLE-ME! "gfx/harfbuzz"
                         ;; UNBUNDLE-ME! "gfx/graphite2"
                         "js/src/ctypes/libffi"
                         ;; UNBUNDLE-ME! "db/sqlite3"
                         ))
             #t))
         (add-after 'remove-bundled-libraries 'link-libxul-with-libraries
           (lambda _
             ;; libxul.so dynamically opens libraries, so here we explicitly
             ;; link them into libxul.so instead.
             ;;
             ;; TODO: It might be preferable to patch in absolute file names in
             ;; calls to dlopen or PR_LoadLibrary, but that didn't seem to
             ;; work.  More investigation is needed.
             (substitute* "toolkit/library/moz.build"
               (("^# This library needs to be last" all)
                (string-append "OS_LIBS += [
    'GL', 'gnome-2', 'canberra', 'Xss', 'cups', 'gssapi_krb5',
    'avcodec', 'avutil', 'pulse' ]\n\n"
                               all)))
             #t))
         (add-after 'link-libxul-with-libraries 'fix-ffmpeg-runtime-linker
           (lambda* (#:key inputs #:allow-other-keys)
             (let* ((ffmpeg (assoc-ref inputs "ffmpeg"))
                    (libavcodec (string-append ffmpeg "/lib/libavcodec.so")))
               ;; Arrange to load libavcodec.so by its absolute file name.
               (substitute* "dom/media/platforms/ffmpeg/FFmpegRuntimeLinker.cpp"
                 (("libavcodec\\.so")
                  libavcodec))
               ;; Populate the sandbox read-path whitelist as needed by ffmpeg.
               (let* ((mime-info (assoc-ref inputs "shared-mime-info"))
                      (libavcodec-runpath (call-with-input-file libavcodec
                                            (compose elf-dynamic-info-runpath
                                                     elf-dynamic-info
                                                     parse-elf
                                                     get-bytevector-all)))
                      (whitelist (cons (string-append mime-info "/share/mime/")
                                       (map (lambda (dir)
                                              (string-append dir "/"))
                                            libavcodec-runpath)))
                      (whitelist-string (string-join whitelist ","))
                      (port (open-file "browser/app/profile/icecat.js" "a")))
                 (format #t "setting 'security.sandbox.content.read_path_whitelist' to '~a'~%"
                         whitelist-string)
                 (format port "~%pref(\"security.sandbox.content.read_path_whitelist\", ~S);~%"
                         whitelist-string)
                 (close-output-port port))
               #t)))
         (replace 'bootstrap
           (lambda _
             (invoke "sh" "-c" "autoconf old-configure.in > old-configure")
             ;; 'configure' must be newer than 'old-configure.in', or else the
             ;; build system will raise an alarm and abort.
             (invoke "touch" "configure")))
         (add-after 'patch-source-shebangs 'patch-cargo-checksums
           (lambda _
             (use-modules (guix build cargo-utils))
             ;; SHA-256 of the empty string: makes every vendored crate's
             ;; checksum verifiable after our substitutions.
             (let ((null-hash "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"))
               (for-each (lambda (file)
                           (format #t "patching checksums in ~a~%" file)
                           (substitute* file
                             (("^checksum = \".*\"")
                              (string-append "checksum = \"" null-hash "\""))))
                         (find-files "." "Cargo.lock$"))
               (for-each generate-all-checksums
                         '("services"
                           "js"
                           "third_party/rust"
                           "dom/media"
                           "dom/webauthn"
                           "toolkit"
                           "gfx"
                           "storage"
                           "modules"
                           "xpcom/rust"
                           "media"
                           "mozglue/static/rust"
                           "netwerk"
                           "remote"
                           "intl"
                           "servo"
                           "security/manager/ssl"
                           "build")))
             #t))
         (replace 'configure
           ;; configure does not work followed by both "SHELL=..." and
           ;; "CONFIG_SHELL=..."; set environment variables instead
           (lambda* (#:key outputs configure-flags #:allow-other-keys)
             (let* ((out (assoc-ref outputs "out"))
                    (bash (which "bash"))
                    (abs-srcdir (getcwd))
                    (srcdir (string-append "../" (basename abs-srcdir)))
                    (flags `(,(string-append "--prefix=" out)
                             ,(string-append "--with-l10n-base="
                                             abs-srcdir "/l10n")
                             ,@configure-flags)))
               (setenv "SHELL" bash)
               (setenv "CONFIG_SHELL" bash)
               (setenv "AUTOCONF" (which "autoconf")) ; must be autoconf-2.13
               ;; NOTE(review): CC setting restored from upstream Guix — verify.
               (setenv "CC" "gcc")  ; apparently needed when Stylo is enabled
               (setenv "MOZ_BUILD_DATE" ,%icecat-build-id) ; avoid timestamp
               (setenv "LDFLAGS" (string-append "-Wl,-rpath="
                                                (assoc-ref outputs "out")
                                                "/lib/icecat"))
               (mkdir "../build")
               (chdir "../build")
               (format #t "build directory: ~s~%" (getcwd))
               (format #t "configure flags: ~s~%" flags)
               (apply invoke bash
                      (string-append srcdir "/configure")
                      flags))))
         (replace 'build
           ;; The build system often spuriously fails (see upstream bug
           ;; reports; URL lost in extraction — TODO restore).  To work
           ;; around this, we try the standard 'build' phase up to 5 times.
           (lambda args
             (let ((build (assoc-ref %standard-phases 'build)))
               (let retry ((remaining-attempts 5))
                 (if (= remaining-attempts 1)
                     ;; Last attempt: let any exception propagate.
                     (apply build args)
                     (guard (c ((invoke-error? c)
                                (format #t "~%Retrying build! (~a attempts remaining)~%~%"
                                        (- remaining-attempts 1))
                                (force-output)
                                (retry (- remaining-attempts 1))))
                       (apply build args)))))))
         (add-after 'build 'neutralise-store-references
           (lambda _
             ;; Mangle the store references to compilers & other build tools in
             ;; about:buildconfig, reducing IceCat's closure significantly on
             ;; x86-64.
             (substitute*
                 "dist/bin/chrome/toolkit/content/global/buildconfig.html"
               (((format #f "(~a/)([0-9a-df-np-sv-z]{32})"
                         (regexp-quote (%store-directory)))
                 _ store hash)
                ;; Split the hash so it no longer matches a store item.
                (string-append store
                               (string-take hash 8)
                               "<!-- Guix: not a runtime dependency -->"
                               (string-drop hash 8))))
             #t))
         (add-before 'configure 'install-desktop-entry
           (lambda* (#:key outputs #:allow-other-keys)
             ;; Install the '.desktop' file.
             (let* ((desktop-file "taskcluster/docker/icecat-snap/icecat.desktop")
                    (out (assoc-ref outputs "out"))
                    (applications (string-append out "/share/applications")))
               (substitute* desktop-file
                 (("^Exec=icecat") (string-append "Exec=" out "/bin/icecat"))
                 (("IceCat") "GNU IceCat")
                 (("Icon=.*") "Icon=icecat\n")
                 (("NewWindow") "new-window")
                 (("NewPrivateWindow") "new-private-window"))
               (install-file desktop-file applications)
               #t)))
         (add-after 'install-desktop-entry 'install-icons
           (lambda* (#:key outputs #:allow-other-keys)
             (let ((out (assoc-ref outputs "out")))
               (with-directory-excursion "browser/branding/official"
                 (for-each
                  (lambda (file)
                    ;; Derive the icon size from the digits in its file name.
                    (let* ((size (string-filter char-numeric? file))
                           (icons (string-append out "/share/icons/hicolor/"
                                                 size "x" size "/apps")))
                      (mkdir-p icons)
                      (copy-file file (string-append icons "/icecat.png"))))
                  '("default16.png" "default22.png" "default24.png"
                    "default32.png" "default48.png" "content/icon64.png"
                    "mozicon128.png" "default256.png"))
                 #t))))
         ;; This fixes the file chooser crash that happens with GTK 3.
         (add-after 'install 'wrap-program
           (lambda* (#:key inputs outputs #:allow-other-keys)
             (let* ((out (assoc-ref outputs "out"))
                    (lib (string-append out "/lib"))
                    (gtk (assoc-ref inputs "gtk+"))
                    (gtk-share (string-append gtk "/share"))
                    (mesa (assoc-ref inputs "mesa"))
                    (mesa-lib (string-append mesa "/lib"))
                    (pulseaudio (assoc-ref inputs "pulseaudio"))
                    (pulseaudio-lib (string-append pulseaudio "/lib"))
                    (libxscrnsaver (assoc-ref inputs "libxscrnsaver"))
                    (libxscrnsaver-lib (string-append libxscrnsaver "/lib"))
                    (mit-krb5 (assoc-ref inputs "mit-krb5"))
                    (mit-krb5-lib (string-append mit-krb5 "/lib")))
               (wrap-program (car (find-files lib "^icecat$"))
                 `("XDG_DATA_DIRS" prefix (,gtk-share))
                 ;; The following line is commented out because the icecat
                 ;; package on guix has been observed to be unstable when
                 ;; using wayland, and the bundled extensions stop working.
                 ;;   `("MOZ_ENABLE_WAYLAND" = ("1"))
                 `("LD_LIBRARY_PATH" prefix (,pulseaudio-lib
                                             ,mesa-lib
                                             ,libxscrnsaver-lib
                                             ,mit-krb5-lib)))
               #t))))))
    ;; NOTE(review): home-page URL restored from upstream Guix — verify.
    (home-page "https://www.gnu.org/software/gnuzilla/")
    (synopsis "Entirely free browser derived from Mozilla Firefox")
    (description
     "IceCat is the GNU version of the Firefox browser. It is entirely free
software, which does not recommend non-free plugins and addons. It also
features built-in privacy-protecting features.
WARNING: IceCat 78 has not yet been released by the upstream IceCat project.
This is a preview release, and does not currently meet the privacy-respecting
standards of the IceCat project.")
    (license license:mpl2.0)     ;and others, see toolkit/content/license.html
    (properties
     `((ftp-directory . "/gnu/gnuzilla")
       (cpe-name . "firefox_esr")
       (cpe-version . ,(first (string-split version #\-)))))))
;; Update this together with icecat!
;; NOTE(review): a '%icedove-build-id' definition appears to be missing here
;; (it is referenced below via MOZ_BUILD_DATE); it must be of the form
;; YYYYMMDDhhmmss — TODO restore it.
;; Icedove: rebranded Thunderbird built on top of the IceCat sources plus
;; the "comm" directory fetched from Mozilla's comm-esr78 repository.
(define-public icedove
  (package
    (name "icedove")
    (version "78.11.0")
    (source icecat-source)
    (properties
     `((cpe-name . "thunderbird_esr")))
    (build-system gnu-build-system)
    (arguments
     `(#:tests? #f                      ; no check target
       #:imported-modules ,%cargo-utils-modules ;for `generate-all-checksums'
       #:modules ((guix build utils)    ;find-files
                  (sxml simple)
                  (ice-9 regex)
                  ,@%gnu-build-system-modules)
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'prepare-thunderbird-sources
           (lambda* (#:key inputs #:allow-other-keys)
             ;; Graft the Thunderbird "comm" tree onto the IceCat sources
             ;; and drop pre-built/stamp files.
             (mkdir "comm")
             (copy-recursively (assoc-ref inputs "thunderbird-sources")
                               "comm")
             (delete-file-recursively "obj-x86_64-pc-linux-gnu")
             (delete-file "sourcestamp.txt")
             #t))
         (add-after 'patch-source-shebangs 'patch-cargo-checksums
           (lambda _
             (use-modules (guix build cargo-utils))
             ;; SHA-256 of the empty string.
             (let ((null-hash "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"))
               (for-each (lambda (file)
                           (format #t "patching checksums in ~a~%" file)
                           (substitute* file
                             (("^checksum = \".*\"")
                              (string-append "checksum = \"" null-hash "\""))))
                         (find-files "." "Cargo.lock$"))
               (for-each generate-all-checksums
                         '("third_party/rust"
                           "toolkit/library/rust")))
             #t))
         ;; Fixes issue where each installation directory generates its own
         ;; profile.  See e.g. the upstream discussion (URL lost in
         ;; extraction — TODO restore).
         (add-after 'patch-source-shebangs 'fix-profile-setting
           (lambda _
             (substitute* "comm/mail/moz.configure"
               (("'MOZ_DEDICATED_PROFILES', True")
                "'MOZ_DEDICATED_PROFILES', False"))
             #t))
         (add-after 'prepare-thunderbird-sources 'rename-to-icedove
           (lambda _
             (substitute* "comm/mail/confvars.sh"
               (("MOZ_APP_BASENAME=Thunderbird")
                "MOZ_APP_BASENAME=Icedove\nMOZ_APP_DISPLAYNAME=Icedove")
               (("MOZ_APP_NAME=thunderbird")
                "MOZ_APP_NAME=icedove")
               (("MOZ_UPDATER=1")
                "MOZ_UPDATER=0"))
             ;; Remove branding to comply with Mozilla's trademark policy
             (with-directory-excursion "comm/mail/branding/nightly"
               (delete-file "content/about-wordmark.svg")
               (call-with-output-file "content/about-wordmark.svg"
                 (lambda (port)
                   ;; NOTE(review): xmlns URI restored (standard SVG
                   ;; namespace) — verify.
                   (sxml->xml '(svg (@ (xmlns "http://www.w3.org/2000/svg")
                                       (viewBox "0 0 789.1 90.78")
                                       (width "333")
                                       (height "48")
                                       (fill "#fff"))
                                    (text (@ (x "400") (y "70")
                                             (text-anchor "middle")
                                             (font-size "90"))
                                          "Icedove Daily"))
                              port)))
               (substitute* '("locales/en-US/brand.properties"
                              "locales/en-US/brand.ftl"
                              "locales/en-US/brand.dtd"
                              "configure.sh")
                 (("Thunderbird") "Icedove")
                 (("mozilla.org") "guix.gnu.org")))
             ;; Remove other mentions of Thunderbird in user-visible text.
             (with-directory-excursion "comm/mail/base/content"
               (substitute* '("overrides/app-license-name.html")
                 (("Thunderbird") "Icedove")))
             (with-directory-excursion "comm/mail/components/"
               (substitute* '("MailGlue.jsm"
                              "extensions/schemas/addressBook.json"
                              "extensions/schemas/tabs.json"
                              "extensions/schemas/cloudFile.json"
                              "extensions/schemas/chrome_settings_overrides.json"
                              "extensions/schemas/windows.json"
                              "extensions/parent/ext-mail.js"
                              "im/messages/mail/Info.plist"
                              "enterprisepolicies/moz.build"
                              "enterprisepolicies/helpers/moz.build"
                              "enterprisepolicies/schemas/moz.build")
                 (("Thunderbird") "Icedove")))
             (substitute* '("comm/mailnews/base/prefs/content/accountUtils.js"
                            "comm/common/src/customizeToolbar.js")
               (("AppConstants.MOZ_APP_NAME (.)= \"thunderbird" _ e)
                (format #f "AppConstants.MOZ_APP_NAME ~a= \"icedove" e)))
             ;; Override addon URLs and settings
             (substitute* "comm/mail/app/profile/all-thunderbird.js"
               (("(pref\\(\"extensions.webservice.discoverURL\").*" _ m)
                (string-append m ", \"\");"))
               (("(pref\\(\"extensions.getAddons.search.url\").*" _ m)
                (string-append m ", \"\");"))
               (("(pref\\(\"extensions.update.enabled\").*" _ m)
                (string-append m ", false);"))
               (("(pref\\(\"extensions.systemAddon.update.enabled\").*" _ m)
                (string-append m ", false);"))
               (("(pref\\(\"lightweightThemes.update.enabled\").*" _ m)
                (string-append m ", false);")))
             #t))
         (add-after 'build 'neutralize-store-references
           (lambda _
             ;; Mangle the store references to compilers & other build tools in
             ;; about:buildconfig, reducing Icedove's closure significantly.
             ;; The resulting files are saved in lib/thunderbird/omni.ja
             (substitute*
                 ;; Use find because the path "obj-x86_64-pc-linux-gnu" contains
                 ;; the architecture and the system -> more complicated.
                 (find-files "." "buildconfig.html")
               (((format #f "(~a/)([0-9a-df-np-sv-z]{32})"
                         (regexp-quote (%store-directory)))
                 _ store hash)
                (string-append store
                               (string-take hash 8)
                               "<!-- Guix: not a runtime dependency -->"
                               (string-drop hash 8))))
             #t))
         (replace 'configure
           (lambda* (#:key inputs outputs configure-flags #:allow-other-keys)
             (let* ((out (assoc-ref outputs "out"))
                    (bash (which "bash"))
                    (abs-srcdir (getcwd))
                    (srcdir (string-append "../" (basename abs-srcdir)))
                    (flags `(,(string-append "--prefix=" out)
                             ,@configure-flags))
                    (mozconfig (string-append (getcwd) "/.mozconfig")))
               (setenv "SHELL" bash)
               (setenv "AUTOCONF"
                       (string-append (assoc-ref %build-inputs
                                                 "autoconf")
                                      "/bin/autoconf"))
               (setenv "CONFIG_SHELL" bash)
               (setenv "QA_CONFIGURE_OPTIONS" ".*")
               (setenv "MOZBUILD_STATE_PATH"
                       (string-append (getcwd) "/mach_state"))
               (setenv "MOZCONFIG"
                       (string-append (getcwd) "/.mozconfig"))
               (setenv "CC" "gcc")
               (setenv "MOZ_NOSPAM" "1")
               (setenv "PYTHON"
                       (string-append (assoc-ref inputs "python2")
                                      "/bin/python"))
               (setenv "MOZ_BUILD_DATE" ,%icedove-build-id) ; avoid timestamp
               (setenv "LDFLAGS" (string-append "-Wl,-rpath="
                                                (assoc-ref outputs "out")
                                                "/lib/icedove"))
               (mkdir-p (string-append (getcwd) "/builddir"))
               ;; Drive the build through a generated .mozconfig rather than
               ;; passing flags to ./configure directly.
               (with-output-to-file mozconfig
                 (lambda ()
                   (display
                    (string-append
                     "ac_add_options --disable-crashreporter\n"
                     "ac_add_options --disable-debug\n"
                     "ac_add_options --disable-debug-symbols\n"
                     "ac_add_options --disable-elf-hack\n"
                     "ac_add_options --disable-jit\n"
                     "ac_add_options --disable-necko-wifi\n"
                     "ac_add_options --disable-official-branding\n"
                     "ac_add_options --disable-tests\n"
                     "ac_add_options --disable-updater\n"
                     "ac_add_options --disable-webrtc\n"
                     "ac_add_options --enable-application=comm/mail\n"
                     "ac_add_options --enable-calendar\n"
                     "ac_add_options --enable-default-toolkit=\"cairo-gtk3\"\n"
                     "ac_add_options --enable-optimize\n"
                     "ac_add_options --enable-pulseaudio\n"
                     "ac_add_options --enable-release\n"
                     "ac_add_options --enable-strip\n"
                     "ac_add_options --enable-system-ffi\n"
                     "ac_add_options --enable-system-pixman\n"
                     "ac_add_options --prefix=" out "\n"
                     "ac_add_options --with-clang-path=" (assoc-ref %build-inputs "clang") "/bin/clang\n"
                     "ac_add_options --with-distribution-id=org.gnu\n"
                     "ac_add_options --with-libclang-path=" (assoc-ref %build-inputs "clang") "/lib\n"
                     "ac_add_options --with-system-bz2\n"
                     "ac_add_options --with-system-icu\n"
                     "ac_add_options --with-system-jpeg\n"
                     "ac_add_options --with-system-libevent\n"
                     "ac_add_options --with-system-nspr\n"
                     "ac_add_options --with-system-nss\n"
                     "ac_add_options --with-system-zlib\n"
                     "ac_add_options --with-user-appdir=\\.icedove\n"
                     "mk_add_options MOZ_MAKE_FLAGS=-j"
                     (number->string (parallel-job-count)) "\n"))))
               (display (getcwd))
               (newline)
               (display "mach configure")
               (invoke "./mach" "configure"))))
         (replace 'build
           (lambda _ (invoke "./mach" "build")))
         (replace 'install
           (lambda _ (invoke "./mach" "install")))
         ;; Thunderbird doesn't provide any .desktop file.
         ;; See upstream bug report (URL lost in extraction — TODO restore).
         (add-after 'install 'install-desktop-file
           (lambda* (#:key outputs #:allow-other-keys)
             (let* ((out (assoc-ref outputs "out"))
                    (apps (string-append out "/share/applications")))
               (mkdir-p apps)
               (with-output-to-file (string-append apps "/icedove.desktop")
                 (lambda _
                   (format #t
                           "[Desktop Entry]~@
                           Name=Icedove~@
                           Exec=~a/bin/icedove~@
                           Icon=icedove~@
                           GenericName=Mail/News Client~@
                           Categories=Network;Email;~@
                           Terminal=false~@
                           StartupNotify=true~@
                           MimeType=x-scheme-handler/mailto;~@
                           Type=Application~@
                           Actions=ComposeMessage;~@
                           [Desktop Action ComposeMessage]~@
                           Name=Write new message~@
                           Exec=~@*~a/bin/icedove -compose~%"
                           out))))
             #t))
         (add-after 'install 'wrap-program
           (lambda* (#:key inputs outputs #:allow-other-keys)
             (let* ((out (assoc-ref outputs "out"))
                    (lib (string-append out "/lib"))
                    (gtk (assoc-ref inputs "gtk+"))
                    (gtk-share (string-append gtk "/share"))
                    (pulseaudio (assoc-ref inputs "pulseaudio"))
                    (pulseaudio-lib (string-append pulseaudio "/lib"))
                    (eudev (assoc-ref inputs "eudev"))
                    (eudev-lib (string-append eudev "/lib")))
               (wrap-program (car (find-files lib "^icedove$"))
                 `("XDG_DATA_DIRS" prefix (,gtk-share))
                 `("LD_LIBRARY_PATH" prefix (,pulseaudio-lib ,eudev-lib)))
               #t))))))
    (inputs
     `(("bzip2" ,bzip2)
       ("cairo" ,cairo)
       ("cups" ,cups)
       ("dbus-glib" ,dbus-glib)
       ("ffmpeg" ,ffmpeg)
       ("freetype" ,freetype)
       ("gdk-pixbuf" ,gdk-pixbuf)
       ("glib" ,glib)
       ("gtk+" ,gtk+)
       ("gtk+-2" ,gtk+-2)
       ("hunspell" ,hunspell)
       ("icu4c" ,icu4c-67)
       ("libcanberra" ,libcanberra)
       ("libevent" ,libevent)
       ("libffi" ,libffi)
       ("libgnome" ,libgnome)
       ("libjpeg-turbo" ,libjpeg-turbo)
       ("libpng-apng" ,libpng-apng)
       ("libvpx" ,libvpx)
       ("libxcomposite" ,libxcomposite)
       ("libxft" ,libxft)
       ("libxinerama" ,libxinerama)
       ("libxscrnsaver" ,libxscrnsaver)
       ("libxt" ,libxt)
       ("mesa" ,mesa)
       ("mit-krb5" ,mit-krb5)
       ("nspr" ,nspr)
       ("nss" ,nss)
       ("pango" ,pango)
       ("pixman" ,pixman)
       ("pulseaudio" ,pulseaudio)
       ("sqlite" ,sqlite)
       ("startup-notification" ,startup-notification)
       ("eudev" ,eudev)
       ("unzip" ,unzip)
       ("zip" ,zip)
       ("zlib" ,zlib)))
    (native-inputs
     `(("thunderbird-sources"
        ;; The changeset identifier is taken from the file "sourcestamp.txt"
        ;; in the Thunderbird release tarball.  We don't use the release
        ;; tarball because it duplicates the Icecat sources and only adds the
        ;; "comm" directory, which is provided by this repository.
        ,(let ((changeset "1717d8d5fbd359aab7a4a0a15f4d15c72a7e6afc"))
           (origin
             (method hg-fetch)
             (uri (hg-reference
                   ;; NOTE(review): URL restored from upstream Guix — verify.
                   (url "https://hg.mozilla.org/releases/comm-esr78")
                   (changeset changeset)))
             (file-name (string-append "thunderbird-" version "-checkout"))
             (sha256
              (base32
               "10l042dd7b8rvla0cbiks5kjrz2b28yy7hr8sr169wlx202hxa01")))))
       ("autoconf" ,autoconf-2.13)
       ("cargo" ,rust-1.41 "cargo")
       ("clang" ,clang)
       ("llvm" ,llvm)
       ("nasm" ,nasm)
       ("node" ,node)
       ("perl" ,perl)
       ("pkg-config" ,pkg-config)
       ("python" ,python)
       ("python2" ,python-2.7)
       ("rust" ,rust-1.41)
       ("rust-cbindgen" ,rust-cbindgen-0.14)
       ("which" ,which)
       ("yasm" ,yasm)))
    ;; NOTE(review): home-page URL restored — verify.
    (home-page "https://www.thunderbird.net")
    (synopsis "Rebranded Mozilla Thunderbird email client")
    (description
     "This package provides an email client built based on Mozilla
Thunderbird. It supports email, news feeds, chat, calendar and contacts.")
    (license license:mpl2.0)))
;; A thin wrapper package that launches Icedove with MOZ_ENABLE_WAYLAND=1.
;; It only generates a shell script and copies/patches the desktop entry,
;; hence the trivial build system.
(define-public icedove/wayland
  (package
    (inherit icedove)
    (name "icedove-wayland")
    (native-inputs '())
    (inputs
     `(("bash" ,bash-minimal)
       ("icedove" ,icedove)))
    (build-system trivial-build-system)
    (arguments
     '(#:modules ((guix build utils))
       #:builder
       (begin
         (use-modules (guix build utils))
         (let* ((bash (assoc-ref %build-inputs "bash"))
                (icedove (assoc-ref %build-inputs "icedove"))
                (out (assoc-ref %outputs "out"))
                (exe (string-append out "/bin/icedove")))
           (mkdir-p (dirname exe))
           ;; Write the wrapper script that sets the Wayland flag and then
           ;; execs the real icedove binary.
           ;; NOTE(review): "$@" is unquoted in the generated script, so
           ;; arguments containing spaces would be re-split — confirm intent.
           (call-with-output-file exe
             (lambda (port)
               (format port "#!~a
MOZ_ENABLE_WAYLAND=1 exec ~a $@"
                       (string-append bash "/bin/bash")
                       (string-append icedove "/bin/icedove"))))
           (chmod exe #o555)
           ;; Provide the manual and .desktop file.
           (copy-recursively (string-append icedove "/share")
                             (string-append out "/share"))
           ;; Point the desktop entry at this wrapper instead of the
           ;; underlying icedove store item.
           (substitute* (string-append
                         out "/share/applications/icedove.desktop")
             ((icedove) out))
           #t))))))
;; Stand-alone Python script packaged with the trivial build system: the
;; script is copied, its interpreter and libnss3.so paths are hard-coded,
;; and it is installed into $out/bin.
(define-public firefox-decrypt
  (package
    (name "firefox-decrypt")
    (version "0.7.0")
    (source (origin
              (method git-fetch)
              (uri (git-reference
                    ;; NOTE(review): URL restored from the project's known
                    ;; GitHub location — verify.
                    (url "https://github.com/unode/firefox_decrypt")
                    (commit version)))
              (file-name (git-file-name name version))
              (sha256
               (base32
                "17yyyxp47z4m8hnflcq34rc1y871515kr3f1y42j1l0yx3g0il07"))))
    (build-system trivial-build-system)
    (inputs
     `(("nss" ,nss)
       ("python" ,python)))
    (arguments
     `(#:modules ((guix build utils))
       #:builder
       (begin
         (use-modules (guix build utils))
         ;; Only Python is needed on PATH (for 'which' below).
         (setenv "PATH"
                 (string-append
                  (assoc-ref %build-inputs "python") "/bin"))
         (copy-file (string-append (assoc-ref %build-inputs "source")
                                   "/firefox_decrypt.py")
                    "firefox_decrypt.py")
         ;; Hard-code the interpreter and the NSS library location so the
         ;; installed script is self-contained.
         (substitute* "firefox_decrypt.py"
           (("/usr/bin/env python") (which "python3"))
           (("libnss3.so") (string-append (assoc-ref %build-inputs "nss")
                                          "/lib/nss/libnss3.so")))
         (install-file "firefox_decrypt.py" (string-append %output "/bin"))
         #t)))
    ;; NOTE(review): home-page URL restored — verify.
    (home-page "https://github.com/unode/firefox_decrypt/")
    (synopsis "Tool to extract passwords from Mozilla profiles")
    (description "Firefox Decrypt is a tool to extract passwords from
Mozilla (Firefox, Waterfox, Thunderbird, SeaMonkey) profiles.")
    (license license:gpl3+)))
| null | https://raw.githubusercontent.com/dongcarl/guix/d2b30db788f1743f9f8738cb1de977b77748567f/gnu/packages/gnuzilla.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
Fix incompatibility with Perl 5.22+.
XXX: parallel build fails, lacking:
This test times out on slower hardware.
and others for some files
Fix incompatibility with Perl 5.22+.
See for
See .
The headers are symlinks to files that are in /tmp, so they
end up broken. Copy them instead.
Remove bundled libraries.
XXX: parallel build fails, lacking:
See .
Intl API requires bundled ICU.
No releases yet at </>.
While we could take a snapshot of the complete mozilla-esr52 repository at
</#/jobs?repo=mozilla-esr52&filter-searchStr=sm-tc>,
depends on repository metadata
Intl API requires bundled ICU.
Without this gnome-shell will crash at runtime.
The build does not fail without this patch, but the
See
The configure script does not accept environment variables
as arguments.
No releases yet at </>.
While we could take a snapshot of the complete mozilla-esr60 repository at
</#/jobs?repo=mozilla-esr60&filter-searchStr=sm-tc>,
FIXME: all tests pass, but then the check phase fails anyway.
configure phase. With jemalloc only the standalone mozjs console
will work.
The configure script does not accept environment variables as
arguments. It also must be run from a different directory,
but not the root directory either.
This test assumes that /bin exists and contains certain
executables.
for `generate-all-checksums'
Disable debugging symbols to save space.
configure phase. With jemalloc only the standalone mozjs console
will work.
The configure script does not accept environment variables as
arguments. It also must be run from a different directory,
but not the root directory either.
Guix is about to switch to ICU 68, massage the tests to
work with that instead of patching ICU. Try removing this
These tests look up locale names and expects to get
XXX: Some localized time formats have changed, and
substitution fails for accented characters, even though
it works in the REPL(?). Just delete these for now.
Similarly, these get an unexpected "A" suffix when looking
tricky to substitute.
This file compares a generated list of ICU locale names
with actual lookups. Some have changed slightly, i.e.
FIXME: Why does the killed process have an exit status?
XXX: Delete all tests that test time zone functionality,
because the test suite uses /etc/localtime to figure out
the offset from the hardware clock, which does not work
in the build container. See <tests/non262/Date/shell.js>.
Do not run tests marked as "random".
Exclude web platform tests.
Respect the daemons configured number of jobs.
sha256 changeset locale
---------------------------------------------------------------------------
nothing to graft
maybe older than base-version
upstream-icecat-base-version
in 'assume-valid-file-name' to avoid 'local-file' warnings.
We copy the gnuzilla source directory because it is
read-only in 'gnuzilla-source', and the makeicecat script
uses "cp -a" to copy parts of it and assumes that the
copies will be writable.
The makeicecat script cannot be expected to work
no errors occur during execution.
length of hash
Avoid non-determinism in the archive. We set the
archives.
wants theora-1.2 , not yet released
See <>
and related comments in the 'remove-bundled-libraries' phase.
and therefore we prefer to leave them out of 'source', which should be
(Bug fixes and security fixes, however, should go in 'source').
("icecat-avoid-bundled-libraries.patch"
,(search-patch "icecat-avoid-bundled-libraries.patch"))
("icecat-use-system-graphite2+harfbuzz.patch"
,(search-patch "icecat-use-system-graphite2+harfbuzz.patch"))
("icecat-use-system-media-libs.patch"
,(search-patch "icecat-use-system-media-libs.patch"))
no check target
must be built outside of the source directory
Do not require addons in the global app or
Building with debugging symbols takes ~5GiB, so
disable it.
CSS engine. We must specify the clang paths
system looks in the directories returned by
clang is not found.
Hack to work around missing
"unofficial" branding in icecat.
Avoid bundled libraries.
must be libjpeg-turbo
wants theora-1.2 , not yet released
See <>
and related comments in the
'remove-bundled-libraries' phase below.
Fails with "--with-system-png won't work because
the system's libpng doesn't have APNG support".
According to
-apng/ ,
we probably do not wish to support it.
"--with-system-png"
for `generate-all-checksums'
Remove bundled libraries that we don't use, since they may
contain unpatched security flaws, they waste disk space and
memory, and may cause confusion.
* The version number is taken from the bundled copy.
FIXME: A script from the bundled nspr is used.
"nsprpub"
broke certificate validation. See
<>. For now, we use
and try to unbundle these libraries again.
TODO: Use more system media libraries. See:
<>
* soundtouch: avoiding the bundled library would
result in some loss of functionality. There's
also an issue with exception handling
configuration. It seems that this is needed in
* libopus
needed for now , because media / libjpeg / is referenced from config / external /
wants theora-1.2 , not yet released
libxul.so dynamically opens libraries, so here we explicitly
link them into libxul.so instead.
TODO: It might be preferable to patch in absolute file names in
calls to dlopen or PR_LoadLibrary, but that didn't seem to
work. More investigation is needed.
Arrange to load libavcodec.so by its absolute file name.
Populate the sandbox read-path whitelist as needed by ffmpeg.
'configure' must be newer than 'old-configure.in', or else the
build system will raise an alarm and abort.
set environment variables instead
must be autoconf-2.13
avoid timestamp
The build system often spuriously fails. See
<>. To
work around this, we try the standard 'build' phase up
Mangle the store references to compilers & other build tools in
Install the '.desktop' file.
The following line is commented out because the icecat
and others, see toolkit/content/license.html
Update this together with icecat!
no check target
for `generate-all-checksums'
find-files
Fixes issue where each installation directory generates its own profile.
Override addon URLs and settings
Mangle the store references to compilers & other build tools in
The resulting files are saved in lib/thunderbird/omni.ja
Use find because the path "obj-x86_64-pc-linux-gnu" contains
the architecture and the system -> more complicated.
avoid timestamp
See
~@
~@
~@
"comm" directory, which is provided by this repository.
Provide the manual and .desktop file. | Copyright © 2013 , 2015 < >
Copyright © 2013 , 2014 , 2015 , 2016 , 2017 , 2018 , 2019 , 2020 < >
Copyright © 2014 , 2015 , 2016 , 2017 , 2018 , 2019 , 2020 , 2021 < >
Copyright © 2015 < >
Copyright © 2016 , 2017 , 2018 , 2019 , 2021 < >
Copyright © 2016 < >
Copyright © 2017 Clément < >
Copyright © 2017 , 2018 Nikita < >
Copyright © 2017 , 2018 , 2020 < >
Copyright © 2018 , 2020 < >
Copyright © 2019 < >
Copyright © 2020 < >
Copyright © 2020 < >
Copyright © 2019 , 2020 < >
Copyright © 2020 < >
Copyright © 2020 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages gnuzilla)
#:use-module ((srfi srfi-1) #:hide (zip))
#:use-module (ice-9 match)
#:use-module (gnu packages)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix git-download)
#:use-module (guix hg-download)
#:use-module (guix gexp)
#:use-module (guix store)
#:use-module (guix monads)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (guix build-system cargo)
#:use-module (guix build-system trivial)
#:use-module (gnu packages admin)
#:use-module (gnu packages audio)
#:use-module (gnu packages autotools)
#:use-module (gnu packages base)
#:use-module (gnu packages bash)
#:use-module (gnu packages databases)
#:use-module (gnu packages glib)
#:use-module (gnu packages gtk)
#:use-module (gnu packages gnome)
#:use-module (gnu packages libcanberra)
#:use-module (gnu packages cups)
#:use-module (gnu packages kerberos)
#:use-module (gnu packages linux)
#:use-module (gnu packages perl)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages compression)
#:use-module (gnu packages fontutils)
#:use-module (gnu packages libevent)
for hunspell
#:use-module (gnu packages image)
#:use-module (gnu packages libffi)
#:use-module (gnu packages pulseaudio)
#:use-module (gnu packages python)
#:use-module (gnu packages python-xyz)
#:use-module (gnu packages node)
#:use-module (gnu packages xorg)
#:use-module (gnu packages gl)
#:use-module (gnu packages assembly)
#:use-module (gnu packages rust)
#:use-module (gnu packages rust-apps)
#:use-module (gnu packages llvm)
#:use-module (gnu packages nss)
#:use-module (gnu packages icu4c)
#:use-module (gnu packages video)
#:use-module (gnu packages xiph)
#:use-module (gnu packages xdisorg)
#:use-module (gnu packages readline)
#:use-module (gnu packages sqlite))
(define-public mozjs
(package
(name "mozjs")
(version "17.0.0")
(source (origin
(method url-fetch)
(uri (string-append
"/"
name version ".tar.gz"))
(sha256
(base32
"1fig2wf4f10v43mqx67y68z6h77sy900d1w0pz9qarrqx57rc7ij"))
(patches (search-patches "mozjs17-aarch64-support.patch"))
(modules '((guix build utils)))
(snippet
'(begin
(substitute* '("js/src/config/milestone.pl")
(("defined\\(@TEMPLATE_FILE)") "@TEMPLATE_FILE"))
#t))))
(build-system gnu-build-system)
(native-inputs
`(("perl" ,perl)
("pkg-config" ,pkg-config)
("python" ,python-2)))
(propagated-inputs
in the Requires.private field of
(inputs
`(("zlib" ,zlib)))
(arguments
-p " system_wrapper_js/ "
#:parallel-build? #f
#:make-flags '("CXXFLAGS=-fpermissive")
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'delete-timedout-test
(lambda _
(delete-file "js/src/jit-test/tests/basic/bug698584.js")
#t))
(add-before 'configure 'chdir
(lambda _
(chdir "js/src")
#t))
(replace 'configure
configure fails if it is followed by SHELL and CONFIG_SHELL
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(setenv "SHELL" (which "sh"))
(setenv "CONFIG_SHELL" (which "sh"))
(invoke "./configure" (string-append "--prefix=" out)
,@(if (string=? "aarch64-linux"
(%current-system))
'("--host=aarch64-unknown-linux-gnu")
'()))))))))
(home-page
"-US/docs/Mozilla/Projects/SpiderMonkey")
(synopsis "Mozilla javascript engine")
(description "SpiderMonkey is Mozilla's JavaScript engine written
in C/C++.")
(define-public mozjs-24
(package (inherit mozjs)
(name "mozjs")
(version "24.2.0")
(source (origin
(method url-fetch)
(uri (string-append
"/"
name "-" version ".tar.bz2"))
(sha256
(base32
"1n1phk8r3l8icqrrap4czplnylawa0ddc2cc4cgdz46x3lrkybz6"))
(modules '((guix build utils)))
(patches (search-patches "mozjs24-aarch64-support.patch"))
(snippet
'(begin
(substitute* '("js/src/config/milestone.pl")
(("defined\\(@TEMPLATE_FILE)") "@TEMPLATE_FILE"))
#t))))
(arguments
(substitute-keyword-arguments (package-arguments mozjs)
((#:phases phases)
`(modify-phases ,phases
(replace 'configure
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
configure fails if it is followed by SHELL and CONFIG_SHELL
(setenv "SHELL" (which "sh"))
(setenv "CONFIG_SHELL" (which "sh"))
(invoke "./configure"
(string-append "--prefix=" out)
"--with-system-nspr"
"--enable-system-ffi"
"--enable-threadsafe"
,@(if (string=? "aarch64-linux"
(%current-system))
'("--host=aarch64-unknown-linux-gnu")
'())))))))))
(inputs
`(("libffi" ,libffi)
("zlib" ,zlib)))))
(define-public mozjs-38
(package
(inherit mozjs)
(name "mozjs")
(version "38.2.1.rc0")
(source (origin
(method url-fetch)
(uri (string-append
"/"
name "-" version ".tar.bz2"))
(sha256
(base32
"0p4bmbpgkfsj54xschcny0a118jdrdgg0q29rwxigg3lh5slr681"))
(patches
(search-patches
GCC 6 compatibility .
for 0ad
"mozjs38-tracelogger.patch"
"mozjs38-pkg-config-version.patch"
"mozjs38-shell-version.patch"))
(modules '((guix build utils)))
(snippet
'(begin
Fix incompatibility with sed 4.4 .
(substitute* "js/src/configure"
(("\\^\\[:space:\\]") "^[[:space:]]"))
(substitute*
"python/mozbuild/mozbuild/backend/recursivemake.py"
(("\\['dist_include'\\].add_symlink")
"['dist_include'].add_copy"))
(for-each delete-file-recursively
'("intl"
"js/src/ctypes/libffi"
"js/src/ctypes/libffi-patches"
"modules/zlib"))
#t))))
(arguments
-p " system_wrapper_js/ "
#:parallel-build? #f
#:tests? #f
#:phases
(modify-phases %standard-phases
(replace 'configure
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(chdir "js/src")
(setenv "SHELL" (which "sh"))
(setenv "CONFIG_SHELL" (which "sh"))
(invoke "./configure"
(string-append "--prefix=" out)
"--enable-ctypes"
"--enable-gcgenerational"
"--enable-optimize"
"--enable-pie"
"--enable-readline"
"--enable-shared-js"
"--enable-system-ffi"
"--enable-threadsafe"
"--enable-xterm-updates"
"--with-system-icu"
"--with-system-nspr"
"--with-system-zlib"
"--without-intl-api")))))))
(native-inputs
`(("perl" ,perl)
("pkg-config" ,pkg-config)
("python-2" ,python-2)))
(inputs
`(("libffi" ,libffi)
("readline" ,readline)
("icu4c" ,icu4c)
("zlib" ,zlib)))))
(define-public mozjs-52
we take the Debian version instead , because it is easier to work with .
(let ((commit "6507e63cc416fd7a3269e390efe712f8b56f374a")
(revision "1"))
(package (inherit mozjs-38)
(version (git-version "52.0" revision commit))
(source (origin
(method git-fetch)
(uri (git-reference
(url "-team/mozjs52.git")
(commit commit)))
(file-name (git-file-name "mozjs" version))
(sha256
(base32
"1ny0s53r8wn4byys87h784xrq1xg767akmfm6gqrbvrz57mlm3q2"))))
(arguments
#:configure-flags
'("--enable-ctypes"
"--enable-optimize"
"--enable-pie"
"--enable-readline"
"--enable-shared-js"
"--enable-system-ffi"
"--with-system-icu"
"--with-system-nspr"
"--with-system-zlib"
"--without-intl-api"
"--disable-jemalloc")
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'patch-and-chdir
(lambda* (#:key inputs #:allow-other-keys)
This patch prevents a segfault when executing JS_Init ( ) .
configure phase of the gjs package would fail .
(make-file-writable "js/src/old-configure.in")
(make-file-writable "js/src/old-configure")
(make-file-writable "mozglue/build/moz.build")
(invoke "patch" "-p1" "--force"
"--input" "debian/patches/disable-mozglue.patch")
(invoke "touch" "js/src/configure")
(chdir "js/src")
#t))
(replace 'configure
(lambda* (#:key inputs outputs configure-flags #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(setenv "SHELL" (which "sh"))
(setenv "CONFIG_SHELL" (which "sh"))
(setenv "AUTOCONF" (string-append (assoc-ref inputs "autoconf")
"/bin/autoconf"))
(apply invoke "./configure"
(cons (string-append "--prefix=" out)
configure-flags))))))))
(native-inputs
`(("autoconf" ,autoconf-2.13)
("automake" ,automake)
,@(package-native-inputs mozjs-38))))))
(define-public mozjs-60
we take the Debian version instead , because it is easier to work with .
(package
(inherit mozjs-38)
(version "60.2.3-2")
(source (origin
(method git-fetch)
(uri (git-reference
(url "-team/mozjs60.git")
(commit (string-append "debian/" version))))
(file-name (git-file-name "mozjs" version))
(sha256
(base32
"091w050rwzrdcbgyi934k2viyccmlqxrp13sm2mql71mabb5dai6"))))
(arguments
#:test-target "check-jstests"
#:configure-flags
'("--enable-ctypes"
"--enable-optimize"
"--enable-pie"
"--enable-readline"
"--enable-shared-js"
"--enable-system-ffi"
"--with-system-nspr"
"--with-system-zlib"
"--with-system-icu"
"--with-intl-api"
This is important because without it gjs will segfault during the
"--disable-jemalloc")
#:phases
(modify-phases %standard-phases
(replace 'configure
(lambda* (#:key inputs outputs configure-flags #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(mkdir "run-configure-from-here")
(chdir "run-configure-from-here")
(setenv "SHELL" (which "sh"))
(setenv "CONFIG_SHELL" (which "sh"))
(setenv "AUTOCONF" (string-append (assoc-ref inputs "autoconf")
"/bin/autoconf"))
(apply invoke "../js/src/configure"
(cons (string-append "--prefix=" out)
configure-flags))
#t)))
(add-after 'unpack 'disable-broken-tests
(lambda _
(delete-file "js/src/tests/shell/os.js")
#t)))))
(native-inputs
`(("autoconf" ,autoconf)
("automake" ,automake)
("which" ,which)
("perl" ,perl)
("pkg-config" ,pkg-config)
("python" ,python-2)))))
(define-public mozjs-78
(package
(inherit mozjs-60)
(version "78.5.0")
(source (origin
(method url-fetch)
TODO : Switch to IceCat source once available on ftp.gnu.org .
(uri (string-append ""
"/releases/" version "esr/source/firefox-"
version "esr.source.tar.xz"))
(sha256
(base32
"1442yjmwz69hkfcvh8kkb60jf4c9ms0pac04nc3xw2da13v4zxai"))))
(arguments
#:modules ((guix build cargo-utils)
,@%gnu-build-system-modules)
#:test-target "check-jstests"
#:configure-flags
"--disable-debug"
"--disable-debug-symbols"
This is important because without it gjs will segfault during the
"--disable-jemalloc"
"--enable-tests"
"--enable-hardening"
"--enable-optimize"
"--enable-release"
"--enable-rust-simd"
"--enable-readline"
"--enable-shared-js"
"--with-system-icu"
"--with-system-nspr"
"--with-system-zlib"
"--with-intl-api")
#:phases
(modify-phases %standard-phases
(add-after 'patch-source-shebangs 'patch-cargo-checksums
(lambda _
(let ((null-hash
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"))
(for-each (lambda (file)
(format #t "patching checksums in ~a~%" file)
(substitute* file
(("^checksum = \".*\"")
(string-append "checksum = \"" null-hash "\""))))
(find-files "." "Cargo\\.lock$"))
(for-each generate-all-checksums
'("js" "third_party/rust"))
#t)))
(replace 'configure
(lambda* (#:key inputs outputs configure-flags #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(mkdir "run-configure-from-here")
(chdir "run-configure-from-here")
(setenv "SHELL" (which "sh"))
(setenv "CONFIG_SHELL" (which "sh"))
(setenv "AUTOCONF" (string-append (assoc-ref inputs "autoconf")
"/bin/autoconf"))
(apply invoke "../js/src/configure"
(cons (string-append "--prefix=" out)
configure-flags))
#t)))
(add-after 'unpack 'adjust-for-icu-68
(lambda _
(with-directory-excursion "js/src/tests"
The test suite expects a lightly patched ICU 67 . Since
phase for newer versions of mozjs .
" " instead of " UK " .
(substitute* "non262/Intl/DisplayNames/language.js"
(("Traditionell, GB")
"Traditionell, UK"))
(substitute* "non262/Intl/DisplayNames/region.js"
(("\"GB\": \"GB\"")
"\"GB\": \"UK\""))
(delete-file "non262/Intl/Date/toLocaleString_timeZone.js")
(delete-file "non262/Intl/Date/toLocaleDateString_timeZone.js")
up a time in the " ar - MA - u - ca - islamicc " locale , which is
(delete-file "non262/Intl/DateTimeFormat/format_timeZone.js")
(delete-file "non262/Intl/DateTimeFormat/format.js")
daf - Latn - ZZ - > daf - Latn - CI , so drop it for simplicity .
(delete-file "non262/Intl/Locale/likely-subtags-generated.js"))
#t))
(add-before 'check 'pre-check
(lambda _
(with-directory-excursion "../js/src/tests"
(substitute* "shell/os.js"
((".*killed process should not have exitStatus.*")
""))
(delete-file-recursively "non262/Date")
(delete-file "non262/Intl/DateTimeFormat/tz-environment-variable.js")
(setenv "JSTESTS_EXTRA_ARGS"
(string-join
(list
"--exclude-random"
"--wpt=disabled"
(string-append "--worker-count="
(number->string (parallel-job-count)))))))
#t)))))
(native-inputs
`(("autoconf" ,autoconf-2.13)
("automake" ,automake)
for - objdump
("perl" ,perl)
("pkg-config" ,pkg-config)
("python" ,python-3)
("rust" ,rust)
("cargo" ,rust "cargo")))
(inputs
`(("icu4c" ,icu4c-68)
("readline" ,readline)
("zlib" ,zlib)))))
(define mozilla-compare-locales
(origin
(method hg-fetch)
(uri (hg-reference
(url "-locales/")
(changeset "RELEASE_8_0_0")))
(file-name "mozilla-compare-locales")
(sha256 (base32 "0052wq92sg4i776x407b5838jx9h3phl9xy69m2q34f31n3gdyk2"))))
(define (mozilla-locale locale changeset hash-string)
(origin
(method hg-fetch)
(uri (hg-reference
(url (string-append "-central/"
locale))
(changeset changeset)))
(file-name (string-append "mozilla-locale-" locale))
(sha256 (base32 hash-string))))
(define-syntax-rule (mozilla-locales (hash-string changeset locale) ...)
(list (mozilla-locale locale changeset hash-string)
...))
(define all-mozilla-locales
(mozilla-locales
("1q1p7nl97478hkf1msbbcdh98k6fn87xwqihbw8np00ll5gk9k4v" "97cf3c155484" "ach")
("0sajpblp639l448xywx7xlybjr5dm0rfrzx511a8pa9wn7dma4mf" "46929b4dda4e" "af")
("04s756aaf1yyli1dn1vfdjymgyjs1pz2n7jvz76j27x4nzdgss7l" "96ffd2b04ba3" "an")
("16nw7slm7irsnjf81r6zl273kbzri8sdbimzgzxwm0vh6xr61rgs" "1c4231166ddf" "ar")
("0i0qrj4j8zjv55cisbmr21dp8mz933hc9wkpn3abz5k5pn84gpry" "bc4384b8be78" "ast")
("1r77mmwg9x1jfa7g5lqa31rgyfrnix1zj80ibn5rq9gyhhsdrwbd" "dd56aead51fa" "az")
("1kzjpy5bnvps8di72ksynhbhc9bpw1ml6hvphm74z8dz55ai4c18" "9d2bff64ddfb" "be")
("1fygvjgph3siknfm7l0fh9y4ava72z1rxkip1zgmcdgq7jz8wzpf" "3808f4fe4db6" "bg")
("1x9nlqia441xh04rhkmkw6qrgpwnyqw0grrf5n2qw96939wnmgl7" "5ca8752ed8df" "bn")
("18yyxqzab44bgqx7h052d5nxyhv0l5bidgr16z1b3ak2crsfyx9q" "4c5d30ca9bf2" "br")
("11bschjz7pgsm4r1qan5l4s3nkzm5jb0kivpp186wd1xsjci5bjb" "cf2478a7eae1" "bs")
("12bak64nl6qi092l55xv330vh38mfsicrrf4wi693nn7zqb3mbpw" "4aa2cc349211" "ca")
("0p97d7pnxqs971rr5c57i6cw3mx1mp3iasa0xdmdk0zpz9pjd3s4" "806914072144" "ca-valencia")
("1rligf98h9r16mw3r3n5jalzi74xn2dnh6hkiixp7s2bvhjicwa4" "db2163383129" "cak")
("18y5j8ljh72mj2nbx0m64mi3fyjbwyx992i004sv3zvs4d4z18w4" "d1d09eedddde" "cs")
("12i4m9q6f8sl8arm8ja4gs4sl9m59p3kddlqi68srpz9mk66rqxz" "1caf58c64227" "cy")
("16wacsd23zd4j0yi6vbj033wylia8hlpswx949x5hy1h9817j4vn" "9ef3b56aa243" "da")
("1ddxnqpfi1wnciwmzkvvjinb51h22bg70r0dn7db17q64p4271rk" "a050b0eaec0a" "de")
("0gw5h52rw3bc2lwffnv845sjwwj22gmabchmpa1rw2y6087dc7zk" "613135cb0759" "dsb")
("0v17da37w1kbs73i6xql4c9xng6rfachyylpy3w8hpibpvi7i30n" "903db7c76b31" "el")
("0ky1nj4kp070nriw0igsa64l39plcv2365zl8lqpbydyp6clwc3m" "6bb74673e9aa" "en-CA")
("15jck6si2l4h5lrs8zjx56y97p70njpi26nnq6xfmvj1dk5qb4g2" "c06bd83a9295" "en-GB")
("1gc400n8j8qk70vdw7fkf4m9yviia35cmj6361pxnyvsjrgm4qvm" "ae0fda5114c4" "eo")
("0y01lahdrd3krsh6nrr3iw36r2x4advh5qw54vf92w11l3aiyvfz" "0c294dc9ce40" "es-AR")
("16jjbsfa62dxk36ccziv7nmh26c1s0b05960in6zs5nnfrv4yh35" "7c30b2981fb4" "es-CL")
("0qh8c8f8dsv1v56n1mcpn84l39bpnqfl8v8iacq12zh322srr549" "ad1444f4f833" "es-ES")
("1vh3hm5r4ch6mk3ymbk12b9rppwc75fmbi0i7cml82kxsi77bzw5" "0a26cdb23536" "es-MX")
("159l92jxx48lgba1g8ig3mdzhwsf3jpg3nx1g9blq4majk5hqr6q" "65a38a830795" "et")
("1ha8y1wbgb66dqm26x1q2xwygy7bblkjz40y9vh5d2qlpr3fn0av" "21e0930b221d" "eu")
("0rq4pcw6klm0bbljm1wdfvna8bpa35cm47hh2s63i2xdax4scahf" "5a4bb020cf09" "fa")
("1py2594gsvpgrxnplz278ffb7grsf384kzjskrl1zyps0jw8fb1x" "4a4f712cd4aa" "ff")
("1dyd55ngsglp1w2gh0yaacwb0vsq23gdvnj76f2x6g39h1li9s0z" "9c51cd915e2b" "fi")
("0kimwivpq6pr63jh1k9fszsv8bi8vns3scg76mmnvbhh2ca8q7wj" "4f9e24a696ee" "fr")
("1sbbnnp12lgy5qan2kix02942830b1969bd7jxrv7piwqfch9i06" "9e21a0eeb5b1" "fy-NL")
("0dsvvyfrzkx5h44gqgdci7arb8h4lq48w20cnr8fc7j17grvnkzz" "999a995bc09d" "ga-IE")
("1487msbsgrfzmyijhf6a4wbqdjpd7b7ki9nwrjjjjlnbw0h0ljpb" "6a9ddcab3240" "gd")
("1kzc4fwr18kgisdzba2acj1ag8mxbifqpk5p30jv68nmvqfsvl8d" "51eb5e352db9" "gl")
("13gy3wn44kcxr7j3sbl69fp415875f4vb0gm91hx0fysqlvryhcs" "b9de1ffe3224" "gn")
("0w5nvzpjn5vr35c1852rlff581vpy71nc096cz125852kyqkzkc3" "5b3307475ada" "gu-IN")
("1ycakc4qpy9vcy50j3ricryjfnjr9v3a5ijj6bbfl4y6aks157fy" "c742df968ffd" "he")
("1b2jf83c500wm5wcdnijq0b7y4m8n6271smq8pygahn5nq17f0gq" "1a3039a52b8a" "hi-IN")
("19bbw8ix5m83cf4yarcmjl7jqa8xfabwqnh3nj6vi52rwvn7whk5" "8dc50e269ef3" "hr")
("12rrsvgg6bb2h8fhni7jbx8pv983q8ym5fhfjim957n9q2yp5db6" "67e40f48dec7" "hsb")
("0apyh713p3hrlj8041xwblcssahbmsqp9v9hbmb50ayf4z850kr1" "40073a597b1b" "hu")
("0q0r076lq250d3vmnz9d92wj137c2v8i362c2avmkgp5zr3mcl0z" "2ea33335afdb" "hy-AM")
("0qza33gdc1i9259dwd2f7vd78s0a6rg34aqdkhcn7f2l6ybw6xd6" "930041db15eb" "ia")
("1211h0gp7gianh3qf76w04gfzk4n2bnyc9i8dviyz0vh4cjbx11m" "08811a49b41c" "id")
("12lcr841g1j7453s7gb51vrvxmshx4ha3h1jx4vh8wr891xv8l6a" "2f7a8d31e0ba" "is")
("1x585g0r2kcv0d3phnxx85bk5g0pi1yl0hwp4idv19yc9hslr04s" "188357cc04b4" "it")
("09v35g9v7j6x0p1hggydm3a1rmq2fh4z7g1l88z3w5k6wq2nhj1b" "45cee0ba4771" "ja")
("0prs3vycfvvaffjigdgyxiq41ak2rc34lnan5a6cwdqjgy7z450s" "d60a19d9bf17" "ja-JP-mac")
("1nskzm8rgczrbgcxlzzq5zqdfd456ad0cylq27nf0wjiyq6kjzcm" "00cb00e78672" "ka")
("0g6zznmhiam172nm7g2qzfpk415mna8kiihm73z2sdns64xb3ymg" "77a293a4bced" "kab")
("17dld9lrym7rpvpvnkssivp4wx1f11zpk86wczbq1h52qgd70p55" "2c9b33a56d5d" "kk")
("1nlzl8930c8ql3yq425wyqlxvq8arrjv20xpm5g7yfxd54av89ac" "9cddd42af05c" "km")
("07hkrcdksvrqk816yimd036dlw15nc4sjk4gmw16ywbp093v0mqq" "e0c2969a8398" "kn")
("08aqggvk3qbv5bzks9i1iba9akhkpm01d2c9k0zf41mpr2r5yfg2" "827567d0dafc" "ko")
("0vagaiwy80bs1k3gkacshlzb5zchkcshx0ypwirvayc63sw4yl8l" "694b2a24e868" "lij")
("1r43kp1kzahrbza0hiyavqplk9h08pzsb6rpjy79jr6l1iqb89sy" "d6728db7e060" "lt")
("0sq2wbsj79xl7fi454k6q5xdhxyck1whjz315rv37vphqpx86b9b" "61e9b33f4d94" "lv")
("0q8jxg1af22hs9wjdf0jd3bqk4rafxyzvsjl35k75am7l2y1fl3c" "9e482f6dd72c" "mk")
("1zsfzjrzbc58d30a9yz12h5vphywgpw8xg6y6zn3677a785dvr20" "1fd2763336a4" "mr")
("1rzygkkpn1a59daygd3hdaqph2np6sqvpgh68j0xr4il958ymnsm" "67ddab62dab4" "ms")
("16jp6w5gizfxs7jvncg3ly13m59vqvh4rlmjd0q23m5g5ff9sklc" "3ed015b51bf3" "my")
("1wfv023j67lb4iyf49fsknwm4z3xy0xqcf25b2nzanggxj26n01x" "d01801233a8f" "nb-NO")
("1946vfia58vbjfippb5pfsskbjj95w7hb340smn6ry2vmza99mxp" "582defb08fb2" "ne-NP")
("12w5ywh4c3s55y3zqc48cp1gcpwwjg444yfh1bghhhb9ni1xkh5i" "05f6359a29a6" "nl")
("17jb076320cgkw1ypwmws2vjxsqlv2ww8aaisa3j334vbrw1m4zx" "50b41a1ddded" "nn-NO")
("1y840j0v5zdgj94cbacy6j1snf44rynmzxq3yk8i26arcar62akl" "a6a138531a44" "oc")
("0jq1hq4xhqxpa26r8pb1bgbaljgfkhn9l6p5pbnslkllpbh70q6l" "e70a3afaef25" "pa-IN")
("1hih138skwy2gb8q10ngg6zalrk3aa3d549mg79gqzxbi5zy19fw" "e035f33389eb" "pl")
("1hhif4hx4k351wm1bzykzycfzb5q8msxmiwh5r1cy32rh8wkxwhh" "54098495f37f" "pt-BR")
("0gkjs12rxjml1m3mljskpz1gr6aph0c31nwpwdqybfg54w9qslib" "3fdf021f624e" "pt-PT")
("0anyvwd9v6mr8y3ww33s6qnxkawqn5lz65vrxx3m3kzky63ai1xk" "794f9374eb87" "rm")
("1p4drj25gsfv7lwgm5saazh38iqrh53952n8i4bmxpn0nadbm2n5" "71ce18bcf6cc" "ro")
("17yssf4axd3zvncl4ka4wkfnwcn0z0arp3390vb9cps67na29p36" "3a9587227699" "ru")
("0xk6rksspcw1222v4rgk5a6gzrpx64k29hm7p9qkqwd70s34yj46" "c020582a72ce" "si")
("1ax5ibydyn7sj208r66zcxlcr8dxdqrw28vqyjif4jx583rp4lfp" "745a699b7f51" "sk")
("13rin7hm1dv8g0hbcv8mp2hiwpk1k5bhzvkqpqajkkik4lx523mc" "8e437e2351ef" "sl")
("0yh5jkl5zw3f7x1w2w6zfj3dyvcl4wj1zv4di7qsq2nl2yyizf7x" "2d99e2eff94f" "son")
("0vzq7s27jsdbw5k59wfykysg1kd8w229ab5d4zjdf30l59igkahh" "69bbdf07bd80" "sq")
("1mwivvs8vrk6vjq6i33kwlrlisra7dy35521ayps9p2rz2dll4rr" "215df5c5125c" "sr")
("0g97yz1rg5cfflj8vvq3sqliyvm06x818z9yldfh5rjg1y6n9fjd" "8be00a1a50d4" "sv-SE")
("0ii02jn3lh2i6d0s95whx9aj6w3x8axc7w1rlzj0lc2s9n52krz3" "170a84339dbe" "ta")
("1ss7symad2crapxjqc0xhc0n17l79r5vf7flvkgk7npjky4vb7nv" "72a79a304f7f" "te")
("11iqmg8zamgscnvs4n2xpw3g9azn6w38qs313wiwm86pyx6694ss" "8e91ce3064c5" "th")
("1zgkvn9flb8by62ip9r3gmpgxwgkww1zhml5mwa0djq3ppfdgi1c" "0f914d0cda56" "tl")
("1filkhdak6dbrd83q602x1qw1703nlm53nm9gcyab8s16gsx6ylz" "62ca6a8eaeba" "tr")
("0cgagdy0ixprk3knczdmkqxkmx4ybmshhh0956kxbd0iab0dfcf6" "f110ccac4cde" "trs")
("1f1ghk67zwnwc5x3nk82vcv94nf8glngkfya1hg074q3088sj9pa" "56c0102d5f1c" "uk")
("0iyw1b2jjylkdwxv9sxvj4ikxl64sx612b2dvvmf1di8bw86w74r" "7d53bce5ae98" "ur")
("1q83cp5pfgs8l03zirwi8r5qp8qyh4zvxdx1ilgaqqlyg42yql7c" "9b500e1a054d" "uz")
("1d4nkybz2hk64ay04k965b9lc5nhhpmzcs5ww3b6q4n93rf9c2z7" "2a000025928a" "vi")
("1cnrsfnyl3sw3sxsggmjwydvphb2diy0vzknvxdhpnvq3ln18hga" "74724087c25b" "xh")
("1j6l66v1xw27z8w78mpsnmqgv8m277mf4r0hgqcrb4zx7xc2vqyy" "527e5e090608" "zh-CN")
("1frwx35klpyz3sdwrkz7945ivb2dwaawhhyfnz4092h9hn7rc4ky" "6cd366ad2947" "zh-TW")))
(define* (computed-origin-method gexp-promise hash-algo hash
#:optional (name "source")
#:key (system (%current-system))
(guile (default-guile)))
"Return a derivation that executes the G-expression that results
from forcing GEXP-PROMISE."
(mlet %store-monad ((guile (package->derivation guile system)))
(gexp->derivation (or name "computed-origin")
(force gexp-promise)
#:system system
#:guile-for-build guile)))
(define %icecat-version "78.12.0-guix0-preview1")
must be of the form YYYYMMDDhhmmss
' icecat - source ' is a " computed " origin that generates an IceCat tarball
from the corresponding upstream Firefox ESR tarball , using the ' makeicecat '
script from the upstream IceCat project .
(define icecat-source
(let* ((base-version (first (string-split %icecat-version #\-)))
(major-version (first (string-split base-version #\.)))
(minor-version (second (string-split base-version #\.)))
(sub-version (third (string-split base-version #\.)))
(upstream-firefox-version (string-append base-version "esr"))
(upstream-firefox-source
(origin
(method url-fetch)
(uri (string-append
"/"
upstream-firefox-version "/source/"
"firefox-" upstream-firefox-version ".source.tar.xz"))
(sha256
(base32
"043lplq5i4ax6nh4am3b2bm8dbn4rzzcji1zp0yy1pad4nwahmcb"))))
( gnuzilla - commit ( string - append " v " upstream - icecat - base - version ) )
(gnuzilla-commit "abfe5eebaca3c2787f1a9505669393674493c177")
(gnuzilla-source
(origin
(method git-fetch)
(uri (git-reference
(url "git")
(commit gnuzilla-commit)))
(file-name (git-file-name "gnuzilla"
(string-take gnuzilla-commit 8)))
(sha256
(base32
"00ws3540x5whpicc5fx4k949ff73cqvajz6jp13ahn49wqdads47"))))
' search - patch ' returns either a valid file name or # f , so wrap it
(gnuzilla-fixes-patch
(local-file (assume-valid-file-name
(search-patch "icecat-use-older-reveal-hidden-html.patch"))))
(makeicecat-patch
(local-file (assume-valid-file-name
(search-patch "icecat-makeicecat.patch")))))
(origin
(method computed-origin-method)
(file-name (string-append "icecat-" %icecat-version ".tar.xz"))
(sha256 #f)
(uri
(delay
(with-imported-modules '((guix build utils))
#~(begin
(use-modules (guix build utils))
(let ((firefox-dir
(string-append "firefox-" #$base-version))
(icecat-dir
(string-append "icecat-" #$%icecat-version)))
(mkdir "/tmp/bin")
(set-path-environment-variable
"PATH" '("bin")
(list "/tmp"
#+(canonical-package bash)
#+(canonical-package coreutils)
#+(canonical-package findutils)
#+(canonical-package patch)
#+(canonical-package xz)
#+(canonical-package sed)
#+(canonical-package grep)
#+(canonical-package bzip2)
#+(canonical-package gzip)
#+(canonical-package tar)
#+rename))
(symlink #+(file-append rename "/bin/rename")
"/tmp/bin/prename")
(copy-recursively #+gnuzilla-source "/tmp/gnuzilla"
#:log (%make-void-port "w"))
(with-directory-excursion "/tmp/gnuzilla"
(make-file-writable "makeicecat")
(invoke "patch" "--force" "--no-backup-if-mismatch"
"-p1" "--input" #+gnuzilla-fixes-patch)
(invoke "patch" "--force" "--no-backup-if-mismatch"
"-p1" "--input" #+makeicecat-patch)
(patch-shebang "makeicecat")
(substitute* "makeicecat"
(("^FFMAJOR=(.*)" all ffmajor)
(unless (string=? #$major-version
(string-trim-both ffmajor))
properly on a different version of Firefox , even if
(error "makeicecat major version mismatch"))
(string-append "FFMAJOR=" #$major-version "\n"))
(("^FFMINOR=.*")
(string-append "FFMINOR=" #$minor-version "\n"))
(("^FFSUB=.*")
(string-append "FFSUB=" #$sub-version "\n"))
(("^DATA=.*")
"DATA=/tmp/gnuzilla/data\n")
(("/bin/sed")
#+(file-append (canonical-package sed) "/bin/sed"))))
(format #t "Unpacking upstream firefox tarball...~%")
(force-output)
(invoke "tar" "xf" #+upstream-firefox-source)
(rename-file firefox-dir icecat-dir)
(with-directory-excursion icecat-dir
(format #t "Populating l10n directory...~%")
(force-output)
(mkdir "l10n")
(with-directory-excursion "l10n"
(for-each
(lambda (locale-dir)
(let ((locale
(string-drop (basename locale-dir)
(string-length "-mozilla-locale-")))))
(format #t " ~a~%" locale)
(force-output)
(copy-recursively locale-dir locale
#:log (%make-void-port "w"))
(for-each make-file-writable (find-files locale))
(with-directory-excursion locale
(when (file-exists? ".hgtags")
(delete-file ".hgtags"))
(mkdir-p "browser/chrome/browser/preferences")
(call-with-output-file
"browser/chrome/browser/preferences/advanced-scripts.dtd"
(lambda (port) #f)))))
'#+all-mozilla-locales)
(copy-recursively #+mozilla-compare-locales
"compare-locales"
#:log (%make-void-port "w"))
(delete-file "compare-locales/.gitignore")
(delete-file "compare-locales/.hgignore")
(delete-file "compare-locales/.hgtags"))
(format #t "Running makeicecat script...~%")
(force-output)
(invoke "bash" "/tmp/gnuzilla/makeicecat"))
(format #t "Packing IceCat source tarball...~%")
(force-output)
(invoke "tar" "cfa" #$output
mtime of files in the archive to early 1980 because
the build process fails if the of source
files is pre-1980 , due to the creation of zip
1980 - 01 - 02 UTC
"--owner=root:0"
"--group=root:0"
"--sort=name"
icecat-dir)
#t))))))))
(define-public icecat
(package
(name "icecat")
(version %icecat-version)
(source icecat-source)
(build-system gnu-build-system)
(inputs
`(("alsa-lib" ,alsa-lib)
("bzip2" ,bzip2)
("cups" ,cups)
("dbus-glib" ,dbus-glib)
("gdk-pixbuf" ,gdk-pixbuf)
("glib" ,glib)
("gtk+" ,gtk+)
("gtk+-2" ,gtk+-2)
UNBUNDLE - ME ! ( " graphite2 " , )
("pango" ,pango)
("freetype" ,freetype)
UNBUNDLE - ME ! ( " harfbuzz " , harfbuzz )
("libcanberra" ,libcanberra)
("libgnome" ,libgnome)
("libjpeg-turbo" ,libjpeg-turbo)
UNBUNDLE - ME ! ( " libogg " , libogg )
UNBUNDLE - ME ! ( " libvorbis " , libvorbis )
("libxft" ,libxft)
("libevent" ,libevent)
("libxinerama" ,libxinerama)
("libxscrnsaver" ,libxscrnsaver)
("libxcomposite" ,libxcomposite)
("libxt" ,libxt)
("libffi" ,libffi)
("ffmpeg" ,ffmpeg)
UNBUNDLE - ME ! ( " " , )
("icu4c" ,icu4c-67)
("pixman" ,pixman)
("pulseaudio" ,pulseaudio)
("mesa" ,mesa)
("mit-krb5" ,mit-krb5)
UNBUNDLE - ME ! ( " nspr " , )
UNBUNDLE - ME ! ( " nss " , nss )
("shared-mime-info" ,shared-mime-info)
UNBUNDLE - ME ! ( " sqlite " , sqlite )
("unzip" ,unzip)
("zip" ,zip)
UNBUNDLE - ME ! ( " zlib " , zlib )
))
(native-inputs
;; The following patches are specific to the Guix packaging of IceCat,
;; a tarball suitable for compilation on any system that IceCat supports.
;; XXX TODO: Adapt these patches to IceCat 68.
("patch" ,(canonical-package patch))
("rust" ,rust-1.41)
("cargo" ,rust-1.41 "cargo")
("rust-cbindgen" ,rust-cbindgen-0.14)
("llvm" ,llvm)
("clang" ,clang)
("perl" ,perl)
("node" ,node)
("python" ,python)
("python-2" ,python-2)
("python2-pysqlite" ,python2-pysqlite)
("yasm" ,yasm)
XXX FIXME : only needed on x86_64 and i686
("pkg-config" ,pkg-config)
("autoconf" ,autoconf-2.13)
("which" ,which)))
(arguments
#:configure-flags `("--enable-default-toolkit=cairo-gtk3-wayland"
"--with-distribution-id=org.gnu"
system directories to be signed by Mozilla .
"--with-unsigned-addon-scopes=app,system"
"--allow-addon-sideload"
"--enable-pulseaudio"
"--disable-tests"
"--disable-updater"
"--disable-crashreporter"
"--disable-eme"
"--disable-debug"
"--disable-debug-symbols"
Clang is needed to build Stylo , Mozilla 's new
manually , because otherwise the Mozilla build
- config --bindir and - config --libdir ,
which return paths in the package where
,(string-append "--with-clang-path="
(assoc-ref %build-inputs "clang")
"/bin/clang")
,(string-append "--with-libclang-path="
(assoc-ref %build-inputs "clang")
"/lib")
"--enable-official-branding"
UNBUNDLE - ME ! " --with - system - zlib "
UNBUNDLE - ME ! " --with - system - bz2 "
UNBUNDLE - ME ! " --with - system - libevent "
UNBUNDLE - ME ! " --with - system - ogg "
UNBUNDLE - ME ! " --with - system - vorbis "
UNBUNDLE - ME ! " --with - system - libvpx "
"--with-system-icu"
UNBUNDLE - ME ! " --with - system - nspr "
UNBUNDLE - ME ! " --with - system - nss "
UNBUNDLE - ME ! " --with - system - harfbuzz "
UNBUNDLE - ME ! " --with - system - graphite2 "
"--enable-system-pixman"
"--enable-system-ffi"
UNBUNDLE - ME ! " --enable - system - sqlite "
;; "--with-system-png" would need libpng-apng: the Animated Portable
;; Network Graphics (APNG)
;; is an unofficial extension of the Portable
;; Network Graphics (PNG) format (comment garbled during extraction)
)
#:modules ((ice-9 ftw)
(ice-9 rdelim)
(ice-9 regex)
(ice-9 match)
(srfi srfi-34)
(srfi srfi-35)
(rnrs bytevectors)
(rnrs io ports)
(guix elf)
(guix build gremlin)
,@%gnu-build-system-modules)
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'apply-guix-specific-patches
(lambda* (#:key inputs native-inputs #:allow-other-keys)
(let ((patch (string-append (assoc-ref (or native-inputs inputs)
"patch")
"/bin/patch")))
(for-each (match-lambda
((label . file)
(when (and (string-prefix? "icecat-" label)
(string-suffix? ".patch" label))
(format #t "applying '~a'...~%" file)
(invoke patch "--force" "--no-backup-if-mismatch"
"-p1" "--input" file))))
(or native-inputs inputs)))
#t))
(add-after 'apply-guix-specific-patches 'remove-bundled-libraries
(lambda _
(for-each (lambda (file)
(format #t "deleting '~a'...~%" file)
(delete-file-recursively file))
FIXME : Removing the bundled icu breaks configure .
* The bundled icu headers are used in some places .
" intl / icu "
FIXME : With the update to IceCat 60 , using system NSS
the bundled NSPR and NSS . TODO : Investigate ,
UNBUNDLE - ME ! " security / nss "
* : esr60 wants v1.2 , not yet released .
some :
DEFINES['ST_NO_EXCEPTION_HANDLING ' ] = 1
* speex
"modules/freetype2"
UNBUNDLE - ME ! " modules / zlib "
UNBUNDLE - ME ! " / chromium / src / third_party / libevent "
UNBUNDLE - ME ! " media / "
UNBUNDLE - ME ! " media / libogg "
UNBUNDLE - ME ! " media / libvorbis "
UNBUNDLE - ME ! " media / libtremor "
UNBUNDLE - ME ! " gfx / harfbuzz "
UNBUNDLE - ME ! " gfx / graphite2 "
"js/src/ctypes/libffi"
UNBUNDLE - ME ! " db / sqlite3 "
))
#t))
(add-after 'remove-bundled-libraries 'link-libxul-with-libraries
(lambda _
(substitute* "toolkit/library/moz.build"
(("^# This library needs to be last" all)
(string-append "OS_LIBS += [
'GL', 'gnome-2', 'canberra', 'Xss', 'cups', 'gssapi_krb5',
'avcodec', 'avutil', 'pulse' ]\n\n"
all)))
#t))
(add-after 'link-libxul-with-libraries 'fix-ffmpeg-runtime-linker
(lambda* (#:key inputs #:allow-other-keys)
(let* ((ffmpeg (assoc-ref inputs "ffmpeg"))
(libavcodec (string-append ffmpeg "/lib/libavcodec.so")))
(substitute* "dom/media/platforms/ffmpeg/FFmpegRuntimeLinker.cpp"
(("libavcodec\\.so")
libavcodec))
(let* ((mime-info (assoc-ref inputs "shared-mime-info"))
(libavcodec-runpath (call-with-input-file libavcodec
(compose elf-dynamic-info-runpath
elf-dynamic-info
parse-elf
get-bytevector-all)))
(whitelist (cons (string-append mime-info "/share/mime/")
(map (lambda (dir)
(string-append dir "/"))
libavcodec-runpath)))
(whitelist-string (string-join whitelist ","))
(port (open-file "browser/app/profile/icecat.js" "a")))
(format #t "setting 'security.sandbox.content.read_path_whitelist' to '~a'~%"
whitelist-string)
(format port "~%pref(\"security.sandbox.content.read_path_whitelist\", ~S);~%"
whitelist-string)
(close-output-port port))
#t)))
(replace 'bootstrap
(lambda _
(invoke "sh" "-c" "autoconf old-configure.in > old-configure")
(invoke "touch" "configure")))
(add-after 'patch-source-shebangs 'patch-cargo-checksums
(lambda _
(use-modules (guix build cargo-utils))
(let ((null-hash "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"))
(for-each (lambda (file)
(format #t "patching checksums in ~a~%" file)
(substitute* file
(("^checksum = \".*\"")
(string-append "checksum = \"" null-hash "\""))))
(find-files "." "Cargo.lock$"))
(for-each generate-all-checksums
'("services"
"js"
"third_party/rust"
"dom/media"
"dom/webauthn"
"toolkit"
"gfx"
"storage"
"modules"
"xpcom/rust"
"media"
"mozglue/static/rust"
"netwerk"
"remote"
"intl"
"servo"
"security/manager/ssl"
"build")))
#t))
(replace 'configure
;; configure does not work followed by both "SHELL=..." and
(lambda* (#:key outputs configure-flags #:allow-other-keys)
(let* ((out (assoc-ref outputs "out"))
(bash (which "bash"))
(abs-srcdir (getcwd))
(srcdir (string-append "../" (basename abs-srcdir)))
(flags `(,(string-append "--prefix=" out)
,(string-append "--with-l10n-base="
abs-srcdir "/l10n")
,@configure-flags)))
(setenv "SHELL" bash)
(setenv "CONFIG_SHELL" bash)
apparently needed when Stylo is enabled
(setenv "LDFLAGS" (string-append "-Wl,-rpath="
(assoc-ref outputs "out")
"/lib/icecat"))
(mkdir "../build")
(chdir "../build")
(format #t "build directory: ~s~%" (getcwd))
(format #t "configure flags: ~s~%" flags)
(apply invoke bash
(string-append srcdir "/configure")
flags))))
(replace 'build
;; The build is retried, up to 5 times.
(lambda args
(let ((build (assoc-ref %standard-phases 'build)))
(let retry ((remaining-attempts 5))
(if (= remaining-attempts 1)
(apply build args)
(guard (c ((invoke-error? c)
(format #t "~%Retrying build! (~a attempts remaining)~%~%"
(- remaining-attempts 1))
(force-output)
(retry (- remaining-attempts 1))))
(apply build args)))))))
(add-after 'build 'neutralise-store-references
(lambda _
;; about:buildconfig, reducing IceCat's closure by 1 on x86-64.
(substitute*
"dist/bin/chrome/toolkit/content/global/buildconfig.html"
(((format #f "(~a/)([0-9a-df-np-sv-z]{32})"
(regexp-quote (%store-directory)))
_ store hash)
(string-append store
(string-take hash 8)
"<!-- Guix: not a runtime dependency -->"
(string-drop hash 8))))
#t))
(add-before 'configure 'install-desktop-entry
(lambda* (#:key outputs #:allow-other-keys)
(let* ((desktop-file "taskcluster/docker/icecat-snap/icecat.desktop")
(out (assoc-ref outputs "out"))
(applications (string-append out "/share/applications")))
(substitute* desktop-file
(("^Exec=icecat") (string-append "Exec=" out "/bin/icecat"))
(("IceCat") "GNU IceCat")
(("Icon=.*") "Icon=icecat\n")
(("NewWindow") "new-window")
(("NewPrivateWindow") "new-private-window"))
(install-file desktop-file applications)
#t)))
(add-after 'install-desktop-entry 'install-icons
(lambda* (#:key outputs #:allow-other-keys)
(let ((out (assoc-ref outputs "out")))
(with-directory-excursion "browser/branding/official"
(for-each
(lambda (file)
(let* ((size (string-filter char-numeric? file))
(icons (string-append out "/share/icons/hicolor/"
size "x" size "/apps")))
(mkdir-p icons)
(copy-file file (string-append icons "/icecat.png"))))
'("default16.png" "default22.png" "default24.png"
"default32.png" "default48.png" "content/icon64.png"
"mozicon128.png" "default256.png"))
#t))))
This fixes the file chooser crash that happens with GTK 3 .
(add-after 'install 'wrap-program
(lambda* (#:key inputs outputs #:allow-other-keys)
(let* ((out (assoc-ref outputs "out"))
(lib (string-append out "/lib"))
(gtk (assoc-ref inputs "gtk+"))
(gtk-share (string-append gtk "/share"))
(mesa (assoc-ref inputs "mesa"))
(mesa-lib (string-append mesa "/lib"))
(pulseaudio (assoc-ref inputs "pulseaudio"))
(pulseaudio-lib (string-append pulseaudio "/lib"))
(libxscrnsaver (assoc-ref inputs "libxscrnsaver"))
(libxscrnsaver-lib (string-append libxscrnsaver "/lib"))
(mit-krb5 (assoc-ref inputs "mit-krb5"))
(mit-krb5-lib (string-append mit-krb5 "/lib")))
(wrap-program (car (find-files lib "^icecat$"))
`("XDG_DATA_DIRS" prefix (,gtk-share))
;; package on guix has been observed to be unstable when
;; using wayland, and the bundled extensions stop working.
;; `("MOZ_ENABLE_WAYLAND" = ("1"))
`("LD_LIBRARY_PATH" prefix (,pulseaudio-lib
,mesa-lib
,libxscrnsaver-lib
,mit-krb5-lib)))
#t))))))
(home-page "/")
(synopsis "Entirely free browser derived from Mozilla Firefox")
(description
"IceCat is the GNU version of the Firefox browser. It is entirely free
software, which does not recommend non-free plugins and addons. It also
features built-in privacy-protecting features.
WARNING: IceCat 78 has not yet been released by the upstream IceCat project.
This is a preview release, and does not currently meet the privacy-respecting
standards of the IceCat project.")
(properties
`((ftp-directory . "/gnu/gnuzilla")
(cpe-name . "firefox_esr")
(cpe-version . ,(first (string-split version #\-)))))))
;; must be of the form YYYYMMDDhhmmss
(define-public icedove
(package
(name "icedove")
(version "78.11.0")
(source icecat-source)
(properties
`((cpe-name . "thunderbird_esr")))
(build-system gnu-build-system)
(arguments
(sxml simple)
(ice-9 regex)
,@%gnu-build-system-modules)
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'prepare-thunderbird-sources
(lambda* (#:key inputs #:allow-other-keys)
(mkdir "comm")
(copy-recursively (assoc-ref inputs "thunderbird-sources")
"comm")
(delete-file-recursively "obj-x86_64-pc-linux-gnu")
(delete-file "sourcestamp.txt")
#t))
(add-after 'patch-source-shebangs 'patch-cargo-checksums
(lambda _
(use-modules (guix build cargo-utils))
(let ((null-hash "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"))
(for-each (lambda (file)
(format #t "patching checksums in ~a~%" file)
(substitute* file
(("^checksum = \".*\"")
(string-append "checksum = \"" null-hash "\""))))
(find-files "." "Cargo.lock$"))
(for-each generate-all-checksums
'("third_party/rust"
"toolkit/library/rust")))
#t))
See e.g.
(add-after 'patch-source-shebangs 'fix-profile-setting
(lambda _
(substitute* "comm/mail/moz.configure"
(("'MOZ_DEDICATED_PROFILES', True")
"'MOZ_DEDICATED_PROFILES', False"))
#t))
(add-after 'prepare-thunderbird-sources 'rename-to-icedove
(lambda _
(substitute* "comm/mail/confvars.sh"
(("MOZ_APP_BASENAME=Thunderbird")
"MOZ_APP_BASENAME=Icedove\nMOZ_APP_DISPLAYNAME=Icedove")
(("MOZ_APP_NAME=thunderbird")
"MOZ_APP_NAME=icedove")
(("MOZ_UPDATER=1")
"MOZ_UPDATER=0"))
Remove branding to comply with Mozilla 's trademark policy
(with-directory-excursion "comm/mail/branding/nightly"
(delete-file "content/about-wordmark.svg")
(call-with-output-file "content/about-wordmark.svg"
(lambda (port)
(sxml->xml '(svg (@ (xmlns "")
(viewBox "0 0 789.1 90.78")
(width "333")
(height "48")
(fill "#fff"))
(text (@ (x "400") (y "70")
(text-anchor "middle")
(font-size "90"))
"Icedove Daily"))
port)))
(substitute* '("locales/en-US/brand.properties"
"locales/en-US/brand.ftl"
"locales/en-US/brand.dtd"
"configure.sh")
(("Thunderbird") "Icedove")
(("mozilla.org") "guix.gnu.org")))
Remove other mentions of Thunderbird in user - visible text .
(with-directory-excursion "comm/mail/base/content"
(substitute* '("overrides/app-license-name.html")
(("Thunderbird") "Icedove")))
(with-directory-excursion "comm/mail/components/"
(substitute* '("MailGlue.jsm"
"extensions/schemas/addressBook.json"
"extensions/schemas/tabs.json"
"extensions/schemas/cloudFile.json"
"extensions/schemas/chrome_settings_overrides.json"
"extensions/schemas/windows.json"
"extensions/parent/ext-mail.js"
"im/messages/mail/Info.plist"
"enterprisepolicies/moz.build"
"enterprisepolicies/helpers/moz.build"
"enterprisepolicies/schemas/moz.build")
(("Thunderbird") "Icedove")))
(substitute* '("comm/mailnews/base/prefs/content/accountUtils.js"
"comm/common/src/customizeToolbar.js")
(("AppConstants.MOZ_APP_NAME (.)= \"thunderbird" _ e)
(format #f "AppConstants.MOZ_APP_NAME ~a= \"icedove" e)))
(substitute* "comm/mail/app/profile/all-thunderbird.js"
(("(pref\\(\"extensions.webservice.discoverURL\").*" _ m)
(string-append m ", \"\");"))
(("(pref\\(\"extensions.getAddons.search.url\").*" _ m)
(string-append m ", \"\");"))
(("(pref\\(\"extensions.update.enabled\").*" _ m)
(string-append m ", false);"))
(("(pref\\(\"extensions.systemAddon.update.enabled\").*" _ m)
(string-append m ", false);"))
(("(pref\\(\"lightweightThemes.update.enabled\").*" _ m)
(string-append m ", false);")))
#t))
(add-after 'build 'neutralize-store-references
(lambda _
;; about:buildconfig, reducing Icedove's closure significantly.
(substitute*
(find-files "." "buildconfig.html")
(((format #f "(~a/)([0-9a-df-np-sv-z]{32})"
(regexp-quote (%store-directory)))
_ store hash)
(string-append store
(string-take hash 8)
"<!-- Guix: not a runtime dependency -->"
(string-drop hash 8))))
#t))
(replace 'configure
(lambda* (#:key inputs outputs configure-flags #:allow-other-keys)
(let* ((out (assoc-ref outputs "out"))
(bash (which "bash"))
(abs-srcdir (getcwd))
(srcdir (string-append "../" (basename abs-srcdir)))
(flags `(,(string-append "--prefix=" out)
,@configure-flags))
(mozconfig (string-append (getcwd) "/.mozconfig")))
(setenv "SHELL" bash)
(setenv "AUTOCONF"
(string-append (assoc-ref %build-inputs
"autoconf")
"/bin/autoconf"))
(setenv "CONFIG_SHELL" bash)
(setenv "QA_CONFIGURE_OPTIONS" ".*")
(setenv "MOZBUILD_STATE_PATH"
(string-append (getcwd) "/mach_state"))
(setenv "MOZCONFIG"
(string-append (getcwd) "/.mozconfig"))
(setenv "CC" "gcc")
(setenv "MOZ_NOSPAM" "1")
(setenv "PYTHON"
(string-append (assoc-ref inputs "python2")
"/bin/python"))
(setenv "LDFLAGS" (string-append "-Wl,-rpath="
(assoc-ref outputs "out")
"/lib/icedove"))
(mkdir-p (string-append (getcwd) "/builddir"))
(with-output-to-file mozconfig
(lambda ()
(display
(string-append
"ac_add_options --disable-crashreporter\n"
"ac_add_options --disable-debug\n"
"ac_add_options --disable-debug-symbols\n"
"ac_add_options --disable-elf-hack\n"
"ac_add_options --disable-jit\n"
"ac_add_options --disable-necko-wifi\n"
"ac_add_options --disable-official-branding\n"
"ac_add_options --disable-tests\n"
"ac_add_options --disable-updater\n"
"ac_add_options --disable-webrtc\n"
"ac_add_options --enable-application=comm/mail\n"
"ac_add_options --enable-calendar\n"
"ac_add_options --enable-default-toolkit=\"cairo-gtk3\"\n"
"ac_add_options --enable-optimize\n"
"ac_add_options --enable-pulseaudio\n"
"ac_add_options --enable-release\n"
"ac_add_options --enable-strip\n"
"ac_add_options --enable-system-ffi\n"
"ac_add_options --enable-system-pixman\n"
"ac_add_options --prefix=" out "\n"
"ac_add_options --with-clang-path=" (assoc-ref %build-inputs "clang") "/bin/clang\n"
"ac_add_options --with-distribution-id=org.gnu\n"
"ac_add_options --with-libclang-path=" (assoc-ref %build-inputs "clang") "/lib\n"
"ac_add_options --with-system-bz2\n"
"ac_add_options --with-system-icu\n"
"ac_add_options --with-system-jpeg\n"
"ac_add_options --with-system-libevent\n"
"ac_add_options --with-system-nspr\n"
"ac_add_options --with-system-nss\n"
"ac_add_options --with-system-zlib\n"
"ac_add_options --with-user-appdir=\\.icedove\n"
"mk_add_options MOZ_MAKE_FLAGS=-j"
(number->string (parallel-job-count)) "\n"))))
(display (getcwd))
(newline)
(display "mach configure")
(invoke "./mach" "configure"))))
(replace 'build
(lambda _ (invoke "./mach" "build")))
(replace 'install
(lambda _ (invoke "./mach" "install")))
;; Thunderbird doesn't provide any .desktop file.
(add-after 'install 'install-desktop-file
(lambda* (#:key outputs #:allow-other-keys)
(let* ((out (assoc-ref outputs "out"))
(apps (string-append out "/share/applications")))
(mkdir-p apps)
(with-output-to-file (string-append apps "/icedove.desktop")
(lambda _
(format #t
"[Desktop Entry]~@
Name=Icedove~@
Exec=~a/bin/icedove~@
Icon=icedove~@
GenericName=Mail/News Client~@
Terminal=false~@
StartupNotify=true~@
Type=Application~@
[Desktop Action ComposeMessage]~@
Name=Write new message~@
Exec=~@*~a/bin/icedove -compose~%"
out))))
#t))
(add-after 'install 'wrap-program
(lambda* (#:key inputs outputs #:allow-other-keys)
(let* ((out (assoc-ref outputs "out"))
(lib (string-append out "/lib"))
(gtk (assoc-ref inputs "gtk+"))
(gtk-share (string-append gtk "/share"))
(pulseaudio (assoc-ref inputs "pulseaudio"))
(pulseaudio-lib (string-append pulseaudio "/lib"))
(eudev (assoc-ref inputs "eudev"))
(eudev-lib (string-append eudev "/lib")))
(wrap-program (car (find-files lib "^icedove$"))
`("XDG_DATA_DIRS" prefix (,gtk-share))
`("LD_LIBRARY_PATH" prefix (,pulseaudio-lib ,eudev-lib)))
#t))))))
(inputs
`(("bzip2" ,bzip2)
("cairo" ,cairo)
("cups" ,cups)
("dbus-glib" ,dbus-glib)
("ffmpeg" ,ffmpeg)
("freetype" ,freetype)
("gdk-pixbuf" ,gdk-pixbuf)
("glib" ,glib)
("gtk+" ,gtk+)
("gtk+-2" ,gtk+-2)
("hunspell" ,hunspell)
("icu4c" ,icu4c-67)
("libcanberra" ,libcanberra)
("libevent" ,libevent)
("libffi" ,libffi)
("libgnome" ,libgnome)
("libjpeg-turbo" ,libjpeg-turbo)
("libpng-apng" ,libpng-apng)
("libvpx" ,libvpx)
("libxcomposite" ,libxcomposite)
("libxft" ,libxft)
("libxinerama" ,libxinerama)
("libxscrnsaver" ,libxscrnsaver)
("libxt" ,libxt)
("mesa" ,mesa)
("mit-krb5" ,mit-krb5)
("nspr" ,nspr)
("nss" ,nss)
("pango" ,pango)
("pixman" ,pixman)
("pulseaudio" ,pulseaudio)
("sqlite" ,sqlite)
("startup-notification" ,startup-notification)
("eudev" ,eudev)
("unzip" ,unzip)
("zip" ,zip)
("zlib" ,zlib)))
(native-inputs
`(("thunderbird-sources"
;; The changeset identifier is taken from the file "sourcestamp.txt"
;; in the Thunderbird release tarball. We don't use the release
;; tarball because it duplicates the Icecat sources and only adds the
,(let ((changeset "1717d8d5fbd359aab7a4a0a15f4d15c72a7e6afc"))
(origin
(method hg-fetch)
(uri (hg-reference
(url "-esr78")
(changeset changeset)))
(file-name (string-append "thunderbird-" version "-checkout"))
(sha256
(base32
"10l042dd7b8rvla0cbiks5kjrz2b28yy7hr8sr169wlx202hxa01")))))
("autoconf" ,autoconf-2.13)
("cargo" ,rust-1.41 "cargo")
("clang" ,clang)
("llvm" ,llvm)
("nasm" ,nasm)
("node" ,node)
("perl" ,perl)
("pkg-config" ,pkg-config)
("python" ,python)
("python2" ,python-2.7)
("rust" ,rust-1.41)
("rust-cbindgen" ,rust-cbindgen-0.14)
("which" ,which)
("yasm" ,yasm)))
(home-page "")
(synopsis "Rebranded Mozilla Thunderbird email client")
(description
"This package provides an email client built based on Mozilla
Thunderbird. It supports email, news feeds, chat, calendar and contacts.")
(license license:mpl2.0)))
;; Wayland-enabled variant of icedove: a trivial package that wraps the
;; stock icedove binary in a shell script exporting MOZ_ENABLE_WAYLAND=1,
;; and copies/fixes the desktop entry so it points at the wrapper.
(define-public icedove/wayland
  (package
    (inherit icedove)
    (name "icedove-wayland")
    (native-inputs '())
    (inputs
     `(("bash" ,bash-minimal)
       ("icedove" ,icedove)))
    (build-system trivial-build-system)
    (arguments
     '(#:modules ((guix build utils))
       #:builder
       (begin
         (use-modules (guix build utils))
         (let* ((bash (assoc-ref %build-inputs "bash"))
                (icedove (assoc-ref %build-inputs "icedove"))
                (out (assoc-ref %outputs "out"))
                (exe (string-append out "/bin/icedove")))
           (mkdir-p (dirname exe))
           ;; Emit the wrapper script; the format string intentionally
           ;; spans two lines (shebang + exec line).
           (call-with-output-file exe
             (lambda (port)
               (format port "#!~a
 MOZ_ENABLE_WAYLAND=1 exec ~a $@"
                       (string-append bash "/bin/bash")
                       (string-append icedove "/bin/icedove"))))
           (chmod exe #o555)
           ;; Reuse icedove's share/ tree (icons, desktop files, ...).
           (copy-recursively (string-append icedove "/share")
                             (string-append out "/share"))
           ;; Rewrite the desktop entry so Exec= references this wrapper
           ;; output instead of the original icedove store path.
           (substitute* (string-append
                         out "/share/applications/icedove.desktop")
             ((icedove) out))
           #t))))))
;; Standalone Python tool that extracts saved passwords from Mozilla
;; profiles.  Built with the trivial build system: the single script is
;; copied, patched to use the store's python3 and NSS, and installed.
(define-public firefox-decrypt
  (package
    (name "firefox-decrypt")
    (version "0.7.0")
    (source (origin
              (method git-fetch)
              (uri (git-reference
                    ;; NOTE(review): URL garbled to "" during extraction —
                    ;; upstream is the unode/firefox_decrypt Git repository;
                    ;; restore before building.
                    (url "")
                    (commit version)))
              (file-name (git-file-name name version))
              (sha256
               (base32
                "17yyyxp47z4m8hnflcq34rc1y871515kr3f1y42j1l0yx3g0il07"))))
    (build-system trivial-build-system)
    (inputs
     `(("nss" ,nss)
       ("python" ,python)))
    (arguments
     `(#:modules ((guix build utils))
       #:builder
       (begin
         (use-modules (guix build utils))
         ;; `which' below needs python on PATH.
         (setenv "PATH"
                 (string-append
                  (assoc-ref %build-inputs "python") "/bin"))
         (copy-file (string-append (assoc-ref %build-inputs "source")
                                   "/firefox_decrypt.py")
                    "firefox_decrypt.py")
         ;; Hard-code the interpreter and the NSS shared library path so
         ;; the script works outside a profile environment.
         (substitute* "firefox_decrypt.py"
           (("/usr/bin/env python") (which "python3"))
           (("libnss3.so") (string-append (assoc-ref %build-inputs "nss")
                                          "/lib/nss/libnss3.so")))
         (install-file "firefox_decrypt.py" (string-append %output "/bin"))
         #t)))
    ;; NOTE(review): home-page reduced to "/" in extraction — confirm URL.
    (home-page "/")
    (synopsis "Tool to extract passwords from Mozilla profiles")
    (description "Firefox Decrypt is a tool to extract passwords from
Mozilla (Firefox, Waterfox, Thunderbird, SeaMonkey) profiles.")
    (license license:gpl3+)))
|
4ce47558b04aed73a08e6c99826b8c32bf31c1a022083e2c47997aa0e6c58ad7 | cram2/cram | prolog.lisp | Copyright ( c ) 2012 , < >
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions are met:
;;;
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
* Neither the name of the Intelligent Autonomous Systems Group/
;;; Technische Universitaet Muenchen nor the names of its contributors
;;; may be used to endorse or promote products derived from this software
;;; without specific prior written permission.
;;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
;;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
;;; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
;;; SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
;;; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
;;; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
;;; POSSIBILITY OF SUCH DAMAGE.
(in-package :occupancy-grid-costmap)

;; Generator scores order costmap generator functions when costmaps are
;; merged; lower scores are applied earlier.  Both generators registered
;; here share score 15.
;; NOTE(review): identical scores may be intentional (both applied at the
;; same merge stage) — confirm against the costmap merging logic.
(defmethod costmap-generator-name->score ((name (eql 'static-occupied)))
  15)

(defmethod costmap-generator-name->score ((name (eql 'free-space)))
  15)
;; Prolog fact group extending DESIG-COSTMAP: location designators that
;; require visibility or reachability get a costmap restricted to the
;; drivable (free) space of the current occupancy grid map.
(def-fact-group occupancy-grid-costmap (desig-costmap)
  ;; Helper rule: add two cost functions to costmap ?cm based on the map
  ;; bound to *current-map* — `free-space' from the inverted occupancy
  ;; grid and `static-occupied' from the (padded, inverted) occupied
  ;; grid.  If *current-map* is unbound the rule still succeeds (true)
  ;; and leaves ?cm unchanged.
  (<- (drivable-location-costmap ?cm ?padding)
    (costmap ?cm)
    (-> (symbol-value *current-map* ?map)
        (and
         (inverted-occupancy-grid ?map ?free-space)
         (occupancy-grid ?map ?static-occupied (padding ?padding))
         (costmap-add-function free-space (make-occupancy-grid-cost-function ?free-space) ?cm)
         (costmap-add-function static-occupied
                               (make-occupancy-grid-cost-function ?static-occupied :invert t)
                               ?cm))
        (true)))

  ;; Visibility designators use the generic costmap padding.
  (<- (desig-costmap ?desig ?cm)
    (cram-robot-interfaces:visibility-designator ?desig)
    (costmap ?cm)
    (costmap-padding ?padding)
    (drivable-location-costmap ?cm ?padding))

  ;; Reachability designators use the (larger) manipulation padding.
  (<- (desig-costmap ?desig ?cm)
    (cram-robot-interfaces:reachability-designator ?desig)
    (costmap ?cm)
    (costmap-manipulation-padding ?padding)
    (drivable-location-costmap ?cm ?padding)))
| null | https://raw.githubusercontent.com/cram2/cram/dcb73031ee944d04215bbff9e98b9e8c210ef6c5/cram_common/cram_occupancy_grid_costmap/src/prolog.lisp | lisp | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Technische Universitaet Muenchen nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE. | Copyright ( c ) 2012 , < >
* Neither the name of the Intelligent Autonomous Systems Group/
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
(in-package :occupancy-grid-costmap)
(defmethod costmap-generator-name->score ((name (eql 'static-occupied)))
15)
(defmethod costmap-generator-name->score ((name (eql 'free-space)))
15)
(def-fact-group occupancy-grid-costmap (desig-costmap)
(<- (drivable-location-costmap ?cm ?padding)
(costmap ?cm)
(-> (symbol-value *current-map* ?map)
(and
(inverted-occupancy-grid ?map ?free-space)
(occupancy-grid ?map ?static-occupied (padding ?padding))
(costmap-add-function free-space (make-occupancy-grid-cost-function ?free-space) ?cm)
(costmap-add-function static-occupied
(make-occupancy-grid-cost-function ?static-occupied :invert t)
?cm))
(true)))
(<- (desig-costmap ?desig ?cm)
(cram-robot-interfaces:visibility-designator ?desig)
(costmap ?cm)
(costmap-padding ?padding)
(drivable-location-costmap ?cm ?padding))
(<- (desig-costmap ?desig ?cm)
(cram-robot-interfaces:reachability-designator ?desig)
(costmap ?cm)
(costmap-manipulation-padding ?padding)
(drivable-location-costmap ?cm ?padding)))
|
89f8adec8e1ae7183e5fcb7543a8d636d4026ce2767e59f21e63a279d8cce631 | gerritjvv/fileape | write_support.clj | (ns
^{:doc "Implement java write support for parquet
the different types and cases that can be written is
done via multimethods while the actual WriterSupport is reified"}
fileape.parquet.write-support
(:require [fileape.util.lang :refer [case-enum]]
[clojure.tools.logging :refer [error info warn]])
(:import (org.apache.parquet.hadoop.api WriteSupport WriteSupport$WriteContext)
(org.apache.parquet.schema MessageType GroupType OriginalType Type PrimitiveType PrimitiveType$PrimitiveTypeName)
(java.util Map Date List Base64 Base64$Encoder)
(org.apache.parquet.io.api RecordConsumer Binary)
(java.util.concurrent TimeUnit)
(fileape Util)
(clojure.lang ISeq)))
;; Forward declarations: these writers are defined further down but are
;; referenced earlier (the write functions are mutually recursive).
(declare write-primitive-val)
(declare write-val)
(declare write-message-fields)
;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;; private functions and default multi method impls
;; Class object for primitive byte arrays ("[B"), used for instance? dispatch.
(def BYTE-ARRAY-CLASS (Class/forName "[B"))
(defn asbinary ^"[B" [v]
  ;; NOTE(review): the ^"[B" hint claims byte[] but a parquet Binary is
  ;; returned on both branches — confirm the hint before relying on it.
  ;; The dispatch line below was dropped by the extraction (the trailing
  ;; parens of the last line were left dangling); restored here.
  (if (instance? BYTE-ARRAY-CLASS v)
    ;; byte arrays are base64 encoded instead of rendered via str
    (Binary/fromString (.encodeToString ^Base64$Encoder (Base64/getEncoder) (bytes v))) ;(Binary/fromConstantByteArray ^"[B" v)
    (Binary/fromString (str v))))
(defn check-is-map
  "Throw a RuntimeException when v is non-nil and not a java.util.Map.
   A nil v passes silently; returns nil in all non-throwing cases."
  [v]
  (when (and v (not (instance? Map v)))
    (throw (RuntimeException. (str "Val must be a map but got " v)))))
(defn as-number
  "Coerce val to a number: numbers pass through unchanged, anything else is
   parsed as a Long from its string representation."
  [val]
  (if-not (number? val)
    (Long/valueOf (str val))
    val))
(defn as-float
  "Coerce val to a floating point number: numbers pass through unchanged,
   anything else is parsed as a Double from its string representation."
  [val]
  (if-not (number? val)
    (Double/valueOf (str val))
    val))
(defmacro with-default
  "Evaluate body and return its value; if any Exception escapes, log a
   warning and yield default instead.  Used to keep a single bad value from
   aborting a whole record write."
  [default & body]
  `(try
     ~@body
     (catch Exception e#
       ;; the catch body is an implicit do, so the original (do ...) is dropped
       (warn (str "exception while parsing value " e# " using default instead " ~default))
       ~default)))
(defn write-primitive-val [^RecordConsumer rconsumer ^PrimitiveType schema val]
  ;; Emit one primitive value on the RecordConsumer, coerced to the schema's
  ;; primitive type.  Parse failures inside with-default are replaced by a
  ;; sentinel (-1, false, or a single 0xFF byte) so a single bad value does not
  ;; abort the record; any other failure is logged and rethrown as ex-info.
  ;; NOTE(review): the BINARY fallback is a raw byte-array while the normal
  ;; path hands .addBinary a Binary from asbinary — confirm both are accepted.
  (try
    (case-enum (.getPrimitiveTypeName schema)
               PrimitiveType$PrimitiveTypeName/INT64 (.addLong rconsumer (long (with-default -1 (as-number val))))
               PrimitiveType$PrimitiveTypeName/INT32 (.addInteger rconsumer (int (with-default -1 (as-number val))))
               PrimitiveType$PrimitiveTypeName/BOOLEAN (.addBoolean rconsumer (with-default false (boolean val)))
               PrimitiveType$PrimitiveTypeName/BINARY (.addBinary rconsumer (with-default (byte-array [-1]) (asbinary val)))
               PrimitiveType$PrimitiveTypeName/FLOAT (.addFloat rconsumer (float (with-default -1 (as-float val))))
               PrimitiveType$PrimitiveTypeName/DOUBLE (.addDouble rconsumer (double (with-default -1 (as-float val)))))
    (catch Exception e
      (do
        (error e (str schema " val " val))
        (throw (ex-info (str e) {:schema schema :val val :val-type (if val (type val) nil)}))))))
;; Thin wrappers over the parquet RecordConsumer protocol.  Field indexes are
;; coerced to int because the API takes a primitive int index.
(defn start-field [^RecordConsumer rconsumer ^String field-name ^long i]
  (.startField rconsumer field-name (int i)))
(defn end-field [^RecordConsumer rconsumer ^String field-name ^long i]
  (.endField rconsumer field-name (int i)))
(defn start-group [^RecordConsumer rconsumer]
  (.startGroup rconsumer))
(defn end-group [^RecordConsumer rconsumer] (.endGroup rconsumer))
(defn date->int-seconds
  "Convert a date to seconds and return a casted int"
  ;; NOTE(review): the parquet/hive DATE (INT32) logical type is conventionally
  ;; days since the epoch; this writes *seconds* since the epoch — confirm the
  ;; readers of these files expect seconds.
  [^Date date]
  (int (.toSeconds TimeUnit/MILLISECONDS (.getTime date))))
(defn get-map-schemas
  "Return [key-schema value-schema] for a hive style MAP group of the shape
   #<GroupType optional group [map-field-name] (MAP) { repeated group map (MAP_KEY_VALUE) { required binary key; optional binary value; } }"
  [^Type schema]
  (let [kv-group (.asGroupType (.getType (.asGroupType schema) "map"))]
    [(.getType kv-group "key") (.getType kv-group "value")]))
(defn write-key-value
  "Write a hive map compatible data structure from the schema {\n repeated group map (MAP_KEY_VALUE) {\n required binary key;\n optional binary value;\n }\n}
  Note only the key value parts are written, the group and field for map needs to be created before and ended after this function is called for all key values"
  [rconsumer ^Type schema k v]
  (let [[^Type key-type ^Type val-type] (get-map-schemas schema)
        key-name (.getName key-type)
        val-name (.getName val-type)]
    ;; one group per map entry: field index 0 = key, field index 1 = value;
    ;; the start/end call order below is the RecordConsumer contract — keep it.
    (start-group rconsumer)
    (start-field rconsumer key-name 0)
    (write-val rconsumer key-type k)
    (end-field rconsumer key-name 0)
    (start-field rconsumer val-name 1)
    (write-val rconsumer val-type v)
    (end-field rconsumer val-name 1)
    (end-group rconsumer)))
;; Non-primitive writers dispatch on the schema's OriginalType (LIST/MAP/DATE/…,
;; with nil/unknown falling through to :default below).
(defmulti write-extended-val "Any object other than a Java primitive, the dispatch is based on the Type::originalType" (fn [rconsumer ^Type schema val] (.getOriginalType schema)))
;; optional group mylist ( LIST ) { repeated group bag { optional type array_element ; } }
(defmethod write-extended-val OriginalType/LIST [rconsumer ^Type schema val]
  ;; Write a hive compatible list of the shape shown in the comment above.
  (when-not (or (instance? List val) (instance? ISeq val))
    (throw (RuntimeException. (str "Lists must be of type List or ISeq but got " val))))
  ;; element type = schema -> "bag" group (field 0) -> "array_element" (field 0)
  (let [^Type type (.getType (.asGroupType (.getType (.asGroupType schema) (int 0))) (int 0))]
    (start-group rconsumer)
    (start-field rconsumer "bag" 0)
    ;; reduce is used purely for side-effecting iteration; the accumulator is
    ;; ignored.  Empty/nil elements still produce an (empty) group.
    (reduce (fn [_ v]
              (start-group rconsumer)
              (when (Util/notNilOrEmpty v)
                (start-field rconsumer "array_element" 0)
                (write-val rconsumer type v)
                (end-field rconsumer "array_element" 0))
              (end-group rconsumer))
            nil
            val)
    (end-field rconsumer "bag" 0)
    (end-group rconsumer)))
(defmethod write-extended-val OriginalType/DATE [rconsumer schema val]
  ;;write hive compatible date
  ;; (the extraction left a bare "-8119.patch" token here — restored as a
  ;; comment; it appears to reference HIVE-8119.patch, hive's parquet dates)
  ;; NOTE(review): write-primitive-val expects a PrimitiveType and calls
  ;; .getPrimitiveTypeName on it, yet the PrimitiveTypeName enum itself is
  ;; passed here — confirm this dispatches as intended at runtime.
  (write-primitive-val rconsumer PrimitiveType$PrimitiveTypeName/INT32 (date->int-seconds val)))
(defmethod write-extended-val OriginalType/MAP [rconsumer ^Type schema val]
  ;;write a hive compatible map type
  ;;#<GroupType optional group [map-field-name] (MAP) {
  ;; repeated group map (MAP_KEY_VALUE) {
  ;; required binary key;
  ;; optional binary value;
  ;; }
  ;;}
  ;; NOTE(review): unlike the :default group writer this never calls
  ;; start-group, and write-message-fields has already started this field
  ;; before dispatching here — verify against the RecordConsumer contract.
  (check-is-map val)
  (start-field rconsumer (.getName schema) 0)
  ;;for each key val call write-key-value
  (reduce-kv #(write-key-value rconsumer schema %2 %3) nil val)
  (end-field rconsumer (.getName schema) 0))
;; Default is a Group and the val type must be a Map
(defmethod write-extended-val :default [rconsumer ^Type schema val]
  ;; Any other original type is treated as a nested group whose fields are
  ;; looked up in a Map by name (see write-message-fields).
  (check-is-map val)
  (start-group rconsumer)
  (write-message-fields rconsumer (.asGroupType schema) val)
  (end-group rconsumer))
(defn write-val
  "Dispatch a single value to the primitive writer or to the extended
   (LIST/MAP/group) writers, depending on the schema node."
  [rconsumer ^Type schema val]
  (if-not (.isPrimitive schema)
    (write-extended-val rconsumer schema val)
    (write-primitive-val rconsumer schema val)))
(defn write-message-fields
  "Write the fields of a top level message: for every field declared in the
   schema, look its value up in val by field name and emit it between
   start-field/end-field; nil or empty values are skipped entirely."
  [rconsumer ^GroupType schema ^Map val]
  ;; doseq over the field indexes replaces the original loop/recur; the
  ;; iteration order (0 .. fieldCount-1) and skip behaviour are unchanged.
  (doseq [i (range (.getFieldCount schema))]
    (let [^Type field (.getType schema (int i))
          field-name (.getName field)
          map-v (get val field-name)]
      (when (Util/notNilOrEmpty map-v)
        (start-field rconsumer field-name i)
        (write-val rconsumer field map-v)
        (end-field rconsumer field-name i)))))
;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;; public functions
(defn java-write-support
  "Returns an instance of WriteSupport that will correctly
  serialise standard java types compatible with Hive.
  Note: Repeated must have List as the original Type and will be a list type
  groups must be a Map type"
  [^MessageType schema meta]
  {:pre [(instance? Map meta) schema]}
  ;; record-consumer-state holds the RecordConsumer parquet hands us in
  ;; prepareForWrite; write then replays each record against it.
  (let [record-consumer-state (atom nil)]
    (proxy [WriteSupport] []
      (init [_]
        ;; called once by parquet; the hadoop Configuration argument is ignored
        (WriteSupport$WriteContext. schema ^Map meta))
      (prepareForWrite [record-consumer]
        (reset! record-consumer-state record-consumer))
      (write [val]
        ;; one message per record: startMessage / fields / endMessage
        (let [^RecordConsumer rconsumer @record-consumer-state]
          (.startMessage rconsumer)
          (write-message-fields rconsumer schema val)
          (.endMessage rconsumer))))))
(Binary/fromConstantByteArray ^"[B" v)
\n optional binary value;\n }\n}"
\n optional binary value;\n }\n}
} }
write hive compatible date
write a hive compatible map type
repeated group map (MAP_KEY_VALUE) {
required binary key;
optional binary value;
}
}
for each key val call write-key-value
public functions | (ns
^{:doc "Implement java write support for parquet
the different types and cases that can be written is
done via multimethods while the actual WriterSupport is reified"}
fileape.parquet.write-support
(:require [fileape.util.lang :refer [case-enum]]
[clojure.tools.logging :refer [error info warn]])
(:import (org.apache.parquet.hadoop.api WriteSupport WriteSupport$WriteContext)
(org.apache.parquet.schema MessageType GroupType OriginalType Type PrimitiveType PrimitiveType$PrimitiveTypeName)
(java.util Map Date List Base64 Base64$Encoder)
(org.apache.parquet.io.api RecordConsumer Binary)
(java.util.concurrent TimeUnit)
(fileape Util)
(clojure.lang ISeq)))
(declare write-primitive-val)
(declare write-val)
(declare write-message-fields)
private functions and default multi method impls
(def BYTE-ARRAY-CLASS (Class/forName "[B"))
(defn asbinary ^"[B" [v]
convert to base 64 instead of a string
(Binary/fromString (str v))))
(defn check-is-map
"If v is not null then check that v is an instanceof java.util.Map, if not an exception is thrown"
[v]
(when v
(if-not (instance? Map v)
(throw (RuntimeException. (str "Val must be a map but got " v))))))
(defn as-number [val]
(if (number? val) val
(Long/valueOf (str val))))
(defn as-float [val]
(if (number? val) val
(Double/valueOf (str val))))
(defmacro with-default [default & body]
`(try
~@body
(catch Exception e# (do
(warn (str "exception while parsing value " e# " using default instead " ~default))
~default))))
(defn write-primitive-val [^RecordConsumer rconsumer ^PrimitiveType schema val]
(try
(case-enum (.getPrimitiveTypeName schema)
PrimitiveType$PrimitiveTypeName/INT64 (.addLong rconsumer (long (with-default -1 (as-number val))))
PrimitiveType$PrimitiveTypeName/INT32 (.addInteger rconsumer (int (with-default -1 (as-number val))))
PrimitiveType$PrimitiveTypeName/BOOLEAN (.addBoolean rconsumer (with-default false (boolean val)))
PrimitiveType$PrimitiveTypeName/BINARY (.addBinary rconsumer (with-default (byte-array [-1]) (asbinary val)))
PrimitiveType$PrimitiveTypeName/FLOAT (.addFloat rconsumer (float (with-default -1 (as-float val))))
PrimitiveType$PrimitiveTypeName/DOUBLE (.addDouble rconsumer (double (with-default -1 (as-float val)))))
(catch Exception e
(do
(error e (str schema " val " val))
(throw (ex-info (str e) {:schema schema :val val :val-type (if val (type val) nil)}))))))
(defn start-field [^RecordConsumer rconsumer ^String field-name ^long i]
(.startField rconsumer field-name (int i)))
(defn end-field [^RecordConsumer rconsumer ^String field-name ^long i]
(.endField rconsumer field-name (int i)))
(defn start-group [^RecordConsumer rconsumer]
(.startGroup rconsumer))
(defn end-group [^RecordConsumer rconsumer] (.endGroup rconsumer))
(defn date->int-seconds
"Convert a date to seconds and return a casted int"
[^Date date]
(int (.toSeconds TimeUnit/MILLISECONDS (.getTime date))))
(defn get-map-schemas
"The schema should be a valid hive map and have the format
[^Type schema]
(let [map-type (.asGroupType (.getType (.asGroupType schema) "map"))]
[(.getType map-type "key") (.getType map-type "value")]))
(defn write-key-value
Note only the key value parts are written, the group and field for map needs to be created before and ended after this function is called for all key values"
[rconsumer ^Type schema k v]
(let [[^Type key-type ^Type val-type] (get-map-schemas schema)
key-name (.getName key-type)
val-name (.getName val-type)]
(start-group rconsumer)
(start-field rconsumer key-name 0)
(write-val rconsumer key-type k)
(end-field rconsumer key-name 0)
(start-field rconsumer val-name 1)
(write-val rconsumer val-type v)
(end-field rconsumer val-name 1)
(end-group rconsumer)))
(defmulti write-extended-val "Any object other than a Java primitive, the dispatch is based on the Type::originalType" (fn [rconsumer ^Type schema val] (.getOriginalType schema)))
(defmethod write-extended-val OriginalType/LIST [rconsumer ^Type schema val]
(when-not (or (instance? List val) (instance? ISeq val))
(throw (RuntimeException. (str "Lists must be of type List or ISeq but got " val))))
(let [^Type type (.getType (.asGroupType (.getType (.asGroupType schema) (int 0))) (int 0)) ]
(start-group rconsumer)
(start-field rconsumer "bag" 0)
(reduce (fn [_ v]
(start-group rconsumer)
(when (Util/notNilOrEmpty v)
(start-field rconsumer "array_element" 0)
(write-val rconsumer type v)
(end-field rconsumer "array_element" 0))
(end-group rconsumer))
nil
val)
(end-field rconsumer "bag" 0)
(end-group rconsumer)))
(defmethod write-extended-val OriginalType/DATE [rconsumer schema val]
-8119.patch
(write-primitive-val rconsumer PrimitiveType$PrimitiveTypeName/INT32 (date->int-seconds val)))
(defmethod write-extended-val OriginalType/MAP [rconsumer ^Type schema val]
# < GroupType optional group [ map - field - name ] ( MAP ) {
(check-is-map val)
(start-field rconsumer (.getName schema) 0)
(reduce-kv #(write-key-value rconsumer schema %2 %3) nil val)
(end-field rconsumer (.getName schema) 0))
Default is a Group and the val type must be a Map
(defmethod write-extended-val :default [rconsumer ^Type schema val]
(check-is-map val)
(start-group rconsumer)
(write-message-fields rconsumer (.asGroupType schema) val)
(end-group rconsumer))
(defn write-val
"Write a primitive or extend (List Map) value"
[rconsumer ^Type schema val]
(if (.isPrimitive schema)
(write-primitive-val rconsumer schema val)
(write-extended-val rconsumer schema val)))
(defn write-message-fields
"Write the fields of a top level message"
[rconsumer ^GroupType schema ^Map val]
(let [cnt (.getFieldCount schema)]
(loop [i 0]
(when (< i cnt)
(let [^Type field (.getType schema (int i))
field-name (.getName field)
map-v (get val field-name)]
(when (Util/notNilOrEmpty map-v)
(start-field rconsumer field-name i)
(write-val rconsumer field map-v)
(end-field rconsumer field-name i))
(recur (inc i)))))))
(defn java-write-support
"Returns an instance of WriteSupport that will correctly
serialise standard java types compatible with Hive.
Note: Repeated must have List as the original Type and will be a list type
groups must be a Map type"
[^MessageType schema meta]
{:pre [(instance? Map meta) schema]}
(let [record-consumer-state (atom nil)]
(proxy [WriteSupport] []
(init [_]
(WriteSupport$WriteContext. schema ^Map meta))
(prepareForWrite [record-consumer]
(reset! record-consumer-state record-consumer))
(write [val]
(let [^RecordConsumer rconsumer @record-consumer-state]
(.startMessage rconsumer)
(write-message-fields rconsumer schema val)
(.endMessage rconsumer)))))) |
7c6a02c046ad1b5c53ccb73bf06fe0aa3e4736a35af33f902cc7fbaf836e56a6 | nomeata/incredible | ProofGraph.hs | # LANGUAGE DataKinds , TypeFamilies , RecordWildCards , FlexibleContexts , StandaloneDeriving , FlexibleInstances , GADTs , ExistentialQuantification #
-- |
-- The proof is (natuarlly) a directed graph. For full detail, one can view
-- blocks, out-ports, connections and in-ports as nodes of the graph. This
-- module realizes this view, and provides algorithms based on that.
--
-- This module plays with type families to get a very strongly typed graph, and
-- it ties the know to have the graph as such on the heap, i.e. no maps to go
-- to the successor.
module ProofGraph where
import qualified Data.IntMap as IM
import qualified Data.Map as M
import qualified Data.IntSet as IS
import Data.Map ((!))
import Data.List
import Control.Monad
import Control.Monad.Trans.State.Strict
import Control.Monad.Trans.Writer.Strict
import Control.Monad.Trans.Class
import Control.Arrow
import Types
-- | The four kinds of graph nodes: a proof block, one of its out-ports,
-- a connection, or an in-port.
data NodeType = BlockNodeType | OutPortNodeType | InPortNodeType | ConnNodeType

-- | Value-level singleton tag for the promoted 'NodeType' kind.
data NodeTag a where
    BlockNodeTag :: NodeTag 'BlockNodeType
    OutPortNodeTag :: NodeTag 'OutPortNodeType
    InPortNodeTag :: NodeTag 'InPortNodeType
    ConnNodeTag :: NodeTag 'ConnNodeType

-- | Successor kind: block -> out-port -> connection -> in-port -> block.
type family Succ a where
    Succ 'BlockNodeType = 'OutPortNodeType
    Succ 'OutPortNodeType = 'ConnNodeType
    Succ 'ConnNodeType = 'InPortNodeType
    Succ 'InPortNodeType = 'BlockNodeType

-- | Predecessor kind; the inverse of 'Succ'.
type family Pred a where
    Pred 'BlockNodeType = 'InPortNodeType
    Pred 'OutPortNodeType = 'BlockNodeType
    Pred 'ConnNodeType = 'OutPortNodeType
    Pred 'InPortNodeType = 'ConnNodeType

-- | The key type used to identify a node of the given kind.
type family NodeKey a where
    NodeKey 'BlockNodeType = Key Block
    NodeKey 'OutPortNodeType = PortSpec
    NodeKey 'InPortNodeType = PortSpec
    NodeKey 'ConnNodeType = Key Connection

-- | A path through the graph, as a list of node uniques.
type NodePath = [NodeUniq]

-- | Globally unique integer identifying a node, across all four node kinds.
type NodeUniq = Int

-- | A graph node.  The predecessor/successor lists tie the knot directly to
-- the neighbouring nodes, so traversals need no auxiliary maps.
data Node a = Node
    { nodeType :: NodeTag a
    , nodeKey :: NodeKey a
    , nodePred :: [Node (Pred a)]
    , nodeSucc :: [Node (Succ a)]
    , nodeUniq :: NodeUniq
    }

-- | A node of any kind (existentially quantified).
data ANode = forall a. ANode (Node a)

-- | Lookup table from a node's key to the node.
type NodeMap a = M.Map (NodeKey a) (Node a)

-- | Lookup table from a node's unique back to its key.
type UniqMap a = IM.IntMap (NodeKey a)
-- | The whole proof graph: key- and unique-indexed maps for the four node
-- kinds plus a few cached node collections used by the algorithms below.
data Graph = Graph
    { blockNodes :: NodeMap 'BlockNodeType
    , inPortNodes :: NodeMap 'InPortNodeType
    , outPortNodes :: NodeMap 'OutPortNodeType
    , connectionNodes :: NodeMap 'ConnNodeType
    , uniquesToBlock :: UniqMap 'BlockNodeType
    , uniquesToInPort :: UniqMap 'InPortNodeType
    , uniquesToConn :: UniqMap 'ConnNodeType
    , localHypNodes :: UniqSet              -- ^ out-ports that are local hypotheses
    , conclusionNodes :: [Node 'BlockNodeType] -- ^ all conclusion blocks
    }
-- For debugging
-- | Render the graph as text: each block and connection node on its own line,
-- followed by one line per predecessor (←) and successor (→), shown by key.
ppGraph :: Graph -> String
ppGraph Graph{..} = unlines $ concat $
    [ show (nodeKey n) :
      [ " ← " ++ show (nodeKey p) | p <- nodePred n ] ++
      [ " → " ++ show (nodeKey p) | p <- nodeSucc n ]
    | n <- M.elems blockNodes] ++
    [ show (nodeKey n) :
      [ " ← " ++ show (nodeKey p) | p <- nodePred n ] ++
      [ " → " ++ show (nodeKey p) | p <- nodeSucc n ]
    | n <- M.elems connectionNodes]
-- | Build a key-indexed node map from node generators, numbering the nodes
-- consecutively starting at the given unique.
mkNodeMap :: Ord (NodeKey a) => Int -> [Int -> Node a] -> NodeMap a
mkNodeMap i nodes =
    M.fromList [ (nodeKey node, node) | node <- zipWith ($) nodes [i..] ]
-- | Like 'mkNodeMap', but additionally returns the reverse map from the
-- assigned uniques back to the node keys.
mkNodeMap' :: Ord (NodeKey a) => Int -> [Int -> Node a] -> (NodeMap a, UniqMap a)
mkNodeMap' i nodes = (M.fromList keyed, IM.fromList byUniq)
  where
    built  = [ (n, nodeGen n) | (n, nodeGen) <- zip [i..] nodes ]
    keyed  = [ (nodeKey node, node) | (_, node) <- built ]
    byUniq = [ (n, nodeKey node) | (n, node) <- built ]
-- | Build the full 'Graph' for a proof.  Node uniques are assigned in
-- consecutive ranges: blocks first, then out-ports, in-ports, connections.
-- The four node maps are defined mutually recursively (knot-tying), so a
-- node's neighbours are the heap objects themselves.
proof2Graph :: Context -> Proof -> Graph
proof2Graph ctxt proof = Graph {..}
  where
    (blockNodes, uniquesToBlock)= mkNodeMap' 0 $
        map go $ M.toList (blocks proof)
      where
        go (blockKey, block) =
            Node BlockNodeTag
                 blockKey
                 [inPortNodes ! (BlockPort blockKey portKey) | (portKey,_) <- inPorts]
                 [outPortNodes ! (BlockPort blockKey portKey) | (portKey,_) <- outPorts]
          where
            -- split the rule's ports into inputs and outputs
            (inPorts, outPorts) =
                partition (isPortTypeIn . portType . snd) $
                M.toList $ ports (block2Rule ctxt block)

    -- uniques [n1..) belong to the out-port nodes
    n1 = M.size (blocks proof)

    outPortNodes = mkNodeMap n1 $
        [ Node OutPortNodeTag ps [blockNodes ! blockKey] succs
        | (blockKey, block) <- M.toList (blocks proof)
        , let rule = block2Rule ctxt block
        , (portKey, port) <- M.toList (ports rule)
        , isPortTypeOut (portType port)
        , let ps = BlockPort blockKey portKey
        , let succs = map (connectionNodes !) $ M.findWithDefault [] ps outPortToConn
        ]
    n2 = n1 + M.size outPortNodes

    -- which connections start at which out-port
    outPortToConn :: M.Map PortSpec [Key Connection]
    outPortToConn = M.fromListWith (++)
        [ (ps, [connKey])
        | (connKey, conn) <- M.toList (connections proof)
        , Just ps <- return $ connFrom conn
        ]

    (inPortNodes, uniquesToInPort) = mkNodeMap' n2 $
        [ Node InPortNodeTag ps preds [blockNodes ! blockKey]
        | (blockKey, block) <- M.toList (blocks proof)
        , let rule = block2Rule ctxt block
        , (portKey, port) <- M.toList (ports rule)
        , isPortTypeIn (portType port)
        , let ps = BlockPort blockKey portKey
        , let preds = map (connectionNodes !) $ M.findWithDefault [] ps inPortToConn
        ]
    n3 = n2 + M.size inPortNodes

    -- which connections end at which in-port
    inPortToConn :: M.Map PortSpec [Key Connection]
    inPortToConn = M.fromListWith (++)
        [ (ps, [connKey])
        | (connKey, conn) <- M.toList (connections proof)
        , Just ps <- return $ connTo conn
        ]

    (connectionNodes, uniquesToConn)= mkNodeMap' n3 $
        [ Node ConnNodeTag
               connKey
               [outPortNodes ! ps | Just ps <- return $ connFrom conn]
               [inPortNodes ! ps | Just ps <- return $ connTo conn]
        | (connKey, conn) <- M.toList (connections proof)
        ]

    -- Some cached lists of special nodes
    localHypNodes =
        IS.fromList
        [ nodeUniq node
        | (blockKey, block) <- M.toList (blocks proof)
        , let rule = block2Rule ctxt block
        , (hypKey, Port {portType = PTLocalHyp{}}) <- M.toList (ports rule)
        , let node = outPortNodes ! BlockPort blockKey hypKey
        ]

    conclusionNodes = [ blockNodes ! blockKey
                      | (blockKey, ConclusionBlock {}) <- M.toList $ blocks proof ]
-- | Finds a list of cycles from the given node to itself, ignoring the nodes
-- from stopAt along the way, and returns such paths.
-- (the second comment line above had lost its "--" marker in extraction)
calcCycle :: Node a -> UniqSet -> [NodePath]
calcCycle start stopAt = evalMarkM $ goBeyond [] start
  where
    -- step over n's successors, extending the path
    goBeyond :: NodePath -> Node a -> MarkM [NodePath]
    goBeyond path n = concat <$> mapM remember (nodeSucc n)
      where
        remember n = go (nodeUniq n:path) n
    -- back at the start: a cycle; in stopAt: dead end; else mark and continue
    go :: NodePath -> Node a -> MarkM [NodePath]
    go path n | nodeUniq n == nodeUniq start = return [path]
    go _ n | nodeUniq n `IS.member` stopAt = return []
    go path n = markAndFollow (nodeUniq n) $ goBeyond path n
-- | Starting with the given node,
-- this moves forward from the node, and then backwards, always ignoring the
-- nodes listed in stopAt.
--
-- It returns a map from all visited nodes, with the path from the start node
-- to the visited node as values.
--
-- The nodes mentioned in stopAt are _not_ included in the returned map.
-- (the third comment line above had lost its "--" marker in extraction)
calcNonScope :: Node a -> UniqSet -> UniqSet -> IM.IntMap NodePath
calcNonScope start stopAtForward stopAtBackward = flip execState IM.empty $ do
    backActions <- execWriterT (goForward [] start)
    sequence_ backActions
  where
    -- This goes only forward. It does not go backwards directly, but rather remembers
    -- where to start going backwards and returns these actions in a list.
    -- Going back too early might cut off certain paths (see trickyEscape test case)
    goForward :: NodePath -> Node a -> NonScopeDeferM ()
    goForward path n
        | nu `IS.member` stopAtForward = return ()
        | otherwise = do
            seen <- lift $ gets (nu `IM.member`)
            unless seen $ do
                lift $ modify $ IM.insert nu path'
                tell $ map (goBackward path') (nodePred n)
                mapM_ (goForward path') (nodeSucc n)
      where nu = nodeUniq n
            path' = nu:path

    goBackward :: NodePath -> Node a -> NonScopeM ()
    goBackward path n
        | nu `IS.member` stopAtBackward = return ()
        | otherwise = do
            seen <- gets (nu `IM.member`)
            unless seen $ do
                modify $ IM.insert nu path'
                mapM_ (goBackward path') (nodePred n)
      where nu = nodeUniq n
            path' = nu:path
-- | State of the non-scope walk: visited node unique -> path that reached it.
type NonScopeM = State (IM.IntMap NodePath)
-- | Forward pass: also collects the deferred backward walks via Writer.
type NonScopeDeferM = WriterT [NonScopeM ()] NonScopeM
-- | Every node reachable from the start node following edges in *both*
-- directions.  NOTE(review): despite the name this computes the weakly
-- connected component, not a strongly connected one — confirm intended.
calcSCC :: Node a -> UniqSet
calcSCC start = execMarkM (go start)
  where
    go :: Node a -> MarkM ()
    go n = markAndFollow (nodeUniq n)
                         (mapM_ go (nodePred n) >> mapM_ go (nodeSucc n))
-- | The backwards slice: every node from which one of the given start nodes
-- can be reached via predecessor edges (the start nodes themselves included).
backwardsSlice :: [Node a] -> [NodeUniq]
backwardsSlice starts = IS.toList (execMarkM (mapM_ goBackward starts))
  where
    goBackward :: Node a -> MarkM ()
    goBackward n = markAndFollow (nodeUniq n) (mapM_ goBackward (nodePred n))
-- | Resolve a unique to a block key, if it names a block node.
toBlockNodeKey :: Graph -> NodeUniq -> Maybe (Key Block)
toBlockNodeKey g u = IM.lookup u (uniquesToBlock g)

-- | Resolve a unique to a connection key, if it names a connection node.
toConnKey :: Graph -> NodeUniq -> Maybe (Key Connection)
toConnKey g u = IM.lookup u (uniquesToConn g)

-- | Resolve a unique to an in-port spec, if it names an in-port node.
toInPortKey :: Graph -> NodeUniq -> Maybe PortSpec
toInPortKey g u = IM.lookup u (uniquesToInPort g)
-- | The set of node uniques marked so far.
type UniqSet = IS.IntSet

-- | Marking monad: state is the set of uniques already visited.
type MarkM = State UniqSet

-- | Run a marking computation, returning the final set of marked uniques.
execMarkM :: MarkM a -> UniqSet
execMarkM m = execState m IS.empty

-- | Run a marking computation, returning its result.
evalMarkM :: MarkM a -> a
evalMarkM m = evalState m IS.empty

-- | If k was already marked, short-circuit with 'mempty'; otherwise mark it
-- and run the continuation.  This is what terminates the walks on cycles.
markAndFollow :: Monoid m => Int -> MarkM m -> MarkM m
markAndFollow k act = do
    alreadySeen <- gets (IS.member k)
    if alreadySeen
        then return mempty
        else modify (IS.insert k) >> act
| null | https://raw.githubusercontent.com/nomeata/incredible/d18ada4ae7ce1c7ca268c050ee688b633a307c2e/logic/ProofGraph.hs | haskell | |
The proof is (natuarlly) a directed graph. For full detail, one can view
blocks, out-ports, connections and in-ports as nodes of the graph. This
module realizes this view, and provides algorithms based on that.
This module plays with type families to get a very strongly typed graph, and
it ties the know to have the graph as such on the heap, i.e. no maps to go
to the successor.
For debugging
Some cached lists of special nodes
| Finds a list of cycles from the given node to itself, ignoring the nodes
| Starting with the given node,
this moves forward from the node, and then backwards, always ignoring the
It returns a map from all visited nodes, with the path from the start node
to the visited node as values.
The nodes mentiond in stopAt are _not_ included in the returned map.
This goes only forward. It does not go backwards directly, but rather remembers
where to start going backwards and returns these actions in a list.
Going back too early might cut off certain paths (see trickyEscape test case) | # LANGUAGE DataKinds , TypeFamilies , RecordWildCards , FlexibleContexts , StandaloneDeriving , FlexibleInstances , GADTs , ExistentialQuantification #
module ProofGraph where
import qualified Data.IntMap as IM
import qualified Data.Map as M
import qualified Data.IntSet as IS
import Data.Map ((!))
import Data.List
import Control.Monad
import Control.Monad.Trans.State.Strict
import Control.Monad.Trans.Writer.Strict
import Control.Monad.Trans.Class
import Control.Arrow
import Types
data NodeType = BlockNodeType | OutPortNodeType | InPortNodeType | ConnNodeType
data NodeTag a where
BlockNodeTag :: NodeTag 'BlockNodeType
OutPortNodeTag :: NodeTag 'OutPortNodeType
InPortNodeTag :: NodeTag 'InPortNodeType
ConnNodeTag :: NodeTag 'ConnNodeType
type family Succ a where
Succ 'BlockNodeType = 'OutPortNodeType
Succ 'OutPortNodeType = 'ConnNodeType
Succ 'ConnNodeType = 'InPortNodeType
Succ 'InPortNodeType = 'BlockNodeType
type family Pred a where
Pred 'BlockNodeType = 'InPortNodeType
Pred 'OutPortNodeType = 'BlockNodeType
Pred 'ConnNodeType = 'OutPortNodeType
Pred 'InPortNodeType = 'ConnNodeType
type family NodeKey a where
NodeKey 'BlockNodeType = Key Block
NodeKey 'OutPortNodeType = PortSpec
NodeKey 'InPortNodeType = PortSpec
NodeKey 'ConnNodeType = Key Connection
type NodePath = [NodeUniq]
type NodeUniq = Int
data Node a = Node
{ nodeType :: NodeTag a
, nodeKey :: NodeKey a
, nodePred :: [Node (Pred a)]
, nodeSucc :: [Node (Succ a)]
, nodeUniq :: NodeUniq
}
data ANode = forall a. ANode (Node a)
type NodeMap a = M.Map (NodeKey a) (Node a)
type UniqMap a = IM.IntMap (NodeKey a)
data Graph = Graph
{ blockNodes :: NodeMap 'BlockNodeType
, inPortNodes :: NodeMap 'InPortNodeType
, outPortNodes :: NodeMap 'OutPortNodeType
, connectionNodes :: NodeMap 'ConnNodeType
, uniquesToBlock :: UniqMap 'BlockNodeType
, uniquesToInPort :: UniqMap 'InPortNodeType
, uniquesToConn :: UniqMap 'ConnNodeType
, localHypNodes :: UniqSet
, conclusionNodes :: [Node 'BlockNodeType]
}
ppGraph :: Graph -> String
ppGraph Graph{..} = unlines $ concat $
[ show (nodeKey n) :
[ " ← " ++ show (nodeKey p) | p <- nodePred n ] ++
[ " → " ++ show (nodeKey p) | p <- nodeSucc n ]
| n <- M.elems blockNodes] ++
[ show (nodeKey n) :
[ " ← " ++ show (nodeKey p) | p <- nodePred n ] ++
[ " → " ++ show (nodeKey p) | p <- nodeSucc n ]
| n <- M.elems connectionNodes]
mkNodeMap :: Ord (NodeKey a) => Int -> [Int -> Node a] -> NodeMap a
mkNodeMap i nodes =
M.fromList [ (nodeKey node, node) | (n,nodeGen) <- zip [i..] nodes, let node = nodeGen n ]
mkNodeMap' :: Ord (NodeKey a) => Int -> [Int -> Node a] -> (NodeMap a, UniqMap a)
mkNodeMap' i nodes =
(M.fromList *** IM.fromList) $
unzip $
[ ( (key, node)
, (n,key)
)
| (n,nodeGen) <- zip [i..] nodes
, let node = nodeGen n
, let key = nodeKey node
]
proof2Graph :: Context -> Proof -> Graph
proof2Graph ctxt proof = Graph {..}
where
(blockNodes, uniquesToBlock)= mkNodeMap' 0 $
map go $ M.toList (blocks proof)
where
go (blockKey, block) =
Node BlockNodeTag
blockKey
[inPortNodes ! (BlockPort blockKey portKey) | (portKey,_) <- inPorts]
[outPortNodes ! (BlockPort blockKey portKey) | (portKey,_) <- outPorts]
where
(inPorts, outPorts) =
partition (isPortTypeIn . portType . snd) $
M.toList $ ports (block2Rule ctxt block)
n1 = M.size (blocks proof)
outPortNodes = mkNodeMap n1 $
[ Node OutPortNodeTag ps [blockNodes ! blockKey] succs
| (blockKey, block) <- M.toList (blocks proof)
, let rule = block2Rule ctxt block
, (portKey, port) <- M.toList (ports rule)
, isPortTypeOut (portType port)
, let ps = BlockPort blockKey portKey
, let succs = map (connectionNodes !) $ M.findWithDefault [] ps outPortToConn
]
n2 = n1 + M.size outPortNodes
outPortToConn :: M.Map PortSpec [Key Connection]
outPortToConn = M.fromListWith (++)
[ (ps, [connKey])
| (connKey, conn) <- M.toList (connections proof)
, Just ps <- return $ connFrom conn
]
(inPortNodes, uniquesToInPort) = mkNodeMap' n2 $
[ Node InPortNodeTag ps preds [blockNodes ! blockKey]
| (blockKey, block) <- M.toList (blocks proof)
, let rule = block2Rule ctxt block
, (portKey, port) <- M.toList (ports rule)
, isPortTypeIn (portType port)
, let ps = BlockPort blockKey portKey
, let preds = map (connectionNodes !) $ M.findWithDefault [] ps inPortToConn
]
n3 = n2 + M.size inPortNodes
inPortToConn :: M.Map PortSpec [Key Connection]
inPortToConn = M.fromListWith (++)
[ (ps, [connKey])
| (connKey, conn) <- M.toList (connections proof)
, Just ps <- return $ connTo conn
]
(connectionNodes, uniquesToConn)= mkNodeMap' n3 $
[ Node ConnNodeTag
connKey
[outPortNodes ! ps | Just ps <- return $ connFrom conn]
[inPortNodes ! ps | Just ps <- return $ connTo conn]
| (connKey, conn) <- M.toList (connections proof)
]
localHypNodes =
IS.fromList
[ nodeUniq node
| (blockKey, block) <- M.toList (blocks proof)
, let rule = block2Rule ctxt block
, (hypKey, Port {portType = PTLocalHyp{}}) <- M.toList (ports rule)
, let node = outPortNodes ! BlockPort blockKey hypKey
]
conclusionNodes = [ blockNodes ! blockKey
| (blockKey, ConclusionBlock {}) <- M.toList $ blocks proof ]
from stopAt along the way , and returns such paths .
calcCycle :: Node a -> UniqSet -> [NodePath]
calcCycle start stopAt = evalMarkM $ goBeyond [] start
where
goBeyond :: NodePath -> Node a -> MarkM [NodePath]
goBeyond path n = concat <$> mapM remember (nodeSucc n)
where
remember n = go (nodeUniq n:path) n
go :: NodePath -> Node a -> MarkM [NodePath]
go path n | nodeUniq n == nodeUniq start = return [path]
go _ n | nodeUniq n `IS.member` stopAt = return []
go path n = markAndFollow (nodeUniq n) $ goBeyond path n
nodes listed in stopAt .
calcNonScope :: Node a -> UniqSet -> UniqSet -> IM.IntMap NodePath
calcNonScope start stopAtForward stopAtBackward = flip execState IM.empty $ do
backActions <- execWriterT (goForward [] start)
sequence_ backActions
where
goForward :: NodePath -> Node a -> NonScopeDeferM ()
goForward path n | nu `IS.member` stopAtForward = return ()
| otherwise = do
seen <- lift $ gets (nu `IM.member`)
unless seen $ do
lift $ modify $ IM.insert nu path'
tell $ map (goBackward path') (nodePred n)
mapM_ (goForward path') (nodeSucc n)
where nu = nodeUniq n
path' = nu:path
goBackward :: NodePath -> Node a -> NonScopeM ()
goBackward path n | nu `IS.member` stopAtBackward = return ()
| otherwise = do
seen <- gets (nu `IM.member`)
unless seen $ do
modify $ IM.insert nu path'
mapM_ (goBackward path') (nodePred n)
where nu = nodeUniq n
path' = nu:path
type NonScopeM = State (IM.IntMap NodePath)
type NonScopeDeferM = WriterT [NonScopeM ()] NonScopeM
calcSCC :: Node a -> UniqSet
calcSCC start = execMarkM $ go start
where
go :: Node a -> MarkM ()
go n = markAndFollow (nodeUniq n) $ do
mapM_ go (nodePred n)
mapM_ go (nodeSucc n)
backwardsSlice :: [Node a] -> [NodeUniq]
backwardsSlice starts = IS.toList $ execMarkM $ mapM_ goBackward starts
where
goBackward :: Node a -> MarkM ()
goBackward n = markAndFollow (nodeUniq n) $ do
mapM_ goBackward (nodePred n)
toBlockNodeKey :: Graph -> NodeUniq -> Maybe (Key Block)
toBlockNodeKey graph nu = IM.lookup nu (uniquesToBlock graph)
toConnKey :: Graph -> NodeUniq -> Maybe (Key Connection)
toConnKey graph nu = IM.lookup nu (uniquesToConn graph)
toInPortKey :: Graph -> NodeUniq -> Maybe PortSpec
toInPortKey graph nu = IM.lookup nu (uniquesToInPort graph)
-- | A set of node uniques, used to mark visited nodes.
type UniqSet = IS.IntSet

-- | Marking monad: state is the set of uniques visited so far.
type MarkM = State UniqSet

-- | Run a marking computation from an empty visited set and return the
-- final visited set.
execMarkM :: MarkM a -> UniqSet
execMarkM a = execState a IS.empty

-- | Run a marking computation from an empty visited set and return its
-- result, discarding the visited set.
evalMarkM :: MarkM a -> a
evalMarkM a = evalState a IS.empty
-- | Run @a@ only when @k@ has not been visited yet, marking @k@ as
-- visited first; yields 'mempty' when @k@ was already marked.
markAndFollow :: Monoid m => Int -> MarkM m -> MarkM m
markAndFollow k a = do
  visited <- gets (k `IS.member`)
  if visited
    then return mempty
    else do
      modify (IS.insert k)
      a
|
6b301c238a7303bc1d8f944c16d83e250025a8a9477cd10ca7dbfff7974f7a69 | basho/mochiweb | mochiweb_request.erl | @author < >
%% @copyright 2007 Mochi Media, Inc.
%%
%% Permission is hereby granted, free of charge, to any person obtaining a
%% copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction , including without limitation
%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software , and to permit persons to whom the
%% Software is furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
%% DEALINGS IN THE SOFTWARE.
%% @doc MochiWeb HTTP Request abstraction.
-module(mochiweb_request).
-author('').
-include_lib("kernel/include/file.hrl").
-include("internal.hrl").
-define(QUIP, "Any of you quaids got a smint?").
-export([new/5, new/6]).
-export([get_header_value/2, get_primary_header_value/2, get_combined_header_value/2, get/2, dump/1]).
-export([send/2, recv/2, recv/3, recv_body/1, recv_body/2, stream_body/4, stream_body/5]).
-export([start_response/2, start_response_length/2, start_raw_response/2]).
-export([respond/2, ok/2]).
-export([not_found/1, not_found/2]).
-export([parse_post/1, parse_qs/1]).
-export([should_close/1, cleanup/1]).
-export([parse_cookie/1, get_cookie_value/2]).
-export([serve_file/3, serve_file/4]).
-export([accepted_encodings/2]).
-export([accepts_content_type/2, accepted_content_types/2]).
-define(SAVE_QS, mochiweb_request_qs).
-define(SAVE_PATH, mochiweb_request_path).
-define(SAVE_RECV, mochiweb_request_recv).
-define(SAVE_BODY, mochiweb_request_body).
-define(SAVE_BODY_LENGTH, mochiweb_request_body_length).
-define(SAVE_POST, mochiweb_request_post).
-define(SAVE_COOKIE, mochiweb_request_cookie).
-define(SAVE_FORCE_CLOSE, mochiweb_request_force_close).
%% @type key() = atom() | string() | binary()
%% @type value() = atom() | string() | binary() | integer()
%% @type headers(). A mochiweb_headers structure.
%% @type request() = {mochiweb_request,[_Socket,_Method,_RawPath,_Version,_Headers]}
%% @type response(). A mochiweb_response tuple module instance.
%% @type ioheaders() = headers() | [{key(), value()}].
%% 5 minute default idle timeout
-define(IDLE_TIMEOUT, 300000).
%% Maximum recv_body() length of 1MB
-define(MAX_RECV_BODY, (1024*1024)).
%% @spec new(Socket, Method, RawPath, Version, headers()) -> request()
%% @doc Create a new request instance with an empty options list.
new(Socket, Method, RawPath, Version, Headers) ->
    new(Socket, [], Method, RawPath, Version, Headers).

%% @spec new(Socket, Opts, Method, RawPath, Version, headers()) -> request()
%% @doc Create a new request instance (a parameterized-module style tuple).
new(Socket, Opts, Method, RawPath, Version, Headers) ->
    {?MODULE, [Socket, Opts, Method, RawPath, Version, Headers]}.
%% @spec get_header_value(K, request()) -> undefined | Value
%% @doc Get the value of a given request header.
get_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
    mochiweb_headers:get_value(K, Headers).

%% @doc Get the primary value of a given request header; delegates to
%%      mochiweb_headers:get_primary_value/2.
get_primary_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
    mochiweb_headers:get_primary_value(K, Headers).

%% @doc Get the combined value of a given request header; delegates to
%%      mochiweb_headers:get_combined_value/2.
get_combined_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
    mochiweb_headers:get_combined_value(K, Headers).
%% @type field() = socket | scheme | method | raw_path | version | headers | peer | path | body_length | range
%% @spec get(field(), request()) -> term()
%% @doc Return the internal representation of the given field. If
%% <code>socket</code> is requested on a HTTPS connection, then
%% an ssl socket will be returned as <code>{ssl, SslSocket}</code>.
%% You can use <code>SslSocket</code> with the <code>ssl</code>
%% application, eg: <code>ssl:peercert(SslSocket)</code>.
get(socket, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    Socket;
%% Scheme derives from the socket type: plain TCP -> http, SSL -> https.
get(scheme, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    case mochiweb_socket:type(Socket) of
        plain ->
            http;
        ssl ->
            https
    end;
get(method, {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}) ->
    Method;
get(raw_path, {?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) ->
    RawPath;
get(version, {?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}) ->
    Version;
get(headers, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
    Headers;
%% Peer address as a string.  When the connection comes from a 10.x.x.x
%% address or from 127.0.0.1 (a local proxy, presumably), the last entry
%% of the X-Forwarded-For header is used instead, when present.
get(peer, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case mochiweb_socket:peername(Socket) of
        {ok, {Addr={10, _, _, _}, _Port}} ->
            case get_header_value("x-forwarded-for", THIS) of
                undefined ->
                    inet_parse:ntoa(Addr);
                Hosts ->
                    string:strip(lists:last(string:tokens(Hosts, ",")))
            end;
        {ok, {{127, 0, 0, 1}, _Port}} ->
            case get_header_value("x-forwarded-for", THIS) of
                undefined ->
                    "127.0.0.1";
                Hosts ->
                    string:strip(lists:last(string:tokens(Hosts, ",")))
            end;
        {ok, {Addr, _Port}} ->
            inet_parse:ntoa(Addr);
        {error, enotconn} ->
            %% Client already disconnected; abort this handler quietly.
            exit(normal)
    end;
%% Unquoted path portion of the URL, cached in the process dictionary.
get(path, {?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) ->
    case erlang:get(?SAVE_PATH) of
        undefined ->
            {Path0, _, _} = mochiweb_util:urlsplit_path(RawPath),
            Path = mochiweb_util:unquote(Path0),
            put(?SAVE_PATH, Path),
            Path;
        Cached ->
            Cached
    end;
%% Body length as computed by body_length/1, cached in the process
%% dictionary (wrapped in {cached, _} so undefined can be cached too).
get(body_length, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case erlang:get(?SAVE_BODY_LENGTH) of
        undefined ->
            BodyLength = body_length(THIS),
            put(?SAVE_BODY_LENGTH, {cached, BodyLength}),
            BodyLength;
        {cached, Cached} ->
            Cached
    end;
%% Parsed Range header, or undefined when the header is absent.
get(range, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case get_header_value(range, THIS) of
        undefined ->
            undefined;
        RawRange ->
            mochiweb_http:parse_range_request(RawRange)
    end;
get(opts, {?MODULE, [_Socket, Opts, _Method, _RawPath, _Version, _Headers]}) ->
    Opts.
%% @spec dump(request()) -> {mochiweb_request, [{atom(), term()}]}
%% @doc Dump the internal representation to a "human readable" set of terms
%%      for debugging/inspection purposes.
dump({?MODULE, [_Socket, Opts, Method, RawPath, Version, Headers]}) ->
    {?MODULE, [{method, Method},
               {version, Version},
               {raw_path, RawPath},
               {opts, Opts},
               {headers, mochiweb_headers:to_list(Headers)}]}.
%% @spec send(iodata(), request()) -> ok
%% @doc Send data over the socket; exits normally on any send failure so
%%      the connection handler terminates without a crash report.
send(Data, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    case mochiweb_socket:send(Socket, Data) of
        ok ->
            ok;
        _ ->
            exit(normal)
    end.
%% @spec recv(integer(), request()) -> binary()
%% @doc Receive Length bytes from the client as a binary, with the default
%%      idle timeout.
recv(Length, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    recv(Length, ?IDLE_TIMEOUT, THIS).

%% @spec recv(integer(), integer(), request()) -> binary()
%% @doc Receive Length bytes from the client as a binary, with the given
%%      Timeout in msec.  Exits normally on socket error or timeout.
recv(Length, Timeout, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    case mochiweb_socket:recv(Socket, Length, Timeout) of
        {ok, Data} ->
            %% Remember that body data has been consumed; should_close/1
            %% relies on this flag.
            put(?SAVE_RECV, true),
            Data;
        _ ->
            exit(normal)
    end.
%% @spec body_length(request()) -> undefined | chunked | unknown_transfer_encoding | integer()
%% @doc Infer body length from transfer-encoding and content-length headers.
%%      Transfer-Encoding values are case-insensitive per RFC 2616 section
%%      3.6, so "Chunked" is recognized as well as "chunked".
body_length({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case get_header_value("transfer-encoding", THIS) of
        undefined ->
            %% No Transfer-Encoding: fall back to Content-Length, which may
            %% legitimately be absent (no body).
            case get_combined_header_value("content-length", THIS) of
                undefined ->
                    undefined;
                Length ->
                    list_to_integer(Length)
            end;
        Encoding ->
            %% Match the transfer-coding token case-insensitively; report
            %% anything other than chunked with its original spelling.
            case string:to_lower(Encoding) of
                "chunked" ->
                    chunked;
                _ ->
                    {unknown_transfer_encoding, Encoding}
            end
    end.
%% @spec recv_body(request()) -> binary()
%% @doc Receive the body of the HTTP request (defined by Content-Length).
%%      Will only receive up to the default max-body length of 1MB.
recv_body({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    recv_body(?MAX_RECV_BODY, THIS).

%% @spec recv_body(integer(), request()) -> binary()
%% @doc Receive the body of the HTTP request (defined by Content-Length).
%%      Will receive up to MaxBody bytes; exits with body_too_large beyond
%%      that.  The body is cached in the process dictionary, so repeated
%%      calls return the same binary.
recv_body(MaxBody, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case erlang:get(?SAVE_BODY) of
        undefined ->
            %% we could use a sane constant for max chunk size
            Body = stream_body(?MAX_RECV_BODY, fun
                %% {0, _} marks end of body: fold the accumulated reversed
                %% chunk list into one binary.
                ({0, _ChunkedFooter}, {_LengthAcc, BinAcc}) ->
                    iolist_to_binary(lists:reverse(BinAcc));
                ({Length, Bin}, {LengthAcc, BinAcc}) ->
                    NewLength = Length + LengthAcc,
                    if NewLength > MaxBody ->
                        exit({body_too_large, chunked});
                    true ->
                        {NewLength, [Bin | BinAcc]}
                    end
                end, {0, []}, MaxBody, THIS),
            put(?SAVE_BODY, Body),
            Body;
        Cached -> Cached
    end.
%% @doc Stream the request body to ChunkFun, threading FunState through
%%      each call.  Honours Expect: 100-continue, chunked
%%      transfer-encoding, and (for Content-Length bodies) an optional
%%      MaxBodyLength limit.
stream_body(MaxChunkSize, ChunkFun, FunState, {?MODULE,[_Socket,_Opts,_Method,_RawPath,_Version,_Headers]}=THIS) ->
    stream_body(MaxChunkSize, ChunkFun, FunState, undefined, THIS).

stream_body(MaxChunkSize, ChunkFun, FunState, MaxBodyLength,
            {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    Expect = case get_header_value("expect", THIS) of
                 undefined ->
                     undefined;
                 Value when is_list(Value) ->
                     string:to_lower(Value)
             end,
    case Expect of
        "100-continue" ->
            %% Client is waiting for permission before sending the body.
            _ = start_raw_response({100, gb_trees:empty()}, THIS),
            ok;
        _Else ->
            ok
    end,
    case body_length(THIS) of
        undefined ->
            undefined;
        {unknown_transfer_encoding, Unknown} ->
            exit({unknown_transfer_encoding, Unknown});
        chunked ->
            %% In this case the MaxBody is actually used to
            %% determine the maximum allowed size of a single
            %% chunk.
            stream_chunked_body(MaxChunkSize, ChunkFun, FunState, THIS);
        0 ->
            <<>>;
        Length when is_integer(Length) ->
            case MaxBodyLength of
                MaxBodyLength when is_integer(MaxBodyLength), MaxBodyLength < Length ->
                    exit({body_too_large, content_length});
                _ ->
                    stream_unchunked_body(MaxChunkSize,Length, ChunkFun, FunState, THIS)
            end
    end.
%% @spec start_response({integer(), ioheaders()}, request()) -> response()
%% @doc Start the HTTP response by sending the Code HTTP response and
%%      ResponseHeaders. The server will set header defaults such as Server
%%      and Date if not present in ResponseHeaders.
start_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    start_raw_response({Code, ResponseHeaders}, THIS).

%% @spec start_raw_response({integer(), headers()}, request()) -> response()
%% @doc Start the HTTP response by sending the Code HTTP response and
%%      ResponseHeaders.
start_raw_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    {Header, Response} = format_response_header({Code, ResponseHeaders}, THIS),
    send(Header, THIS),
    Response.

%% @spec start_response_length({integer(), ioheaders(), integer()}, request()) -> response()
%% @doc Start the HTTP response by sending the Code HTTP response and
%%      ResponseHeaders including a Content-Length of Length. The server
%%      will set header defaults such as Server
%%      and Date if not present in ResponseHeaders.
start_response_length({Code, ResponseHeaders, Length},
                      {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = mochiweb_headers:enter("Content-Length", Length, HResponse),
    start_response({Code, HResponse1}, THIS).

%% @spec format_response_header({integer(), ioheaders()} | {integer(), ioheaders(), integer()}, request()) -> {iolist(), response()}
%% @doc Format the HTTP response header, including the Code HTTP response and
%%      ResponseHeaders including an optional Content-Length of Length. The
%%      server will set header defaults such as Server
%%      and Date if not present in ResponseHeaders.
format_response_header({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}=THIS) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = mochiweb_headers:default_from_list(server_headers(), HResponse),
    %% Advertise Connection: close whenever this request decides the
    %% connection cannot be kept alive.
    HResponse2 = case should_close(THIS) of
                     true ->
                         mochiweb_headers:enter("Connection", "close", HResponse1);
                     false ->
                         HResponse1
                 end,
    F = fun ({K, V}, Acc) ->
                [mochiweb_util:make_io(K), <<": ">>, V, <<"\r\n">> | Acc]
        end,
    End = lists:foldl(F, [<<"\r\n">>], mochiweb_headers:to_list(HResponse2)),
    Response = mochiweb:new_response({THIS, Code, HResponse2}),
    {[make_version(Version), make_code(Code), <<"\r\n">> | End], Response};
format_response_header({Code, ResponseHeaders, Length},
                       {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = mochiweb_headers:enter("Content-Length", Length, HResponse),
    format_response_header({Code, HResponse1}, THIS).
%% @spec respond({integer(), ioheaders(), iodata() | chunked | {file, IoDevice}}, request()) -> response()
%% @doc Start the HTTP response with start_response, and send Body to the
%%      client (if the get(method) /= 'HEAD'). The Content-Length header
%%      will be set by the Body length, and the server will insert header
%%      defaults.
respond({Code, ResponseHeaders, {file, IoDevice}},
        {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}=THIS) ->
    %% File body: Content-Length comes from the file size; the file is
    %% streamed rather than read whole into memory.
    Length = mochiweb_io:iodevice_size(IoDevice),
    Response = start_response_length({Code, ResponseHeaders, Length}, THIS),
    case Method of
        'HEAD' ->
            ok;
        _ ->
            mochiweb_io:iodevice_stream(fun(IOData) -> send(IOData,THIS) end, IoDevice)
    end,
    Response;
respond({Code, ResponseHeaders, chunked}, {?MODULE, [_Socket, _Opts, Method, _RawPath, Version, _Headers]}=THIS) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = case Method of
                     'HEAD' ->
                         %% This is what Google does: the body is chunked
                         %% but HEAD gets Content-Length: 0.  The RFC is
                         %% ambiguous so emulating Google is smart.
                         mochiweb_headers:enter("Content-Length", "0",
                                                HResponse);
                     _ when Version >= {1, 1} ->
                         %% Only use chunked encoding for HTTP/1.1
                         mochiweb_headers:enter("Transfer-Encoding", "chunked",
                                                HResponse);
                     _ ->
                         %% For pre-1.1 clients we send the data as-is
                         %% without a Content-Length header and without
                         %% chunk delimiters. Since the end of the document
                         %% is now ambiguous we must force a close.
                         put(?SAVE_FORCE_CLOSE, true),
                         HResponse
                 end,
    start_response({Code, HResponse1}, THIS);
respond({Code, ResponseHeaders, Body}, {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}=THIS) ->
    %% In-memory body: header and body are sent together; HEAD requests
    %% only get the header.
    {Header, Response} = format_response_header({Code, ResponseHeaders, iolist_size(Body)}, THIS),
    case Method of
        'HEAD' -> send(Header, THIS);
        _ -> send([Header, Body], THIS)
    end,
    Response.
%% @spec not_found(request()) -> response()
%% @doc Alias for <code>not_found([])</code>.
not_found({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    not_found([], THIS).

%% @spec not_found(ExtraHeaders, request()) -> response()
%% @doc Alias for <code>respond({404, [{"Content-Type", "text/plain"}
%%      | ExtraHeaders], <<"Not found.">>})</code>.
not_found(ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    respond({404, [{"Content-Type", "text/plain"} | ExtraHeaders],
             <<"Not found.">>}, THIS).
%% @spec ok({value(), iodata()} | {value(), ioheaders(), iodata() | {file, IoDevice}}, request()) ->
%%           response()
%% @doc respond({200, [{"Content-Type", ContentType} | Headers], Body}),
%%      honouring a valid Range request header with a 206 when present.
ok({ContentType, Body}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    ok({ContentType, [], Body}, THIS);
ok({ContentType, ResponseHeaders, Body}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    case THIS:get(range) of
        X when (X =:= undefined orelse X =:= fail) orelse Body =:= chunked ->
            %% Range header not supported when chunked, return 200 and
            %% provide the full response.
            HResponse1 = mochiweb_headers:enter("Content-Type", ContentType,
                                                HResponse),
            respond({200, HResponse1, Body}, THIS);
        Ranges ->
            {PartList, Size} = range_parts(Body, Ranges),
            case PartList of
                [] -> %% no valid ranges
                    HResponse1 = mochiweb_headers:enter("Content-Type",
                                                        ContentType,
                                                        HResponse),
                    %% could be 416, for now we'll just return 200
                    respond({200, HResponse1, Body}, THIS);
                PartList ->
                    {RangeHeaders, RangeBody} =
                        mochiweb_multipart:parts_to_body(PartList, ContentType, Size),
                    HResponse1 = mochiweb_headers:enter_from_list(
                                   [{"Accept-Ranges", "bytes"} |
                                    RangeHeaders],
                                   HResponse),
                    respond({206, HResponse1, RangeBody}, THIS)
            end
    end.
%% @spec should_close(request()) -> bool()
%% @doc Return true if the connection must be closed. If false, using
%%      Keep-Alive should be safe.
should_close({?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}=THIS) ->
    %% Forced close (e.g. unframed pre-1.1 chunked output) and unread
    %% request data are tracked in the process dictionary.
    ForceClose = erlang:get(?SAVE_FORCE_CLOSE) =/= undefined,
    DidNotRecv = erlang:get(?SAVE_RECV) =:= undefined,
    ForceClose orelse Version < {1, 0}
        %% Connection: close
        orelse is_close(get_header_value("connection", THIS))
        %% HTTP 1.0 requires Connection: Keep-Alive for persistence
        orelse (Version =:= {1, 0}
                andalso get_header_value("connection", THIS) =/= "Keep-Alive")
        %% unread data left on the socket, can't safely continue
        orelse (DidNotRecv
                andalso get_combined_header_value("content-length", THIS) =/= undefined
                andalso list_to_integer(get_combined_header_value("content-length", THIS)) > 0)
        orelse (DidNotRecv
                andalso get_header_value("transfer-encoding", THIS) =:= "chunked").
%% Case-insensitive test for a "Connection: close" header value.  Only a
%% 5-character list can spell "close", so everything else (including
%% undefined) is immediately rejected.
is_close(S = [_, _, _, _, _]) ->
    "close" =:= string:to_lower(S);
is_close(_) ->
    false.
%% @spec cleanup(request()) -> ok
%% @doc Clean up any junk in the process dictionary, required before
%%      continuing a Keep-Alive request.
cleanup({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    Keys = [?SAVE_QS, ?SAVE_PATH, ?SAVE_RECV, ?SAVE_BODY, ?SAVE_BODY_LENGTH,
            ?SAVE_POST, ?SAVE_COOKIE, ?SAVE_FORCE_CLOSE],
    _ = [erase(K) || K <- Keys],
    ok.
%% @spec parse_qs(request()) -> [{Key::string(), Value::string()}]
%% @doc Parse the query string of the URL, caching the result in the
%%      process dictionary for subsequent calls.
parse_qs({?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) ->
    case erlang:get(?SAVE_QS) of
        undefined ->
            {_, QueryString, _} = mochiweb_util:urlsplit_path(RawPath),
            Parsed = mochiweb_util:parse_qs(QueryString),
            put(?SAVE_QS, Parsed),
            Parsed;
        Cached ->
            Cached
    end.
%% @spec get_cookie_value(Key::string, request()) -> string() | undefined
%% @doc Get the value of the given cookie (parses and caches the Cookie
%%      header on first use).
get_cookie_value(Key, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    proplists:get_value(Key, parse_cookie(THIS)).
%% @spec parse_cookie(request()) -> [{Key::string(), Value::string()}]
%% @doc Parse the cookie header, caching the result in the process
%%      dictionary; a missing Cookie header yields [].
parse_cookie({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case erlang:get(?SAVE_COOKIE) of
        undefined ->
            Cookies = case get_header_value("cookie", THIS) of
                          undefined ->
                              [];
                          Value ->
                              mochiweb_cookies:parse_cookie(Value)
                      end,
            put(?SAVE_COOKIE, Cookies),
            Cookies;
        Cached ->
            Cached
    end.
%% @spec parse_post(request()) -> [{Key::string(), Value::string()}]
%% @doc Parse an application/x-www-form-urlencoded form POST. This
%%      has the side-effect of calling recv_body(); the result is cached
%%      in the process dictionary, and non-form content types yield [].
parse_post({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case erlang:get(?SAVE_POST) of
        undefined ->
            Parsed = case recv_body(THIS) of
                         undefined ->
                             [];
                         Binary ->
                             case get_primary_header_value("content-type",THIS) of
                                 "application/x-www-form-urlencoded" ++ _ ->
                                     mochiweb_util:parse_qs(Binary);
                                 _ ->
                                     []
                             end
                     end,
            put(?SAVE_POST, Parsed),
            Parsed;
        Cached ->
            Cached
    end.
%% @spec stream_chunked_body(integer(), fun(), term(), request()) -> term()
%% @doc The function is called for each chunk.
%%      Used internally by read_chunked_body.  Chunks larger than
%%      MaxChunkSize are delivered to Fun in MaxChunkSize pieces; a
%%      {0, Footers} call marks the end of the body.
stream_chunked_body(MaxChunkSize, Fun, FunState,
                    {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case read_chunk_length(THIS) of
        0 ->
            %% Final chunk: pass the trailer (footers) to Fun and stop.
            Fun({0, read_chunk(0, THIS)}, FunState);
        Length when Length > MaxChunkSize ->
            NewState = read_sub_chunks(Length, MaxChunkSize, Fun, FunState, THIS),
            stream_chunked_body(MaxChunkSize, Fun, NewState, THIS);
        Length ->
            NewState = Fun({Length, read_chunk(Length, THIS)}, FunState),
            stream_chunked_body(MaxChunkSize, Fun, NewState, THIS)
    end.
%% Stream a Content-Length delimited body to Fun in packets of at most
%% min(Length, RecBuf) bytes; ends with a {0, <<>>} sentinel call.
stream_unchunked_body(_MaxChunkSize, 0, Fun, FunState, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    Fun({0, <<>>}, FunState);
stream_unchunked_body(MaxChunkSize, Length, Fun, FunState,
                      {?MODULE, [_Socket, Opts, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > 0 ->
    RecBuf = case mochilists:get_value(recbuf, Opts, ?RECBUF_SIZE) of
                 undefined -> %os controlled buffer size
                     MaxChunkSize;
                 Val ->
                     Val
             end,
    PktSize=min(Length,RecBuf),
    Bin = recv(PktSize, THIS),
    NewState = Fun({PktSize, Bin}, FunState),
    stream_unchunked_body(MaxChunkSize, Length - PktSize, Fun, NewState, THIS).
%% @spec read_chunk_length(request()) -> integer()
%% @doc Read the length of the next HTTP chunk (the hex prefix of the
%%      chunk-size line).
read_chunk_length({?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    %% Switch to line mode just for the chunk-size line, then back to raw.
    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, line}])),
    case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
        {ok, Header} ->
            ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])),
            %% Keep only the leading hex digits (stop at CR, LF or space).
            Splitter = fun (C) ->
                               C =/= $\r andalso C =/= $\n andalso C =/= $ 
                       end,
            {Hex, _Rest} = lists:splitwith(Splitter, binary_to_list(Header)),
            mochihex:to_int(Hex);
        _ ->
            exit(normal)
    end.
%% @spec read_chunk(integer(), request()) -> Chunk::binary() | [Footer::binary()]
%% @doc Read in a HTTP chunk of the given length. If Length is 0, then read the
%%      HTTP footers (as a list of binaries, since they're nominal).
read_chunk(0, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, line}])),
    %% Collect trailer lines until the terminating bare CRLF.
    F = fun (F1, Acc) ->
                case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
                    {ok, <<"\r\n">>} ->
                        Acc;
                    {ok, Footer} ->
                        F1(F1, [Footer | Acc]);
                    _ ->
                        exit(normal)
                end
        end,
    Footers = F(F, []),
    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])),
    put(?SAVE_RECV, true),
    Footers;
read_chunk(Length, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    %% Read the chunk data plus its trailing CRLF in a single recv.
    case mochiweb_socket:recv(Socket, 2 + Length, ?IDLE_TIMEOUT) of
        {ok, <<Chunk:Length/binary, "\r\n">>} ->
            Chunk;
        _ ->
            exit(normal)
    end.
%% Deliver an oversized chunk to Fun in MaxChunkSize slices; the final
%% slice (=< MaxChunkSize) goes through read_chunk/2 so the chunk's
%% trailing CRLF is consumed as well.
read_sub_chunks(Length, MaxChunkSize, Fun, FunState,
                {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > MaxChunkSize ->
    Bin = recv(MaxChunkSize, THIS),
    NewState = Fun({size(Bin), Bin}, FunState),
    read_sub_chunks(Length - MaxChunkSize, MaxChunkSize, Fun, NewState, THIS);
read_sub_chunks(Length, _MaxChunkSize, Fun, FunState,
                {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    Fun({Length, read_chunk(Length, THIS)}, FunState).
%% @spec serve_file(Path, DocRoot, request()) -> Response
%% @doc Serve a file relative to DocRoot.
serve_file(Path, DocRoot, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    serve_file(Path, DocRoot, [], THIS).

%% @spec serve_file(Path, DocRoot, ExtraHeaders, request()) -> Response
%% @doc Serve a file relative to DocRoot.  Paths that escape DocRoot
%%      (e.g. via "..") are rejected with a 404.
serve_file(Path, DocRoot, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case mochiweb_util:safe_relative_path(Path) of
        undefined ->
            not_found(ExtraHeaders, THIS);
        RelPath ->
            FullPath = filename:join([DocRoot, RelPath]),
            case filelib:is_dir(FullPath) of
                true ->
                    maybe_redirect(RelPath, FullPath, ExtraHeaders, THIS);
                false ->
                    maybe_serve_file(FullPath, ExtraHeaders, THIS)
            end
    end.
%% Internal API
%% This has the same effect as the DirectoryIndex directive in httpd:
%% the index file served for a directory request.
directory_index(FullPath) ->
    filename:join(FullPath, "index.html").
%% Serve the directory index when the request path is the doc root ([])
%% or already ends in "/"; otherwise 301-redirect to the canonical
%% trailing-slash URL.
maybe_redirect([], FullPath, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    maybe_serve_file(directory_index(FullPath), ExtraHeaders, THIS);
maybe_redirect(RelPath, FullPath, ExtraHeaders,
               {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}=THIS) ->
    case string:right(RelPath, 1) of
        "/" ->
            maybe_serve_file(directory_index(FullPath), ExtraHeaders, THIS);
        _ ->
            %% NOTE(review): the Location URL is built with a fixed http://
            %% scheme and the unvalidated Host header -- confirm this is
            %% acceptable for SSL listeners.
            Host = mochiweb_headers:get_value("host", Headers),
            Location = "http://" ++ Host ++ "/" ++ RelPath ++ "/",
            LocationBin = list_to_binary(Location),
            MoreHeaders = [{"Location", Location},
                           {"Content-Type", "text/html"} | ExtraHeaders],
            Top = <<"<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">"
                    "<html><head>"
                    "<title>301 Moved Permanently</title>"
                    "</head><body>"
                    "<h1>Moved Permanently</h1>"
                    "<p>The document has moved <a href=\"">>,
            Bottom = <<">here</a>.</p></body></html>\n">>,
            Body = <<Top/binary, LocationBin/binary, Bottom/binary>>,
            respond({301, MoreHeaders, Body}, THIS)
    end.
%% Serve File with a guessed Content-Type, honouring If-Modified-Since
%% (304 on an exact RFC 1123 date match); 404 when the file is missing
%% or unreadable.
maybe_serve_file(File, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case file:read_file_info(File) of
        {ok, FileInfo} ->
            LastModified = httpd_util:rfc1123_date(FileInfo#file_info.mtime),
            case get_header_value("if-modified-since", THIS) of
                LastModified ->
                    respond({304, ExtraHeaders, ""}, THIS);
                _ ->
                    case file:open(File, [raw, binary]) of
                        {ok, IoDevice} ->
                            ContentType = mochiweb_util:guess_mime(File),
                            Res = ok({ContentType,
                                      [{"last-modified", LastModified}
                                       | ExtraHeaders],
                                      {file, IoDevice}}, THIS),
                            ok = file:close(IoDevice),
                            Res;
                        _ ->
                            not_found(ExtraHeaders, THIS)
                    end
            end;
        {error, _} ->
            not_found(ExtraHeaders, THIS)
    end.
%% Default headers added to every response: the Server banner and the
%% current Date in RFC 1123 format.
server_headers() ->
    [{"Server", "MochiWeb/1.0 (" ++ ?QUIP ++ ")"},
     {"Date", httpd_util:rfc1123_date()}].
%% Render the status portion of the status line: an integer code gets
%% its standard reason phrase appended; iodata is passed through as-is.
make_code(Code) when is_integer(Code) ->
    [integer_to_list(Code), [" " | httpd_util:reason_phrase(Code)]];
make_code(Io) when is_list(Io); is_binary(Io) ->
    Io.
%% Status-line prefix for the given HTTP version; everything other than
%% {1,0} is reported as HTTP/1.1.
make_version(Version) ->
    case Version of
        {1, 0} -> <<"HTTP/1.0 ">>;
        _Other -> <<"HTTP/1.1 ">>
    end.
%% Turn a parsed Range spec list into {[{Start, End, Body}], TotalSize},
%% dropping invalid ranges.  The first clause reads the parts straight
%% from an open file with a single pread; the second slices an
%% in-memory iolist.
range_parts({file, IoDevice}, Ranges) ->
    Size = mochiweb_io:iodevice_size(IoDevice),
    F = fun (Spec, Acc) ->
                case mochiweb_http:range_skip_length(Spec, Size) of
                    invalid_range ->
                        Acc;
                    V ->
                        [V | Acc]
                end
        end,
    LocNums = lists:foldr(F, [], Ranges),
    {ok, Data} = file:pread(IoDevice, LocNums),
    Bodies = lists:zipwith(fun ({Skip, Length}, PartialBody) ->
                                   case Length of
                                       0 ->
                                           %% Zero-length range: empty body.
                                           {Skip, Skip, <<>>};
                                       _ ->
                                           {Skip, Skip + Length - 1, PartialBody}
                                   end
                           end,
                           LocNums, Data),
    {Bodies, Size};
range_parts(Body0, Ranges) ->
    Body = iolist_to_binary(Body0),
    Size = size(Body),
    F = fun(Spec, Acc) ->
                case mochiweb_http:range_skip_length(Spec, Size) of
                    invalid_range ->
                        Acc;
                    {Skip, Length} ->
                        <<_:Skip/binary, PartialBody:Length/binary, _/binary>> = Body,
                        [{Skip, Skip + Length - 1, PartialBody} | Acc]
                end
        end,
    {lists:foldr(F, [], Ranges), Size}.
( ) ] , request ( ) ) - > [ encoding ( ) ] | bad_accept_encoding_value
%% @type encoding() = string().
%%
%% @doc Returns a list of encodings accepted by a request. Encodings that are
%% not supported by the server will not be included in the return list.
This list is computed from the " Accept - Encoding " header and
%% its elements are ordered, descendingly, according to their Q values.
%%
Section 14.3 of the RFC 2616 ( HTTP 1.1 ) describes the " Accept - Encoding "
%% header and the process of determining which server supported encodings
%% can be used for encoding the body for the request's response.
%%
%% Examples
%%
1 ) For a missing " Accept - Encoding " header :
%% accepted_encodings(["gzip", "identity"]) -> ["identity"]
%%
2 ) For an " Accept - Encoding " header with value " gzip , deflate " :
%% accepted_encodings(["gzip", "identity"]) -> ["gzip", "identity"]
%%
3 ) For an " Accept - Encoding " header with value " gzip;q=0.5 , deflate " :
%% accepted_encodings(["gzip", "deflate", "identity"]) ->
%% ["deflate", "gzip", "identity"]
%%
%% @doc Return the encodings accepted by this request, ordered by
%%      descending Q value and restricted to SupportedEncodings.  A
%%      missing Accept-Encoding header is treated as the empty string,
%%      which still admits "identity".
accepted_encodings(SupportedEncodings, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    AcceptEncodingHeader = case get_header_value("Accept-Encoding", THIS) of
                               undefined ->
                                   "";
                               Value ->
                                   Value
                           end,
    case mochiweb_util:parse_qvalues(AcceptEncodingHeader) of
        invalid_qvalue_string ->
            bad_accept_encoding_value;
        QList ->
            mochiweb_util:pick_accepted_encodings(
              QList, SupportedEncodings, "identity"
             )
    end.
%% @spec accepts_content_type(string() | binary(), request()) -> boolean() | bad_accept_header
%%
%% @doc Determines whether a request accepts a given media type by analyzing its
%% "Accept" header.
%%
%% Examples
%%
1 ) For a missing " Accept " header :
%% accepts_content_type("application/json") -> true
%%
2 ) For an " Accept " header with value " text / plain , application/ * " :
%% accepts_content_type("application/json") -> true
%%
3 ) For an " Accept " header with value " text / plain , * / * ; q=0.0 " :
%% accepts_content_type("application/json") -> false
%%
4 ) For an " Accept " header with value " text / plain ; q=0.5 , * / * ; q=0.1 " :
%% accepts_content_type("application/json") -> true
%%
5 ) For an " Accept " header with value " text/ * ; q=0.0 , * / * " :
%% accepts_content_type("text/plain") -> false
%%
%% @doc Determine whether this request accepts the given media type by
%%      analyzing its "Accept" header (a missing header counts as "*/*").
accepts_content_type(ContentType1, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    %% Strip all whitespace so "text / html" compares equal to "text/html".
    ContentType = re:replace(ContentType1, "\\s", "", [global, {return, list}]),
    AcceptHeader = accept_header(THIS),
    case mochiweb_util:parse_qvalues(AcceptHeader) of
        invalid_qvalue_string ->
            bad_accept_header;
        QList ->
            [MainType, _SubType] = string:tokens(ContentType, "/"),
            SuperType = MainType ++ "/*",
            %% Accepted when some matching pattern has Q > 0 and neither
            %% the exact type nor its type/* pattern is explicitly Q=0.
            lists:any(
              fun({"*/*", Q}) when Q > 0.0 ->
                      true;
                 ({Type, Q}) when Q > 0.0 ->
                      Type =:= ContentType orelse Type =:= SuperType;
                 (_) ->
                      false
              end,
              QList
             ) andalso
                (not lists:member({ContentType, 0.0}, QList)) andalso
                (not lists:member({SuperType, 0.0}, QList))
    end.
( ) | binary ( ) ] , request ( ) ) - > [ string ( ) ] | bad_accept_header
%%
%% @doc Filters which of the given media types this request accepts. This filtering
%% is performed by analyzing the "Accept" header. The returned list is sorted
%% according to the preferences specified in the "Accept" header (higher Q values
first ) . If two or more types have the same preference ( Q value ) , they 're order
%% in the returned list is the same as they're order in the input list.
%%
%% Examples
%%
1 ) For a missing " Accept " header :
%% accepted_content_types(["text/html", "application/json"]) ->
%% ["text/html", "application/json"]
%%
2 ) For an " Accept " header with value " text / html , application/ * " :
accepted_content_types(["application / json " , " text / html " ] ) - >
%% ["application/json", "text/html"]
%%
3 ) For an " Accept " header with value " text / html , * / * ; q=0.0 " :
%% accepted_content_types(["text/html", "application/json"]) ->
%% ["text/html"]
%%
4 ) For an " Accept " header with value " text / html ; q=0.5 , * / * ; q=0.1 " :
%% accepts_content_types(["application/json", "text/html"]) ->
%% ["text/html", "application/json"]
%%
%% @doc Filter which of the given media types this request accepts,
%%      sorted by descending Q value from the "Accept" header; an exact
%%      type match takes precedence over type/*, which takes precedence
%%      over */*.
accepted_content_types(Types1, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    %% Normalize whitespace in the candidate types before lookup.
    Types = lists:map(
              fun(T) -> re:replace(T, "\\s", "", [global, {return, list}]) end,
              Types1),
    AcceptHeader = accept_header(THIS),
    case mochiweb_util:parse_qvalues(AcceptHeader) of
        invalid_qvalue_string ->
            bad_accept_header;
        QList ->
            TypesQ = lists:foldr(
                       fun(T, Acc) ->
                               case proplists:get_value(T, QList) of
                                   undefined ->
                                       [MainType, _SubType] = string:tokens(T, "/"),
                                       case proplists:get_value(MainType ++ "/*", QList) of
                                           undefined ->
                                               case proplists:get_value("*/*", QList) of
                                                   Q when is_float(Q), Q > 0.0 ->
                                                       [{Q, T} | Acc];
                                                   _ ->
                                                       Acc
                                               end;
                                           Q when Q > 0.0 ->
                                               [{Q, T} | Acc];
                                           _ ->
                                               Acc
                                       end;
                                   Q when Q > 0.0 ->
                                       [{Q, T} | Acc];
                                   _ ->
                                       Acc
                               end
                       end,
                       [], Types),
            %% Note: Stable sort. If 2 types have the same Q value we
            %% leave them in the same order as in the input list.
            SortFun = fun({Q1, _}, {Q2, _}) -> Q1 >= Q2 end,
            [Type || {_Q, Type} <- lists:sort(SortFun, TypesQ)]
    end.
%% The request's Accept header value, defaulting to "*/*" when absent.
accept_header({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    Accept = get_header_value("Accept", THIS),
    case Accept =:= undefined of
        true -> "*/*";
        false -> Accept
    end.
%%
%% Tests
%%
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
| null | https://raw.githubusercontent.com/basho/mochiweb/34d3e420c3b74bde56483d8bd2896e03126314bb/src/mochiweb_request.erl | erlang |
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
@doc MochiWeb HTTP Request abstraction.
@type key() = atom() | string() | binary()
@type value() = atom() | string() | binary() | integer()
@type headers(). A mochiweb_headers structure.
@type request() = {mochiweb_request,[_Socket,_Method,_RawPath,_Version,_Headers]}
@type response(). A mochiweb_response tuple module instance.
@type ioheaders() = headers() | [{key(), value()}].
@doc Create a new request instance.
@doc Create a new request instance.
@doc Get the value of a given request header.
@type field() = socket | scheme | method | raw_path | version | headers | peer | path | body_length | range
@doc Return the internal representation of the given field. If
<code>socket</code> is requested on a HTTPS connection, then
an ssl socket will be returned as <code>{ssl, SslSocket}</code>.
You can use <code>SslSocket</code> with the <code>ssl</code>
application, eg: <code>ssl:peercert(SslSocket)</code>.
@doc Dump the internal representation to a "human readable" set of terms
for debugging/inspection purposes.
@doc Send data over the socket.
@doc Receive Length bytes from the client as a binary, with the default
idle timeout.
@doc Receive Length bytes from the client as a binary, with the given
@spec body_length(request()) -> undefined | chunked | unknown_transfer_encoding | integer()
@doc Infer body length from transfer-encoding and content-length headers.
determine the maximum allowed size of a single
chunk.
@doc Start the HTTP response by sending the Code HTTP response and
ResponseHeaders. The server will set header defaults such as Server
and Date if not present in ResponseHeaders.
@doc Start the HTTP response by sending the Code HTTP response and
ResponseHeaders.
@doc Start the HTTP response by sending the Code HTTP response and
ResponseHeaders including a Content-Length of Length. The server
will set header defaults such as Server
and Date if not present in ResponseHeaders.
will set header defaults such as Server
and Date if not present in ResponseHeaders.
@doc Start the HTTP response with start_response, and send Body to the
client (if the get(method) /= 'HEAD'). The Content-Length header
will be set by the Body length, and the server will insert header
defaults.
Only use chunked encoding for HTTP/1.1
For pre-1.1 clients we send the data as-is
without a Content-Length header and without
chunk delimiters. Since the end of the document
is now ambiguous we must force a close.
@spec not_found(request()) -> response()
| ExtraHeaders], <<"Not found.">>})</code>.
response()
full response.
no valid ranges
@doc Return true if the connection must be closed. If false, using
Keep-Alive should be safe.
Connection: close
unread data left on the socket, can't safely continue
@doc Clean up any junk in the process dictionary, required before continuing
a Keep-Alive request.
@doc Parse the query string of the URL.
@spec get_cookie_value(Key::string, request()) -> string() | undefined
@doc Get the value of the given cookie.
@doc Parse the cookie header.
has the side-effect of calling recv_body().
@spec stream_chunked_body(integer(), fun(), term(), request()) -> term()
@doc The function is called for each chunk.
Used internally by read_chunked_body.
os controlled buffer size
@spec read_chunk_length(request()) -> integer()
@doc Read the length of the next HTTP chunk.
@doc Read in a HTTP chunk of the given length. If Length is 0, then read the
HTTP footers (as a list of binaries, since they're nominal).
Internal API
@type encoding() = string().
@doc Returns a list of encodings accepted by a request. Encodings that are
not supported by the server will not be included in the return list.
its elements are ordered, descendingly, according to their Q values.
header and the process of determining which server supported encodings
can be used for encoding the body for the request's response.
Examples
accepted_encodings(["gzip", "identity"]) -> ["identity"]
accepted_encodings(["gzip", "identity"]) -> ["gzip", "identity"]
accepted_encodings(["gzip", "deflate", "identity"]) ->
["deflate", "gzip", "identity"]
@spec accepts_content_type(string() | binary(), request()) -> boolean() | bad_accept_header
@doc Determines whether a request accepts a given media type by analyzing its
"Accept" header.
Examples
accepts_content_type("application/json") -> true
accepts_content_type("application/json") -> true
accepts_content_type("application/json") -> false
accepts_content_type("application/json") -> true
accepts_content_type("text/plain") -> false
@doc Filters which of the given media types this request accepts. This filtering
is performed by analyzing the "Accept" header. The returned list is sorted
according to the preferences specified in the "Accept" header (higher Q values
in the returned list is the same as their order in the input list.
Examples
accepted_content_types(["text/html", "application/json"]) ->
["text/html", "application/json"]
["application/json", "text/html"]
accepted_content_types(["text/html", "application/json"]) ->
["text/html"]
accepts_content_types(["application/json", "text/html"]) ->
["text/html", "application/json"]
same order as in the input list.
Tests
@author Bob Ippolito <bob@mochimedia.com>
@copyright 2007 Mochi Media, Inc.
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
-module(mochiweb_request).
-author('').
-include_lib("kernel/include/file.hrl").
-include("internal.hrl").
-define(QUIP, "Any of you quaids got a smint?").
-export([new/5, new/6]).
-export([get_header_value/2, get_primary_header_value/2, get_combined_header_value/2, get/2, dump/1]).
-export([send/2, recv/2, recv/3, recv_body/1, recv_body/2, stream_body/4, stream_body/5]).
-export([start_response/2, start_response_length/2, start_raw_response/2]).
-export([respond/2, ok/2]).
-export([not_found/1, not_found/2]).
-export([parse_post/1, parse_qs/1]).
-export([should_close/1, cleanup/1]).
-export([parse_cookie/1, get_cookie_value/2]).
-export([serve_file/3, serve_file/4]).
-export([accepted_encodings/2]).
-export([accepts_content_type/2, accepted_content_types/2]).
-define(SAVE_QS, mochiweb_request_qs).
-define(SAVE_PATH, mochiweb_request_path).
-define(SAVE_RECV, mochiweb_request_recv).
-define(SAVE_BODY, mochiweb_request_body).
-define(SAVE_BODY_LENGTH, mochiweb_request_body_length).
-define(SAVE_POST, mochiweb_request_post).
-define(SAVE_COOKIE, mochiweb_request_cookie).
-define(SAVE_FORCE_CLOSE, mochiweb_request_force_close).
5 minute default idle timeout
-define(IDLE_TIMEOUT, 300000).
Maximum recv_body ( ) length of 1 MB
-define(MAX_RECV_BODY, (1024*1024)).
%% @spec new(Socket, Method, RawPath, Version, headers()) -> request()
%% @doc Create a new request instance with an empty option list.
new(Socket, Method, RawPath, Version, Headers) ->
    new(Socket, [], Method, RawPath, Version, Headers).

%% @spec new(Socket, Opts, Method, RawPath, Version, headers()) -> request()
%% @doc Create a new request instance (parameterized/tuple-module style).
new(Socket, Opts, Method, RawPath, Version, Headers) ->
    {?MODULE, [Socket, Opts, Method, RawPath, Version, Headers]}.
%% @spec get_header_value(K, request()) -> undefined | Value
%% @doc Get the value of a given request header.
get_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
    mochiweb_headers:get_value(K, Headers).

%% @doc Get the primary value of a given request header
%%      (see mochiweb_headers:get_primary_value/2).
get_primary_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
    mochiweb_headers:get_primary_value(K, Headers).

%% @doc Get the combined value of a (possibly repeated) request header
%%      (see mochiweb_headers:get_combined_value/2).
get_combined_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
    mochiweb_headers:get_combined_value(K, Headers).
%% @spec get(field(), request()) -> term()
%% @doc Return the internal representation of the given field. If
%%      <code>socket</code> is requested on a HTTPS connection, then
%%      an ssl socket will be returned as <code>{ssl, SslSocket}</code>.
get(socket, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    Socket;
get(scheme, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    case mochiweb_socket:type(Socket) of
        plain ->
            http;
        ssl ->
            https
    end;
get(method, {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}) ->
    Method;
get(raw_path, {?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) ->
    RawPath;
get(version, {?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}) ->
    Version;
get(headers, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) ->
    Headers;
get(peer, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    %% For private (10.x.x.x) or loopback peers, trust the last entry of the
    %% x-forwarded-for header when present.
    case mochiweb_socket:peername(Socket) of
        {ok, {Addr={10, _, _, _}, _Port}} ->
            case get_header_value("x-forwarded-for", THIS) of
                undefined ->
                    inet_parse:ntoa(Addr);
                Hosts ->
                    string:strip(lists:last(string:tokens(Hosts, ",")))
            end;
        {ok, {{127, 0, 0, 1}, _Port}} ->
            case get_header_value("x-forwarded-for", THIS) of
                undefined ->
                    "127.0.0.1";
                Hosts ->
                    string:strip(lists:last(string:tokens(Hosts, ",")))
            end;
        {ok, {Addr, _Port}} ->
            inet_parse:ntoa(Addr);
        {error, enotconn} ->
            exit(normal)
    end;
get(path, {?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) ->
    %% The unquoted path is cached in the process dictionary.
    case erlang:get(?SAVE_PATH) of
        undefined ->
            {Path0, _, _} = mochiweb_util:urlsplit_path(RawPath),
            Path = mochiweb_util:unquote(Path0),
            put(?SAVE_PATH, Path),
            Path;
        Cached ->
            Cached
    end;
get(body_length, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    %% Cached so body_length/1 is computed at most once per request.
    case erlang:get(?SAVE_BODY_LENGTH) of
        undefined ->
            BodyLength = body_length(THIS),
            put(?SAVE_BODY_LENGTH, {cached, BodyLength}),
            BodyLength;
        {cached, Cached} ->
            Cached
    end;
get(range, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case get_header_value(range, THIS) of
        undefined ->
            undefined;
        RawRange ->
            mochiweb_http:parse_range_request(RawRange)
    end;
get(opts, {?MODULE, [_Socket, Opts, _Method, _RawPath, _Version, _Headers]}) ->
    Opts.
%% @spec dump(request()) -> {mochiweb_request, [{atom(), term()}]}
%% @doc Dump the internal representation to a "human readable" set of terms
%%      for debugging/inspection purposes. The socket is deliberately omitted.
dump({?MODULE, [_Socket, Opts, Method, RawPath, Version, Headers]}) ->
    {?MODULE, [{method, Method},
               {version, Version},
               {raw_path, RawPath},
               {opts, Opts},
               {headers, mochiweb_headers:to_list(Headers)}]}.
%% @spec send(iodata(), request()) -> ok
%% @doc Send data over the socket; exits the process normally on send failure.
send(Data, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    case mochiweb_socket:send(Socket, Data) of
        ok ->
            ok;
        _ ->
            exit(normal)
    end.
%% @spec recv(integer(), request()) -> binary()
%% @doc Receive Length bytes from the client as a binary, with the default
%%      idle timeout.
recv(Length, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    recv(Length, ?IDLE_TIMEOUT, THIS).

%% @spec recv(integer(), integer(), request()) -> binary()
%% @doc Receive Length bytes from the client as a binary, with the given
%%      Timeout in msec. Exits normally on receive failure.
recv(Length, Timeout, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    case mochiweb_socket:recv(Socket, Length, Timeout) of
        {ok, Data} ->
            %% Record that body data was consumed (used by should_close/1).
            put(?SAVE_RECV, true),
            Data;
        _ ->
            exit(normal)
    end.
%% @spec body_length(request()) -> undefined | chunked | unknown_transfer_encoding | integer()
%% @doc Infer body length from transfer-encoding and content-length headers.
body_length({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case get_header_value("transfer-encoding", THIS) of
        undefined ->
            case get_combined_header_value("content-length", THIS) of
                undefined ->
                    undefined;
                Length ->
                    list_to_integer(Length)
            end;
        "chunked" ->
            chunked;
        Unknown ->
            {unknown_transfer_encoding, Unknown}
    end.
%% @spec recv_body(request()) -> binary()
%% @doc Receive the body of the HTTP request (defined by Content-Length).
%%      Will only receive up to the default max-body length of 1MB.
recv_body({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    recv_body(?MAX_RECV_BODY, THIS).

%% @spec recv_body(integer(), request()) -> binary()
%% @doc Receive the body of the HTTP request (defined by Content-Length).
%%      Will receive up to MaxBody bytes; the result is cached in the
%%      process dictionary.
recv_body(MaxBody, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case erlang:get(?SAVE_BODY) of
        undefined ->
            %% we could use a sane constant for chunk size
            Body = stream_body(?MAX_RECV_BODY, fun
                ({0, _ChunkedFooter}, {_LengthAcc, BinAcc}) ->
                    %% Final chunk: assemble the accumulated binaries.
                    iolist_to_binary(lists:reverse(BinAcc));
                ({Length, Bin}, {LengthAcc, BinAcc}) ->
                    NewLength = Length + LengthAcc,
                    if NewLength > MaxBody ->
                           exit({body_too_large, chunked});
                       true ->
                           {NewLength, [Bin | BinAcc]}
                    end
                end, {0, []}, MaxBody, THIS),
            put(?SAVE_BODY, Body),
            Body;
        Cached -> Cached
    end.
%% @doc Stream the request body through ChunkFun with no overall length limit.
stream_body(MaxChunkSize, ChunkFun, FunState, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    stream_body(MaxChunkSize, ChunkFun, FunState, undefined, THIS).

%% @doc Stream the request body through ChunkFun, honoring "Expect:
%%      100-continue" and exiting when the declared length exceeds
%%      MaxBodyLength.
stream_body(MaxChunkSize, ChunkFun, FunState, MaxBodyLength,
            {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    Expect = case get_header_value("expect", THIS) of
                 undefined ->
                     undefined;
                 Value when is_list(Value) ->
                     string:to_lower(Value)
             end,
    case Expect of
        "100-continue" ->
            %% Client is waiting for permission before sending the body.
            _ = start_raw_response({100, gb_trees:empty()}, THIS),
            ok;
        _Else ->
            ok
    end,
    case body_length(THIS) of
        undefined ->
            undefined;
        {unknown_transfer_encoding, Unknown} ->
            exit({unknown_transfer_encoding, Unknown});
        chunked ->
            %% In this case the MaxBody is actually used to
            %% determine the maximum allowed size of a single
            %% chunk.
            stream_chunked_body(MaxChunkSize, ChunkFun, FunState, THIS);
        0 ->
            <<>>;
        Length when is_integer(Length) ->
            case MaxBodyLength of
                MaxBodyLength when is_integer(MaxBodyLength), MaxBodyLength < Length ->
                    exit({body_too_large, content_length});
                _ ->
                    stream_unchunked_body(MaxChunkSize, Length, ChunkFun, FunState, THIS)
            end
    end.
%% @spec start_response({integer(), ioheaders()}, request()) -> response()
%% @doc Start the HTTP response by sending the Code HTTP response and
%%      ResponseHeaders. The server will set header defaults such as Server
%%      and Date if not present in ResponseHeaders.
start_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    start_raw_response({Code, ResponseHeaders}, THIS).

%% @spec start_raw_response({integer(), headers()}, request()) -> response()
%% @doc Start the HTTP response by sending the Code HTTP response and
%%      ResponseHeaders.
start_raw_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    {Header, Response} = format_response_header({Code, ResponseHeaders}, THIS),
    send(Header, THIS),
    Response.

%% @spec start_response_length({integer(), ioheaders(), integer()}, request()) -> response()
%% @doc Start the HTTP response by sending the Code HTTP response and
%%      ResponseHeaders including a Content-Length of Length.
start_response_length({Code, ResponseHeaders, Length},
                      {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = mochiweb_headers:enter("Content-Length", Length, HResponse),
    start_response({Code, HResponse1}, THIS).
%% @spec format_response_header({integer(), ioheaders()} | {integer(), ioheaders(), integer()}, request()) -> {iolist(), response()}
%% @doc Format the HTTP response header, including the Code HTTP response and
%%      ResponseHeaders including an optional Content-Length of Length. The
%%      server will set header defaults such as Server and Date if not present.
format_response_header({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}=THIS) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = mochiweb_headers:default_from_list(server_headers(), HResponse),
    %% Advertise "Connection: close" when this request cannot be kept alive.
    HResponse2 = case should_close(THIS) of
                     true ->
                         mochiweb_headers:enter("Connection", "close", HResponse1);
                     false ->
                         HResponse1
                 end,
    F = fun ({K, V}, Acc) ->
                [mochiweb_util:make_io(K), <<": ">>, V, <<"\r\n">> | Acc]
        end,
    End = lists:foldl(F, [<<"\r\n">>], mochiweb_headers:to_list(HResponse2)),
    Response = mochiweb:new_response({THIS, Code, HResponse2}),
    {[make_version(Version), make_code(Code), <<"\r\n">> | End], Response};
format_response_header({Code, ResponseHeaders, Length},
                       {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = mochiweb_headers:enter("Content-Length", Length, HResponse),
    format_response_header({Code, HResponse1}, THIS).
%% @spec respond({integer(), ioheaders(), iodata() | chunked | {file, IoDevice}}, request()) -> response()
%% @doc Start the HTTP response with start_response, and send Body to the
%%      client (if the get(method) /= 'HEAD'). The Content-Length header
%%      will be set by the Body length, and the server will insert header
%%      defaults.
respond({Code, ResponseHeaders, {file, IoDevice}},
        {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}=THIS) ->
    Length = mochiweb_io:iodevice_size(IoDevice),
    Response = start_response_length({Code, ResponseHeaders, Length}, THIS),
    case Method of
        'HEAD' ->
            ok;
        _ ->
            mochiweb_io:iodevice_stream(fun(IOData) -> send(IOData, THIS) end, IoDevice)
    end,
    Response;
respond({Code, ResponseHeaders, chunked}, {?MODULE, [_Socket, _Opts, Method, _RawPath, Version, _Headers]}=THIS) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    HResponse1 = case Method of
                     'HEAD' ->
                         %% This is what Google does, http://www.google.com/
                         %% is chunked but HEAD gets Content-Length: 0.
                         %% The RFC is ambiguous so emulating Google is smart.
                         mochiweb_headers:enter("Content-Length", "0",
                                                HResponse);
                     _ when Version >= {1, 1} ->
                         %% Only use chunked encoding for HTTP/1.1
                         mochiweb_headers:enter("Transfer-Encoding", "chunked",
                                                HResponse);
                     _ ->
                         %% For pre-1.1 clients we send the data as-is
                         %% without a Content-Length header and without
                         %% chunk delimiters. Since the end of the document
                         %% is now ambiguous we must force a close.
                         put(?SAVE_FORCE_CLOSE, true),
                         HResponse
                 end,
    start_response({Code, HResponse1}, THIS);
respond({Code, ResponseHeaders, Body}, {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}=THIS) ->
    {Header, Response} = format_response_header({Code, ResponseHeaders, iolist_size(Body)}, THIS),
    case Method of
        'HEAD' -> send(Header, THIS);
        _ -> send([Header, Body], THIS)
    end,
    Response.
%% @spec not_found(request()) -> response()
%% @doc Alias for <code>not_found([])</code>.
not_found({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    not_found([], THIS).

%% @spec not_found(ExtraHeaders, request()) -> response()
%% @doc Alias for <code>respond({404, [{"Content-Type", "text/plain"}
%%      | ExtraHeaders], <<"Not found.">>})</code>.
not_found(ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    respond({404, [{"Content-Type", "text/plain"} | ExtraHeaders],
             <<"Not found.">>}, THIS).
%% @spec ok({value(), iodata()} | {value(), ioheaders(), iodata() | {file, IoDevice}}, request()) -> response()
%% @doc respond({200, [{"Content-Type", ContentType} | Headers], Body}),
%%      honoring a Range header when the body supports it.
ok({ContentType, Body}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    ok({ContentType, [], Body}, THIS);
ok({ContentType, ResponseHeaders, Body}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    HResponse = mochiweb_headers:make(ResponseHeaders),
    case THIS:get(range) of
        X when (X =:= undefined orelse X =:= fail) orelse Body =:= chunked ->
            %% Range header not supported when chunked, return 200 and provide
            %% full response.
            HResponse1 = mochiweb_headers:enter("Content-Type", ContentType,
                                                HResponse),
            respond({200, HResponse1, Body}, THIS);
        Ranges ->
            {PartList, Size} = range_parts(Body, Ranges),
            case PartList of
                [] -> %% no valid ranges
                    HResponse1 = mochiweb_headers:enter("Content-Type",
                                                        ContentType,
                                                        HResponse),
                    %% could be 416, for now we'll just return 200
                    respond({200, HResponse1, Body}, THIS);
                PartList ->
                    {RangeHeaders, RangeBody} =
                        mochiweb_multipart:parts_to_body(PartList, ContentType, Size),
                    HResponse1 = mochiweb_headers:enter_from_list(
                                   [{"Accept-Ranges", "bytes"} |
                                    RangeHeaders],
                                   HResponse),
                    respond({206, HResponse1, RangeBody}, THIS)
            end
    end.
%% @spec should_close(request()) -> bool()
%% @doc Return true if the connection must be closed. If false, using
%%      Keep-Alive should be safe.
should_close({?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}=THIS) ->
    ForceClose = erlang:get(?SAVE_FORCE_CLOSE) =/= undefined,
    DidNotRecv = erlang:get(?SAVE_RECV) =:= undefined,
    ForceClose orelse Version < {1, 0}
        %% Connection: close
        orelse is_close(get_header_value("connection", THIS))
        %% HTTP 1.0 requires Connection: Keep-Alive
        orelse (Version =:= {1, 0}
                andalso get_header_value("connection", THIS) =/= "Keep-Alive")
        %% unread data left on the socket, can't safely continue
        orelse (DidNotRecv
                andalso get_combined_header_value("content-length", THIS) =/= undefined
                andalso list_to_integer(get_combined_header_value("content-length", THIS)) > 0)
        orelse (DidNotRecv
                andalso get_header_value("transfer-encoding", THIS) =:= "chunked").
%% Case-insensitive test for a "Connection: close" header value; any
%% non-5-character or non-list value is not "close".
is_close(ConnectionHeader) ->
    case ConnectionHeader of
        "close" ->
            true;
        [_, _, _, _, _] = Word ->
            string:to_lower(Word) =:= "close";
        _ ->
            false
    end.
%% @spec cleanup(request()) -> ok
%% @doc Clean up any junk in the process dictionary, required before continuing
%%      a Keep-Alive request.
cleanup({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    L = [?SAVE_QS, ?SAVE_PATH, ?SAVE_RECV, ?SAVE_BODY, ?SAVE_BODY_LENGTH,
         ?SAVE_POST, ?SAVE_COOKIE, ?SAVE_FORCE_CLOSE],
    lists:foreach(fun(K) ->
                          erase(K)
                  end, L),
    ok.
%% @spec parse_qs(request()) -> [{Key::string(), Value::string()}]
%% @doc Parse the query string of the URL; the result is cached in the
%%      process dictionary.
parse_qs({?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) ->
    case erlang:get(?SAVE_QS) of
        undefined ->
            {_, QueryString, _} = mochiweb_util:urlsplit_path(RawPath),
            Parsed = mochiweb_util:parse_qs(QueryString),
            put(?SAVE_QS, Parsed),
            Parsed;
        Cached ->
            Cached
    end.
%% @spec get_cookie_value(Key::string, request()) -> string() | undefined
%% @doc Get the value of the given cookie.
get_cookie_value(Key, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    proplists:get_value(Key, parse_cookie(THIS)).
%% @spec parse_cookie(request()) -> [{Key::string(), Value::string()}]
%% @doc Parse the cookie header; the result is cached in the process
%%      dictionary.
parse_cookie({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case erlang:get(?SAVE_COOKIE) of
        undefined ->
            Cookies = case get_header_value("cookie", THIS) of
                          undefined ->
                              [];
                          Value ->
                              mochiweb_cookies:parse_cookie(Value)
                      end,
            put(?SAVE_COOKIE, Cookies),
            Cookies;
        Cached ->
            Cached
    end.
%% @spec parse_post(request()) -> [{Key::string(), Value::string()}]
%% @doc Parse an application/x-www-form-urlencoded form POST. This
%%      has the side-effect of calling recv_body(); the result is cached in
%%      the process dictionary.
parse_post({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case erlang:get(?SAVE_POST) of
        undefined ->
            Parsed = case recv_body(THIS) of
                         undefined ->
                             [];
                         Binary ->
                             case get_primary_header_value("content-type", THIS) of
                                 "application/x-www-form-urlencoded" ++ _ ->
                                     mochiweb_util:parse_qs(Binary);
                                 _ ->
                                     []
                             end
                     end,
            put(?SAVE_POST, Parsed),
            Parsed;
        Cached ->
            Cached
    end.
%% @spec stream_chunked_body(integer(), fun(), term(), request()) -> term()
%% @doc The function is called for each chunk.
%%      Used internally by read_chunked_body.
stream_chunked_body(MaxChunkSize, Fun, FunState,
                    {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case read_chunk_length(THIS) of
        0 ->
            %% Last chunk: pass the trailing footers to the callback.
            Fun({0, read_chunk(0, THIS)}, FunState);
        Length when Length > MaxChunkSize ->
            %% Oversized chunk: deliver it in MaxChunkSize slices.
            NewState = read_sub_chunks(Length, MaxChunkSize, Fun, FunState, THIS),
            stream_chunked_body(MaxChunkSize, Fun, NewState, THIS);
        Length ->
            NewState = Fun({Length, read_chunk(Length, THIS)}, FunState),
            stream_chunked_body(MaxChunkSize, Fun, NewState, THIS)
    end.
%% Stream a Content-Length delimited body to Fun in packets no larger than
%% the socket receive buffer; the final callback receives {0, <<>>}.
stream_unchunked_body(_MaxChunkSize, 0, Fun, FunState, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    Fun({0, <<>>}, FunState);
stream_unchunked_body(MaxChunkSize, Length, Fun, FunState,
                      {?MODULE, [_Socket, Opts, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > 0 ->
    RecBuf = case mochilists:get_value(recbuf, Opts, ?RECBUF_SIZE) of
                 undefined -> %% os controlled buffer size
                     MaxChunkSize;
                 Val ->
                     Val
             end,
    PktSize = min(Length, RecBuf),
    Bin = recv(PktSize, THIS),
    NewState = Fun({PktSize, Bin}, FunState),
    stream_unchunked_body(MaxChunkSize, Length - PktSize, Fun, NewState, THIS).
%% @spec read_chunk_length(request()) -> integer()
%% @doc Read the length of the next HTTP chunk.
read_chunk_length({?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, line}])),
    case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
        {ok, Header} ->
            ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])),
            %% Take the leading hex digits, stopping at CR, LF or space.
            %% $\s is used instead of a bare "$ " so the space literal cannot
            %% be mangled by whitespace-trimming tools.
            Splitter = fun (C) ->
                               C =/= $\r andalso C =/= $\n andalso C =/= $\s
                       end,
            {Hex, _Rest} = lists:splitwith(Splitter, binary_to_list(Header)),
            mochihex:to_int(Hex);
        _ ->
            exit(normal)
    end.
%% @spec read_chunk(integer(), request()) -> Chunk::binary() | [Footer::binary()]
%% @doc Read in a HTTP chunk of the given length. If Length is 0, then read the
%%      HTTP footers (as a list of binaries, since they're nominal).
read_chunk(0, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, line}])),
    %% Accumulate trailer lines until the terminating blank line.
    F = fun (F1, Acc) ->
                case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
                    {ok, <<"\r\n">>} ->
                        Acc;
                    {ok, Footer} ->
                        F1(F1, [Footer | Acc]);
                    _ ->
                        exit(normal)
                end
        end,
    Footers = F(F, []),
    ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])),
    put(?SAVE_RECV, true),
    Footers;
read_chunk(Length, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) ->
    %% Read Length bytes plus the trailing CRLF chunk delimiter.
    case mochiweb_socket:recv(Socket, 2 + Length, ?IDLE_TIMEOUT) of
        {ok, <<Chunk:Length/binary, "\r\n">>} ->
            Chunk;
        _ ->
            exit(normal)
    end.
%% Read an oversized chunk in MaxChunkSize slices, feeding each slice to Fun;
%% the final slice (=< MaxChunkSize) is read via read_chunk/2 so the trailing
%% CRLF delimiter is consumed.
read_sub_chunks(Length, MaxChunkSize, Fun, FunState,
                {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > MaxChunkSize ->
    Bin = recv(MaxChunkSize, THIS),
    NewState = Fun({size(Bin), Bin}, FunState),
    read_sub_chunks(Length - MaxChunkSize, MaxChunkSize, Fun, NewState, THIS);
read_sub_chunks(Length, _MaxChunkSize, Fun, FunState,
                {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    Fun({Length, read_chunk(Length, THIS)}, FunState).
%% @spec serve_file(Path, DocRoot, request()) -> Response
%% @doc Serve a file relative to DocRoot.
serve_file(Path, DocRoot, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    serve_file(Path, DocRoot, [], THIS).

%% @spec serve_file(Path, DocRoot, ExtraHeaders, request()) -> Response
%% @doc Serve a file relative to DocRoot.
serve_file(Path, DocRoot, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case mochiweb_util:safe_relative_path(Path) of
        undefined ->
            %% Reject paths that would escape DocRoot.
            not_found(ExtraHeaders, THIS);
        RelPath ->
            FullPath = filename:join([DocRoot, RelPath]),
            case filelib:is_dir(FullPath) of
                true ->
                    maybe_redirect(RelPath, FullPath, ExtraHeaders, THIS);
                false ->
                    maybe_serve_file(FullPath, ExtraHeaders, THIS)
            end
    end.
%% Internal API
%% This has the same effect as the DirectoryIndex directive in httpd
directory_index(FullPath) ->
    filename:join([FullPath, "index.html"]).
%% Serve the directory index for an empty relative path, or 301-redirect
%% "dir" to "dir/" so relative links inside the index resolve correctly.
maybe_redirect([], FullPath, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    maybe_serve_file(directory_index(FullPath), ExtraHeaders, THIS);
maybe_redirect(RelPath, FullPath, ExtraHeaders,
               {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}=THIS) ->
    case string:right(RelPath, 1) of
        "/" ->
            %% Trailing slash already present: serve the index directly.
            maybe_serve_file(directory_index(FullPath), ExtraHeaders, THIS);
        _ ->
            Host = mochiweb_headers:get_value("host", Headers),
            Location = "http://" ++ Host ++ "/" ++ RelPath ++ "/",
            LocationBin = list_to_binary(Location),
            MoreHeaders = [{"Location", Location},
                           {"Content-Type", "text/html"} | ExtraHeaders],
            Top = <<"<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">"
                    "<html><head>"
                    "<title>301 Moved Permanently</title>"
                    "</head><body>"
                    "<h1>Moved Permanently</h1>"
                    "<p>The document has moved <a href=\"">>,
            Bottom = <<">here</a>.</p></body></html>\n">>,
            Body = <<Top/binary, LocationBin/binary, Bottom/binary>>,
            respond({301, MoreHeaders, Body}, THIS)
    end.
%% Serve File if it exists and is readable: 304 when If-Modified-Since matches
%% the file mtime exactly, otherwise 200 with a guessed MIME type; 404 on any
%% failure to stat or open the file.
maybe_serve_file(File, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case file:read_file_info(File) of
        {ok, FileInfo} ->
            LastModified = httpd_util:rfc1123_date(FileInfo#file_info.mtime),
            case get_header_value("if-modified-since", THIS) of
                LastModified ->
                    %% Client copy is current: no body needed.
                    respond({304, ExtraHeaders, ""}, THIS);
                _ ->
                    case file:open(File, [raw, binary]) of
                        {ok, IoDevice} ->
                            ContentType = mochiweb_util:guess_mime(File),
                            Res = ok({ContentType,
                                      [{"last-modified", LastModified}
                                       | ExtraHeaders],
                                      {file, IoDevice}}, THIS),
                            ok = file:close(IoDevice),
                            Res;
                        _ ->
                            not_found(ExtraHeaders, THIS)
                    end
            end;
        {error, _} ->
            not_found(ExtraHeaders, THIS)
    end.
%% Default headers merged into every response unless already present
%% (see format_response_header/2).
server_headers() ->
    [{"Server", "MochiWeb/1.0 (" ++ ?QUIP ++ ")"},
     {"Date", httpd_util:rfc1123_date()}].
%% Render the status portion of the response line: an integer code becomes
%% "Code Reason-Phrase"; a caller-supplied iolist/binary is passed through.
make_code(Code) when is_integer(Code) ->
    Reason = httpd_util:reason_phrase(Code),
    [integer_to_list(Code), [" " | Reason]];
make_code(Custom) when is_list(Custom); is_binary(Custom) ->
    Custom.
%% Render the HTTP status-line version token; anything other than {1,0}
%% is reported as HTTP/1.1.
make_version(Version) ->
    case Version of
        {1, 0} -> <<"HTTP/1.0 ">>;
        _ -> <<"HTTP/1.1 ">>
    end.
%% Given a body ({file, IoDevice} or iodata) and a list of range specs,
%% return {[{Start, End, PartialBody}], TotalSize}; invalid ranges are
%% silently dropped.
range_parts({file, IoDevice}, Ranges) ->
    Size = mochiweb_io:iodevice_size(IoDevice),
    F = fun (Spec, Acc) ->
                case mochiweb_http:range_skip_length(Spec, Size) of
                    invalid_range ->
                        Acc;
                    V ->
                        [V | Acc]
                end
        end,
    LocNums = lists:foldr(F, [], Ranges),
    {ok, Data} = file:pread(IoDevice, LocNums),
    Bodies = lists:zipwith(fun ({Skip, Length}, PartialBody) ->
                                   case Length of
                                       0 ->
                                           %% zero-length range: empty body
                                           {Skip, Skip, <<>>};
                                       _ ->
                                           {Skip, Skip + Length - 1, PartialBody}
                                   end
                           end,
                           LocNums, Data),
    {Bodies, Size};
range_parts(Body0, Ranges) ->
    Body = iolist_to_binary(Body0),
    Size = size(Body),
    F = fun(Spec, Acc) ->
                case mochiweb_http:range_skip_length(Spec, Size) of
                    invalid_range ->
                        Acc;
                    {Skip, Length} ->
                        <<_:Skip/binary, PartialBody:Length/binary, _/binary>> = Body,
                        [{Skip, Skip + Length - 1, PartialBody} | Acc]
                end
        end,
    {lists:foldr(F, [], Ranges), Size}.
%% @type encoding() = string().
%%
%% @spec accepted_encodings([encoding()], request()) -> [encoding()] | bad_accept_encoding_value
%% @doc Returns a list of encodings accepted by a request. Encodings that are
%%      not supported by the server will not be included in the return list.
%%      The list is ordered, descendingly, according to the Q values of the
%%      "Accept-Encoding" header; a missing header is treated as "".
accepted_encodings(SupportedEncodings, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    AcceptEncodingHeader = case get_header_value("Accept-Encoding", THIS) of
                               undefined ->
                                   "";
                               Value ->
                                   Value
                           end,
    case mochiweb_util:parse_qvalues(AcceptEncodingHeader) of
        invalid_qvalue_string ->
            bad_accept_encoding_value;
        QList ->
            %% "identity" is the fallback encoding when none is negotiated.
            mochiweb_util:pick_accepted_encodings(
              QList, SupportedEncodings, "identity"
            )
    end.
%% @spec accepts_content_type(string() | binary(), request()) -> boolean() | bad_accept_header
%% @doc Determines whether a request accepts a given media type by analyzing
%%      its "Accept" header (a missing header is treated as "*/*"). The type
%%      is accepted when it, its "type/*" wildcard, or "*/*" carries a
%%      positive Q value and neither the type nor its wildcard is explicitly
%%      rejected with q=0.
accepts_content_type(ContentType1, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    %% Normalize whitespace so the type compares equal to parse_qvalues output.
    ContentType = re:replace(ContentType1, "\\s", "", [global, {return, list}]),
    AcceptHeader = accept_header(THIS),
    case mochiweb_util:parse_qvalues(AcceptHeader) of
        invalid_qvalue_string ->
            bad_accept_header;
        QList ->
            [MainType, _SubType] = string:tokens(ContentType, "/"),
            SuperType = MainType ++ "/*",
            lists:any(
              fun({"*/*", Q}) when Q > 0.0 ->
                      true;
                 ({Type, Q}) when Q > 0.0 ->
                      Type =:= ContentType orelse Type =:= SuperType;
                 (_) ->
                      false
              end,
              QList
            ) andalso
                (not lists:member({ContentType, 0.0}, QList)) andalso
                (not lists:member({SuperType, 0.0}, QList))
    end.
%% @spec accepted_content_types([string() | binary()], request()) -> [string()] | bad_accept_header
%% @doc Filters which of the given media types this request accepts. This
%%      filtering is performed by analyzing the "Accept" header. The returned
%%      list is sorted according to the preferences specified in the header
%%      (higher Q values first). If two or more types have the same preference
%%      (Q value), their order in the returned list is the same as their order
%%      in the input list. Returns bad_accept_header when the header cannot
%%      be parsed.
accepted_content_types(Types1, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    %% Strip all whitespace from the candidate types so they compare equal to
    %% the normalized values produced by mochiweb_util:parse_qvalues/1.
    Types = lists:map(
        fun(T) -> re:replace(T, "\\s", "", [global, {return, list}]) end,
        Types1),
    AcceptHeader = accept_header(THIS),
    case mochiweb_util:parse_qvalues(AcceptHeader) of
        invalid_qvalue_string ->
            bad_accept_header;
        QList ->
            %% For each candidate look up its Q value, falling back to the
            %% "type/*" wildcard and then "*/*"; keep it only when Q > 0.0.
            TypesQ = lists:foldr(
                fun(T, Acc) ->
                    case proplists:get_value(T, QList) of
                        undefined ->
                            [MainType, _SubType] = string:tokens(T, "/"),
                            case proplists:get_value(MainType ++ "/*", QList) of
                                undefined ->
                                    case proplists:get_value("*/*", QList) of
                                        Q when is_float(Q), Q > 0.0 ->
                                            [{Q, T} | Acc];
                                        _ ->
                                            Acc
                                    end;
                                Q when Q > 0.0 ->
                                    [{Q, T} | Acc];
                                _ ->
                                    Acc
                            end;
                        Q when Q > 0.0 ->
                            [{Q, T} | Acc];
                        _ ->
                            Acc
                    end
                end,
                [], Types),
            %% Note: Stable sort. If 2 types have the same Q value we leave
            %% them in the same order as in the input list.
            SortFun = fun({Q1, _}, {Q2, _}) -> Q1 >= Q2 end,
            [Type || {_Q, Type} <- lists:sort(SortFun, TypesQ)]
    end.
%% Return the raw "Accept" header value; a missing header defaults to "*/*"
%% (accept anything).
accept_header({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) ->
    case get_header_value("Accept", THIS) of
        undefined ->
            "*/*";
        Value ->
            Value
    end.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.